blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_name | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_event_created_at | gha_created_at | gha_language | src_encoding | language | is_vendor | is_generated | length_bytes | extension | content | authors | author
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
a553fa36ec58507d0007583c057fe7610606dc55 | a8d68074db5c2b2697650ed0281979d3e00cf5a8 | /Nyspider/www.ukers.cn/ukers.py | e92d125aff6d9367e604e49ebd06fccde217d7bb | [] | no_license | 15807857476/bogdata-2 | 9595609ea2ae5ae0a48c511f911df2498456467e | 1934cdfa234b77ca91e349b84688db113ff39e8c | refs/heads/master | 2023-05-26T19:10:18.439269 | 2019-05-24T02:50:41 | 2019-05-24T02:50:41 | 188,327,526 | 3 | 1 | null | 2023-05-22T21:37:27 | 2019-05-24T00:53:28 | Python | UTF-8 | Python | false | false | 18,022 | py | import requests
from bs4 import BeautifulSoup
import json
from PyQt5 import QtCore, QtGui, QtWidgets
import time
import datetime
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(1102, 733)
self.centralWidget = QtWidgets.QWidget(MainWindow)
self.centralWidget.setObjectName("centralWidget")
self.tableWidget = QtWidgets.QTableWidget(self.centralWidget)
self.tableWidget.setGeometry(QtCore.QRect(10, 220, 1081, 481))
self.tableWidget.setObjectName("tableWidget")
self.tableWidget.setColumnCount(8)
self.tableWidget.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(7, item)
self.label = QtWidgets.QLabel(self.centralWidget)
self.label.setGeometry(QtCore.QRect(10, 60, 41, 31))
self.label.setObjectName("label")
self.name_lineEdit = QtWidgets.QLineEdit(self.centralWidget)
self.name_lineEdit.setGeometry(QtCore.QRect(80, 60, 101, 31))
self.name_lineEdit.setObjectName("name_lineEdit")
self.label_2 = QtWidgets.QLabel(self.centralWidget)
self.label_2.setGeometry(QtCore.QRect(200, 60, 71, 31))
self.label_2.setObjectName("label_2")
self.person_num_lineEdit = QtWidgets.QLineEdit(self.centralWidget)
self.person_num_lineEdit.setGeometry(QtCore.QRect(270, 60, 71, 31))
self.person_num_lineEdit.setObjectName("person_num_lineEdit")
self.label_3 = QtWidgets.QLabel(self.centralWidget)
self.label_3.setGeometry(QtCore.QRect(350, 60, 41, 31))
self.label_3.setObjectName("label_3")
self.label_4 = QtWidgets.QLabel(self.centralWidget)
self.label_4.setGeometry(QtCore.QRect(400, 66, 81, 21))
self.label_4.setObjectName("label_4")
self.tang1_lineEdit = QtWidgets.QLineEdit(self.centralWidget)
self.tang1_lineEdit.setGeometry(QtCore.QRect(480, 60, 81, 31))
self.tang1_lineEdit.setObjectName("tang1_lineEdit")
self.tang1_lineEdit_2 = QtWidgets.QLineEdit(self.centralWidget)
self.tang1_lineEdit_2.setGeometry(QtCore.QRect(560, 60, 91, 31))
self.tang1_lineEdit_2.setObjectName("tang1_lineEdit_2")
self.tang1_lineEdit_3 = QtWidgets.QLineEdit(self.centralWidget)
self.tang1_lineEdit_3.setGeometry(QtCore.QRect(650, 60, 81, 31))
self.tang1_lineEdit_3.setObjectName("tang1_lineEdit_3")
self.label_5 = QtWidgets.QLabel(self.centralWidget)
self.label_5.setGeometry(QtCore.QRect(750, 66, 71, 21))
self.label_5.setObjectName("label_5")
self.label_6 = QtWidgets.QLabel(self.centralWidget)
self.label_6.setGeometry(QtCore.QRect(946, 60, 16, 31))
self.label_6.setObjectName("label_6")
self.label_7 = QtWidgets.QLabel(self.centralWidget)
self.label_7.setGeometry(QtCore.QRect(10, 110, 51, 31))
self.label_7.setObjectName("label_7")
self.label_8 = QtWidgets.QLabel(self.centralWidget)
self.label_8.setGeometry(QtCore.QRect(220, 110, 21, 21))
self.label_8.setObjectName("label_8")
self.label_9 = QtWidgets.QLabel(self.centralWidget)
self.label_9.setGeometry(QtCore.QRect(400, 110, 61, 31))
self.label_9.setObjectName("label_9")
self.month_in_lineEdit = QtWidgets.QLineEdit(self.centralWidget)
self.month_in_lineEdit.setGeometry(QtCore.QRect(480, 110, 81, 31))
self.month_in_lineEdit.setObjectName("month_in_lineEdit")
self.label_10 = QtWidgets.QLabel(self.centralWidget)
self.label_10.setGeometry(QtCore.QRect(570, 110, 21, 31))
self.label_10.setObjectName("label_10")
self.month_in_lineEdit_2 = QtWidgets.QLineEdit(self.centralWidget)
self.month_in_lineEdit_2.setGeometry(QtCore.QRect(590, 110, 81, 31))
self.month_in_lineEdit_2.setObjectName("month_in_lineEdit_2")
self.label_11 = QtWidgets.QLabel(self.centralWidget)
self.label_11.setGeometry(QtCore.QRect(750, 110, 71, 31))
self.label_11.setObjectName("label_11")
self.label_12 = QtWidgets.QLabel(self.centralWidget)
self.label_12.setGeometry(QtCore.QRect(920, 116, 21, 21))
self.label_12.setObjectName("label_12")
self.newprice_lineEdit = QtWidgets.QLineEdit(self.centralWidget)
self.newprice_lineEdit.setGeometry(QtCore.QRect(820, 110, 91, 31))
self.newprice_lineEdit.setObjectName("newprice_lineEdit")
self.newprice_lineEdit_2 = QtWidgets.QLineEdit(self.centralWidget)
self.newprice_lineEdit_2.setGeometry(QtCore.QRect(940, 110, 91, 31))
self.newprice_lineEdit_2.setObjectName("newprice_lineEdit_2")
self.label_13 = QtWidgets.QLabel(self.centralWidget)
self.label_13.setGeometry(QtCore.QRect(10, 160, 71, 31))
self.label_13.setObjectName("label_13")
self.label_14 = QtWidgets.QLabel(self.centralWidget)
self.label_14.setGeometry(QtCore.QRect(180, 166, 21, 21))
self.label_14.setObjectName("label_14")
self.last_tang_lineEdit = QtWidgets.QLineEdit(self.centralWidget)
self.last_tang_lineEdit.setGeometry(QtCore.QRect(80, 160, 91, 31))
self.last_tang_lineEdit.setObjectName("last_tang_lineEdit")
self.last_tang_lineEdit_2 = QtWidgets.QLineEdit(self.centralWidget)
self.last_tang_lineEdit_2.setGeometry(QtCore.QRect(210, 160, 81, 31))
self.last_tang_lineEdit_2.setObjectName("last_tang_lineEdit_2")
self.label_15 = QtWidgets.QLabel(self.centralWidget)
self.label_15.setGeometry(QtCore.QRect(400, 166, 71, 21))
self.label_15.setObjectName("label_15")
self.pushButton = QtWidgets.QPushButton(self.centralWidget)
self.pushButton.setGeometry(QtCore.QRect(750, 160, 91, 31))
self.pushButton.setObjectName("pushButton")
self.pushButton_1 = QtWidgets.QPushButton(self.centralWidget)
self.pushButton_1.setGeometry(QtCore.QRect(850, 160, 91, 31))
self.pushButton_1.setText("重新计算")
self.pushButton_1.setObjectName("pushButton_1")
self.pushButton_2 = QtWidgets.QPushButton(self.centralWidget)
self.pushButton_2.setGeometry(QtCore.QRect(950, 160, 91, 31))
self.pushButton_2.setText("停止")
self.pushButton_2.setObjectName("pushButton_2")
self.upload_dateEdit = QtWidgets.QLineEdit(self.centralWidget)
self.upload_dateEdit.setGeometry(QtCore.QRect(820, 60, 113, 31))
self.upload_dateEdit.setObjectName("upload_dateEdit")
self.update_dateEdit_2 = QtWidgets.QLineEdit(self.centralWidget)
self.update_dateEdit_2.setGeometry(QtCore.QRect(960, 60, 113, 31))
self.update_dateEdit_2.setObjectName("update_dateEdit_2")
self.share_dateEdit_1 = QtWidgets.QLineEdit(self.centralWidget)
self.share_dateEdit_1.setGeometry(QtCore.QRect(80, 110, 131, 31))
self.share_dateEdit_1.setObjectName("share_dateEdit_1")
self.share_dateEdit_2 = QtWidgets.QLineEdit(self.centralWidget)
self.share_dateEdit_2.setGeometry(QtCore.QRect(240, 110, 131, 31))
self.share_dateEdit_2.setObjectName("share_dateEdit_2")
self.newdate_dateEdit = QtWidgets.QLineEdit(self.centralWidget)
self.newdate_dateEdit.setGeometry(QtCore.QRect(480, 160, 171, 31))
self.newdate_dateEdit.setObjectName("newdate_dateEdit")
MainWindow.setCentralWidget(self.centralWidget)
self.menuBar = QtWidgets.QMenuBar(MainWindow)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 1102, 22))
self.menuBar.setObjectName("menuBar")
self.menu = QtWidgets.QMenu(self.menuBar)
self.menu.setObjectName("menu")
MainWindow.setMenuBar(self.menuBar)
self.action = QtWidgets.QAction(MainWindow)
self.action.setObjectName("action")
self.menu.addAction(self.action)
self.menuBar.addAction(self.menu.menuAction())
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
item = self.tableWidget.horizontalHeaderItem(0)
item.setText(_translate("MainWindow", "代码"))
item = self.tableWidget.horizontalHeaderItem(1)
item.setText(_translate("MainWindow", "府名"))
item = self.tableWidget.horizontalHeaderItem(2)
item.setText(_translate("MainWindow", "卖一价格"))
item = self.tableWidget.horizontalHeaderItem(3)
item.setText(_translate("MainWindow", "挂单数量"))
item = self.tableWidget.horizontalHeaderItem(4)
item.setText(_translate("MainWindow", "月收益"))
item = self.tableWidget.horizontalHeaderItem(5)
item.setText(_translate("MainWindow", "糖块1"))
item = self.tableWidget.horizontalHeaderItem(6)
item.setText(_translate("MainWindow", "分糖日"))
item = self.tableWidget.horizontalHeaderItem(7)
item.setText(_translate("MainWindow", "计算结果"))
self.label.setText(_translate("MainWindow", "府名:"))
self.label_2.setText(_translate("MainWindow", "府内人数:"))
self.label_3.setText(_translate("MainWindow", "~ 500"))
self.label_4.setText(_translate("MainWindow", "所含大糖块:"))
self.label_5.setText(_translate("MainWindow", "上架日期:"))
self.label_6.setText(_translate("MainWindow", "~"))
self.label_7.setText(_translate("MainWindow", "分糖日:"))
self.label_8.setText(_translate("MainWindow", "~"))
self.label_9.setText(_translate("MainWindow", "月收益:"))
self.label_10.setText(_translate("MainWindow", "~"))
self.label_11.setText(_translate("MainWindow", "最新价格:"))
self.label_12.setText(_translate("MainWindow", "~"))
self.label_13.setText(_translate("MainWindow", "上期分糖:"))
self.label_14.setText(_translate("MainWindow", "~"))
self.label_15.setText(_translate("MainWindow", "最新时间:"))
self.pushButton.setText(_translate("MainWindow", "查询"))
self.menu.setTitle(_translate("MainWindow", "菜单"))
self.action.setText(_translate("MainWindow", "退出"))
class Ukers(QtWidgets.QMainWindow,Ui_MainWindow):
def __init__(self):
super(Ukers,self).__init__()
self.setupUi(self)
self.setWindowTitle('Ukers')
self.base_init()
self.table_result=[]
def base_init(self):
self.action.triggered.connect(self.close)
self.pushButton.clicked.connect(self.crawl)
self.newdate_dateEdit.setText('2016-09-16')
self.pushButton_1.clicked.connect(self.recalculate)
self.pushButton_2.clicked.connect(self.stop_crawl)
def stop_crawl(self):
try:
self.crawler.terminate()
except:
pass
self.pushButton.setEnabled(True)
self.pushButton.setText("查询")
def get_input_data(self):
data={}
data['fu_name']=self.name_lineEdit.text()
data['dtk1']=self.tang1_lineEdit.text()
data['dtk2']=self.tang1_lineEdit_2.text()
data['dtk3']=self.tang1_lineEdit_3.text()
data['price_floor']=self.newprice_lineEdit.text()
data['price_cell']=self.newprice_lineEdit_2.text()
data['stock_avg_bonus_floor']=self.month_in_lineEdit.text()
data['stock_avg_bonus_cell']=self.month_in_lineEdit_2.text()
data['pop_floor']=self.person_num_lineEdit.text()
data['ft_floor']=self.last_tang_lineEdit.text()
data['ft_cell']=self.last_tang_lineEdit_2.text()
data['sj_date_floor']=self.upload_dateEdit.text().replace(' ','')
data['sj_date_cell']=self.update_dateEdit_2.text().replace(' ','')
data['ft_date_floor']=self.share_dateEdit_1.text().replace(' ','')
data['ft_date_cell']=self.share_dateEdit_2.text().replace(' ','')
data['page']=1
data['order']=''
data['sort']=''
self.newest_date=self.newdate_dateEdit.text().replace(' ','')
return data
def table_show(self):
self.tableWidget.clearContents()
#self.tableWidget.setHorizontalHeaderLabels(['代码','府名','卖一价格','挂单数量',
#'月收益','糖块1','分糖日期','计算结果'])
result=sorted(self.table_result,key=lambda x:x[-1],reverse=True)
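        # order rows by the computed score (last column), best first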
self.tableWidget.setColumnCount(8)
self.tableWidget.setRowCount(len(result))
for num in range(len(result)):
for i in range(8):
newItem=QtWidgets.QTableWidgetItem()
newItem.setText(str(result[num][i]))
self.tableWidget.setItem(num,i,newItem)
def crawl(self):
self.pushButton.setEnabled(False)
self.pushButton.setText("查询中")
self.table_result.clear()
self.table_show()
data=self.get_input_data()
self.crawler=Crawl(data)
self.crawler._page_ok_signal.connect(self.load_result)
self.crawler._ok_signal.connect(self.crawl_ok)
self.crawler.start()
def crawl_ok(self):
self.pushButton.setEnabled(True)
self.pushButton.setText("查询")
def load_result(self,result):
keys=['code','name','sell_one','sell_one_amount','stock_avg_bonus','dtk_1','ft_date']
for item in result:
line=[]
for key in keys:
try:
line.append(item[key])
except:
line.append('-')
line.append(self.calculate(item))
if line in self.table_result:
continue
self.table_result.append(line)
self.table_show()
def recalculate(self):
self.newest_date=self.newdate_dateEdit.text()
for index in range(len(self.table_result)):
line=self.table_result[index]
self.table_result[index][-1]=self.calculate({'sell_one':line[2],'stock_avg_bonus':line[4],'ft_date':line[6]})
self.table_show()
def calculate(self,item):
if item['sell_one']==-1:
return 0
if item['sell_one']==0:
return 0
try:
result=float(item['stock_avg_bonus'])+27.72-float(item['sell_one'])
d1=time.strptime(self.newest_date,'%Y-%m-%d')
d2=time.strptime(item['ft_date'],'%Y-%m-%d')
d1=datetime.date(d1[0],d1[1],d1[2])
d2=datetime.date(d2[0],d2[1],d2[2])
days=(d1-d2).days+1
result=result/(days*30)
result=result/float(item['sell_one'])
except:
return 0
return result
class Crawl(QtCore.QThread):
_page_ok_signal=QtCore.pyqtSignal(list)
_ok_signal=QtCore.pyqtSignal(int)
def __init__(self,data):
super(Crawl,self).__init__()
self.data=data
self.headers = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "en-US,en;q=0.5",
"Connection": "keep-alive",
"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0"}
def login(self):
self.session=requests.session()
self.session.get('http://www.ukers.cn/identity/sign/sign_location',headers=self.headers)
data={
'phone':'',
'password':''
}
self.session.post('http://www.ukers.cn/identity/sign/signin_action',data=data,headers=self.headers)
def run(self):
self.login()
page=1
while True:
self.data['page']=page
result=[]
try:
html=self.session.post('http://www.ukers.cn/stock/stocksenior/get_senior_info',data=self.data,headers=self.headers,timeout=30).text
except:
break
try:
if html.startswith(u'\ufeff'):
html=html.encode('utf8')[3:].decode('utf8')
items=json.loads(html)['data']
except:
break
if items==[]:
break
for item in items:
try:
office_id=item['office_id']
selldata=self.get_sell(office_id)
item['sell_one']=selldata['sell_one']
item['sell_one_amount']=selldata['sell_one_amount']
result.append(item)
except:
continue
page+=1
self._page_ok_signal.emit(result)
self._ok_signal.emit(1)
def get_sell(self,office_id):
html=self.session.get('http://www.ukers.cn/stock/stocksenior/get_utcard_ten?id='+str(office_id),headers=self.headers,timeout=30).text
if html.startswith(u'\ufeff'):
html=html.encode('utf8')[3:].decode('utf8')
data=json.loads(html)
return data['data']
if __name__ == '__main__':
import sys
app=QtWidgets.QApplication(sys.argv)
management=Ukers()
management.show()
sys.exit(app.exec_())
| [
"[email protected]"
] | |
9dceb1c120d54fd28545a8cb89ddaa77f569dcfe | 87464da8dafe98587862269c7a93402ce87744fa | /pyscappe.py | fdf77afcd4904f26a5df1c684de96cd24f8f651b | [] | no_license | theharisshah/login-linkedin-script | e2001aa5b3d6e623b2e9522b61de81bcae3667aa | ad211fd80401f9a2840fa8fef3b2350fc5cdae53 | refs/heads/master | 2020-09-15T11:06:58.613088 | 2019-11-22T15:22:16 | 2019-11-22T15:22:16 | 223,428,885 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 602 | py | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
driver = webdriver.Chrome('/Users/harisshah/Downloads/chromedriver')
driver.get('https://www.linkedin.com')
driver.fullscreen_window()
driver.implicitly_wait(30)
email = driver.find_element_by_name('session_key')
driver.implicitly_wait(30)
email.send_keys('[email protected]')
driver.implicitly_wait(30)
password = driver.find_element_by_name('session_password')
driver.implicitly_wait(30)
password.send_keys('Password')
driver.implicitly_wait(30)
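
# Note: implicitly_wait() sets a session-wide element-polling timeout, so
# the repeated calls in this script are redundant; a single call right
# after creating the driver has the same effect.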
driver.find_elements_by_class_name('sign-in-form__submit-btn')[0].click() | [
"[email protected]"
] | |
55e6259543807b9be2b1d0cf864283a7e92b3e31 | 930f619494f61aa29b33c0f5996a0f5de116282b | /phase/constants.py | 84466333ac53bf7a81433fcbef9d3bae6c7f0f3f | [] | no_license | scott-trinkle/xphase | a86af0ec5188c2be074da2da2812aadca955d5c5 | 8125f13217e59b315ca1e5e20c3766f9113e000d | refs/heads/master | 2020-04-27T19:30:37.753364 | 2019-03-21T19:09:42 | 2019-03-21T19:09:42 | 174,621,935 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 723 | py | '''
Physical constants
Note: I am keeping all energy units in keV, and
all length units in cm
'''
# Planck's constant
h = 4.135667662e-15 # eV, https://en.wikipedia.org/wiki/Planck_constant
h *= 1e-3 # keV
# Speed of light in vacuum
c = 299792458 # m/s, https://en.wikipedia.org/wiki/Speed_of_light
c *= 100 # cm / s
# Avogadro Constant
N_A = 6.022140857e23 # mol^-1, https://en.wikipedia.org/wiki/Avogadro_constant
# Classical electron radius
re = 2.8179403227e-15 # m, https://en.wikipedia.org/wiki/Classical_electron_radius
re *= 100 # cm
# Density (g/cc) and molar mass (g/mol) for useful elements
elemprops = {'H2O': [1.0, 18.015],
'U': [19.1, 238.02891],
'Os': [22.59, 190.23]}
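
# Illustrative sketch (not part of the original module): with the keV/cm
# conventions above, a photon's wavelength follows from lambda = h*c/E,
# e.g. for a 10 keV X-ray:
#   E = 10.0                # keV
#   wavelength = h * c / E  # ~1.24e-8 cm, i.e. ~1.24 Angstroms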
| [
"[email protected]"
] | |
f29e6bad6a3de332f20d1c654fb2f33cf1f13642 | 42fdf741bf64ea2e63d1546bb08356286f994505 | /test_00_vd2ota/rasp30a_gen4.py | 5b403f8fe4aaa65b6ddfd89eab7fd642a113d42a | [] | no_license | skim819/RASP_Workspace_sihwan | 7e3cd403dc3965b8306ec203007490e3ea911e3b | 0799e146586595577c8efa05c647b8cb92b962f4 | refs/heads/master | 2020-12-24T05:22:25.775823 | 2017-04-01T22:15:18 | 2017-04-01T22:15:18 | 41,511,563 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,867 | py | #O/PS
li_sm = ['gnd' ,[0, 0], #inputs from CAB and device outputs
'vcc' ,[0, 1],#y
'cab.I[0:12]' ,[0, range( 2, 15)],#y to be shifted for the decoder
'vmm4x4_dummy[0:3]' ,[0,range(19,23)], #middle LI for VMM turn
#O/PS OF CAB DEVICES
'fgota[0:1].out[0]' ,[0, range(15, 17)],#y
'h_rect[0].out[0]', [0,26], # Half Wave Rectifier
'ota_buf[0].out[0]' ,[0, 17],#y
'ota[0].out[0]' ,[0, 18],#y
'bias_gen[0].out[0:1]' , [0, [32,19]],#ynmirror our and cap out
'ladder_filter[0].out[0:2]' , [0, [15,16,18]],#y
'cap[0:3].out[0]' ,[0, range(19, 23)],#y
        'nfet[0:1].out[0]' ,[0, range(24, 22, -1)],#y numbering change for nFET0(24) and nFET1(23), needs to be verified
        'pfet[0:1].out[0]' ,[0, range(26, 24,-1)],#y numbering change for pFET0(26) and pFET1(23)
'tgate[0:3].out[0]' ,[0, range(27, 31)],#y
'mux4_1[0].out[0]' ,[0, 27],#y
'nmirror[0:1].out[0]',[0, range(31, 33)],#y
'ladder_blk[0].out[0:2]',[0,[17,15,16]],
'TIA_blk[0].out[0]' ,[0,17],
'ichar_nfet[0].out[0]',[0,25],
'c4_blk[0].out[0]' ,[0,15],# c4 with floating gates
'Algo_ADC[0].out[0]' ,[0,17],
'Nagating_blk[0].out[0]' ,[0,15],
'speech[0].out[0:1]' ,[0,[17,26]], #25 only c4 and pd. 26 with pfet out to inverse.
'gnd_out[0].out[0]',[0,24],
'vdd_out[0].out[0]',[0,24],
'in2in_x1[0].out[0]',[0,24],
'in2in_x6[0].out[0]',[0,24],
'volt_div[0].out[0]',[0,17], #OTA0's output
'volt_div_fgota[0].out[0]',[0,15], #FGOTA0's output
'integrator[0].out[0]',[0,18],
'ichar_nfet[0].in[0:1]' ,[[19,30],0],#vg,vd
'integrator_nmirror[0].out[0]',[0,18],
'lpf[0].out[0]',[0, 17],
'nfet_i2v[0].out[0]',[0, 17], #ota0 output
'pfet_i2v[0].out[0]',[0, 17], #ota0 output
'nmirror_w_bias[0].out[0]',[0, 31], #nmirror0 output
'fgswc_nmirror_w_bias[0].out[0]',[0, 31], #nmirror0 output
'i2v_pfet_gatefgota[0].out[0]',[0,17], #ota0 output
'mismatch_meas[0].out[0]',[0,16], #fgota1 output
'mmap_local_swc[0].out[0]' ,[0,18+15], #chose col-18
'INFneuron[0].out[0]',[0,17],
'ramp_fe[0].out[0:3]' , [0,[18,20,21,22]], #26
'sigma_delta_fe[0].out[0]', [0,17], #[0,[18,17]], # 18:ota1.out 17: ota0.out
'volswc[0:1].out[0]',[0, range(33, 35)],
'hhneuron[0].out[0:2]',[0,[18,15,16]],#Vmem,VNa,VK
'dendiff[0].out[0]',[0,0],
'tgate_so[0].out[0]',[0,19],
'ota_vmm[0].out[0]' ,[0, 18],#y
'nmirror_vmm[0:1].out[0]' ,[0, range(31, 33)],#y
'vmm4x4_SR[0].out[0]' ,[0,34], #19+15--->15 is offset for middle LI
'vmm4x4_SR2[0].out[0]' ,[0,34], #19+15--->15 is offset for middle LI
'vmm8x4_SR[0].out[0]' ,[0,34], #19+15--->15 is offset for middle LI
'SR4[0].out[0:4]', [0,[19,20,21,22,18+15]],#cap--ops+15, and the 18+15
'vmm4x4[0].out[0:3]', [0,range(19,23)],
'vmm8x4[0].out[0]', [0,0], #dummy output
'vmm8inx8in[0].out[0]', [0,19], #dummy output cap0's output
'vmm8x4_in[0].out[0]', [0,0], #dummy output
'vmm12x1[0].out[0]', [0,18], #wta output
'sftreg[0].out[0]' ,[0,18+15], #chose col-18
'DAC_sftreg[0].out[0]' ,[0,18+15], #chose col-18
'sftreg2[0].out[0]' ,[0,18+15], #chose col-18
'sftreg3[0].out[0]' ,[0,33], #chose col-18
'sftreg4[0].out[0]' ,[0,27], #tgate0's output
'Adaptive_receptor[0].out[0]' ,[0,18],
'vmm_senseamp1[0].out[0]',[0,17], #ota0 output
'vmm_senseamp2[0].out[0:1]',[0,[17,18]],
'vmm12x1_wowta[0].out[0]', [0,19], #vmm on cap out
'inv_mcab[0].out[0]',[0,23],
'Hyst_diff[0].out[0]',[0,18],
'Max_detect[0].out[0]',[0,23],
'Min_detect[0].out[0]',[0,25],
'wta_w_bias[0].out[0]',[0,17],
'hhn[0].out[0]',[0,18],
'fgswitch[0].out[0]',[0,19],
'common_drain[0].out[0]',[0,25],
'common_drain_nfet[0].out[0]',[0,23],
'hhn_debug[0].out[0:2]',[0,[18,16,15]],
'vd_2ota[0].out[0]',[0,17],
| [
"ubuntu@ubuntu-VirtualBox.(none)"
] | ubuntu@ubuntu-VirtualBox.(none) |
1f26b85fbe548f9a9b78c8cc8708bb238d8f6d67 | 4f9153272b7cb3c3611e5a71a080b0ed03acb741 | /venv/bin/cython | a171f35299af4e1df19db4e52d5ea37cd4f5e8de | [] | no_license | SusyPinkBash/bad_smell_detection | 50b815dcb085db4dbc826b31d5d49a96ccce1478 | 570a3ef6693f18c947c99d4c7deae774ece24ece | refs/heads/main | 2023-01-22T09:57:11.805880 | 2020-11-10T11:32:47 | 2020-11-10T11:32:47 | 301,667,203 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | #!/Users/Susy/University/Master/3rd_Semester/Knowledge_Analysis_and_Management/Projects/Project1/bad_smell_detection/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from Cython.Compiler.Main import setuptools_main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(setuptools_main())
| [
"[email protected]"
] | ||
d617ce56db427e677389e9b5e5c577e8ff0cc076 | 3ef49bb6ee79cb91a2cf45e3636d7fcae178fbae | /graphics/hh_br/hh_br.py | 64dd1ddf9ccf71e409d2ddb912a829b911eb5836 | [
"CC0-1.0"
] | permissive | dantrim/phd_thesis | 170eb89e2a2255ec397c5a5091be448749e72e51 | df0a2c449a58709ef5efee9bf0602114611514d9 | refs/heads/master | 2021-07-24T09:30:45.528035 | 2020-05-06T19:23:43 | 2020-05-06T19:23:43 | 163,771,696 | 2 | 1 | null | 2020-05-06T19:23:45 | 2019-01-01T22:42:55 | TeX | UTF-8 | Python | false | false | 4,763 | py | #!/bin/env python
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
import matplotlib.ticker as ticker
import numpy as np
import sys, os
N = 9
plt.rcParams["axes.prop_cycle"] = plt.cycler("color", plt.cm.hsv(np.linspace(0,1,N)))
def higgs_br(which = "bb") :
br_dict = {
"bb" : 0.5809
,"tautau" : 0.06256
,"mumu" : 0.0002171
,"cc" : 0.0284
,"gg" : 0.08180
,"gamgam" : 0.00227
,"zgam" : 0.001541
,"ww" : 0.2152
,"zz" : 0.02641
}
return br_dict[which]
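
# Sanity check (illustrative, not in the original script): each channel in
# the plot is normalized to 4b, e.g. BR(hh->bbbb) = 0.5809**2 ~ 0.337 (the
# "(34%)" annotation) and BR(hh->bbWW) = 0.5809 * 0.2152 ~ 0.125, giving
# bbWW/bbbb ~ 0.37, consistent with the ~38% label drawn below.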
def x_idx(which = "bb") :
br_idx = {
"bb" : 0
,"ww" : 1
,"gg" : 2
,"tautau" : 3
,"cc" : 4
,"zz" : 5
,"gamgam" : 6
,"zgam" : 7
,"mumu" : 8
}
return br_idx[which]
def y_idx(which = "bb") :
br_idx = {
"bb" : 8
,"ww" : 7
,"gg" : 6
,"tautau" : 5
,"cc" : 4
,"zz" : 3
,"gamgam" : 2
,"zgam" : 1
,"mumu" : 0
}
return br_idx[which]
def br_name(which = "bb") :
names = {
"bb" : r"$bb$"
,"ww" : r"$WW$"
,"gg" : r"$gg$"
,"tautau" : r"$\tau \tau$"
,"cc" : r"$cc$"
,"zz" : r"$ZZ$"
,"gamgam" : r"$\gamma \gamma$"
,"zgam" : r"$Z \gamma$"
,"mumu" : r"$\mu \mu$"
}
return names[which]
def main() :
higgs_decays = ["bb", "ww", "gg", "tautau", "cc", "zz", "gamgam", "zgam", "mumu"]
n_bins = len(higgs_decays)
data = {}
bbbb_br = higgs_br("bb") * higgs_br("bb")
vals = []
for i in range(n_bins) :
for j in range(n_bins) :
if j < i : continue
bin_tuple = tuple((x_idx(higgs_decays[i]), y_idx(higgs_decays[j])))
hh_br = higgs_br(higgs_decays[i]) * higgs_br(higgs_decays[j])
val = hh_br / bbbb_br
vals.append(val)
data[bin_tuple] = val
#print("({},{}) --> ({},{})".format(higgs_decays[i], higgs_decays[j], x_idx(higgs_decays[i]), y_idx(higgs_decays[j])))
vals = list(set(vals))
min_val = min(vals)
fig, ax = plt.subplots(1,1)
labels = [br_name(x) for x in higgs_decays]
ax.set_xticklabels(labels)
ax.set_yticklabels(labels[::-1])
ax.tick_params(which = "both", direction = "in", length = 0)
bw = 1
bins = np.arange(0,len(higgs_decays)+1, bw)
ax.set_xticks(bins[:-1]+0.5 * bw) #, minor = True)
ax.set_yticks(bins[:-1]+0.5 * bw) #, minor = True)
ax.set_xticks(bins[:-1], minor = True)# +0.5 * bw)
ax.set_yticks(bins[:-1], minor = True)# +0.5 * bw)
x_vals = []
y_vals = []
w_vals = []
for dp, w in data.items() :
x, y = dp
x_vals.append(x)
y_vals.append(y)
w_vals.append(w)
h, x, y, im = ax.hist2d(x_vals, y_vals, bins = bins,
weights = w_vals,
cmin = min_val,
#cmap = plt.cm.hsv(np.linspace(0,1,N))
#cmap = "YlGnBu",
#cmap = "YlOrRd",
cmap = "jet",
norm = LogNorm()
)
cbar = fig.colorbar(im)
cbar.set_label("Branching fraction relative to 4b",
horizontalalignment = "right",
y = 1,
size = 14
)
# ax.xaxis.set_major_formatter(ticker.NullFormatter())
# for tick in ax.xaxis.get_minor_ticks() :
# tick.label1.set_horizontalalignment("center")
ax.grid(which = "minor", alpha = 0.8)
#ax.set_ylabel("Larger BR", horizontalalignment = "right", y = 1)
# bbbb
ax.text(0.16, 8.6, r"100%", size = 7, weight = "bold", color = "white")
ax.text(0.14, 8.33, r"(34%)", size = 7, weight = "bold", color = "white")
#ax.text(0.25, 7.41, r"13%", size = 6, weight = "bold", color = "white")
# bbww
ax.text(0.25, 7.6, r"38%", size = 7, weight = "bold", color = "white")
ax.text(0.14, 7.33, r"(13%)", size = 7, weight = "bold", color = "white")
## wwww
##ax.text(1.25, 7.41, r"5%", size = 8, weight = "bold", color = "white")
#ax.text(1.25, 7.6, r"14%", size = 7, weight = "bold", color = "white")
#ax.text(1.19, 7.33, r"(5%)", size = 7, weight = "bold", color = "white")
# bbtautau
#ax.text(0.27, 5.41, r"4%", size = 8, weight = "bold", color = "white")
ax.text(0.25, 5.6, r"11%", size = 7, weight = "bold", color = "white")
ax.text(0.21, 5.33, r"(4%)", size = 7, weight = "bold", color = "white")
# bbyy
#ax.text(0.20, 2.41, r"<1%", size = 8, weight = "bold", color = "k")
ax.text(0.20, 2.58, r"0.4%", size = 7, weight = "bold", color = "k")
ax.text(0.11, 2.33, r"(0.1%)", size = 7, weight = "bold", color = "k")
#fig.show()
fig.savefig("hh_br.eps", bbox_inches = "tight")
fig.savefig("hh_br.pdf", bbox_inches = "tight")
#x = input()
if __name__ == "__main__" :
main()
| [
"[email protected]"
] | |
8896b3df93c48fbf051e48e3722ca7bf453ccece | 7bc9ba2b8edf97b7bbacd28e26276b6c967d0f42 | /task-library/cisco-aci/CiscoAciCreateTenant.py | 5e610fd6e56b5adcaea1afe1e5dfefdaa8a7d311 | [
"MIT"
] | permissive | nutanix/blueprints | 99318a31ddce360cdf7a3b108951ba37dad91a7b | 68c822ad9fd2624f2730371816cee4d14fdd8302 | refs/heads/master | 2023-06-23T01:42:40.136161 | 2022-04-25T07:12:40 | 2022-04-25T07:12:40 | 142,585,120 | 73 | 104 | MIT | 2023-06-18T03:48:44 | 2018-07-27T14:08:58 | Python | UTF-8 | Python | false | false | 4,315 | py | # region headers
# escript-template v20190611 / [email protected]
# * author: [email protected], [email protected]
# * version: 2019/06/12 - v1
# task_name: CiscoAciCreateTenant
# description: Creates a Cisco ACI tenant object.
# endregion
# region capture Calm variables
username = "@@{aci_user.username}@@"
username_secret = "@@{aci_user.secret}@@"
api_server = "@@{aci_ip}@@"
aci_tenant_name = "@@{aci_tenant_name}@@"
# endregion
# region prepare variables
rn = "tn-{}".format(aci_tenant_name)
dn = "uni/{}".format(rn)
# endregion
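
# Example (illustrative): with aci_tenant_name = "Tenant01", rn becomes
# "tn-Tenant01" and dn becomes "uni/tn-Tenant01", so the create call below
# POSTs to /api/node/mo/uni/tn-Tenant01.json.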
# region generic prepare api call
api_server_port = "443"
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json'
}
# endregion
# region login
# prepare
api_server_endpoint = "/api/aaaLogin.json"
url = "https://{}:{}{}".format(
api_server,
api_server_port,
api_server_endpoint
)
method = "POST"
# Compose the json payload
payload = {
"aaaUser": {
"attributes": {
"name": username,
"pwd": username_secret
}
}
}
# make the API call and capture the results in the variable called "resp"
print("Making a {} API call to {}".format(method, url))
# ! Get rid of verify=False if you're using proper certificates
resp = urlreq(
url,
verb=method,
params=json.dumps(payload),
headers=headers,
verify=False
)
# deal with the result/response
if resp.ok:
print("Login request was successful")
json_resp = json.loads(resp.content)
aci_token = json_resp['imdata'][0]['aaaLogin']['attributes']['token']
headers = {'content-type': 'application/json', 'Cookie': 'APIC-Cookie=' + aci_token}
else:
print("Request failed")
print("Headers: {}".format(headers))
print("Payload: {}".format(json.dumps(payload)))
print('Status code: {}'.format(resp.status_code))
print('Response: {}'.format(json.dumps(json.loads(resp.content), indent=4)))
exit(1)
# endregion
# region POST new tenant
# prepare
api_server_endpoint = "/api/node/mo/uni/{}.json".format(rn)
url = "https://{}:{}{}".format(
api_server,
api_server_port,
api_server_endpoint
)
method = "POST"
# Compose the json payload
payload = {
"fvTenant": {
"attributes": {
"dn": dn,
"name": aci_tenant_name,
"rn": rn,
"status": "created,modified"
},
"children": []
}
}
# make the API call and capture the results in the variable called "resp"
print("Making a {} API call to {}".format(method, url))
# ! Get rid of verify=False if you're using proper certificates
resp = urlreq(
url,
verb=method,
params=json.dumps(payload),
headers=headers,
verify=False
)
# deal with the result/response
if resp.ok:
print("Request to create tenant {} was successful".format(aci_tenant_name))
print('Status code: {}'.format(resp.status_code))
print('Response: {}'.format(json.dumps(json.loads(resp.content), indent=4)))
else:
print("Request failed")
print("Headers: {}".format(headers))
print("Payload: {}".format(json.dumps(payload)))
print('Status code: {}'.format(resp.status_code))
print('Response: {}'.format(json.dumps(json.loads(resp.content), indent=4)))
exit(1)
# endregion
# region logout
# prepare
api_server_endpoint = "/api/aaaLogout.json"
url = "https://{}:{}{}".format(
api_server,
api_server_port,
api_server_endpoint
)
method = "POST"
# Compose the json payload
payload = {
"aaaUser": {
"attributes": {
"name": username,
"pwd": username_secret
}
}
}
# make the API call and capture the results in the variable called "resp"
print("Making a {} API call to {}".format(method, url))
# ! Get rid of verify=False if you're using proper certificates
resp = urlreq(
url,
verb=method,
params=json.dumps(payload),
headers=headers,
verify=False
)
# deal with the result/response
if resp.ok:
print("Logout request was successful")
exit(0)
else:
print("Request failed")
print("Headers: {}".format(headers))
print("Payload: {}".format(json.dumps(payload)))
print('Status code: {}'.format(resp.status_code))
print('Response: {}'.format(json.dumps(json.loads(resp.content), indent=4)))
exit(1)
# endregion
| [
"[email protected]"
] | |
ca331858c469640a507e1cc97e82a82e8af28191 | 4936c1d20aef7a93ad2ded2f5731b102631ad8b2 | /Tablas/tablas/Ruido20/maxVotos/IPF/menu_4.py | 1a7a042e3df03ff46bf76dd545d5757749400140 | [
"LicenseRef-scancode-other-permissive"
] | permissive | jcarlosorte/pyMIL-BNF | 530f60081607deecfee7c72264000c0ba34984fe | 36e282e35242815bf57310db98707da70d69b183 | refs/heads/master | 2022-11-12T20:58:49.058513 | 2020-07-06T15:35:01 | 2020-07-06T15:35:01 | 182,646,512 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,212 | py | # -*- coding: utf-8 -*-
"""
Created on Fri May 10 10:11:00 2019
@author: Usuario
"""
import sys,os,warnings
os.chdir('../../../../MILpy')
sys.path.append(os.path.realpath('..'))
warnings.filterwarnings('ignore')
#from funciones import fvc
from filters import EF
from filters import CVCF
from filters import IPF
folds = 5
votacion = 'maxVotos'
DataSet = ['fox_scaled']
#ruido = [0,5,10,15,20,25,30]
ruido = [20]
#print('********** Crear dataset con ruido **********')
#fvc.fvc_part(DataSet,folds,ruido)
#print('********** Ensemble Filter por '+str(votacion)+'**********')
#EF.EF(DataSet,votacion,folds,ruido)
#print('********** CV Committees Filter por '+str(votacion)+'**********')
#CVCF.CVcF(DataSet,votacion,folds,ruido)
print('********** Iterative Partitioning Filter por '+str(votacion)+'**********')
IPF.IPF(DataSet,votacion,folds,ruido)
#votacion = 'maxVotos'
#print('********** Ensemble Filter por '+str(votacion)+'**********')
#EF.EF(DataSet,votacion,folds,ruido)
#print('********** CV Committees Filter por '+str(votacion)+'**********')
#CVCF.CVcF(DataSet,votacion,folds,ruido)
#print('********** Iterative Partitioning Filter por '+str(votacion)+'**********')
#IPF.IPF(DataSet,votacion,folds,ruido) | [
"[email protected]"
] | |
33e7076a08a3184973e0fd5037ffc220e26d2e56 | 0706c7a7651303c8c602d77fdfbb78503adc7207 | /test.py | 9373f2cd65fb9425f0080280928f9686b2b5e104 | [] | no_license | Mereng/Seq2SeqTrainChatBot | a7308739ee2c76a8cef28e3ca53822601a2f25af | e6be0c279ae3c433eb75871d2d682767d809e004 | refs/heads/master | 2021-08-18T16:30:08.572245 | 2017-11-23T08:36:20 | 2017-11-23T08:36:20 | 111,223,526 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,329 | py | import tensorflow
import tensorlayer
import pickle
with open('data/metadata.pkl', 'rb') as f:
metadata = pickle.load(f)
idx2word = metadata['idx2word']
word2idx = metadata['word2idx']
vocab_size = len(idx2word)
start_id = vocab_size
end_id = vocab_size + 1
word2idx.update({'start_id': start_id})
word2idx.update({'end_id': end_id})
idx2word = idx2word + ['start_id', 'end_id']
vocab_size = len(idx2word)
embedding_size = 1024
def get_model(encode_seq, decode_seq, is_train, reuse):
with tensorflow.variable_scope('model', reuse=reuse):
with tensorflow.variable_scope('embedding') as vs:
net_encode = tensorlayer.layers.EmbeddingInputlayer(
inputs=encode_seq,
vocabulary_size=vocab_size,
embedding_size=embedding_size,
name='embedding'
)
vs.reuse_variables()
tensorlayer.layers.set_name_reuse(True)
net_decode = tensorlayer.layers.EmbeddingInputlayer(
inputs=decode_seq,
vocabulary_size=vocab_size,
embedding_size=embedding_size,
name='embedding'
)
net_rnn = tensorlayer.layers.Seq2Seq(
net_encode,
net_decode,
cell_fn=tensorflow.nn.rnn_cell.BasicLSTMCell,
n_hidden=embedding_size,
initializer=tensorflow.random_uniform_initializer(-0.1, 0.1),
encode_sequence_length=tensorlayer.layers.retrieve_seq_length_op2(encode_seq),
decode_sequence_length=tensorlayer.layers.retrieve_seq_length_op2(decode_seq),
initial_state_encode=None,
dropout=(0.5 if is_train else None),
n_layer=3,
return_seq_2d=True,
name='seq2seq'
)
net_out = tensorlayer.layers.DenseLayer(net_rnn, n_units=vocab_size, act=tensorflow.identity, name='output')
return net_out, net_rnn
encode_seqs = tensorflow.placeholder(tensorflow.int64, [1, None], 'encode_seqs')
decode_seqs = tensorflow.placeholder(tensorflow.int64, [1, None], 'decode_seqs')
net, net_rnn = get_model(encode_seqs, decode_seqs, False, False)
y = tensorflow.nn.softmax(net.outputs)
session = tensorflow.Session(config=tensorflow.ConfigProto(allow_soft_placement=True, log_device_placement=False))
tensorlayer.layers.initialize_global_variables(session)
tensorlayer.files.load_and_assign_npz(session, 'checkpoints/model.npz', net)
while True:
msg = input('> ')
idxs = [word2idx.get(word, word2idx['unk']) for word in msg.split(' ')]
state = session.run(net_rnn.final_state_encode, {
encode_seqs: [idxs]
})
o, state = session.run([y, net_rnn.final_state_decode], {
net_rnn.initial_state_decode: state,
decode_seqs: [[start_id]]
})
word_idx = tensorlayer.nlp.sample_top(o[0], top_k=3)
word = idx2word[word_idx]
sentence = [word]
for _ in range(30):
o, state = session.run([y , net_rnn.final_state_decode], {
net_rnn.initial_state_decode: state,
decode_seqs: [[word_idx]]
})
word_idx = tensorlayer.nlp.sample_top(o[0], top_k=2)
word = idx2word[word_idx]
if word_idx == end_id:
break
sentence = sentence + [word]
print("A > ", ' '.join(sentence))
| [
"[email protected]"
] | |
7978f404cf2024353de47bdadb14af9ccee99e16 | 503b38aaec3c632b78285f7ca016f13a2c14fcd4 | /users/models.py | 7d1d2a776b3b683f60ab5f5bdfe02453a37dcbf7 | [] | no_license | Glitch-dev1/Test | f876ddaafccf2f02748001f8a53e43f031649f23 | a46d375a40cae70b4d10c36bbf9fe16b8a118901 | refs/heads/master | 2023-07-02T19:05:47.461216 | 2021-08-08T16:17:07 | 2021-08-08T16:17:07 | 392,296,916 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 81 | py | from django.db import models
from django.
# Create your models here.
Class user() | [
"[email protected]"
] | |
929fff6570a31d8cd6967c09fdd8456b2611ccbb | 1e744fefe48220623813bc6494890f2e6e9d2f70 | /pythonHacking02/crack.py | 268f0c0a25084c94ef3ffb8e282971c4a902cb8d | [] | no_license | locata/python | 7fb31afe3850b8ddaf92d6450b6db971cd0a8555 | cc8cf0a09b290d4718bd6669c87526ff65b30e5e | refs/heads/master | 2021-01-17T20:24:55.508323 | 2016-06-04T00:41:27 | 2016-06-04T00:41:27 | 60,384,900 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,061 | py | import crypt
import optparse
def testPass(cryptPass, dname):
salt = cryptPass[0:2]
dictFile = open(dname, 'r')
for word in dictFile.readlines():
word = word.strip('\n\r')
cryptWord = crypt.crypt(word,salt)
if (cryptWord == cryptPass):
print "[+] Found Password: "+word+ "\n"
return
print "[-] Password not found. \n"
return
def Main():
    parser = optparse.OptionParser('usage: %prog -f <passwordFile> -d <dictionary>')
parser.add_option('-f', dest='pname', type='string', help='specify password file')
parser.add_option('-d', dest='dname', type='string', help='specify dictionary file')
(options, args) = parser.parse_args()
    if (options.pname is None) or (options.dname is None):
print parser.usage
exit(0);
else:
pname = options.pname
dname = options.dname
passFile = open(pname, 'r')
for line in passFile.readlines():
if ":" in line:
user = line.split(':')[0]
cryptPass = line.split(':')[1].strip(' ')
print "[+] cracking password for: "+user
testPass(cryptPass, dname)
if __name__ == '__main__':
Main()
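
# Example input (illustrative): the password file is expected to contain
# classic crypt(3) entries of the form "user:hash:...", e.g.
#   victim:HX9LLTdc/jiDE:503:100:Iama Victim:/home/victim:/bin/sh
# where the first two characters of the hash ("HX") are the salt.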
| [
"[email protected]"
] | |
7ada5ecf5bbe49c639f0bc49e4d6c277e4527b11 | c4739b12d39ba3166cfab1276e477e6ad5e0d30c | /install.py | 9184d2569f707d039fc1a86bf0e9dfffac9402c5 | [
"Apache-2.0"
] | permissive | wempy24/bot-wa-termux | b9465254e6f24c7cdda18cd17e4cc2c864379be1 | d18574672ea4ae98eff3dfc69bd8a5e355333f12 | refs/heads/main | 2023-03-03T10:33:16.281013 | 2021-02-05T05:55:44 | 2021-02-05T05:55:44 | 336,153,219 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,687 | py | import sys, time, os
def m(s):
for c in s + '\n':
sys.stdout.write(c)
sys.stdout.flush()
time.sleep(0.1)
os.system("clear")
print("""
▄ ▄ ▄▄▄▄▄▄▄ ▄▄ ▄▄ ▄▄▄▄▄▄▄ ▄▄ ▄▄
█ █ ▄ █ █ █ █▄█ █ █ █ █ █
█ ██ ██ █ ▄▄▄█ █ ▄ █ █▄█ █
█ █ █▄▄▄█ █ █▄█ █ █
█ █ ▄▄▄█ █ ▄▄▄█▄ ▄█
█ ▄ █ █▄▄▄█ ██▄██ █ █ █ █
█▄▄█ █▄▄█▄▄▄▄▄▄▄█▄█ █▄█▄▄▄█ █▄▄▄█
░ ░\x1b[00m\033[041m TERMUX WHATSAPP BOT WEM \033[00m\x1b[1;00m░░
░ ░ ░ ░ ░ ░ ░ ░ ░ ░\x1b[00m
""")
m('\x1b[00m\033[041m Install bahan automatis \033[00m')
m('\x1b[00m\033[041m Jangan Keluar Dari Termux Ngentofd Sebelum Selesai Menginstall!! \033[00m')
os.system("pkg update -y")
os.system("pkg upgrade -y")
os.system("pkg install nano")
os.system("pkg install python -y")
os.system("pkg install python2 -y")
os.system("pkg install nodejs -y")
os.system("pkg install libwebp -y")
os.system("pkg install ffmpeg -y")
os.system("pkg install wget -y")
os.system("pkg install tesseract -y")
os.system("bash install.sh")
os.system("npm install -g npm")
os.system("npm install --dev")
os.system("npm audit fix")
os.system("npm i imgbb-uploader")
os.system("npm cache clean -f")
os.system("npm i got")
m('\x1b[00m\033[041m Memulai Whatsapp Bot... \033[00m')
os.system("clear")
os.system("python start.py")
m("DONE")
| [
"[email protected]"
] | |
56dd2eeaf613400036b13f3be8e83fd8d919bbb6 | 974d04d2ea27b1bba1c01015a98112d2afb78fe5 | /python/paddle/incubate/distributed/models/moe/gate/switch_gate.py | 98474dafd0111a054eb01d9a35d71fedf9b7e193 | [
"Apache-2.0"
] | permissive | PaddlePaddle/Paddle | b3d2583119082c8e4b74331dacc4d39ed4d7cff0 | 22a11a60e0e3d10a3cf610077a3d9942a6f964cb | refs/heads/develop | 2023-08-17T21:27:30.568889 | 2023-08-17T12:38:22 | 2023-08-17T12:38:22 | 65,711,522 | 20,414 | 5,891 | Apache-2.0 | 2023-09-14T19:20:51 | 2016-08-15T06:59:08 | C++ | UTF-8 | Python | false | false | 2,893 | py | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# The file has been adapted from the file:
# https://github.com/laekov/fastmoe/blob/master/fmoe/gates/switch_gate.py
# Git commit hash: 295a615aacce7e54a37e7935274ba15e901c78e4
# We retain the following license from the original files:
# Copyright 2021, Jiaao He. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License").
import math
import paddle
import paddle.nn.functional as F
from ..utils import limit_by_capacity
from .naive_gate import NaiveGate
class SwitchGate(NaiveGate):
def __init__(
self,
d_model,
num_expert,
world_size,
topk=1,
switch_eps=0.1,
capacity=(1.2, 2.4),
group=None,
):
assert topk == 1, "topk should be 1 in switch"
super().__init__(d_model, num_expert, world_size, topk=1)
self.switch_eps = switch_eps
self.capacity = capacity
self.group = group
def forward(self, inp):
score = self.gate(inp)
if self.training:
noise = paddle.rand(shape=score.shape)
noise = noise * 2 * self.switch_eps + 1.0 - self.switch_eps
score += noise
score = F.softmax(score, axis=-1)
top1_score, top1_idx = paddle.topk(score, k=1, axis=-1, largest=True)
cap_rate = self.capacity[0 if self.training else 1]
capacity = math.ceil(cap_rate * inp.shape[0])
_new_lec, _new_gec, top1_idx = limit_by_capacity(
top1_idx,
self.num_expert,
self.world_size,
capacity,
group=self.group,
)
valid_idx = top1_idx[top1_idx > -1]
valid_idx_tmp = paddle.reshape(valid_idx, shape=[len(valid_idx), 1])
fraction_expert = (
paddle.scatter_nd_add(
x=paddle.zeros(shape=[self.tot_expert]),
index=valid_idx_tmp,
updates=paddle.ones_like(
valid_idx, dtype=paddle.float32
).reshape(shape=[len(valid_idx)]),
)
/ valid_idx.numel()
)
prob_expert = score.sum(axis=0) / valid_idx.numel()
loss = (fraction_expert * prob_expert).sum() * self.tot_expert
self.set_loss(loss)
return top1_score, top1_idx
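
# Note: the auxiliary loss computed in forward() is the Switch Transformer
# load-balancing term, L = N * sum_i f_i * P_i, where f_i is the fraction
# of tokens dispatched to expert i and P_i is the mean gate probability for
# expert i; it is minimized when routing is uniform across the N experts.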
| [
"[email protected]"
] | |
a20a8ed3e32d019d9a0272a3629e24d37c542c12 | 0bd7a6bef178bb93b2c3fb19e789c7e3b364e6d1 | /simple_frame/niiprocess/niiprocess.py | 6f9bfe0c6b73ee23c66cb66fe0c274832253db65 | [] | no_license | Magnety/Simple_Frame_linux | 090e07491e170314718c9ba5f2da2a4393bdb1ad | 7e1ef5d11e3baa8784fd9b6bbf81b0d954dd1ca6 | refs/heads/main | 2023-06-02T09:35:36.023461 | 2021-06-17T09:23:01 | 2021-06-17T09:23:01 | 371,412,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | import SimpleITK as sitk
import numpy
import os
def redo_spacing(img,label,path):
label.SetOrigin(img.GetOrigin())
label.SetSpacing(img.GetSpacing())
sitk.WriteImage(label,path+'/segmentation.nii.gz')
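
# Note: copying the image's origin and spacing onto the label keeps the
# segmentation aligned with the scan in physical space; SimpleITK stores
# this geometry per image, so a label saved with default metadata can
# otherwise drift relative to the volume.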
dir = "/home/ubuntu/liuyiyao/Simple_Frame_data/breast_data_153_noclsmask"
names = os.listdir(dir)
for name in names:
print(name)
path = dir +'/'+name
img = sitk.ReadImage(path+'/imaging.nii.gz')
label = sitk.ReadImage(path+'/segmentation.nii.gz')
redo_spacing(img,label,path) | [
"[email protected]"
] | |
c18bcc079045f29c93a7eba44f3eb27323d676fb | 6c60dadd15e0b87cd5b2b05865aeee0dae4478f0 | /codinginterview/Remove Duplicate Letters.py | 6c62b6b772e4f23191d827ba11d04180ef733cba | [] | no_license | chanhyuklee/python_codingdojang | d59ffa08d2cfa936fe76744ec8ccf622301e7ae6 | 1c7c83bc1674cad13b71a723f56bae5059049f3e | refs/heads/main | 2023-03-08T16:27:23.788403 | 2021-02-16T10:25:34 | 2021-02-16T10:25:34 | 332,654,088 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,189 | py | # # Solution 1: removal using recursion
# class Solution:
# def removeDuplicateLetters(self, s: str) -> str:
#         # iterate characters in sorted set order
# for char in sorted(set(s)):
# suffix = s[s.index(char):]
#             # split only when the suffix set matches the full character set
# if set(s) == set(suffix):
# return char + self.removeDuplicateLetters(suffix.replace(char, ''))
# return ''
import collections
# Solution 2: removal using a stack
class Solution:
def removeDuplicateLetters(self, s: str) -> str:
        counter, seen, stack = collections.Counter(s), set(), []  # counter, set, list
for char in s:
counter[char] -= 1
            if char in seen:  # skip characters that are already processed
continue
            # pop from the stack while the top char can still be re-appended later
            while stack and char < stack[-1] and counter[stack[-1]] > 0:  # counter > 0 means more copies remain ahead, so the char can be re-appended
seen.remove(stack.pop())
stack.append(char)
seen.add(char)
return ''.join(stack) | [
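
# Worked example (LeetCode 316, illustrative):
#   Solution().removeDuplicateLetters("cbacdcbc")  # -> "acdb"
#   Solution().removeDuplicateLetters("bcabc")     # -> "abc"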
"[email protected]"
] | |
c035676e0ece8278b435126f181f62cec4a3df9f | 2ec84952a894d3f97ffbe8027bb0435398aef3cd | /main.py | 02820075e05c563eca9b985bf8d5d9aba09c89f5 | [] | no_license | spencerc99/spotify-song-classification | 7a90486082fe0728e02e976615974a979849e51f | 39b74094a359bdf8e15c480c760b4b24454492f2 | refs/heads/master | 2021-09-02T05:43:36.499161 | 2017-12-30T20:43:28 | 2017-12-30T20:43:28 | 115,822,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,476 | py | import itertools
import numpy as np
import spotipy
import spotipy.util
from dotenv import load_dotenv, find_dotenv
import os
load_dotenv(find_dotenv())
# Create your own Spotify app to get the ID and secret.
# https://beta.developer.spotify.com/dashboard/applications
CLIENT_ID = os.environ.get('CLIENT_ID')
CLIENT_SECRET = os.environ.get('CLIENT_SECRET')
if not CLIENT_ID or not CLIENT_SECRET:
raise Exception('No client id or secret found')
# Put your Spotify username here.
USERNAME = 'spencerc99'
REDIRECT_URI = 'http://localhost/'
SCOPE = 'user-library-read playlist-modify-public'
# Create a Spotify client that can access my saved song information.
token = spotipy.util.prompt_for_user_token(USERNAME,
SCOPE,
client_id=CLIENT_ID,
client_secret=CLIENT_SECRET,
redirect_uri=REDIRECT_URI)
sp = spotipy.Spotify(auth=token)
def get_spotify_client():
return sp
uris = set([])
def add_uris(fetched):
for item in fetched['items']:
uris.add(item['track']['uri'])
results = sp.current_user_saved_tracks()
add_uris(results)
while results['next']:
results = sp.next(results)
add_uris(results)
# Function that returns the next n elements from the iterator. Used because
# Spotify limits how many items you can group into each of its API calls.
def grouper(n, iterable):
it = iter(iterable)
while True:
chunk = tuple(itertools.islice(it, n))
if not chunk:
return
yield chunk
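
# Illustrative usage: grouper(2, 'abcde') yields ('a', 'b'), ('c', 'd') and
# finally the short tail ('e',).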
# Get the audio features of each of the URIs fetched above.
uris_to_features = {}
for group in grouper(50, uris):
res = sp.audio_features(tracks=group)
for item in res:
uris_to_features[item['uri']] = item
FEATURE_VECTOR = [
'acousticness',
'danceability',
'duration_ms',
'energy',
'instrumentalness',
'key',
'liveness',
'loudness',
'mode',
'speechiness',
'tempo',
'time_signature',
'valence']
def features_to_vector(item):
return np.array([item[key] for key in FEATURE_VECTOR])
vectors = [(x[0], features_to_vector(x[1])) for x in uris_to_features.items()]
# write song vector data to csv
# with open('songs.csv', 'w') as f:
# for track, vec in vectors:
# f.write(track + ',')
# f.writelines(','.join([str(x) for x in vec]))
# f.write('\n')
| [
"[email protected]"
] | |
715ad6e1725d39e33daaf2da20b997e1cb1c08c0 | b807a82d96245c489b273fcef3af0b1f892651af | /proyecto_grupo5/grupo5__project/asgi.py | c3c56a46c5a614e28bd22e4435e755387c4baf64 | [] | no_license | AFLLM75/git_proyecto_grupo5 | 5afb514cd1dd0ec5308c2280e6f29acbcef2621f | 51871f766c92f9443e4c3c38bf8308ea9fd4ae79 | refs/heads/master | 2023-01-29T13:17:35.350900 | 2020-12-09T17:12:46 | 2020-12-09T17:12:46 | 313,370,209 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | """
ASGI config for grupo5__project project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'grupo5__project.settings')
application = get_asgi_application()
| [
"[email protected]"
] | |
614372057ee6922b41d9ce36038f4101d54d89bb | 6661c08b70d205bb9553007c38b1c3ab26479052 | /gym_dummy/envs/fobs/__init__.py | 454e01c9090945ff4de27055e5c182e32dcc4d88 | [
"MIT"
] | permissive | jackblandin/gym-dummy | 943890cf4494ae0ef307c6ab7a273c9037a53324 | 45ec72b5129ea305911bfd2a9eb5a64bd7f3d466 | refs/heads/master | 2020-06-06T01:02:22.187615 | 2019-06-23T23:47:31 | 2019-06-23T23:47:31 | 192,595,832 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,931 | py | import logging.config
import random
import gym
from gym import spaces
import numpy as np
from tabulate import tabulate
class GreaterThanZeroEnv(gym.Env):
"""A Naive OpenAI Gym environment for basic testing of RL agents.
Only a single observation is required to predict the optimal action. Goal
is to identify if the last observation is greater than zero, i.e. take
action 0 if observation is < 0, and take action 1 if observation > 0.
Observation Space
Continuous number randomly sampled from standard normal distribution.
Action Space
2 possible actions: 0 or 1
Reward function
        if last obs <= 0, then the reward function returns
* +1 for taking action 0
* -1 for taking action 1
        if last obs > 0, then the reward function returns
* +1 for taking action 1
* -1 for taking action 0
"""
metadata = {'render.modes': ['human']}
def __init__(self, max_steps_per_episode=100):
"""
Parameters
----------
max_steps_per_episode : int, default=100
Maximum allowed steps per episode. This will define how long an
episode lasts, since the game does not end otherwise.
Attributes
----------
curr_episode : int
Current episode as a count.
obs_episode_memory : list<int>
History of observations in episode.
action_episode_memory : list<int>
History of actions taken in episode.
curr_step : int
Current timestep in episode, as a count.
action_space : gym.spaces.Discrete
Action space.
observation_space : gym.spaces.Box
Observation space.
"""
self.max_steps_per_episode = max_steps_per_episode
self.__version__ = "0.0.2"
logging.info("GreaterThanZero - Version {}".format(self.__version__))
self.curr_episode = -1 # Set to -1 b/c reset() adds 1 to episode
self.obs_episode_memory = []
self.action_episode_memory = []
self.curr_step = 0
self.action_space = spaces.Discrete(2)
self.observation_space = spaces.Box(low=-5.6, high=5.6, shape=(1,))
def step(self, action):
"""The agent takes a step in the environment.
Parameters
----------
action : int
Action to take.
Returns
-------
ob, reward, episode_over, info : tuple
ob : list
A list of ones or zeros which together represent the state of
the environment.
reward : float
Amount of reward achieved by the previous action. The scale
varies between environments, but the goal is always to increase
your total reward.
episode_over : bool
Whether it's time to reset the environment again. Most (but not
all) tasks are divided up into well-defined episodes, and done
being True indicates the episode has terminated. (For example,
perhaps the pole tipped too far, or you lost your last life.)
info : dict
Diagnostic information useful for debugging. It can sometimes
be useful for learning (for example, it might contain the raw
probabilities behind the environment's last state change).
However, official evaluations of your agent are not allowed to
use this for learning.
"""
done = self.curr_step >= self.max_steps_per_episode
if done:
raise RuntimeError("Episode is done")
self.curr_step += 1
self.action_episode_memory[self.curr_episode].append(action)
self._take_action(action)
# Recompute done since action may have modified it
done = self.curr_step >= self.max_steps_per_episode
reward = self._get_reward()
ob = self._get_obs()
self.obs_episode_memory[self.curr_episode].append(ob)
# Perform resets that happen after each timestep
self._step_reset()
return ob, reward, done, {}
def reset(self):
"""Reset the state of the environment and returns an initial obs..
Returns
-------
object
The initial observation of the space.
"""
self.curr_step = 0
self.curr_episode += 1
self.action_episode_memory.append([])
initial_obs = [np.random.randn()]
self.obs_episode_memory.append([initial_obs])
return initial_obs
def render(self, mode='human'):
return
def close(self):
pass
def _take_action(self, action):
"""How to change the environment when taking an action.
Parameters
----------
action : int
Action.
Returns
-------
None
"""
if action not in [0, 1]:
raise ValueError('Invalid action ', action)
def _get_reward(self):
"""Obtain the reward for the current state of the environment.
Returns
-------
float
Reward.
"""
action = self.action_episode_memory[self.curr_episode][-1]
last_obs = self.obs_episode_memory[self.curr_episode][-1]
if last_obs[0] > 0:
if action == 1:
r = 1
else:
r = -1
else:
if action == 1:
r = -1
else:
r = 1
return r
def _get_obs(self):
"""Obtain the observation for the current state of the environment.
Returns
-------
list
Observation.
"""
return [np.random.randn()]
def _step_reset(self):
"""Performs resets that happen after each timestep.
Returns
-------
None
"""
pass
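
# Minimal rollout sketch (illustrative; not executed on import):
#   env = GreaterThanZeroEnv(max_steps_per_episode=10)
#   ob = env.reset()
#   done = False
#   while not done:
#       action = 1 if ob[0] > 0 else 0  # the optimal policy for this env
#       ob, reward, done, _ = env.step(action)
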
class NotXOREnv(gym.Env):
"""A Naive OpenAI Gym environment for basic testing of RL agents.
    Represents the opposite of an XOR gate. Optimal policy is to take action 1
    when the two inputs are equal and action 0 otherwise. This is the MDP
    representation of pobs.TwoInARow, but with the last two observations
    flattened to a single observation.
Observation Space
Tuple(Discrete(2), Discrete(2))
Observation possibilities are [0, 0], [1, 1], [0, 1], [1, 0]
Action Space
Discrete(2)
Reward function
        if both observation inputs are equal, then reward is
            * +1 for taking action 1
            * -1 for taking action 0
        if the inputs differ, then reward is
            * +1 for taking action 0
            * -1 for taking action 1
"""
metadata = {'render.modes': ['human']}
def __init__(self, max_steps_per_episode=100):
"""
Parameters
----------
max_steps_per_episode : int, default=100
Maximum allowed steps per episode. This will define how long an
episode lasts, since the game does not end otherwise.
Attributes
----------
curr_episode : int
Current episode as a count.
obs_episode_memory : list<int>
History of observations in episode.
action_episode_memory : list<int>
History of actions taken in episode.
curr_step : int
Current timestep in episode, as a count.
action_space : gym.spaces.Discrete
Action space.
observation_space : gym.spaces.Tuple
Observation space.
"""
self.max_steps_per_episode = max_steps_per_episode
self.__version__ = "0.0.2"
logging.info("NotXOR - Version {}".format(self.__version__))
self.curr_episode = -1 # Set to -1 b/c reset() adds 1 to episode
self.obs_episode_memory = []
self.action_episode_memory = []
self.curr_step = 0
self.action_space = spaces.Discrete(2)
self.observation_space = spaces.Tuple([spaces.Discrete(2),
spaces.Discrete(2)])
def step(self, action):
"""The agent takes a step in the environment.
Parameters
----------
action : int
Action to take.
Returns
-------
ob, reward, episode_over, info : tuple
ob : list
A list of ones or zeros which together represent the state of
the environment.
reward : float
Amount of reward achieved by the previous action. The scale
varies between environments, but the goal is always to increase
your total reward.
episode_over : bool
Whether it's time to reset the environment again. Most (but not
all) tasks are divided up into well-defined episodes, and done
being True indicates the episode has terminated. (For example,
perhaps the pole tipped too far, or you lost your last life.)
info : dict
Diagnostic information useful for debugging. It can sometimes
be useful for learning (for example, it might contain the raw
probabilities behind the environment's last state change).
However, official evaluations of your agent are not allowed to
use this for learning.
"""
done = self.curr_step >= self.max_steps_per_episode
if done:
raise RuntimeError("Episode is done")
self.curr_step += 1
self.action_episode_memory[self.curr_episode].append(action)
self._take_action(action)
        # Recompute done now that the step counter has advanced (and in case
        # _take_action changed any episode-ending state)
        done = self.curr_step >= self.max_steps_per_episode
reward = self._get_reward()
ob = self._get_obs()
self.obs_episode_memory[self.curr_episode].append(ob)
# Perform resets that happen after each timestep
self._step_reset()
return ob, reward, done, {}
def reset(self):
"""Reset the state of the environment and returns an initial obs..
Returns
-------
object
The initial observation of the space.
"""
self.curr_step = 0
self.curr_episode += 1
self.action_episode_memory.append([])
initial_obs = self._get_obs()
self.obs_episode_memory.append([initial_obs])
return initial_obs
def render(self, mode='human'):
return
def close(self):
pass
def q_values(self, model):
"""Returns a string representation of the Q values for each state.
This assumes a deterministic policy.
Parameters
----------
model : object
Model trying to learn q-values of the env. Must have a `predict`
method.
Returns
-------
str
String representation of Q values.
TODOs
-----
If policy is stochastic, we could add a `sample` boolean parameter
which would call `model.predict()` multiple times and average the
returned values.
"""
inputs = [[0, 0], [1, 1], [0, 1], [1, 0]]
preds = [model.predict(inp) for inp in inputs]
data = []
for i, inp in enumerate(inputs):
inp_str = ','.join([str(_inp) for _inp in inp])
data.append([inp_str]+list(preds[i][0]))
s = tabulate(data, headers=['Obs.', 'Action 0', 'Action 1'])
s = '\n' + s + '\n'
return s
def _take_action(self, action):
"""How to change the environment when taking an action.
Parameters
----------
action : int
Action.
Returns
-------
None
"""
        if action not in [0, 1]:
            raise ValueError('Invalid action: {}'.format(action))
def _get_reward(self):
"""Obtain the reward for the current state of the environment.
Returns
-------
float
Reward.
"""
action = self.action_episode_memory[self.curr_episode][-1]
last_obs = self.obs_episode_memory[self.curr_episode][-1]
if last_obs[0] == last_obs[1]:
if action == 1:
r = 1
else:
r = -1
else:
if action == 0:
r = 1
else:
r = -1
return r
def _get_obs(self):
"""Obtain the observation for the current state of the environment.
Returns
-------
list
Observation.
"""
return [random.choice([0, 1]),
random.choice([0, 1])]
def _step_reset(self):
"""Performs resets that happen after each timestep.
Returns
-------
None
"""
pass
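def _notxor_reward_demo():
    """Sanity-check sketch for the reward logic above: enumerate the four
    possible observations and both actions, confirming the XNOR pattern
    (action 1 pays off exactly when the two inputs are equal). Uses only
    methods and attributes defined in NotXOREnv.
    """
    env = NotXOREnv(max_steps_per_episode=8)
    env.reset()
    for obs in ([0, 0], [1, 1], [0, 1], [1, 0]):
        env.obs_episode_memory[env.curr_episode].append(obs)
        for action in (0, 1):
            env.action_episode_memory[env.curr_episode].append(action)
            expected = 1 if (obs[0] == obs[1]) == (action == 1) else -1
            assert env._get_reward() == expected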
| [
"[email protected]"
] | |
7470e163fdefd31a4a2d642c0c33d163da686394 | 32afceefb98f8de5c11997c7f45bcc71970ca37d | /venv/bin/python-config | 5dba065fb0b53fb7b660264bacf6a4a8699852e4 | [] | no_license | duhjesus/flaskApp | d26deb9ecda52c8e66fcb71f2c31e9e4f6a56212 | cd18d2222638d22ab264c084732b6e341301baf0 | refs/heads/master | 2020-08-01T08:16:54.451137 | 2020-04-27T10:00:55 | 2020-04-27T10:00:55 | 210,927,932 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,343 | #!/home/jesus/flaskApp/venv/bin/python
import sys
import getopt
import sysconfig
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
'ldflags', 'help']
if sys.version_info >= (3, 2):
valid_opts.insert(-1, 'extension-suffix')
valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
valid_opts.append('configdir')
def exit_with_usage(code=1):
sys.stderr.write("Usage: {0} [{1}]\n".format(
sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
sys.exit(code)
try:
opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
exit_with_usage()
if not opts:
exit_with_usage()
pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
opt_flags = [flag for (flag, val) in opts]
if '--help' in opt_flags:
exit_with_usage(code=0)
for opt in opt_flags:
if opt == '--prefix':
print(sysconfig.get_config_var('prefix'))
elif opt == '--exec-prefix':
print(sysconfig.get_config_var('exec_prefix'))
elif opt in ('--includes', '--cflags'):
flags = ['-I' + sysconfig.get_path('include'),
'-I' + sysconfig.get_path('platinclude')]
if opt == '--cflags':
flags.extend(getvar('CFLAGS').split())
print(' '.join(flags))
elif opt in ('--libs', '--ldflags'):
abiflags = getattr(sys, 'abiflags', '')
libs = ['-lpython' + pyver + abiflags]
libs += getvar('LIBS').split()
libs += getvar('SYSLIBS').split()
# add the prefix/lib/pythonX.Y/config dir, but only if there is no
# shared library in prefix/lib/.
if opt == '--ldflags':
if not getvar('Py_ENABLE_SHARED'):
libs.insert(0, '-L' + getvar('LIBPL'))
if not getvar('PYTHONFRAMEWORK'):
libs.extend(getvar('LINKFORSHARED').split())
print(' '.join(libs))
elif opt == '--extension-suffix':
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
if ext_suffix is None:
ext_suffix = sysconfig.get_config_var('SO')
print(ext_suffix)
elif opt == '--abiflags':
if not getattr(sys, 'abiflags', None):
exit_with_usage()
print(sys.abiflags)
elif opt == '--configdir':
print(sysconfig.get_config_var('LIBPL'))
| [
"[email protected]"
] | ||
236f507d04ea8e194d95e3b3a9ee273b32fa9d13 | 0183f01feb040edc2aa8ec2e0d2e3e12ed324206 | /week_2/575_distribute_candies.py | f1165f5e33d94c79d9d6add34e3a0d021a6b39c6 | [] | no_license | VictoriaNguyenMD/wallbreakers_homework | 705283216498cdd2bd4828267ffce02e899af02c | a934d36cf7cb315647b0cdb876be308133d8c193 | refs/heads/master | 2021-01-07T14:38:08.382289 | 2020-02-19T21:41:36 | 2020-02-19T21:41:36 | 241,728,193 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | class Solution:
    def distributeCandies(self, candies: List[int]) -> int:
        # The sister receives len(candies) // 2 candies, so the number of
        # kinds she can get is capped by both that and the distinct kinds.
        return min(len(set(candies)), len(candies) // 2)
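if __name__ == '__main__':
    # Quick checks (assumes a LeetCode-style environment where `List` is
    # predefined; otherwise add `from typing import List` at the top).
    # 6 candies of 3 kinds -> the sister gets 3 kinds; with a single kind
    # she can only ever get 1 kind, however many candies she receives.
    assert Solution().distributeCandies([1, 1, 2, 2, 3, 3]) == 3
    assert Solution().distributeCandies([1, 1, 1, 1]) == 1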
| [
"[email protected]"
] | |
b1ee93c0443a4d94009021861bd1d0855de1360f | f18ad1bffc340d8b3882d512137a70373d70ffa8 | /util.py | 10c94a44d04e351753e6faa4bd125fdbf2b317c9 | [] | no_license | flyingsleeves/g51-twitter | df66c8b78d66a442dd86510c65684bc727903960 | 62eceb9ef75c306a1ba0c55efad5a6d89a9d3019 | refs/heads/master | 2016-09-09T20:32:47.370665 | 2014-11-24T20:22:02 | 2014-11-24T20:22:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,048 | py | import twiminer
import pickle
masterlist = {}
def mine(userID):
m = twiminer.Miner()
userScreenName = m.mineUser(userID)
print "mined: " + userScreenName
def printMinedUsers():
with open('masterlist.pickle', 'r') as filepath:
masterList = pickle.load(filepath)
for ID in masterList:
print masterList[ID]['screen_name'] + " : " + str(ID)
def printAllData():
with open('masterlist.pickle', 'r') as filepath:
masterList = pickle.load(filepath)
for ID in masterList:
userInfo = masterList[ID]
print ''
print 'screen_name: ' + userInfo['screen_name']
print 'id: ' + str(userInfo['id'])
print 'name: ' + userInfo['name']
print 'friends: ' + str(userInfo['friends'][:5]).rstrip(']') + ', ...]'
print 'followers: ' + str(userInfo['followers'][:5]).rstrip(']') + ', ...]'
def mineIndex(idx):
    # Mine the idx-th friend of the hard-coded seed user below.
    with open('masterlist.pickle', 'r') as filepath:
        masterList = pickle.load(filepath)
    userInfo = masterList['179651009']
    mine(userInfo['friends'][idx])
def mineHundred():
    # Mine the first 100 friends of the seed user used by mineIndex().
    for i in range(100):
        mineIndex(i)
| [
"[email protected]"
] | |
31b9c8a80ef02da0e3e59f5416aa555362b1d39b | 738b6d6ec4572f5848940b6adc58907a03bda6fb | /tests/pymcell4/1720_count_rxn_vol_region_expr/parameters.py | 7380103bb61af613f71cfc6d8188418753e9ee5d | [
"MIT",
"Unlicense",
"LicenseRef-scancode-public-domain"
] | permissive | mcellteam/mcell_tests | 09cd1010a356e0e07c88d7e044a73c5606c6e51a | 34d2d967b75d56edbae999bf0090641850f4f4fe | refs/heads/master | 2021-12-24T02:36:24.987085 | 2021-09-24T14:19:41 | 2021-09-24T14:19:41 | 174,733,926 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 893 | py | # WARNING: This is an automatically generated file and will be overwritten
# by CellBlender on the next model export.
import sys
import os
import math
import shared
import mcell as m
MODEL_PATH = os.path.dirname(os.path.abspath(__file__))
# ---- model parameters ----
# declare all items from parameter_overrides as variables
for parameter_name, value in shared.parameter_overrides.items():
setattr(sys.modules[__name__], parameter_name, value)
# auxiliary function used to determine whether a parameter was defined
def not_defined(parameter_name):
return parameter_name not in globals()
# ---- simulation setup ----
if not_defined('ITERATIONS'):
ITERATIONS = 20
if not_defined('TIME_STEP'):
TIME_STEP = 1e-06
if not_defined('DUMP'):
DUMP = False
if not_defined('EXPORT_DATA_MODEL'):
EXPORT_DATA_MODEL = True
if not_defined('SEED'):
SEED = 1
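if __name__ == '__main__':
    # Standalone sanity print of the resolved values; in normal use these
    # are overridden through shared.parameter_overrides before import.
    print('ITERATIONS =', ITERATIONS)
    print('TIME_STEP =', TIME_STEP)
    print('SEED =', SEED)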
| [
"[email protected]"
] | |
fff483ae8f0b293c36e792724b72c21eb158db14 | ae3a6593c0b11d6a889321bf4a359b14186e852d | /build/control_fsm/catkin_generated/pkg.develspace.context.pc.py | ea83d4996ab48a449a5a0adb6a73fced5612fc12 | [] | no_license | Forrest-Z/wssample | 074e71ae56bcdd4f19a911804c7a2a96ce5ce39e | 5694177fddd656b373e1843182b246ff690b713d | refs/heads/master | 2023-08-04T17:39:20.531393 | 2021-09-03T02:17:30 | 2021-09-03T02:17:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "control_fsm"
PROJECT_SPACE_DIR = "/home/oligay_61/zone3/devel/.private/control_fsm"
PROJECT_VERSION = "1.0.0"
| [
"“[email protected]”"
] | |
2660012756f441b6d1dcffbb3ff0a573928f6ecf | 100debd483b39b53f4c58b87180940cbe790c1f1 | /trainer_project_env/bin/gunicorn_paster | 632ff435f142d8216727551a9d5a0ad89f7430ae | [] | no_license | gabriellend/trainer_project | 8b697bf7febbf2b977f3cccf7fa008d33608a439 | 56de19c232b6d14a20236645bf2f024be155cfa4 | refs/heads/master | 2022-03-29T18:02:16.099435 | 2019-12-04T00:32:41 | 2019-12-04T00:32:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | #!/home/zach/trainer_project/trainer_project/trainer_project_env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from gunicorn.app.pasterapp import run
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(run())
| [
"[email protected]"
] | ||
98a4bb27803976359a02b363a0080c28e3811704 | 557c14ae45342c7409181ec5b8bd256aa340a740 | /tests/integration/protonets_miniimagenet_test_notravis.py | ab2e45db8408b1bf89ead122121698086658fa91 | [
"MIT"
] | permissive | JasonMa2016/learn2learn | 5b72f4aa831b97a8bd2f91ee82eb66b5e62254c4 | 502e1ea6db64481d7464fdda4d4d0be9b0f1089a | refs/heads/master | 2020-11-27T15:19:43.958730 | 2019-12-28T02:47:34 | 2019-12-28T02:47:34 | 229,508,990 | 0 | 0 | MIT | 2019-12-22T02:36:45 | 2019-12-22T02:36:44 | null | UTF-8 | Python | false | false | 8,005 | py | #!/usr/bin/env python3
import unittest
import random
import numpy as np
import torch
from torch import nn, optim
from torchvision import transforms
import torch.nn.functional as F
from torch.utils.data import DataLoader
import learn2learn as l2l
def pairwise_distances_logits(a, b):
n = a.shape[0]
m = b.shape[0]
logits = -((a.unsqueeze(1).expand(n, m, -1) -
b.unsqueeze(0).expand(n, m, -1))**2).sum(dim=2)
return logits
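def _pairwise_logits_demo():
    """Sanity-check sketch for the metric above: comparing a set of
    embeddings with itself puts the maximal logit (0, i.e. zero squared
    distance) on the diagonal, and the output shape is (n, m).
    """
    a = torch.randn(4, 5)
    logits = pairwise_distances_logits(a, a)
    assert logits.shape == (4, 4)
    assert torch.allclose(logits.diag(), torch.zeros(4))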
def conv_block(in_channels, out_channels):
bn = nn.BatchNorm2d(out_channels)
nn.init.uniform_(bn.weight)
return nn.Sequential(
nn.Conv2d(in_channels, out_channels, 3, padding=1),
bn,
nn.ReLU(),
nn.MaxPool2d(2)
)
def accuracy(predictions, targets):
predictions = predictions.argmax(dim=1).view(targets.shape)
return (predictions == targets).sum().float() / targets.size(0)
class Convnet(nn.Module):
# TODO: Is this architecture better than the one we have
# in l2l.vision.models.ConvBase ?
def __init__(self, x_dim=3, hid_dim=64, z_dim=64):
super().__init__()
self.encoder = nn.Sequential(
conv_block(x_dim, hid_dim),
conv_block(hid_dim, hid_dim),
conv_block(hid_dim, hid_dim),
conv_block(hid_dim, z_dim),
)
self.out_channels = 1600
def forward(self, x):
x = self.encoder(x)
return x.view(x.size(0), -1)
def fast_adapt(model, batch, ways, shot, query_num, metric=None, device=None):
if metric is None:
metric = pairwise_distances_logits
if device is None:
device = model.device()
data, labels = batch
data = data.to(device)
labels = labels.to(device)
n_items = shot * ways
# Sort data samples by labels
# TODO: Can this be replaced by ConsecutiveLabels ?
sort = torch.sort(labels)
data = data.squeeze(0)[sort.indices].squeeze(0)
labels = labels.squeeze(0)[sort.indices].squeeze(0)
# Compute support and query embeddings
embeddings = model(data)
    # Boolean mask selecting the support examples (bool masks avoid the
    # deprecated uint8 indexing path in newer PyTorch).
    support_indices = torch.zeros(data.size(0), dtype=torch.bool)
    selection = torch.arange(ways) * (shot + query_num)
    for offset in range(shot):
        support_indices[selection + offset] = True
    support = embeddings[support_indices]
    support = support.reshape(ways, shot, -1).mean(dim=1)
    query = embeddings[~support_indices]
    labels = labels[~support_indices].long()
logits = pairwise_distances_logits(query, support)
loss = F.cross_entropy(logits, labels)
acc = accuracy(logits, labels)
return loss, acc
class Object:
pass
def main(num_iterations=250):
args = Object()
setattr(args, 'max_epoch', num_iterations)
setattr(args, 'shot', 1)
setattr(args, 'test_way', 5)
setattr(args, 'test_shot', 1)
setattr(args, 'test_query', 30)
setattr(args, 'train_query', 15)
setattr(args, 'train_way', 30)
setattr(args, 'gpu', 0)
device = torch.device('cpu')
if args.gpu and torch.cuda.device_count():
torch.cuda.manual_seed(43)
device = torch.device('cuda')
model = Convnet()
model.to(device)
path_data = './data'
train_dataset = l2l.vision.datasets.MiniImagenet(
root=path_data, mode='train')
valid_dataset = l2l.vision.datasets.MiniImagenet(
root=path_data, mode='validation')
test_dataset = l2l.vision.datasets.MiniImagenet(
root=path_data, mode='test')
train_dataset = l2l.data.MetaDataset(train_dataset)
train_transforms = [
l2l.data.transforms.NWays(train_dataset, args.train_way),
l2l.data.transforms.KShots(train_dataset, args.train_query + args.shot),
l2l.data.transforms.LoadData(train_dataset),
l2l.data.transforms.RemapLabels(train_dataset),
]
train_tasks = l2l.data.TaskDataset(train_dataset, task_transforms=train_transforms)
train_loader = DataLoader(train_tasks, pin_memory=True, shuffle=True)
valid_dataset = l2l.data.MetaDataset(valid_dataset)
valid_transforms = [
l2l.data.transforms.NWays(valid_dataset, args.test_way),
l2l.data.transforms.KShots(valid_dataset, args.test_query + args.test_shot),
l2l.data.transforms.LoadData(valid_dataset),
l2l.data.transforms.RemapLabels(valid_dataset),
]
valid_tasks = l2l.data.TaskDataset(valid_dataset,
task_transforms=valid_transforms,
num_tasks=200)
valid_loader = DataLoader(valid_tasks, pin_memory=True, shuffle=True)
test_dataset = l2l.data.MetaDataset(test_dataset)
test_transforms = [
l2l.data.transforms.NWays(test_dataset, args.test_way),
l2l.data.transforms.KShots(test_dataset, args.test_query + args.test_shot),
l2l.data.transforms.LoadData(test_dataset),
l2l.data.transforms.RemapLabels(test_dataset),
]
test_tasks = l2l.data.TaskDataset(test_dataset,
task_transforms=test_transforms,
num_tasks=200)
test_loader = DataLoader(test_tasks, pin_memory=True, shuffle=True)
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
lr_scheduler = torch.optim.lr_scheduler.StepLR(
optimizer, step_size=20, gamma=0.5)
for epoch in range(1, args.max_epoch + 1):
        model.train()
loss_ctr = 0
n_loss = 0
n_acc = 0
for i in range(100):
batch = next(iter(train_loader))
loss, acc = fast_adapt(model,
batch,
args.train_way,
args.shot,
args.train_query,
metric=pairwise_distances_logits,
device=device)
loss_ctr += 1
n_loss += loss.item()
n_acc += acc
optimizer.zero_grad()
loss.backward()
optimizer.step()
print('epoch {}, train, loss={:.4f} acc={:.4f}'.format(
epoch, n_loss/loss_ctr, n_acc/loss_ctr))
train_accuracy = n_acc / loss_ctr
        # Step the LR schedule once per epoch, after the optimizer updates
        # (stepping it before optimizer.step() triggers a PyTorch warning).
        lr_scheduler.step()
model.eval()
loss_ctr = 0
n_loss = 0
n_acc = 0
for i, batch in enumerate(valid_loader):
loss, acc = fast_adapt(model,
batch,
args.test_way,
args.test_shot,
args.test_query,
metric=pairwise_distances_logits,
device=device)
loss_ctr += 1
n_loss += loss.item()
n_acc += acc
print('epoch {}, val, loss={:.4f} acc={:.4f}'.format(
epoch, n_loss/loss_ctr, n_acc/loss_ctr))
valid_accuracy = n_acc / loss_ctr
loss_ctr = 0
n_acc = 0
for i, batch in enumerate(test_loader, 1):
loss, acc = fast_adapt(model,
batch,
args.test_way,
args.test_shot,
args.test_query,
metric=pairwise_distances_logits,
device=device)
loss_ctr += 1
n_acc += acc
print('batch {}: {:.2f}({:.2f})'.format(
i, n_acc/loss_ctr * 100, acc * 100))
test_accuracy = n_acc / loss_ctr
return train_accuracy, valid_accuracy, test_accuracy
class ProtoNetMiniImageNetIntegrationTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_final_accuracy(self):
train_acc, valid_acc, test_acc = main(num_iterations=1)
self.assertTrue(train_acc > 0.20)
self.assertTrue(valid_acc > 0.20)
self.assertTrue(test_acc > 0.20)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
0b200854cb4f9c9c7b2c8c6a88c52c3486a403d8 | db575f3401a5e25494e30d98ec915158dd7e529b | /BIO_Stocks/ADVM.py | a8ee6dcf42e81f66cee04649f6497695996988d1 | [] | no_license | andisc/StockWebScraping | b10453295b4b16f065064db6a1e3bbcba0d62bad | 41db75e941cfccaa7043a53b0e23ba6e5daa958a | refs/heads/main | 2023-08-08T01:33:33.495541 | 2023-07-22T21:41:08 | 2023-07-22T21:41:08 | 355,332,230 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,183 | py | import requests
from lxml import html
from bs4 import BeautifulSoup
import os
from datetime import date, datetime
from ValidationTools import validateday
from Database_Connections import InsertData, Insert_Logging
def main(id_control):
try:
url = 'https://investors.adverum.com/press-releases'
headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
result = requests.get(url, headers=headers)
#print(result.content.decode())
html_content = result.content.decode()
soup = BeautifulSoup(html_content, 'html.parser')
#print(soup)
table = soup.find('table', attrs={'class':'nirtable views-table views-view-table cols-2 collapse-table'})
#print(table)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
FIRST_ROW_columns = rows[0].find_all('td')
v_article_date = FIRST_ROW_columns[0].text.lstrip().rstrip()
article_desc = FIRST_ROW_columns[1].find('div', attrs={'class':'nir-widget--field nir-widget--news--headline'})
#if the process find any article with the today date
istoday, v_art_date = validateday(v_article_date)
if (istoday == True):
v_ticker = os.path.basename(__file__).replace(".py", "")
v_url = article_desc.a.get('href')
v_description = article_desc.text.lstrip().rstrip()
now = datetime.now()
print("URL: " + v_url)
print("DESCRIPTION: " + v_description)
print("ARTICLE_DATE: " + str(now))
# Insert articles
if "https://" in v_url:
InsertData(v_ticker, v_description, v_url, v_art_date)
else:
InsertData(v_ticker, v_description, url, v_art_date)
except Exception:
error_message = "Entrou na excepção ao tratar " + os.path.basename(__file__) + "..."
print(error_message)
Insert_Logging(id_control, 'Detail', error_message)
pass
if __name__ == "__main__":
    main(None)  # no logging control id when run standalone
| [
"[email protected]"
] | |
899d621d5e36ff015d2b7581b2ea8cd5030394a0 | d6d6289fa057e90964f57107a7a691f43165fea3 | /setup.py | 1962aa71697b53e8cc6f7e059d92f91e54693e85 | [
"MIT"
] | permissive | Gass0014/docassemble-P07Prototype | cf1b58369bda9311b1d85a507535ba1b680ce501 | cb9422a5ab67ab28a6d8bb5a93e04acf07de8703 | refs/heads/master | 2022-07-11T20:25:44.623978 | 2020-05-06T01:46:17 | 2020-05-06T01:46:17 | 261,625,230 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,475 | py | import os
import sys
from setuptools import setup, find_packages
from fnmatch import fnmatchcase
from distutils.util import convert_path
standard_exclude = ('*.pyc', '*~', '.*', '*.bak', '*.swp*')
standard_exclude_directories = ('.*', 'CVS', '_darcs', './build', './dist', 'EGG-INFO', '*.egg-info')
def find_package_data(where='.', package='', exclude=standard_exclude, exclude_directories=standard_exclude_directories):
out = {}
stack = [(convert_path(where), '', package)]
while stack:
where, prefix, package = stack.pop(0)
for name in os.listdir(where):
fn = os.path.join(where, name)
if os.path.isdir(fn):
bad_name = False
for pattern in exclude_directories:
if (fnmatchcase(name, pattern)
or fn.lower() == pattern.lower()):
bad_name = True
break
if bad_name:
continue
if os.path.isfile(os.path.join(fn, '__init__.py')):
if not package:
new_package = name
else:
new_package = package + '.' + name
stack.append((fn, '', new_package))
else:
stack.append((fn, prefix + name + '/', package))
else:
bad_name = False
for pattern in exclude:
if (fnmatchcase(name, pattern)
or fn.lower() == pattern.lower()):
bad_name = True
break
if bad_name:
continue
out.setdefault(package, []).append(prefix+name)
return out
setup(name='docassemble.P07Prototype',
version='0.0.1',
description=('A docassemble extension.'),
long_description='# docassemble.P07Prototype\n\nA docassemble extension.\n\n## Author\n\nBryce Gassner, [email protected]\n\n',
long_description_content_type='text/markdown',
author='Bryce Gassner',
author_email='[email protected]',
license='The MIT License (MIT)',
url='https://docassemble.org',
packages=find_packages(),
namespace_packages=['docassemble'],
install_requires=[],
zip_safe=False,
package_data=find_package_data(where='docassemble/P07Prototype/', package='docassemble.P07Prototype'),
)
| [
"[email protected]"
] | |
0efb59a12aed1f21abba19513c831a8ba888b213 | 534a13a18fc93b5c163f76edb88ab9d97813b9c9 | /titanic/controller.py | 66e81fad8e7fe2377637ee74f492066a8841d62d | [] | no_license | B0Kimm/Study | 83312b7fa32ed677ed90123d3f551b139c2ecdd0 | 16fbdc1360c16d855656f78d4f929e64dd1d4622 | refs/heads/master | 2022-12-29T13:38:16.126029 | 2020-10-16T10:44:23 | 2020-10-16T10:44:23 | 297,597,368 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,424 | py | import sys
sys.path.insert(0, '/Users/USER/SbaProjects')
from titanic.entity import Entity
from titanic.service import Service
class Controller:
def __init__(self) :
self.entity = Entity()
self.service = Service()
def modelling(self, train, test) :
service = self.service
this = self.preprocessing(train, test)
        # print(f'training columns : {this.train.columns}')
this.label = service.create_label(this)
this.train = service.create_train(this)
return this
def preprocessing(self, train, test):
service = self.service
this = self.entity
this.train = service.new_model(train) # payload
this.test = service.new_model(test)
        this.id = this.test['PassengerId'] # for the machine, these ids become the question to answer.
print(f'variables/features before drop : {this.train.columns}')
this = service.drop_feature(this, 'Cabin')
this = service.drop_feature(this, 'Ticket')
print(f'variables/features after drop : {this.train.columns}')
this = service.embarked_nominal(this)
        print(f'Embarked cleaning result : {this.train.head()}')
        this = service.title_nominal(this)
        print(f'Title cleaning result : {this.train.head()}')
        # The title has been extracted from the name variable, so name is no
        # longer needed; being a string, it would also cause the ML library
        # to raise errors later when it tries to interpret it.
this = service.drop_feature(this, 'Name')
this = service.drop_feature(this, 'PassengerId')
this = service.age_ordinal(this)
        print(f'Age cleaning result : {this.train.head()}')
        this = service.sex_nominal(this)
        print(f'Sex cleaning result : {this.train.head()}')
        this = service.fareBand_nominal(this)
        print(f'Fare cleaning result : {this.train.head()}')
this = service.drop_feature(this, 'Fare')
        print(f'-----------------------------------TRAIN cleaning result-----------------------------------')
        print(f'{this.train.head()}')
        print(f'-----------------------------------TEST cleaning result-----------------------------------')
        print(f'{this.test.head()}')
        print(f'-----------------------------------TRAIN NA check-----------------------------------')
        print(f'{this.train.isnull().sum()}')
        print(f'-----------------------------------TEST NA check-----------------------------------')
        print(f'{this.test.isnull().sum()}')
return this
def learning(self, train, test) :
service = self.service
this = self.modelling(train, test)
        print('===================================Learning results==========================')
        print(f'Decision tree validation result : {service.accuracy_by_dtree(this)}')
        print(f'Random forest validation result : {service.accuracy_by_rforest(this)}')
        print(f'Naive Bayes validation result : {service.accuracy_by_nb(this)}')
        print(f'KNN validation result : {service.accuracy_by_knn(this)}')
        print(f'SVM validation result : {service.accuracy_by_svm(this)}')
return this
    def submit(self) : # produces the final machine; at this stage we would submit it to Kaggle for evaluation
pass
if __name__ == '__main__':
ctrl = Controller()
ctrl.learning('train.csv', 'test.csv')
| [
"[email protected]"
] | |
50a5880403fea90ef3a075333678cde1a82fc309 | 06f19f8361fc0a07ab87c00e16af49299046d5e8 | /ch05/dynamic_array_resize.py | bebacd19da01fc15f57207de304c96d670b3edf9 | [] | no_license | AllySmith1/CP2410SampleCode | 4826bec8ea41d30268c560d54f29604ea85f6747 | 2789c56fa1181378b50a3963dd2a5de71ab50770 | refs/heads/master | 2020-06-01T23:16:37.167263 | 2019-06-09T04:20:17 | 2019-06-09T04:20:17 | 190,962,861 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,460 | py | # Copyright 2013, Michael H. Goldwasser
#
# Developed for use with the book:
#
# Data Structures and Algorithms in Python
# Michael T. Goodrich, Roberto Tamassia, and Michael H. Goldwasser
# John Wiley & Sons, 2013
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ctypes # provides low-level arrays
class DynamicArray:
"""A dynamic array class akin to a simplified Python list."""
def __init__(self, resize_factor):
"""Create an empty array."""
self._n = 0 # count actual elements
self._capacity = 1 # default array capacity
self._A = self._make_array(self._capacity) # low-level array
self.resize_factor = resize_factor
def __len__(self):
"""Return number of elements stored in the array."""
return self._n
def __getitem__(self, k):
"""Return element at index k."""
if not 0 <= k < self._n:
raise IndexError('invalid index')
return self._A[k] # retrieve from array
def append(self, obj):
"""Add object to end of the array."""
        if self._n == self._capacity: # not enough room
            self._resize(self.resize_factor * self._capacity) # grow capacity by resize_factor
self._A[self._n] = obj
self._n += 1
    def _resize(self, c): # nonpublic utility
"""Resize internal array to capacity c."""
B = self._make_array(c) # new (bigger) array
for k in range(self._n): # for each existing value
B[k] = self._A[k]
self._A = B # use the bigger array
self._capacity = c
    def _make_array(self, c): # nonpublic utility
"""Return new array with capacity c."""
return (c * ctypes.py_object)() # see ctypes documentation
def insert(self, k, value):
"""Insert value at index k, shifting subsequent values rightward."""
# (for simplicity, we assume 0 <= k <= n in this verion)
        if self._n == self._capacity: # not enough room
            self._resize(self.resize_factor * self._capacity) # grow capacity by resize_factor
for j in range(self._n, k, -1): # shift rightmost first
self._A[j] = self._A[j - 1]
self._A[k] = value # store newest element
self._n += 1
def remove(self, value):
"""Remove first occurrence of value (or raise ValueError)."""
# note: we do not consider shrinking the dynamic array in this version
for k in range(self._n):
if self._A[k] == value: # found a match!
for j in range(k, self._n - 1): # shift others to fill gap
self._A[j] = self._A[j + 1]
self._A[self._n - 1] = None # help garbage collection
self._n -= 1 # we have one less item
return # exit immediately
raise ValueError('value not found') # only reached if no match
| [
"[email protected]"
] | |
c9d04a45a313d626354fd3e48b1b5e8369552722 | e6c0683afc2a3d48ada10ffa9f7d257e7c64589e | /purity_fb/purity_fb_1dot5/rest.py | 1460ba5967aac764761839ba79e57b09b80eb9f8 | [
"Apache-2.0"
] | permissive | unixtreme/purity_fb_python_client | 9a5a0375f4505421974aadc674ed04982c2bf84f | e836afe9804ffa99f74bf4b5202f181c3c04d9df | refs/heads/master | 2020-04-24T14:53:56.977344 | 2019-02-22T12:37:45 | 2019-02-22T12:37:45 | 172,042,713 | 0 | 0 | NOASSERTION | 2019-02-22T10:05:44 | 2019-02-22T10:05:44 | null | UTF-8 | Python | false | false | 12,977 | py | # coding: utf-8
"""
Purity//FB REST Client
Client for Purity//FB REST API (1.0), developed by [Pure Storage, Inc](http://www.purestorage.com/). Documentations can be found at [purity-fb.readthedocs.io](http://purity-fb.readthedocs.io/).
OpenAPI spec version: 1.5
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import io
import json
import ssl
import certifi
import logging
import re
# python 2 and python 3 compatibility library
from six import PY3
from six.moves.urllib.parse import urlencode
from .configuration import Configuration
try:
import urllib3
except ImportError:
raise ImportError('Swagger python client requires urllib3.')
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
def __init__(self, resp):
self.urllib3_response = resp
self.status = resp.status
self.reason = resp.reason
self.data = resp.data
def getheaders(self):
"""
Returns a dictionary of the response headers.
"""
return self.urllib3_response.getheaders()
def getheader(self, name, default=None):
"""
Returns a given response header.
"""
return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
def __init__(self, pools_size=4, maxsize=4):
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680
# maxsize is the number of requests to host that are allowed in parallel
# ca_certs vs cert_file vs key_file
# http://stackoverflow.com/a/23957365/2985775
# cert_reqs
if Configuration().verify_ssl:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
# ca_certs
if Configuration().ssl_ca_cert:
ca_certs = Configuration().ssl_ca_cert
else:
# if not set certificate file, use Mozilla's root certificates.
ca_certs = certifi.where()
# cert_file
cert_file = Configuration().cert_file
# key file
key_file = Configuration().key_file
# proxy
proxy = Configuration().proxy
# https pool manager
if proxy:
self.pool_manager = urllib3.ProxyManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=cert_file,
key_file=key_file,
proxy_url=proxy
)
else:
self.pool_manager = urllib3.PoolManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=cert_file,
key_file=key_file
)
def request(self, method, url, query_params=None, headers=None,
body=None, post_params=None, _preload_content=True, _request_timeout=None):
"""
:param method: http request method
:param url: http request url
:param query_params: query parameters in the url
:param headers: http request headers
:param body: request json body, for `application/json`
:param post_params: request post parameters,
`application/x-www-form-urlencoded`
and `multipart/form-data`
:param _preload_content: if False, the urllib3.HTTPResponse object will be returned without
reading/decoding response data. Default is True.
:param _request_timeout: timeout setting for this request. If one number provided, it will be total request
timeout. It can also be a pair (tuple) of (connection, read) timeouts.
"""
method = method.upper()
assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', 'PATCH', 'OPTIONS']
if post_params and body:
raise ValueError(
"body parameter cannot be used with post_params parameter."
)
post_params = post_params or {}
headers = headers or {}
timeout = None
if _request_timeout:
if isinstance(_request_timeout, (int, ) if PY3 else (int, long)):
timeout = urllib3.Timeout(total=_request_timeout)
elif isinstance(_request_timeout, tuple) and len(_request_timeout) == 2:
timeout = urllib3.Timeout(connect=_request_timeout[0], read=_request_timeout[1])
if 'Content-Type' not in headers:
headers['Content-Type'] = 'application/json'
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
if query_params:
url += '?' + urlencode(query_params)
if re.search('json', headers['Content-Type'], re.IGNORECASE):
request_body = None
if body:
request_body = json.dumps(body)
r = self.pool_manager.request(method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'application/x-www-form-urlencoded':
r = self.pool_manager.request(method, url,
fields=post_params,
encode_multipart=False,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'multipart/form-data':
# must del headers['Content-Type'], or the correct Content-Type
# which generated by urllib3 will be overwritten.
del headers['Content-Type']
r = self.pool_manager.request(method, url,
fields=post_params,
encode_multipart=True,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
# Pass a `string` parameter directly in the body to support
# other content types than Json when `body` argument is provided
# in serialized form
elif isinstance(body, str):
request_body = body
r = self.pool_manager.request(method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
else:
# Cannot generate the request from given parameters
msg = """Cannot prepare a request message for provided arguments.
Please check that your arguments match declared content type."""
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
r = self.pool_manager.request(method, url,
fields=query_params,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
except urllib3.exceptions.SSLError as e:
msg = "{0}\n{1}".format(type(e).__name__, str(e))
raise ApiException(status=0, reason=msg)
if _preload_content:
r = RESTResponse(r)
# In the python 3, the response.data is bytes.
# we need to decode it to string.
if PY3:
r.data = r.data.decode('utf8')
# log response body
logger.debug("response body: %s", r.data)
if not 200 <= r.status <= 299:
raise ApiException(http_resp=r)
return r
def GET(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None):
return self.request("GET", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def HEAD(self, url, headers=None, query_params=None, _preload_content=True, _request_timeout=None):
return self.request("HEAD", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def OPTIONS(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True,
_request_timeout=None):
return self.request("OPTIONS", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def DELETE(self, url, headers=None, query_params=None, body=None, _preload_content=True, _request_timeout=None):
return self.request("DELETE", url,
headers=headers,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def POST(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True,
_request_timeout=None):
return self.request("POST", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PUT(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True,
_request_timeout=None):
return self.request("PUT", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PATCH(self, url, headers=None, query_params=None, post_params=None, body=None, _preload_content=True,
_request_timeout=None):
return self.request("PATCH", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
class ApiException(Exception):
def __init__(self, status=None, reason=None, http_resp=None):
if http_resp:
self.status = http_resp.status
self.reason = http_resp.reason
self.body = http_resp.data
self.headers = http_resp.getheaders()
else:
self.status = status
self.reason = reason
self.body = None
self.headers = None
def __str__(self):
"""
Custom error messages for exception
"""
error_message = "({0})\n"\
"Reason: {1}\n".format(self.status, self.reason)
if self.headers:
error_message += "HTTP response headers: {0}\n".format(self.headers)
if self.body:
error_message += "HTTP response body: {0}\n".format(self.body)
return error_message
| [
"[email protected]"
] | |
8436baedfbbf14cfc8b06b4002d3dcb7f4e8be1f | c07f6fcbbfabd047a63a071e7b14f2787332f84f | /manage_mturk.py | 4dcccc2dcbd7e6502dec07df0cf658a3e03adb29 | [] | no_license | hyolee/pose_experiments_human | 217d9cdd1150287cc6e33dcb950ef8ed8537e384 | 0422f3e69069fc4021291288205f9a39bf76f318 | refs/heads/master | 2021-01-10T10:10:13.942943 | 2015-12-12T18:07:58 | 2015-12-12T18:07:58 | 47,889,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,914 | py | import boto
import pymongo as pm
import numpy as np
from boto.mturk.connection import MTurkConnection
from mturkutils.base import parse_credentials_file
from mturkutils.base import parse_human_data_from_HITdata
from mturkutils.base import update_mongodb_once
sandbox = False
max_assignments = 10
comment = "Pose judgement in Rosch dataset"
description = 'Make object 3-d pose judgements for up to 50 cent bonus'
# Mturk
access_key_id, secretkey = parse_credentials_file(section_name='MTurkCredentials')
if not sandbox:
conn = MTurkConnection(aws_access_key_id=access_key_id,
aws_secret_access_key=secretkey)
else:
conn = MTurkConnection(aws_access_key_id=access_key_id,
aws_secret_access_key=secretkey,
host='mechanicalturk.sandbox.amazonaws.com')
# --- hits --- retrieve Pose Judgement experiments only
allhits = [hit for hit in conn.get_all_hits()]
hits = []
for hit in allhits:
if 'Pose' in hit.Title and 'changed' in hit.Title:
hits.append(hit)
for hit in hits:
print hit.HITId, ':', hit.Title
assignments = conn.get_assignments(hit.HITId)
print len(assignments), ' assignments for this HIT'
for a in assignments:
print a.AssignmentId, ':', a.AssignmentStatus
# mongoDB
mongo_conn = pm.Connection(host='localhost', port=22334)
db = mongo_conn['mturk']
coll = db['rosch_pose_exp']
for doc in coll.find():
print doc['HITid'], ':', doc['Title']
print "AssignmentID", doc['AssignmentID']
print "BonusAwarded?", 'BonusAwarded' in doc.keys()
# getHITdata
def getHITdataraw(hitid, retry=5):
"""Get the human data as raw boto objects for the given `hitid`"""
# NOTE: be extra careful when modify this function.
# especially download_results() and cli.make_backup()
# depends on this. In short: avoid modification of this func
# as much as possible, especially the returned data.
try:
assignments = conn.get_assignments(hit_id=hitid,
page_size=min(max_assignments, 100))
HITdata = conn.get_hit(hit_id=hitid)
except Exception as e:
if retry == 0:
raise e
from time import sleep
sleep(5)
assignments, HITdata = getHITdataraw(hitid, retry=retry - 1)
return assignments, HITdata
def getHITdata(hitid, verbose=True, full=False):
assignments, HITdata = getHITdataraw(hitid)
return parse_human_data_from_HITdata(assignments, HITdata,
comment=comment, description=description,
full=full, verbose=verbose)
# updateDBwithHITS
def updateDBwithHITs(hitids, **kwargs):
"""See the documentation of updateDBwithHITs() and
updateDBwithHITslocal()"""
meta = None
if coll is None:
print('**NO DB CONNECTION**')
return
if sandbox:
print('**WORKING IN SANDBOX MODE**')
all_data = []
for src in hitids:
sdata = getHITdata(src, full=False)
update_mongodb_once(coll, sdata, meta,
**kwargs)
all_data.extend(sdata)
return all_data
# payBonus
def payBonuses(hitids, performance_threshold=0.375, bonus_threshold=None,
performance_key='Error', performance_error=False,
auto_approve=True):
"""
This function approves and grants bonuses on all hits above a certain
performance, with a bonus (stored in database) under a certain
threshold (checked for safety).
"""
if auto_approve:
for hitid in hitids:
assignments = conn.get_assignments(hitid)
for a in assignments:
try:
assignment_id = a.AssignmentId
assignment_status = a.AssignmentStatus
doc = coll.find_one({ "AssignmentID": a.AssignmentId })
                    if doc is None:
continue
performance = doc.get(performance_key)
if (performance_threshold is not None) and \
(performance is not None):
if (performance_error and
performance < performance_threshold) or \
(performance > performance_threshold):
if assignment_status in ['Submitted']:
conn.reject_assignment(assignment_id,
feedback='Your performance was '
'significantly lower than other subjects')
else:
if assignment_status in ['Submitted']:
conn.approve_assignment(assignment_id)
else:
if assignment_status in ['Submitted']:
conn.approve_assignment(assignment_id)
except boto.mturk.connection.MTurkRequestError, e:
print('Error for assignment_id %s' % assignment_id, e)
for hitid in hitids:
assignments = conn.get_assignments(hitid)
for a in assignments:
try:
assignment_status = a.AssignmentStatus
doc = coll.find_one({ "AssignmentID": a.AssignmentId })
                if doc is None:
continue
assignment_id = doc['AssignmentID']
worker_id = doc['WorkerID']
except boto.mturk.connection.MTurkRequestError, e:
print('Error for assignment_id %s' % assignment_id, e)
continue
bonus = doc.get('Bonus')
if (bonus is not None) and (assignment_status == 'Approved'):
if (bonus_threshold is None) or (float(bonus) <
float(bonus_threshold)):
if not doc.get('BonusAwarded', False):
bonus = np.round(float(bonus) * 100) / 100
if bonus >= 0.01:
p = boto.mturk.price.Price(bonus)
print 'award granted'
print bonus
conn.grant_bonus(worker_id,
assignment_id,
p,
"Performance Bonus")
coll.update({'_id': doc['_id']},
{'$set': {'BonusAwarded': True}},
multi=True)
# approve mistakenly rejected HITs
def approveRejectedHITs(hitids):
for hitid in hitids:
assignments = conn.get_assignments(hitid)
for a in assignments:
params = {'AssignmentId': a.AssignmentId}
try:
conn._process_request('ApproveRejectedAssignment', params)
except:
"Couldn't be approved. check if it is already approved"
| [
"[email protected]"
] | |
670fb82f2775cdd7773db5fbb222929a38103a16 | 9f79c385ec0b70c0eee9b7c3558e42feb5724e75 | /src/image_segmentation.py | f442bef3980f73cb3410137e123cd6b08ea10f7d | [] | no_license | phiggin1/obj_segmentation | e8ef78b778785acd19772cd26dbdbdf95291d1d8 | 28fd4cb9f6cf43413a83981748ae9fa6fe7896df | refs/heads/main | 2023-08-19T06:25:51.787828 | 2021-10-13T16:31:14 | 2021-10-13T16:31:14 | 334,986,546 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,473 | py | #!/usr/bin/env python2
import rospy
import message_filters
import numpy as np
import image_geometry
import math
import cv2
from cv_bridge import CvBridge
from obj_segmentation.msg import SegmentedClustersArray
from obj_segmentation.msg import Object
from obj_segmentation.msg import ObjectArray
from obj_segmentation.srv import GetImages, GetImagesResponse
from sensor_msgs.msg import PointCloud2
from sensor_msgs.msg import Image, CameraInfo
import sensor_msgs.point_cloud2 as pc2
from matplotlib import pyplot as plt
import sys
class ImageSegment:
def __init__(self):
rospy.init_node('image_segment', anonymous=True)
self.bridge = CvBridge()
self.rgb_cam_info = rospy.wait_for_message("/camera/unityrgb/camera_info", CameraInfo, timeout=None)
#self.depth_cam_info = rospy.wait_for_message("/camera/unitydepth/camera_info", CameraInfo, timeout=None)
#self.cam_model = image_geometry.StereoCameraModel()
#self.cam_model.fromCameraInfo(self.depth_cam_info, self.rgb_cam_info)
self.cam_model = image_geometry.PinholeCameraModel()
self.cam_model.fromCameraInfo(self.rgb_cam_info)
self.have_imgs = False
self.obj_pub = rospy.Publisher('/objects_images', ObjectArray, queue_size=10)
self.rgb_image_sub = message_filters.Subscriber('/camera/unityrgb/image_raw', Image)
self.depth_image_sub = message_filters.Subscriber('/camera/unitydepth/image_raw', Image)
#self.object_clusters_sub = message_filters.Subscriber('/object_clusters', SegmentedClustersArray)
#self.ts = message_filters.ApproximateTimeSynchronizer([self.rgb_image_sub, self.depth_image_sub, self.object_clusters_sub], 10, slop=2.0)
self.ts = message_filters.ApproximateTimeSynchronizer([self.rgb_image_sub, self.depth_image_sub], 10, slop=2.0)
self.ts.registerCallback(self.callback)
#self.testpub = rospy.Publisher('/test', ObjectArray, queue_size=10)
self.obj_cluster_sub = rospy.Subscriber("/object_clusters", SegmentedClustersArray, self.process_clusters)
#print("Service ready")
#self.service = rospy.Service('get_images', GetImages, self.process_clusters)
rospy.spin()
#grap the pointclouds of all objects and the depth/rgb image for the same frame
def callback(self, rgb_ros_image, depth_ros_image):
#self.object_clusters = object_clusters
self.rgb = np.asarray(self.bridge.imgmsg_to_cv2(rgb_ros_image, desired_encoding="passthrough"))
self.depth = np.asarray(self.bridge.imgmsg_to_cv2(depth_ros_image, desired_encoding="passthrough"))
self.have_imgs = True
def process_clusters(self, req):
if self.have_imgs:
#print("recv serv req")
object_array = ObjectArray()
object_array.header = req.header
#iterate through all the objects
for i, pc in enumerate(req.clusters):
#print("obj %d" % i)
obj = Object()
min_x = 1000.0
min_y = 1000.0
min_z = 1000.0
max_x = -1000.0
max_y = -1000.0
max_z = -1000.0
#for each object get a bounding box
for p in pc2.read_points(pc):
if p[0] > max_x:
max_x = p[0]
if p[0] < min_x:
min_x = p[0]
if p[1] > max_y:
max_y = p[1]
if p[1] < min_y:
min_y = p[1]
if p[2] > max_z:
max_z = p[2]
if p[2] < min_z:
min_z = p[2]
center = [(min_x + max_x)/2, (min_y + max_y)/2, (min_z + max_z)/2]
w = max_x-min_x
h = max_y-min_y
d = max_z-min_z
min_pix = self.cam_model.project3dToPixel( [ min_x, min_y, min_z ] )
max_pix = self.cam_model.project3dToPixel( [ max_x, max_y, max_z ] )
#25 is to try and avoid clipping the object off
#this might not be needed/a bad idea
u_min = max(int(math.floor(min_pix[0]))-25, 0)
v_min = max(int(math.floor(min_pix[1]))-25, 0)
u_max = min(int(math.ceil(max_pix[0]))+25, self.rgb.shape[1])
            v_max = min(int(math.ceil(max_pix[1]))+25, self.rgb.shape[0]) # rows (height), not columns
rgb_cropped = self.rgb[v_min:v_max, u_min:u_max].copy()
depth_cropped = self.depth[v_min:v_max, u_min:u_max].copy()
obj.loc.header = pc.header
obj.loc.point.x = center[0]
obj.loc.point.y = center[1]
obj.loc.point.z = center[2]
obj.depth = self.bridge.cv2_to_imgmsg(depth_cropped, encoding="16UC1")
obj.rgb = self.bridge.cv2_to_imgmsg(rgb_cropped, encoding="bgr8")
obj.depth.header = pc.header
obj.rgb.header = pc.header
object_array.objects.append(obj)
self.obj_pub.publish(object_array)
#return GetImagesResponse(object_array)
if __name__ == '__main__':
segmenter = ImageSegment()
| [
"[email protected]"
] | |
c81aeaffb03bea6d3fc2cc3509fc39b5bb9cff69 | 0696ead001a7e711cb792f3686b9ad5572131409 | /handlers/AddUpdatePatient.py | 8be4156d958d37c577b754b1d6953a8cf2175678 | [] | no_license | iOSDevD/EMR_System | 075133585f364645f9fe2af5108c1101961b2339 | d285d582477a137962ba5af181182dd154aeba11 | refs/heads/master | 2023-08-24T20:54:03.483972 | 2021-10-23T00:15:25 | 2021-10-23T00:15:25 | 418,509,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,929 | py | """
Nikunj Upadhyay
Class: CS 521 - Fall 1
Date: 10/16/2021
Homework Problem # Project
Description of Problem (1-2 sentence summary in your own words):
AddUpdatePatient helps to add a new patient to the system or update
the demographic details of the patient. Common functionality like
update address and contact details are re-used between new patient flow
and update patient flow.
"""
from model.Patient import Patient
from utilities.AppConstants import AppConstants
from utilities.CSVUtility import FileHandlerUtility
from utilities.PatientValidator import PatientValidator
class AddUpdateFlowHandler:
"""Helps to add a new patient or updated details of existing patient.
Initializing the class with a non-empty patient, starts the flow in
update demographic mode. It defaults to an empty patient object during
initialization.
"""
# Prompt to be displayed to request basic details like, name , date of
# birth and gender.
NAME_GENDER_INPUT_MESSAGE = "Please enter FirstName, LastName,Date of " \
"Birth and Gender of the patient you want " \
"to add, separated by '{}'\nFor Gender " \
"please enter M for Male, F for Female " \
"and O for Other."\
.format(AppConstants.INPUT_DELIMITER)
# Prompt to be displayed to request address details like address line 1,
# state, zip and etc.
ADDRESS_INPUT_MESSAGE = "Please enter Address Line 1, Line 2, City, " \
"State and Zip all separated " \
"by '{}'".format(AppConstants.INPUT_DELIMITER)
# Prompt to be displayed to request contact details like phone and email.
CONTACT_INPUT_MESSAGE = "Please enter Phone and email-id separated " \
"by '{}'".format(AppConstants.INPUT_DELIMITER)
# Error message to be displayed when invalid gender value is entered out
# of permissible values.
ERROR_MESSAGE_GENDER = "Error! Please enter a valid gender value."
# Error message to be displayed when a duplicate match is found, which
# matches name, dob and gender.
ERROR_DUPLICATE_PATIENT = "Error! Found Duplicate patient with same" \
" name {},{}.\nSystem does not allow to" \
" add Duplicate entries. Please try again!"
# Error message to be displayed when the user enters zip code in in-valid
# format.
ERROR_ZIP_CODE = "Error! Please enter a valid value for zip code ex:" \
" 12345"
# Error message to be displayed when user enters invalid character like a
# comma in the input, which could affect the CSV format.
ERROR_INVALID_DELIMITER_ENTRIES = "Error! Please enter valid " \
"entries separated by '{}'". \
format(AppConstants.INPUT_DELIMITER)
# Message to be displayed when a new patient is registered.
NEW_PATIENT_REGISTRATION_SUCCESS = "New Patient {},{} has been " \
"added registered successfully " \
"with the system.\n"
# Count pertaining First name, last name , date of birth and Gender as the
# basic demographic entries expected from input function.
BASIC_DEMOGRAPHIC_ENTRIES = 4
# Count pertaining address details like Address line 1, line 2, City, State
# amd zip, expected from input function.
ADDRESS_ENTRIES = 5
# Count pertaining contact details like phone and e-mail
# expected from input function.
CONTACT_ENTRIES = 2
def __init__(self, patient=None):
""" Initialize with non empty patient object in case we want to use
the current class in update patient details mode, else it defaults
to new patient creation as the default patient value is immutable
value None"""
if patient is None:
# Set a new object if none to private patient attribute
# ex: New Patient creation flow.
self.__patient = Patient()
else:
# Set the patient object if not None to private
# patient attribute. ex: Update patient
self.__patient = patient
self.validator = PatientValidator() # public validator attribute
def add_update_patient_flow(self):
"""Start the create patient or update patient details mode. For
a input patient object which is created using a non empty patient id
it starts the update patient details mode.
For update patient mode, name,dob and gender validation is skipped as
the patient is already identified and
flow directly jumps to address and contact details validation.
Also in update patient mode, it returns a patient object and does not
save the object. It is upto the receiver to identify the change and
save it.
For new patient, the patient details are saved to the file.
"""
name_dob_validation = False # Name and dob validation status
address_validation = False # Address validation status
contact_validation = False # Contact validation status
while True: # Keep prompting user to enter patient details.
if name_dob_validation is False and self.__is_in_new_patient_mode():
# First perform basic details validation like name which exists
# only for new patient.
name_dob_validation = self.__handle_basic_entries()
elif address_validation is False:
# Second perform validation for address details.
address_validation = self.__handle_address_entries()
elif contact_validation is False:
# At last perform validation for contact details.
contact_validation = self.__handle_contact_entries()
if (name_dob_validation or
self.__is_in_new_patient_mode() is False) and \
address_validation and contact_validation:
# All Validation passed so save the data.
file_handler = FileHandlerUtility() # CSV, File handler object
if self.__is_in_new_patient_mode() is False:
return self.__patient # Update returns modified patient
else:
# New Patient writes to the CSV.
# New patient id is the max id + 1. This way a unique
# patient id is created with simple logic.
new_patient_id = self.validator.generate_new_patient_id()
self.__patient.set_patient_id(new_patient_id)
# Convert Patient object to list before saving to csv.
patient_details_list = \
self.__patient.get_list_template_to_save()
file_handler.write_new_record(patient_details_list)
print(AddUpdateFlowHandler.NEW_PATIENT_REGISTRATION_SUCCESS
.format(self.__patient.get_first_name(),
self.__patient.get_last_name()))
break
def __is_in_new_patient_mode(self):
"""If the class is initialized with patient object which has a
patient id, the flow starts in update demographic mode.
It returns true to help identify if its a new patient creation else
returns false for update demographics."""
if len(self.__patient.get_patient_id()) == 0:
return True # New patient mode
else:
return False # Update patient demographics mode.
def __handle_basic_entries(self):
"""Handle the flow of basic patient entries like name, dob, gender and
also perform the required validation with it. This is usually done
during new patient registration.
Returns True once the user enters all the input values correctly.
After which flow can move to next input which is address details."""
# Get the user input for address entries from console.
name_dob = input(AddUpdateFlowHandler.NAME_GENDER_INPUT_MESSAGE)
# Perform input string validation for invalid characters
text_has_valid_chars = self.validator.has_valid_character(name_dob)
        if text_has_valid_chars[0] is False: # Invalid input characters.
            # Error! Print the invalid-character message.
print(text_has_valid_chars[1])
else: # Found input text valid, proceed ahead.
name_dob_input_list = self.validator.get_list_from_input(name_dob)
if len(name_dob_input_list) != \
AddUpdateFlowHandler.BASIC_DEMOGRAPHIC_ENTRIES:
# User entered less or more entries separated by delimiter.
# Show the error message.
print(AddUpdateFlowHandler.ERROR_INVALID_DELIMITER_ENTRIES)
else:
first_name_str, last_name_str, dob_str, gender_abbreviation_str\
= name_dob_input_list
gender_str = self.validator.validate_gender(
gender_abbreviation_str) # Validate gender abbreviation.
dob_validation = self.validator.has_validate_date_format(
dob_str) # Validate dob is in correct format or not.
if dob_validation[0] is False: # DOB validation failed.
print(dob_validation[1]) # Show DOB invalid message.
elif gender_str is not None:
# All validation passed, finally check for duplicate.
if self.validator.check_if_patient_exits(first_name_str,
last_name_str,
dob_str,
gender_str):
# Duplicate patient found, show error message.
print(AddUpdateFlowHandler.ERROR_DUPLICATE_PATIENT.
format(last_name_str.title(),
first_name_str.title()))
else:
# No duplicates and all validation passed update
# the patient attribute.
self.__patient.set_gender(gender_str)
self.__patient.set_first_name(first_name_str)
self.__patient.set_last_name(last_name_str)
self.__patient.set_dob(dob_str)
return True # True helps to end basic info entry flow
else: # Gender Validation failed
print(AddUpdateFlowHandler.ERROR_MESSAGE_GENDER)
return False
def __handle_address_entries(self):
"""Handles the flow to ask user for address details during new
patient registration or during update demographics.
Returns True once the user enters all the input values correctly.
After which flow can move to next input which is contact details."""
# Get the user input for address entries from console.
address = input(AddUpdateFlowHandler.ADDRESS_INPUT_MESSAGE)
# Perform input string validation for invalid characters
text_has_valid_chars = self.validator.has_valid_character(address)
        if text_has_valid_chars[0] is False:  # Invalid input characters.
# Error! Print invalid character message.
print(text_has_valid_chars[1])
else: # Found input text valid, proceed ahead.
address_input_list = self.validator.get_list_from_input(address)
if len(address_input_list) != AddUpdateFlowHandler.ADDRESS_ENTRIES:
# User entered less or more entries separated by delimiter.
# Show the error message.
print(AddUpdateFlowHandler.ERROR_INVALID_DELIMITER_ENTRIES)
else: # List size matches the expected size
address1, address2, city, state, address_zip = \
address_input_list # assign using multiple assignment
# Validate state input, must be 2 characters and match
# the available list of states.
state_validation = self.validator.has_valid_state(state)
if self.validator.is_zip_code_valid(address_zip) is False:
# User entered invalid zip code, show message.
print(self.ERROR_ZIP_CODE)
elif state_validation[0] is False: # State validation failed.
print(state_validation[1]) # print error message
else:
# All validation passed update the patient attribute.
self.__patient.set_address_line_1(address1)
self.__patient.set_address_line_2(address2)
self.__patient.set_address_city(city)
self.__patient.set_address_state(state)
self.__patient.set_address_zip(address_zip)
return True # True helps to end address entry flow
return False
def __handle_contact_entries(self):
"""Handles the flow to ask user for contact details like phone and
email during patient registration or during update demographics.
Returns True once the user enters all the input values correctly.
After which flow can be completed either by updating the demographics
or creating a new patient.
"""
# Get the user input for contact entries from console
contact_details = input(
AddUpdateFlowHandler.CONTACT_INPUT_MESSAGE)
# Perform input string validation for invalid characters
text_has_valid_chars = self.validator. \
has_valid_character(contact_details)
        if text_has_valid_chars[0] is False:  # Invalid input characters.
# Error! Print invalid character message.
print(text_has_valid_chars[1])
else: # Found input text valid, proceed ahead.
contact_input_list = self.validator.\
get_list_from_input(contact_details)
if len(contact_input_list) != AddUpdateFlowHandler.CONTACT_ENTRIES:
# User entered less or more entries separated by delimiter.
# Show the error message.
print(AddUpdateFlowHandler.ERROR_INVALID_DELIMITER_ENTRIES)
else:
# All validation passed update the patient attribute.
phone, email = contact_input_list
self.__patient.set_phone(phone)
self.__patient.set_email(email)
return True # True helps to end contact entry flow
return False
# ===== yuanqqq/SIN :: train_util.py (MIT) =====
import torch
import torch.nn.functional as F
import torch.nn as nn
import numpy as np
from skimage.segmentation import mark_boundaries
import cv2
import sys
sys.path.append('./third_party/cython')
# from connectivity import enforce_connectivity  # needed when calling get_spixel_image with b_enforce_connect=True
def init_spixel_grid(args, b_train=True):
if b_train:
img_height, img_width = args.train_img_height, args.train_img_width
else:
img_height, img_width = args.input_img_height, args.input_img_width
# get spixel id for the final assignment
n_spixl_h = int(np.floor(img_height/args.downsize))
n_spixl_w = int(np.floor(img_width/args.downsize))
spixel_height = int(img_height / (1. * n_spixl_h))
spixel_width = int(img_width / (1. * n_spixl_w))
spix_values = np.int32(np.arange(0, n_spixl_w * n_spixl_h).reshape((n_spixl_h, n_spixl_w)))
spix_idx_tensor_ = shift9pos(spix_values)
spix_idx_tensor = np.repeat(
np.repeat(spix_idx_tensor_, spixel_height,axis=1), spixel_width, axis=2)
torch_spix_idx_tensor = torch.from_numpy(
np.tile(spix_idx_tensor, (args.batch_size, 1, 1, 1))).type(torch.float).cuda()
curr_img_height = int(np.floor(img_height))
curr_img_width = int(np.floor(img_width))
# pixel coord
all_h_coords = np.arange(0, curr_img_height, 1)
all_w_coords = np.arange(0, curr_img_width, 1)
curr_pxl_coord = np.array(np.meshgrid(all_h_coords, all_w_coords, indexing='ij'))
coord_tensor = np.concatenate([curr_pxl_coord[1:2, :, :], curr_pxl_coord[:1, :, :]])
all_XY_feat = (torch.from_numpy(
np.tile(coord_tensor, (args.batch_size, 1, 1, 1)).astype(np.float32)).cuda())
return torch_spix_idx_tensor, all_XY_feat
#===================== pooling and upsampling feature ==========================================
def shift9pos(input, h_shift_unit=1, w_shift_unit=1):
    # input is edge-padded to shape (c, 1+height+1, 1+width+1)
input_pd = np.pad(input, ((h_shift_unit, h_shift_unit), (w_shift_unit, w_shift_unit)), mode='edge')
input_pd = np.expand_dims(input_pd, axis=0)
# assign to ...
top = input_pd[:, :-2 * h_shift_unit, w_shift_unit:-w_shift_unit]
bottom = input_pd[:, 2 * h_shift_unit:, w_shift_unit:-w_shift_unit]
left = input_pd[:, h_shift_unit:-h_shift_unit, :-2 * w_shift_unit]
right = input_pd[:, h_shift_unit:-h_shift_unit, 2 * w_shift_unit:]
center = input_pd[:,h_shift_unit:-h_shift_unit,w_shift_unit:-w_shift_unit]
bottom_right = input_pd[:, 2 * h_shift_unit:, 2 * w_shift_unit:]
bottom_left = input_pd[:, 2 * h_shift_unit:, :-2 * w_shift_unit]
top_right = input_pd[:, :-2 * h_shift_unit, 2 * w_shift_unit:]
top_left = input_pd[:, :-2 * h_shift_unit, :-2 * w_shift_unit]
shift_tensor = np.concatenate([ top_left, top, top_right,
left, center, right,
bottom_left, bottom, bottom_right], axis=0)
return shift_tensor
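# Minimal usage sketch (illustrative, not part of the training pipeline):
# shift9pos stacks the 9-neighbourhood of an (H, W) map into (9, H, W),
# with channel 4 holding the unshifted input.
def _shift9pos_demo():
    grid = np.arange(12).reshape(3, 4)
    shifted = shift9pos(grid)
    assert shifted.shape == (9, 3, 4)
    assert (shifted[4] == grid).all()  # centre channel equals the input
    return shifted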
def poolfeat(input, prob, sp_h=2, sp_w=2):
def feat_prob_sum(feat_sum, prob_sum, shift_feat):
feat_sum += shift_feat[:, :-1, :, :]
prob_sum += shift_feat[:, -1:, :, :]
return feat_sum, prob_sum
b, _, h, w = input.shape
h_shift_unit = 1
w_shift_unit = 1
p2d = (w_shift_unit, w_shift_unit, h_shift_unit, h_shift_unit)
feat_ = torch.cat([input, torch.ones([b, 1, h, w]).cuda()], dim=1) # b* (n+1) *h*w
prob_feat = F.avg_pool2d(feat_ * prob.narrow(1, 0, 1), kernel_size=(sp_h, sp_w), stride=(sp_h, sp_w)) # b * (n+1) * h* w
send_to_top_left = F.pad(prob_feat, p2d, mode='constant', value=0)[:, :, 2 * h_shift_unit:, 2 * w_shift_unit:]
feat_sum = send_to_top_left[:, :-1, :, :].clone()
prob_sum = send_to_top_left[:, -1:, :, :].clone()
prob_feat = F.avg_pool2d(feat_ * prob.narrow(1, 1, 1), kernel_size=(sp_h, sp_w), stride=(sp_h, sp_w)) # b * (n+1) * h* w
top = F.pad(prob_feat, p2d, mode='constant', value=0)[:, :, 2 * h_shift_unit:, w_shift_unit:-w_shift_unit]
feat_sum, prob_sum = feat_prob_sum(feat_sum,prob_sum,top )
prob_feat = F.avg_pool2d(feat_ * prob.narrow(1, 2, 1), kernel_size=(sp_h, sp_w), stride=(sp_h, sp_w)) # b * (n+1) * h* w
top_right = F.pad(prob_feat, p2d, mode='constant', value=0)[:, :, 2 * h_shift_unit:, :-2 * w_shift_unit]
feat_sum, prob_sum = feat_prob_sum(feat_sum, prob_sum, top_right)
prob_feat = F.avg_pool2d(feat_ * prob.narrow(1, 3, 1), kernel_size=(sp_h, sp_w), stride=(sp_h, sp_w)) # b * (n+1) * h* w
left = F.pad(prob_feat, p2d, mode='constant', value=0)[:, :, h_shift_unit:-h_shift_unit, 2 * w_shift_unit:]
feat_sum, prob_sum = feat_prob_sum(feat_sum, prob_sum, left)
prob_feat = F.avg_pool2d(feat_ * prob.narrow(1, 4, 1), kernel_size=(sp_h, sp_w), stride=(sp_h, sp_w)) # b * (n+1) * h* w
center = F.pad(prob_feat, p2d, mode='constant', value=0)[:, :, h_shift_unit:-h_shift_unit, w_shift_unit:-w_shift_unit]
feat_sum, prob_sum = feat_prob_sum(feat_sum, prob_sum, center)
prob_feat = F.avg_pool2d(feat_ * prob.narrow(1, 5, 1), kernel_size=(sp_h, sp_w), stride=(sp_h, sp_w)) # b * (n+1) * h* w
right = F.pad(prob_feat, p2d, mode='constant', value=0)[:, :, h_shift_unit:-h_shift_unit, :-2 * w_shift_unit]
feat_sum, prob_sum = feat_prob_sum(feat_sum, prob_sum, right)
prob_feat = F.avg_pool2d(feat_ * prob.narrow(1, 6, 1), kernel_size=(sp_h, sp_w), stride=(sp_h, sp_w)) # b * (n+1) * h* w
bottom_left = F.pad(prob_feat, p2d, mode='constant', value=0)[:, :, :-2 * h_shift_unit, 2 * w_shift_unit:]
feat_sum, prob_sum = feat_prob_sum(feat_sum, prob_sum, bottom_left)
prob_feat = F.avg_pool2d(feat_ * prob.narrow(1, 7, 1), kernel_size=(sp_h, sp_w), stride=(sp_h, sp_w)) # b * (n+1) * h* w
bottom = F.pad(prob_feat, p2d, mode='constant', value=0)[:, :, :-2 * h_shift_unit, w_shift_unit:-w_shift_unit]
feat_sum, prob_sum = feat_prob_sum(feat_sum, prob_sum, bottom)
prob_feat = F.avg_pool2d(feat_ * prob.narrow(1, 8, 1), kernel_size=(sp_h, sp_w), stride=(sp_h, sp_w)) # b * (n+1) * h* w
bottom_right = F.pad(prob_feat, p2d, mode='constant', value=0)[:, :, :-2 * h_shift_unit, :-2 * w_shift_unit]
feat_sum, prob_sum = feat_prob_sum(feat_sum, prob_sum, bottom_right)
pooled_feat = feat_sum / (prob_sum + 1e-8)
return pooled_feat
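# Usage sketch (illustrative; assumes a CUDA device, since poolfeat allocates
# a ones-tensor with .cuda() internally): average pixel features into each
# superpixel cell, weighted by the 9-way soft assignment ``prob``.
def _poolfeat_demo():
    feat = torch.randn(2, 3, 64, 64).cuda()
    prob = torch.softmax(torch.randn(2, 9, 64, 64), dim=1).cuda()
    pooled = poolfeat(feat, prob, sp_h=4, sp_w=4)
    assert pooled.shape == (2, 3, 16, 16)  # one feature vector per superpixel cell
    return pooled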
def upfeat(input, prob, up_h=2, up_w=2):
# input b*n*H*W downsampled
# prob b*9*h*w
b, c, h, w = input.shape
h_shift = 1
w_shift = 1
p2d = (w_shift, w_shift, h_shift, h_shift)
feat_pd = F.pad(input, p2d, mode='constant', value=0)
gt_frm_top_left = F.interpolate(feat_pd[:, :, :-2 * h_shift, :-2 * w_shift], size=(h * up_h, w * up_w),mode='nearest')
feat_sum = gt_frm_top_left * prob.narrow(1,0,1)
top = F.interpolate(feat_pd[:, :, :-2 * h_shift, w_shift:-w_shift], size=(h * up_h, w * up_w), mode='nearest')
feat_sum += top * prob.narrow(1, 1, 1)
top_right = F.interpolate(feat_pd[:, :, :-2 * h_shift, 2 * w_shift:], size=(h * up_h, w * up_w), mode='nearest')
feat_sum += top_right * prob.narrow(1,2,1)
    left = F.interpolate(feat_pd[:, :, h_shift:-h_shift, :-2 * w_shift], size=(h * up_h, w * up_w), mode='nearest')
feat_sum += left * prob.narrow(1, 3, 1)
center = F.interpolate(input, (h * up_h, w * up_w), mode='nearest')
feat_sum += center * prob.narrow(1, 4, 1)
    right = F.interpolate(feat_pd[:, :, h_shift:-h_shift, 2 * w_shift:], size=(h * up_h, w * up_w), mode='nearest')
feat_sum += right * prob.narrow(1, 5, 1)
bottom_left = F.interpolate(feat_pd[:, :, 2 * h_shift:, :-2 * w_shift], size=(h * up_h, w * up_w), mode='nearest')
feat_sum += bottom_left * prob.narrow(1, 6, 1)
bottom = F.interpolate(feat_pd[:, :, 2 * h_shift:, w_shift:-w_shift], size=(h * up_h, w * up_w), mode='nearest')
feat_sum += bottom * prob.narrow(1, 7, 1)
bottom_right = F.interpolate(feat_pd[:, :, 2 * h_shift:, 2 * w_shift:], size=(h * up_h, w * up_w), mode='nearest')
feat_sum += bottom_right * prob.narrow(1, 8, 1)
return feat_sum
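# Round-trip sketch (illustrative, assumes CUDA): features pooled with
# poolfeat can be scattered back to pixel resolution with upfeat using the
# same soft assignment map.
def _pool_up_roundtrip_demo():
    feat = torch.rand(1, 3, 32, 32).cuda()
    prob = torch.softmax(torch.randn(1, 9, 32, 32), dim=1).cuda()
    pooled = poolfeat(feat, prob, sp_h=16, sp_w=16)   # (1, 3, 2, 2)
    recon = upfeat(pooled, prob, up_h=16, up_w=16)    # (1, 3, 32, 32)
    assert recon.shape == feat.shape
    return recon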
# ================= - spixel related -=============
def assign2uint8(assign):
    # red up, green mid, blue down; for debug only
b,c,h,w = assign.shape
red = torch.cat([torch.ones(size=assign.shape), torch.zeros(size=[b,2,h,w])],dim=1).cuda()
green = torch.cat([ torch.zeros(size=[b,1,h,w]),
torch.ones(size=assign.shape),
torch.zeros(size=[b,1,h,w])],dim=1).cuda()
blue = torch.cat([torch.zeros(size=[b,2,h,w]),
torch.ones(size=assign.shape)],dim=1).cuda()
black = torch.zeros(size=[b,3,h,w]).cuda()
white = torch.ones(size=[b,3,h,w]).cuda()
    # up probability
mat_vis = torch.where(assign.type(torch.float) < 0. , white, black)
mat_vis = torch.where(assign.type(torch.float) >= 0. , red* (assign.type(torch.float)+1)/3, mat_vis)
mat_vis = torch.where(assign.type(torch.float) >= 3., green*(assign.type(torch.float)-2)/3, mat_vis)
mat_vis = torch.where(assign.type(torch.float) >= 6., blue * (assign.type(torch.float) - 5.) / 3, mat_vis)
return (mat_vis * 255.).type(torch.uint8)
def val2uint8(mat,maxVal):
maxVal_mat = torch.ones(mat.shape).cuda() * maxVal
mat_vis = torch.where(mat > maxVal_mat, maxVal_mat, mat)
return (mat_vis * 255. / maxVal).type(torch.uint8)
def update_spixl_map (spixl_map_idx_in, assig_map_in):
assig_map = assig_map_in.clone()
b,_,h,w = assig_map.shape
_, _, id_h, id_w = spixl_map_idx_in.shape
if (id_h == h) and (id_w == w):
spixl_map_idx = spixl_map_idx_in
else:
spixl_map_idx = F.interpolate(spixl_map_idx_in, size=(h,w), mode='nearest')
assig_max,_ = torch.max(assig_map, dim=1, keepdim= True)
assignment_ = torch.where(assig_map == assig_max, torch.ones(assig_map.shape).cuda(),torch.zeros(assig_map.shape).cuda())
new_spixl_map_ = spixl_map_idx * assignment_ # winner take all
new_spixl_map = torch.sum(new_spixl_map_,dim=1,keepdim=True).type(torch.int)
return new_spixl_map
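# Sketch (illustrative, assumes CUDA): a hard superpixel map is the arg-max
# over the 9 candidate neighbour ids carried by the soft assignment, e.g.
# using the id grid produced by init_spixel_grid.
def _hard_assignment_demo(spixel_id_grid, assign_prob):
    # spixel_id_grid: b*9*H*W candidate superpixel ids
    # assign_prob:    b*9*H*W scores over the 9 candidates
    return update_spixl_map(spixel_id_grid, assign_prob)  # b*1*H*W int ids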
def get_spixel_image(given_img, spix_index, n_spixels = 600, b_enforce_connect = False):
if not isinstance(given_img, np.ndarray):
given_img_np_ = given_img.detach().cpu().numpy().transpose(1,2,0)
else: # for cvt lab to rgb case
given_img_np_ = given_img
if not isinstance(spix_index, np.ndarray):
spix_index_np = spix_index.detach().cpu().numpy().transpose(0,1)
else:
spix_index_np = spix_index
h, w = spix_index_np.shape
given_img_np = cv2.resize(given_img_np_, dsize=(w, h), interpolation=cv2.INTER_CUBIC)
if b_enforce_connect:
spix_index_np = spix_index_np.astype(np.int64)
segment_size = (given_img_np_.shape[0] * given_img_np_.shape[1]) / (int(n_spixels) * 1.0)
min_size = int(0.06 * segment_size)
max_size = int(3 * segment_size)
spix_index_np = enforce_connectivity(spix_index_np[None, :, :], min_size, max_size)[0]
cur_max = np.max(given_img_np)
    spixel_bd_image = mark_boundaries(given_img_np/cur_max, spix_index_np.astype(int), color = (0,1,1))  # cyan
return (cur_max*spixel_bd_image).astype(np.float32).transpose(2,0,1), spix_index_np #
# ============ accumulate Q =============================
def spixlIdx(args, b_train = False):
# code modified from ssn
if b_train:
n_spixl_h = int(np.floor(args.train_img_height / args.downsize))
n_spixl_w = int(np.floor(args.train_img_width / args.downsize))
else:
n_spixl_h = int(np.floor(args.input_img_height / args.downsize))
n_spixl_w = int(np.floor(args.input_img_width / args.downsize))
spix_values = np.int32(np.arange(0, n_spixl_w * n_spixl_h).reshape((n_spixl_h, n_spixl_w)))
spix_idx_tensor = shift9pos(spix_values)
torch_spix_idx_tensor = torch.from_numpy(
np.tile(spix_idx_tensor, (args.batch_size, 1, 1, 1))).type(torch.float).cuda()
return torch_spix_idx_tensor
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
def __repr__(self):
return '{:.3f} ({:.3f})'.format(self.val, self.avg)
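# Usage sketch (illustrative): track a sample-weighted running loss average.
def _average_meter_demo():
    losses = AverageMeter()
    losses.update(0.9, n=16)
    losses.update(0.7, n=16)
    assert repr(losses) == '0.700 (0.800)'  # latest value (running average)
    return losses.avg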
def batch2img(img):
b,_,h,w = img.shape
tmp = img.permute(0,2,3,1)
for i in range(b):
if i ==0:
tmp_stack = tmp[i,:,:,:]
else:
tmp_stack = torch.cat([tmp_stack,tmp[i,:,:,:]],dim=-2)
return tmp_stack
def build_LABXY_feat(label_in, XY_feat):
img_lab = label_in.clone().type(torch.float)
b, _, curr_img_height, curr_img_width = XY_feat.shape
scale_img = F.interpolate(img_lab, size=(curr_img_height,curr_img_width), mode='nearest')
LABXY_feat = torch.cat([scale_img, XY_feat],dim=1)
return LABXY_feat
def rgb2Lab_torch(img_in, mean_values = None):
    # self-implemented function that converts an RGB image to LAB
    # input img intensity should be [0,1] float, b*3*h*w
img= (img_in.clone() + mean_values.cuda()).clamp(0, 1)
assert img.min() >= 0 and img.max() <= 1
mask = img > 0.04045
img[mask] = torch.pow((img[mask] + 0.055) / 1.055, 2.4)
img[~mask] /= 12.92
xyz_from_rgb = torch.tensor([[0.412453, 0.357580, 0.180423],
[0.212671, 0.715160, 0.072169],
[0.019334, 0.119193, 0.950227]]).cuda()
rgb = img.permute(0,2,3,1)
xyz_img = torch.matmul(rgb, xyz_from_rgb.transpose_(0,1))
xyz_ref_white = torch.tensor([0.95047, 1., 1.08883]).cuda()
# scale by CIE XYZ tristimulus values of the reference white point
lab = xyz_img / xyz_ref_white
# Nonlinear distortion and linear transformation
mask = lab > 0.008856
lab[mask] = torch.pow(lab[mask], 1. / 3.)
lab[~mask] = 7.787 * lab[~mask] + 16. / 116.
x, y, z = lab[..., 0:1], lab[..., 1:2], lab[..., 2:3]
# Vector scaling
L = (116. * y) - 16.
a = 500.0 * (x - y)
b = 200.0 * (y - z)
return torch.cat([L, a, b], dim=-1).permute(0,3,1,2)
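# Sketch (illustrative, assumes CUDA): ``mean_values`` is the per-channel mean
# that was subtracted during normalisation; it is added back so the RGB input
# lies in [0, 1] before the LAB conversion.
def _rgb2lab_demo():
    mean = torch.tensor([0.411, 0.432, 0.45]).unsqueeze(-1).unsqueeze(-1)
    img = (torch.rand(1, 3, 8, 8) - mean).cuda()  # zero-centred input
    lab = rgb2Lab_torch(img, mean_values=mean)
    assert lab.shape == (1, 3, 8, 8)  # channels are L, a, b
    return lab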
def label2one_hot_torch(labels, C=14):
# w.r.t http://jacobkimmel.github.io/pytorch_onehot/
'''
Converts an integer label torch.autograd.Variable to a one-hot Variable.
Parameters
----------
labels : torch.autograd.Variable of torch.cuda.LongTensor
N x 1 x H x W, where N is batch size.
Each value is an integer representing correct classification.
C : integer.
number of classes in labels.
Returns
-------
target : torch.cuda.FloatTensor
N x C x H x W, where C is class number. One-hot encoded.
'''
b,_, h, w = labels.shape
one_hot = torch.zeros(b, C, h, w, dtype=torch.long).cuda()
target = one_hot.scatter_(1, labels.type(torch.long).data, 1) #require long type
return target.type(torch.float32)
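# Sketch (illustrative, assumes CUDA): one-hot encode an integer label map.
def _one_hot_demo():
    labels = torch.randint(0, 5, (2, 1, 4, 4)).cuda()
    one_hot = label2one_hot_torch(labels, C=5)  # (2, 5, 4, 4) float
    assert one_hot.sum(dim=1).eq(1).all()  # exactly one class per pixel
    return one_hot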
# ===============compute labxy loss(unsupervised)=======================
def get_lab_loss(img, img_grad, map_bound, map_non_bound):
# _, _, h, w = img_grad.shape
# img_grad = img_grad*img_grad
# max_grad = img_grad.max(dim=-1)[0].max(dim=-1)[0]
# mean_grad = torch.sum(torch.sum(img_grad, dim=-1), dim=-1)/(h*w)
# img_grad = F.relu(img_grad-mean_grad.unsqueeze(-1).unsqueeze(-1))
# overlap = torch.sum(torch.sum(map_bound*img_grad, dim=-1), dim=-1)
# gt_bound_all = torch.sum(torch.sum(img_grad, dim=-1), dim=-1)
# pred_bound_all = torch.sum(torch.sum(map_bound, dim=-1), dim=-1)
# lab_loss = torch.sum(gt_bound_all-overlap, dim=-1)
test_loss = torch.sum(torch.sum(img*img*map_non_bound, dim=-1), dim=-1)
return test_loss
def gaussian_kernel(img):
b, c, h, w = img.shape
kernel = [[2, 4, 5, 4, 2],
[4, 9, 12, 9, 4],
[5, 12, 15, 12, 5],
[4, 9, 12, 9, 4],
[2, 4, 5, 4, 2]]
    kernel = torch.tensor(kernel).float().cuda().repeat(c, 1, 1).unsqueeze(1)*(1/159.)  # the 5x5 Gaussian weights sum to 159, so 1/159 keeps intensities unchanged
out = F.conv2d(img, kernel, padding=0, groups=c)
out = F.pad(out, (2, 2, 2, 2), mode='replicate')
return out
def labxy_loss(map, img):
device = map.device
bz, c, h, w = img.shape
c = 1
y_feat = torch.arange(0, h).repeat(bz, h, 1).unsqueeze(1).float().to(device)
x_feat = y_feat.transpose(2, 3)
kernel_img = [[[-1, 0, 0], [0, 1, 0], [0, 0, 0]],
[[0, -1, 0], [0, 1, 0], [0, 0, 0]],
[[0, 0, -1], [0, 1, 0], [0, 0, 0]],
[[0, 0, 0], [-1, 1, 0], [0, 0, 0]],
[[0, 0, 0], [0, 1, -1], [0, 0, 0]],
[[0, 0, 0], [0, 1, 0], [-1, 0, 0]],
[[0, 0, 0], [0, 1, 0], [0, -1, 0]],
[[0, 0, 0], [0, 1, 0], [0, 0, -1]]]
# kernel_xy = [[[-1, 0, 0], [0, 1, 0], [0, 0, 0]],
# [[0, 0, 0], [0, 0, 0], [0, 0, 0]],
# [[0, 0, -1], [0, 1, 0], [0, 0, 0]],
# [[0, 0, 0], [0, 0, 0], [0, 0, 0]],
# [[0, 0, 0], [0, 0, 0], [0, 0, 0]],
# [[0, 0, 0], [0, 1, 0], [-1, 0, 0]],
# [[0, 0, 0], [0, 0, 0], [0, 0, 0]],
# [[0, 0, 0], [0, 1, 0], [0, 0, -1]]]
# kernel_img = [[[0, -1, 0], [0, 1, 0], [0, 0, 0]],
# [[0, 0, 0], [-1, 1, 0], [0, 0, 0]],
# [[0, 0, 0], [0, 1, -1], [0, 0, 0]],
# [[0, 0, 0], [0, 1, 0], [0, -1, 0]]]
# kernel_map = [[-1, -1, -1], [-1, 8, -1], [-1, -1, -1]]
kernel_img = torch.tensor(kernel_img).float().to(device).repeat(c+3, 1, 1).unsqueeze(1)
# kernel_xy = torch.tensor(kernel_xy).float().to(device).repeat(2, 1, 1).unsqueeze(1)
# kernel_map = torch.tensor(kernel_map).float().to(device).repeat(1, 1, 1, 1)
# lab_img = rgb2Lab_torch(img, torch.tensor([0.411, 0.432, 0.45]).unsqueeze(-1).unsqueeze(-1))
img = gaussian_kernel(img)
gray_img = (0.2989*img[:, 0, :, :] + 0.5870*img[:, 1, :, :] + 0.1140*img[:, 2, :, :]).unsqueeze(1)
cat_feat = torch.cat((gray_img, x_feat, y_feat, map.float()), dim=1)
cat_feat = F.pad(cat_feat, (1, 1, 1, 1), mode='replicate')
feat = F.conv2d(cat_feat, kernel_img, groups=c+3)
cat_xy = F.pad(torch.cat((x_feat, y_feat), dim=1), (1, 1, 1, 1), mode='replicate')
# xy_feat = F.conv2d(cat_xy, kernel_xy, groups=2)
img_grad = feat[:, :8*c, :, :]
xy_grad = feat[:, 8*c:-8, :, :]
# xy_grad = xy_feat
map_bound = feat[:, -8:, :, :]
map_bound_8dim = -F.relu(-map_bound * map_bound+1)+1
map_bound_1dim = -F.relu(-torch.sum(map_bound*map_bound, dim=1)+1)+1
map_non_bound_1dim = -map_bound_1dim + 1
map_non_bound_8dim = -map_bound_8dim + 1
# img_grad_non_bound = torch.sum(img_grad*img_grad*map_non_bound_8dim.repeat(1, c, 1, 1), dim=1)
# img_grad_all = torch.sum(img_grad*img_grad, dim=1)
# img_grad *= map_non_bound_1dim
# lab_loss = torch.sum(torch.sum(img_grad_non_bound, dim=-1), dim=-1)/torch.sum(torch.sum(img_grad_all, dim=-1), dim=-1)
# lab_loss = torch.sum(torch.sum(img_grad_non_bound, dim=-1), dim=-1)
# lab_loss = lab_loss/torch.sum(torch.sum(img_grad_all, dim=-1), dim=-1)
lab_loss = get_lab_loss(gray_img.squeeze(1), img_grad, map_bound_1dim, map_non_bound_1dim)
xy_grad = torch.sum(xy_grad * xy_grad * map_bound_8dim.repeat(1, 2, 1, 1), dim=1)
xy_loss1 = torch.sum(torch.sum(xy_grad, dim=-1), dim=-1)
xy_loss2 = torch.sum(torch.sum(map_bound_1dim, dim=-1), dim=-1)
xy_loss = xy_loss1
# map_bound = F.conv2d(map, kernel_map)
return lab_loss, xy_loss
def labxy_v_loss(prob, label):
_, color_c, _, _ = label.shape
kernel = [[[0, -1, 0], [0, 1, 0], [0, 0, 0]],
[[0, 0, 0], [0, 1, 0], [0, -1, 0]]]
kernel = torch.tensor(kernel).float().cuda().repeat(color_c, 1, 1).unsqueeze(1)
# cat_feat = img
label = F.pad(label, (1, 1, 0, 0), mode='replicate')
cat_feat = F.conv2d(label, kernel, stride=(2, 1), padding=(0, 0), groups=color_c)
cat_feat = cat_feat*cat_feat
cat_feat = F.relu(-F.relu(cat_feat)+1)
b, c, h, w = cat_feat.shape
_, gt_id = cat_feat.permute(0, 2, 3, 1).reshape(-1, 2).max(1, keepdim=False)
# color_prob = cat_feat
# if color_prob.shape[1] > 2:
# color_prob = color_prob[:, 0:2, :, :] + color_prob[:, 2:4, :, :] + color_prob[:, 4:, :, :]
# color_prob = color_prob.permute(0, 2, 3, 1).reshape(-1, 2)
# _, color_id = color_prob.min(1, keepdim=False)
# b, _, h, w = gt.shape
# gt_id = torch.where(gt > 0, torch.ones(gt.shape).cuda(), torch.zeros(gt.shape).cuda())
# gt_id = gt_id.reshape(-1, ).long()
# gt = F.softmax(gt, dim=1)
# _, gt_id = gt.permute(0, 2, 3, 1).reshape(-1, 2).min(1, keepdim=False)
cross_loss = nn.CrossEntropyLoss(reduction='none')
color_loss = cross_loss(prob[:, :, 1:-1:2, :].permute(0, 2, 3, 1).reshape(-1, 2), gt_id)
color_loss = color_loss.view(b, h, w)
# gt_prob = F.softmax(cat_feat, dim=1)
# pred_prob = prob[:, :, 1:-1:2, :]
# loss = (gt_prob[:, 0, :, :]-pred_prob[:, 0, :, :])*(gt_prob[:, 0, :, :]-pred_prob[:, 0, :, :]) +\
# (gt_prob[:, 1, :, :] - pred_prob[:, 1, :, :]) * (gt_prob[:, 1, :, :] - pred_prob[:, 1, :, :])
# loss = torch.sum(torch.sum(loss, dim=-1), dim=-1)
gt = cat_feat[:, 0, :, :] - cat_feat[:, 1, :, :]
weight = gt * gt
color_loss = weight*color_loss
color_loss = torch.sum(torch.sum(color_loss, dim=-1), dim=-1)
# regular_weight = 1-weight
# regular_loss = regular_weight * (prob[:, 0, 1:-1:2, :]-prob[:, 1, 1:-1:2, :])
# regular_loss = torch.sum(torch.sum(regular_loss, dim=-1), dim=-1)
# regular_loss = regular_loss * regular_loss
# color_loss += regular_loss
return color_loss
def labxy_h_loss(prob, label):
_, color_c, _, _ = label.shape
kernel = [[[0, 0, 0], [-1, 1, 0], [0, 0, 0]],
[[0, 0, 0], [0, 1, -1], [0, 0, 0]]]
kernel = torch.tensor(kernel).float().cuda().repeat(color_c, 1, 1).unsqueeze(1)
# cat_feat = img
label = F.pad(label, (0, 0, 1, 1), mode='replicate')
cat_feat = F.conv2d(label, kernel, stride=(1, 2), padding=(0, 0), groups=color_c)
cat_feat = cat_feat * cat_feat
# b, c, h, w = cat_feat.shape
# gt_prob = F.softmax(cat_feat.view(-1, 2, h, w), dim=1).view(b, -1, h, w)
#
# color_prob = cat_feat
# if color_prob.shape[1] > 2:
# color_prob = color_prob[:, 0:2, :, :] + color_prob[:, 2:4, :, :] + color_prob[:, 4:, :, :]
# color_prob = color_prob.permute(0, 2, 3, 1).reshape(-1, 2)
# _, color_id = color_prob.min(1, keepdim=False)
# b, _, h, w = gt.shape
# gt_id = torch.where(gt > 0, torch.ones(gt.shape).cuda(), torch.zeros(gt.shape).cuda())
# gt_id = gt_id.reshape(-1, ).long()
# gt = F.softmax(gt, dim=1)
# _, gt_id = gt.permute(0, 2, 3, 1).reshape(-1, 2).min(1, keepdim=False)
cat_feat = F.relu(-F.relu(cat_feat) + 1)
b, c, h, w = cat_feat.shape
_, gt_id = cat_feat.permute(0, 2, 3, 1).reshape(-1, 2).max(1, keepdim=False)
cross_loss = nn.CrossEntropyLoss(reduction='none')
color_loss = cross_loss(prob[:, :, :, 1:-1:2].permute(0, 2, 3, 1).reshape(-1, 2), gt_id)
color_loss = color_loss.view(b, h, w)
gt = cat_feat[:, 0, :, :] - cat_feat[:, 1, :, :]
weight = gt * gt
color_loss = weight * color_loss
color_loss = torch.sum(torch.sum(color_loss, dim=-1), dim=-1)
# regular_weight = 1 - weight
# regular_loss = regular_weight * (prob[:, 0, :, 1:-1:2] - prob[:, 1, :, 1:-1:2])
# regular_loss = torch.sum(torch.sum(regular_loss, dim=-1), dim=-1)
# regular_loss = regular_loss*regular_loss
# color_loss += regular_loss
# gt_prob = F.softmax(cat_feat, dim=1)
# pred_prob = prob[:, :, :, 1:-1:2]
# loss = (gt_prob[:, 0, :, :] - pred_prob[:, 0, :, :]) * (gt_prob[:, 0, :, :] - pred_prob[:, 0, :, :]) + \
# (gt_prob[:, 1, :, :] - pred_prob[:, 1, :, :]) * (gt_prob[:, 1, :, :] - pred_prob[:, 1, :, :])
# loss = torch.sum(torch.sum(loss, dim=-1), dim=-1)
return color_loss
def compute_lr_grad(img):
img_expand = F.pad(img, (1, 1, 0, 0), mode='replicate')
img_l = img_expand[:, :, :, :-2]
img_r = img_expand[:, :, :, 2:]
l_grad = img - img_l
r_grad = img_r - img
lr_grad = l_grad*l_grad + r_grad*r_grad
lr_grad = torch.sum(lr_grad, dim=1, keepdim=True)
return lr_grad
def compute_tb_grad(img):
img_expand = F.pad(img, (0, 0, 1, 1), mode='replicate')
img_t = img_expand[:, :, :-2, :]
img_b = img_expand[:, :, 2:, :]
t_grad = img - img_t
b_grad = img_b - img
tb_grad = t_grad*t_grad + b_grad*b_grad
tb_grad = torch.sum(tb_grad, dim=1, keepdim=True)
return tb_grad
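# Sketch (illustrative): squared colour differences toward the horizontal and
# vertical neighbours, summed over channels, give a simple edge-strength map.
def _grad_demo():
    img = torch.rand(1, 3, 16, 16)
    edge_strength = compute_lr_grad(img) + compute_tb_grad(img)  # (1, 1, 16, 16)
    return edge_strength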
def compute_gt(img):
_, c, _, _ = img.shape
kernel_17 = [[1, 1, 1, 1, 1, 1, 1, 1, -8, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, -8, 1, 1, 1, 1, 1, 1, 1, 1]]
kernel_9 = [[1, 1, 1, 1, -4, 0, 0, 0, 0],
[0, 0, 0, 0, -4, 1, 1, 1, 1]]
kernel_5 = [[1, 1, -2, 0, 0],
[0, 0, -2, 1, 1]]
kernel_3 = [[1, -1, 0],
[0, -1, 1]]
kernel_17 = torch.tensor(kernel_17).float().cuda().repeat(c, 1).unsqueeze(1).unsqueeze(1)
kernel_9 = torch.tensor(kernel_9).float().cuda().repeat(c, 1).unsqueeze(1).unsqueeze(1)
kernel_5 = torch.tensor(kernel_5).float().cuda().repeat(c, 1).unsqueeze(1).unsqueeze(1)
kernel_3 = torch.tensor(kernel_3).float().cuda().repeat(c, 1).unsqueeze(1).unsqueeze(1)
gt_h_17 = F.conv2d(img, kernel_17, stride=16, groups=c)
gt_h_9 = F.conv2d(img, kernel_9, stride=8, groups=c)
gt_h_5 = F.conv2d(img, kernel_5, stride=4, groups=c)
gt_h_3 = F.conv2d(img, kernel_3, stride=2, groups=c)
gt_v_17 = F.conv2d(img, kernel_17.transpose(2, 3), stride=(16, 8), groups=c)
gt_v_9 = F.conv2d(img, kernel_9.transpose(2, 3), stride=(8, 4), groups=c)
gt_v_5 = F.conv2d(img, kernel_5.transpose(2, 3), stride=(4, 2), groups=c)
gt_v_3 = F.conv2d(img, kernel_3.transpose(2, 3), stride=(2, 1), groups=c)
gt_h_17 = gt_h_17 * gt_h_17
gt_h_17_l = torch.sum(gt_h_17[:, 0::2, :, :], dim=1, keepdim=True)
gt_h_17_r = torch.sum(gt_h_17[:, 1::2, :, :], dim=1, keepdim=True)
gt_h_17 = torch.cat((gt_h_17_l, gt_h_17_r), dim=1)
gt_h_9 = gt_h_9 * gt_h_9
gt_h_9_l = torch.sum(gt_h_9[:, 0::2, :, :], dim=1, keepdim=True)
gt_h_9_r = torch.sum(gt_h_9[:, 1::2, :, :], dim=1, keepdim=True)
gt_h_9 = torch.cat((gt_h_9_l, gt_h_9_r), dim=1)
gt_h_5 = gt_h_5 * gt_h_5
gt_h_5_l = torch.sum(gt_h_5[:, 0::2, :, :], dim=1, keepdim=True)
gt_h_5_r = torch.sum(gt_h_5[:, 1::2, :, :], dim=1, keepdim=True)
gt_h_5 = torch.cat((gt_h_5_l, gt_h_5_r), dim=1)
gt_h_3 = gt_h_3 * gt_h_3
gt_h_3_l = torch.sum(gt_h_3[:, 0::2, :, :], dim=1, keepdim=True)
gt_h_3_r = torch.sum(gt_h_3[:, 1::2, :, :], dim=1, keepdim=True)
gt_h_3 = torch.cat((gt_h_3_l, gt_h_3_r), dim=1)
gt_v_17 = gt_v_17 * gt_v_17
gt_v_17_l = torch.sum(gt_v_17[:, 0::2, :, :], dim=1, keepdim=True)
gt_v_17_r = torch.sum(gt_v_17[:, 1::2, :, :], dim=1, keepdim=True)
gt_v_17 = torch.cat((gt_v_17_l, gt_v_17_r), dim=1)
gt_v_9 = gt_v_9 * gt_v_9
gt_v_9_l = torch.sum(gt_v_9[:, 0::2, :, :], dim=1, keepdim=True)
gt_v_9_r = torch.sum(gt_v_9[:, 1::2, :, :], dim=1, keepdim=True)
gt_v_9 = torch.cat((gt_v_9_l, gt_v_9_r), dim=1)
gt_v_5 = gt_v_5 * gt_v_5
gt_v_5_l = torch.sum(gt_v_5[:, 0::2, :, :], dim=1, keepdim=True)
gt_v_5_r = torch.sum(gt_v_5[:, 1::2, :, :], dim=1, keepdim=True)
gt_v_5 = torch.cat((gt_v_5_l, gt_v_5_r), dim=1)
gt_v_3 = gt_v_3 * gt_v_3
gt_v_3_l = torch.sum(gt_v_3[:, 0::2, :, :], dim=1, keepdim=True)
gt_v_3_r = torch.sum(gt_v_3[:, 1::2, :, :], dim=1, keepdim=True)
gt_v_3 = torch.cat((gt_v_3_l, gt_v_3_r), dim=1)
return gt_h_17, gt_h_9, gt_h_5, gt_h_3, gt_v_17, gt_v_9, gt_v_5, gt_v_3
if __name__ == '__main__':
import os
from torchvision import transforms
w = 255
s_w = 15
map_in = torch.arange(1, s_w*s_w+1).reshape(s_w, s_w).repeat(1, 1, 1, 1).cuda()
map_in = F.interpolate(map_in.float(), (w, w), mode='nearest')
output = './demo/lab_loss'
if not os.path.isdir(output):
os.makedirs(output)
img = cv2.imread('./demo/inputs/birds.jpg')[:225, :225, :]
img = torch.tensor(img).float().cuda().permute(2, 0, 1).contiguous()
norm = transforms.Normalize([0, 0, 0], [255, 255, 255])
img = norm(img).unsqueeze(0)
# img = img.unsqueeze(0)
img0 = img[0]
cv2.imwrite(os.path.join(output, 'norm.jpg'), img[0].detach().cpu().numpy().transpose(1,2,0))
img = gaussian_kernel(img)
cv2.imwrite(os.path.join(output, 'gaussian_smooth.jpg'), img[0].detach().cpu().numpy().transpose(1,2,0))
lab_img = rgb2Lab_torch(img, torch.tensor([0.411,0.432,0.45]).unsqueeze(-1).unsqueeze(-1))
img_lr_grad = compute_lr_grad(lab_img)
img_tb_grad = compute_tb_grad(lab_img)
gt_h_17, gt_h_9, gt_h_5, gt_h_3, gt_v_17, gt_v_9, gt_v_5, gt_v_3 = compute_gt(lab_img)
p0v = torch.randn(1, 2, 225, 225).cuda()
lab_v_loss = labxy_v_loss(p0v, gt_v_3)
# ===== dkhwangbo/emr-bootstrap-actions :: tajo/emr-4.x/install-tajo.py (Apache-2.0) =====
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# The EMR 4.x Bootstrap for Tajo
#
# Arguments
#
# -t, --tar
# The tajo binary Tarball URL.(Optional)
#
# ex) --tar http://d3kp3z3ppbkcio.cloudfront.net/tajo-0.10.0/tajo-0.10.0.tar.gz
# or
# --tar s3://[your_bucket]/[your_path]/tajo-{version}.tar.gz
#
# -c, --conf
# The tajo conf directory URL.(Optional)
#
# ex) --conf s3://beta.elasticmapreduce/bootstrap-actions/tajo/template/tajo-0.10.0/c3.xlarge/conf
#
# -l, --lib
# The tajo third party lib URL.(Optional)
#
# ex) --lib s3://{your_bucket}/{your_lib_dir}
# or
# --lib http://{lib_url}/{lib_file_name.jar}
#
# -v, --tajo-version
# The tajo release version.(Optional)
# Default: Apache tajo stable version.
#
# ex) x.x.x
#
# -h, --help
# The help
#
# -e, --env
# The item of tajo-env.sh(Optional, space delimiter)
#
# ex) --env "TAJO_PID_DIR=/home/hadoop/tajo/pids TAJO_WORKER_HEAPSIZE=1024"
#
# -s, --site
# The item of tajo-site.xml(Optional, space delimiter)
#
# ex) --site "tajo.rootdir=s3://mybucket/tajo tajo.worker.start.cleanup=true tajo.catalog.store.class=org.apache.tajo.catalog.store.MySQLStore"
#
# -T, --test-home
# The Test directory path(Only test)
#
# ex) --test-home "/home/hadoop/bootstrap_test"
#
# -H, --test-hadoop-home
# The Test HADOOP_HOME(Only test)
#
# ex) --test-hadoop-home "/home/hadoop"
#
# -*- coding: utf-8 -*-
import os, sys
import argparse
import shutil
import re
import json
import socket
import tarfile
import urllib2
import time
import subprocess
from xml.etree import ElementTree
from urlparse import urlparse
class XmlUtil:
CONFIGURATION_HEADER = '''<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>'''
CONFIGURATION_EMPTY_NODE = '''<configuration></configuration>'''
def header_configuration(self):
return self.CONFIGURATION_HEADER
def new_configuration(self):
return self.CONFIGURATION_EMPTY_NODE
def get_property_value(self, target, key):
if os.path.isfile(target):
print('Found file %s' % (target,))
tree = ElementTree.parse(target)
root = tree.getroot()
for property in root.findall('property'):
name = property.find('name')
if name.text == key:
return property.find('value').text
else:
print('Not Found file at %s' % (target,))
return None
def get_property_value_from_node(self, root, key):
for property in root.findall('property'):
name = property.find('name')
if name.text == key:
return property.find('value').text
print('Not Found The Name %s' % (key,))
return None
def create_node(self, parent, node, name, value):
node = ElementTree.Element(node)
ElementTree.SubElement(node, 'name').text = name
ElementTree.SubElement(node, 'value').text = value
parent.append(node)
return parent
def indent(self, elem, level=0):
i = '\n' + level * ' '
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + ' '
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
self.indent(elem, level + 1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
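    # Usage sketch (illustrative): build a Hadoop-style property entry and
    # pretty-print the tree before writing it out.
    #     util = XmlUtil()
    #     root = ElementTree.fromstring(util.new_configuration())
    #     util.create_node(root, 'property', 'tajo.rootdir', 'hdfs:///tajo')
    #     util.indent(root)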
class JsonUtil:
TARGET_JSON = None
JSON_DATA = None
def __init__(self, json_path):
self.TARGET_JSON = json_path
with open(self.TARGET_JSON, 'r') as js_data:
self.JSON_DATA = json.load(js_data)
def get(self, key):
if self.JSON_DATA:
return self.JSON_DATA[key]
else:
print('Json is null')
return None
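# Usage sketch (illustrative): on EMR the standard instance metadata file
# identifies the master node (the same path is used in LauncherUtil.start()).
#     is_master = JsonUtil('/mnt/var/lib/info/instance.json').get('isMaster')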
class FileUtil:
TEST_MODE = False
HADOOP_HOME = ""
def __init__(self, hadoop_home):
self.HADOOP_HOME = hadoop_home
def rm(self, path):
if os.path.isdir(path):
shutil.rmtree(path)
elif os.path.exists(path):
os.remove(path)
    def cp(self, src, dest, forced=False):
        # Copy src to dest; when forced, remove any existing dest first.
        if forced:
            self.rm(dest)
        return shutil.copy(src, dest)
    def cp2(self, src, dest):
        shutil.copy2(src, dest)
    def ln(self, src, dest, forced=False):
        # Symlink src to dest; when forced, remove any existing dest first.
        if forced:
            self.rm(dest)
        return os.symlink(src, dest)
    def copytree(self, src, dest, forced=False):
        # Recursively copy src to dest; when forced, clean dest first.
        if forced:
            self.cleanup(dest)
        return shutil.copytree(src, dest)
def mkdir(self, src):
if not os.path.exists(src):
return os.mkdir(src);
return True
def mv(self, src, dest):
shutil.move(src, dest)
def chmod(self, fname, permit):
os.chmod(fname, permit)
def cleanup(self, path):
print('Info: Clean up. (%s)' % (path,))
self.rm(path)
def download(self, src, dest):
print('Info: Download package from %s' % (src,))
parser = urlparse(src.strip())
if parser.scheme == 'http' or parser.scheme == 'https':
response = urllib2.urlopen(src)
handle = open('%s/%s' % (dest, os.path.basename(src)), 'w')
handle.write(response.read())
handle.close()
else:
if self.TEST_MODE:
return self.cp(src, dest)
else:
return os.system('hdfs dfs -copyToLocal %s %s' % (src, dest))
def unpack(self, pack, dest):
print('Info: Unpack. (%s, %s)' % (pack, dest))
tar = tarfile.open(pack)
tar.extractall(dest)
tar.close()
def invoke_run(self, fname, values, hadoop_home):
self.ln(__file__, './installtajolib.py', True)
invoke = InvokeUtil()
fname = invoke.makeInvoke(fname, hadoop_home)
values.insert(0, fname)
pid = subprocess.Popen(values)
return pid
class NetworkUtil:
def scan(self, host, port):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = sock.connect_ex((host, port))
sock.close()
return result
class InvokeUtil:
INVOKE_SOURCE_STR = '''#!/usr/bin/env python
import os
from datetime import datetime
import time
from xml.etree import ElementTree
import re
from installtajolib import *
targetFile="%s/etc/hadoop/yarn-site.xml"
launcherUtil = LauncherUtil()
launcherUtil.options = launcherUtil.parse_opts()
fname="./history.log"
flog = open(fname, "w", 0)
flog.write("%%s\\tWaiting for creating hadoop environment.\\n" %% (datetime.now(),) )
isMaster = False
loop = 0
master = ""
detected = False
while os.path.isfile(targetFile) is False or isMaster is False :
if os.path.isfile(targetFile) :
if not detected:
flog.write("%%s\\tFound It! : %%s\\n" %% (datetime.now(), targetFile))
flog.write("%%s\\tWaiting for looking master..\\n" %% (datetime.now(),))
detected = True
tree = ElementTree.parse(targetFile)
root = tree.getroot()
for property in root.findall("property"):
name = property.find("name")
if name.text == "yarn.resourcemanager.address" :
master = property.find("value").text
m = re.search("[^:<]+", master)
master = m.group()
isMaster = True
if loop > launcherUtil.MAX_WAITING_SEC:
flog.write("Break running! (Loop greater than %%d secs)\\n" %% (launcherUtil.MAX_WAITING_SEC,) )
break
time.sleep(1)
flog.write(".")
loop += 1
if loop%%60 == 0:
flog.write("\\n")
flog.write("%%s\\tMaster:%%s\\n" %% (datetime.now(), master) )
flog.close()
launcherUtil.build()
launcherUtil.start()'''
def getSrc(self, hadoop_home):
return self.INVOKE_SOURCE_STR % (hadoop_home,)
def makeInvoke(self, fname, hadoop_home):
fname = "./%s" % (fname,)
if os.path.exists(fname):
os.remove(fname)
finvoke = open(fname, 'w')
finvoke.write(self.getSrc(hadoop_home))
finvoke.close()
os.chmod(fname, 0775)
return fname
class LauncherUtil:
EXPORT_LIBS = '''
export JAVA_HOME=/usr/lib/jvm/java-1.7.0-openjdk.x86_64
export HADOOP_HOME=%s
export HADOOP_MAPREDUCE_HOME=%s
export HADOOP_HDFS_HOME=%s
export HADOOP_YARN_HOME=%s
export HADOOP_LZO_HOME=%s
export TAJO_CLASSPATH="$TAJO_CLASSPATH:/usr/share/aws/emr/emrfs/lib:/usr/share/aws/emr/lib"
export TAJO_CLASSPATH="$TAJO_CLASSPATH:$HADOOP_HOME:$HADOOP_HOME/lib"
export TAJO_CLASSPATH="$TAJO_CLASSPATH:$HADOOP_MAPREDUCE_HOME"
export TAJO_CLASSPATH="$TAJO_CLASSPATH:$HADOOP_HDFS_HOME:$HADOOP_HDFS_HOME/lib"
export TAJO_CLASSPATH="$TAJO_CLASSPATH:$HADOOP_YARN_HOME"
export TAJO_CLASSPATH="$TAJO_CLASSPATH:$HADOOP_LZO_HOME/lib"
'''
HADOOP_MODULE_DIRS_REGEX = r'''# HADOOP JAR DIRS
HADOOP_MODULE_DIRS="$HADOOP_HOME/share/hadoop/common/lib
$HADOOP_HOME/share/hadoop/common
$HADOOP_HOME/share/hadoop/hdfs
$HADOOP_HOME/share/hadoop/hdfs/lib
$HADOOP_HOME/share/hadoop/yarn
$HADOOP_HOME/share/hadoop/mapreduce
$HADOOP_HOME/share/hadoop/tools/lib"'''
HADOOP_MODULE_DIRS = '''
HADOOP_MODULE_DIRS="/usr/share/aws/emr/emrfs/lib
/usr/share/aws/emr/lib
$HADOOP_HOME
$HADOOP_HOME/lib
$HADOOP_MAPREDUCE_HOME
$HADOOP_HDFS_HOME
$HADOOP_HDFS_HOME/lib
$HADOOP_YARN_HOME
$HADOOP_LZO_HOME/lib"
'''
DEFAULT_HELP_MESSAGE = '''usage : install-tajo.py [-t|--tar] [-c|--conf] [-l|--lib] [-h|--help] [-e|--env] [-s|--site] [-T|--test-home] [-H|--test-hadoop-home]
-t, --tar
The tajo binary Tarball URL.(Optional)
ex) --tar http://apache.mirror.cdnetworks.com/tajo/tajo-0.10.0/tajo-0.10.0.tar.gz
or
--tar s3://[your_bucket]/[your_path]/tajo-0.10.0.tar.gz
-c, --conf
The tajo conf directory URL.(Optional)
ex) --conf s3://beta.elasticmapreduce/bootstrap-actions/tajo/template/tajo-0.10.0/c3.xlarge/conf
-l, --lib
The tajo third party lib URL.(Optional)
ex) --lib s3://{your_bucket}/{your_lib_dir}
or
--lib http://{lib_url}/{lib_file_name.jar}
-v, --tajo-version
The tajo release version.(Optional)
Default: Apache tajo stable version.
ex) x.x.x
-h, --help
The help
-e, --env
The item of tajo-env.sh(Optional, space delimiter)
    ex) --env "TAJO_PID_DIR=/home/hadoop/tajo/pids TAJO_WORKER_HEAPSIZE=1024"
-s, --site
The item of tajo-site.xml(Optional, space delimiter)
    ex) --site "tajo.rootdir=s3://mybucket/tajo tajo.worker.start.cleanup=true tajo.catalog.store.class=org.apache.tajo.catalog.store.MySQLStore"
-T, --test-home
The Test directory path(Only test)
ex) --test-home "/home/hadoop/bootstrap_test"
-H, --test-hadoop-home
The Test HADOOP_HOME(Only test)
ex) --test-hadoop-home "/home/hadoop"'''
TAJO_BASE = '/home/hadoop'
TAJO_VERSION = None
TAJO_PACKAGE_URI = None
TAJO_CONF_URI = None
TAJO_HOME = None
LIBRARY_URI = None
STORAGE = None
NAME_NODE = None
NAME_NODE_PORT = 8020
TEST_MODE = False
TEST_DIR = None
TEST_HADOOP_HOME = None
TAJO_MASTER = None
TAJO_ENV = None
TAJO_SITE = None
JAVA_HOME = None
fileUtil = None
xmlUtil = None
options = None
START_INVOKE_FILE = 'invoke-install-tajo.py'
MAX_WAITING_SEC = 600
# Parses command line arguments.
def parse_opts(self):
parser = argparse.ArgumentParser(description='Tajo Installer on EMR-4.x')
parser.add_argument('-t', '--tar',
dest='tajo_package_uri',
required=False,
help='''The tajo binary Tarball URL.(Optional)
ex) --tar http://d3kp3z3ppbkcio.cloudfront.net/tajo-0.10.0/tajo-0.10.0.tar.gz
or --tar s3://[your_bucket]/[your_path]/tajo-{version}.tar.gz''')
parser.add_argument('-c', '--conf',
dest='conf',
required=False,
help='''The tajo conf directory URL.(Optional)
ex) --conf s3://beta.elasticmapreduce/bootstrap-actions/tajo/template/tajo-0.10.0/c3.xlarge/conf''')
parser.add_argument('-l', '--lib',
dest='lib',
required=False,
help='''The tajo third party lib URL.(Optional)
ex) --lib s3://{your_bucket}/{your_lib_dir}
or
--lib http://{lib_url}/{lib_file_name.jar}''')
parser.add_argument('-v', '--tajo-version',
dest='tajo_version',
required=False,
help='''The tajo release version.(Optional)
Default: Apache tajo stable version.
ex) x.x.x''')
parser.add_argument('-e', '--env',
dest='env',
required=False,
help='''The item of tajo-env.sh(Optional, space delimiter)
                            ex) --env "TAJO_PID_DIR=/home/hadoop/tajo/pids TAJO_WORKER_HEAPSIZE=1024"''')
parser.add_argument('-s', '--site',
dest='site',
required=False,
help='''The item of tajo-site.xml(Optional, space delimiter)
                            ex) --site "tajo.rootdir=s3://mybucket/tajo tajo.worker.start.cleanup=true tajo.catalog.store.class=org.apache.tajo.catalog.store.MySQLStore"''')
parser.add_argument('-T', '--test-home',
dest='test_dir',
required=False,
help='''The Test directory path(Only test)
ex) --test-home "/home/hadoop/bootstrap_test"''')
parser.add_argument('-H', '--test-hadoop-home',
dest='test_hadoop_home',
required=False,
help='''The Test HADOOP_HOME(Only test)
ex) --test-hadoop-home "/home/hadoop"''')
return parser.parse_args()
## Print Help
def help(self):
print(self.DEFAULT_HELP_MESSAGE)
def dic_name_value(self, txt):
d = {}
pieces = txt.split('=')
d['name'] = pieces[0]
d['value'] = '='.join(pieces[1:])
return d
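    # Sketch: '--site' items are 'name=value' pairs where the value itself may
    # contain '=' (e.g. JDBC URLs); only the first '=' splits the pair:
    #     dic_name_value('tajo.rootdir=s3://bucket/tajo')
    #     -> {'name': 'tajo.rootdir', 'value': 's3://bucket/tajo'}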
def trip(self, value):
if value:
return value.strip()
return value
def init(self, fileUtil, opt):
print('Info: Initializing.')
self.fileUtil = fileUtil
self.options = opt
self.TAJO_VERSION = self.trip(opt.tajo_version)
self.TAJO_PACKAGE_URI = self.trip(opt.tajo_package_uri)
self.TAJO_CONF_URI = self.trip(opt.conf)
self.TAJO_HOME = None
self.LIBRARY_URI = self.trip(opt.lib)
self.STORAGE = None
self.NAME_NODE = None
if opt.test_dir:
self.TEST_MODE = True
self.TEST_DIR = self.trip(opt.test_dir)
self.TEST_HADOOP_HOME = self.trip(opt.test_hadoop_home)
self.TAJO_ENV = self.trip(opt.env)
self.TAJO_SITE = self.trip(opt.site)
self.JAVA_HOME = os.environ['JAVA_HOME']
self.xmlUtil = XmlUtil()
if self.TEST_MODE:
if not self.JAVA_HOME:
print('Error: JAVA_HOME is not set.')
sys.exit(1)
if not self.TEST_DIR:
print('Error: -T is not set.')
self.help()
sys.exit(1)
if not self.TEST_HADOOP_HOME:
print('Error: -H is not set.')
self.help()
sys.exit(1)
self.fileUtil.mkdir(self.TEST_DIR)
self.fileUtil.copytree(self.TEST_HADOOP_HOME, '%s/hadoop' % (self.TEST_DIR,), True)
os.environ['HADOOP_HOME'] = '%s/hadoop' % (self.TEST_DIR,)
self.TAJO_MASTER = 'localhost'
else:
master = self.xmlUtil.get_property_value('/usr/lib/hadoop/etc/hadoop/yarn-site.xml',
'yarn.resourcemanager.address')
m = re.search('[^:<]+', master)
master = m.group()
self.TAJO_MASTER = master
self.TEST_MODE = False
self.STORAGE = 'S3'
self.NAME_NODE = self.TAJO_MASTER
if not self.TAJO_PACKAGE_URI:
            self.TAJO_PACKAGE_URI = 'http://d3kp3z3ppbkcio.cloudfront.net/tajo-%s/tajo-%s.tar.gz' % (
                self.TAJO_VERSION, self.TAJO_VERSION)
def download(self):
src = self.TAJO_PACKAGE_URI
dest = self.TAJO_BASE
if self.TEST_MODE:
dest = self.TEST_DIR
return self.fileUtil.download(src, dest)
def unpack(self):
tarball = '%s/%s' % (self.TAJO_BASE, os.path.basename(self.TAJO_PACKAGE_URI))
dest = self.TAJO_BASE
if self.TEST_MODE:
dest = self.TEST_DIR
tarball = '%s/%s' % (dest, tarball)
return self.fileUtil.unpack(tarball, dest)
def makeln(self):
c = re.compile(r'(?P<name>.*).tar.gz')
m = c.match(os.path.basename(self.TAJO_PACKAGE_URI))
name = m.group('name')
src = '%s/%s' % (self.TAJO_BASE, name)
dest = '%s/%s' % (self.TAJO_BASE, 'tajo')
if self.TEST_MODE:
src = '%s/%s' % (self.TEST_DIR, name)
dest = '%s/%s' % (self.TEST_DIR, 'tajo')
print('makeln: %s, %s' % (src, dest))
os.symlink(src, dest)
self.TAJO_HOME = dest
def set_hadoop_modules(self):
print('Info: Setting hadoop modules in tajo script.')
org = '%s/bin/tajo' % (self.TAJO_HOME,)
src = '%s/bin/tajo.tmp' % (self.TAJO_HOME,)
target = r'''^# HADOOP JAR DIRS
HADOOP_MODULE_DIRS="\$HADOOP_HOME/share/hadoop/common/lib
\$HADOOP_HOME/share/hadoop/common
\$HADOOP_HOME/share/hadoop/hdfs
\$HADOOP_HOME/share/hadoop/hdfs/lib
\$HADOOP_HOME/share/hadoop/yarn
\$HADOOP_HOME/share/hadoop/mapreduce
\$HADOOP_HOME/share/hadoop/tools/lib"$'''
change = r'''
# HADOOP JAR DIRS
HADOOP_MODULE_DIRS="/usr/share/aws/emr/emrfs/lib
/usr/share/aws/emr/lib
$HADOOP_HOME
$HADOOP_HOME/lib
$HADOOP_MAPREDUCE_HOME
$HADOOP_HDFS_HOME
$HADOOP_HDFS_HOME/lib
$HADOOP_YARN_HOME
$HADOOP_LZO_HOME/lib"'''
match = re.compile(target, re.M)
self.fileUtil.cp(org, src)
with open(src, 'r') as content_file:
content = content_file.read()
ret = match.search(content)
if ret:
print ret.group()
ret = match.sub(change, content)
fnew = open(org, 'w')
fnew.write(ret)
fnew.close()
self.fileUtil.rm(src)
print "Successed to change content."
else:
print "Failed set hadoop modules : Not found target and not changed env."
def set_tajo_conf(self):
print('Info: Setting tajo conf.')
if self.TAJO_CONF_URI:
self.fileUtil.mkdir('%s/conf/temp' % (self.TAJO_HOME,))
# Test mode
if self.TEST_MODE:
src = '%s/*' % (self.TAJO_CONF_URI,)
dest = '%s/conf/temp' % (self.TEST_DIR,)
self.fileUtil.copytree(src, dest)
else:
os.system('hdfs dfs -copyToLocal %s/* %s/conf/temp' % (self.TAJO_CONF_URI, self.TAJO_HOME))
src = '%s/conf/temp' % (self.TAJO_HOME,)
dest = '%s/conf' % (self.TAJO_HOME,)
for f in os.listdir(src):
self.fileUtil.cp2('%s/%s' % (src, f), dest)
self.fileUtil.cleanup(src)
self.fileUtil.chmod('%s/conf/tajo-env.sh' % (self.TAJO_HOME,), 0775)
tajo_env_sh = '%s/conf/tajo-env.sh' % (self.TAJO_HOME,)
ftajo_env_sh = open(tajo_env_sh, 'a', 0)
echo_hadoop_home = '/usr/lib/hadoop'
echo_hadoop_mapreduce_home = '%s-mapreduce' % (echo_hadoop_home,)
echo_hadoop_hdfs_home = '%s-hdfs' % (echo_hadoop_home,)
echo_hadoop_yarn_home = '%s-yarn' % (echo_hadoop_home,)
echo_hadoop_lzo_home = '%s-lzo' % (echo_hadoop_home,)
# Test mode
if self.TEST_MODE:
echo_hadoop_home = self.TEST_HADOOP_HOME
export_libs = self.EXPORT_LIBS % (echo_hadoop_home, echo_hadoop_mapreduce_home, echo_hadoop_hdfs_home, echo_hadoop_yarn_home, echo_hadoop_lzo_home,)
ftajo_env_sh.write(export_libs)
# using --env option
if self.TAJO_ENV:
for property in self.TAJO_ENV.replace(' ', '\n').split():
ftajo_env_sh.write('export %s' % (property,))
ftajo_env_sh.close()
tajo_site_xml = '%s/conf/tajo-site.xml' % (self.TAJO_HOME,)
if not os.path.exists(tajo_site_xml):
ftajo_site_xml = open(tajo_site_xml, 'w', 0)
ftajo_site_xml.write(self.xmlUtil.new_configuration())
ftajo_site_xml.close()
tree = ElementTree.parse(tajo_site_xml)
root = tree.getroot()
root = self.xmlUtil.create_node(root, 'property', 'tajo.master.umbilical-rpc.address',
'%s:26001' % (self.TAJO_MASTER,))
root = self.xmlUtil.create_node(root, 'property', 'tajo.master.client-rpc.address',
'%s:26002' % (self.TAJO_MASTER,))
root = self.xmlUtil.create_node(root, 'property', 'tajo.resource-tracker.rpc.address',
'%s:26003' % (self.TAJO_MASTER,))
root = self.xmlUtil.create_node(root, 'property', 'tajo.catalog.client-rpc.address',
'%s:26005' % (self.TAJO_MASTER,))
# setting tmp_dir
tmpdir = None
if not self.TEST_MODE:
tmpdirs = self.xmlUtil.get_property_value('/usr/lib/hadoop/etc/hadoop/hdfs-site.xml', 'dfs.name.dir')
for dir in tmpdirs.replace(',', '\n').split():
if not tmpdir:
tmpdir = '%s/tajo/tmp' % (dir,)
else:
tmpdir = '%s,%s/tajo/tmp' % (tmpdir, dir)
root = self.xmlUtil.create_node(root, 'property', 'tajo.worker.tmpdir.locations', '%s' % (tmpdir,))
# using --site option
if self.TAJO_SITE:
for property in self.TAJO_SITE.replace(',', '\n').split():
d = self.dic_name_value(property)
name = d['name']
value = d['value']
root = self.xmlUtil.create_node(root, 'property', name, value)
# Default rootdir is EMR hdfs
if not self.xmlUtil.get_property_value_from_node(root, 'tajo.rootdir'):
self.STORAGE = 'local'
if self.TEST_MODE:
                root = self.xmlUtil.create_node(root, 'property', 'tajo.rootdir', 'file://%s/tajo' % (self.TAJO_HOME,))  # TAJO_HOME is absolute, so this yields file:///...
else:
root = self.xmlUtil.create_node(root, 'property', 'tajo.rootdir',
'hdfs://%s:%d/tajo' % (self.NAME_NODE, self.NAME_NODE_PORT))
self.xmlUtil.indent(root)
with open('%s/conf/tajo-site.xml' % (self.TAJO_HOME,), "w", 0) as f:
f.write(self.xmlUtil.header_configuration())
f.write(ElementTree.tostring(root))
## Download Third party Library
def third_party_lib(self):
print('Info: Download Third party Library.')
if self.LIBRARY_URI:
parser = urlparse(self.LIBRARY_URI.strip())
if parser.scheme == 'http' or parser.scheme == 'https':
return os.system(
'curl -o %s/lib/%s %s' % (self.TAJO_HOME, os.path.basename(self.LIBRARY_URI), self.LIBRARY_URI))
else:
# Test mode
if self.TEST_MODE:
self.fileUtil.copytree('%s/*' % (self.LIBRARY_URI,), '%s/lib' % (self.TAJO_HOME,))
else:
return os.system('hdfs dfs -copyToLocal %s/* %s/lib' % (self.LIBRARY_URI, self.TAJO_HOME))
def parse_args(self, opt):
values = []
if opt.conf:
values.append('-c')
values.append('%s' % (opt.conf,))
if opt.tajo_package_uri:
values.append('-t')
values.append('%s' % (opt.tajo_package_uri,))
if opt.site:
values.append('-s')
values.append('%s' % (opt.site,))
if opt.tajo_version:
values.append('-v')
values.append('%s' % (opt.tajo_version,))
if opt.env:
values.append('-e')
values.append('%s' % (opt.env,))
if opt.lib:
values.append('-l')
values.append('%s' % (opt.lib,))
if opt.test_hadoop_home:
values.append('-H')
values.append('%s' % (opt.test_hadoop_home,))
if opt.test_dir:
values.append('-T')
values.append('%s' % (opt.test_dir,))
return values
def build(self):
self.fileUtil = FileUtil(self.TAJO_BASE)
self.init(self.fileUtil, self.options)
self.download()
self.unpack()
self.makeln()
self.set_tajo_conf()
self.set_hadoop_modules()
self.third_party_lib()
def start(self):
print('Info: Start Tajo.')
networkUtil = NetworkUtil()
if self.TEST_MODE:
os.system('%s/bin/tajo-daemon.sh start master' % (self.TAJO_HOME,))
os.system('%s/bin/tajo-daemon.sh start worker' % (self.TAJO_HOME,))
else:
jsonUtil = JsonUtil('/mnt/var/lib/info/instance.json')
if jsonUtil.get('isMaster'):
if self.STORAGE == "local":
result = networkUtil.scan(self.NAME_NODE, self.NAME_NODE_PORT)
while result != 0:
time.sleep(5)
result = networkUtil.scan(self.NAME_NODE, self.NAME_NODE_PORT)
os.system('%s/bin/tajo-daemon.sh start master' % (self.TAJO_HOME,))
else:
result = networkUtil.scan(self.TAJO_MASTER, 26001)
while result != 0:
time.sleep(5)
                    result = networkUtil.scan(self.TAJO_MASTER, 26001)
os.system('%s/bin/tajo-daemon.sh start worker' % (self.TAJO_HOME,))
def main():
launcherUtil = LauncherUtil()
opt = launcherUtil.parse_opts()
values = launcherUtil.parse_args(opt)
fileUtil = FileUtil(launcherUtil.TAJO_BASE)
hadoop_home = '/usr/lib/hadoop'
if opt.test_hadoop_home:
hadoop_home = opt.test_hadoop_home
pid = fileUtil.invoke_run(launcherUtil.START_INVOKE_FILE, values, hadoop_home)
print('> Created a new process : %s %s' % (pid, values))
if __name__ == '__main__':
sys.exit(main())
# ===== gdrubich/kenwin_test :: kenwin/kenwin/routes.py =====
def includeme(config):
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('home', '/')
config.add_route('auth', '/sign/{action}')
# ===== avdeenkodmitry/spacegame :: curses_tools.py =====
SPACE_KEY_CODE = 32
LEFT_KEY_CODE = 260
RIGHT_KEY_CODE = 261
UP_KEY_CODE = 259
DOWN_KEY_CODE = 258
SYMS = ('+', '*', '.', ':')
TIC_TIMEOUT = 0.1
def read_controls(canvas):
"""Read keys pressed and returns tuple with controls state.
Args:
canvas: canvas object
Returns:
        tuple: (int, int, bool) rows and columns direction and space pressed indicator
"""
rows_direction = columns_direction = 0
space_pressed = False
while True:
pressed_key_code = canvas.getch()
if pressed_key_code == -1:
# https://docs.python.org/3/library/curses.html#curses.window.getch
break
if pressed_key_code == UP_KEY_CODE:
rows_direction = -1
if pressed_key_code == DOWN_KEY_CODE:
rows_direction = 1
if pressed_key_code == RIGHT_KEY_CODE:
columns_direction = 1
if pressed_key_code == LEFT_KEY_CODE:
columns_direction = -1
if pressed_key_code == SPACE_KEY_CODE:
space_pressed = True
return rows_direction, columns_direction, space_pressed
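# Usage sketch (illustrative): the canvas must be in non-blocking mode so the
# getch() loop above can drain pending keys and return -1 when none remain.
def _poll_controls(canvas):
    canvas.nodelay(True)  # make getch() non-blocking
    return read_controls(canvas)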
def draw_frame(canvas, start_row, start_column, text, negative=False):
"""Draw multiline text fragment on canvas.
Erase text instead of drawing if negative=True is specified.
Args:
canvas: canvas object
start_row (int): start row of the drawn object
start_column (int): start column of the drawn object
text (str): frame test
negative (bool): erase text instead of drawing if negative=True
Returns:
None
"""
rows_number, columns_number = canvas.getmaxyx()
for row, line in enumerate(text.splitlines(), round(start_row)):
if row < 0:
continue
if row >= rows_number:
break
for column, symbol in enumerate(line, round(start_column)):
if column < 0:
continue
if column >= columns_number:
break
if symbol == ' ':
continue
# Check that current position it is not in a lower right corner
# of the window
# Curses will raise exception in that case. Don`t ask why…
# https://docs.python.org/3/library/curses.html#curses.window.addch
if row == rows_number - 1 and column == columns_number - 1:
continue
symbol = symbol if not negative else ' '
canvas.addch(row, column, symbol)
def get_frame_size(text):
"""Calculate size of multiline text fragment.
Args:
text: frame text
Returns:
tuple: (int, int) rows and columns number
"""
lines = text.splitlines()
rows = len(lines)
columns = max([len(line) for line in lines])
return rows, columns
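# --- Added usage sketch (illustrative; not part of the original module) ---
# get_frame_size() is pure, so it can be checked without a curses canvas;
# draw_frame()/read_controls() need a real canvas, e.g. via curses.wrapper().
if __name__ == '__main__':
    frame = ' /\\ \n|  |\n \\/ '
    print(get_frame_size(frame))  # expected: (3, 4)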
| [
"[email protected]"
] | |
e413bf6fcc60004358b591fef11c22fc5d1384f0 | 3d06e2cc8cc701c76ce08e6522928a0ca69832d3 | /users/migrations/0003_auto_20161021_1120.py | 6a7e489fe4aadf10ecf0fead97cbe44fe619fe3d | [] | no_license | praekeltfoundation/gem-bbb-indo-server | 5ac1a697c30f09a6ae63fcdcb1ffe1193e98084d | f1af2d84cff2c5ba242e6330256344c02deea726 | refs/heads/develop | 2020-05-29T08:40:29.647999 | 2017-09-08T10:10:18 | 2017-09-08T10:10:18 | 69,655,513 | 0 | 0 | null | 2018-10-24T14:37:14 | 2016-09-30T09:56:21 | Python | UTF-8 | Python | false | false | 592 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-21 09:20
from __future__ import unicode_literals
from django.db import migrations, models
import users.models
import users.storage
class Migration(migrations.Migration):
dependencies = [
('users', '0002_profile_profile_image'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='profile_image',
field=models.ImageField(null=True, storage=users.storage.OverwriteStorage(), upload_to=users.models.get_profile_image_filename),
),
]
| [
"[email protected]"
] | |
07f571d97d75fa5a4a85e1a1c19f28e8644e2dd0 | 7a11792dfb5acae2d3e32a20bc0c40e64ce4cbc4 | /Reverse4.py | d8ea1cba11300389c06cb09cf0f31d349bbb7a95 | [] | no_license | collinskoech11/Python-Learning | 340eed6e7b5a9548433d5844cebbcf5273054638 | 14b8d9dff85c33837a9b4f105dc9366e4b5fd6fa | refs/heads/master | 2023-06-29T14:44:04.004819 | 2021-07-22T16:28:41 | 2021-07-22T16:28:41 | 363,305,765 | 0 | 0 | null | 2021-07-22T16:28:42 | 2021-05-01T02:26:35 | Python | UTF-8 | Python | false | false | 171 | py | def Reverse(num):
number = str(num)
reversing = number[::-1]
reversed = int(reversing)
    print(reversed)
number = input('enter a number: ')
Reverse(number) | [
"[email protected]"
] | |
0e361f2147a61127133f357cca662943383c6f09 | 597a963c8c10b80d4f0cd2657e51f6b85dba1d3f | /bin/easy_install-3.5 | 136911bd4b8400bfab25f5509914cab406e7702a | [] | no_license | momotakagi/chainerRL | 57b56461982f1b8f5bd41382b117a43ed0f6325a | e8f423cabf61fa19dfb5d16af8de73514ba6262e | refs/heads/master | 2021-05-15T13:20:43.860048 | 2017-10-16T16:15:17 | 2017-10-16T16:15:17 | 107,153,115 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | 5 | #!/home/tomoya/projects/chainerrl/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | |
570be72876047f9b536ff3d6c1c1a2fbdcaa33e9 | 0d4be9a85528ca27ce7b4c31a41ba034058720a1 | /1.6/vehicles/base.py | b04e0c43c0512c8d746fbebf04387f131aab600e | [] | no_license | Kronon299/otus_python_basic_hw | b795076ab45d4b95a8e4480a4a45428122ffca67 | 126a7f03dd3b28fe7b445601ddc679bac870e514 | refs/heads/master | 2023-01-21T15:42:30.564229 | 2020-12-03T19:15:34 | 2020-12-03T19:15:34 | 287,369,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 707 | py |
from abc import ABCMeta, abstractmethod
class BaseVehicle:
"""
Base vehicle class
"""
WEIGHT = 0
PAYLOAD = 0
FUEL_CONSUMPTION = 0
SOUND = '...'
def make_sound(self):
print(self.SOUND)
class BaseCar(BaseVehicle, metaclass=ABCMeta):
def __init__(self, vendor: str, model: str):
self.vendor = vendor
self.model = model
@abstractmethod
def ride(self, distance):
pass
class BaseShip(BaseVehicle, metaclass=ABCMeta):
def __init__(self, vendor: str, model: str, name: str):
self.vendor = vendor
self.model = model
self.name = name
@abstractmethod
def sail(self, distance):
pass
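# --- Added usage sketch (illustrative; the class and values are assumptions) ---
# A minimal concrete subclass showing how the abstract bases are meant to be used.
class ExampleCar(BaseCar):
    WEIGHT = 1200
    PAYLOAD = 400
    FUEL_CONSUMPTION = 8
    SOUND = 'vroom'
    def ride(self, distance):
        print('{} {} rode {} km'.format(self.vendor, self.model, distance))
if __name__ == '__main__':
    car = ExampleCar('Example Motors', 'E1')
    car.make_sound()
    car.ride(42)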
| [
"[email protected]"
] | |
77a9f259fac8dcec468a6c4d3657accd82a493e2 | 4c8eb3d5a63ac2a8ddbb7f997b27f08b8019d5f7 | /models/polyencoder/__init__.py | 8589a643838c8307d600c3f5d4d306399dd5ec2e | [] | no_license | RaphaelOlivier/collision-bert | 94974ed8c94295b9d7ed760cb0ad79f2856d34aa | 43eda087bf6d632bdb150d98e934206327f8d082 | refs/heads/master | 2023-01-20T08:09:47.449597 | 2020-11-21T20:01:43 | 2020-11-21T20:01:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | from models.polyencoder.modeling_polyencoder import PretrainedPolyEncoder, PretrainedBiEncoder, PolyEncoderLM
from models.polyencoder.tokenization_polyencoder import PolyEncoderTokenizer
| [
"[email protected]"
] | |
0ca4eaab9a1b2b9a110aed592f00b856885f9aa8 | 8739669f8a0d025bda30b5ddc75ce2ca41326db9 | /rate_limiter/rate_limiter.py | 51576df38bac332982b0e8d0c06ff6e2ac37e4f9 | [] | no_license | uk-gov-mirror/ministryofjustice.opg-repository-reporting | dcddda33e67972ea33fdc711f95d33c0dd4bed4f | 9b6b8047d4ff6980834cb17c50acb29caac795d7 | refs/heads/main | 2023-04-03T20:20:20.313938 | 2021-04-20T15:22:59 | 2021-04-20T15:22:59 | 356,783,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,694 | py | from github import Github
from github import RateLimitExceededException
import calendar
import time
import datetime
# rate_limiter wrapper to handle the github api call limits
class rate_limiter:
g = None
remaining = 0
rate_limit = None
def __init__(self, g):
self.g = g
self.reset()
return
def reset(self):
self.rate_limit = self.g.get_rate_limit()
self.remaining, limit = self.g.rate_limiting
return
# pause execution
def pause(self):
reset_timestamp = self.g.rate_limiting_resettime
date = datetime.datetime.fromtimestamp(reset_timestamp)
        # add 1 second to be sure the rate limit has been reset
sleep_time = reset_timestamp - calendar.timegm(time.gmtime()) + 1
print('>>>>> Sleeping for {} seconds until {}'.format(sleep_time, date.strftime("%Y-%m-%d %H:%M:%S") ) )
time.sleep(sleep_time)
print('>>>>> Resuming at {}'.format(datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S") ) )
# run
def run(self, function, on_rate_limited=None, on_error=None, on_complete=None):
while True:
try:
# if we have calls remaining, run the function
if self.remaining > 0:
# returns boolean for complete, and result
# of the func
complete, function_result = function()
# update the rate limit
r, limit = self.g.rate_limiting
self.remaining = r
# if complete, break the loop
if complete == True and on_complete != None:
on_complete(function_result)
break
elif complete == True:
break
                    # if there's an error, run the on_error callback
                    if function_result != True and on_error != None:
                        on_error()
# we've caught the rate limit error before making the call
else:
print('\n>>>>> Rate limit hit 0')
if on_rate_limited != None:
on_rate_limited()
self.pause()
self.reset()
# rate limit hit
except RateLimitExceededException:
print('\n>>>>> Rate limit exceeded')
if on_rate_limited != None:
on_rate_limited()
self.pause()
self.reset()
# breaks the loop
except StopIteration:
break
# end the func
return
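# --- Added usage sketch (illustrative; the token and repository are placeholders) ---
# run() expects a callable returning a (complete, result) tuple:
# from github import Github
# limiter = rate_limiter(Github('<token>'))
# def fetch_stars():
#     repo = limiter.g.get_repo('owner/name')  # hypothetical repository
#     return True, repo.stargazers_count
# limiter.run(fetch_stars, on_complete=lambda stars: print(stars))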
| [
"[email protected]"
] | |
aef334c17bdbf740d5d7a0b261be1f41437b6341 | c26017f753efc31b485ee6fadc5f68ae2f272372 | /LabOne/wsgi.py | 83c6efba1f8832d51e11aa108a9c6f1dcbfb7e87 | [] | no_license | Seathe14/Lab1Python | cc798bfa64b0c0bd1c29cdfa2f02eeb6d48083a5 | b8f54e23726879bc6d1a46c3c3839915dc4046f3 | refs/heads/master | 2020-12-29T20:41:56.608642 | 2020-02-06T15:59:46 | 2020-02-06T15:59:46 | 238,724,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | """
WSGI config for LabOne project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'LabOne.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
7a920e87f282a95af69b8cc0fb52604c0a871053 | 365ce3d35f126e246c7fccee99c6d9747c2a9e4e | /Web_Session1/about_me.py | 859d3ed98aa15dac7385c222581a5d379403128a | [] | no_license | Homlessss/maiquangkhai-Labs_C4E30 | c2d3d65bae96086a92a4f8897be24d13ae41da58 | 211f8f9d7476a89897e7421c4fb690e1c85aa548 | refs/heads/master | 2020-05-17T02:03:09.820549 | 2019-04-25T13:52:39 | 2019-04-25T13:52:39 | 183,444,297 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | from flask import Flask, render_template, redirect
app = Flask(__name__)
@app.route('/about-me')
def index():
return render_template('about-me.html')
@app.route('/school')
def Chuyenhuong():
    return redirect("http://techkids.vn", code=302)
if __name__ == '__main__':
app.run(host='127.0.0.1', port = 5000, debug=True) | [
"[email protected]"
] | |
e2ffba583a2cd9a7c11261cd7538f36e6ff2654f | 544086b3e4c580dbb39d1a8373a4903696d51db0 | /careforall/careforall/settings.py | 9816cc536f2db524adbf563f5d1e986264a2c199 | [] | no_license | careall249/Care-For-All | dc3a798fb47d10134f50d5a20b2b2eede52e623e | 0c74ba245e1758b10133cbfdc7345cdf4f4a8c5c | refs/heads/master | 2023-02-13T16:54:11.535341 | 2021-01-05T18:25:00 | 2021-01-05T18:25:00 | 316,262,763 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,690 | py | """
Django settings for careforall project.
Generated by 'django-admin startproject' using Django 3.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
TEMPLATE_DIR = os.path.join(BASE_DIR, 'templates')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'nw*)s#z0p6-mugr(%p&i0v4ap)wg89m^s_f2v!u$)0+xfz0ihn'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'careforallapp',
'datasecurity',
'fitness',
'personality',
'ckeditor'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'careforall.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIR,],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'careforall.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
]
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR,'static')
MEDIA_DIR = os.path.join(BASE_DIR,'media')
MEDIA_ROOT = MEDIA_DIR
MEDIA_URL = '/media/'
| [
"[email protected]"
] | |
8da80e5876ac44005eff52e9f251ad7ba819caa4 | 3012e998e82ea2fa1a0f73131d16a0a4c1719730 | /untitled1/untitled1/urls.py | d82491ddf44c8c647c56b21f08c63571db99063f | [] | no_license | qq738367260/test | 05336a58be7bfa7275e5cf9e411680c47c33bb1c | 8e626f00fa15985f2609cf1def250bbb24279f52 | refs/heads/master | 2020-04-13T12:08:08.999449 | 2018-12-26T12:52:27 | 2018-12-26T12:52:27 | 163,193,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 845 | py | """untitled1 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.shortcuts import HttpResponse
def upload(request):
    return HttpResponse("Received!")  # translated from "收到了!"
urlpatterns = [
path('upload/', upload),
]
| [
"[email protected]"
] | |
89a577e2e47f746169fae71030df1df504a5c64f | 0f5f0cf29fa364b5b207270c905b0ece480d2d53 | /launch/launch/events/process/running_process_event.py | a54014e03c3c6730dcafab07c11d63f7e7a7f777 | [
"Apache-2.0"
] | permissive | ivanpauno/launch | 97c8b90768758a444de1c47b822147e8aa1975a6 | ac97baccef9402eca40c817fce6d4303a9a4168f | refs/heads/master | 2020-05-09T10:49:46.545928 | 2019-04-17T16:35:04 | 2019-04-17T16:35:04 | 181,059,628 | 0 | 0 | Apache-2.0 | 2019-04-23T20:01:45 | 2019-04-12T18:03:08 | Python | UTF-8 | Python | false | false | 2,756 | py | # Copyright 2018 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for RunningProcessEvent event."""
from typing import Dict
from typing import List
from typing import Optional
from typing import Text
from ...event import Event
if False:
# imports here would cause loops, but are only used as forward-references for type-checking
from ...actions import ExecuteProcess # noqa
class RunningProcessEvent(Event):
"""Event base class that is related to some running process."""
name = 'launch.events.process.RunningProcessEvent'
def __init__(
self,
*,
action: 'ExecuteProcess',
name: Text,
cmd: List[Text],
cwd: Optional[Text],
env: Optional[Dict[Text, Text]],
pid: int
) -> None:
"""
Constructor.
:param: action is the ExecuteProcess action associated with the event
:param: name is the final name of the process instance, which is unique
:param: cmd is the final command after substitution expansion
:param: cwd is the final working directory after substitution expansion
        :param: env is the final environment variables after substitution expansion
        :param: pid is the ID of the running process
"""
super().__init__()
self.__action = action
self.__name = name
self.__cmd = cmd
self.__cwd = cwd
self.__env = env
self.__pid = pid
@property
def action(self) -> 'ExecuteProcess':
"""Getter for action."""
return self.__action
@property
def execute_process_action(self) -> 'ExecuteProcess':
"""Getter for execute_process_action."""
return self.__action
@property
def process_name(self) -> Text:
"""Getter for process_name."""
return self.__name
@property
def cmd(self) -> List[Text]:
"""Getter for cmd."""
return self.__cmd
@property
def cwd(self) -> Optional[Text]:
"""Getter for cwd."""
return self.__cwd
@property
def env(self) -> Optional[Dict[Text, Text]]:
"""Getter for env."""
return self.__env
@property
def pid(self) -> int:
"""Getter for pid."""
return self.__pid
| [
"[email protected]"
] | |
13555b5b26440c6140c403578fd7551824d7cd53 | f8356a20c6bbbfd8c0d6877aa02096dc922014cf | /app/utils/__init__.py | 0e81e190b2bb1b63a1b65f40149e1da77642ad2e | [] | no_license | shimaomao/fastapi-starter | 4fa2b1d8b06269cf3a627a0fd6540b1785e3080f | 8ddcc387a9a6a6ef00073bec5480518882ad789a | refs/heads/master | 2023-03-01T05:08:02.396074 | 2021-01-22T00:00:16 | 2021-01-22T00:00:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48 | py | import sys
_TESTING = "pytest" in sys.modules
| [
"[email protected]"
] | |
8b9887fa2de31ba0eccd041913ae4f82b97e5945 | d51dcfa375a102b36af2cb996bf31fb177f53ee2 | /webapi/ImportDB/CWSP_Coastal.py | 062884fb8ea430715806101c6f606bcc5c5ca5f5 | [] | no_license | hsinkai/crowa | 7683425a163791337fc277fe2d6e607bfcb8017b | 839d7efefed03f644557e20dcce563f61345a657 | refs/heads/master | 2020-04-01T09:52:04.108964 | 2018-10-16T02:37:51 | 2018-10-16T02:37:51 | 153,092,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,152 | py | import os
import sys
import re
import json
dir = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, dir)
sys.path.insert(0, os.path.dirname(dir))
from ImportDB.util import setup_django_rentime_depend
setup_django_rentime_depend()
from url_meta import LatLngURLGenerator
for uri in LatLngURLGenerator('OCM3', 23, 45):
print uri
raise SystemExit
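# NOTE (added): http_get/http_post below are assumed to be small JSON helpers
# (HTTP GET/POST returning parsed JSON) provided elsewhere in this project;
# they are not defined in this file.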
datapoints_url = "http://localhost:8000/DataTables/5/DataPoints/"
datatable_url = "http://localhost:8000/DataTables/?application=8"
dataset_url = "http://localhost:8000/DataSets/8/"
input_json_list = []
for point in http_get(datapoints_url):
input_json = {}
input_json['related_info'] = items = []
input_json['application'] = dataset_url
# http: // localhost:8000 / DataPoints / Regions / 1 /
regex = re.compile(r'.*/(\d+)/')
input_json['name'] = 'FM' + '%03d' % int(regex.match(point['url']).groups()[0])
for table in http_get(datatable_url):
table_url = table['url']
item = {
'dataset': table_url
}
if table['datapoint_strategy'] == 'Station':
nears_point_url = table_url + 'DataPoints/lat/%s/lng/%s/' % (point['lat'], point['lng'])
try:
datapoint_url = http_get(nears_point_url)['url']
item['datapoint'] = datapoint_url
items.append(item)
input_json_list.append(input_json)
except:
pass
elif table['datapoint_strategy'] == 'Grid':
item['lat'] = point['lat']
item['lng'] = point['lng']
items.append(item)
input_json_list.append(input_json)
elif table['datapoint_strategy'] == 'Region':
item['datapoint'] = point['url']
items.append(item)
input_json_list.append(input_json)
elif table['datapoint_strategy'] == 'Polygon': pass
# print json.dumps(input_json, indent=2)
http_post('http://localhost:8000/Views/', input_json)
with open(__file__ + '.json', 'w') as fp:
content = json.dumps(input_json_list, indent=2)
fp.write(content) | [
"crsadm@cwspapsvr1.(none)"
] | crsadm@cwspapsvr1.(none) |
ca9e425111e4b3dbf637b359e9a871fe14f0796a | ec35b619f45b8532388a37278de64be523a14082 | /Databases/MongoDB/4.1-ex3.py | c34d16690cecd511557887072997a7b36d1d4f41 | [] | no_license | deesaw/PythonD-03 | 392b654c60cab290053e1bd89e5237b028f1a391 | ce54d5af6e8dd594617969c7efba3285b66edccc | refs/heads/master | 2023-03-14T10:18:10.060303 | 2021-03-02T13:56:59 | 2021-03-02T13:56:59 | 343,791,301 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py | #Finding the documents from the collection
import pymongo
from pymongo import MongoClient
client=MongoClient()
dbs=client.database_names()
db=client.pyex #db pyex
docs=db.pycoll.find({})
for doc in docs:
print (doc)
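# Added example: find() also accepts a filter document (the field name is an assumption):
# for doc in db.pycoll.find({"name": "Alice"}):
#     print(doc)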
| [
"[email protected]"
] | |
d3fca4aa4e458f8f6a3eebe159c1749ec9d7777c | b033f2c9d23a64d8d2a26bfc92a02e0a229a238e | /04_dash_callbacks/12_tabs.py | b55d9468064d02b53eb0aca36ee7359ef7e0eff9 | [] | no_license | KonradBier/learn_Dash_framework | 31fe88c3db25f5249d27b4e0eed9eb83709c44a9 | 2bdbfe942e2200af442e4b8e9352433f6a7e23bb | refs/heads/master | 2022-12-26T17:06:50.436345 | 2020-10-09T16:45:27 | 2020-10-09T16:45:27 | 294,767,678 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,054 | py | import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objects as go
from dash.dependencies import Input, Output
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
app.layout = html.Div([
dcc.Tabs(
id='tabs-1',
children=[
dcc.Tab(label='Python', value='py'),
dcc.Tab(label='SQL', value='sql')
],
value='py'
),
html.Div(id='div-1')
])
@app.callback(
Output('div-1', 'children'),
[Input('tabs-1', 'value')]
)
def render_content(tab):
if tab == 'py':
return html.Div([
dcc.Markdown("""
```
print('Hello World')
```
""")
])
elif tab == 'sql':
return html.Div([
dcc.Markdown("""
```sql
SELECT * FROM product;
```
""")
])
if __name__ == '__main__':
app.run_server(debug=True)
| [
"[email protected]"
] | |
e9421d2111cb25159e2a18dc345d96a72cd03978 | 6be97bdb46650296e1d3c88c5f6fd1ddb3d7d5d9 | /router_configuration.py | d75df33df0a7e6005720f030cbc2901ed9f7f4ac | [
"MIT"
] | permissive | Ariel96cs/SimpleMapReduceFramework | da94ed6f2b6a505da662e97dbf24b4510bf876c8 | 7cf482f94b1ed1e6436a7b229dfe09c3c0906d56 | refs/heads/master | 2022-11-04T21:29:59.630018 | 2020-07-22T17:01:13 | 2020-07-22T17:01:13 | 281,735,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,835 | py | import subprocess
import os
import docker
from mininet.net import Mininet
from mininet.node import Host, Controller, OVSSwitch, Node, OVSController
from mininet.link import TCLink
from mininet.log import setLogLevel, info, debug, error
from mininet.clean import cleanup
from mininet.cli import CLI
from mininet.topo import LinearTopo, Topo
class LinuxRouter(Node):
""""A Node with IP forwarding enabled."""
def config(self, **params):
super(LinuxRouter, self).config(**params)
# Enable forwarding on the router
self.cmd('sysctl net.ipv4.ip_forward=1')
def terminate(self):
self.cmd('sysctl net.ipv4.ip_forward=0')
super(LinuxRouter, self).terminate()
class NetworkTopo(Topo):
"A LinuxRouter connecting three IP subnets"
def build(self, **opts):
default_ip = '10.0.0.1/24' # IP address for r0-eth1
router = self.addNode('r0', cls=LinuxRouter, ip=default_ip)
s1 = self.addSwitch('s1')
filesystem1 = self.addHost('fs1', ip='10.0.0.2', defaultRoute='via 10.0.0.1')
worker1 = self.addHost('worker1', ip='10.0.0.3', defaultRoute='via 10.0.0.1')
worker2 = self.addHost('worker2', ip='10.0.0.4', defaultRoute='via 10.0.0.1')
worker3 = self.addHost('worker3', ip='10.0.0.5', defaultRoute='via 10.0.0.1')
client1 = self.addHost('client1', ip='10.0.0.6', defaultRoute='via 10.0.0.1')
filesystem2 = self.addHost('fs2', ip='10.0.0.7', defaultRoute='via 10.0.0.1')
self.addLink(s1, router, intfName2='r0-eth1', params2={'ip': default_ip})
self.addLink(filesystem1, s1)
self.addLink(filesystem2, s1)
self.addLink(worker1, s1)
self.addLink(worker2, s1)
self.addLink(worker3, s1)
self.addLink(client1, s1)
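# Usage note (added): the `topos` dict below lets Mininet load this topology
# directly, e.g.:  sudo mn --custom router_configuration.py --topo topo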
topos = { 'topo': (lambda: NetworkTopo()) } | [
"[email protected]"
] | |
f409bb5edbcbc9a05b0e3614787834fde94c5f45 | 9721d961c8523d65594b0b255ca670ba4e614b9c | /load_data.py | 80792959a997ab5e156a856cff4f6ebc96c8c90b | [] | no_license | HeXie-Tufts/Movie-Rating-Prediction-Variational-Inference | 4e58a75e4c9dae5a796a39ad98755574a274cb56 | 774bd2f828d1e44bbeea7b9d556ed5606d973706 | refs/heads/master | 2020-04-06T06:28:14.320954 | 2016-10-05T20:45:43 | 2016-10-05T20:45:43 | 69,200,778 | 3 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,536 | py | # Read and load movie rating data into a numpy array named ratings.
# Each rating is in [userID, movieID, rating] format.
# Assume MovieLens 100k, 1m, 10m and 20m data set are saved in Data/ml-100k, Data/ml-1m,
# Data/ml-10m, and Data/ml-20m respectively.
import os
import numpy as np
def get_data(size):
ratings = []
if size == "100k":
path = os.path.join("Data", "ml-100k", "u.data")
print("Read movie lens 100k data set")
f = open(path, "r")
while (1):
line = f.readline()
if line == "":
break
ratings.append(line.split()[0:-1])
f.close()
if size == "1m" or size == "10m":
path = os.path.join("Data", "ml-" + size, "ratings.dat")
print("Read movie lens " + size + " data set")
f = open(path, "r")
while (1):
line = f.readline()
if line == "":
break
ratings.append(line.split("::")[0:-1])
f.close()
if size == "20m":
path = os.path.join("Data", "ml-20m", "ratings.csv")
print("Read movie lens 20m data set")
f = open(path, "r")
line = f.readline()
while (1):
line = f.readline()
if line == "":
break
ratings.append(line.split(",")[0:-1])
f.close()
ratings = np.array(ratings, dtype = np.float32)
# permute the ratings array
ratings = np.random.permutation(ratings)
print("Loading data done")
return ratings
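# --- Added usage sketch (assumes the MovieLens files are present under Data/) ---
if __name__ == '__main__':
    ratings = get_data("100k")
    print(ratings.shape)  # expected: (100000, 3) rows of [userID, movieID, rating]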
| [
"[email protected]"
] | |
61958b82d04c313ae2be0eb2a1c6c3c9d676e970 | a99a6d655a10f6d702e9c4e64ef4c523961520b7 | 入门/example4_列表.py | 64681059ed41e02f52c7434431cc49ba3846ee73 | [] | no_license | angleF/Python-Study-notes | 2bab47e4619052ab8c0aa4bd7db153d0328a0300 | 71b015e07405575e527281e112a299f6a330190d | refs/heads/master | 2020-05-03T19:54:58.910533 | 2019-04-13T13:06:35 | 2019-04-13T13:06:35 | 178,792,991 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,576 | py | # A list is similar to a Java array, except its length is variable and the element types are not restricted
# In Python a list can hold arbitrary objects: numbers, strings, bools, floats, lists, functions and custom objects
my_list = [1, 2, 3.2, "hello", True, None, print]
print(my_list)
print(len(my_list))  # len() returns the length of the list
print(my_list[0])  # get the first element of the list
print(my_list[-1])  # get the last element of the list
# Slicing, i.e. extracting a sub-list
stus = ["张三", "李四", "王五", "赵六", "田七", "刘八", "麻子"]
print(stus[0:3])  # copy the first three elements into a new list; the start index is inclusive, the end index exclusive
print(stus[:])  # equivalent to making a full copy of the list
print(stus[:4:2])
print(stus[::-1])  # slice starting from the tail of the list; the third argument is the step: after taking an element, skip (step - 1) elements; the step defaults to 1 and cannot be 0
list0_ = [1, 2, 3]
list1_ = [4, 5, 6]
list_ = list0_ + list1_  # concatenate lists
print(list_)
l = (list0_ + list1_) * 5  # repeat the concatenated list 5 times (including the original copy)
print(l)
# in / not in check the elements of the list
print("中" in stus)  # check whether the given element exists in the list
print("张三" not in stus)  # check whether the given element is absent from the list
print(max(l))  # get the largest element in the list
print(min(l))  # get the smallest element in the list
print(stus.index("王五"))  # get the index of the given element in the list; raises an error when the element is not present
print(l.count(1))  # count how many times the given element appears in the list
for s in stus:
    print(s)
| [
"[email protected]"
] | |
75a42862c43c6dca1c05eb337d790f71f13ec067 | 673d11fe8266e0b78208b61c447b9a30eb19f363 | /fusioncharts/urls.py | b921f3a3ac7f17728abd12c40b6628da674729dd | [] | no_license | ks-manish/stock_data | 42cf7cdaf15f58b4cc2db576fb9b3c6b559a76ba | 0dcad0eae13cc3d8af600afffa5e67d913a3f437 | refs/heads/master | 2020-09-07T00:48:09.485279 | 2019-11-09T07:31:34 | 2019-11-09T07:31:34 | 220,606,918 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 929 | py | """fusioncharts URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/dev/topics/http/urls/
Examples:
Function views
1. Add an import: from samples import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
"""
from django.urls import path
from django.conf.urls import url
from django.contrib import admin
from fusioncharts.views import catalogue, chart
from fusioncharts import datahandler
from fusioncharts.samples import Line_Chart_With_Time_Axis, Plotting_Multiple_Series_On_Time_Axis
urlpatterns = [
url(r'^$', catalogue),
url(r'^admin/', admin.site.urls),
url(r'^datahandler', datahandler.getdata),
url(r'^Line-Chart-With-Time-Axis', Line_Chart_With_Time_Axis.chart, name='chart'),
url(r'^Plotting-Multiple-Series-On-Time-Axis', Plotting_Multiple_Series_On_Time_Axis.chart, name='chart'),
] | [
"[email protected]"
] | |
c2846ce97c19f9553db35ded901ed58aa038c6b7 | 730d2bfd9393b73ebd00e46a6c8940f71585bf7f | /delete_train2_masks.py | efa421f2bef356220068f1da12df37ee843ee0ae | [] | no_license | zrinka-aka-ziz/bach-thes | 22504c34393596074f92d0daf52860080d885cf3 | af3acedb803987366565630968c7b2d39aeb7c04 | refs/heads/master | 2023-06-07T02:47:42.768376 | 2021-07-01T19:27:52 | 2021-07-01T19:27:52 | 369,268,914 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 614 | py | import os, shutil
from os import path
from os.path import basename
from configure import Config
config = Config()
folders = []
folders.append("/content/bach-thes/notebooks/UNet/Train_images/train2_masks")
for folder in folders:
for filename in os.listdir(folder):
file_path = os.path.join(folder, filename)
try:
if os.path.isfile(file_path) or os.path.islink(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except Exception as e:
print('Failed to delete %s. Reason: %s' % (file_path, e))
print("Deleted contents of folder {}".format(folder))
| [
"[email protected]"
] | |
2e24fbd669e7753a61cadb89e11c4ae41acecc10 | d4b4513c6314871a268ab97d0aece052a632d57d | /soft/server_python/pay_server/handlers/base_ex_handler.py | 38064e65e3b9798b33a03eef719d34d4af6da3b3 | [] | no_license | atom-chen/tssj | f99b87bcaa809a99e8af0e2ba388dbaac7156a31 | f4345ad6b39f7f058fac987c2fed678d719a4482 | refs/heads/master | 2022-03-14T22:23:50.952836 | 2019-10-31T11:47:28 | 2019-10-31T11:47:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,251 | py | # -*- coding: utf-8 -*-
import base_handler
import datetime
import urllib.parse
import tornado.gen
from tornado.httpclient import AsyncHTTPClient
import base_handler
import json
import base64
class BaseExHandler(base_handler.BaseHandler):
client_id = None
client_secret = None
access_token = None
refresh_token = None
access_token_create_time = None
access_token_expire_time = None
def __init__(self, *args, **kwargs):
        super(BaseExHandler, self).__init__(*args, **kwargs)
@tornado.gen.coroutine
def verify_google(self, product_id, purchase_token, package_name):
url_fmt = 'https://www.googleapis.com/androidpublisher/v2/applications/{packageName}/purchases/products/{productId}/tokens/{token}'
url = url_fmt.format(packageName=package_name,
productId=product_id,
token=purchase_token)
acctoken = yield self.get_token_google()
params = {"access_token": acctoken}
url += "?" + urllib.parse.urlencode(params)
http_client = AsyncHTTPClient()
try:
respone = yield http_client.fetch(url)
except Exception as e:
print(e)
raise tornado.gen.Return(-1)
try:
data = json.loads(respone.body)
except Exception as e:
print(e)
raise tornado.gen.Return(-1)
raise tornado.gen.Return(data)
@tornado.gen.coroutine
def get_token_google(self):
self.need_get_access_token = False
if self.access_token:
now = datetime.datetime.now()
if now >= self.access_token_expire_time:
self.need_get_access_token = True
else:
self.need_get_access_token = True
if not self.need_get_access_token:
raise tornado.gen.Return(self.access_token)
url = 'https://accounts.google.com/o/oauth2/token'
headers = {"Content-type": "application/x-www-form-urlencoded"}
body = dict(
grant_type='refresh_token',
client_id=self.client_id,
client_secret=self.client_secret,
refresh_token=self.refresh_token,
)
body = urllib.parse.urlencode(body)
http_client = AsyncHTTPClient()
try:
rsp = yield http_client.fetch(url, method="POST",
headers=headers,
body=body)
jdata = json.loads(rsp.body)
except Exception as e:
print(e)
raise tornado.gen.Return("error")
if 'access_token' in jdata:
self.access_token = jdata['access_token']
self.access_token_create_time = datetime.datetime.now()
self.access_token_expire_time = self.access_token_create_time + datetime.timedelta(
seconds=jdata['expires_in'] * 2 / 3
)
raise tornado.gen.Return(self.access_token)
else:
raise tornado.gen.Return("error")
@tornado.gen.coroutine
def verify_apple(self, code, username, serverid, rid, issandbox):
url = "https://sandbox.itunes.apple.com/verifyReceipt" if issandbox else "https://buy.itunes.apple.com/verifyReceipt"
res = -1
orderid = ""
product_id = ""
buy_date_ms = ""
errMsg = ""
try:
headers = {"Content-type": "application/json"}
body = json.dumps({"receipt-data": code}).encode('utf8')
http_client = AsyncHTTPClient()
response = yield http_client.fetch(url, method="POST",
headers=headers,
body=body)
content = json.loads(response.body)
res = content['status']
if res == 0:
orderid = content["receipt"]["transaction_id"]
product_id = content["receipt"]["product_id"]
buy_date_ms = content["receipt"]["purchase_date_ms"]
except Exception as e:
print(e)
errMsg = e
res = -1
raise tornado.gen.Return((res, orderid, product_id, buy_date_ms))
| [
"[email protected]"
] | |
1052118782fd6736888ccb02a6db1b3886405142 | 90419da201cd4948a27d3612f0b482c68026c96f | /sdk/python/pulumi_azure_nextgen/changeanalysis/v20200401preview/configuration_profile.py | 9786500cbb616ed651a6815fdd97c64fa145a53a | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | test-wiz-sec/pulumi-azure-nextgen | cd4bee5d70cb0d332c04f16bb54e17d016d2adaf | 20a695af0d020b34b0f1c336e1b69702755174cc | refs/heads/master | 2023-06-08T02:35:52.639773 | 2020-11-06T22:39:06 | 2020-11-06T22:39:06 | 312,993,761 | 0 | 0 | Apache-2.0 | 2023-06-02T06:47:28 | 2020-11-15T09:04:00 | null | UTF-8 | Python | false | false | 5,413 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['ConfigurationProfile']
class ConfigurationProfile(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
identity: Optional[pulumi.Input[pulumi.InputType['ResourceIdentityArgs']]] = None,
profile_name: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['ConfigurationProfileResourcePropertiesArgs']]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
A profile object that contains change analysis configuration, such as notification settings, for this subscription
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['ResourceIdentityArgs']] identity: The identity block returned by ARM resource that supports managed identity.
:param pulumi.Input[str] profile_name: The name of the configuration profile. The profile name should be set to 'default', all other names will be overwritten.
:param pulumi.Input[pulumi.InputType['ConfigurationProfileResourcePropertiesArgs']] properties: The properties of a configuration profile.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['identity'] = identity
if profile_name is None:
raise TypeError("Missing required property 'profile_name'")
__props__['profile_name'] = profile_name
__props__['properties'] = properties
__props__['name'] = None
__props__['system_data'] = None
__props__['type'] = None
super(ConfigurationProfile, __self__).__init__(
'azure-nextgen:changeanalysis/v20200401preview:ConfigurationProfile',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ConfigurationProfile':
"""
Get an existing ConfigurationProfile resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return ConfigurationProfile(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def identity(self) -> pulumi.Output[Optional['outputs.ResourceIdentityResponse']]:
"""
The identity block returned by ARM resource that supports managed identity.
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> pulumi.Output['outputs.ConfigurationProfileResourcePropertiesResponse']:
"""
The properties of a configuration profile.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output[Optional['outputs.SystemDataResponse']]:
"""
Top level metadata https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/common-api-contracts.md#system-metadata-for-all-azure-resources
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| [
"[email protected]"
] | |
8bd487363ed9f613366edbd1bf42a38de14dc47e | 59703a80c1d7a75e994d6cd99a5551c0fe6f5ff8 | /구현/나는 행복합니다~.py | 9ebf8f1bebf896d21b31d1402ecbb5f294bb589a | [] | no_license | Park-min-hyoung/Baekjoon | 89f53f09a53ccb3b30804cd8bbc64e70425f0e07 | 6a6596885a9967e1962a87947a476ccc1e9804ec | refs/heads/master | 2023-08-27T12:21:30.528803 | 2021-10-13T09:06:49 | 2021-10-13T09:06:49 | 325,918,761 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | n, m, k = map(int, input().split())
for i in range(n):
for j in range(m):
v = ((m - 1) * i) + (i + j)
if v == k:
print(i, j)
break
'''An easier way to solve it
n, m, k = map(int, input().split())
a = k // m
b = k % m
print(a, b)
''' | [
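# Worked check (added comment): v = ((m - 1) * i) + (i + j) simplifies to
# m * i + j, so i = k // m and j = k % m; e.g. n=2, m=3, k=4 prints "1 1"
# with both approaches.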
"[email protected]"
] | |
795cf86e497387026cefa28c0577a696475e152e | 2f6b8edbc0cd5872f3078cffe1312ee5968d89aa | /matches/models.py | 52a7dbac968aa3192b1ddfc4b9da744c8c3d0f9a | [] | no_license | bkrop/django_dateapp | cfe1b020be72c55c2c676bd929f019d6d764ff30 | c5d042474e349612cbc60cb0dde587d866c90dde | refs/heads/master | 2022-09-05T17:02:33.500586 | 2020-06-02T19:19:26 | 2020-06-02T19:19:26 | 266,531,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 275 | py | from django.db import models
from django.contrib.auth.models import User
class Match(models.Model):
user1 = models.ForeignKey(User, on_delete=models.CASCADE, related_name='matches1')
user2 = models.ForeignKey(User, on_delete=models.CASCADE, related_name='matches2')
| [
"[email protected]"
] | |
cba37b7b8c7b212b42e70776c97cefab5e2eee5b | 0f6510ed80c8d9dafbe402082d2a7880a6873b15 | /test_array2.py | d771c67ba23dc6827ce81873f0c13afb61966acb | [] | no_license | surajsvcs1231/SurajProject | 7f09807f51d0f15169b18a731b5593822de39fc5 | 255c50b618f3f6c5da695aee2dbea4976a5739ea | refs/heads/master | 2020-06-15T16:33:31.109742 | 2019-12-28T06:44:32 | 2019-12-28T06:44:32 | 195,342,860 | 0 | 0 | null | 2019-12-28T06:44:33 | 2019-07-05T05:16:15 | Python | UTF-8 | Python | false | false | 210 | py | from numpy import *
arr1=array([5,6])
arr2=array([9,10])
print("arr1+arr2",arr1+arr2)
arr1+=5
print(arr1)
arr3 = array([9, 5, 8, 1, 12, 6, 13, 65, 17])
print(sort(arr3))
print(concatenate([arr1,arr3]))
arr4=arr1
print(arr4)
| [
"[email protected]"
] | |
b5e119ab739226922e949ed8bc81ed5dbc23894a | fdd1adf421cafbdb0744d70136ae5185f373ecaf | /CycloneHistoryDbWriter.py | 29b52d7ee10979afd60d162fbc971193d625431e | [] | no_license | veakulin/etl | 8074166e926c4d735ceaf9d0ec21c5775bf90922 | eecceffb7f656b846886563733c20bc607415329 | refs/heads/main | 2023-08-03T04:46:11.378986 | 2021-09-14T10:49:45 | 2021-09-14T10:49:45 | 391,179,946 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,158 | py | # coding=utf-8
class CycloneHistoryDbWriter:
def __init__(self, dbConnection):
self.__dbConnection = dbConnection
self.__table = "cyclone_history"
self.__prepareServer()
    def write(self, data):  # data holds a tuple (id, date, status)
with self.__dbConnection.cursor() as cursor:
cursor.execute('execute checkState (%s, %s, %s)', (data[0], data[1], data[2]))
result = cursor.fetchall()[0][0]
if result == 0:
cursor.execute('execute newPeriod (%s, %s, %s)', (data[0], data[1], data[2]))
            elif result == 1 or result == 3:  # Two slightly different situations, but handled the same way. See the checkState query.
cursor.execute('execute closePeriod (%s, %s)', (data[0], data[1]))
cursor.execute('execute newPeriod (%s, %s, %s)', (data[0], data[1], data[2]))
elif result == 2:
cursor.execute('execute updatePeriod (%s, %s)', (data[0], data[1]))
elif result == 4:
cursor.execute('execute deletePeriod (%s, %s)', (data[0], data[1]))
cursor.execute('execute closePeriod (%s, %s)', (data[0], data[1]))
cursor.execute('execute updatePeriod (%s, %s)', (data[0], data[1]))
else:
                print 'Something prevented writing the record ', data, '\n'
#
def __prepareServer(self):
with self.__dbConnection.cursor() as cursor:
sql = 'prepare checkState as ' \
'with lastPeriod as (' \
'select date_from, date_to, status from {0} where id = $1 order by date_to desc fetch first 1 row only) ' \
                'select case when (not exists(select 1 from lastPeriod)) then 0 /* no records for this cyclone yet */' \
                'when ((select date_to from lastPeriod) < $2) and ' \
                '((select status from lastPeriod) <> $3) then 1 /* there is a record for the nearest previous day with a different status */ ' \
                'when ((select date_to from lastPeriod) < $2) and ' \
                '((select status from lastPeriod) = $3) then 2 /* there is a record for the nearest previous day with the same status */ ' \
                'when ((select date_to from lastPeriod) = $2) and ' \
                '((select date_from from lastPeriod) < (select date_to from lastPeriod)) and ' \
                '(select status from lastPeriod) <> $3 then 3 /* there is a record for today, but it is the tail of a multi-day period with a different status */ ' \
                'when ((select date_to from lastPeriod) = $2) and ' \
                '((select date_from from lastPeriod) = (select date_to from lastPeriod)) and ' \
                '(select status from lastPeriod) <> $3 then 4 /* there is a record for today only, with a different status */ ' \
                'else -1 /* something else, most likely an attempt to rewrite the past */ ' \
'end' \
.format(self.__table)
cursor.execute(sql)
sql = 'prepare newPeriod as ' \
'insert into {0} (id, date_from, date_to, status) values ($1, $2, $2, $3)' \
.format(self.__table)
cursor.execute(sql)
sql = 'prepare closePeriod as ' \
'with tomorrow as (' \
'select (extract(year from $2::text::date - 1)::text || ' \
'lpad(extract(month from $2::text::date - 1)::text, 2, \'0\') || ' \
'lpad(extract(day from $2::text::date - 1)::text, 2, \'0\'))::integer as value), ' \
'lastPeriod as (' \
'select date_to from {0} where id = $1 order by date_to desc fetch first 1 row only) ' \
'update {0} set date_to = (select value from tomorrow) where id = $1 and date_to = (select date_to from lastPeriod)' \
.format(self.__table)
cursor.execute(sql)
sql = 'prepare updatePeriod as ' \
'with lastPeriod as (' \
'select date_to from {0} where id = $1 order by date_to desc fetch first 1 row only) ' \
'update {0} set date_to = $2 where id = $1 and date_to = (select date_to from lastPeriod)' \
.format(self.__table)
cursor.execute(sql)
sql = 'prepare deletePeriod as ' \
'delete from {0} where id = $1 and date_from = $2 and date_to = $2' \
.format(self.__table)
cursor.execute(sql)
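# --- Added usage sketch (illustrative; the connection details and values are assumptions) ---
# import psycopg2
# writer = CycloneHistoryDbWriter(psycopg2.connect(dbname='storms'))
# writer.write((cyclone_id, 20240101, 'active'))  # (id, date as yyyymmdd integer, status)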
| [
"[email protected]"
] | |
3cfca532ad03f21d2055c048505310955eac416d | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /cMyMt377ReBsoTHnz_2.py | 0abfc3d1d6df4d6d665b66ac913eddfa6e8387f9 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 601 | py | """
Write a function that converts a **dictionary** into a **list** of keys-values
**tuples**.
### Examples
dict_to_list({
"D": 1,
"B": 2,
"C": 3
}) ➞ [("B", 2), ("C", 3), ("D", 1)]
dict_to_list({
"likes": 2,
"dislikes": 3,
"followers": 10
}) ➞ [("dislikes", 3), ("followers", 10), ("likes", 2)]
### Notes
Return the elements in the list in alphabetical order.
"""
def dict_to_list(d):
    return [(key, d[key]) for key in sorted(d)]
| [
"[email protected]"
] | |
cdb710ec0ba17620866dec0d116500853ab820b7 | 113ef54e42a047e9e631b557012411ecfac72c47 | /siphon/web/apps/bundlers/migrations/0001_initial.py | 31988e23b04bb58e38569d3d153d3b5dc85e951f | [
"MIT"
] | permissive | siphoncode/siphon-web | 77bd241d5f3912ee78155c2b71b75fb59e1b5e27 | c398427dc1b73f70b94cd2f60a13e4d26c71610e | refs/heads/master | 2021-01-19T21:48:15.253451 | 2016-08-04T16:53:18 | 2016-08-04T16:53:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 559 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Bundler',
fields=[
('id', models.AutoField(verbose_name='ID', primary_key=True, serialize=False, auto_created=True)),
('hostname', models.CharField(unique=True, max_length=255)),
('port', models.IntegerField(default=80)),
],
),
]
| [
"[email protected]"
] | |
bca46aa5667bfdd6b01f2b2e64c77ffc74ebd092 | e3e01d265833bcd6461130e575b63b261a2c74aa | /detect/sort/kalman_filter.py | 8a4ea41eb79f7849374ef110fddc3cdbf9df1b0b | [
"MIT"
] | permissive | bdkiran/openObserver | aab3badab1e7818081b93a81c999a3b590742d6f | 2c9b71f2e7fa021887d7fa760920dea06e67ba72 | refs/heads/master | 2023-08-27T02:28:30.479898 | 2021-10-28T22:59:23 | 2021-10-28T22:59:23 | 405,126,269 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,188 | py | import numpy as np
from numpy import dot, zeros, eye
from numpy.linalg import inv
class KalmanFilter:
'''
Kalman filtering, also known as linear quadratic estimation (LQE), is an algorithm that uses a series of measurements
observed over time, containing statistical noise and other inaccuracies,
and produces estimates of unknown variables that tend to be more accurate than those based on a single measurement
alone, by estimating a joint probability distribution over the variables for each time frame.
'''
def __init__(self, dim_x, dim_z):
self.dim_x = dim_x
self.dim_z = dim_z
self.x = zeros((dim_x, 1))
self.P = eye(dim_x)
self.Q = eye(dim_x)
self.F = eye(dim_x)
self.H = zeros((dim_z, dim_x))
self.R = eye(dim_z)
self.M = zeros((dim_z, dim_z))
self._I = eye(dim_x) # This helps the I matrix to always be compatible to the state vector's dim
self.x_prior = np.copy(self.x)
self.P_prior = np.copy(self.P)
def predict(self):
'''
Predict next state (prior) using the Kalman filter state propagation
equations.
'''
self.x = dot(self.F, self.x) # x = Fx
self.P = dot(self.F, dot(self.P, self.F.T)) + self.Q # P = FPF' + Q
self.x_prior = np.copy(self.x)
self.P_prior = np.copy(self.P)
def update(self, z):
'''
At the time step k, this update step computes the posterior mean x and covariance P
of the system state given a new measurement z.
'''
# y = z - Hx (Residual between measurement and prediction)
y = z - np.dot(self.H, self.x)
PHT = dot(self.P, self.H.T)
# S = HPH' + R (Project system uncertainty into measurement space)
S = dot(self.H, PHT) + self.R
# K = PH'S^-1 (map system uncertainty into Kalman gain)
K = dot(PHT, inv(S))
# x = x + Ky (predict new x with residual scaled by the Kalman gain)
self.x = self.x + dot(K, y)
# P = (I-KH)P
I_KH = self._I - dot(K, self.H)
self.P = dot(I_KH, self.P) | [
"[email protected]"
] | |
5369ae84a01a5b2974f6ccb7a8326aaaeae3b787 | 1cc631c61d85076c192a6946acb35d804f0620e4 | /Source/third_party/boost_1_58_0/libs/mpl/preprocessed/preprocess.py | 816ffdc9a69bcc89914e4d93307131bea7fe088a | [
"BSL-1.0"
] | permissive | reven86/dreamfarmgdk | f9746e1c0e701f243c7dd2f14394970cc47346d9 | 4d5c26701bf05e89eef56ddd4553814aa6b0e770 | refs/heads/master | 2021-01-19T00:58:04.259208 | 2016-10-04T21:29:28 | 2016-10-04T21:33:10 | 906,953 | 2 | 5 | null | null | null | null | UTF-8 | Python | false | false | 2,782 | py |
# Copyright Aleksey Gurtovoy 2001-2004
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
#
# See http://www.boost.org/libs/mpl for documentation.
# $Id$
# $Date$
# $Revision$
import pp
import shutil
import os.path
import os
import string
import sys
preprocess_cmd = open( "preprocess.cmd" ).readlines()[0]
def process( file, boost_root, dst_dir, mode ):
file_path = "%s.hpp" % os.path.splitext( file )[0]
os.system( preprocess_cmd % {
'boost_root': boost_root
, 'mode': mode
, 'file': file
, 'file_path': file_path
} )
os.rename( file_path, "%s.tmp" % file_path )
pp.main( "%s.tmp" % file_path, file_path )
os.remove( "%s.tmp" % file_path )
filename = os.path.basename(file_path)
dst_dir = os.path.join( dst_dir, mode )
dst_file = os.path.join( dst_dir, filename )
if os.path.exists( dst_file ):
shutil.copymode( filename, dst_file )
shutil.copy( filename, dst_dir )
os.remove( filename )
def process_all( root, boost_root, dst_dir, mode ):
files = os.listdir( root )
for file in files:
path = os.path.join( root, file )
if os.path.splitext( file )[1] == ".cpp":
process( path, boost_root, dst_dir, mode )
else:
if os.path.isdir( path ):
process_all( path, boost_root, dst_dir, mode )
def main( all_modes, src_dir, dst_dir ):
if len( sys.argv ) < 2:
print "\nUsage:\n\t %s <mode> <boost_root> [<source_file>]" % os.path.basename( sys.argv[0] )
print "\nPurpose:\n\t updates preprocessed version(s) of the header(s) in \"%s\" directory" % dst_dir
print "\nExample:\n\t the following command will re-generate and update all 'apply.hpp' headers:"
print "\n\t\t %s all f:\\cvs\\boost apply.cpp" % os.path.basename( sys.argv[0] )
sys.exit( -1 )
if sys.argv[1] == "all":
modes = all_modes
else:
modes = [sys.argv[1]]
boost_root = sys.argv[2]
dst_dir = os.path.join( boost_root, dst_dir )
for mode in modes:
if len( sys.argv ) > 3:
file = os.path.join( os.path.join( os.getcwd(), src_dir ), sys.argv[3] )
process( file, boost_root, dst_dir, mode )
else:
process_all( os.path.join( os.getcwd(), src_dir ), boost_root, dst_dir, mode )
if __name__ == '__main__':
main(
["bcc", "bcc551", "gcc", "msvc60", "msvc70", "mwcw", "dmc", "no_ctps", "no_ttp", "plain"]
, "src"
, os.path.join( "boost", "mpl", "aux_", "preprocessed" )
)
| [
"[email protected]"
] | |
0329045be55ebaa28ba8b89cae4674918a4afd35 | b1f9ad79fabf111c802e766aaeee6bfcba0c034c | /code/models/__init__.py | 1f0f12d78f34e629c8e079f28e10a7ec95d398f5 | [
"MIT"
] | permissive | retrieva/lencon | e1a12d6e08966642cfccdd75e733d64afc26edce | 16c65471b30436e4e96049bbdccf6c49c9b1f1f4 | refs/heads/master | 2021-05-02T02:11:48.226647 | 2018-01-29T02:44:14 | 2018-01-29T02:44:14 | 120,880,190 | 0 | 0 | null | 2018-02-09T08:40:23 | 2018-02-09T08:40:22 | null | UTF-8 | Python | false | false | 374 | py | #coding: utf8
from models import encoder_decoder
from models import attention
from models import len_emb
from models import len_init
LenInit = len_init.LenInitLateAttn
LenEmb = len_emb.LenEmbLateAttn
EncoderDecoder = attention.EncDecLateAttn
EncDecEarlyAttn = attention.EncDecEarlyAttn
LenInitEarlyAttn = len_init.LenInitEarlyAttn
LenEmbEarlyAttn= len_emb.LenEmbEarlyAttn
| [
"[email protected]"
] | |
61405195fdf3f543b057bcceb5760eb3a98400fa | 7e30292db480c679094d418938cceab863f69354 | /examples/Joins.py | 22b2600e47d8eaae729cfe3e13921b21dd3f8ac1 | [] | no_license | ragsdale/cs327e | 7896caf4992030ce7c0fa821668367aa63428381 | fd3d676428f4fe6ed6ed892efb69d9c2846b911b | refs/heads/master | 2021-01-17T12:44:57.177119 | 2014-03-28T00:11:54 | 2014-03-28T00:11:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,682 | py | #!/usr/bin/env python3
# --------
# Joins.py
# --------
def cross_join_1 (r, s) :
x = []
for v in r :
for w in s :
y = {}
for u in v :
y[u] = v[u]
for u in w :
y[u] = w[u]
x.append(y)
return x
def cross_join_2 (r, s) :
return [dict(list(v.items()) + list(w.items())) for v in r for w in s]
def theta_join_1 (r, s, bp) :
x = []
for v in r :
for w in s :
if bp(v, w) :
y = {}
for u in v :
y[u] = v[u]
for u in w :
y[u] = w[u]
x.append(y)
return x
def theta_join_2 (r, s, bp) :
return [dict(list(v.items()) + list(w.items())) for v in r for w in s if bp(v, w)]
def match (v, w) :
for i in v :
for j in w :
if (i == j) and (v[i] == w[j]) :
return True
return False
def natural_join_1 (r, s) :
x = []
for v in r :
for w in s :
if match(v, w) :
y = {}
for u in v :
y[u] = v[u]
for u in w :
y[u] = w[u]
x.append(y)
return x
def natural_join_2 (r, s) :
return [dict(list(v.items()) + list(w.items())) for v in r for w in s if match(v, w)]
print("Joins.py")
r = [ \
{"A" : 1, "B" : 6},
{"A" : 2, "B" : 7},
{"A" : 3, "B" : 8}]
assert(len(r) == 3)
s = [ \
{"A" : 4, "C" : 6},
{"A" : 1, "C" : 7},
{"A" : 2, "C" : 8},
{"A" : 2, "C" : 9}]
assert(len(s) == 4)
def test (f) :
x = f(r, s)
assert(len(x) == 12)
assert(
x
==
[{'A': 4, 'B': 6, 'C': 6},
{'A': 1, 'B': 6, 'C': 7},
{'A': 2, 'B': 6, 'C': 8},
{'A': 2, 'B': 6, 'C': 9},
{'A': 4, 'B': 7, 'C': 6},
{'A': 1, 'B': 7, 'C': 7},
{'A': 2, 'B': 7, 'C': 8},
{'A': 2, 'B': 7, 'C': 9},
{'A': 4, 'B': 8, 'C': 6},
{'A': 1, 'B': 8, 'C': 7},
{'A': 2, 'B': 8, 'C': 8},
{'A': 2, 'B': 8, 'C': 9}])
test(cross_join_1)
test(cross_join_2)
def test (f) :
x = f(r, s, lambda v, w : v["A"] == w["A"])
assert(len(x) == 3)
assert(
x
==
[{'A': 1, 'B': 6, 'C': 7},
{'A': 2, 'B': 7, 'C': 8},
{'A': 2, 'B': 7, 'C': 9}])
test(theta_join_1)
test(theta_join_2)
def test (f) :
x = f(r, s)
assert(len(x) == 3)
assert(
x
==
[{'A': 1, 'B': 6, 'C': 7},
{'A': 2, 'B': 7, 'C': 8},
{'A': 2, 'B': 7, 'C': 9}])
test(natural_join_1)
test(natural_join_2)
print("Done.")
| [
"[email protected]"
] | |
3b0a6d7ee252f0a62e3c3b65d97449b829c79187 | b186830ab8e74452cb2e8ff0188d0a8fa3d15b59 | /isBeautifulString.py | 28d7d42b616a6bf7a7717f070ea4f66957175180 | [] | no_license | cafaray/atco.de-fights | e3a278bd51feb7bee05623eaf20d1ea4db535eb6 | 1304bcabf1f18202c14a20b38144854ef05bf776 | refs/heads/master | 2022-11-26T05:57:37.564504 | 2020-07-28T12:07:01 | 2020-07-28T12:07:01 | 124,416,047 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 799 | py | import re
def isBeautifulString(inputString):
    # group identical letters together (kept from the original approach)
    inputString = "".join(sorted(inputString))
    # the string is beautiful when count('a') >= count('b') >= ... >= count('z')
    previousCount = None
    for code in range(ord('a'), ord('z') + 1):
        currentCount = len(re.findall(chr(code), inputString))
        if previousCount is not None and previousCount < currentCount:
            return False
        previousCount = currentCount
return True | [
"[email protected]"
] | |
d387f33e8da6e96eed4a3d1678adeeca46760d68 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2128/60657/248788.py | 279a31a79972b37452759dc5d2286957bd030dc7 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | import math
A=input().split(',')
A=[int(x) for x in A]
cons=0
n = len(A)
s = 0
sA = 0
for i in range(n):
s += A[i] * i
sA += A[i]
cons = s
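# Added comment: s now holds F(0) = sum(i * A[i]); each rotation satisfies
# F(k) = F(k-1) + sA - n * A[n-k], which the loop below uses to track the max.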
for i in range(1,n):
s = s + sA - n * A[n-i]
cons = max(cons, s)
print(cons) | [
"[email protected]"
] | |
2774c6663b2e06020a13842b09ff5e91a404ed82 | b01e63071b744dc438193d182b1ae8702241cac1 | /functional_tests/page_objects.py | 82a721e7472332d6d800f96c221b1790a9b62ccf | [] | no_license | tilacog/paranoid | ad0d340021e9420cfbcf9b8b8b10dd29889d9483 | e28b66076068eada6d1ce7c8ee9726460bc0fe79 | refs/heads/master | 2020-04-06T07:04:33.484264 | 2016-03-25T21:34:44 | 2016-03-25T21:34:44 | 42,269,554 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,398 | py |
'''
Website testing Page Objects
'''
from unittest import TestCase
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webdriver import WebDriver
class PageObject(object):
""" Page Object pattern.
Parameters
----------
test: TestCase instance, with a predefined selenium webdriver
instance.
Examples
--------
With page elements::
from page_objects import PageObject, page_element
class LoginPage(PageObject):
username = page_element(id='username')
            password = page_element(name='password')
login = page_element(css='input[type="submit"]')
login_page = LoginPage()
login_page.username = 'foo'
assert login_page.username.text == 'foo'
login_page.login.click()
"""
def __init__(self, test, webdriver_attr='browser'):
webdriver = getattr(test, webdriver_attr)
assert isinstance(test, TestCase)
assert isinstance(webdriver, WebDriver)
self.test = test
self.w = webdriver
self.browser = self.w # friendly alias
class PageElement(object):
""" Page Element pattern.
Required Attributes:
        locator: (`selenium.webdriver.common.by.By`, locator text)
            Eg: (By.ID, 'username') or (By.XPATH, '//input[@name="password"]')
"""
locator = None
def __init__(self):
assert self.locator is not None
def __get__(self, instance, owner):
if not instance:
return None
try:
return instance.w.find_element(*self.locator)
except NoSuchElementException:
return None
def __set__(self, instance, value):
elem = self.__get__(instance, None)
if not elem:
raise ValueError(
'Can\'t set value "{value}", element not found on {page_name}.\n'
'Method:"{method}", Selector:"{selector}"'.format(
value=value,
page_name=instance.__class__.__name__,
method=self.locator[0],
selector=self.locator[1]
)
)
elem.send_keys(value)
class MultiPageElement(PageElement):
""" Like `_PageElement` but returns multiple results
"""
def __get__(self, instance, owner):
try:
return instance.w.find_elements(*self.locator)
except NoSuchElementException:
return []
# Map factory arguments to webdriver locator enums
_LOCATOR_MAP = {'css': By.CSS_SELECTOR,
'id': By.ID,
'name': By.NAME,
'xpath': By.XPATH,
'link_text': By.LINK_TEXT,
'partial_link_text': By.PARTIAL_LINK_TEXT,
'tag_name': By.TAG_NAME,
'class_name': By.CLASS_NAME,
}
def page_element(klass=PageElement, **kwargs):
""" Factory method for page elements
Parameters
----------
css: `str`
Use this css locator
id: `str`
Use this element ID locator
name: `str`
Use this element name locator
xpath: `str`
Use this xpath locator
link_text: `str`
Use this link text locator
partial_link_text: `str`
Use this partial link text locator
tag_name: `str`
Use this tag name locator
class_name: `str`
Use this class locator
Examples
--------
Page Elements can be used like this::
from page_objects import PageObject, page_element
            class MyPage(PageObject):
elem1 = page_element(css='div.myclass')
elem2 = page_element(id='foo')
"""
if not kwargs:
raise ValueError("Please specify a locator")
if len(kwargs) > 1:
raise ValueError("Please specify only one locator")
k, v = next(iter(kwargs.items()))
class Element(klass):
locator = (_LOCATOR_MAP[k], v)
return Element()
def multi_page_element(**kwargs):
""" As for `page_element`, but returns a `MutliPageElement`
"""
return page_element(klass=MultiPageElement, **kwargs)
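
# A minimal usage sketch (hypothetical page and selectors, not part of this module):
#
#     class SearchPage(PageObject):
#         query = page_element(name='q')
#         results = multi_page_element(css='div.result')
#
#     page = SearchPage(some_testcase)  # some_testcase.browser is a WebDriver
#     page.query = 'daikon'
#     assert isinstance(page.results, list)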
| [
"[email protected]"
] | |
c8d19f97de5d6f8b825f56efbabc8be10792b86a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02795/s029564266.py | 458206a6ba58430e9f3d657fc511b7820a26d955 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | py | H = int(input())
W = int(input())
N = int(input())
x = H if H>W else W
x2 = 0 if N%x==0 else 1
print(N//x + x2)
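# the result is ceil(N / max(H, W)), computed without floating point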
| [
"[email protected]"
] | |
7aebba7750acf3847ec26d87c4ac53601ddfaf50 | dc0a1a670dcbf862d45f8957419b04beffc9462d | /0x00-python-hello_world/8-concat_edges.py~ | 7562b0d6bfd36ba0a803fc39d2b633bee56b1ea5 | [] | no_license | tlnguyen2018/holbertonschool-higher_level_programming | 3b4e67895e1fe70b9a727893edcfafe86d159d19 | fd307b134fb298e1b9e0ef19c353f0b40ae5998c | refs/heads/master | 2020-04-09T09:58:55.209093 | 2019-01-26T01:47:36 | 2019-01-26T01:47:36 | 160,253,304 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 210 | #!/usr/bin/python3
str = "Python is an interpreted, interactive, object-oriented programming\
language that combines remarkable power with very clear syntax"
str = str[39:67] + str[107:112] + str[0:6]
print(str)
| [
"[email protected]"
] | ||
51a1625a29e94d22f6e70e9ec6b12db61a54aebd | 6b0778d2c926fade2681f20bbc732b65bf8e4ddf | /manager.py | 8b71824ddecae1ce066259abb02135a02c14bc21 | [] | no_license | volodymyrhusak/Users | 21e878ed89b00c90d50487a7877c225d0b9deadb | 41d8bc1a4082343a852953967906eedb31943e9c | refs/heads/master | 2021-01-01T16:58:20.563946 | 2017-07-21T15:59:32 | 2017-07-21T15:59:32 | 97,964,861 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,818 | py | # -*- coding: utf-8 -*-
from mysql.connector import MySQLConnection, Error, errorcode
from my_models import User, Course
class UserManager():
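    # NOTE: every method below ends by closing the cursor and connection, so a
    # fresh manager instance is required for each database operation.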
def __init__(self):
try:
self.conn = MySQLConnection(user='root',
password='1',
host='127.0.0.1',
database='Users')
self.cursor = self.conn.cursor()
# print(self.cursor._connection)
except Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err)
self.cursor.close()
self.conn.close()
def addUser(self, data):
user = User()
user.userName = data['userName']
user.userEmail = data['userEmail']
user.userPhone = data['userPhone']
user.userMobilePhone = data['userMobilePhone']
args = [user.userID, user.userName, user.userEmail,
user.userPhone, user.userMobilePhone]
self.cursor.callproc('add_user', args)
self.closeDB()
def getUsers(self, search, page, limit):
result = []
start = limit * (page - 1)
stop = limit * page
print([search, start, stop])
args = [search, start, stop]
self.cursor.callproc('select_users', args)
for data in self.cursor.stored_results():
for u in data.fetchall():
user = User()
user.usersID = u[0]
user.userName = u[1]
user.userEmail = u[2]
user.userPhone = u[3]
user.userMobilePhone = u[4]
user.userStatus = u[5]
user.courses = []
if u[6]:
for c in u[6].split('***'):
course = Course()
course.courseName = c
user.courses.append(course)
result.append(user)
self.closeDB()
return result
def getUser(self, id):
args = [id]
self.cursor.callproc('select_user', args)
for data in self.cursor.stored_results():
# print(data.fetchall())
u = data.fetchone()
user = User()
user.usersID = u[0]
user.userName = u[1]
user.userEmail = u[2]
user.userPhone = u[3]
user.userMobilePhone = u[4]
user.userStatus = u[5]
user.courses = []
if u[6]:
for c in u[6].split('***'):
course = Course()
course.courseName = c
user.courses.append(course)
self.closeDB()
return user
def deleteUser(self, id):
args = [id]
self.cursor.callproc('delete_user', args)
self.closeDB()
def updUsers(self, user):
args = [user.usersID, user.userEmail, user.userPhone, user.userMobilePhone, user.userStatus]
self.cursor.callproc('update_user', args)
        self.cursor.callproc('delete_course', [user.usersID])
if len(user.courses) <= 5:
            # map() is lazy in Python 3, so iterate explicitly
            for course in user.courses:
                self.addCourse(course)
self.closeDB()
def addCourse(self, course):
self.cursor.callproc('add_course', [course.coursesID])
self.closeDB()
def closeDB(self):
self.cursor.close()
self.conn.close()
class CourseManager():
def __init__(self):
try:
self.conn = MySQLConnection(user='root',
password='1',
host='127.0.0.1',
database='Users')
self.cursor = self.conn.cursor()
# print(self.cursor._connection)
except Error as err:
if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
print("Something is wrong with your user name or password")
elif err.errno == errorcode.ER_BAD_DB_ERROR:
print("Database does not exist")
else:
print(err)
self.cursor.close()
self.conn.close()
def closeDB(self):
self.cursor.close()
self.conn.close()
def getCourse(self):
result = []
self.cursor.callproc('select_course')
for data in self.cursor.stored_results():
for c in data.fetchall():
course = Course()
course.id = c[0]
course.courseName = c[1]
result.append(course)
self.closeDB()
return result
| [
"[email protected]"
] | |
c34084cec21ce2291bcfc29b492d4f5635079f8e | 4d28185e7a78a569f9a449f39f183cac3024f711 | /packages/Python/lldbsuite/test/lang/swift/generic_tuple/TestSwiftGenericTuple.py | 3cb865b97a5151804fb5618d1f0b55353219405a | [
"NCSA",
"Apache-2.0",
"LLVM-exception"
] | permissive | apple/swift-lldb | 2789bf44f648609a1674ee520ac20b64c95de072 | d74be846ef3e62de946df343e8c234bde93a8912 | refs/heads/stable | 2023-04-06T00:28:15.882479 | 2019-10-25T22:46:59 | 2019-10-25T22:46:59 | 44,838,862 | 780 | 291 | Apache-2.0 | 2020-01-10T19:28:43 | 2015-10-23T21:13:18 | C++ | UTF-8 | Python | false | false | 608 | py | # TestSwiftGenericTuple.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2018 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
import lldbsuite.test.lldbinline as lldbinline
from lldbsuite.test.decorators import *
lldbinline.MakeInlineTest(__file__, globals(), decorators=[swiftTest])
| [
"[email protected]"
] | |
34147064a2ee0e81851c6e2ceea0750229130269 | d6e74df012ee7c77b24231af3b1134b6fee1b778 | /djangonote/djangonote/settings.py | 283c62f93fa03cbccb50f3b3d917a73747fac1d0 | [] | no_license | akshatz/Backup | d3f48efd1b9a63b2640fd3606437d3566f8c26f8 | 6190d5cfb438bebbc7ea65622978c464be74107c | refs/heads/master | 2020-08-12T13:29:53.602743 | 2019-10-13T07:20:41 | 2019-10-13T07:20:41 | 214,775,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,340 | py | """
Django settings for djangonote project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'elf1f$gekcf=_7c#m)sh^55-6!12$@@9&$cw=8-rgq5a1j4eym'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'notes.apps.NotesConfig',
'django_extensions',
'users.apps.UsersConfig',
'crispy_forms',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'djangonote.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'djangonote.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
# STATIC_URL = '/static/djangonote/'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
LOGOUT_REDIRECT_URL = '/' | [
"[email protected]"
] | |
525272bb5554570bbbc59ad39b7c2bc86c8841b0 | f2ac8f55cf34520fa4c2a44f473b61c6d7a2eac0 | /Kaggle/Model-1(CNN)/Code/cnn_kaggle.py | b64d768a3ab69e3ef9f596eca7be32fac06ceef5 | [] | no_license | ProtikNag/Paintings-Art-Style-Classification-with-CNN | e9617db13e766a12ecf2b70f15ff303c5690773f | 5ac098426b0acd26092278ff52218a04900c5eab | refs/heads/master | 2020-06-03T09:51:31.176572 | 2019-07-09T09:24:15 | 2019-07-09T09:24:15 | 191,527,233 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,639 | py | # -*- coding: utf-8 -*-
"""CNN-Kaggle.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1Sar52JYTn18rOnhVnaiwGY5LZ0wcw73Q
# Connect to Google Drive
"""
#Step 1 part 1: connect to drive
!apt-get install -y -qq software-properties-common python-software-properties module-init-tools
!add-apt-repository -y ppa:alessandro-strada/ppa 2>&1 > /dev/null
!apt-get update -qq 2>&1 > /dev/null
!apt-get -y install -qq google-drive-ocamlfuse fuse
from google.colab import auth
auth.authenticate_user()
from oauth2client.client import GoogleCredentials
creds = GoogleCredentials.get_application_default()
import getpass
!google-drive-ocamlfuse -headless -id={creds.client_id} -secret={creds.client_secret} < /dev/null 2>&1 | grep URL
vcode = getpass.getpass()
!echo {vcode} | google-drive-ocamlfuse -headless -id={creds.client_id} -secret={creds.client_secret}
!mkdir -p drive
!google-drive-ocamlfuse drive
"""# CNN Model"""
import numpy as np
import pandas as pd
import os
import cv2
import glob
from os import listdir
from os.path import isfile, join
from os import walk
from random import sample
import pickle, cv2
import matplotlib.pyplot as plt
from keras.preprocessing.image import ImageDataGenerator
from sklearn.preprocessing import LabelEncoder
from keras.layers import Convolution2D
from keras.layers import MaxPooling2D
from keras import applications, optimizers
from keras.utils import np_utils
from keras.models import Sequential, Model
from keras.layers import Dropout, Flatten, Dense
from keras.models import model_from_json
from keras.models import load_model
from keras.utils import plot_model
import json
data_root = 'drive/ML-Kaggle/'
model = Sequential()
model.add(Convolution2D(filters=32, kernel_size=(3,3), input_shape=(224,224,3), activation = 'relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(Convolution2D(filters=32, kernel_size=(3,3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(Flatten())
model.add(Dense(units=512, activation='relu'))
model.add(Dense(units=512, activation='relu'))
model.add(Dense(units=128, activation='relu'))
model.add(Dropout(0.25))
model.add(Dense(units=3, activation = 'softmax'))
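# 3 output units, one per art style; flow_from_directory below assigns class
# indices alphabetically (0=Expressionism, 1=Impressionism, 2=Surrealism),
# which is the mapping the prediction code at the bottom relies on.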
sgd = optimizers.SGD(lr=0.001, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
train_datagen = ImageDataGenerator(rescale=1./255, shear_range=0.1, zoom_range=0.2, horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1./255)
train_data = '{}{}'.format(data_root,'training_set/')
test_data = '{}{}'.format(data_root,'test_set/')
training_set = train_datagen.flow_from_directory(train_data, target_size=(224,224), batch_size=32, class_mode='categorical')
test_set = test_datagen.flow_from_directory(test_data, target_size=(224,224), batch_size=32, class_mode='categorical')
classes = training_set.class_indices
print(classes)
print(training_set)
print(test_set)
model.fit_generator(training_set, samples_per_epoch=100, nb_epoch=30, validation_data=test_set, nb_val_samples=300)
plot_model(model, to_file='drive/ML-Kaggle/model.pdf')
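# NOTE: the next definition shadows the `load_model` imported from keras.models above.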
def load_model():
json_file = open('drive/ML-Kaggle/second_model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(loaded_model_json)
loaded_model.load_weights('drive/ML-Kaggle/second_model_weights.h5')
sgd = optimizers.SGD(lr=0.001, decay=1e-6, momentum=0.9, nesterov=True)
loaded_model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
return loaded_model
model = load_model()
history = model.fit_generator(training_set, samples_per_epoch=100, nb_epoch=15, validation_data=test_set, nb_val_samples=300)
save_model_path = '{}{}'.format(data_root,'second_model.json')
save_model_weight = '{}{}'.format(data_root,'second_model_weights.h5')
history_path = '{}{}'.format(data_root,'history_2.json')
open(save_model_path, 'w').close()
open(save_model_weight, 'w').close()
model_json = model.to_json()
with open(save_model_path, 'w') as json_file:
json_file.write(model_json)
model.save_weights(save_model_weight)
print('Model saved to disk!')
with open(history_path, 'w') as f:
json.dump(history.history, f)
print('History saved to disk!')
print(history.history.keys())
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
model = load_model()
history = model.fit_generator(training_set, samples_per_epoch=100, nb_epoch=10, validation_data=test_set, nb_val_samples=300)
save_model_path = '{}{}'.format(data_root,'third_model.json')
save_model_weight = '{}{}'.format(data_root,'third_model_weights.h5')
history_path = '{}{}'.format(data_root,'history_3.json')
open(save_model_path, 'w').close()
open(save_model_weight, 'w').close()
model_json = model.to_json()
with open(save_model_path, 'w') as json_file:
json_file.write(model_json)
model.save_weights(save_model_weight)
print('Model saved to disk!')
with open(history_path, 'w') as f:
json.dump(history.history, f)
print('History saved to disk!')
print(history.history.keys())
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.show()
"""# Predict"""
def load_model_to_predict():
json_file = open('drive/ML-Kaggle/third_model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(loaded_model_json)
loaded_model.load_weights('drive/ML-Kaggle/third_model_weights.h5')
sgd = optimizers.SGD(lr=0.001, decay=1e-6, momentum=0.9, nesterov=True)
loaded_model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
return loaded_model
model = load_model_to_predict()
def make_prediction(img_path):
img = cv2.imread(img_path, 1)
img = cv2.resize(img,(224,224))
img = np.reshape(img,[1,224,224,3])
preds = model.predict_classes(img)
return preds
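# NB: the generators above rescale pixels by 1/255 but make_prediction feeds raw
# 0-255 values; dividing img by 255.0 would keep train/predict inputs consistent.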
img_path_imp = 'drive/ML-Kaggle/UnseenData/Impressionism/'
imp = []
for root, dirs, files in os.walk(img_path_imp):
for filename in files:
imp.append(filename)
print("Impressionism\n")
results = []
for f in imp:
    img_file = img_path_imp + f  # avoid shadowing the built-in str()
    x = make_prediction(img_file)
if x==[0]:
results.append("Expressionism")
elif x==[1]:
results.append("Impressionism")
elif x==[2]:
results.append("Surrealism")
print(len(results))
count_im = 0
for i in range(0, len(results)):
if results[i] == "Impressionism":
count_im += 1
accuracy = count_im/len(results)
print(accuracy)
img_path_imp = 'drive/ML-Kaggle/UnseenData/Expressionism/'
imp = []
for root, dirs, files in os.walk(img_path_imp):
for filename in files:
imp.append(filename)
print("Expressionism\n")
results = []
for f in imp:
    img_file = img_path_imp + f
    x = make_prediction(img_file)
#print(x)
if x==[0]:
results.append("Expressionism")
elif x==[1]:
results.append("Impressionism")
elif x==[2]:
results.append("Surrealism")
print(len(results))
count_ex = 0
for i in range(0,len(results)):
if results[i] == "Expressionism":
count_ex += 1
accuracy = count_ex/len(results)
print(accuracy)
img_path_imp = 'drive/ML-Kaggle/UnseenData/Surrealism/'
imp = []
for root, dirs, files in os.walk(img_path_imp):
for filename in files:
imp.append(filename)
print("Surrealism\n")
results = []
for f in imp:
    img_file = img_path_imp + f
    x = make_prediction(img_file)
if x==[0]:
results.append("Expressionism")
elif x==[1]:
results.append("Impressionism")
elif x==[2]:
results.append("Surrealism")
print(len(results))
count_su = 0
for i in range(0,len(results)):
if results[i] == "Surrealism":
count_su += 1
accuracy = count_su/len(results)
print(accuracy)
final_accuracy = (count_im+count_ex+count_su)/(211+200+205)
print(final_accuracy)
| [
"[email protected]"
] | |
0918807e826d4a9e01fb6d35ea14ed6d70de45c5 | cbea18970ef812d8e4405707d1c86a9af0d593ff | /accurate_bg/cnn_ohio.py | 551715f37b20046392632d45b6a4945ab124c4db | [
"MIT"
] | permissive | manhhv87/AccurateBG | 17d531a836145a957d55655ac8a39420ed571739 | 379e52e2353ba73830b3bdc91f991090a627cd01 | refs/heads/main | 2023-07-14T03:08:27.268212 | 2021-08-23T16:41:52 | 2021-08-23T16:41:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,769 | py | import os
import numpy as np
import tensorflow as tf
from cgms_data_seg import CGMSDataSeg
# from multiprocessing import Pool
def regressor(
low_fid_data,
k_size,
nblock,
nn_size,
nn_layer,
learning_rate,
batch_size,
epoch,
beta,
loss_type,
outdir,
):
tf.compat.v1.reset_default_graph()
tf.compat.v1.disable_eager_execution()
sampling_horizon = low_fid_data.sampling_horizon
feature_size = 0
if low_fid_data.feature is not None:
feature_size = low_fid_data.feature[0].size
x = tf.compat.v1.placeholder(
tf.float32, [None, sampling_horizon + feature_size], name="x"
)
alpha = tf.Variable(tf.random.normal([], stddev=0.1))
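    # fixed sinusoidal positional pattern over the input window, scaled by the
    # learnable amplitude alpha (added to x below)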
p = tf.math.sin(tf.range(float(sampling_horizon + feature_size)))
y = x + alpha * p
assert k_size < sampling_horizon + feature_size
for _ in range(nblock):
x0 = tf.slice(y, [0, 0], [-1, 1])
x0s = tf.tile(x0, [1, k_size - 1])
xx = tf.concat([x0s, y], 1)
data = tf.reshape(xx, [-1, sampling_horizon + feature_size + k_size - 1, 1])
kernel1 = tf.Variable(tf.random.normal([k_size, 1, 1], stddev=0.1))
kernel2 = tf.Variable(tf.random.normal([k_size, 1, 1], stddev=0.1))
A = tf.squeeze(
tf.nn.conv1d(input=data, filters=kernel1, stride=1, padding="VALID")
)
B = tf.squeeze(
tf.nn.conv1d(input=data, filters=kernel2, stride=1, padding="VALID")
)
y = tf.math.multiply(A, tf.sigmoid(B)) + y
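        # gated convolution: A * sigmoid(B) acts as a GLU-style gate, with a
        # residual connection back to y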
# FNN
with tf.compat.v1.variable_scope("fnn"):
W = tf.Variable(
tf.random.normal([sampling_horizon + feature_size, nn_size], stddev=0.1),
name="W",
)
b = tf.Variable(tf.random.normal([nn_size], stddev=0.1), name="b")
y = tf.nn.relu(tf.tensordot(y, W, [[1], [0]]) + b)
for _ in range(nn_layer - 1):
W = tf.Variable(tf.random.normal([nn_size, nn_size], stddev=0.1), name="W")
b = tf.Variable(tf.random.normal([nn_size], stddev=0.1), name="b")
y = tf.nn.relu(tf.tensordot(y, W, [[1], [0]]) + b)
W = tf.Variable(
tf.random.normal([nn_size, sampling_horizon], stddev=0.1), name="W"
)
b = tf.Variable(tf.random.normal([], stddev=0.1), name="b")
y = tf.tensordot(y, W, [[1], [0]]) + b
y = tf.identity(y, name="y")
y_ = tf.compat.v1.placeholder(tf.float32, [None, sampling_horizon], name="y_")
weights = tf.compat.v1.placeholder(tf.float32, [sampling_horizon], name="weights")
assert loss_type in ["mse", "mape", "mae", "relative_mse", "rmse", "rmse+mae"]
if loss_type == "mse":
loss = tf.compat.v1.losses.mean_squared_error(
y_,
y,
weights=tf.expand_dims(weights, axis=0),
reduction=tf.compat.v1.losses.Reduction.MEAN,
)
elif loss_type == "mape":
loss = tf.compat.v1.keras.losses.MeanAbsolutePercentageError()(
y_[:, 0], y[:, -1]
)
elif loss_type == "mae":
loss = tf.compat.v1.keras.losses.MAE(y_[:, 0], y[:, -1])
elif loss_type == "relative_mse":
loss = tf.math.reduce_mean(
input_tensor=(y_[:, 0] - y[:, -1]) ** 2 / y_[:, 0] ** 2
)
elif loss_type == "rmse":
loss = tf.math.sqrt(
tf.math.reduce_mean(input_tensor=(y_[:, 0] - y[:, -1]) ** 2)
)
elif loss_type == "rmse+mae":
rmse_loss = tf.math.sqrt(
tf.math.reduce_mean(input_tensor=(y_[:, 0] - y[:, -1]) ** 2)
)
mae_loss = tf.compat.v1.losses.mean_squared_error(
y_,
y,
weights=tf.expand_dims(weights, axis=0),
reduction=tf.compat.v1.losses.Reduction.MEAN,
)
loss = rmse_loss + mae_loss
# add L2 regularization
L2_var = [
var
for var in tf.compat.v1.global_variables()
if ("fnn/W" in var.name or "fnn/b" in var.name) and "Adam" not in var.name
]
lossL2 = tf.math.add_n([tf.nn.l2_loss(v) for v in L2_var]) * beta
loss = tf.identity(loss + lossL2, name="loss")
train = tf.compat.v1.train.AdamOptimizer(learning_rate).minimize(loss)
new_train = tf.compat.v1.train.AdamOptimizer(learning_rate).minimize(
loss, var_list=L2_var
)
tf.compat.v1.add_to_collections("optimizer", train)
tf.compat.v1.add_to_collections("optimizer", new_train)
sess = tf.compat.v1.Session()
saver = tf.compat.v1.train.Saver()
sess.run(tf.compat.v1.global_variables_initializer())
for i in range(epoch):
for _ in range(int(low_fid_data.train_n / batch_size)):
d = low_fid_data.train_next_batch(batch_size)
sess.run(train, feed_dict={x: d[0], y_: d[1], weights: d[2]})
err = sess.run(loss, feed_dict={x: d[0], y_: d[1], weights: d[2]})
print("Epoch %d, train loss: %f" % (i, err))
saver.save(sess, os.path.join(outdir, "pretrain"))
def test_ckpt(high_fid_data, outdir):
tf.compat.v1.disable_eager_execution()
sess = tf.compat.v1.Session()
saver = tf.compat.v1.train.import_meta_graph(os.path.join(outdir, "pretrain.meta"))
saver.restore(sess, tf.train.latest_checkpoint(outdir))
graph = tf.compat.v1.get_default_graph()
x = graph.get_tensor_by_name("x:0")
weights = graph.get_tensor_by_name("weights:0")
loss = graph.get_tensor_by_name("loss:0")
y_ = graph.get_tensor_by_name("y_:0")
y = graph.get_tensor_by_name("y:0")
d = high_fid_data.test()
err = sess.run(loss, feed_dict={x: d[0], y_: d[1], weights: d[2]})
y_pred = sess.run(y, feed_dict={x: d[0]})
return err, np.vstack((d[1][:, 0], y_pred[:, -1])).T
def regressor_transfer(
train_dataset, test_dataset, batch_size, epoch, outdir, option=1
):
print("------------------in transfer----------------------")
"""
transfer learning:
1. reuse seq2seq and FNN weights and train both of them
2. reuse seq2seq and FNN weights and train FNN weights
3. reuse seq2seq weights, reinitialize FNN weights and train FNN only
other: return ErrorMessage
"""
tf.compat.v1.disable_eager_execution()
sess = tf.compat.v1.Session()
saver = tf.compat.v1.train.import_meta_graph(os.path.join(outdir, "pretrain.meta"))
saver.restore(sess, tf.train.latest_checkpoint(outdir))
graph = tf.compat.v1.get_default_graph()
x = graph.get_tensor_by_name("x:0")
weights = graph.get_tensor_by_name("weights:0")
loss = graph.get_tensor_by_name("loss:0")
y = graph.get_tensor_by_name("y:0")
y_ = graph.get_tensor_by_name("y_:0")
if option == 1:
optimizer = tf.compat.v1.get_collection("optimizer")[0]
elif option == 2:
optimizer = tf.compat.v1.get_collection("optimizer")[1]
elif option == 3:
optimizer = tf.compat.v1.get_collection("optimizer")[1]
var = tf.compat.v1.global_variables()
var_to_init = [
val
for val in var
if ("fnn/W" in val.name or "fnn/b" in val.name) and "Adam" not in val.name
]
epoch *= 3
sess.run(tf.compat.v1.variables_initializer(var_to_init))
else:
print("option not available, please assign 1 or 2 or 3 to option")
return
for i in range(epoch):
for _ in range(int(train_dataset.train_n / batch_size)):
d = train_dataset.train_next_batch(batch_size)
sess.run(optimizer, feed_dict={x: d[0], y_: d[1], weights: d[2]})
d = test_dataset.test()
err = sess.run(loss, feed_dict={x: d[0], y_: d[1], weights: d[2]})
print("Epoch %d, test loss: %f" % (i, err))
y_pred = sess.run(y, feed_dict={x: d[0]})
return err, np.vstack((d[1][:, 0], y_pred[:, -1])).T
| [
"[email protected]"
] | |
e2da18e1f25ce2aa8965c636a96204d735f88438 | d7bda270836a614f21b590abe6f679dbeb467147 | /test/test_radiusmessage.py | d1d072812fbd949b14a8c76186bd1e9989a68be2 | [] | no_license | craighagan/PythonRadius | 3e02561b1c7ada7e8a27ea1388afba602869bf1b | d657c2fb04a04330108ce3b8274a03c2157d67b5 | refs/heads/master | 2020-05-07T12:47:15.031580 | 2014-04-25T03:30:51 | 2014-04-25T03:30:51 | 19,129,017 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 10,397 | py | import os
import logging
import json
import re
import unittest
import tempfile
from nose.tools import assert_equal
from nose.tools import assert_not_equal
from nose.tools import assert_raises
import random
import json
import base64
from radiusconstants import *
from radiusmessage import *
def get_random_int(min, max, SET=[]):
random_int = random.randint(min, max)
if SET != []:
while random_int in SET:
random_int = random.randint(min, max)
return random_int
def get_random_string(len):
return ''.join( [chr(random.randint(0,255)) for i in xrange(0,len)] )
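# The helpers above shadow the built-ins min/max/len (harmless here) and produce
# raw byte-valued characters; they exercise length limits, not realistic payloads.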
class RadiusMessageTestcase(unittest.TestCase):
def test_setcode(self):
rm = RadiusMessage()
for code in VALID_RADIUS_CODES:
rm.code = code
assert rm.code == code
assert rm._getCode() == code
assert rm._code == code
        nr_tries = 10
for tries in range(0, nr_tries):
random_code = get_random_int(-MAX_CODE,MAX_CODE,VALID_RADIUS_CODES)
with self.assertRaises(ValueError):
rm.code = random_code
with self.assertRaises(ValueError):
rm.code = -1
with self.assertRaises(ValueError):
rm.code = 'hello'
def test_setid(self):
rm = RadiusMessage()
for identifier in range(0,MAX_ID):
rm.identifier = identifier
assert rm.identifier == identifier
assert rm._getId() == identifier
assert rm._id == identifier
        nr_tries = 10
for tries in range(0, nr_tries):
random_id = get_random_int(-MAX_ID,1000,range(0,MAX_ID))
with self.assertRaises(ValueError):
rm.identifier = random_id
with self.assertRaises(ValueError):
rm.identifier = -1
with self.assertRaises(ValueError):
rm.identifier = 'hello'
def test_setspecificattribute(self):
rm = RadiusMessage()
for attribute in VALID_RADIUS_ATTRIBUTES:
rm.setSpecificAttr(attribute,"test")
assert rm.getSpecificAttr(attribute) == "test"
        nr_tries = 10
for tries in range(0, nr_tries):
random_attribute = get_random_int(-MAX_CODE,MAX_CODE,VALID_RADIUS_ATTRIBUTES)
self.assertRaises(ValueError,lambda: rm.setSpecificAttr(random_attribute,"test"))
self.assertRaises(ValueError,lambda: rm.getSpecificAttr(random_attribute))
self.assertRaises(ValueError,lambda: rm.setSpecificAttr(-1,"test"))
self.assertRaises(ValueError,lambda: rm.getSpecificAttr(-1))
self.assertRaises(ValueError,lambda: rm.setSpecificAttr(1,get_random_string(MAX_ATTRIBUTE_LENGTH+1)))
self.assertRaises(ValueError,lambda: rm.setSpecificAttr('hello',get_random_string(MAX_ATTRIBUTE_LENGTH+1)))
self.assertRaises(ValueError,lambda: rm.getSpecificAttr('hello'))
def test_secret(self):
rm = RadiusMessage()
assert rm.secret is None
rm.secret=get_random_string(MAX_SECRET_LENGTH)
with self.assertRaises(ValueError):
rm.secret=get_random_string(MAX_SECRET_LENGTH+1)
def test_init_and_copy(self):
b64udp_packets = ['AUUALUCjDOwr0lhuEiMSNUwQ9OEBB3VzZXIxAhJptmuOeOyR3MTdofK42mlu', 'A0UAFHVAi08beo6vYi4E6Da3cVI=', 'AfsAOULqcG+v/xHDwCWvIRNxMXoBB3VzZXIxAhLAESO80gZLbHohVn2bK5w2BAZ/AAEBBQYAAAcU', 'AvsAFLnUDEiD72S+yAQFNWCTBN4=']
udp_packets = map(base64.b64decode,b64udp_packets)
rm = RadiusMessage()
assert len(rm) == 20
with self.assertRaises(ValueError):
rm = RadiusMessage(datagram=udp_packets[0],request_id=3)
rm = RadiusMessage(datagram=udp_packets[2])
assert len(rm) == 57
assert len(rm) == rm._len #make sure computed len == imported len
rm2 = rm.copy()
assert rm.code == RADIUS_ACCESS_REQUEST
assert rm.code == rm2.code
assert rm.identifier == rm2.identifier
assert rm.authenticator == rm2.authenticator
assert rm._attr_hash == rm2._attr_hash
assert len(rm) == len(rm2)
#make sure it is a clone, not a reference
rm2.code = RADIUS_ACCESS_REJECT
assert rm2.code == RADIUS_ACCESS_REJECT
assert rm.code == RADIUS_ACCESS_REQUEST
rm.username="testuser"
assert rm2.username != rm.username
with self.assertRaises(ValueError):
rm.username=get_random_string(MAX_ATTRIBUTE_LENGTH+1)
with self.assertRaises(ValueError):
rm.password="testuser"
rm.secret = 'testing123'
rm.password="testuser"
assert rm2.password != rm.username
assert rm.encryptedPassword != "testuser"
with self.assertRaises(ValueError):
rm.username=get_random_string(MAX_ATTRIBUTE_LENGTH+1)
def test_authenticator(self):
rm = RadiusMessage()
a1 = rm._generateAuthenticator()
a2 = rm._generateAuthenticator()
assert a1 != a2
assert len(a1) == MAX_AUTHENTICATOR_LENGTH
assert len(a2) == MAX_AUTHENTICATOR_LENGTH
rm.authenticator = get_random_string(MAX_AUTHENTICATOR_LENGTH)
with self.assertRaises(ValueError):
rm.authenticator = get_random_string(MAX_AUTHENTICATOR_LENGTH+1)
def test_healthcheck(self):
rm = RadiusMessage()
assert rm.isHealthCheckRequest() is False
rm.makeHealthCheckRequest()
assert rm.isHealthCheckRequest() is True
assert rm._attr_hash == {RADIUS_PROXY_STATE: "perform_healthcheck"}
rm.makeHealthCheckRequest("deep")
assert rm.isHealthCheckRequest() is True
assert rm._attr_hash == {RADIUS_PROXY_STATE: "deep_healthcheck"}
rm.makeHealthCheckRequest("shallow")
assert rm.isHealthCheckRequest() is True
assert rm._attr_hash == {RADIUS_PROXY_STATE: "shallow_healthcheck"}
def test_packets1(self):
#packets from sniffing some traffic, two requests
# request one: code1->server; code3->client (reject)
# request two: code1->server; code2->client (accept)
#testing to make sure we're correctly loading off the wire
b64udp_packets = ['AUUALUCjDOwr0lhuEiMSNUwQ9OEBB3VzZXIxAhJptmuOeOyR3MTdofK42mlu', 'A0UAFHVAi08beo6vYi4E6Da3cVI=', 'AfsAOULqcG+v/xHDwCWvIRNxMXoBB3VzZXIxAhLAESO80gZLbHohVn2bK5w2BAZ/AAEBBQYAAAcU', 'AvsAFLnUDEiD72S+yAQFNWCTBN4=']
udp_packets = map(base64.b64decode,b64udp_packets)
rms = []
rms.append(RadiusMessage(datagram=udp_packets[0]))
rms.append(RadiusResponse(datagram=udp_packets[1]))
rms.append(RadiusMessage(datagram=udp_packets[2]))
rms.append(RadiusResponse(datagram=udp_packets[3]))
#now check the first packet against known data
#i don't know the secret for the first two packets
assert rms[0].identifier == 69
assert rms[0].code == RADIUS_ACCESS_REQUEST
assert rms[0].encryptedPassword == 'i\xb6k\x8ex\xec\x91\xdc\xc4\xdd\xa1\xf2\xb8\xdain'
assert len(rms[0]) == rms[0]._len
assert rms[0].getAttrHash() == {1: 'user1', 2: 'i\xb6k\x8ex\xec\x91\xdc\xc4\xdd\xa1\xf2\xb8\xdain'}
self.assertRaises(AttributeError,lambda: rms[0].requestAuthenticator)
#this was a garbage password and secret, so.. decryption won't be that useful
#check the response
assert rms[1].identifier == 69
assert rms[1].code == RADIUS_ACCESS_REJECT
assert len(rms[1]) == rms[1]._len
# now check the sane two packets
assert rms[2].identifier == 251
assert rms[2].code == RADIUS_ACCESS_REQUEST
assert rms[2].encryptedPassword == '\xc0\x11#\xbc\xd2\x06Klz!V}\x9b+\x9c6'
assert rms[2].secret is None
assert len(rms[2]) == rms[2]._len
assert rms[2].getAttrHash() == {1: 'user1', 2: '\xc0\x11#\xbc\xd2\x06Klz!V}\x9b+\x9c6', 4: '\x7f\x00\x01\x01', 5: '\x00\x00\x07\x14'}
assert rms[2].getUDPDatagram() == udp_packets[2]
rms[2].secret = "testing123"
assert rms[2].secret == 'testing123'
assert rms[2].password == 'supersecret'
#verify response validation
assert rms[2].checkDatagram(udp_packets[3]) == True
#change secret, verify password is re-encrypted
#verify validation fails
#critical thing for proxy behavior
rms[2].secret = "testing456"
assert rms[2].checkDatagram(udp_packets[3]) == False
assert rms[2].encryptedPassword == '\x81z@04X\x90Xa3N\x92\x11!r\xcc'
assert rms[2].password == 'supersecret'
assert rms[2].getUDPDatagram() != udp_packets[2]
# and change back
rms[2].secret = "testing123"
assert rms[2].encryptedPassword == '\xc0\x11#\xbc\xd2\x06Klz!V}\x9b+\x9c6'
assert rms[2].password == 'supersecret'
assert rms[2].getUDPDatagram() == udp_packets[2]
#check response packet
assert rms[3].identifier == 251
assert rms[3].identifier == rms[2].identifier
assert rms[3].code == RADIUS_ACCESS_ACCEPT
assert len(rms[3]) == rms[3]._len
assert rms[3].getAttrHash() == {}
self.assertRaises(ValueError, lambda: rms[3].getUDPDatagram() == udp_packets[3])
rms[3].secret='testing123'
self.assertRaises(ValueError, lambda: rms[3].getUDPDatagram() == udp_packets[3])
rms[3].requestAuthenticator = rms[2].authenticator
assert rms[3].getUDPDatagram() == udp_packets[3]
assert rms[2].checkDatagram(rms[3].getUDPDatagram()) == True
assert rms[2].authenticator == rms[3].requestAuthenticator
#throw in a message back to the client, confirm that works
rms[3].setSpecificAttr(RADIUS_REPLY_MESSAGE,"message to user")
assert rms[3].getAttrHash() == {18: 'message to user'}
assert rms[3].getUDPDatagram() == '\x02\xfb\x00%L\xa0H\n\x9bq\xa5`9\x04\xef\xac\x8d1~\x11\x12\x11message to user'
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
4c0a86c3d86c492fd1261d2dc30831ac9d2d320e | 0857cd840d073d0ebd411be3910e456d2f9e0e74 | /commandGenerate.py | f5ef31d97fc53028d741db5711089a4a08ba5d62 | [] | no_license | jonkiky/RegressionTestSelection- | a9f766d6fb0338ccddb126b70a4009d90b594b2b | a2836f8174d9520bbf58427a594eb94f556b14bb | refs/heads/master | 2020-04-19T02:47:00.308861 | 2019-01-28T07:03:55 | 2019-01-28T07:03:55 | 167,914,630 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,460 | py | from subprocess import call
arry = ["cn.tf.bean.CartItemTest",
"cn.tf.bean.CartTest",
"cn.tf.commons.PageTest",
"cn.tf.controller.ClientServletTest",
"cn.tf.controller.LoginServletTest",
"cn.tf.controller.ManageServletTest",
"cn.tf.controller.PayServletTest",
"cn.tf.controller.ResponsePayServletTest",
"cn.tf.dao.BookDaoTest",
"cn.tf.dao.CategoryDaoTest",
"cn.tf.dao.OrderDaoTest",
"cn.tf.dao.PrivilegeDaoTest",
"cn.tf.dao.impl.BookDaoImplTest",
"cn.tf.dao.impl.CategoryDaoImplTest",
"cn.tf.dao.impl.CustomerDaoImplTest",
"cn.tf.dao.impl.OrderDaoImplTest",
"cn.tf.dao.impl.PrivilegeDaoImplTest",
"cn.tf.filter.CharseEncodingFilterTest",
"cn.tf.filter.GetHttpServletRequestTest",
"cn.tf.filter.PrivilegeFilterTest",
"cn.tf.service.BusinessServiceTest",
"cn.tf.service.PrivilegeServiceTest",
"cn.tf.service.impl.BusinessServiceImplTest",
"cn.tf.service.impl.PrivilegeServiceImplTest",
"cn.tf.utils.C3P0UtilTest",
"cn.tf.utils.ConstantTest",
"cn.tf.utils.OrderNumUtilTest",
"cn.tf.utils.PaymentUtilTest",
"cn.tf.utils.SendMailThreadTest",
"cn.tf.utils.WebUtilTest"]
import os
for name in arry:
baseCommand = "java -Xmx4g -cp"
baseCommand = baseCommand +" F:/experiment20170325Regression/BookStore-master2/WebRoot/WEB-INF/lib/*;"
baseCommand = baseCommand +"F:/experiment20170325Regression/BookStore-master2/target/classes/;"
baseCommand = baseCommand +"F:/experiment20170325Regression/BookStore-master2Test/bin;"
baseCommand = baseCommand +" daikon.Chicory --ppt-select-pattern=\"cn.tf\" --dtrace-file=\""+name[name.rindex(".")+1:len(name)]+".dtrace.gz\" "
baseCommand = baseCommand +name
baseCommand = baseCommand +"&&java -Xmx4g -cp F:/experiment20170325Regression/commons-lang-2.1.a/lib/daikon.jar; daikon.Daikon " +name[name.rindex(".")+1:len(name)]+".dtrace.gz"
baseCommand = baseCommand +"&&java -Xmx4g -cp F:/experiment20170325Regression/commons-lang-2.1.a/lib/daikon.jar; daikon.PrintInvariants "+name[name.rindex(".")+1:len(name)]+".inv.gz>>"
baseCommand = baseCommand +name[name.rindex(".")+1:len(name)]+".txt"
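    # A generated command chains three Daikon steps, roughly (paths abbreviated):
    #   java -cp <libs>;<classes>;<tests>; daikon.Chicory --ppt-select-pattern="cn.tf" \
    #       --dtrace-file="CartItemTest.dtrace.gz" cn.tf.bean.CartItemTest
    #   && java ... daikon.Daikon CartItemTest.dtrace.gz
    #   && java ... daikon.PrintInvariants CartItemTest.inv.gz >> CartItemTest.txt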
#print "\n"
#print baseCommand
#os.system(baseCommand)
arry2 =["org.apache.commons.lang.daikon.AbstractNestableTestCase",
"org.apache.commons.lang.daikon.AbstractRangeTest",
"org.apache.commons.lang.daikon.ArrayUtilsAddTest",
"org.apache.commons.lang.daikon.ArrayUtilsRemoveTest",
"org.apache.commons.lang.daikon.ArrayUtilsTest",
"org.apache.commons.lang.daikon.BitFieldTest",
"org.apache.commons.lang.daikon.BooleanUtilsTest",
"org.apache.commons.lang.daikon.CharEncodingTest",
"org.apache.commons.lang.daikon.CharRangeTest",
"org.apache.commons.lang.daikon.CharSetTest",
"org.apache.commons.lang.daikon.CharSetUtilsTest",
"org.apache.commons.lang.daikon.CharUtilsPerfTest",
"org.apache.commons.lang.daikon.CharUtilsTest",
"org.apache.commons.lang.daikon.ClassUtilsTest",
"org.apache.commons.lang.daikon.CompareToBuilderTest",
"org.apache.commons.lang.daikon.DateFormatUtilsTest",
"org.apache.commons.lang.daikon.DateUtilsTest",
"org.apache.commons.lang.daikon.DefaultToStringStyleTest",
"org.apache.commons.lang.daikon.DoubleRangeTest",
"org.apache.commons.lang.daikon.DurationFormatUtilsTest",
"org.apache.commons.lang.daikon.EntitiesPerformanceTest",
"org.apache.commons.lang.daikon.EntitiesTest",
"org.apache.commons.lang.daikon.EnumTestSuite",
"org.apache.commons.lang.daikon.EnumUtilsTest",
"org.apache.commons.lang.daikon.EqualsBuilderTest",
"org.apache.commons.lang.daikon.ExceptionTestSuite",
"org.apache.commons.lang.daikon.ExceptionUtilsTestCase",
"org.apache.commons.lang.daikon.FastDateFormatTest",
"org.apache.commons.lang.daikon.FloatRangeTest",
"org.apache.commons.lang.daikon.FractionTest",
"org.apache.commons.lang.daikon.HashCodeBuilderAndEqualsBuilderTest",
"org.apache.commons.lang.daikon.HashCodeBuilderTest",
"org.apache.commons.lang.daikon.IllegalClassExceptionTest",
"org.apache.commons.lang.daikon.IncompleteArgumentExceptionTest",
"org.apache.commons.lang.daikon.IntRangeTest",
"org.apache.commons.lang.daikon.LangTestSuite",
"org.apache.commons.lang.daikon.LongRangeTest",
"org.apache.commons.lang.daikon.MathTestSuite",
"org.apache.commons.lang.daikon.MultiLineToStringStyleTest",
"org.apache.commons.lang.daikon.MutableByteTest",
"org.apache.commons.lang.daikon.MutableDoubleTest",
"org.apache.commons.lang.daikon.MutableFloatTest",
"org.apache.commons.lang.daikon.MutableIntTest",
"org.apache.commons.lang.daikon.MutableLongTest",
"org.apache.commons.lang.daikon.MutableObjectTest",
"org.apache.commons.lang.daikon.MutableShortTest",
"org.apache.commons.lang.daikon.MutableTestSuite",
"org.apache.commons.lang.daikon.NestableDelegateTestCase",
"org.apache.commons.lang.daikon.NestableErrorTestCase",
"org.apache.commons.lang.daikon.NestableExceptionTestCase",
"org.apache.commons.lang.daikon.NestableRuntimeExceptionTestCase",
"org.apache.commons.lang.daikon.NoFieldNamesToStringStyleTest",
"org.apache.commons.lang.daikon.NotImplementedExceptionTest",
"org.apache.commons.lang.daikon.NullArgumentExceptionTest",
"org.apache.commons.lang.daikon.NumberRangeTest",
"org.apache.commons.lang.daikon.NumberUtilsTest",
"org.apache.commons.lang.daikon.ObjectUtilsTest",
"org.apache.commons.lang.daikon.RandomStringUtilsTest",
"org.apache.commons.lang.daikon.RandomUtilsTest",
"org.apache.commons.lang.daikon.SerializationUtilsTest",
"org.apache.commons.lang.daikon.ShortPrefixToStringStyleTest",
"org.apache.commons.lang.daikon.SimpleToStringStyleTest",
"org.apache.commons.lang.daikon.StandardToStringStyleTest",
"org.apache.commons.lang.daikon.StopWatchTest",
"org.apache.commons.lang.daikon.StringEscapeUtilsTest",
"org.apache.commons.lang.daikon.StringUtilsEqualsIndexOfTest",
"org.apache.commons.lang.daikon.StringUtilsIsTest",
"org.apache.commons.lang.daikon.StringUtilsSubstringTest",
"org.apache.commons.lang.daikon.StringUtilsTest",
"org.apache.commons.lang.daikon.StringUtilsTrimEmptyTest",
"org.apache.commons.lang.daikon.SystemUtilsTest",
"org.apache.commons.lang.daikon.ToStringBuilderTest",
"org.apache.commons.lang.daikon.ToStringStyleTest",
"org.apache.commons.lang.daikon.UnhandledExceptionTest",
"org.apache.commons.lang.daikon.ValidateTest",
"org.apache.commons.lang.daikon.ValuedEnumTest",
"org.apache.commons.lang.daikon.WordUtilsTest"]
for name in arry2:
baseCommand = "java -Xmx4g -cp"
baseCommand = baseCommand +" F:/experiment20170325Regression/commons-lang-2.1-src/commons-lang-2.1/lib/*;"
baseCommand = baseCommand +"F:/experiment20170325Regression/commons-lang-2.1.c/target/classes/;"
baseCommand = baseCommand +"F:/experiment20170325Regression/commons-lang-2.1.c/bin;"
baseCommand = baseCommand +" daikon.Chicory --ppt-select-pattern=\"org.apache.commons.lang\" --dtrace-file=\""+name[name.rindex(".")+1:len(name)]+".dtrace.gz\" "
baseCommand = baseCommand +name
baseCommand = baseCommand +"&&java -Xmx4g -cp F:/experiment20170325Regression/commons-lang-2.1.a/lib/daikon.jar; daikon.Daikon " +name[name.rindex(".")+1:len(name)]+".dtrace.gz"
baseCommand = baseCommand +"&&java -Xmx4g -cp F:/experiment20170325Regression/commons-lang-2.1.a/lib/daikon.jar; daikon.PrintInvariants "+name[name.rindex(".")+1:len(name)]+".inv.gz>>"
baseCommand = baseCommand +name[name.rindex(".")+1:len(name)]+".txt"
#print "\n"
#print baseCommand
#os.system(baseCommand)
#
# "org.apache.commons.collections4.daikon.AbstractArrayListTest",
# "org.apache.commons.collections4.daikon.AbstractBagTest",
# "org.apache.commons.collections4.daikon.AbstractBidiMapTest",
# "org.apache.commons.collections4.daikon.AbstractCollectionTest",
# "org.apache.commons.collections4.daikon.AbstractComparatorTest",
# "org.apache.commons.collections4.daikon.AbstractIterableMapTest",
# "org.apache.commons.collections4.daikon.AbstractIteratorTest",
# "org.apache.commons.collections4.daikon.AbstractLinkedListTest",
# "org.apache.commons.collections4.daikon.AbstractListIteratorTest",
# "org.apache.commons.collections4.daikon.AbstractListTest",
# "org.apache.commons.collections4.daikon.AbstractMapEntryTest",
# "org.apache.commons.collections4.daikon.AbstractMapIteratorTest",
# "org.apache.commons.collections4.daikon.AbstractMapTest",
# "org.apache.commons.collections4.daikon.AbstractNullComparatorTest",
# "org.apache.commons.collections4.daikon.AbstractObjectTest",
# "org.apache.commons.collections4.daikon.AbstractOrderedBidiMapDecoratorTest",
# "org.apache.commons.collections4.daikon.AbstractOrderedBidiMapTest",
# "org.apache.commons.collections4.daikon.AbstractOrderedMapIteratorTest",
# "org.apache.commons.collections4.daikon.AbstractOrderedMapTest",
# "org.apache.commons.collections4.daikon.AbstractQueueTest",
# "org.apache.commons.collections4.daikon.AbstractSetTest",
# "org.apache.commons.collections4.daikon.AbstractSortedBagTest",
# "org.apache.commons.collections4.daikon.AbstractSortedBidiMapTest",
# "org.apache.commons.collections4.daikon.AbstractSortedMapTest",
# "org.apache.commons.collections4.daikon.AbstractSortedSetTest",
# "org.apache.commons.collections4.daikon.AbstractTreeMapTest",
# "org.apache.commons.collections4.daikon.ArrayIterator2Test",
# "org.apache.commons.collections4.daikon.ArrayIteratorTest",
# "org.apache.commons.collections4.daikon.ArrayListIterator2Test",
# "org.apache.commons.collections4.daikon.ArrayListIteratorTest",
# "org.apache.commons.collections4.daikon.ArrayStackTest",
# "org.apache.commons.collections4.daikon.BagUtilsTest",
# "org.apache.commons.collections4.daikon.BooleanComparatorTest",
# "org.apache.commons.collections4.daikon.BulkTest",
# "org.apache.commons.collections4.daikon.CaseInsensitiveMapTest",
# "org.apache.commons.collections4.daikon.CircularFifoQueueTest",
# "org.apache.commons.collections4.daikon.ClosureUtilsTest",
# "org.apache.commons.collections4.daikon.CollatingIteratorTest",
# "org.apache.commons.collections4.daikon.CollectionBagTest",
# "org.apache.commons.collections4.daikon.CollectionSortedBagTest",
# "org.apache.commons.collections4.daikon.ComparableComparatorTest",
# "org.apache.commons.collections4.daikon.ComparatorChainTest",
# "org.apache.commons.collections4.daikon.CompositeCollectionTest",
# "org.apache.commons.collections4.daikon.CompositeMapTest",
# "org.apache.commons.collections4.daikon.CompositeSetTest",
# "org.apache.commons.collections4.daikon.CursorableLinkedListTest",
# "org.apache.commons.collections4.daikon.DefaultedMapTest",
# "org.apache.commons.collections4.daikon.DefaultKeyValueTest",
# "org.apache.commons.collections4.daikon.DefaultMapEntryTest",
# "org.apache.commons.collections4.daikon.DualHashBidiMapTest",
# "org.apache.commons.collections4.daikon.DualLinkedHashBidiMapTest",
# "org.apache.commons.collections4.daikon.DualTreeBidiMap2Test",
# "org.apache.commons.collections4.daikon.DualTreeBidiMapTest",
# "org.apache.commons.collections4.daikon.EnumerationUtilsTest",
# "org.apache.commons.collections4.daikon.FactoryUtilsTest",
# "org.apache.commons.collections4.daikon.FilterIteratorTest",
# "org.apache.commons.collections4.daikon.FilterListIteratorTest",
# "org.apache.commons.collections4.daikon.FixedOrderComparatorTest",
# "org.apache.commons.collections4.daikon.FixedSizeListTest",
# "org.apache.commons.collections4.daikon.FixedSizeMapTest",
# "org.apache.commons.collections4.daikon.FixedSizeSortedMapTest",
# "org.apache.commons.collections4.daikon.Flat3MapTest",
# "org.apache.commons.collections4.daikon.GrowthListTest",
# "org.apache.commons.collections4.daikon.HashBagTest",
# "org.apache.commons.collections4.daikon.HashedMapTest",
# "org.apache.commons.collections4.daikon.IndexedCollectionTest",
# "org.apache.commons.collections4.daikon.IteratorChainTest",
# "org.apache.commons.collections4.daikon.IteratorEnumerationTest",
# "org.apache.commons.collections4.daikon.IteratorIterableTest",
# "org.apache.commons.collections4.daikon.IteratorUtilsTest",
# "org.apache.commons.collections4.daikon.LazyIteratorChainTest",
# "org.apache.commons.collections4.daikon.LazyMapTest",
# "org.apache.commons.collections4.daikon.LazySortedMapTest",
# "org.apache.commons.collections4.daikon.LinkedMapTest",
# "org.apache.commons.collections4.daikon.ListIteratorWrapper2Test",
# "org.apache.commons.collections4.daikon.ListIteratorWrapperTest",
# "org.apache.commons.collections4.daikon.ListOrderedMap2Test",
# "org.apache.commons.collections4.daikon.ListOrderedMapTest",
# "org.apache.commons.collections4.daikon.ListOrderedSet2Test",
# "org.apache.commons.collections4.daikon.ListOrderedSetTest",
arry2 =[
"org.apache.commons.collections4.daikon.ListUtilsTest",
"org.apache.commons.collections4.daikon.LoopingIteratorTest",
"org.apache.commons.collections4.daikon.LoopingListIteratorTest",
"org.apache.commons.collections4.daikon.LRUMapTest",
"org.apache.commons.collections4.daikon.MapBackedSet2Test",
"org.apache.commons.collections4.daikon.MapBackedSetTest",
"org.apache.commons.collections4.daikon.MapUtilsTest",
"org.apache.commons.collections4.daikon.MultiKeyMapTest",
"org.apache.commons.collections4.daikon.MultiKeyTest",
"org.apache.commons.collections4.daikon.MultiValueMapTest",
"org.apache.commons.collections4.daikon.NodeCachingLinkedListTest",
"org.apache.commons.collections4.daikon.NodeListIteratorTest",
"org.apache.commons.collections4.daikon.ObjectArrayIteratorTest",
"org.apache.commons.collections4.daikon.ObjectArrayListIterator2Test",
"org.apache.commons.collections4.daikon.ObjectArrayListIteratorTest",
"org.apache.commons.collections4.daikon.ObjectGraphIteratorTest",
"org.apache.commons.collections4.daikon.PassiveExpiringMapTest",
"org.apache.commons.collections4.daikon.PatriciaTrie2Test",
"org.apache.commons.collections4.daikon.PatriciaTrieTest",
"org.apache.commons.collections4.daikon.PeekingIteratorTest",
"org.apache.commons.collections4.daikon.PermutationIteratorTest",
"org.apache.commons.collections4.daikon.PredicatedBagTest",
"org.apache.commons.collections4.daikon.PredicatedCollectionTest",
"org.apache.commons.collections4.daikon.PredicatedListTest",
"org.apache.commons.collections4.daikon.PredicatedMapTest",
"org.apache.commons.collections4.daikon.PredicatedQueueTest",
"org.apache.commons.collections4.daikon.PredicatedSetTest",
"org.apache.commons.collections4.daikon.PredicatedSortedBagTest",
"org.apache.commons.collections4.daikon.PredicatedSortedMapTest",
"org.apache.commons.collections4.daikon.PredicatedSortedSetTest",
"org.apache.commons.collections4.daikon.PushbackIteratorTest",
"org.apache.commons.collections4.daikon.QueueUtilsTest",
"org.apache.commons.collections4.daikon.ReferenceIdentityMapTest",
"org.apache.commons.collections4.daikon.ReferenceMapTest",
"org.apache.commons.collections4.daikon.ReverseComparatorTest",
"org.apache.commons.collections4.daikon.ReverseListIteratorTest",
"org.apache.commons.collections4.daikon.SetUniqueListTest",
"org.apache.commons.collections4.daikon.SetUtilsTest",
"org.apache.commons.collections4.daikon.SingletonIterator2Test",
"org.apache.commons.collections4.daikon.SingletonIteratorTest",
"org.apache.commons.collections4.daikon.SingletonListIteratorTest",
"org.apache.commons.collections4.daikon.SingletonMapTest",
"org.apache.commons.collections4.daikon.SplitMapUtilsTest",
"org.apache.commons.collections4.daikon.StaticBucketMapTest",
"org.apache.commons.collections4.daikon.SynchronizedBagTest",
"org.apache.commons.collections4.daikon.SynchronizedCollectionTest",
"org.apache.commons.collections4.daikon.TiedMapEntryTest",
"org.apache.commons.collections4.daikon.TransformedBagTest",
"org.apache.commons.collections4.daikon.TransformedCollectionTest",
"org.apache.commons.collections4.daikon.TransformedListTest",
"org.apache.commons.collections4.daikon.TransformedMapTest",
"org.apache.commons.collections4.daikon.TransformedQueueTest",
"org.apache.commons.collections4.daikon.TransformedSetTest",
"org.apache.commons.collections4.daikon.TransformedSortedBagTest",
"org.apache.commons.collections4.daikon.TransformedSortedMapTest",
"org.apache.commons.collections4.daikon.TransformedSortedSetTest",
"org.apache.commons.collections4.daikon.TransformedSplitMapTest",
"org.apache.commons.collections4.daikon.TransformerUtilsTest",
"org.apache.commons.collections4.daikon.TransformingComparatorTest",
"org.apache.commons.collections4.daikon.TreeBagTest",
"org.apache.commons.collections4.daikon.TreeBidiMapTest","org.apache.commons.collections4.daikon.TreeListTest"]
arry2 =[
"org.apache.commons.collections4.daikon.TrieUtilsTest",
"org.apache.commons.collections4.daikon.UniqueFilterIteratorTest",
"org.apache.commons.collections4.daikon.UnmodifiableBagTest",
"org.apache.commons.collections4.daikon.UnmodifiableBidiMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableBoundedCollectionTest",
"org.apache.commons.collections4.daikon.UnmodifiableCollectionTest",
"org.apache.commons.collections4.daikon.UnmodifiableIteratorTest",
"org.apache.commons.collections4.daikon.UnmodifiableListIteratorTest",
"org.apache.commons.collections4.daikon.UnmodifiableListTest",
"org.apache.commons.collections4.daikon.UnmodifiableMapEntryTest",
"org.apache.commons.collections4.daikon.UnmodifiableMapIteratorTest",
"org.apache.commons.collections4.daikon.UnmodifiableMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableOrderedBidiMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableOrderedMapIteratorTest",
"org.apache.commons.collections4.daikon.UnmodifiableOrderedMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableQueueTest",
"org.apache.commons.collections4.daikon.UnmodifiableSetTest",
"org.apache.commons.collections4.daikon.UnmodifiableSortedBagTest",
"org.apache.commons.collections4.daikon.UnmodifiableSortedBidiMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableSortedMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableSortedSetTest",
"org.apache.commons.collections4.daikon.UnmodifiableTrieTest"]
arry2=["org.apache.commons.validator.daikon.BaseCalendarValidatorTest",
"org.apache.commons.validator.daikon.BaseNumberValidatorTest",
"org.apache.commons.validator.daikon.BigDecimalValidatorTest",
"org.apache.commons.validator.daikon.BigIntegerValidatorTest",
"org.apache.commons.validator.daikon.ByteTest",
"org.apache.commons.validator.daikon.ByteValidatorTest",
"org.apache.commons.validator.daikon.CalendarValidatorTest",
"org.apache.commons.validator.daikon.CreditCardValidatorTest",
"org.apache.commons.validator.daikon.CurrencyValidatorTest",
"org.apache.commons.validator.daikon.DateTest",
"org.apache.commons.validator.daikon.DateValidatorTest",
"org.apache.commons.validator.daikon.DoubleTest",
"org.apache.commons.validator.daikon.DoubleValidatorTest",
"org.apache.commons.validator.daikon.EmailTest",
"org.apache.commons.validator.daikon.EntityImportTest",
"org.apache.commons.validator.daikon.ExceptionTest",
"org.apache.commons.validator.daikon.ExtensionTest",
"org.apache.commons.validator.daikon.FieldTest",
"org.apache.commons.validator.daikon.FlagsTest",
"org.apache.commons.validator.daikon.FloatTest",
"org.apache.commons.validator.daikon.FloatValidatorTest",
"org.apache.commons.validator.daikon.GenericValidatorTest",
"org.apache.commons.validator.daikon.IntegerTest",
"org.apache.commons.validator.daikon.IntegerValidatorTest",
"org.apache.commons.validator.daikon.ISBNValidatorTest",
"org.apache.commons.validator.daikon.LocaleTest",
"org.apache.commons.validator.daikon.LongTest",
"org.apache.commons.validator.daikon.LongValidatorTest",
"org.apache.commons.validator.daikon.MultipleConfigFilesTest",
"org.apache.commons.validator.daikon.MultipleTests",
"org.apache.commons.validator.daikon.NameBean",
"org.apache.commons.validator.daikon.PercentValidatorTest",
"org.apache.commons.validator.daikon.RequiredIfTest",
"org.apache.commons.validator.daikon.RequiredNameTest",
"org.apache.commons.validator.daikon.RetrieveFormTest",
"org.apache.commons.validator.daikon.RoutinesTestSuite",
"org.apache.commons.validator.daikon.ShortTest",
"org.apache.commons.validator.daikon.ShortValidatorTest",
"org.apache.commons.validator.daikon.TestCommon",
"org.apache.commons.validator.daikon.TestNumber",
"org.apache.commons.validator.daikon.TestTypeValidator",
"org.apache.commons.validator.daikon.TestValidator",
"org.apache.commons.validator.daikon.TimeValidatorTest",
"org.apache.commons.validator.daikon.TypeBean",
"org.apache.commons.validator.daikon.TypeTest",
"org.apache.commons.validator.daikon.UrlTest",
"org.apache.commons.validator.daikon.ValidatorResultsTest",
"org.apache.commons.validator.daikon.ValidatorTest",
"org.apache.commons.validator.daikon.ValidatorTestSuite",
"org.apache.commons.validator.daikon.ValueBean",
"org.apache.commons.validator.daikon.VarTest"]
arry2=[
"org.apache.commons.validator.daikon.EntityImportTest",
"org.apache.commons.validator.daikon.ExceptionTest",
"org.apache.commons.validator.daikon.ExtensionTest",
"org.apache.commons.validator.daikon.FieldTest",
"org.apache.commons.validator.daikon.FlagsTest",
"org.apache.commons.validator.daikon.FloatTest",
"org.apache.commons.validator.daikon.FloatValidatorTest",
"org.apache.commons.validator.daikon.GenericValidatorTest",
"org.apache.commons.validator.daikon.IntegerTest",
"org.apache.commons.validator.daikon.IntegerValidatorTest",
"org.apache.commons.validator.daikon.ISBNValidatorTest",
"org.apache.commons.validator.daikon.LocaleTest",
"org.apache.commons.validator.daikon.LongTest",
"org.apache.commons.validator.daikon.LongValidatorTest",
"org.apache.commons.validator.daikon.MultipleConfigFilesTest",
"org.apache.commons.validator.daikon.MultipleTests",
"org.apache.commons.validator.daikon.NameBean",
"org.apache.commons.validator.daikon.PercentValidatorTest",
"org.apache.commons.validator.daikon.RequiredIfTest",
"org.apache.commons.validator.daikon.RequiredNameTest",
"org.apache.commons.validator.daikon.RetrieveFormTest",
"org.apache.commons.validator.daikon.RoutinesTestSuite",
"org.apache.commons.validator.daikon.ShortTest",
"org.apache.commons.validator.daikon.ShortValidatorTest",
"org.apache.commons.validator.daikon.TestCommon",
"org.apache.commons.validator.daikon.TestNumber",
"org.apache.commons.validator.daikon.TestTypeValidator",
"org.apache.commons.validator.daikon.TestValidator",
"org.apache.commons.validator.daikon.TimeValidatorTest",
"org.apache.commons.validator.daikon.TypeBean",
"org.apache.commons.validator.daikon.TypeTest",
"org.apache.commons.validator.daikon.UrlTest",
"org.apache.commons.validator.daikon.ValidatorResultsTest",
"org.apache.commons.validator.daikon.ValidatorTest",
"org.apache.commons.validator.daikon.ValidatorTestSuite",
"org.apache.commons.validator.daikon.ValueBean",
"org.apache.commons.validator.daikon.VarTest"]
for name in arry2:
baseCommand = "java -Xmx4g -cp "
baseCommand = baseCommand +"F:/Regression2/lib/*;"
baseCommand = baseCommand +"F:\\Regression2\\commons-validator-1.3.0-src\\bin;"
baseCommand = baseCommand + "F:/experiment20170325Regression/commons-lang-2.1.a/lib/daikon.jar;"
baseCommand = baseCommand +" daikon.Chicory " \
" --dtrace-file=\""+name[name.rindex(".")+1:len(name)]+".dtrace.gz\" "
baseCommand = baseCommand +name
baseCommand = baseCommand +"&&java -Xmx4g -cp F:/experiment20170325Regression/commons-lang-2.1.a/lib/daikon.jar; daikon.Daikon " +name[name.rindex(".")+1:len(name)]+".dtrace.gz"
baseCommand = baseCommand +"&&java -Xmx4g -cp F:/experiment20170325Regression/commons-lang-2.1.a/lib/daikon.jar; daikon.PrintInvariants "+name[name.rindex(".")+1:len(name)]+".inv.gz>>"
baseCommand = baseCommand +name[name.rindex(".")+1:len(name)]+".txt"
    #print("\n")
    #print(baseCommand)
#os.system(baseCommand)
array3=["org.apache.commons.collections4.daikon.ArrayIterator2Test",
"org.apache.commons.collections4.daikon.ArrayIteratorTest",
"org.apache.commons.collections4.daikon.ArrayListIterator2Test",
"org.apache.commons.collections4.daikon.ArrayListIteratorTest",
"org.apache.commons.collections4.daikon.ArrayStackTest",
"org.apache.commons.collections4.daikon.BagUtilsTest",
"org.apache.commons.collections4.daikon.BooleanComparatorTest",
"org.apache.commons.collections4.daikon.BulkTest",
"org.apache.commons.collections4.daikon.CaseInsensitiveMapTest",
"org.apache.commons.collections4.daikon.CircularFifoQueueTest",
"org.apache.commons.collections4.daikon.ClosureUtilsTest",
"org.apache.commons.collections4.daikon.CollatingIteratorTest",
"org.apache.commons.collections4.daikon.CollectionBagTest",
"org.apache.commons.collections4.daikon.CollectionSortedBagTest",
"org.apache.commons.collections4.daikon.ComparableComparatorTest",
"org.apache.commons.collections4.daikon.ComparatorChainTest",
"org.apache.commons.collections4.daikon.CompositeCollectionTest",
"org.apache.commons.collections4.daikon.CompositeMapTest",
"org.apache.commons.collections4.daikon.CompositeSetTest",
"org.apache.commons.collections4.daikon.CursorableLinkedListTest",
"org.apache.commons.collections4.daikon.DefaultedMapTest",
"org.apache.commons.collections4.daikon.DefaultKeyValueTest",
"org.apache.commons.collections4.daikon.DefaultMapEntryTest",
"org.apache.commons.collections4.daikon.DualHashBidiMapTest",
"org.apache.commons.collections4.daikon.DualLinkedHashBidiMapTest",
"org.apache.commons.collections4.daikon.DualTreeBidiMap2Test",
"org.apache.commons.collections4.daikon.DualTreeBidiMapTest",
"org.apache.commons.collections4.daikon.EnumerationUtilsTest",
"org.apache.commons.collections4.daikon.FactoryUtilsTest",
"org.apache.commons.collections4.daikon.FilterIteratorTest",
"org.apache.commons.collections4.daikon.FilterListIteratorTest",
"org.apache.commons.collections4.daikon.FixedOrderComparatorTest",
"org.apache.commons.collections4.daikon.FixedSizeListTest",
"org.apache.commons.collections4.daikon.FixedSizeMapTest",
"org.apache.commons.collections4.daikon.FixedSizeSortedMapTest",
"org.apache.commons.collections4.daikon.Flat3MapTest",
"org.apache.commons.collections4.daikon.GrowthListTest",
"org.apache.commons.collections4.daikon.HashBagTest",
"org.apache.commons.collections4.daikon.HashedMapTest",
"org.apache.commons.collections4.daikon.IndexedCollectionTest",
"org.apache.commons.collections4.daikon.IteratorChainTest",
"org.apache.commons.collections4.daikon.IteratorEnumerationTest",
"org.apache.commons.collections4.daikon.IteratorIterableTest",
"org.apache.commons.collections4.daikon.IteratorUtilsTest",
"org.apache.commons.collections4.daikon.LazyIteratorChainTest",
"org.apache.commons.collections4.daikon.LazyMapTest",
"org.apache.commons.collections4.daikon.LazySortedMapTest",
"org.apache.commons.collections4.daikon.LinkedMapTest",
"org.apache.commons.collections4.daikon.ListIteratorWrapper2Test",
"org.apache.commons.collections4.daikon.ListIteratorWrapperTest",
"org.apache.commons.collections4.daikon.ListOrderedMap2Test",
"org.apache.commons.collections4.daikon.ListOrderedMapTest",
"org.apache.commons.collections4.daikon.ListOrderedSet2Test",
"org.apache.commons.collections4.daikon.ListOrderedSetTest",
"org.apache.commons.collections4.daikon.ListUtilsTest",
"org.apache.commons.collections4.daikon.LoopingIteratorTest",
"org.apache.commons.collections4.daikon.LoopingListIteratorTest",
"org.apache.commons.collections4.daikon.LRUMapTest",
"org.apache.commons.collections4.daikon.MapBackedSet2Test",
"org.apache.commons.collections4.daikon.MapBackedSetTest",
"org.apache.commons.collections4.daikon.MapUtilsTest",
"org.apache.commons.collections4.daikon.MultiKeyMapTest",
"org.apache.commons.collections4.daikon.MultiKeyTest",
"org.apache.commons.collections4.daikon.MultiValueMapTest",
"org.apache.commons.collections4.daikon.NodeCachingLinkedListTest",
"org.apache.commons.collections4.daikon.NodeListIteratorTest",
"org.apache.commons.collections4.daikon.ObjectArrayIteratorTest",
"org.apache.commons.collections4.daikon.ObjectArrayListIterator2Test",
"org.apache.commons.collections4.daikon.ObjectArrayListIteratorTest",
"org.apache.commons.collections4.daikon.ObjectGraphIteratorTest",
"org.apache.commons.collections4.daikon.PassiveExpiringMapTest",
"org.apache.commons.collections4.daikon.PatriciaTrie2Test",
"org.apache.commons.collections4.daikon.PatriciaTrieTest",
"org.apache.commons.collections4.daikon.PeekingIteratorTest",
"org.apache.commons.collections4.daikon.PermutationIteratorTest",
"org.apache.commons.collections4.daikon.PredicatedBagTest",
"org.apache.commons.collections4.daikon.PredicatedCollectionTest",
"org.apache.commons.collections4.daikon.PredicatedListTest",
"org.apache.commons.collections4.daikon.PredicatedMapTest",
"org.apache.commons.collections4.daikon.PredicatedQueueTest",
"org.apache.commons.collections4.daikon.PredicatedSetTest",
"org.apache.commons.collections4.daikon.PredicatedSortedBagTest",
"org.apache.commons.collections4.daikon.PredicatedSortedMapTest",
"org.apache.commons.collections4.daikon.PredicatedSortedSetTest",
"org.apache.commons.collections4.daikon.PushbackIteratorTest",
"org.apache.commons.collections4.daikon.QueueUtilsTest",
"org.apache.commons.collections4.daikon.ReferenceIdentityMapTest",
"org.apache.commons.collections4.daikon.ReferenceMapTest",
"org.apache.commons.collections4.daikon.ReverseComparatorTest",
"org.apache.commons.collections4.daikon.ReverseListIteratorTest",
"org.apache.commons.collections4.daikon.SetUniqueListTest",
"org.apache.commons.collections4.daikon.SetUtilsTest",
"org.apache.commons.collections4.daikon.SingletonIterator2Test",
"org.apache.commons.collections4.daikon.SingletonIteratorTest",
"org.apache.commons.collections4.daikon.SingletonListIteratorTest",
"org.apache.commons.collections4.daikon.SingletonMapTest",
"org.apache.commons.collections4.daikon.SplitMapUtilsTest",
"org.apache.commons.collections4.daikon.StaticBucketMapTest",
"org.apache.commons.collections4.daikon.SynchronizedBagTest",
"org.apache.commons.collections4.daikon.SynchronizedCollectionTest",
"org.apache.commons.collections4.daikon.TiedMapEntryTest",
"org.apache.commons.collections4.daikon.TransformedBagTest",
"org.apache.commons.collections4.daikon.TransformedCollectionTest",
"org.apache.commons.collections4.daikon.TransformedListTest",
"org.apache.commons.collections4.daikon.TransformedMapTest",
"org.apache.commons.collections4.daikon.TransformedQueueTest",
"org.apache.commons.collections4.daikon.TransformedSetTest",
"org.apache.commons.collections4.daikon.TransformedSortedBagTest",
"org.apache.commons.collections4.daikon.TransformedSortedMapTest",
"org.apache.commons.collections4.daikon.TransformedSortedSetTest",
"org.apache.commons.collections4.daikon.TransformedSplitMapTest",
"org.apache.commons.collections4.daikon.TransformerUtilsTest",
"org.apache.commons.collections4.daikon.TransformingComparatorTest",
"org.apache.commons.collections4.daikon.TreeBagTest",
"org.apache.commons.collections4.daikon.TreeBidiMapTest",
"org.apache.commons.collections4.daikon.TreeListTest",
"org.apache.commons.collections4.daikon.TrieUtilsTest",
"org.apache.commons.collections4.daikon.UniqueFilterIteratorTest",
"org.apache.commons.collections4.daikon.UnmodifiableBagTest",
"org.apache.commons.collections4.daikon.UnmodifiableBidiMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableBoundedCollectionTest",
"org.apache.commons.collections4.daikon.UnmodifiableCollectionTest",
"org.apache.commons.collections4.daikon.UnmodifiableIteratorTest",
"org.apache.commons.collections4.daikon.UnmodifiableListIteratorTest",
"org.apache.commons.collections4.daikon.UnmodifiableListTest",
"org.apache.commons.collections4.daikon.UnmodifiableMapEntryTest",
"org.apache.commons.collections4.daikon.UnmodifiableMapIteratorTest",
"org.apache.commons.collections4.daikon.UnmodifiableMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableOrderedBidiMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableOrderedMapIteratorTest",
"org.apache.commons.collections4.daikon.UnmodifiableOrderedMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableQueueTest",
"org.apache.commons.collections4.daikon.UnmodifiableSetTest",
"org.apache.commons.collections4.daikon.UnmodifiableSortedBagTest",
"org.apache.commons.collections4.daikon.UnmodifiableSortedBidiMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableSortedMapTest",
"org.apache.commons.collections4.daikon.UnmodifiableSortedSetTest",
"org.apache.commons.collections4.daikon.UnmodifiableTrieTest"]
for name in array3:
baseCommand = "java -Xmx4g -cp "
baseCommand = baseCommand +"F:/Regression2/lib/*;C:/User/jonkiky/Downloads/*;"
baseCommand = baseCommand +"F:\\experiment120161230\\commons-collections4\\target\\classes;F:\\experiment120161230\\commons-collections4\\target\\test-classes;"
baseCommand = baseCommand + "F:/experiment20170325Regression/commons-lang-2.1.a/lib/daikon.jar;"
baseCommand = baseCommand +" daikon.Chicory " \
" --dtrace-file=\""+name[name.rindex(".")+1:len(name)]+".dtrace.gz\" "
baseCommand = baseCommand +name
baseCommand = baseCommand +"&&java -Xmx4g -cp F:/experiment20170325Regression/commons-lang-2.1.a/lib/daikon.jar; daikon.Daikon " +name[name.rindex(".")+1:len(name)]+".dtrace.gz"
baseCommand = baseCommand +"&&java -Xmx4g -cp F:/experiment20170325Regression/commons-lang-2.1.a/lib/daikon.jar; daikon.PrintInvariants "+name[name.rindex(".")+1:len(name)]+".inv.gz>>"
baseCommand = baseCommand +name[name.rindex(".")+1:len(name)]+".txt"
print "\n"
print baseCommand
os.system(baseCommand)
| [
"[email protected]"
] | |
1398839d72afa87d9f8326f5f637ebdb440aeb5e | 17e744cce8f511cec7d337086aaf998659edf397 | /tests/unit/test_node.py | 6b46064aa16a2814e78c09f8e5220dfb3d58e8fc | [
"BSD-3-Clause"
] | permissive | GnaneshKunal/redisgraph-py | 80d77abdb492e8ac8d21b93a8467bf5b86ffec1d | e988cdd70ee8685e6d2a62f166036e2a04b77e94 | refs/heads/master | 2021-06-14T12:20:44.925745 | 2021-02-24T11:38:12 | 2021-02-24T11:38:12 | 189,179,170 | 0 | 0 | null | 2019-05-29T08:02:00 | 2019-05-29T08:02:00 | null | UTF-8 | Python | false | false | 2,117 | py | from redisgraph import node
from tests.utils import base
class TestNode(base.TestCase):
def setUp(self):
super().setUp()
self.no_args = node.Node()
self.no_props = node.Node(node_id=1, alias="alias", label="l")
self.props_only = node.Node(properties={"a": "a", "b": 10})
self.no_label = node.Node(node_id=1, alias="alias",
properties={"a": "a"})
def test_toString(self):
self.assertEqual(self.no_args.toString(), "")
self.assertEqual(self.no_props.toString(), "")
self.assertEqual(self.props_only.toString(), '{a:"a",b:10}')
self.assertEqual(self.no_label.toString(), '{a:"a"}')
def test_stringify(self):
self.assertEqual(str(self.no_args), "()")
self.assertEqual(str(self.no_props), "(alias:l)")
self.assertEqual(str(self.props_only), '({a:"a",b:10})')
self.assertEqual(str(self.no_label), '(alias{a:"a"})')
    def test_comparison(self):
self.assertEqual(node.Node(), node.Node())
self.assertEqual(node.Node(node_id=1), node.Node(node_id=1))
self.assertNotEqual(node.Node(node_id=1), node.Node(node_id=2))
self.assertEqual(node.Node(node_id=1, alias="a"),
node.Node(node_id=1, alias="b"))
self.assertEqual(node.Node(node_id=1, alias="a"),
node.Node(node_id=1, alias="a"))
self.assertEqual(node.Node(node_id=1, label="a"),
node.Node(node_id=1, label="a"))
self.assertNotEqual(node.Node(node_id=1, label="a"),
node.Node(node_id=1, label="b"))
self.assertEqual(node.Node(node_id=1, alias="a", label="l"),
node.Node(node_id=1, alias="a", label="l"))
self.assertNotEqual(node.Node(alias="a", label="l"),
node.Node(alias="a", label="l1"))
self.assertEqual(node.Node(properties={"a": 10}),
node.Node(properties={"a": 10}))
self.assertNotEqual(node.Node(), node.Node(properties={"a": 10}))
| [
"[email protected]"
] | |
089794d55234d8a1952611f007936d2d9699a213 | 694f8bddc62e5d01a2d5dc558ffdf65100b8c4fc | /app/charts/plot.py | c40406e3faf4e59e3fcead9008f4c3be23df4a00 | [] | no_license | SennaSemakula/Savings-visualiser | abdeb10ac8022117294410893db161452b6096c5 | 8c821a7b4e6cf9b1a6788b9803689bef5a324056 | refs/heads/master | 2020-03-31T02:37:50.108288 | 2018-10-14T17:49:59 | 2018-10-14T17:49:59 | 151,833,202 | 0 | 0 | null | 2018-10-27T17:51:10 | 2018-10-06T11:26:51 | Python | UTF-8 | Python | false | false | 1,197 | py | from . import graph
import doctest
import os
import plotly
import plotly.plotly as py
import plotly.graph_objs as go
class Plot(graph.Graph):
"""Advanced scatter plot graph implementation """
def __init__(self, data, title):
self.data = data
self.title = title
def draw_graph(self, x_axis, y_axis):
graph_data = go.Scatter(
x = x_axis,
y = y_axis,
mode = 'markers',
marker = dict(
size = 20,
color = 'rgba(152, 0, 0, .8)',
line = dict(
width = 2.5,
                color = 'rgb(0, 0, 0)')))
return [graph_data]
def generate_file(self, chart_data, filename, open_chart=False):
PATH = "{}/templates".format(os.getcwd())
if os.path.isdir(PATH) is False:
os.mkdir(PATH)
graph_html = plotly.offline.plot({
"data": chart_data,
"layout": go.Layout(title=self.title)},
filename="templates/{}".format(filename),
auto_open=open_chart)
return graph_html
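
# Minimal usage sketch (illustrative only; assumes a concrete graph.Graph base
# class and sample data):
#   p = Plot(data=None, title='Savings over time')
#   chart_data = p.draw_graph([1, 2, 3], [100.0, 250.0, 400.0])
#   p.generate_file(chart_data, 'savings.html')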
| [
"[email protected]"
] | |
462157a1a89b592f7d4281e5bb42b7f236dc1270 | c9ccfdae700387557cd5e214de4bfad70bfce1ff | /InvestmentMonitor/asgi.py | f92f78a759726230c4b756c08915efbfbff82bee | [
"MIT"
] | permissive | BrunoPivoto/Invest-Monitor1.1 | 2d01f23a4593d631ffed3f8426493d4a2497337c | 19ce6565a8fc4ff0c61c643435b8906645d0acb8 | refs/heads/main | 2023-06-08T13:14:32.951901 | 2021-06-27T23:25:25 | 2021-06-27T23:25:25 | 374,479,397 | 0 | 0 | MIT | 2021-06-27T23:25:26 | 2021-06-06T22:51:16 | Python | UTF-8 | Python | false | false | 411 | py | """
ASGI config for InvestmentMonitor project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'InvestmentMonitor.settings')
application = get_asgi_application()
| [
"[email protected]"
] | |
dc4a1dacd3a190a806a1bdb63ddfd46211bb4018 | 97b827ddf260dfc8a2725e66359e3625af84b7d1 | /src/ssm_document_generator/definition/mixins/stickytape_mixin.py | 8e79540f2e1028167c58782c0f2c81e5212f7e64 | [
"Apache-2.0"
] | permissive | ken2190/aws-systems-manager-document-generator | eeea79dbae67c0b12f0d50a8412de3e8293a0037 | 2c041fd52342d95da4535fe3236e43933cc6e08d | refs/heads/master | 2023-03-16T03:15:07.034439 | 2018-05-12T16:56:57 | 2018-11-04T12:26:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 890 | py | from tempfile import NamedTemporaryFile
import sys
import stickytape
class StickyTapeMixin:
"""
Adds functionality to combine several python modules into one. If your original code has dependencies that can be
found in add_python_paths - those dependencies would be merged into the result.
The point is to be able to write modular code, even though SSM documents are single-file.
"""
def __init__(self, add_python_paths=sys.path, *args, **kwargs):
super().__init__(*args, **kwargs)
self.add_python_paths = add_python_paths
def generate_commands(self):
commands = super().generate_commands()
with NamedTemporaryFile(mode='w') as temp_file:
temp_file.write('\n'.join(commands))
temp_file.flush()
return stickytape.script(temp_file.name, add_python_paths=self.add_python_paths).splitlines()
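
# Usage sketch (names are illustrative, not the package API): mix this class in
# ahead of a definition class whose generate_commands() returns Python source
# lines, and the combined script comes back as a single self-contained file.
#   class MyDefinition(StickyTapeMixin, SomeCommandDefinition):
#       pass
#   single_file_lines = MyDefinition().generate_commands()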
| [
"[email protected]"
] | |
e596609fdd6492eb7d8be737559ec86b1d983667 | 8688c88074a871f75a03a11a9fe878d4045edf33 | /legacy/test_None.py | db55b76bf9574b9f1913f695f3cb034258cc61b5 | [] | no_license | liminn/deep_image_matting | e958dd46fc61a0dc48491ccecad5420b2548a3b3 | b280ed12bde753ec8221c61fadeddf1ac54cce03 | refs/heads/master | 2020-05-19T17:36:13.338417 | 2019-05-08T09:28:49 | 2019-05-08T09:28:49 | 185,137,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,794 | py | import argparse
import cv2
import numpy as np
from model_None import build_encoder_decoder,build_refinement
if __name__ == '__main__':
# model
model_weights_path = 'models/final.42-0.0398_author.hdf5'
encoder_decoder = build_encoder_decoder()
model = build_refinement(encoder_decoder)
model.load_weights(model_weights_path,by_name = True)
model.summary()
# image
image_path = "test_results_None/3_image.png"
trimap_path = "test_results_None/3_trimap.png"
img_bgr = cv2.imread(image_path)
trimap = cv2.imread(trimap_path, 0)
# cv2.imshow("image_bgr",img_bgr)
# cv2.imshow("trimap",trimap)
# cv2.waitKey(0)
# real input size
img_rows, img_cols = 1375,579
print("original shape: {}".format((img_rows,img_cols)))
img_rows_final = int(np.floor(img_rows/32.)*32)
img_cols_final = int(np.floor(img_cols/32.)*32)
input_shape = (img_rows_final,img_cols_final)
print("final shape: {}".format(input_shape))
img_bgr = cv2.resize(img_bgr,(input_shape[1],input_shape[0]),cv2.INTER_CUBIC)
trimap = cv2.resize(trimap,(input_shape[1],input_shape[0]),cv2.INTER_NEAREST)
# cv2.imshow("image_bgr",img_bgr)
# cv2.imshow("trimap",trimap)
# cv2.waitKey(0)
# x_test
x_test = np.empty((1, input_shape[0], input_shape[1], 4), dtype=np.float32)
x_test[0, :, :, 0:3] = img_bgr / 255.
x_test[0, :, :, 3] = trimap / 255.
# predict
    # out: (1, rows, cols, 1), values in 0~1
out = model.predict(x_test)
print(out.shape)
out = np.reshape(out, (out.shape[1],out.shape[2],out.shape[3]))
print(out.shape)
out = out * 255.0
out = out.astype(np.uint8)
# save
save_path = "test_results_None/3_"+str(input_shape[0])+"x"+str(input_shape[1])+".png"
cv2.imwrite(save_path, out)
| [
"[email protected]"
] | |
31fbc00f9b6ac9fa12193baad339be0975f877cd | ab28e2cfd8c2475b5f2b361095c6dcfec6418392 | /manage.py | d2f49f112c85fac41e933d58be1b4b451c67a19b | [] | no_license | jonerra/stock_app | 89bd10276e636e6b35dd268d5886bcbd07db48e9 | ee8c40c50e4c767a985e82f9da1f470a1f935df6 | refs/heads/master | 2021-01-10T09:19:22.066846 | 2015-12-01T18:09:35 | 2015-12-01T18:09:35 | 45,061,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "stock_app.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
820940d089213e7f72a477e24c7b2034c5d58ad4 | 4009ab8072698c97556c033ab09cc5ff4465ef44 | /venv/bin/flask | 886b14db2496f1e6ce8c2edca35ca1b2e0cdb0d2 | [] | no_license | Joyer0099/myblog | c6452393cd6c8f3a6adea25bab66b898fa03df04 | 572a472ecf724bec5b4395c20d5466e79f3fab64 | refs/heads/master | 2020-05-16T22:53:30.237823 | 2019-04-25T03:30:47 | 2019-04-25T03:30:47 | 183,349,216 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | #!/Users/Joyer/Documents/Django/myblog/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from flask.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
f363be6d2c535c94f8e0236e65f7658c398957fd | f903445786f43421e8a543585053862875d1d480 | /spider.py | e7fa311bfacdec5283dde00f83e925f4d6e0b05d | [] | no_license | Andchenn/WZRY | 3f997503f33c89b4dd41d88c7baa41d0c9e42be8 | a066c69ebb4f64c395b90fdf5419ac6e655bebbb | refs/heads/master | 2020-04-08T16:22:23.541904 | 2018-11-30T10:56:41 | 2018-11-30T10:56:41 | 159,515,523 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,791 | py | """
Scrape skins for Honor of Kings (王者荣耀)
"""
import requests
from urllib import parse
from bs4 import BeautifulSoup
import os
class Skin(object):
def __init__(self):
        # JSON data listing all of the heroes
self.hero_url = 'https://pvp.qq.com/web201605/js/herolist.json'
        # common URL prefix for hero detail pages
self.base_url = 'https://pvp.qq.com/web201605/herodetail/'
        # URL suffix for the current hero's detail page
self.detail_url = ''
        # folder where the skin images are saved
self.img_folder = 'skin'
        # common URL prefix for skin images
self.skin_url = 'https://game.gtimg.cn/images/yxzj/img201606/skin/hero-info/'
        # URL suffix for the current skin image
self.skin_detail_url = ''
def get_hero(self):
"""获取英雄的json数据"""
request = requests.get(self.hero_url)
hero_list = request.json()
return hero_list
def get_hero_skin(self, hero_name, hero_no):
"""获取详细页英雄皮肤展示的信息,并爬图"""
url = parse.urljoin(self.base_url, self.detail_url)
request = requests.get(url)
request.encoding = 'gbk'
html = request.text
        # node that carries the skin information
soup = BeautifulSoup(html, 'lxml')
skip_list = soup.select('.pic-pf-list3')
for skin_info in skip_list:
            # skin names live in the data-imgname attribute, separated by '|'
img_names = skin_info.attrs['data-imgname']
name_list = img_names.split('|')
skin_no = 1
            # download each skin image in turn
for skin_name in name_list:
self.skin_detail_url = '%s/%s-bigskin-%s.jpg' % (hero_no, hero_no, skin_no)
skin_no += 1
img_name = hero_name + '-' + skin_name + '.jpg'
self.download_skin(img_name)
def download_skin(self, img_name):
"""下载皮肤图片"""
img_url = parse.urljoin(self.skin_url, self.skin_detail_url)
request = requests.get(img_url)
if request.status_code == 200:
print('download-%s' % img_name)
img_path = os.path.join(self.img_folder, img_name)
with open(img_path, 'wb')as img:
img.write(request.content)
else:
print('img error!')
def make_folder(self):
"""创建图片存储文件夹"""
if not os.path.exists(self.img_folder):
os.mkdir(self.img_folder)
def run(self):
"""脚本执行入口"""
self.make_folder()
hero_list = self.get_hero()
for hero in hero_list:
hero_no = str(hero['ename'])
self.detail_url = hero_no + '.shtml'
hero_name = hero['cname']
self.get_hero_skin(hero_name, hero_no)
if __name__ == '__main__':
skin = Skin()
skin.run()
| [
"[email protected]"
] | |
a17c972f8a76d1cd5c1564834b6a69d586195c85 | e54214031e565c489edd4ceaadbfd2f58c66ea5f | /tianyancha/geetest验证码本地架设网址demo/demo/flask_demo/start.py | 682973170762702f6bcb9935830032447247b69a | [] | no_license | ziakkk/tianyancha_project | 372e2c06710517c1a1dfa2da3c7c948ae5d29bc4 | 6ef44ae6ef94e8ca3716e5de6b805e42b9dbb794 | refs/heads/master | 2021-01-01T18:10:16.349268 | 2017-07-06T02:20:12 | 2017-07-06T02:20:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,263 | py | #coding:utf-8
import json
from flask import session, make_response, Flask, request, render_template
from geetest import GeetestLib
pc_geetest_id = "b46d1900d0a894591916ea94ea91bd2c"
pc_geetest_key = "36fc3fe98530eea08dfc6ce76e3d24c4"
mobile_geetest_id = "7c25da6fe21944cfe507d2f9876775a9"
mobile_geetest_key = "f5883f4ee3bd4fa8caec67941de1b903"
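# Geetest handshake, in brief: the /register endpoints pre-process a challenge for
# the client-side widget; the /validate endpoints then verify the returned
# challenge/validate/seccode triple on the server.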
app = Flask(__name__)
app.config.update(
DEBUG=True,
)
@app.route('/pc-geetest/register', methods=["GET"])
def get_pc_captcha():
user_id = 'test'
gt = GeetestLib(pc_geetest_id, pc_geetest_key)
status = gt.pre_process(user_id)
session[gt.GT_STATUS_SESSION_KEY] = status
session["user_id"] = user_id
response_str = gt.get_response_str()
return response_str
@app.route('/mobile-geetest/register', methods=["GET"])
def get_mobile_captcha():
user_id = 'test'
gt = GeetestLib(mobile_geetest_id, mobile_geetest_key)
status = gt.pre_process(user_id)
session[gt.GT_STATUS_SESSION_KEY] = status
session["user_id"] = user_id
response_str = gt.get_response_str()
return response_str
@app.route('/pc-geetest/validate', methods=["POST"])
def pc_validate_captcha():
gt = GeetestLib(pc_geetest_id, pc_geetest_key)
challenge = request.form[gt.FN_CHALLENGE]
validate = request.form[gt.FN_VALIDATE]
seccode = request.form[gt.FN_SECCODE]
status = session[gt.GT_STATUS_SESSION_KEY]
user_id = session["user_id"]
if status:
result = gt.success_validate(challenge, validate, seccode, user_id)
else:
result = gt.failback_validate(challenge, validate, seccode)
result = "<html><body><h1>登录成功</h1></body></html>" if result else "<html><body><h1>登录失败</h1></body></html>"
return result
@app.route('/pc-geetest/ajax_validate', methods=["POST"])
def pc_ajax_validate():
gt = GeetestLib(pc_geetest_id,pc_geetest_key)
challenge = request.form[gt.FN_CHALLENGE]
validate = request.form[gt.FN_VALIDATE]
seccode = request.form[gt.FN_SECCODE]
status = session[gt.GT_STATUS_SESSION_KEY]
user_id = session["user_id"]
if status:
result = gt.success_validate(challenge, validate, seccode, user_id,data='',userinfo='')
else:
result = gt.failback_validate(challenge, validate, seccode)
result = {"status":"success"} if result else {"status":"fail"}
return json.dumps(result)
@app.route('/mobile-geetest/ajax_validate', methods=["POST"])
def mobile_ajax_validate():
gt = GeetestLib(mobile_geetest_id,mobile_geetest_key)
challenge = request.form[gt.FN_CHALLENGE]
validate = request.form[gt.FN_VALIDATE]
seccode = request.form[gt.FN_SECCODE]
status = session[gt.GT_STATUS_SESSION_KEY]
user_id = session["user_id"]
if status:
result = gt.success_validate(challenge, validate, seccode, user_id,data='',userinfo='')
else:
result = gt.failback_validate(challenge, validate, seccode)
result = {"status":"success"} if result else {"status":"fail"}
return json.dumps(result)
@app.route('/')
def login():
return render_template('login.html')
if __name__ == '__main__':
app.secret_key = 'i-like-python-nmba'
app.run()
| [
"[email protected]"
] | |
1910c08d3ec9f7a073edcf52ee5b3b98893e5ad5 | e5a5e11e44b168020e4be78feda984620eec84b5 | /projectScripts/BandGap/tPA_acenes/createGeometries/acenes/dumindaCyclesFIXED/qtp00/acene8/submit.py | c8decf683e9f7111456b880ab86a7979fa9f0a88 | [] | no_license | zww-4855/General_Programs | 0541a4467b5c1d4e7c88cc35a93b9d3e5bd76b78 | 792002371faf8568050c56a937c1605c464fa239 | refs/heads/master | 2023-01-04T11:56:26.713924 | 2020-11-03T05:07:20 | 2020-11-03T05:07:20 | 271,626,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,782 | py | #!/usr/bin/env python
#
# Author: Qiming Sun <[email protected]>
#
from pyscf import gto
from pyscf import scf
from pyscf import lib
'''
Density fitting, enabled by decorating the scf object with the scf.density_fit
function. There is no flag that switches the program to density fitting for the
two-electron integrals; instead, you decorate an existing scf object with
scf.density_fit.
NOTE: scf.density_fit generates a new object, which works exactly the same way
as the regular scf method. The density-fitting scf object is independent of the
regular scf object being decorated, so density fitting can be applied anytime,
anywhere in your script without affecting the existing scf object.
See also:
examples/df/00-with_df.py
examples/df/01-auxbasis.py
'''
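# Minimal density-fitting sketch (illustrative; not executed by this script):
#   mf_df = scf.density_fit(scf.RHF(mol))  # decorate a regular SCF object
#   mf_df.kernel()                         # run RHF with density-fitted integrals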
lib.num_threads(44)
mol = gto.Mole()
mol.build(
verbose = 7,
atom = '''
H 0.000000 1.232560 -1.577103
H 0.000000 -1.232560 -1.577103
C 0.000000 0.697583 -0.650443
C 0.000000 -0.697583 -0.650443
C 0.000000 -1.395038 0.557442
C 0.000000 1.395038 0.557442
H 0.000000 -2.494718 0.557442
H 0.000000 2.494718 0.557442
C 0.0000 0.697583 1.765326
C 0.0000 -0.697583 1.765326
C 0.0000 -1.395038 2.973211
C 0.0000 1.395038 2.973211
H 0.0000 -2.494718 2.973211
H 0.0000 2.494718 2.973211
C 0.0000 0.697583 4.181095
C 0.0000 -0.697583 4.181095
C 0.0000 -1.395038 5.38898
C 0.0000 1.395038 5.38898
H 0.0000 -2.494718 5.38898
H 0.0000 2.494718 5.38898
C 0.0000 0.697583 6.596864
C 0.0000 -0.697583 6.596864
C 0.0000 -1.395038 7.804749
C 0.0000 1.395038 7.804749
H 0.0000 -2.494718 7.804749
H 0.0000 2.494718 7.804749
C 0.0000 0.697583 9.012633
C 0.0000 -0.697583 9.012633
C 0.0000 -1.395038 10.220518
C 0.0000 1.395038 10.220518
H 0.0000 -2.494718 10.220518
H 0.0000 2.494718 10.220518
C 0.0000 0.697583 11.428402
C 0.0000 -0.697583 11.428402
C 0.0000 -1.395038 12.636287
C 0.0000 1.395038 12.636287
H 0.0000 -2.494718 12.636287
H 0.0000 2.494718 12.636287
C 0.0000 0.697583 13.844171
C 0.0000 -0.697583 13.844171
C 0.0000 -1.395038 15.052056
C 0.0000 1.395038 15.052056
H 0.0000 -2.494718 15.052056
H 0.0000 2.494718 15.052056
C 0.0000 0.697583 16.25994
C 0.0000 -0.697583 16.25994
C 0.0000 -1.395038 17.467825
C 0.0000 1.395038 17.467825
H 0.0000 -2.494718 17.467825
H 0.0000 2.494718 17.467825
C 0.0000 0.69758346 18.675709
C 0.0000 -0.69758346 18.675709
H 0.0000 1.2325599 19.602356
H 0.0000 -1.2325599 19.602356
''',
unit='Angstrom',
basis = 'ccpvtz',
max_memory=10000,
)
#
# By default an optimal auxiliary basis (if available) or even-tempered Gaussian
# functions are used as the fitting basis. Assign with_df.auxbasis to change
# the fitting basis.
#
mol.spin = 0
mol.charge = 0
mol.build(0, 0)
mf = scf.RKS(mol)
mf.xc='cam-b3lyp'
mf.define_xc_('0.91*LR_HF(0.29) + 0.54*SR_HF(0.29)+0.37*ITYH +0.09*B88, 0.80*LYP + .20*VWN','GGA', 0.91,(0.29,0.91, -0.37))
energy = mf.kernel()
mos=mf.mo_energy
print(mos)
print('mo occ: ',mf.mo_occ)
print('total num of electrons: ', sum(mf.mo_occ))
homoIndex=int(sum(mf.mo_occ)/2 - 1)
lumoIndex=homoIndex+1
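# HOMO-LUMO gap: mo_energy is in Hartree, so multiply by 27.2114 eV/Hartree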
bandGap=(mos[lumoIndex] - mos[homoIndex])*27.2114
print('index: ', homoIndex, lumoIndex)
print('homo E:',mos[homoIndex])
print('lumo E:', mos[lumoIndex])
print('band Gap (eV):', bandGap)
| [
"[email protected]"
] | |
acd40a1353e684cc02afbd337edd65e8d9d59493 | b656c527a0a6bcb8df45512e202d1607fe33638e | /migrations/versions/0322_broadcast_service_perm.py | 21997bc3e47eacf7ba74bc921906f4c7644cf2f3 | [
"MIT"
] | permissive | alphagov/notifications-api | fb6d0b6b952f0e2e8c98776be9adf836cce85c54 | b4c5be42583ef6d6cd004e8c24acaf810d86f65c | refs/heads/main | 2023-09-01T08:58:52.353509 | 2023-08-31T15:00:31 | 2023-08-31T15:00:31 | 46,422,101 | 61 | 33 | MIT | 2023-09-14T14:18:19 | 2015-11-18T13:57:17 | Python | UTF-8 | Python | false | false | 516 | py | """
Revision ID: 0322_broadcast_service_perm
Revises: 0321_drop_postage_constraints
Create Date: 2020-06-29 11:14:13.183683
"""
from alembic import op
revision = "0322_broadcast_service_perm"
down_revision = "0321_drop_postage_constraints"
def upgrade():
op.execute("INSERT INTO service_permission_types VALUES ('broadcast')")
def downgrade():
op.execute("DELETE FROM service_permissions WHERE permission = 'broadcast'")
op.execute("DELETE FROM service_permission_types WHERE name = 'broadcast'")
| [
"[email protected]"
] | |
369ef5905bcb862ee7334e10fdb50ce1ad709143 | b3d3460bd1551b0b8fbc191a323d20341629acee | /RestPy/SampleScripts/connectToExistingConfig.py | 69501fc34d187358989ee7ab5a16fc0639805421 | [
"MIT"
] | permissive | sar772004/IxNetwork | 0a0cf8f732541df3397e630121829ecd8b836400 | 8c1226c9d958823737365414f8b025cfd6bf48b0 | refs/heads/master | 2020-08-21T08:34:57.896307 | 2019-10-18T01:54:03 | 2019-10-18T01:54:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,355 | py | """
connectToExistingConfig.py
Connecting to an existing session.
- For Windows, the default session ID=1.
- For Windows Connection Mgr, session ID=?
- For Linux, there are two ways to login:
1> Login with username/password
2> Use the API-Key instead.
    - Then provide the Session ID to connect to.
Supports IxNetwork API servers:
- Windows, Windows Connection Mgr and Linux
Requirements
- IxNetwork 8.50
- RestPy version 1.0.33
- Python 2.7 and 3+
- pip install requests
- pip install -U --no-cache-dir ixnetwork_restpy
RestPy Doc:
https://www.openixia.github.io/ixnetwork_restpy
"""
import os, sys, time, traceback
# Import the RestPy module
from ixnetwork_restpy.testplatform.testplatform import TestPlatform
from ixnetwork_restpy.files import Files
from ixnetwork_restpy.assistants.statistics.statviewassistant import StatViewAssistant
apiServerIp = '192.168.70.3'
# windows|connection_manager|linux
osPlatform = 'windows'
# For Linux API server only
username = 'admin'
password = 'admin'
try:
testPlatform = TestPlatform(apiServerIp, log_file_name='restpy.log')
# Console output verbosity: 'none'|request|request_response
testPlatform.Trace = 'request_response'
if osPlatform == 'linux':
        # There are two ways to log in to an existing Linux session:
# 1> Provide the login account username and password
testPlatform.Authenticate(username, password)
        # Two ways to locate a session to connect to: either by the session ID or by a name you assigned.
#session = testPlatform.Sessions.find(Id=4)
session = testPlatform.Sessions.find(Name='devTest')
        # 2> Or use the API-Key instead. The API-Key can be retrieved from the Linux API server under
# settings, My Account.
#testPlatform.ApiKey = '604348999bc34d028043347f713e49ce'
#session = testPlatform.Sessions.find(Id=4)
if osPlatform in ['windows', 'connection_manager']:
        # Windows supports only one session; its Id is always 1.
session = testPlatform.Sessions.find(Id=1)
# ixNetwork is the root object to the IxNetwork API tree.
ixNetwork = session.Ixnetwork
except Exception as errMsg:
print('\nError: %s' % traceback.format_exc())
print('\nrestPy.Exception:', errMsg)
| [
"[email protected]"
] | |
912ff041c6fdadf6ef1d413233456628249095ee | 31cc3d3cb3928ea9fb703946968f46213e70f785 | /blog/migrations/0010_remove_article_slug.py | 643ad2923544e659645207704d31d27d10745d82 | [] | no_license | moukhlisou/myproject002 | 65a05a9fd0d3cfada74cb727723311a88286baf2 | 5dc744ca36732840e5751486d856f98c7c8d09c5 | refs/heads/master | 2020-04-05T04:40:16.897299 | 2018-11-07T14:45:01 | 2018-11-07T14:45:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 324 | py | # Generated by Django 2.1.2 on 2018-11-02 10:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('blog', '0009_auto_20181102_1004'),
]
operations = [
migrations.RemoveField(
model_name='article',
name='slug',
),
]
| [
"[email protected]"
] | |
1cd3c183ff0340586552ac5a679281e1bec155e4 | 5456396d7138895321da4e8aac0bcbd172652a48 | /articles/views.py | 6518ae401884ce370884a09ab14f30cbcf4fbb4f | [] | no_license | Mustafaryad1/djangonautic | f0ff7552cfd01e52d362d8f4b989da0545bbab0e | 928fdf419ed9c978619ac2ad5d8bfd829afd830e | refs/heads/master | 2021-04-06T09:41:21.894708 | 2018-05-05T08:43:23 | 2018-05-05T08:43:23 | 124,810,773 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,526 | py | from django.shortcuts import render, redirect
from .models import Article
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from .forms import CreateArticle
from django.http import Http404, HttpResponse # to raise 404 page not found
# Create your views here.
def article_list(request):
articles = Article.objects.all().order_by('date')
return render(request, 'articles\\article_list.html', {'articles': articles})
def article_details(request, slug):
article = Article.objects.get(slug=slug)
return render(request, 'articles/article_details.html', {'article': article})
def user_articles(request, author):
user = User.objects.get(username=author)
user_data = Article.objects.filter(author=user.pk)
if user_data:
user = user_data[0].author.username
return render(request, 'articles/user_articles.html', {'articles': user_data, 'user': user})
else:
        raise Http404('Page not found')
# return HttpResponse('hello world {}'.format(author))
@login_required(login_url='/accounts/login')
def article_create(request):
if request.method == 'POST':
form = CreateArticle(request.POST, request.FILES)
if form.is_valid():
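            # Save with commit=False so the logged-in user can be attached
            # as the article's author before writing to the database.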
instance = form.save(commit=False)
instance.author = request.user
instance.save()
return redirect('articles:list')
else:
form = CreateArticle()
return render(request, 'articles/article_create.html', {'form': form})
| [
"[email protected]"
] | |
57949227124101eb09ec13c3918b518bd5693c14 | 1974bab0ee3cda08b22464cdccee226d30a34ced | /setup.py | 8e673f4b3286ff3184fcf67afad43b00aae80d38 | [
"MIT"
] | permissive | jan-hajny-netsmile/safrs | 96a7b540e70cca98a00f0806615040a7043a41c2 | 7470f7fbc05a81be74666dbd1ce729276f2e1792 | refs/heads/master | 2021-07-01T12:08:48.966531 | 2017-09-22T05:30:47 | 2017-09-22T05:30:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 669 | py | from distutils.core import setup
setup(
name = 'safrs',
packages = ['safrs'],
version = '0.1.1',
description = 'safrs : SqlAlchemy Flask-Restful Swagger2',
author = 'Thomas Pollet',
author_email = '[email protected]',
url = 'https://github.com/thomaxxl/safrs',
download_url = 'https://github.com/thomaxxl/safrs/archive/0.1.tar.gz',
keywords = ['SqlAlchemy', 'Flask', 'REST', 'Swagger'],
python_requires='>=2.6, !=3.0.*, !=3.1.*, !=3.2.*, <4',
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Framework :: Flask',
],
)
| [
"[email protected]"
] | |
9ff3b63c56ef58dc9711f812f84b3a2d77013f91 | cd13eb3e56f6cc89f47f55917bf1dd6b0807b586 | /lgb_util.py | 30294aad43a91c293c01e27889d17e852c8e1ae7 | [] | no_license | mahaoyang/MovieLens | f43e2b47c50c7d0140255207eac0c2d25a681d40 | 734c12a41c95f880b2ddb19b256dac83fac18dc4 | refs/heads/master | 2020-07-11T19:55:55.798820 | 2020-05-09T03:55:54 | 2020-05-09T03:55:54 | 204,632,072 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 441 | py | #!/usr/bin/python3
# -*- encoding: utf-8 -*-
import re
import math
def trans_publish_years(s):
    s = re.findall(r'\d', s)
return ''.join(s)[-4:]
def trans_genres(genre, genres_length, genres):
ger = [0] * genres_length
for g in genre.strip().split('|'):
ger[genres[g]] = 1
return ger
def sigmoid(x):
    return 1 / (1 + math.exp(-x))
if __name__ == '__main__':
print(trans_publish_years('drrg 4 hfgh 2646'))
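    # Illustrative genre->index map (hypothetical; real maps come from the dataset)
    genres = {'Action': 0, 'Comedy': 1, 'Drama': 2, 'Thriller': 3, 'Romance': 4}
    print(trans_genres('Action|Comedy', len(genres), genres))  # -> [1, 1, 0, 0, 0]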
| [
"mahydxygitlab"
] | mahydxygitlab |
fdc259e99744b2ffb97919d7dd540c5359414d35 | 195afe3ecd8cab082350c52e3a9d8bbf37d3c762 | /funcs/backUteis/uteis.py | 765192fe850e990424eceb290fbe03704e17103d | [] | no_license | rfzorzi/glacX2020 | faa87dd7244df4dd80f1664c81d49fe260425ff9 | 911254a1a80e3f00040e3ad7ba0c745238229444 | refs/heads/master | 2023-07-29T09:25:03.688481 | 2021-09-02T14:28:15 | 2021-09-02T14:28:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,687 | py | from tkinter import *
from tkinter import messagebox
import shutil
import webbrowser
class Functions:
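    # The moeda* helpers format a float as Brazilian currency, swapping the
    # decimal point for a comma, e.g. moedaTotaliza(12.5) -> 'R$   12,50'.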
def moedaTotaliza(self, totalizador=0, moeda='R$'):
return f'{moeda}{totalizador:>8.2f}'.replace('.',',')
def moedaTotalizador(self, totalizador=0, moeda='R$'):
return f'{moeda}{totalizador:>8.2f}'.replace('.',',')
    def moedaTotal1(self, total1=0, moeda='R$'):
        return f'{moeda}{total1:>8.2f}'.replace('.', ',')
    def moedaTotal2(self, total1=0):
        return f'{total1:>8.2f}'.replace('.', ',')
def atualiza_listaServProd(self):
id_orc = self.listaNumOrc.get()
cod_item1 = self.codServ1.get()
desc_item1 = self.listaCol2a.get()
valor1 = self.listaCol4a.get()
quant1 = self.listaCol3a.get()
total1 = self.listaCol5a.get()
self.conecta_Glac()
self.listaServProd.delete(*self.listaServProd.get_children())
lista = self.cursor.execute("""
SELECT ordem_item, desc_item, cod_item, valor, quant, total
FROM orcamento2 WHERE id_orc2 = '%s' """ % id_orc)
rows = self.cursor.fetchall()
for row in rows:
self.listaServProd.insert("", END, values=row)
self.desconecta_Glac()
def altera_itens_orc_quant2(self):
valor = self.listaCol3a.get()
quant = self.listaCol4a.get()
total = self.listaCol5a.get()
valor = float(valor)
quant = float(quant)
self.listaCol5a.delete('0', 'end')
soma = valor * quant
soma = float(f'{soma:>8.2f}')
self.listaCol5a.insert(END, soma)
def total_orc(self):
self.entradatotal.delete('0', 'end')
id_orc = self.listaNumOrc.get()
totalizador = self.entradatotal.get()
if id_orc == '':
msg = 'Não é possivel calcular o Valor Total se nenhum '
msg+= 'Orçamento ou Ordem de Serviço estiver selecionada.'
messagebox.showerror("GLAC ", msg)
else:
self.conecta_Glac()
self.cursor.execute("""SELECT SUM(total) FROM orcamento2
WHERE id_orc2 = '%s'""" % id_orc)
buscaNumItem = self.cursor.fetchall()
for i in buscaNumItem:
i = str(i)
i = i.replace('(', '')
i = i.replace(')', '')
i = i.replace("'", "")
i = i.replace(",", "h")
i = i.replace("h", "")
i = i.replace("R$", "")
i = float(i)
self.entradatotal.insert(END, self.moedaTotaliza(i))
print(i)
self.desconecta_Glac()
def abre_orc(self):
self.listaNumOrc.delete('0', 'end')
id_orc1 = self.listaNumOrc.get()
numeroorcamento = self.listaNumOrc.get()
cliente_orc = self.entradaCod_cli.get()
placa_orc = self.placa.get()
dia = self.entradaDataorc.get()
mes = self.entradaDataorc2.get()
ano = self.entradaDataorc3.get()
descp1 = self.area1.get()
descp2 = self.area2.get()
descp3 = self.area3.get()
descp4 = self.area4.get()
totalizador = self.entradatotal.get()
km = self.entradaObs.get()
tecnico = self.entradaTecnico.get()
tipoOrc = self.Tipvar.get()
comp1 = self.listInicio.get()
comp2 = self.listFim.get()
self.conecta_Glac()
self.cursor.execute("""
INSERT INTO orcamento1 ( cliente_orc, placa_orc, descp1, descp2,
descp3, descp4, dia, mes, ano, tecnico, totalizador, tipoOrc,
km, comp1, comp2)
VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
(cliente_orc, placa_orc, descp1, descp2, descp3, descp4,
dia, mes, ano, tecnico, totalizador, tipoOrc, km, comp1,
comp2))
self.conn.commit()
numeroorc = self.cursor
numeroorc.execute("""SELECT MAX(id_orc1) FROM orcamento1""")
buscanomecli = self.cursor.fetchall()
for i in buscanomecli:
self.listaNumOrc.insert(0, i)
        # orcamento2 (order line-item) variables
id_orc2 = self.listaNumOrc.get()
cod_item1 = self.codServ1.get()
desc_item1 = self.listaCol2a.get()
###
valor1 = self.listaCol4a.get()
quant1 = self.listaCol3a.get()
total1 = self.listaCol5a.get()
################
        # vistoria (vehicle inspection) variables
codVist = self.listaNumOrc.get()
tanque = self.are1.get()
odometro = self.are2.get()
radio = self.are3.get()
calota = self.are4.get()
triangulo = self.are5.get()
macaco = self.are6.get()
estepe = self.are7.get()
obs1 = self.are8.get()
obs2 = self.are9.get()
self.cursor.execute("""
INSERT INTO vistoria ( cod, vist1, vist2, vist3, vist4, vist5,
vist6, vist7, vist8, vist9)
VALUES ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
(codVist, tanque, radio, odometro, calota, triangulo,
macaco, estepe, obs1, obs2))
self.conn.commit()
self.desconecta_Glac()
self.total_orc()
msg = "Orçamento gravado com sucesso.\n "
msg += ""
messagebox.showinfo("GLAC - Orçamento", msg)
def altera_orc(self):
id_orc1 = self.listaNumOrc.get()
cliente_orc = self.entradaCod_cli.get()
placa_orc = self.placa.get()
dia = self.entradaDataorc.get()
mes = self.entradaDataorc2.get()
ano = self.entradaDataorc3.get()
descp1 = self.area1.get()
descp2 = self.area2.get()
descp3 = self.area3.get()
descp4 = self.area4.get()
totalizador = self.entradatotal.get()
km = self.entradaObs.get()
tecnico = self.entradaTecnico.get()
tipoOrc = self.Tipvar.get()
comp1 = self.listInicio.get()
comp2 = self.listFim.get()
self.conecta_Glac()
self.cursor.execute("""
UPDATE orcamento1 SET id_orc1 = ?, cliente_orc = ?, placa_orc = ?, dia = ?,
mes = ?, ano = ?, descp1 = ?, descp2 = ?, descp3 = ?, descp4 = ?, totalizador = ?, km = ?,
tecnico = ?, tipoOrc = ? , comp1 = ?, comp2 = ? WHERE id_orc1 = ?""",
(id_orc1, cliente_orc, placa_orc, dia, mes, ano, descp1, descp2, descp3,
descp4, totalizador, km, tecnico, tipoOrc, comp1, comp2, id_orc1))
self.conn.commit()
################
        # vistoria (vehicle inspection) variables
cod = self.listaNumOrc.get()
tanque = self.are1.get()
odometro = self.are2.get()
radio = self.are3.get()
calota = self.are4.get()
triangulo = self.are5.get()
macaco = self.are6.get()
estepe = self.are7.get()
obs1 = self.are8.get()
obs2 = self.are9.get()
self.cursor.execute("""
UPDATE vistoria SET vist1 = ?, vist2 = ?, vist3 = ?, vist4 = ?, vist5 = ?,
vist6 = ? , vist7 = ?, vist8 = ?, vist9 = ? WHERE cod = ? """,
(tanque, radio, odometro, calota, triangulo, macaco, estepe, obs1, obs2, cod))
self.conn.commit()
self.desconecta_Glac()
self.total_orc()
msg = "Alterações realizadas com sucesso.\n "
msg += ""
messagebox.showinfo("GLAC - Orçamento", msg)
def buscanomeorc(self):
self.listaNomeO.insert(END, '%')
self.listaServ.delete((*self.listaServ.get_children()))
nomeO = self.listaNomeO.get()
self.conecta_Glac()
nom = self.cursor
nom.execute(
"""SELECT id_orc1, nome ,dia , mes , ano, placa_orc, orcamento1.tipoOrc FROM orcamento1, clientes WHERE cod_cli = cliente_orc AND nome LIKE '%s' ORDER BY id_orc1 DESC""" % nomeO)
buscanomeO = self.cursor.fetchall()
for row in buscanomeO:
self.listaServ.insert("", END, values=row)
self.listaNomeO.delete(0, END)
self.desconecta_Glac()
def buscaplacaorc(self):
self.listaPlaca.insert(END, '%')
self.listaServ.delete(*self.listaServ.get_children())
placaO = self.listaPlaca.get()
self.conecta_Glac()
plac = self.cursor
plac.execute(
"""SELECT id_orc1, nome, dia , mes , ano, placa_orc, orcamento1.tipoOrc FROM orcamento1, clientes WHERE cod_cli = cliente_orc AND placa_orc LIKE '%s'""" % placaO)
buscaplac = self.cursor.fetchall()
for row in buscaplac:
self.listaServ.insert("", END, values=row)
self.listaPlaca.delete(0, END)
self.desconecta_Glac()
def carrega_orc(self, event):
self.limpa_cliente()
self.entradaDataorc.delete('0', 'end')
self.entradaDataorc2.delete('0', 'end')
self.entradaDataorc3.delete('0', 'end')
self.entradatotal.delete('0', 'end')
self.listaNumOrc.delete('0', 'end')
self.entradaTecnico.delete('0', 'end')
self.listaServ.selection()
self.conecta_Glac()
for n in self.listaServ.selection():
col1, col2, col3, col4, col5, col6, col7 = self.listaServ.item(n,
'values')
self.listaNumOrc.insert(0, col1)
id_orc = self.listaNumOrc.get()
nomecur = self.cursor
nomecur.execute("SELECT cliente_orc FROM orcamento1 "
"WHERE id_orc1 = '%s'" % id_orc)
consultanome = self.cursor.fetchall()
for i in consultanome:
self.entradaCod_cli.insert(0, i)
self.desconecta_Glac()
self.carrega_cliente()
self.conecta_Glac()
self.entradaCod_aut.delete('0', 'end')
nomeplaca = self.cursor
nomeplaca.execute("SELECT placa_orc FROM orcamento1 "
"WHERE id_orc1 = '%s'" % id_orc)
consultaplaca = self.cursor.fetchall()
for i in consultaplaca:
self.placa.insert(0, i)
nomedescp1 = self.cursor
nomedescp1.execute("SELECT descp1 FROM orcamento1 "
"WHERE id_orc1 = '%s'" % id_orc)
consultap1 = self.cursor.fetchall()
for i in consultap1:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.area1.insert(0, i)
nomedescp2 = self.cursor
nomedescp2.execute("SELECT descp2 FROM orcamento1 "
"WHERE id_orc1 = '%s'" % id_orc)
consultap2 = self.cursor.fetchall()
for i in consultap2:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.area2.insert(0, i)
nomedescp3 = self.cursor
nomedescp3.execute("SELECT descp3 FROM orcamento1 "
"WHERE id_orc1 = '%s'" % id_orc)
consultap3 = self.cursor.fetchall()
for i in consultap3:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.area3.insert(0, i)
nomedescp4 = self.cursor
nomedescp4.execute("SELECT descp4 FROM orcamento1 "
"WHERE id_orc1 = '%s'" % id_orc)
consultap4 = self.cursor.fetchall()
for i in consultap4:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.area4.insert(0, i)
self.entradaDataorc.delete('0', 'end')
nomedia = self.cursor
nomedia.execute("SELECT dia FROM orcamento1 "
"WHERE id_orc1 = '%s'" % id_orc)
consultadia = self.cursor.fetchall()
for i in consultadia:
self.entradaDataorc.insert(0, i)
self.entradaDataorc2.delete('0', 'end')
nomemes = self.cursor
nomemes.execute("SELECT mes FROM orcamento1 WHERE id_orc1 = '%s'" % id_orc)
consultames = self.cursor.fetchall()
for i in consultames:
self.entradaDataorc2.insert(0, i)
self.entradaDataorc3.delete('0', 'end')
nomeano = self.cursor
nomeano.execute("SELECT ano FROM orcamento1 WHERE id_orc1 = '%s'" % id_orc)
consultaano = self.cursor.fetchall()
for i in consultaano:
self.entradaDataorc3.insert(0, i)
nometotal = self.cursor
nometotal.execute("SELECT totalizador FROM orcamento1 "
"WHERE id_orc1 = '%s'" % id_orc)
consultatotal = self.cursor.fetchall()
for i in consultatotal:
self.entradatotal.insert(0, i)
nomekm = self.cursor
nomekm.execute("SELECT km FROM orcamento1 WHERE id_orc1 = '%s'" % id_orc)
consultakm = self.cursor.fetchall()
for i in consultakm:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.entradaObs.insert(0, i)
self.cursor.execute("SELECT comp1 FROM orcamento1 "
"WHERE id_orc1 = '%s'" % id_orc)
consultacomp1 = self.cursor.fetchall()
for i in consultacomp1:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.listInicio.insert(0, i)
self.cursor.execute("SELECT comp2 FROM orcamento1 "
"WHERE id_orc1 = '%s'" % id_orc)
consultacomp2 = self.cursor.fetchall()
for i in consultacomp2:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.listFim.insert(0, i)
##################################################
placa = self.placa.get()
nomeaut = self.cursor
nomeaut.execute(
"SELECT UPPER(veiculo) FROM frota WHERE placa = '%s'" % placa)
consultaautomovel = self.cursor.fetchall()
for i in consultaautomovel:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.listAut.insert(0, i)
nomeano = self.cursor
nomeano.execute("SELECT ano FROM frota WHERE placa = '%s'" % placa)
consultaano = self.cursor.fetchall()
for i in consultaano:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.listAno.insert(0, i)
nomemarca = self.cursor
nomemarca.execute(
"SELECT UPPER(montadora) FROM frota WHERE placa = '%s'" % placa)
consultamarca = self.cursor.fetchall()
for i in consultamarca:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.listMarca.insert(0, i)
nomecomb = self.cursor
nomecomb.execute("SELECT UPPER(combust) FROM frota "
"WHERE placa = '%s'" % placa)
consultacomb = self.cursor.fetchall()
for i in consultacomb:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.listCombustivel.insert(0, i)
nomecor = self.cursor
nomecor.execute("SELECT UPPER(cor) FROM frota WHERE placa = '%s'" % placa)
consultacor = self.cursor.fetchall()
for i in consultacor:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.listCor.insert(0, i)
#############################################
self.listaServProd.delete(*self.listaServProd.get_children())
lista = self.cursor.execute("""SELECT ordem_item, desc_item, cod_item, valor, quant, total FROM orcamento2
WHERE id_orc2 = '%s' """ % id_orc)
rows = self.cursor.fetchall()
for row in rows:
self.listaServProd.insert("", 0, values=row)
##################################
##################################
tec = self.cursor
tec.execute("SELECT tecnico FROM orcamento1 WHERE id_orc1 = '%s' " % id_orc)
tecd = self.cursor.fetchall()
for i in tecd:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.entradaTecnico.insert(0, i)
orcos = self.cursor
orcos.execute("Select tipoOrc From orcamento1 "
"Where id_orc1 = '%s' " % id_orc)
orcos1 = self.cursor.fetchall()
for i in orcos1:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.Tipvar.set(i)
################
        # vistoria (vehicle inspection) variables
codVist = self.listaNumOrc.get()
tanque = self.are1.get()
odometro = self.are2.get()
radio = self.are3.get()
calota = self.are4.get()
triangulo = self.are5.get()
macaco = self.are6.get()
estepe = self.are7.get()
obs1 = self.are8.get()
obs2 = self.are9.get()
self.cursor.execute("SELECT vist1 FROM vistoria "
"WHERE cod = '%s' " % codVist)
codVisto = self.cursor.fetchall()
for i in codVisto:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.are1.insert(0, i)
self.cursor.execute("SELECT vist2 FROM vistoria "
"WHERE cod = '%s' " % codVist)
codR = self.cursor.fetchall()
for i in codR:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.are3.insert(0, i)
self.cursor.execute("SELECT vist3 FROM vistoria "
"WHERE cod = '%s' " % codVist)
codO = self.cursor.fetchall()
for i in codO:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.are2.insert(0, i)
self.cursor.execute("SELECT vist4 FROM vistoria "
"WHERE cod = '%s' " % codVist)
codCalota = self.cursor.fetchall()
for i in codCalota:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.are4.insert(0, i)
self.cursor.execute("SELECT vist5 FROM vistoria "
"WHERE cod = '%s' " % codVist)
codTri = self.cursor.fetchall()
for i in codTri:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.are5.insert(0, i)
self.cursor.execute("SELECT vist6 FROM vistoria "
"WHERE cod = '%s' " % codVist)
cod6 = self.cursor.fetchall()
for i in cod6:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.are6.insert(0, i)
self.cursor.execute("SELECT vist7 FROM vistoria "
"WHERE cod = '%s' " % codVist)
cod7 = self.cursor.fetchall()
for i in cod7:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.are7.insert(0, i)
self.cursor.execute("SELECT vist8 FROM vistoria "
"WHERE cod = '%s' " % codVist)
cod8 = self.cursor.fetchall()
for i in cod8:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.are8.insert(0, i)
self.cursor.execute("SELECT vist9 FROM vistoria "
"WHERE cod = '%s' " % codVist)
cod9 = self.cursor.fetchall()
for i in cod9:
i = str(i);
i = i.replace('(', '');
i = i.replace(')', '');
i = i.replace("'", "");
i = i.replace(',', '')
self.are9.insert(0, i)
self.listaOrc.destroy()
self.desconecta_Glac()
self.total_orc()
    def carrega_orc_a(self, event):
        self.carrega_orc()
def OnDoubleClick(self, event):
self.limpa_cliente()
self.listaServ.selection()
for n in self.listaServ.selection():
            col1, col2, col3, col4, col5 = self.listaServ.item(n, 'values')
self.entradan.insert(END, col1)
self.carrega_orc()
def backup(self):
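        # Copy the SQLite database to the backup folder; c:\glacbkp must
        # already exist, otherwise the user is told to create it first.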
try:
            shutil.copyfile(r"c:\glacx\glac.db", r"c:\glacbkp\copiaGlacX.db")
            msg = "Backup salvo em c:\\glacbkp\\ \n" \
                  "Copie e salve em local seguro. ;) "
            messagebox.showinfo("GLACX", msg)
        except (IOError, OSError):
            msg = "Copia não realizada, crie a pasta c:\\glacbkp \n" \
                  "antes de realizar o backup"
            messagebox.showinfo("GLACX", msg)
def busca_serv(self):
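        # Search services/products by name; '%' is appended to the entry text
        # so the SQL LIKE pattern matches any substring. Note: the term is
        # interpolated straight into the SQL string; "?" parameter binding
        # (assuming self.cursor is a sqlite3 cursor) would be safer.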
self.listaServ1.delete(*self.listaServ1.get_children())
self.listaServicos1.insert(END, '%')
self.conecta_Glac()
servprod = self.listaServicos1.get()
servico12 = self.cursor
servico12.execute("""SELECT cod_sp, servprod, tiposerv, hor, descricao, id_marcaprod, sistemaserv, valor * hor
FROM servprod WHERE servprod LIKE '%s' """ % servprod)
buscaservico12 = self.cursor.fetchall()
for i in buscaservico12:
self.listaServ1.insert("", END, values=i)
self.listaServicos1.delete(0, END)
self.desconecta_Glac()
def OnVsb_S1F(self, *args):
self.listaServ1F.yview(*args)
def busca_servF(self):
self.listaServ1F.delete(*self.listaServ1F.get_children())
self.listaServicos1F.insert(END, '%')
self.conecta_Glac()
servprodF = self.listaServicos1F.get()
servico12F = self.cursor
servico12F.execute("""SELECT cod_falha, falha, falha2 FROM codfalha WHERE falha LIKE '%s' """ % servprodF)
buscaservico12F = self.cursor.fetchall()
for i in buscaservico12F:
self.listaServ1F.insert("", END, values=i)
self.listaServicos1F.delete(0, END)
self.desconecta_Glac()
def PaginaRf(self):
webbrowser.open("https://www.facebook.com/rfzorzi/")
def buscaCli(self):
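        # Search clients by name with the same '%' wildcard convention and
        # list the matching (cod_cli, nome) pairs in the results grid.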
self.conecta_Glac()
self.EntryCliente2.insert(END, '%')
nome = self.EntryCliente2.get()
nomecod = self.cursor
self.listaServ.delete(*self.listaServ.get_children())
lista = self.cursor.execute("""
SELECT cod_cli, nome FROM clientes WHERE nome LIKE '%s'
""" % nome)
rows = self.cursor.fetchall()
for row in rows:
self.listaServ.insert("", END, values=row)
self.EntryCliente2.delete(0, END)
self.desconecta_Glac()
def carrega_cliente2C(self, event):
self.limpa_clienteC()
pos = int(self.listaServ2.curselection()[0])
cod_cli = self.listaServ2.get(pos)
self.cursor.execute("SELECT cod_cli FROM clientes WHERE cod_cli = '%s'" % cod_cli)
        consultacod = self.cursor.fetchall()
for i in consultacod:
self.entradaCod_clicC.insert(END, i)
self.carrega_clienteC()
def busca_serv_veic(self):
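        # Same search as busca_serv, but filtering by the product brand /
        # vehicle make column (id_marcaprod) instead of the name.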
self.listaServ1.delete(*self.listaServ1.get_children())
self.listaServicos1.insert(END, '%')
servprod = self.listaServicos1.get()
self.conecta_Glac()
servico12 = self.cursor
servico12.execute("""SELECT cod_sp, servprod, tiposerv, hor, descricao, id_marcaprod, sistemaserv, valor * hor
FROM servprod WHERE id_marcaprod LIKE '%s' """ % servprod)
buscaservico12 = self.cursor.fetchall()
for i in buscaservico12:
self.listaServ1.insert("", END, values=i)
self.listaServicos1.delete(0, END)
self.desconecta_Glac()
def OnVsb(self, *args):
self.listaServ.yview(*args)
def OnVsb_S1(self, *args):
self.listaServ1.yview(*args)
def OnVsb_Orc(self, *args):
self.listaServ.yview(*args)
def totalbotao(self):
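        # Item total = quantity x unit price; moedaTotal1 below only formats
        # a float as Brazilian currency (R$ 0,00) for display.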
def moedaTotal1(total1=0, moeda='R$'):
return f'{moeda}{total1:>8.2f}'.replace('.', ',')
quant1 = self.listaCol3a.get()
quant1 = float(quant1)
valor1 = self.listaCol4a.get()
valor1 = float(valor1)
total1 = quant1 * valor1
        total1 = float(f'{total1:.2f}')
        self.listaCol5a.delete(0, END)
        self.listaCol5a.insert(END, total1)
        self.entradatotal.delete(0, END)
        self.totalsimples = float(total1)
        self.entradatotal.insert(END, self.moedaTotalizador(total1))
def add_servico1bind(self, event):
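        # Double-click handler: copies the selected service's code into the
        # entry fields and adds the item to the estimate.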
self.codServ1.delete(0, END)
self.listaCol2a.delete(0, END)
self.listaCol4a.delete(0, END)
self.entradatotal.delete(0, END)
self.listaServ1.selection()
for n in self.listaServ1.selection():
col1, col2, col3, col4, col5, \
col6, col7, col8 = self.listaServ1.item(n, 'values')
self.codServ1.insert(END, col1)
self.add_servico1()
self.listaServP1.destroy()
| [
"[email protected]"
] | |
c812efedd1bc577a1273403a66df03d69e7c9e6c | e2cefc848c417334f7ba12d2a3875dfb55b151be | /blogmods/models/users.py | 53295e59c620ea561ceeddd2878d2017af44259a | [
"MIT"
] | permissive | stonescar/multi-user-blog | 2f5f11fd890e6a2dbad15e874a8631441ee50d8b | a402dafde1f7d94031129638aa072ce39223e80e | refs/heads/master | 2021-01-23T01:25:40.459957 | 2017-06-08T09:21:22 | 2017-06-08T09:21:22 | 92,869,652 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,913 | py | from google.appengine.ext import db
from database import Database
from .. import seq
class Users(Database):
""" DB for users """
username = db.StringProperty(required=True)
password = db.StringProperty(required=True)
email = db.StringProperty(required=False)
def get_votes(self, uid):
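        # Number of votes cast by the given user id.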
v = db.GqlQuery("""SELECT * FROM Votes
WHERE user = KEY('Users', %s)
""" % uid)
return v.count()
def get_vote_score(self, uid):
s = db.GqlQuery("""SELECT * FROM Votes
WHERE user = KEY('Users', %s)
""" % uid)
return sum(v.vote for v in s)
def get_post_scores(self, uid):
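        # Average vote score over all of the user's posts (0 if no posts).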
p = db.GqlQuery("""SELECT * FROM Posts
WHERE author = KEY('Users', %s)
""" % uid)
if p.count() > 0:
posts = [post.key().id() for post in p]
score = 0
for post in posts:
s = db.GqlQuery("""SELECT * FROM Votes
WHERE post = KEY('Posts', %s)
""" % post)
if s.count() > 0:
score += sum(score.vote for score in s)
return score/p.count()
else:
return 0
@classmethod
def by_name(cls, name):
u = cls.all().filter('username =', name).get()
return u
@classmethod
def name_by_id(cls, id):
        u = cls.by_id(id)
return u.username
@classmethod
def register(cls, name, pw, email=None):
pw_hash = seq.hash_pw(name, pw)
return cls(username=name,
password=pw_hash,
email=email)
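    # Example usage (hypothetical values):
    #   Users.register('alice', 's3cret').put()
    # The password is stored hashed via the blog's seq helper module.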
@classmethod
def valid_login(cls, name, pw):
u = cls.by_name(name)
if u and seq.valid_pw(name, pw, u.password):
return u
| [
"[email protected]"
] | |
82184d7fea3aa9573d5469355b5206fdd80eccbb | 5a4a152031b79feeb1aa2dfe9aeaf5fb89ef5d9d | /test_main.py | 4c1f57faa4f74e2b9b883705766a4b27aa23bd05 | [] | no_license | ivanjankovic/deploy-flask-app-to-kubernetes | 6add453a54b5f5eec4aab608db6672e76543b475 | 5d835670c053517045ba3698af1f2787c3da0f6d | refs/heads/master | 2022-04-27T02:26:31.369667 | 2020-04-23T11:44:40 | 2020-04-23T11:44:40 | 257,952,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 997 | py | '''
Tests for jwt flask app.
'''
import os
import json
import pytest
import main
SECRET = 'TestSecret'
TOKEN = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE1NjEzMDY3OTAsIm5iZiI6MTU2MDA5NzE5MCwiZW1haWwiOiJ3b2xmQHRoZWRvb3IuY29tIn0.IpM4VMnqIgOoQeJxUbLT-cRcAjK41jronkVrqRLFmmk'
EMAIL = '[email protected]'
PASSWORD = 'huff-puff'
@pytest.fixture
def client():
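    # Test fixture: export a known JWT secret and hand out a Flask test
    # client, so /auth signs tokens with SECRET during the tests.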
os.environ['JWT_SECRET'] = SECRET
main.APP.config['TESTING'] = True
client = main.APP.test_client()
yield client
def test_health(client):
response = client.get('/')
assert response.status_code == 200
assert response.json == 'Healthy'
def test_auth(client):
body = {'email': EMAIL,
'password': PASSWORD}
response = client.post('/auth',
data=json.dumps(body),
content_type='application/json')
assert response.status_code == 200
token = response.json['token']
assert token is not None
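    # The token's claims could be checked further, e.g. (assuming main signs
    # with PyJWT using HS256):
    #   import jwt
    #   assert jwt.decode(token, SECRET, algorithms=['HS256'])['email'] == EMAIL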
| [
"[email protected]"
] |