blob_id
stringlengths
40
40
directory_id
stringlengths
40
40
path
stringlengths
3
616
content_id
stringlengths
40
40
detected_licenses
sequencelengths
0
112
license_type
stringclasses
2 values
repo_name
stringlengths
5
115
snapshot_id
stringlengths
40
40
revision_id
stringlengths
40
40
branch_name
stringclasses
777 values
visit_date
timestamp[us]date
2015-08-06 10:31:46
2023-09-06 10:44:38
revision_date
timestamp[us]date
1970-01-01 02:38:32
2037-05-03 13:00:00
committer_date
timestamp[us]date
1970-01-01 02:38:32
2023-09-06 01:08:06
github_id
int64
4.92k
681M
star_events_count
int64
0
209k
fork_events_count
int64
0
110k
gha_license_id
stringclasses
22 values
gha_event_created_at
timestamp[us]date
2012-06-04 01:52:49
2023-09-14 21:59:50
gha_created_at
timestamp[us]date
2008-05-22 07:58:19
2023-08-21 12:35:19
gha_language
stringclasses
149 values
src_encoding
stringclasses
26 values
language
stringclasses
1 value
is_vendor
bool
2 classes
is_generated
bool
2 classes
length_bytes
int64
3
10.2M
extension
stringclasses
188 values
content
stringlengths
3
10.2M
authors
sequencelengths
1
1
author_id
stringlengths
1
132
8de3381b4bf1330627995867f8533aa971e31273
8a77b3a993b5ad9aaa7da0eeca4e02462e336dcd
/IPTVPlayer/hosts/hosturllist.py
f20088c05a692f45a988dd1d1ea5115e84830f55
[]
no_license
gorr2016/iptvplayer-for-e2
cc6a6004ff04a5aca1145860abd09791d762ea20
74b4320feca9079293b19c837b14cfc402d77673
refs/heads/master
2021-01-01T05:00:41.155592
2016-04-27T09:28:41
2016-04-27T09:28:41
57,188,687
1
1
null
2016-04-27T09:28:42
2016-04-27T06:20:30
Python
UTF-8
Python
false
false
15,857
py
# -*- coding: utf-8 -*-
"""IPTVPlayer host that plays links kept in plain-text url-list files
(urllist.txt / urllist.stream / urllist.user / urllist.rafalcool1)."""

###################################################
# LOCAL import
###################################################
from Plugins.Extensions.IPTVPlayer.components.iptvplayerinit import TranslateTXT as _
from Plugins.Extensions.IPTVPlayer.components.ihost import CHostBase, CBaseHostClass, CDisplayListItem, ArticleContent, RetHost, CUrlItem
from Plugins.Extensions.IPTVPlayer.tools.iptvtools import CSelOneLink, printDBG, printExc, CSearchHistoryHelper, GetLogoDir, GetCookieDir
from Plugins.Extensions.IPTVPlayer.tools.iptvfilehost import IPTVFileHost
from Plugins.Extensions.IPTVPlayer.libs.youtube_dl.utils import clean_html
from Plugins.Extensions.IPTVPlayer.libs.urlparserhelper import getDirectM3U8Playlist, getF4MLinksWithMeta
from Plugins.Extensions.IPTVPlayer.libs.urlparser import urlparser
from Plugins.Extensions.IPTVPlayer.iptvdm.iptvdh import DMHelper
###################################################

###################################################
# FOREIGN import
###################################################
from Components.config import config, ConfigSelection, ConfigYesNo, ConfigDirectory, getConfigListEntry
import re
import codecs
import time
###################################################

###################################################
# E2 GUI COMMPONENTS
###################################################
###################################################

###################################################
# Config options for HOST
###################################################
config.plugins.iptvplayer.Sciezkaurllist = ConfigDirectory(default = "/hdd/")
config.plugins.iptvplayer.grupujurllist = ConfigYesNo(default = True)
config.plugins.iptvplayer.sortuj = ConfigYesNo(default = True)
config.plugins.iptvplayer.urllist_showrafalcool1 = ConfigYesNo(default = True)


def GetConfigList():
    # Build the entries shown on the plugin's configuration screen.
    optionList = []
    optionList.append(getConfigListEntry(_('Text files ytlist and urllist are in:'), config.plugins.iptvplayer.Sciezkaurllist))
    optionList.append(getConfigListEntry(_('Show recommended by Rafalcool1:'), config.plugins.iptvplayer.urllist_showrafalcool1))
    optionList.append(getConfigListEntry(_('Sort the list:'), config.plugins.iptvplayer.sortuj))
    optionList.append(getConfigListEntry(_('Group links into categories: '), config.plugins.iptvplayer.grupujurllist))
    return optionList
###################################################


def gettytul():
    # Display title of this host.
    return (_('Urllists player'))


class Urllist(CBaseHostClass):
    # File names (looked up inside the configured directory) feeding the lists.
    RAFALCOOL1_FILE = 'urllist.rafalcool1'
    URLLIST_FILE = 'urllist.txt'
    URRLIST_STREAMS = 'urllist.stream'
    URRLIST_USER = 'urllist.user'

    def __init__(self):
        printDBG("Urllist.__init__")
        # Top-level categories; the Rafalcool1 entry is added only when enabled.
        self.MAIN_GROUPED_TAB = [{'category': 'all', 'title': (_("All in one")), 'desc': (_("Links are videos and messages, without division into categories")), 'icon':'http://osvita.mediasapiens.ua/content/news/001000-002000/shyfrovanie_dannyh_1415.jpg'}]
        if config.plugins.iptvplayer.urllist_showrafalcool1.value:
            self.MAIN_GROUPED_TAB.append({'category': Urllist.RAFALCOOL1_FILE, 'title': (_("Recommended by Rafalcool1")), 'desc': (_("List of movies prepared by Rafalcool1")), 'icon':'http://s1.bild.me/bilder/030315/3925071iconFilm.jpg'})
        self.MAIN_GROUPED_TAB.extend( [{'category': Urllist.URLLIST_FILE, 'title': (_("Videos")), 'desc': (_("Links to the video files from the file urllist.txt")), 'icon':'http://mohov.h15.ru/logotip_kino.jpg'}, \
        {'category': Urllist.URRLIST_STREAMS, 'title': (_("live transfers")), 'desc': (_("Live broadcasts from the file urllist.stream")), 'icon':'http://asiamh.ru.images.1c-bitrix-cdn.ru/images/media_logo.jpg?136879146733721'}, \
        {'category': Urllist.URRLIST_USER, 'title': (_("User files")), 'desc': (_("Favorite addresses are stored under the file urllist.user")), 'icon':'http://kinovesti.ru/uploads/posts/2014-12/1419918660_1404722920_02.jpg'}])
        CBaseHostClass.__init__(self)
        self.currFileHost = None

    def _cleanHtmlStr(self, str):
        # NOTE(review): parameter shadows the builtin `str`; kept as-is here.
        # Strips HTML tags / doubled spaces and unescapes entities.
        str = self.cm.ph.replaceHtmlTags(str, ' ').replace('\n', ' ')
        return clean_html(self.cm.ph.removeDoubles(str, ' ').replace(' )', ')').strip())

    def _getHostingName(self, url):
        # Name of the hosting service, or 'direct link' for plain URIs.
        if 0 != self.up.checkHostSupport(url):
            return self.up.getHostName(url)
        elif self._uriIsValid(url):
            return (_('direct link'))
        else:
            return (_('unknown'))

    def _uriIsValid(self, url):
        # Very loose validity check: anything with a scheme separator passes.
        if '://' in url:
            return True
        return False

    def updateRafalcoolFile(self, filePath, encoding):
        """Refresh the local Rafalcool1 list when the server has a newer version."""
        printDBG("Urllist.updateRafalcoolFile filePath[%s]" % filePath)
        remoteVersion = -1
        localVersion = -1
        # get version from file
        try:
            with codecs.open(filePath, 'r', encoding, 'replace') as fp:
                # version should be in first line
                line = fp.readline()
                localVersion = int(self.cm.ph.getSearchGroups(line + '|', '#file_version=([0-9]+?)[^0-9]')[0])
        except:
            printExc()
        # generate timestamp to add to url to skip possible cacheing
        timestamp = str(time.time())
        # if we have loacal version get remote version for comparison
        # NOTE(review): localVersion is an int (-1 on failure), so this
        # comparison with '' is always True — looks like a leftover; confirm.
        if localVersion != '':
            sts, data = self.cm.getPage("http://hybrid.xunil.pl/IPTVPlayer_resources/UsersFiles/urllist.txt.version")
            if sts:
                try:
                    remoteVersion = int(data.strip())
                except:
                    printExc()
        # update the version
        printDBG('Urllist.updateRafalcoolFile localVersion[%d] remoteVersion[%d]' % (localVersion, remoteVersion))
        if remoteVersion > -1 and localVersion < remoteVersion:
            sts, data = self.cm.getPage("http://hybrid.xunil.pl/IPTVPlayer_resources/UsersFiles/urllist.txt?t=" + timestamp)
            if sts:
                # confirm version
                line = data[0:data.find('\n')]
                try:
                    newVersion = int(self.cm.ph.getSearchGroups(line + '|', '#file_version=([0-9]+?)[^0-9]')[0])
                    if newVersion != remoteVersion:
                        printDBG("Version mismatches localVersion[%d], remoteVersion[%d], newVersion[%d]" % (localVersion, remoteVersion, newVersion) )
                    file = open(filePath, 'wb')
                    file.write(data)
                    file.close()
                except:
                    printExc()

    def listCategory(self, cItem, searchMode=False):
        """Populate the current list for the selected category/group."""
        printDBG("Urllist.listCategory cItem[%s]" % cItem)
        sortList = config.plugins.iptvplayer.sortuj.value
        filespath = config.plugins.iptvplayer.Sciezkaurllist.value
        groupList = config.plugins.iptvplayer.grupujurllist.value
        if cItem['category'] in ['all', Urllist.URLLIST_FILE, Urllist.URRLIST_STREAMS, Urllist.URRLIST_USER, Urllist.RAFALCOOL1_FILE]:
            # Load the file(s) belonging to the chosen category.
            self.currFileHost = IPTVFileHost()
            if cItem['category'] in ['all', Urllist.RAFALCOOL1_FILE] and config.plugins.iptvplayer.urllist_showrafalcool1.value:
                self.updateRafalcoolFile(filespath + Urllist.RAFALCOOL1_FILE, encoding='utf-8')
                self.currFileHost.addFile(filespath + Urllist.RAFALCOOL1_FILE, encoding='utf-8')
            if cItem['category'] in ['all', Urllist.URLLIST_FILE]:
                self.currFileHost.addFile(filespath + Urllist.URLLIST_FILE, encoding='utf-8')
            if cItem['category'] in ['all', Urllist.URRLIST_STREAMS]:
                self.currFileHost.addFile(filespath + Urllist.URRLIST_STREAMS, encoding='utf-8')
            if cItem['category'] in ['all', Urllist.URRLIST_USER]:
                self.currFileHost.addFile(filespath + Urllist.URRLIST_USER, encoding='utf-8')
            if 'all' != cItem['category'] and groupList:
                # Grouped view: one directory entry per group name.
                tmpList = self.currFileHost.getGroups(sortList)
                for item in tmpList:
                    if '' == item:
                        title = (_("Other"))
                    else:
                        title = item
                    params = {'name': 'category', 'category':'group', 'title':title, 'group':item}
                    self.addDir(params)
            else:
                # Flat view: every link becomes a video entry.
                tmpList = self.currFileHost.getAllItems(sortList)
                for item in tmpList:
                    desc = (_("Hosting: %s, %s")) % (self._getHostingName(item['url']), item['url'])
                    if item['desc'] != '':
                        desc = item['desc']
                    params = {'title':item['full_title'], 'url':item['url'], 'desc':desc, 'icon':item['icon']}
                    self.addVideo(params)
        elif 'group' in cItem:
            # Inside one group: list its items.
            tmpList = self.currFileHost.getItemsInGroup(cItem['group'], sortList)
            for item in tmpList:
                if '' == item['title_in_group']:
                    title = item['full_title']
                else:
                    title = item['title_in_group']
                desc = (_("Hosting: %s, %s")) % (self._getHostingName(item['url']), item['url'])
                if item.get('desc', '') != '':
                    desc = item['desc']
                params = {'title':title, 'url':item['url'], 'desc': desc, 'icon':item.get('icon', '')}
                self.addVideo(params)

    def getLinksForVideo(self, cItem):
        """Resolve a list entry to playable URLs (hosting, m3u8/f4m, or direct)."""
        printDBG("Urllist.getLinksForVideo url[%s]" % cItem['url'])
        videoUrls = []
        uri, params = DMHelper.getDownloaderParamFromUrl(cItem['url'])
        printDBG(params)
        uri = urlparser.decorateUrl(uri, params)
        urlSupport = self.up.checkHostSupport( uri )
        if 1 == urlSupport:
            # Known hosting service: let the url parser extract the links.
            retTab = self.up.getVideoLinkExt( uri )
            videoUrls.extend(retTab)
        elif 0 == urlSupport and self._uriIsValid(uri):
            if uri.split('?')[0].endswith('.m3u8'):
                retTab = getDirectM3U8Playlist(uri)
                videoUrls.extend(retTab)
            elif uri.split('?')[0].endswith('.f4m'):
                retTab = getF4MLinksWithMeta(uri)
                videoUrls.extend(retTab)
            else:
                videoUrls.append({'name':'direct link', 'url':uri})
        return videoUrls

    def handleService(self, index, refresh=0, searchPattern='', searchType=''):
        """Entry point called by the framework to (re)build the current list."""
        printDBG('Urllist.handleService start')
        CBaseHostClass.handleService(self, index, refresh, searchPattern, searchType)
        name = self.currItem.get("name", None)
        category = self.currItem.get("category", '')
        printDBG( "Urllist.handleService: ---------> name[%s], category[%s] " % (name, category) )
        self.currList = []
        if None == name:
            # First call: show the main category tab.
            self.listsTab(self.MAIN_GROUPED_TAB, self.currItem)
        else:
            self.listCategory(self.currItem)
        CBaseHostClass.endHandleService(self, index, refresh)


class IPTVHost(CHostBase):
    """Adapter between the generic IPTVPlayer host API and the Urllist host."""

    def __init__(self):
        CHostBase.__init__(self, Urllist(), True)

    def _isPicture(self, url):
        # True when the url (also before '|' or '?') has an image extension.
        def _checkExtension(url):
            return url.endswith(".jpeg") or url.endswith(".jpg") or url.endswith(".png")
        if _checkExtension(url):
            return True
        if _checkExtension(url.split('|')[0]):
            return True
        if _checkExtension(url.split('?')[0]):
            return True
        return False

    def getLogoPath(self):
        return RetHost(RetHost.OK, value = [GetLogoDir('urllistlogo.png')])

    def getLinksForVideo(self, Index = 0, selItem = None):
        """Return playable CUrlItem links for the list entry at Index."""
        listLen = len(self.host.currList)
        if listLen < Index and listLen > 0:
            printDBG( "ERROR getLinksForVideo - current list is to short len: %d, Index: %d" % (listLen, Index) )
            return RetHost(RetHost.ERROR, value = [])
        if self.host.currList[Index]["type"] != 'video':
            printDBG( "ERROR getLinksForVideo - current item has wrong type" )
            return RetHost(RetHost.ERROR, value = [])
        retlist = []
        uri = self.host.currList[Index].get('url', '')
        if not self._isPicture(uri):
            urlList = self.host.getLinksForVideo(self.host.currList[Index])
            for item in urlList:
                retlist.append(CUrlItem(item["name"], item["url"], 0))
        else:
            retlist.append(CUrlItem('picture link', urlparser.decorateParamsFromUrl(uri, True), 0))
        return RetHost(RetHost.OK, value = retlist)
    # end getLinksForVideo

    def convertList(self, cList):
        """Convert host items into CDisplayListItem objects for the GUI."""
        hostList = []
        searchTypesOptions = [] # arranged alphabetically
        #searchTypesOptions.append(("Filmy", "filmy"))
        #searchTypesOptions.append(("Seriale", "seriale"))
        for cItem in cList:
            hostLinks = []
            type = CDisplayListItem.TYPE_UNKNOWN
            possibleTypesOfSearch = None
            if cItem['type'] == 'category':
                if cItem['title'] == 'Wyszukaj':
                    type = CDisplayListItem.TYPE_SEARCH
                    possibleTypesOfSearch = searchTypesOptions
                else:
                    type = CDisplayListItem.TYPE_CATEGORY
            elif cItem['type'] == 'video':
                type = CDisplayListItem.TYPE_VIDEO
                url = cItem.get('url', '')
                if self._isPicture(url):
                    type = CDisplayListItem.TYPE_PICTURE
                else:
                    type = CDisplayListItem.TYPE_VIDEO
                if '' != url:
                    hostLinks.append(CUrlItem("Link", url, 1))
            title = cItem.get('title', '')
            description = clean_html(cItem.get('desc', ''))
            icon = cItem.get('icon', '')
            hostItem = CDisplayListItem(name = title, description = description, type = type, urlItems = hostLinks, urlSeparateRequest = 1, iconimage = icon, possibleTypesOfSearch = possibleTypesOfSearch)
            hostList.append(hostItem)
        return hostList
    # end convertList

    def getSearchItemInx(self):
        # Find 'Wyszukaj' item
        try:
            list = self.host.getCurrList()
            for i in range( len(list) ):
                if list[i]['category'] == 'Wyszukaj':
                    return i
        except:
            printDBG('getSearchItemInx EXCEPTION')
        return -1

    def setSearchPattern(self):
        # Remember the selected history entry as the active search pattern.
        try:
            list = self.host.getCurrList()
            if 'history' == list[self.currIndex]['name']:
                pattern = list[self.currIndex]['title']
                search_type = list[self.currIndex]['search_type']
                self.host.history.addHistoryItem( pattern, search_type)
                self.searchPattern = pattern
                self.searchType = search_type
        except:
            printDBG('setSearchPattern EXCEPTION')
            self.searchPattern = ''
            self.searchType = ''
        return
474ca8e491dd7c8a564d196843a5593c517b1619
7533acbcf36b196e5513fad2b3c9623411500f0f
/0x0F-python-object_relational_mapping/model_state.py
9b22628ad72e4f2739e3630bec79c475e4db1008
[]
no_license
AndrewKalil/holbertonschool-higher_level_programming
97ce8af5ad7e8e9f0b1a25d7fa7dcb1a2b40810e
9bef1f7c8ff9d8e90ec2aed7a29f37cec3a5e590
refs/heads/master
2022-12-17T19:02:12.096913
2020-09-23T00:00:44
2020-09-23T00:00:44
259,439,815
0
1
null
null
null
null
UTF-8
Python
false
false
400
py
#!/usr/bin/python3 """First state model""" from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import Column, Integer, String Base = declarative_base() class State(Base): """Class State""" __tablename__ = 'states' id = Column(Integer, autoincrement=True, primary_key=True, nullable=False, unique=True) name = Column(String(128), nullable=False)
9afc4200eacafdbebe20217fe3f7491121e55325
06e51cd96f2788f87c7c426244167ddbfcc0d551
/integer_solutions.py
cc8d955bf497fb094c54cccbe9ef48050297b32e
[]
no_license
Lisolo/ACM
683724184dc2af31ef45073a9cd3ef7f2cdabfba
231d80dd72768ca97c3e9795af94910f94cc0643
refs/heads/master
2016-09-06T16:04:11.910067
2014-11-26T12:25:50
2014-11-26T12:25:50
null
0
0
null
null
null
null
UTF-8
Python
false
false
904
py
# coding=utf-8
"""Given two integers a and b (-10000 < a, b < 10000), decide whether two
integers exist whose sum is a and whose product is b; print YES or NO.

For example a=9, b=15: no such pair of integers exists, so NO is printed.
(Docstring translated from the original Chinese problem statement.)
"""
import math


def has_integer_pair(a, b):
    """Return True iff integers x, y exist with x + y == a and x * y == b.

    x and y are the roots of t**2 - a*t + b = 0, so a pair exists exactly
    when the discriminant a**2 - 4*b is a non-negative perfect square.
    math.isqrt is exact, unlike the original float `delta**0.5` test.
    """
    delta = a * a - 4 * b
    if delta < 0:
        return False
    root = math.isqrt(delta)
    return root * root == delta


def has_integer_pair_bruteforce(a, b):
    """Brute-force check kept from the original 'solution 1'.

    Bug fixed: the original wrote ``b % x == 0`` as a bare expression (a
    no-op), so non-divisor pairs produced by integer division were also
    appended; the divisibility test is now an actual guard.
    """
    # Special case carried over from the original: 0 = 0 + 0 = 0 * 0.
    if a == 0 and b == 0:
        return True
    divisors = []
    limit = abs(b)
    for x in range(-limit, limit + 1):
        if x != 0 and b % x == 0:
            divisors.append([x, b // x])
    return any(sum(pair) == a for pair in divisors)


if __name__ == "__main__":
    a = 6
    b = 9
    # The original script printed the verdict of both solutions.
    print('YES' if has_integer_pair_bruteforce(a, b) else 'NO')
    print('YES' if has_integer_pair(a, b) else 'NO')
c014798865331ef81d1e07c344df553a92294cac
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p03031/s125157001.py
4ac1006ad6aa89f188ff8caaa0a4a2b77a42ef94
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
654
py
"""AtCoder ABC128 C ("Switches"): count the on/off assignments of the n
switches that light all m bulbs.  Bulb j is lit when the number of its
connected switches that are on is congruent to p_list[j] modulo 2."""


def count_valid_combinations(n, switch_list, p_list):
    """Return the number of valid switch assignments.

    switch_list[j] lists the 1-based switch indices wired to bulb j;
    p_list[j] is the required parity (0 or 1).  Brute-forces all 2**n
    assignments, as in the original (n <= 10 in the problem).
    """
    # Sets give O(1) membership inside the loop (original scanned a list).
    switch_sets = [set(s) for s in switch_list]
    ans = 0
    for bits in range(1 << n):  # each bitmask is one on/off assignment
        on = {i + 1 for i in range(n) if (bits >> i) & 1}
        if all(len(on & sw) % 2 == p for sw, p in zip(switch_sets, p_list)):
            ans += 1
    return ans


def _main():
    """Read the problem input from stdin and print the answer."""
    n, m = map(int, input().split())
    switch_list = []
    for _ in range(m):
        s = list(map(int, input().split()))
        s.pop(0)  # drop the leading count k_i
        switch_list.append(s)
    p_list = list(map(int, input().split()))
    print(count_valid_combinations(n, switch_list, p_list))


if __name__ == "__main__":
    _main()
a8bfde75fc2cf284a72e5f69140fbf309caf8038
46c318dbfedfb95d38207431bbf14bacf12d185f
/NLP/II_Process/Matching/RegEx.py
d738c2ca0c9f6a0f8c5f444a610746577f70e4b9
[]
no_license
miltonluaces/problem_solving
2e92877ee736c0920ce6e94dcc73fd01a52e3e46
bccb89d8aadef4a2e409fc6c66ccad2fb84b6976
refs/heads/master
2023-01-08T15:58:51.002478
2020-10-28T21:31:46
2020-10-28T21:31:46
308,143,277
1
0
null
null
null
null
UTF-8
Python
false
false
379
py
import regex # Normal matching. m1 = regex.search(r'Mr|Mrs', 'Mrs'); print(m1.expandf('{0}')) m2 = regex.search(r'one(self)?(selfsufficient)?', 'oneselfsufficient'); print(m2.expandf('{0}')) # POSIX matching. m3 = regex.search(r'(?p)Mr|Mrs', 'Mrs'); print(m3.expandf('{0}')) m4 = regex.search(r'(?p)one(self)?(selfsufficient)?', 'oneselfsufficient'); print(m4.expandf('{0}'))
bf6fab955be82cb8c2a81a65c3d6b12d35068493
3e1584f4bc2f1d4368b10d0f28fcba69d946eb00
/core/apps/kubeops_api/migrations/0063_auto_20200221_0654.py
a552b6fac34d4eea6b6e19c7ad53a2cf039001be
[ "Apache-2.0" ]
permissive
azmove/KubeOperator
80d102a41a0009ae85dd2d82c7dc164511de9a58
0561ddbc03eded5813a86693af7fc4ee9647f12d
refs/heads/master
2021-01-08T22:40:51.267027
2020-02-21T08:47:43
2020-02-21T08:47:43
null
0
0
null
null
null
null
UTF-8
Python
false
false
737
py
# Generated by Django 2.2.10 on 2020-02-21 06:54 from django.conf import settings from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('kubeops_api', '0062_auto_20200221_0510'), ] operations = [ migrations.AddField( model_name='item', name='users', field=models.ManyToManyField(to=settings.AUTH_USER_MODEL), ), migrations.AlterField( model_name='clusterhealthhistory', name='date_type', field=models.CharField(choices=[('HOUR', 'HOUR'), ('DAY', 'DAY')], default='HOUR', max_length=255), ), ]
1e64d87dfe87a31900f768f82c81e0725aa124e2
1ed281b93e11a53ea4ae2a3798aeb9f58dd664de
/webapp/starter/config/settings/local.py
b7e7a63716f22047acb2f9e1c94ef1b10a5f6274
[ "MIT" ]
permissive
bartkim0426/django-docker-seul
5ae2a31f1004ae8292569bcafd2e66ce56f67c7e
6a75605281403357514d7b30e65d2685bb907b31
refs/heads/master
2021-05-09T02:55:04.765647
2019-02-11T07:45:09
2019-02-11T07:45:09
119,226,239
0
0
null
null
null
null
UTF-8
Python
false
false
917
py
"""Local development Django settings: debug tooling on top of the shared
partials, with the database configured entirely from the environment."""
import os

from .partials import *

DEBUG = True

# Development-only apps.
INSTALLED_APPS += [
    'debug_toolbar',
    'django_extensions',
]

MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware', ]

# debug_toolbar only renders for requests from these addresses.
INTERNAL_IPS = ['127.0.0.1', ]

# Postgres connection comes from the environment (e.g. docker-compose);
# a missing variable raises KeyError at import time, which is intentional.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': os.environ['POSTGRES_NAME'],
        'USER': os.environ["POSTGRES_USER"],
        'PASSWORD': os.environ["POSTGRES_PASSWORD"],
        'HOST': os.environ["POSTGRES_HOST"],
        'PORT': os.environ["POSTGRES_PORT"],
    }
}

MEDIA_ROOT = str(ROOT_DIR('mediafiles'))

# before collectstatic
# for prevent duplication of STATIC_ROOT and STATICFILES_DIRS
# STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# STATICFILES_DIRS = (
#     str(ROOT_DIR.path('static')),
# )

# after collectstatic
STATIC_ROOT = str(ROOT_DIR('static-files'))
STATICFILES_DIRS = (
    str(ROOT_DIR.path('staticfiles')),
)
dc5df62772aa2776784f4a98884bd8e5b46d2056
5f2608d4a06e96c3a032ddb66a6d7e160080b5b0
/week4/homework_w4_b1.py
e70dae9bdc972512e2e7c76f7fc0ae5ef2833a01
[]
no_license
sheikhusmanshakeel/statistical-mechanics-ens
f3e150030073f3ca106a072b4774502b02b8f1d0
ba483dc9ba291cbd6cd757edf5fc2ae362ff3df7
refs/heads/master
2020-04-08T21:40:33.580142
2014-04-28T21:10:19
2014-04-28T21:10:19
null
0
0
null
null
null
null
UTF-8
Python
false
false
967
py
# NOTE: Python 2 script (print statements below).
import math, random, pandas


def Vol1_s(dimension):
    """Exact volume of the unit ball: V(d) = pi**(d/2) / Gamma(d/2 + 1)."""
    return (math.pi ** (dimension / 2.0)) / math.gamma(dimension / 2.0 + 1.0)


def Vol1_s_est(dimensions, trials):
    """Monte Carlo estimate of the unit-ball volume in `dimensions` dims.

    Samples `trials` points uniformly from the cube [-1, 1]**d and scales
    the hit ratio by the cube volume 2**d.  Returns (estimate, n_hits).
    """
    n_hits = 0
    for i in range(trials):
        dists = [random.uniform(-1.0, 1.0) for _ in range(dimensions)]
        sum_dist = sum(d ** 2 for d in dists)
        if sum_dist < 1.0:
            n_hits += 1
    return n_hits / float(trials) * 2 ** dimensions, n_hits


dimensions = []
result = []
trials = 1000000
print '%i used for all' % trials
for d in range(1, 33):
    dimensions.append(str(d) + 'd')
    vol_est, n_hits = Vol1_s_est(d, trials)
    result.append({
        'estimation of Vol1_s(d)': vol_est,
        'Vol1_s(d) (exact)': Vol1_s(d),
        'n_hits': n_hits
    })
    print d, n_hits, vol_est

# NOTE(review): 'actual' is never produced above (the key is
# 'Vol1_s(d) (exact)'), so that DataFrame column will be all-NaN —
# looks like a leftover from a rename; confirm intended columns.
ordered_cols = ['estimation of Vol1_s(d)', 'actual', 'n_hits']
print pandas.DataFrame(result, dimensions, columns=ordered_cols)
7af7b7b2b077c56d314c8a7de890790b7cd2a523
9972988c4f4ccd7fdbafea601782dae94b679e78
/tests/test.py
8f7fc6f16dc0d00289c64bafe01422ece4e4f123
[ "MIT" ]
permissive
chenshoubiao/ButterSalt
b67e9dec730350e64520064940fe69621a927418
7120c5135448cb3c9760925f23d2efc8316458d8
refs/heads/master
2021-01-22T03:22:58.791300
2017-05-25T01:58:45
2017-05-25T01:58:45
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,787
py
"""Functional tests for the ButterSalt Flask app, driven through the
test client against an in-memory SQLite database."""
import ButterSalt
import unittest


class ButterSaltTestCase(unittest.TestCase):

    def setUp(self):
        # In-memory SQLite and disabled CSRF so forms can be posted directly.
        ButterSalt.app.config['TESTING'] = True
        ButterSalt.app.config['WTF_CSRF_ENABLED'] = False
        ButterSalt.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
        ButterSalt.app.config['SQLALCHEMY_ECHO'] = False
        self.app = ButterSalt.app.test_client()
        ButterSalt.db.create_all()

    def login(self, username, password):
        # Helper: POST the login form and follow redirects.
        return self.app.post('/user/login', data=dict(
            username=username,
            password=password
        ), follow_redirects=True)

    def logout(self):
        return self.app.get('/user/logout', follow_redirects=True)

    def test_login_logout(self):
        # The logout link is only rendered for an authenticated session.
        rv = self.login('admin', 'default')
        assert '/user/logout' in str(rv.data)
        assert 'Logged in successfully.' in str(rv.data)
        rv = self.logout()
        assert 'Please log in to access this page.' in str(rv.data)
        assert '/user/logout' not in str(rv.data)

    def test_index(self):
        self.login('admin', 'default')
        rv = self.app.get('/', follow_redirects=True)
        assert 'id="tgt" name="tgt" type="text" value="" placeholder="Required"' in str(rv.data)
        assert '/user/logout' in str(rv.data)

    def test_deployment(self):
        self.login('admin', 'default')
        rv = self.app.get('/deployment/operation', follow_redirects=True)
        assert '<table class="table table-hover">' in str(rv.data)
        assert '/user/logout' in str(rv.data)

    def test_salt_jobs(self):
        self.login('admin', 'default')
        rv = self.app.get('/salt/jobs/', follow_redirects=True)
        assert '<table class="table table-striped">' in str(rv.data)
        assert '/user/logout' in str(rv.data)

    def test_execution_command_testping(self):
        # NOTE(review): these three tests appear to need a live salt minion
        # named 'HXtest3' — confirm the fixture before running in CI.
        self.login('admin', 'default')
        rv = self.app.post('/', data=dict(
            tgt='HXtest3',
            fun='test.ping',
        ), follow_redirects=True)
        assert '[&#39;HXtest3&#39;]' in str(rv.data)

    def test_execution_command_testarg(self):
        self.login('admin', 'default')
        rv = self.app.post('/', data=dict(
            tgt='HXtest3',
            fun='test.arg',
            arg="/proc lol"
        ), follow_redirects=True)
        assert '<th> Arguments </th>' in str(rv.data)
        assert '__kwarg__' not in str(rv.data)

    def test_execution_command_testkwarg(self):
        self.login('admin', 'default')
        rv = self.app.post('/', data=dict(
            tgt='HXtest3',
            fun='test.arg',
            arg="/proc lol",
            kwarg='lol=wow'
        ), follow_redirects=True)
        assert '__kwarg__' in str(rv.data)


if __name__ == '__main__':
    unittest.main()
e7be49dbc740b1357c53555b1c8370e37846f83e
dbde9338e87117397c2a7c8969df614f4dd4eacc
/examples/tensorflow/qat_conversion/benchmark.py
e31fb0226a4869184f24d64386ded4940317fec9
[ "Apache-2.0", "MIT", "Intel" ]
permissive
leonardozcm/neural-compressor
9f83551007351e12df19e5fae3742696613067ad
4a49eae281792d987f858a27ac9f83dffe810f4b
refs/heads/master
2023-08-16T17:18:28.867898
2021-09-03T06:44:25
2021-09-03T06:54:30
407,043,747
0
0
Apache-2.0
2021-09-16T07:57:10
2021-09-16T06:12:32
null
UTF-8
Python
false
false
1,017
py
"""Benchmark a quantized MNIST model with lpot, feeding it batches of the
MNIST test split through a small iterable dataloader."""
import tensorflow as tf
from tensorflow import keras
import numpy as np


class dataloader(object):
    """Batched iterable over the MNIST test split.

    Iteration yields (image_batch, label_batch) tuples of at most
    `batch_size` items, with pixel values scaled to [0, 1].
    """

    def __init__(self, batch_size=100):
        mnist = keras.datasets.mnist
        (train_images, train_labels), (test_images, test_labels) = mnist.load_data()
        # Normalize the input image so that each pixel value is between 0 to 1.
        self.train_images = train_images / 255.0
        self.test_images = test_images / 255.0
        self.train_labels = train_labels
        self.test_labels = test_labels
        self.batch_size = batch_size
        self.i = 0

    def __iter__(self):
        # Bug fix: restart the cursor on every new iteration.  The original
        # left `self.i` at the end of the data, so a second full pass over
        # the dataloader silently yielded nothing.
        self.i = 0
        while self.i < len(self.test_images):
            yield (self.test_images[self.i: self.i + self.batch_size],
                   self.test_labels[self.i: self.i + self.batch_size])
            self.i = self.i + self.batch_size


from lpot.experimental import Benchmark, common

evaluator = Benchmark('mnist.yaml')
evaluator.model = common.Model('quantized_model')
evaluator.b_dataloader = dataloader()
evaluator('accuracy')
8cffae1caed4f348b156a25034e81b9c31782903
46ae8264edb9098c9875d2a0a508bc071201ec8b
/res/scripts/client/gui/battle_control/requestsavatarrequestscontroller.py
f867089a8ae0ffd0381a8948555605e3e5e292d7
[]
no_license
Difrex/wotsdk
1fc6156e07e3a5302e6f78eafdea9bec4c897cfb
510a34c67b8f4c02168a9830d23f5b00068d155b
refs/heads/master
2021-01-01T19:12:03.592888
2016-10-08T12:06:04
2016-10-08T12:06:04
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,880
py
# Embedded file name: scripts/client/gui/battle_control/requests/AvatarRequestsController.py
from collections import namedtuple
import BigWorld
import AccountCommands
from debug_utils import LOG_DEBUG, LOG_WARNING
from ids_generators import Int32IDGenerator
from helpers import i18n
from messenger import MessengerEntry, g_settings
from gui.shared.utils.requesters.abstract import RequestsByIDProcessor
from gui.shared.utils.requesters.RequestsController import RequestsController
from gui.shared.rq_cooldown import RequestCooldownManager, REQUEST_SCOPE
from gui.battle_control.requests.settings import AVATAR_REQUEST_TYPE, DEFAULT_COOLDOWN


class _AvatarCooldownManager(RequestCooldownManager):
    """Cooldown bookkeeping for in-battle (avatar) requests."""

    def __init__(self):
        super(_AvatarCooldownManager, self).__init__(REQUEST_SCOPE.CLUB)

    def lookupName(self, rqTypeID):
        # Human-readable request-type name used in system messages.
        rqName = AVATAR_REQUEST_TYPE.getKeyByValue(rqTypeID)
        return i18n.makeString('#system_messages:battle/request/%s' % str(rqName))

    def getDefaultCoolDown(self):
        return DEFAULT_COOLDOWN

    def _showSysMessage(self, msg):
        # Render the cooldown error in the battle chat window.
        MessengerEntry.g_instance.gui.addClientMessage(g_settings.htmlTemplates.format('battleErrorMessage', ctx={'error': msg}))


class _AvatarRequester(RequestsByIDProcessor):
    """Sends avatar requests and routes server responses back by request ID."""

    class _Response(namedtuple('_Response', ['code', 'errStr', 'data'])):

        def isSuccess(self):
            return AccountCommands.isCodeValid(self.code)

    def __init__(self):
        super(_AvatarRequester, self).__init__(Int32IDGenerator())

    def getSender(self):
        return BigWorld.player().prebattleInvitations

    def _doCall(self, method, *args, **kwargs):
        # Allocate a request ID, then call `method` with a callback that
        # resolves that ID once the server answers.
        requestID = self._idsGenerator.next()

        def _callback(code, errStr, data):
            ctx = self._requests.get(requestID)
            self._onResponseReceived(requestID, self._makeResponse(code, errStr, data, ctx))

        method(callback=_callback, *args, **kwargs)
        return requestID

    def _makeResponse(self, code = 0, errMsg = '', data = None, ctx = None):
        # Wrap the raw (code, error, data) triple; log failures for debugging.
        response = self._Response(code, errMsg, data)
        if not response.isSuccess():
            LOG_WARNING('Avatar request error', ctx, response)
        return response


class AvatarRequestsController(RequestsController):
    """Maps avatar request types to handler methods and dispatches them."""

    def __init__(self):
        super(AvatarRequestsController, self).__init__(_AvatarRequester(), _AvatarCooldownManager())
        self.__handlers = {AVATAR_REQUEST_TYPE.SEND_INVITES: self.sendInvites}

    def fini(self):
        self.__handlers.clear()
        super(AvatarRequestsController, self).fini()

    def sendInvites(self, ctx, callback = None):
        return self._requester.doRequestEx(ctx, callback, 'sendInvitation', ctx.getDatabaseIDs())

    def _getHandlerByRequestType(self, requestTypeID):
        return self.__handlers.get(requestTypeID)

    def _getRequestTimeOut(self):
        # Seconds before an outstanding avatar request is considered lost.
        return 30.0
1e322b9340bde3dac33558b3897bfef9ce871bd7
1ee10e1d42b59a95a64d860f0477a69b016d1781
/Lecture_09/Lecture Code/3-pipeline_text_generation.py
c1c3ebf2a71d9e21e8fada4b70c2de73687de274
[]
no_license
KushalIsmael/NLP
5564070a573d251d7222dda85b8025ae1f9c3c6f
d4ce567a009e149b0cb1781d3a341d25aa438916
refs/heads/master
2023-08-18T14:07:48.646386
2021-10-28T19:09:25
2021-10-28T19:09:25
null
0
0
null
null
null
null
UTF-8
Python
false
false
295
py
"""Minimal demo of the HuggingFace text-generation pipeline."""
from transformers import pipeline

# Default text-generation pipeline; downloads a pretrained model on first use.
generator = pipeline("text-generation")

print(generator("In this course, we will teach you how to"))
# num_return_sequences limits how many completions come back;
# max_length bounds the total token length of each completion.
print(generator("I am tired of listening to this brownbag session about natural language processing.",
                num_return_sequences = 1,
                max_length = 100
                ))
8212462617b51d5afbf32fbe0aa6e02ac157b1de
15f321878face2af9317363c5f6de1e5ddd9b749
/solutions_python/Problem_155/760.py
c9ed44a45ad2596edbd1cbaff02c0ff2ac596d1c
[]
no_license
dr-dos-ok/Code_Jam_Webscraper
c06fd59870842664cd79c41eb460a09553e1c80a
26a35bf114a3aa30fc4c677ef069d95f41665cc0
refs/heads/master
2020-04-06T08:17:40.938460
2018-10-14T10:12:47
2018-10-14T10:12:47
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,137
py
# NOTE: Python 2 script — the un-escaped backslash paths below are not
# valid Python 3 string literals.


def opera(case_list):
    """Minimum number of extra friends to invite so the whole audience
    stands (Code Jam 2015 Qualification A, "Standing Ovation").

    case_list is the digit string s_0..s_max: case_list[i] people have
    shyness level i and stand once at least i others are standing.
    """
    sat = []
    for e in case_list:
        sat.append(int(e))
    t = sum(sat)            # total audience size
    standing = sat[0]       # level-0 people stand immediately
    invites = 0
    s_list = []
    count = 0
    # Indices of the non-empty shyness groups (level 0 handled above).
    for i in sat:
        if i > 0:
            s_list.append(count)
        count += 1
    if s_list[0] == 0:
        s_list = s_list[1:]
    # Walk the levels; whenever fewer than i people are standing, invite
    # enough (level-0) friends to reach i, then that group stands too.
    while standing < t:
        for i in s_list:
            if standing >= i:
                standing += sat[i]
            else:
                while standing < i:
                    standing += 1
                    invites += 1
                standing += sat[i]
    return invites


input_file = open('C:\Users\chrisjwaite\Desktop\\A-large.in')
output_file = open('C:\Users\chrisjwaite\Desktop\\A-large_output.out', 'w')

lines = input_file.read().split('\n')
n_cases = int(lines[0])

# Each input line is "<max_level> <digit_string>"; keep only the digits.
case_list = []
for case in lines[1:-1]:
    data = case.split(' ')
    case_list.append(data[1])

for i in range(n_cases):
    output_file.write('Case #' + str(i+1) + ': ' + str(opera(case_list[i])) + '\n')

input_file.close()
output_file.close()
8b9613542d6316a2175fc90b8151e4c82c1b1256
f9729802d62bc72df4a6e59c3f49d4bd1fc92043
/docs/conf.py
da4780f8311665e03ef2fa834069cce5e50e6c75
[ "MIT" ]
permissive
wefner/emaillib
50a63e7394d3a09dad3f58b6964335ff1ce298cb
2a00a3c5d4745898b96e858607b43784fa566fac
refs/heads/master
2021-06-28T17:51:56.269015
2017-09-18T17:27:11
2017-09-18T17:27:11
null
0
0
null
null
null
null
UTF-8
Python
false
false
9,411
py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# emaillib documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.

import os
import sys

import sphinx_rtd_theme

# Get the project root dir, which is the parent dir of this docs dir.
cwd = os.getcwd()
project_root = os.path.dirname(cwd)

# Run apidoc to traverse the project directory and add all modules to the
# docs.  argv[0] is a dummy program name; '-f' forces overwriting previously
# generated stub files.
# NOTE(review): `sphinx.apidoc` moved to `sphinx.ext.apidoc` in newer Sphinx
# releases — confirm against the pinned Sphinx version before upgrading.
import sphinx.apidoc

sphinx.apidoc.main(
    argv=[
        '_',
        '-f',
        '-o',
        os.path.join(project_root, 'docs'),
        os.path.join(project_root, 'emaillib'),
    ]
)

# Parse index.rst and fix the title underlining: the generated title and its
# '=' underline can end up with different lengths.
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'index.rst'), 'r+') as index_rst_file:
    index_rst = index_rst_file.read()
    index_rst_file.seek(0, 0)
    line_length = None  # length of the most recent "Welcome to ..." title line
    for line in index_rst.splitlines():
        if line.startswith('Welcome to '):
            line_length = len(line.strip())
        if line.startswith('======================================'):
            # BUG FIX: only rewrite the underline once a title has been seen;
            # previously this raised NameError when an underline came first.
            if line_length is not None:
                line = '=' * line_length
        index_rst_file.write(line + '\n')
    # BUG FIX: the file is rewritten in place; without truncate(), stale
    # trailing bytes remained whenever the new content was shorter.
    index_rst_file.truncate()

# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import emaillib

# -- General configuration ---------------------------------------------

extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.viewcode',
    'sphinx.ext.napoleon',
]

# Parse Google-style docstrings via napoleon.
napoleon_google_docstring = True

# Paths that contain templates, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames and the master toctree document.
source_suffix = '.rst'
master_doc = 'index'

# General information about the project.
project = u'emaillib'
copyright = u'2017, (Author : Costas Tyfoxylos)'

# The short X.Y version and the full release are both taken from the package
# itself, so the docs can never drift from the installed version.
version = emaillib.__version__
release = emaillib.__version__

# Patterns, relative to the source directory, to ignore when looking for
# source files.
exclude_patterns = ['_build']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output -------------------------------------------

html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# Output file base name for HTML help builder.
htmlhelp_basename = 'emaillibdoc'

# -- Options for LaTeX output ------------------------------------------

latex_elements = {}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass).
latex_documents = [
    ('index', 'emaillib.tex',
     u'emaillib Documentation',
     u'Costas Tyfoxylos', 'manual'),
]

# -- Options for manual page output ------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'emaillib',
     u'emaillib Documentation',
     [u'Costas Tyfoxylos'], 1)
]

# -- Options for Texinfo output ----------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author, dir menu entry,
# description, category).
texinfo_documents = [
    ('index', 'emaillib',
     u'emaillib Documentation',
     u'Costas Tyfoxylos',
     'emaillib',
     'One line description of project.',
     'Miscellaneous'),
]
4a37049bdd2a5eb1ab32b0f6c0feabcf07e1d909
0bc9bff4fd4bd72b0ad681b79f0e39cdb9fc9dc0
/voximplant/management/commands/vox_call_list_download.py
7ea6954170db7d549c13f6340aae7c904ee5af68
[ "MIT" ]
permissive
telminov/django-voximplant
bc4fcb53147d9a318857b8213934217ebfc8fdef
a0165498d1727039e26f77724079033c252a3611
refs/heads/master
2020-05-22T01:16:43.631059
2017-09-13T04:41:47
2017-09-13T04:41:47
58,752,532
4
2
null
2017-09-13T04:41:48
2016-05-13T15:39:03
Python
UTF-8
Python
false
false
395
py
# coding: utf-8 from django.core.management.base import BaseCommand from ... import tools class Command(BaseCommand): help = 'Get call list detail' def add_arguments(self, parser): parser.add_argument('--id', dest='call_list_id', type=int) def handle(self, *args, **options): call_list_id = options['call_list_id'] tools.call_list_download(call_list_id)
da4d7b80c470a5ea6762ba816acdd9922c6b0eaf
05a211233ccb01ecd2c12367548cba65bbdbc5d9
/examples/02relative/app/en/__init__.py
771d13b64c475a7ca1ad41ba952a579d17934208
[]
no_license
podhmo/miniconfig
94ee7fa6345816daa83a74b1cbfb40592f221fbb
4cee752fd965c8094ed9d1ff1c33e531e88e479c
refs/heads/master
2021-05-21T11:49:14.836184
2021-03-13T14:06:27
2021-03-13T14:06:57
26,328,967
3
1
null
2020-07-09T19:24:51
2014-11-07T16:58:28
Python
UTF-8
Python
false
false
157
py
def includeme(config):
    """Register this package's sub-configurators on *config*.

    The specs are relative include paths resolved by miniconfig against the
    current package, demonstrating the supported spec styles (colon-qualified
    callable, dotted module, ``./`` and ``../`` paths).
    """
    specs = (
        ".spring:include",
        ".summer",
        "./autumn",
        "../en/winter",
    )
    for spec in specs:
        config.include(spec)
0b359de6edb5995644e1b492351b5a6eff68069c
1020a87ba3569c879478b6a88f73da606f204c34
/tests/generator/test_compression.py
c5c000d40ba6e08eaf9ff7457e1ef520b8fa4ca6
[ "Apache-2.0" ]
permissive
MIGPOOL/test-blockchain
deeceaa5d7c6d24e528092ef32036aff8149baff
567fd1265b6a27f2f4e21c7787e39072e4b7c085
refs/heads/main
2023-08-22T03:27:19.638361
2021-10-26T22:42:42
2021-10-26T22:42:42
null
0
0
null
null
null
null
UTF-8
Python
false
false
14,156
py
# flake8: noqa: F501
# Tests for transaction-generator compression: the Python helpers in
# bundle_tools plus the CLVM decompression puzzles they pair with.
from dataclasses import dataclass
from typing import List, Any
from unittest import TestCase

from greendoge.full_node.bundle_tools import (
    bundle_suitable_for_compression,
    compressed_coin_spend_entry_list,
    compressed_spend_bundle_solution,
    match_standard_transaction_at_any_index,
    simple_solution_generator,
    spend_bundle_to_serialized_coin_spend_entry_list,
)
from greendoge.full_node.generator import run_generator, create_generator_args
from greendoge.full_node.mempool_check_conditions import get_puzzle_and_solution_for_coin
from greendoge.types.blockchain_format.program import Program, SerializedProgram, INFINITE_COST
from greendoge.types.generator_types import BlockGenerator, CompressorArg, GeneratorArg
from greendoge.types.spend_bundle import SpendBundle
from greendoge.util.byte_types import hexstr_to_bytes
from greendoge.util.ints import uint32
from greendoge.wallet.puzzles.load_clvm import load_clvm
from tests.core.make_block_generator import make_spend_bundle

from clvm import SExp
import io
from clvm.serialize import sexp_from_stream
from clvm_tools import binutils

# Compiled CLVM programs exercised by the tests below.
TEST_GEN_DESERIALIZE = load_clvm("test_generator_deserialize.clvm", package_or_requirement="greendoge.wallet.puzzles")
DESERIALIZE_MOD = load_clvm("greendogelisp_deserialisation.clvm", package_or_requirement="greendoge.wallet.puzzles")

DECOMPRESS_PUZZLE = load_clvm("decompress_puzzle.clvm", package_or_requirement="greendoge.wallet.puzzles")
DECOMPRESS_CSE = load_clvm("decompress_coin_spend_entry.clvm", package_or_requirement="greendoge.wallet.puzzles")

DECOMPRESS_CSE_WITH_PREFIX = load_clvm(
    "decompress_coin_spend_entry_with_prefix.clvm", package_or_requirement="greendoge.wallet.puzzles"
)
DECOMPRESS_BLOCK = load_clvm("block_program_zero.clvm", package_or_requirement="greendoge.wallet.puzzles")
TEST_MULTIPLE = load_clvm("test_multiple_generator_input_arguments.clvm", package_or_requirement="greendoge.wallet.puzzles")

Nil = Program.from_bytes(b"\x80")

# A serialized generator containing one standard transaction; compressed
# generators below back-reference byte ranges inside it.
original_generator = hexstr_to_bytes(
    "ff01ffffffa00000000000000000000000000000000000000000000000000000000000000000ff830186a080ffffff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01b081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3ff018080ffff80ffff01ffff33ffa06b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9ff830186a08080ff8080808080"
)  # noqa

# Same generator with different junk prefixes, so the two references carry
# different start/end offsets.
gen1 = b"aaaaaaaaaa" + original_generator
gen2 = b"bb" + original_generator

FAKE_BLOCK_HEIGHT1 = uint32(100)
FAKE_BLOCK_HEIGHT2 = uint32(200)


@dataclass(frozen=True)
class MultipleCompressorArg:
    # Pair of backrefs plus an offset that splits the standard-transaction
    # prefix between them.
    arg: List[CompressorArg]
    split_offset: int


def create_multiple_ref_generator(args: MultipleCompressorArg, spend_bundle: SpendBundle) -> BlockGenerator:
    """
    Decompress a transaction by referencing bytes from multiple input generator references
    """
    compressed_cse_list = compressed_coin_spend_entry_list(spend_bundle)
    program = TEST_MULTIPLE.curry(
        DECOMPRESS_PUZZLE,
        DECOMPRESS_CSE_WITH_PREFIX,
        args.arg[0].start,
        args.arg[0].end - args.split_offset,
        args.arg[1].end - args.split_offset,
        args.arg[1].end,
        compressed_cse_list,
    )

    # TODO aqk: Improve ergonomics of CompressorArg -> GeneratorArg conversion
    generator_args = [
        GeneratorArg(FAKE_BLOCK_HEIGHT1, args.arg[0].generator),
        GeneratorArg(FAKE_BLOCK_HEIGHT2, args.arg[1].generator),
    ]
    return BlockGenerator(program, generator_args)


def spend_bundle_to_coin_spend_entry_list(bundle: SpendBundle) -> List[Any]:
    # Pure-Python reference serialization, compared below against the
    # production spend_bundle_to_serialized_coin_spend_entry_list path.
    r = []
    for coin_spend in bundle.coin_spends:
        entry = [
            coin_spend.coin.parent_coin_info,
            sexp_from_stream(io.BytesIO(bytes(coin_spend.puzzle_reveal)), SExp.to),
            coin_spend.coin.amount,
            sexp_from_stream(io.BytesIO(bytes(coin_spend.solution)), SExp.to),
        ]
        r.append(entry)
    return r


class TestCompression(TestCase):
    def test_spend_bundle_suitable(self):
        sb: SpendBundle = make_spend_bundle(1)
        assert bundle_suitable_for_compression(sb)

    def test_compress_spend_bundle(self):
        pass

    def test_multiple_input_gen_refs(self):
        # Every split of the shared prefix between the two references must
        # decompress to the same result.
        start1, end1 = match_standard_transaction_at_any_index(gen1)
        start2, end2 = match_standard_transaction_at_any_index(gen2)
        ca1 = CompressorArg(FAKE_BLOCK_HEIGHT1, SerializedProgram.from_bytes(gen1), start1, end1)
        ca2 = CompressorArg(FAKE_BLOCK_HEIGHT2, SerializedProgram.from_bytes(gen2), start2, end2)

        prefix_len1 = end1 - start1
        prefix_len2 = end2 - start2
        assert prefix_len1 == prefix_len2
        prefix_len = prefix_len1
        results = []
        for split_offset in range(prefix_len):
            gen_args = MultipleCompressorArg([ca1, ca2], split_offset)
            spend_bundle: SpendBundle = make_spend_bundle(1)
            multi_gen = create_multiple_ref_generator(gen_args, spend_bundle)
            cost, result = run_generator(multi_gen, INFINITE_COST)
            results.append(result)
            assert result is not None
            assert cost > 0
        assert all(r == results[0] for r in results)

    def test_compressed_block_results(self):
        # Compressed and uncompressed solutions must produce identical results.
        sb: SpendBundle = make_spend_bundle(1)
        start, end = match_standard_transaction_at_any_index(original_generator)
        ca = CompressorArg(uint32(0), SerializedProgram.from_bytes(original_generator), start, end)
        c = compressed_spend_bundle_solution(ca, sb)
        s = simple_solution_generator(sb)
        assert c != s
        cost_c, result_c = run_generator(c, INFINITE_COST)
        cost_s, result_s = run_generator(s, INFINITE_COST)
        print(result_c)
        assert result_c is not None
        assert result_s is not None
        assert result_c == result_s

    def test_get_removals_for_single_coin(self):
        sb: SpendBundle = make_spend_bundle(1)
        start, end = match_standard_transaction_at_any_index(original_generator)
        ca = CompressorArg(uint32(0), SerializedProgram.from_bytes(original_generator), start, end)
        c = compressed_spend_bundle_solution(ca, sb)
        removal = sb.coin_spends[0].coin.name()
        error, puzzle, solution = get_puzzle_and_solution_for_coin(c, removal, INFINITE_COST)
        assert error is None
        assert bytes(puzzle) == bytes(sb.coin_spends[0].puzzle_reveal)
        assert bytes(solution) == bytes(sb.coin_spends[0].solution)
        # Test non compressed generator as well
        s = simple_solution_generator(sb)
        error, puzzle, solution = get_puzzle_and_solution_for_coin(s, removal, INFINITE_COST)
        assert error is None
        assert bytes(puzzle) == bytes(sb.coin_spends[0].puzzle_reveal)
        assert bytes(solution) == bytes(sb.coin_spends[0].solution)

    def test_spend_byndle_coin_spend(self):
        # Reference Python serialization vs the production implementation.
        for i in range(0, 10):
            sb: SpendBundle = make_spend_bundle(i)
            cs1 = SExp.to(spend_bundle_to_coin_spend_entry_list(sb)).as_bin()
            cs2 = spend_bundle_to_serialized_coin_spend_entry_list(sb)
            assert cs1 == cs2


class TestDecompression(TestCase):
    def __init__(self, *args, **kwargs):
        super(TestDecompression, self).__init__(*args, **kwargs)
        self.maxDiff = None

    def test_deserialization(self):
        self.maxDiff = None
        cost, out = DESERIALIZE_MOD.run_with_cost(INFINITE_COST, [bytes(Program.to("hello"))])
        assert out == Program.to("hello")

    def test_deserialization_as_argument(self):
        self.maxDiff = None
        cost, out = TEST_GEN_DESERIALIZE.run_with_cost(
            INFINITE_COST, [DESERIALIZE_MOD, Nil, bytes(Program.to("hello"))]
        )
        print(bytes(Program.to("hello")))
        print()
        print(out)
        assert out == Program.to("hello")

    def test_decompress_puzzle(self):
        cost, out = DECOMPRESS_PUZZLE.run_with_cost(
            INFINITE_COST, [DESERIALIZE_MOD, b"\xff", bytes(Program.to("pubkey")), b"\x80"]
        )

        print()
        print(out)

    # An empty CSE is invalid. (An empty CSE list may be okay)
    # def test_decompress_empty_cse(self):
    #    cse0 = binutils.assemble("()")
    #    cost, out = DECOMPRESS_CSE.run_with_cost(INFINITE_COST, [DESERIALIZE_MOD, DECOMPRESS_PUZZLE, b"\xff", b"\x80", cse0])
    #    print()
    #    print(out)

    def test_decompress_cse(self):
        """Decompress a single CSE / CoinSpendEntry"""
        cse0 = binutils.assemble(
            "((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ())))"
        )  # noqa
        cost, out = DECOMPRESS_CSE.run_with_cost(
            INFINITE_COST, [DESERIALIZE_MOD, DECOMPRESS_PUZZLE, b"\xff", b"\x80", cse0]
        )

        print()
        print(out)

    def test_decompress_cse_with_prefix(self):
        cse0 = binutils.assemble(
            "((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ())))"
        )  # noqa

        start = 2 + 44
        end = start + 238
        prefix = original_generator[start:end]
        # (deserialize decompress_puzzle puzzle_prefix cse)
        cost, out = DECOMPRESS_CSE_WITH_PREFIX.run_with_cost(
            INFINITE_COST, [DESERIALIZE_MOD, DECOMPRESS_PUZZLE, prefix, cse0]
        )

        print()
        print(out)

    def test_block_program_zero(self):
        "Decompress a list of CSEs"
        self.maxDiff = None
        cse1 = binutils.assemble(
            "(((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ()))))"
        )  # noqa
        cse2 = binutils.assemble(
            """
(
  ((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0)
   (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ()))
  )
  ((0x0000000000000000000000000000000000000000000000000000000000000001 0x0186a0)
   (0xb0a6207f5173ec41491d9f2c1b8fff5579e13703077e0eaca8fe587669dcccf51e9209a6b65576845ece5f7c2f3229e7e3 (() (q (51 0x24254a3efc3ebfac9979bbe0d615e2eda043aa329905f65b63846fa24149e2b6 0x0186a0)) ())))
)
"""
        )  # noqa

        start = 2 + 44
        end = start + 238

        # (mod (decompress_puzzle decompress_coin_spend_entry start end compressed_cses deserialize generator_list reserved_arg)
        # cost, out = DECOMPRESS_BLOCK.run_with_cost(INFINITE_COST, [DECOMPRESS_PUZZLE, DECOMPRESS_CSE, start, Program.to(end), cse0, DESERIALIZE_MOD, bytes(original_generator)])
        cost, out = DECOMPRESS_BLOCK.run_with_cost(
            INFINITE_COST,
            [
                DECOMPRESS_PUZZLE,
                DECOMPRESS_CSE_WITH_PREFIX,
                start,
                Program.to(end),
                cse2,
                DESERIALIZE_MOD,
                [bytes(original_generator)],
            ],
        )

        print()
        print(out)

    def test_block_program_zero_with_curry(self):
        self.maxDiff = None
        cse1 = binutils.assemble(
            "(((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0) (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ()))))"
        )  # noqa
        cse2 = binutils.assemble(
            """
(
  ((0x0000000000000000000000000000000000000000000000000000000000000000 0x0186a0)
   (0xb081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3 (() (q (51 0x6b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9 0x0186a0)) ()))
  )
  ((0x0000000000000000000000000000000000000000000000000000000000000001 0x0186a0)
   (0xb0a6207f5173ec41491d9f2c1b8fff5579e13703077e0eaca8fe587669dcccf51e9209a6b65576845ece5f7c2f3229e7e3 (() (q (51 0x24254a3efc3ebfac9979bbe0d615e2eda043aa329905f65b63846fa24149e2b6 0x0186a0)) ())))
)
"""
        )  # noqa

        start = 2 + 44
        end = start + 238

        # (mod (decompress_puzzle decompress_coin_spend_entry start end compressed_cses deserialize generator_list reserved_arg)
        # cost, out = DECOMPRESS_BLOCK.run_with_cost(INFINITE_COST, [DECOMPRESS_PUZZLE, DECOMPRESS_CSE, start, Program.to(end), cse0, DESERIALIZE_MOD, bytes(original_generator)])
        p = DECOMPRESS_BLOCK.curry(DECOMPRESS_PUZZLE, DECOMPRESS_CSE_WITH_PREFIX, start, Program.to(end))
        cost, out = p.run_with_cost(INFINITE_COST, [cse2, DESERIALIZE_MOD, [bytes(original_generator)]])

        print()
        print(p)
        print(out)

        p_with_cses = DECOMPRESS_BLOCK.curry(
            DECOMPRESS_PUZZLE, DECOMPRESS_CSE_WITH_PREFIX, start, Program.to(end), cse2, DESERIALIZE_MOD
        )
        generator_args = create_generator_args([SerializedProgram.from_bytes(original_generator)])
        cost, out = p_with_cses.run_with_cost(INFINITE_COST, generator_args)

        print()
        print(p_with_cses)
        print(out)
6da8896820cb21775182cc8b2f30d43f369eae43
803176d4f2798989623c62f091f0d5cca687aad3
/sorting_recursive.py
7d2426d95ea2377f69e96f89c1c668f1d448098d
[]
no_license
Tylerholland12/CS2-1
79986bb437e4c517d80eb9ba198226cea3e83471
a095d23c48c19926ad6fd9be55fb980904dcc495
refs/heads/main
2023-01-31T00:20:48.603002
2020-12-08T14:33:42
2020-12-08T14:33:42
304,582,069
1
0
null
null
null
null
UTF-8
Python
false
false
2,944
py
#!python
"""Recursive sorting algorithms: merge sort and quicksort."""


def merge(items1, items2):
    """Merge two lists, each assumed to already be in sorted order, and
    return a NEW list containing all items in sorted order.

    Running time: O(n + m) — each element of both inputs is visited once.
    Memory usage: O(n + m) for the output list."""
    merged = []
    len_items1 = len(items1)
    len_items2 = len(items2)
    i = j = 0
    # Take the smaller head element until one input is exhausted; `<=` keeps
    # the merge stable (ties come from items1 first).
    while i < len_items1 and j < len_items2:
        if items1[i] <= items2[j]:
            merged.append(items1[i])
            i += 1
        else:
            merged.append(items2[j])
            j += 1
    # Exactly one of these slices is non-empty; both are already sorted.
    merged.extend(items1[i:])
    merged.extend(items2[j:])
    return merged


def merge_sort(items):
    """Sort the given items by splitting the list into two halves, sorting
    each recursively, and merging the results into a new sorted list.

    Running time: O(n log n) in all cases — log n levels of splitting, O(n)
    merge work per level.
    Memory usage: O(n) for the merged output (plus O(log n) recursion)."""
    # Base case: zero or one element is already sorted.
    if len(items) <= 1:
        return items
    mid = len(items) // 2
    left = merge_sort(items[:mid])
    right = merge_sort(items[mid:])
    return merge(left, right)


def quick_sort(items, low=None, high=None):
    """Return a new sorted list built by partitioning around the last element
    as pivot and recursively sorting each partition.

    `low`/`high` are accepted for API compatibility but unused by this
    out-of-place implementation.

    Best case running time: O(n log n) when pivots split evenly.
    Worst case running time: O(n^2) on already-sorted input (last-element pivot).
    Memory usage: O(n) per level for the partition lists.

    BUG FIX: the original popped the pivot off the caller's list, silently
    removing its last element; the input is no longer mutated."""
    if len(items) <= 1:
        return items
    pivot = items[-1]
    rest = items[:-1]
    low_part = [item for item in rest if item <= pivot]
    high_part = [item for item in rest if item > pivot]
    return quick_sort(low_part) + [pivot] + quick_sort(high_part)


if __name__ == "__main__":
    items = [12, 23, 5, 2, 1, 43, 6, 34, 9]
    # quick_sort no longer mutates `items`, so merge_sort sees the full list.
    print(quick_sort(items))
    print(merge_sort(items))
7138ed2a849354335f6674e80424ccc1659246e3
307e52d79c9068a2648ae82bbe11cd58733bba37
/Convert/ConvertTruth.py
2688385564e86b4c2474fb0ca6454547eb8a182e
[]
no_license
greatofdream/Recon1t
0aa775c43dcfa5b3da7b5894e2567fbe8e7b2991
80e58ba3c2c23f1efa962d02fcb2205a95aa716f
refs/heads/master
2022-11-09T14:12:55.747488
2020-06-09T02:43:24
2020-06-09T02:43:24
263,953,536
0
0
null
2020-05-14T15:31:27
2020-05-14T15:31:26
null
UTF-8
Python
false
false
2,440
py
# Convert ROOT file to HDF5 file
"""Read MC truth from a ROOT file and write it to two PyTables tables.

Usage: python ConvertTruth.py MCFileName outputFileName
"""
import numpy as np
import ROOT
import sys
import os
import tables


# Define the database columns
class TruthData(tables.IsDescription):
    # Per-primary-particle truth: merged kinetic energy, vertex and momentum.
    E = tables.Float64Col(pos=0)
    x = tables.Float64Col(pos=1)
    y = tables.Float64Col(pos=2)
    z = tables.Float64Col(pos=3)
    px = tables.Float64Col(pos=4)
    py = tables.Float64Col(pos=5)
    pz = tables.Float64Col(pos=6)


class GroundTruthData(tables.IsDescription):
    # Per-photoelectron truth: trigger, PMT channel and timing quantities.
    EventID = tables.Int64Col(pos=0)
    ChannelID = tables.Int64Col(pos=1)
    PETime = tables.Float64Col(pos=2)
    photonTime = tables.Float64Col(pos=3)
    PulseTime = tables.Float64Col(pos=4)
    dETime = tables.Float64Col(pos=5)


if len(sys.argv) != 3:
    # BUG FIX: corrected "Wront" typo in the error message.
    print("Wrong arguments!")
    print("Usage: python ConvertTruth.py MCFileName outputFileName")
    sys.exit(1)

baseFileName = sys.argv[1]
outputFileName = sys.argv[2]
ROOT.PyConfig.IgnoreCommandLineOptions = True

# Create the output file and the group
h5file = tables.open_file(outputFileName, mode="w", title="OneTonDetector",
                          filters=tables.Filters(complevel=9))
group = "/"

# Create tables.
GroundTruthTable = h5file.create_table(group, "GroundTruth", GroundTruthData, "GroundTruth")
groundtruth = GroundTruthTable.row
# BUG FIX: the table handle no longer shadows the TruthData description class
# (the original rebound the class name to the table object).
TruthTable = h5file.create_table(group, "TruthData", TruthData, "TruthData")
truthdata = TruthTable.row

# Loop for ROOT files.
t = ROOT.TChain("Readout")
tTruth = ROOT.TChain("SimTriggerInfo")
tTruth.Add(baseFileName)
t.Add(baseFileName)

# Loop for event
for event in tTruth:
    for truthinfo in event.truthList:
        truthdata['E'] = truthinfo.EkMerged
        truthdata['x'] = truthinfo.x
        truthdata['y'] = truthinfo.y
        truthdata['z'] = truthinfo.z
        # One row per primary particle, sharing the vertex/energy above.
        for px in truthinfo.PrimaryParticleList:
            truthdata['px'] = px.px
            truthdata['py'] = px.py
            truthdata['pz'] = px.pz
            truthdata.append()
    for PE in event.PEList:
        groundtruth['EventID'] = event.TriggerNo
        groundtruth['ChannelID'] = PE.PMTId
        groundtruth['PETime'] = PE.HitPosInWindow
        groundtruth['photonTime'] = PE.photonTime
        groundtruth['PulseTime'] = PE.PulseTime
        groundtruth['dETime'] = PE.dETime
        groundtruth.append()

# Flush into the output file (close() flushes the remaining tables).
GroundTruthTable.flush()
TruthTable.flush()
h5file.close()
ba5d12e3a9f281a603a4f3fc0b6ae61ff59e2ad6
b05bd7c104a51910c6ed9d6f0e8d039ffa108f2b
/carros/migrations/0004_auto_20201204_2106.py
c25e4f02c0364f388b0077ad46c71811b1b44762
[]
no_license
BrunoVittor/TesteGregory
76e12585d4532dc8ab4836c567b5ba56469139e5
2c7e3afdb2a62d0464189153a9ab150d69d89083
refs/heads/master
2023-04-01T22:56:49.422893
2021-03-31T22:49:59
2021-03-31T22:49:59
334,147,980
1
0
null
null
null
null
UTF-8
Python
false
false
391
py
# Generated by Django 2.2 on 2020-12-04 21:06 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('carros', '0003_auto_20201204_2103'), ] operations = [ migrations.AlterField( model_name='carros', name='ano', field=models.IntegerField(blank=True, null=True), ), ]
44f150c666e75aa32b284dd253d435323b5f0de0
7dba60ae27ff247705607839348f017b85f5da16
/nyumbax/migrations/0010_remove_hood_user.py
9bd48cf28d900417152b7edac6e33f76bd08d027
[ "MIT" ]
permissive
BwanaQ/nyumba-kumi
7edccb6745ede6d9f6faf5bd8c0dcf6e24726991
c264b0941c77a4d7175a2dc5380723bea1acf380
refs/heads/master
2023-04-05T09:32:34.867456
2021-04-13T15:54:16
2021-04-13T15:54:16
356,136,458
0
0
null
null
null
null
UTF-8
Python
false
false
326
py
# Generated by Django 3.2 on 2021-04-13 04:31 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('nyumbax', '0009_rename_name_hood_title'), ] operations = [ migrations.RemoveField( model_name='hood', name='user', ), ]
42ae7af6024d205e88ad2aa61c2d8c5c3a071dc3
92cc5c61799e93446d6562a6cc9fb74e9220c6c7
/mac-graph/cell/mac_cell.py
a159f0e137574775b4d6c51682a27dc300eb9ca7
[ "Unlicense" ]
permissive
houqp/mac-graph
2728c89605b71e7ac610303e7100797787f0fa30
ae91e5708d2a63d157a397b608acf720f4c4d840
refs/heads/master
2020-03-22T20:41:10.786619
2018-07-11T19:20:41
2018-07-11T19:20:41
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,995
py
import tensorflow as tf

from .read_cell import *
from .memory_cell import *
from .control_cell import *
from .output_cell import *
from ..util import *


class MACCell(tf.nn.rnn_cell.RNNCell):
    """One reasoning step of a MAC-style network packaged as an RNN cell.

    Each step threads a (control_state, memory_state) pair through the
    control, read, memory and output sub-cells defined in the sibling modules.
    """

    def __init__(self, args, features, question_state, question_tokens, vocab_embedding):
        # Config mapping; read below for "bus_width" and "answer_classes".
        self.args = args
        # Knowledge-base features consumed by the control/read/output cells.
        self.features = features
        self.question_state = question_state
        self.question_tokens = question_tokens
        self.vocab_embedding = vocab_embedding

        # NOTE(review): this passes the cell itself as RNNCell's first
        # positional ctor argument; it looks like `super().__init__()` was
        # intended — confirm against the TF version in use before changing.
        super().__init__(self)

    def __call__(self, inputs, state):
        """Run this RNN cell on inputs, starting from the given state.

        Args:
                inputs: **Unused!** `2-D` tensor with shape `[batch_size, input_size]`.
                state: if `self.state_size` is an integer, this should be a `2-D Tensor`
                        with shape `[batch_size, self.state_size]`.  Otherwise, if
                        `self.state_size` is a tuple of integers, this should be a tuple
                        with shapes `[batch_size, s] for s in self.state_size`.
                scope: VariableScope for the created subgraph; defaults to class name.

        Returns:
                A pair containing:
                - Output: A `2-D` tensor with shape `[batch_size, self.output_size]`.
                - New state: Either a single `2-D` tensor, or a tuple of tensors matching
                        the arity and shapes of `state`.
        """
        # Unpack the per-step (control, memory) state pair.
        in_control_state, in_memory_state = state

        # Control: update the control state from the question.
        out_control_state = control_cell(self.args, self.features, in_control_state, self.question_state, self.question_tokens)
        # Read: retrieve from the features, conditioned on memory and control.
        read = read_cell(self.args, self.features, in_memory_state, out_control_state, self.vocab_embedding)
        # Memory: integrate what was read into the memory state.
        out_memory_state = memory_cell(self.args, in_memory_state, read, out_control_state)
        # Output: produce this step's output from question + updated memory.
        output = output_cell(self.args, self.features, self.question_state, out_memory_state)

        return output, (out_control_state, out_memory_state)

    @property
    def state_size(self):
        """
        Returns a size tuple (control_state, memory_state)
        """
        return (self.args["bus_width"], self.args["bus_width"])

    @property
    def output_size(self):
        # Width of the per-step output (one score per answer class).
        return self.args["answer_classes"]
7a6721be75418ccf2e20180da4156ca261b1a8a8
7573f7485a5039f6374d5740009cc33ecf83ef62
/lishengchun/uploadsets.py
278e553db694d23144ae2d88742e55d0574ca6a1
[]
no_license
hugleecool/lishengchun
5323eb851b7d8b007655f2e2d1ba92026861a134
7c1ebb4bc746320f3c7605045a8300220c97cb39
refs/heads/master
2021-01-18T13:22:36.737105
2014-02-15T10:58:29
2014-02-15T10:58:29
null
0
0
null
null
null
null
UTF-8
Python
false
false
109
py
# coding: utf-8 from flask.ext.uploads import UploadSet, IMAGES workimages = UploadSet('workimages', IMAGES)
0a2d71946f7a3beb7d3039832ef4d851ca101ab9
6da19be45ff986768eb820f11691977cb3c84772
/Python/5_Advance_buily_in_functions/501_generator_example/app.py
f86cf0322af9c70c5287d5b23541ecb63ab41ed6
[]
no_license
alexp01/trainings
9e72f3a571292b79d2b1518f564d2dc0a774ef41
9d8daee16f15e0d7851fab12ab3d2505386a686c
refs/heads/master
2023-05-04T23:37:13.243691
2023-05-02T08:02:53
2023-05-02T08:02:53
272,425,687
0
0
null
null
null
null
UTF-8
Python
false
false
867
py
# https://www.udemy.com/course/the-complete-python-course/learn/lecture/9445596#questions

# yield can be used to temporarily suspend a function, so that you can
# continue it afterwards
from typing import Iterator


def get_100_numbers() -> Iterator[int]:
    """Yield the integers 0..99 one at a time.

    ``yield`` acts like a return, but the paused frame (including ``i``) is
    kept alive; each ``next()`` call resumes right after the ``yield`` and
    re-evaluates the ``while`` condition.

    BUG FIX: the return annotation was ``-> int`` — a generator function
    returns an iterator of ints, not an int.
    """
    i = 0
    while i < 100:
        yield i
        i += 1


x = get_100_numbers()
print(x)
print(next(x))  # resumes the generator: yields 0
print(next(x))  # resumes again: yields 1
print(list(x))  # drains the remaining values 2..99
3aa84a12c555bb02030d3ec9127a6ee3676a3089
3086b5195cb4dbb27aa73a24f6bf964440dff422
/tools/fileinfo/detection/packers/pe-pack/test.py
0d3de334959002d7c06f44c3a66d04733d5aa5ee
[ "MIT", "Python-2.0" ]
permissive
avast/retdec-regression-tests
8c6ea27ce2f5d0dfa6e6c845c38b56fa5bdfcc23
6662fed9d73cb7bc882ea69fd2429d5464950e39
refs/heads/master
2023-08-31T05:53:16.967008
2023-08-07T13:33:00
2023-08-15T08:33:07
113,974,761
7
10
MIT
2023-08-15T08:33:08
2017-12-12T10:11:00
Python
UTF-8
Python
false
false
295
py
from regression_tests import * class Test(Test): settings = TestSettings( tool='fileinfo', input='fact_rec.ex' ) def test_correctly_analyzes_input_file(self): assert self.fileinfo.succeeded assert self.fileinfo.output.contains(r'.*PE-PACK \(1\.0*')
b170bf1a5dd2f6564204d32a76592625f9e5c628
ac7c02f29a837fdd67d2bdc77bba182080e98ed8
/codekata/3rdcharacters.py
ce51088ffcd802a6c7f48e27801cff822fabc6db
[]
no_license
YaminiNarayanan-359/guvi
7630c309a86365e4367fda1ddab4e966e7d1ac5b
a52b6353100b4e9b83a003e6a327fbfb174daac4
refs/heads/master
2020-06-03T00:08:00.389609
2019-07-16T06:59:53
2019-07-16T06:59:53
191,355,064
0
0
null
null
null
null
UTF-8
Python
false
false
70
py
d=input() for i in range(0,len(d)): if(i%3==0): print(k,end="")
520d8b4de76bc22b176016cd250e44aa8922ed31
3a8c2bd3b8df9054ed0c26f48616209859faa719
/Challenges/binaryTreeRightSideView.py
5ba301ff4e30397260ef87ec8389c5ebedd932f9
[]
no_license
AusCommsteam/Algorithm-and-Data-Structures-and-Coding-Challenges
684f1ca2f9ee3c49d0b17ecb1e80707efe305c82
98fb752c574a6ec5961a274e41a44275b56da194
refs/heads/master
2023-09-01T23:58:15.514231
2021-09-10T12:42:03
2021-09-10T12:42:03
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,354
py
""" Binary Tree Right Side View Given a binary tree, imagine yourself standing on the right side of it, return the values of the nodes you can see ordered from top to bottom. Example: Input: [1,2,3,null,5,null,4] Output: [1, 3, 4] Explanation: 1 <--- / \ 2 3 <--- \ \ 5 4 <--- """ # Definition for a binary tree node. # class TreeNode: # def __init__(self, x): # self.val = x # self.left = None # self.right = None """ Time Complexity O(N) Space Complexity O(N) """ class Solution: def rightSideView(self, root: TreeNode) -> List[int]: if not root: return [] ans = [root.val] left = ans + self.rightSideView(root.left) right = ans + self.rightSideView(root.right) if len(right) > len(left): return right return right + left[len(right):] """ BFS """ from collections import deque class Solution: def rightSideView(self, root): if not root: return [] q, res = deque([root]), [] while q: res.append(q[-1].val) for _ in range(len(q)): cur = q.popleft() if cur.left: q.append(cur.left) if cur.right: q.append(cur.right) return res
3f343c058cf96d33a5d3b5c7981b91edc0493874
7773ea6f465ffecfd4f9821aad56ee1eab90d97a
/python/testData/surround/SurroundWithWhile_after.py
43e32582970098345e4768f7e4af14d29e5282d9
[ "Apache-2.0" ]
permissive
aghasyedbilal/intellij-community
5fa14a8bb62a037c0d2764fb172e8109a3db471f
fa602b2874ea4eb59442f9937b952dcb55910b6e
refs/heads/master
2023-04-10T20:55:27.988445
2020-05-03T22:00:26
2020-05-03T22:26:23
261,074,802
2
0
Apache-2.0
2020-05-04T03:48:36
2020-05-04T03:48:35
null
UTF-8
Python
false
false
76
py
def foo(): while <selection>True</selection>: print "hello"
177e0fb844c10dfa74004b38b345e8812b831e03
0ce9226dc0622e1edd93e57dcf2e88eaf77cedd6
/leetcode/explore/October/11_subsquence_disnct.py
f9dff5ee942c4736487a7c15ad7c7a7aeeb83767
[]
no_license
minhthe/algo-and-ds-practice
6b09fc2174d58f8ba39ceabd80e2525ab95fe7ea
3a9b882af8412859f204569ca11808b638acf29d
refs/heads/master
2023-01-31T18:49:31.773115
2020-12-18T06:26:47
2020-12-18T06:26:47
298,933,489
0
0
null
null
null
null
UTF-8
Python
false
false
444
py
''' Greading approach: if the char you want to add, and this char not the last, -> consider will add later not NOT to achive lexicographical order ''' class Solution: def removeDuplicateLetters(self, s: str) -> str: last_index = {c: i for i,c in enumerate(s)} stk = [] for i, c in enumerate(s): if c in stk: continue while stk and stk[-1] > c and last_index[stk[-1]] > i: stk.pop() stk.append(c) return ''.join(stk)
cda6a6e5e1b60598a1893d844bcba02707ddbbb7
282d0a84b45b12359b96bbf0b1d7ca9ee0cb5d19
/Malware1/venv/Lib/site-packages/scipy/spatial/setup.py
17994e6fb084330c7b91f8e312a70465a528a0ff
[]
no_license
sameerakhtar/CyberSecurity
9cfe58df98495eac6e4e2708e34e70b7e4c055d3
594973df27b4e1a43f8faba0140ce7d6c6618f93
refs/heads/master
2022-12-11T11:53:40.875462
2020-09-07T23:13:22
2020-09-07T23:13:22
293,598,094
0
0
null
null
null
null
UTF-8
Python
false
false
129
py
version https://git-lfs.github.com/spec/v1 oid sha256:ccb99ae81e55c20bfd073d894471ea6c5a51f1cc27e19fea1bd2ebdfa959f8cd size 2935
121d743af8ee8b7ac6eff95e4756e10c11b93dfc
78e93ca71a54bd11b6f51ef3936044e08782c7e3
/batchkit_examples/speech_sdk/work_item_processor.py
cb1108528d05baf51c553dc4922e2052d930bdf2
[ "MIT", "LicenseRef-scancode-generic-cla" ]
permissive
microsoft/batch-processing-kit
c0134e1e395fdf7f2938101cea542dbb8d3c1f1f
8b0a5492361ff9473ab66c2f64aaccd5340f2f62
refs/heads/master
2023-09-02T01:54:36.226987
2022-10-27T03:40:34
2022-10-27T03:40:34
265,635,442
29
19
MIT
2023-06-02T10:38:06
2020-05-20T17:14:45
Python
UTF-8
Python
false
false
1,170
py
# Copyright (c) Microsoft Corporation. # Licensed under the MIT License. import multiprocessing from typing import List from batchkit.logger import LogEventQueue from batchkit.work_item import WorkItemRequest, WorkItemResult from batchkit.work_item_processor import WorkItemProcessor from batchkit_examples.speech_sdk.recognize import run_recognizer from batchkit_examples.speech_sdk.work_item import SpeechSDKWorkItemRequest class SpeechSDKWorkItemProcessor(WorkItemProcessor): def __init__(self): super().__init__() def work_item_types(self) -> List[type]: return [SpeechSDKWorkItemRequest] def process_impl(self, work_item: WorkItemRequest, endpoint_config: dict, rtf: float, log_event_queue: LogEventQueue, cancellation_token: multiprocessing.Event, global_workitem_lock: multiprocessing.RLock) -> WorkItemResult: assert isinstance(work_item, SpeechSDKWorkItemRequest) return run_recognizer( work_item, rtf, endpoint_config, log_event_queue, cancellation_token )
66ff066deef611e0bc8dba47f853afe25757b4be
62f14fe947513ddf1e4ca8c26ae5d02099abd6cc
/tests/test_compressible.py
a49ff3d21e1e7f78b038956e6e933a00ac0f8d32
[ "MIT", "LicenseRef-scancode-unknown-license-reference" ]
permissive
bencleary/fluids
79d993366cea94a87cbc3ac3e5f01311fa81956e
b1d0e00877b36c14c52d7d32c45f9359c0366459
refs/heads/master
2022-11-08T02:47:02.896748
2020-06-20T14:41:10
2020-06-20T14:41:10
null
0
0
null
null
null
null
UTF-8
Python
false
false
12,252
py
# -*- coding: utf-8 -*- '''Chemical Engineering Design Library (ChEDL). Utilities for process modeling. Copyright (C) 2016, 2017 Caleb Bell <[email protected]> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.''' from fluids import * from fluids.numerics import assert_close, assert_close1d import pytest def test_isothermal_work_compression(): assert_close(isothermal_work_compression(1E5, 1E6, 300), 5743.425357533477, rtol=1e-05) def test_isentropic_work_compression(): dH = isentropic_work_compression(P1=1E5, P2=1E6, T1=300, k=1.4, eta=1) assert_close(dH, 8125.161295388634, rtol=1e-05) dH = isentropic_work_compression(P1=1E5, P2=1E6, T1=300, k=1.4, eta=0.78) assert_close(dH, 10416.873455626454, rtol=1e-05) dH = isentropic_work_compression(P1=1E5, P2=1E6, T1=300, k=1.4, eta=0.78, Z=0.9) assert_close(dH, 9375.186110063809, rtol=1e-05) # Other solutions - P1, P2, and eta P1 = isentropic_work_compression(W=9375.186110063809, P2=1E6, T1=300, k=1.4, eta=0.78, Z=0.9) assert_close(P1, 1E5, rtol=1E-5) P2 = 
isentropic_work_compression(W=9375.186110063809, P1=1E5, T1=300, k=1.4, eta=0.78, Z=0.9) assert_close(P2, 1E6, rtol=1E-5) eta = isentropic_work_compression(W=9375.186110063809, P1=1E5, P2=1E6, T1=300, k=1.4, Z=0.9, eta=None) assert_close(eta, 0.78, rtol=1E-5) with pytest.raises(Exception): isentropic_work_compression(P1=1E5, P2=1E6, k=1.4, T1=None) def test_isentropic_T_rise_compression(): T2 = isentropic_T_rise_compression(286.8, 54050, 432400, 1.4) assert_close(T2, 519.5230938217768, rtol=1e-05) T2 = isentropic_T_rise_compression(286.8, 54050, 432400, 1.4, eta=0.78) assert_close(T2, 585.1629407971498, rtol=1e-05) # Test against the simpler formula for eta=1: # T2 = T2*(P2/P1)^((k-1)/k) T2_ideal = 286.8*((432400/54050)**((1.4-1)/1.4)) assert_close(T2_ideal, 519.5230938217768, rtol=1e-05) def test_isentropic_efficiency(): eta_s = isentropic_efficiency(1E5, 1E6, 1.4, eta_p=0.78) assert_close(eta_s, 0.7027614191263858) eta_p = isentropic_efficiency(1E5, 1E6, 1.4, eta_s=0.7027614191263858) assert_close(eta_p, 0.78) with pytest.raises(Exception): isentropic_efficiency(1E5, 1E6, 1.4) # Example 7.6 of the reference: eta_s = isentropic_efficiency(1E5, 3E5, 1.4, eta_p=0.75) assert_close(eta_s, 0.7095085923615653) eta_p = isentropic_efficiency(1E5, 3E5, 1.4, eta_s=eta_s) assert_close(eta_p, 0.75) def test_polytropic_exponent(): assert_close(polytropic_exponent(1.4, eta_p=0.78), 1.5780346820809246) assert_close(polytropic_exponent(1.4, n=1.5780346820809246), 0.78) with pytest.raises(Exception): polytropic_exponent(1.4) def test_compressible(): T = T_critical_flow(473, 1.289) assert_close(T, 413.2809086937528) P = P_critical_flow(1400000, 1.289) assert_close(P, 766812.9022792266) assert not is_critical_flow(670E3, 532E3, 1.11) assert is_critical_flow(670E3, 101E3, 1.11) SE = stagnation_energy(125) assert_close(SE, 7812.5) PST = P_stagnation(54050., 255.7, 286.8, 1.4) assert_close(PST, 80772.80495900588) Tst = T_stagnation(286.8, 54050, 54050*8, 1.4) assert_close(Tst, 
519.5230938217768) Tstid = T_stagnation_ideal(255.7, 250, 1005.) assert_close(Tstid, 286.79452736318405) def test_Panhandle_A(): # Example 7-18 Gas of Crane TP 410M D = 0.340 P1 = 90E5 P2 = 20E5 L = 160E3 SG=0.693 Tavg = 277.15 Q = 42.56082051195928 # Test all combinations of relevant missing inputs assert_close(Panhandle_A(D=D, P1=P1, P2=P2, L=L, SG=SG, Tavg=Tavg), Q) assert_close(Panhandle_A(D=D, Q=Q, P2=P2, L=L, SG=SG, Tavg=Tavg), P1) assert_close(Panhandle_A(D=D, Q=Q, P1=P1, L=L, SG=SG, Tavg=Tavg), P2) assert_close(Panhandle_A(D=D, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), L) assert_close(Panhandle_A(L=L, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), D) with pytest.raises(Exception): Panhandle_A(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15) def test_Panhandle_B(): # Example 7-18 Gas of Crane TP 410M D = 0.340 P1 = 90E5 P2 = 20E5 L = 160E3 SG=0.693 Tavg = 277.15 Q = 42.35366178004172 # Test all combinations of relevant missing inputs assert_close(Panhandle_B(D=D, P1=P1, P2=P2, L=L, SG=SG, Tavg=Tavg), Q) assert_close(Panhandle_B(D=D, Q=Q, P2=P2, L=L, SG=SG, Tavg=Tavg), P1) assert_close(Panhandle_B(D=D, Q=Q, P1=P1, L=L, SG=SG, Tavg=Tavg), P2) assert_close(Panhandle_B(D=D, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), L) assert_close(Panhandle_B(L=L, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), D) with pytest.raises(Exception): Panhandle_B(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15) def test_Weymouth(): D = 0.340 P1 = 90E5 P2 = 20E5 L = 160E3 SG=0.693 Tavg = 277.15 Q = 32.07729055913029 assert_close(Weymouth(D=D, P1=P1, P2=P2, L=L, SG=SG, Tavg=Tavg), Q) assert_close(Weymouth(D=D, Q=Q, P2=P2, L=L, SG=SG, Tavg=Tavg), P1) assert_close(Weymouth(D=D, Q=Q, P1=P1, L=L, SG=SG, Tavg=Tavg), P2) assert_close(Weymouth(D=D, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), L) assert_close(Weymouth(L=L, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), D) with pytest.raises(Exception): Weymouth(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15) def test_Spitzglass_high(): D = 0.340 P1 = 90E5 P2 = 20E5 L = 160E3 SG=0.693 
Tavg = 277.15 Q = 29.42670246281681 assert_close(Spitzglass_high(D=D, P1=P1, P2=P2, L=L, SG=SG, Tavg=Tavg), Q) assert_close(Spitzglass_high(D=D, Q=Q, P2=P2, L=L, SG=SG, Tavg=Tavg), P1) assert_close(Spitzglass_high(D=D, Q=Q, P1=P1, L=L, SG=SG, Tavg=Tavg), P2) assert_close(Spitzglass_high(D=D, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), L) assert_close(Spitzglass_high(L=L, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), D) with pytest.raises(Exception): Spitzglass_high(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15) def test_Spitzglass_low(): D = 0.154051 P1 = 6720.3199 P2 = 0 L = 54.864 SG=0.6 Tavg = 288.7 Q = 0.9488775242530617 assert_close(Spitzglass_low(D=D, P1=P1, P2=P2, L=L, SG=SG, Tavg=Tavg), Q) assert_close(Spitzglass_low(D=D, Q=Q, P2=P2, L=L, SG=SG, Tavg=Tavg), P1) assert_close(Spitzglass_low(D=D, Q=Q, P1=P1, L=L, SG=SG, Tavg=Tavg), P2, atol=1E-10) assert_close(Spitzglass_low(D=D, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), L) assert_close(Spitzglass_low(L=L, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), D) with pytest.raises(Exception): Spitzglass_low(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15) def test_Oliphant(): D = 0.340 P1 = 90E5 P2 = 20E5 L = 160E3 SG=0.693 Tavg = 277.15 Q = 28.851535408143057 assert_close(Oliphant(D=D, P1=P1, P2=P2, L=L, SG=SG, Tavg=Tavg), Q) assert_close(Oliphant(D=D, Q=Q, P2=P2, L=L, SG=SG, Tavg=Tavg), P1) assert_close(Oliphant(D=D, Q=Q, P1=P1, L=L, SG=SG, Tavg=Tavg), P2) assert_close(Oliphant(D=D, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), L) assert_close(Oliphant(L=L, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), D) with pytest.raises(Exception): Oliphant(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15) def test_Fritzsche(): D = 0.340 P1 = 90E5 P2 = 20E5 L = 160E3 SG=0.693 Tavg = 277.15 Q = 39.421535157535565 assert_close(Fritzsche(D=D, P1=P1, P2=P2, L=L, SG=SG, Tavg=Tavg), Q) assert_close(Fritzsche(D=D, Q=Q, P2=P2, L=L, SG=SG, Tavg=Tavg), P1) assert_close(Fritzsche(D=D, Q=Q, P1=P1, L=L, SG=SG, Tavg=Tavg), P2) assert_close(Fritzsche(D=D, Q=Q, P1=P1, P2=P2, SG=SG, 
Tavg=Tavg), L) assert_close(Fritzsche(L=L, Q=Q, P1=P1, P2=P2, SG=SG, Tavg=Tavg), D) with pytest.raises(Exception): Fritzsche(D=0.340, P1=90E5, L=160E3, SG=0.693, Tavg=277.15) def test_Muller(): D = 0.340 mu = 1E-5 P1 = 90E5 P2 = 20E5 L = 160E3 SG=0.693 Tavg = 277.15 Q = 60.45796698148663 assert_close(Muller(D=D, P1=P1, P2=P2, L=L, SG=SG, mu=mu, Tavg=Tavg), Q) assert_close(Muller(D=D, Q=Q, P2=P2, L=L, SG=SG, mu=mu, Tavg=Tavg), P1) assert_close(Muller(D=D, Q=Q, P1=P1, L=L, SG=SG, mu=mu, Tavg=Tavg), P2) assert_close(Muller(D=D, Q=Q, P1=P1, P2=P2, SG=SG, mu=mu, Tavg=Tavg), L) assert_close(Muller(L=L, Q=Q, P1=P1, P2=P2, SG=SG, mu=mu, Tavg=Tavg), D) with pytest.raises(Exception): Muller(D=D, P2=P2, L=L, SG=SG, mu=mu, Tavg=Tavg) def test_IGT(): D = 0.340 mu = 1E-5 P1 = 90E5 P2 = 20E5 L = 160E3 SG=0.693 Tavg = 277.15 Q = 48.92351786788815 assert_close(IGT(D=D, P1=P1, P2=P2, L=L, SG=SG, mu=mu, Tavg=Tavg), Q) assert_close(IGT(D=D, Q=Q, P2=P2, L=L, SG=SG, mu=mu, Tavg=Tavg), P1) assert_close(IGT(D=D, Q=Q, P1=P1, L=L, SG=SG, mu=mu, Tavg=Tavg), P2) assert_close(IGT(D=D, Q=Q, P1=P1, P2=P2, SG=SG, mu=mu, Tavg=Tavg), L) assert_close(IGT(L=L, Q=Q, P1=P1, P2=P2, SG=SG, mu=mu, Tavg=Tavg), D) with pytest.raises(Exception): IGT(D=D, P2=P2, L=L, SG=SG, mu=mu, Tavg=Tavg) def test_isothermal_gas(): mcalc = isothermal_gas(11.3, 0.00185, P1=1E6, P2=9E5, L=1000, D=0.5) assert_close(mcalc, 145.484757264) assert_close(isothermal_gas(11.3, 0.00185, P1=1E6, P2=9E5, m=145.484757264, D=0.5), 1000) assert_close(isothermal_gas(11.3, 0.00185, P2=9E5, m=145.484757264, L=1000., D=0.5), 1E6) assert_close(isothermal_gas(11.3, 0.00185, P1=1E6, m=145.484757264, L=1000., D=0.5), 9E5) assert_close(isothermal_gas(11.3, 0.00185, P1=1E6, P2=9E5, m=145.484757264, L=1000.), 0.5) with pytest.raises(Exception): isothermal_gas(11.3, 0.00185, P1=1E6, P2=9E5, L=1000) with pytest.raises(Exception): isothermal_gas(rho=11.3, fd=0.00185, P1=1E6, P2=1E5, L=1000, D=0.5) with pytest.raises(Exception): isothermal_gas(rho=11.3, 
fd=0.00185, P2=1E6, P1=9E5, L=1000, D=0.5) # Newton can't converge, need a bounded solver P1 = isothermal_gas(rho=11.3, fd=0.00185, m=390, P2=9E5, L=1000, D=0.5) assert_close(P1, 2298973.786533209) # Case where the desired flow is greated than the choked flow's rate with pytest.raises(Exception): isothermal_gas(rho=11.3, fd=0.00185, m=400, P2=9E5, L=1000, D=0.5) # test the case where the ideal gas assumption is baked in: rho = 10.75342009105268 # Chemical('nitrogen', P=(1E6+9E5)/2).rho m1 = isothermal_gas(rho=rho, fd=0.00185, P1=1E6, P2=9E5, L=1000, D=0.5) assert_close(m1, 141.92260633059334) # They are fairly similar from math import log, pi fd = 0.00185 P1 = 1E6 P2 = 9E5 L = 1000 D = 0.5 T = 298.15 # from scipy.constants import R # from thermo import property_molar_to_mass, Chemical, pi, log R = 296.8029514446658 # property_molar_to_mass(R, Chemical('nitrogen').MW) m2 = (pi**2/16*D**4/(R*T*(fd*L/D + 2*log(P1/P2)))*(P1**2-P2**2))**0.5 assert_close(m2, 145.48786057477403) def test_P_isothermal_critical_flow(): P2_max = P_isothermal_critical_flow(P=1E6, fd=0.00185, L=1000., D=0.5) assert_close(P2_max, 389699.7317645518)
bbefec74ec05c8be2358eb6d37693b79a119f68a
a4830a0189c325c35c9021479a5958ec870a2e8b
/routing/migrations/0022_auto_20160819_1523.py
9a72cd9f00d71edcf95d1e679496d2ced9546eee
[]
no_license
solutionprovider9174/steward
044c7d299a625108824c854839ac41f51d2ca3fd
fd681593a9d2d339aab0f6f3688412d71cd2ae32
refs/heads/master
2022-12-11T06:45:04.544838
2020-08-21T02:56:55
2020-08-21T02:56:55
289,162,699
0
0
null
null
null
null
UTF-8
Python
false
false
887
py
# -*- coding: utf-8 -*- # Generated by Django 1.9.7 on 2016-08-19 15:23 from __future__ import unicode_literals import django.core.validators from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('routing', '0021_fraudbypasshistory_outboundroutehistory'), ] operations = [ migrations.AlterModelOptions( name='fraudbypass', options={'ordering': ('number',)}, ), migrations.AlterModelOptions( name='outboundroute', options={'ordering': ('number',)}, ), migrations.AlterField( model_name='fraudbypasshistory', name='number', field=models.CharField(max_length=64, validators=[django.core.validators.RegexValidator(code='nomatch', message='Must be 10 digits', regex='^\\d{10}$')]), ), ]
681d7bd02ccb0578a7842aa55f7bc5a99400f534
0859a864b1270164fe44a878ab12cfb3302c36bf
/abc159/a.py
fe00ff3096ace83f6c7f54a99fa07d13cf37865b
[]
no_license
wataoka/atcoder
f359d49ab6e0db39c019d9f6d2e8b92d35f723c4
b91465dd5f655d05b89485fc7ad222283c5958f5
refs/heads/master
2021-04-15T06:02:59.593965
2020-05-11T04:38:23
2020-05-11T04:38:23
126,754,342
0
0
null
2020-02-28T02:31:03
2018-03-26T00:51:12
Python
UTF-8
Python
false
false
106
py
def rC2(r): return int(r*(r-1)//2) N, M = list(map(int, input().split())) print(rC2(r=N) + rC2(r=M))
9434fd3c1d1715f323f8d9c6fc8f1097ccd9a93e
0cdcee391e178092d7073734957075c72681f037
/hackerrank/si/si-smaller-element-left-side.py
10a600c468bd60c31b9b74c6e23fe144363e00bf
[]
no_license
hrishikeshtak/Coding_Practises_Solutions
6b483bbf19d5365e18f4ea1134aa633ff347a1c1
86875d7436a78420591a60b716acd2780287b4a8
refs/heads/master
2022-10-06T18:44:56.992451
2022-09-25T03:29:03
2022-09-25T03:29:03
125,744,102
0
0
null
null
null
null
UTF-8
Python
false
false
903
py
#!/usr/bin/python3 # Find 1st smaller elements on left side class Solution: # @param A : list of integers # @return a list of integers def prevSmaller(self, arr): N = len(arr) s = [-1] * N b = [-1] * N top = -1 top += 1 s[top] = 0 for i in range(1, N): # print("stack: ", s) # print("b: ", b) while top >= 0: if arr[i] > arr[s[top]]: b[i] = arr[s[top]] top += 1 s[top] = i break else: top -= 1 if top == -1: b[i] = -1 top += 1 s[top] = i return b if __name__ == '__main__': A = [4, 5, 2, 10, 8] A = [3, 2, 1] A = [39, 27, 11, 4, 24, 32, 32, 1] print(Solution().prevSmaller(A))
4133d8de12e950deab0ef7eb66dff3ef852e342b
5cc1421f5280c4c869e5df5b936f4d629693d0f1
/main.py
139b340dbacec7bbaa8633419be07b3aeef61f1e
[ "MIT" ]
permissive
zhangxujinsh/MTCNN-VS
96c38479fa6e6aa5dea0e855cddcf8548ea7872d
42d79c0a8954493fd8afb4a6665584da9a8b9c6e
refs/heads/master
2020-07-11T01:51:40.142178
2016-10-29T02:13:57
2016-10-29T02:17:51
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,875
py
# coding: utf-8 import mxnet as mx from mtcnn_detector import MtcnnDetector import cv2 import os import time def testimg(detector): img = cv2.imread('test.jpg') t1 = time.time() results = detector.detect_face(img) print 'time: ',time.time() - t1 if results is not None: total_boxes = results[0] points = results[1] draw = img.copy() for b in total_boxes: cv2.rectangle(draw, (int(b[0]), int(b[1])), (int(b[2]), int(b[3])), (255, 255, 255)) for p in points: for i in range(5): cv2.circle(draw, (p[i], p[i + 5]), 1, (0, 0, 255), 2) cv2.imshow("detection result", draw) cv2.imwrite("result.png", draw) cv2.waitKey(0) # -------------- # test on camera # -------------- def testcamera(detector): camera = cv2.VideoCapture(0) while True: grab, frame = camera.read() img = cv2.resize(frame, (320,180)) t1 = time.time() results = detector.detect_face(img) print 'time: ',time.time() - t1 if results is None: cv2.imshow("detection result", img) cv2.waitKey(1) continue total_boxes = results[0] points = results[1] draw = img.copy() for b in total_boxes: cv2.rectangle(draw, (int(b[0]), int(b[1])), (int(b[2]), int(b[3])), (255, 255, 255)) for p in points: for i in range(5): cv2.circle(draw, (p[i], p[i + 5]), 1, (255, 0, 0), 2) cv2.imshow("detection result", draw) key=cv2.waitKey(1) if 'q'==chr(key & 255) or 'Q'==chr(key & 255): break; if __name__=="__main__": detector = MtcnnDetector(model_folder='model', ctx=mx.gpu(0), num_worker = 4 , accurate_landmark = False) # testimg(detector) testcamera(detector)
8d85aaa01325ea01f6ece159131b127ef9047799
ac5e52a3fc52dde58d208746cddabef2e378119e
/exps-gsn-edf.0/gsn-edf_ut=3.5_rd=0.5_rw=0.06_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=26/sched.py
1613dd9b0c794754a75a5de64bc5ac7319aa1a66
[]
no_license
ricardobtxr/experiment-scripts
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
7bcebff7ac2f2822423f211f1162cd017a18babb
refs/heads/master
2023-04-09T02:37:41.466794
2021-04-25T03:27:16
2021-04-25T03:27:16
358,926,457
0
0
null
null
null
null
UTF-8
Python
false
false
342
py
-X FMLP -Q 0 -L 3 95 400 -X FMLP -Q 0 -L 3 66 300 -X FMLP -Q 0 -L 3 54 175 -X FMLP -Q 1 -L 2 53 200 -X FMLP -Q 1 -L 2 50 200 -X FMLP -Q 1 -L 2 44 175 -X FMLP -Q 2 -L 2 34 125 -X FMLP -Q 2 -L 2 34 175 -X FMLP -Q 3 -L 1 33 175 -X FMLP -Q 3 -L 1 31 200 28 150 25 175 24 125 20 200 20 150 18 150 14 175 13 100 9 125
e9c6a490422bade7bff0ccdc363ca4f326b7f8bb
55821cab06b431b3b253df77559800b9f84ed2a7
/models/place.py
a918d531769a7e7fed34aacbe57ca9ec87ce9dab
[]
no_license
kaci65/AirBnB_clone
1fa2f1721d752635dd895de09fcedc194612ca91
b2c03583aab891fde5e87e7e34b40bcf2aa7ebb6
refs/heads/main
2023-03-11T08:23:08.811811
2021-02-28T20:41:17
2021-02-28T20:41:17
340,441,645
0
0
null
null
null
null
UTF-8
Python
false
false
382
py
#!/usr/bin/python3 """Place module""" import models from models.base_model import BaseModel class Place(BaseModel): """place class inheriting from BaseModel""" city_id = "" user_id = "" name = "" description = "" number_rooms = 0 number_bathrooms = 0 max_guest = 0 price_by_night = 0 latitude = 0.0 longitude = 0.0 amenity_ids = ""
d6d3e38f6d727b711d14a8cf13a3acf935cdda72
18239524612cf572bfeaa3e001a3f5d1b872690c
/clients/client/python/test/test_submit_self_service_login_flow.py
b4e8c0a3a77a374f30e918234b71717beae63d3c
[ "Apache-2.0" ]
permissive
simoneromano96/sdk
2d7af9425dabc30df830a09b26841fb2e8781bf8
a6113d0daefbbb803790297e4b242d4c7cbbcb22
refs/heads/master
2023-05-09T13:50:45.485951
2021-05-28T12:18:27
2021-05-28T12:18:27
371,689,133
0
0
Apache-2.0
2021-05-28T12:11:41
2021-05-28T12:11:40
null
UTF-8
Python
false
false
1,198
py
""" Ory APIs Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers. # noqa: E501 The version of the OpenAPI document: v0.0.1-alpha.3 Contact: [email protected] Generated by: https://openapi-generator.tech """ import sys import unittest import ory_client from ory_client.model.submit_self_service_login_flow_with_password_method import SubmitSelfServiceLoginFlowWithPasswordMethod globals()['SubmitSelfServiceLoginFlowWithPasswordMethod'] = SubmitSelfServiceLoginFlowWithPasswordMethod from ory_client.model.submit_self_service_login_flow import SubmitSelfServiceLoginFlow class TestSubmitSelfServiceLoginFlow(unittest.TestCase): """SubmitSelfServiceLoginFlow unit test stubs""" def setUp(self): pass def tearDown(self): pass def testSubmitSelfServiceLoginFlow(self): """Test SubmitSelfServiceLoginFlow""" # FIXME: construct object with mandatory attributes with example values # model = SubmitSelfServiceLoginFlow() # noqa: E501 pass if __name__ == '__main__': unittest.main()
45d28aa10f25871b33de9573c126392639152d09
847273de4b1d814fab8b19dc651c651c2d342ede
/.history/Sudoku_II_003_20180618133626.py
749199c16ddebe39bbc973c2ba32a1bfd48fc600
[]
no_license
Los4U/sudoku_in_python
0ba55850afcffeac4170321651620f3c89448b45
7d470604962a43da3fc3e5edce6f718076197d32
refs/heads/master
2020-03-22T08:10:13.939424
2018-07-04T17:21:13
2018-07-04T17:21:13
139,749,483
0
1
null
null
null
null
UTF-8
Python
false
false
4,849
py
from random import randint # Sudoku1 almost solved sudoku1 = [ [5, 9, 8, 6, 1, 2, 3, 4, 7], [2, 1, 7, 9, 3, 4, 8, 6, 5], [6, 4, 3, 5, 8, 7, 1, 2, 9], [1, 6, 5, 4, 9, 8, 2, 7, 3], [3, 2, 9, 7, 6, 5, 4, 1, 8], [7, 8, 4, 3, 2, 1, 5, 9, 6], [8, 3, 1, 2, 7, 6, 9, 5, 4], [4, 7, 2, 8, 5, 9, 6, 3, 1], [9, 5, ' ', ' ', ' ', ' ', ' ', ' ', 2] ] i = 0 while i < 10: if i == 0: print(" 1 2 3 4 5 6 7 8 9") print(" -------------------------") elif i == 3 or i == 6 or i == 9: print(" -------------------------") spaceBar = "|" if i < 9: print('{2} {1} {0[0]} {0[1]} {0[2]} {1} {0[3]} {0[4]} {0[5]} {1} {0[6]} {0[7]} {0[8]} {1}'.format(sudoku1[i], spaceBar,i+1)) i = i + 1 while True: # prints Sudoku until is solved print("Input 3 numbers in format a b c, np. 4 5 8") print(" a - row number") print(" b - column number ") print(" c - value") #vprint(" r - reset chart to start\n ") x = input("Input a b c: ") print("") numbers = " 0123456789" # conditions of entering the numbers ! if (len(x) != 5) or (str(x[0]) not in numbers) or (str(x[2]) not in numbers) or ( str(x[4]) not in numbers) or (str(x[1]) != " ") or (str(x[3]) != " "): if x == "r": # reset print(" Function reset() will be ready in Next Week") else: print("Error - wrong number format \n ") continue sudoku1[int(x[0])-1][int(x[2])-1] = x[4] if int(x[0]) == 1: row1[int(x[2]) - 1] = int(x[4]) elif int(x[0]) == 2: row2[int(x[2]) - 1] = int(x[4]) elif int(x[0]) == 3: row3[int(x[2]) - 1] = int(x[4]) elif int(x[0]) == 4: row4[int(x[2]) - 1] = int(x[4]) elif int(x[0]) == 5: row5[int(x[2]) - 1] = int(x[4]) elif int(x[0]) == 6: row6[int(x[2]) - 1] = int(x[4]) elif int(x[0]) == 7: row7[int(x[2]) - 1] = int(x[4]) elif int(x[0]) == 8: row8[int(x[2]) - 1] = int(x[4]) elif int(x[0]) == 9: row9[int(x[2]) - 1] = int(x[4]) # Sudoku 2 almost solved # row1 = [9,8,7,4,3,2,5,6,1] # row2 = [2,4,3,5,1,6,8,7,9] # row3 = [5,6,1,7,9,8,4,3,2] # row4 = [3,9,5,6,4,7,2,1,8] # row5 = [8,2,4,3,5,1,6,9,7] # row6 = [1,7,6,2,8,9,3,4,5] # row7 = [7,1,2,8,6,3,9,5,4] 
# row8 = [4,3,8,9,7,5,1,2,6] # row9 = [' ',5,' ',' ',2,' ',7,' ',' '] ''' columns = [1, 2, 3, 4, 5, 6, 7, 8, 9] r1 = [[5, 9, 8, 6, 1, 2, 3, 4, 7], [9, 8, 7, 4, 3, 2, 5, 6, 1]] r2 = [[2, 1, 7, 9, 3, 4, 8, 6, 5], [2, 4, 3, 5, 1, 6, 8, 7, 9]] r3 = [[6, 4, 3, 5, 8, 7, 1, 2, 9], [5, 6, 1, 7, 9, 8, 4, 3, 2]] r4 = [[1, 6, 5, 4, 9, 8, 2, 7, 3], [3, 9, 5, 6, 4, 7, 2, 1, 8]] r5 = [[3, 2, 9, 7, 6, 5, 4, 1, 8], [8, 2, 4, 3, 5, 1, 6, 9, 7]] r6 = [[7, 8, 4, 3, 2, 1, 5, 9, 6], [1, 7, 6, 2, 8, 9, 3, 4, 5]] r7 = [[8, 3, 1, 2, 7, 6, 9, 5, 4], [7, 1, 2, 8, 6, 3, 9, 5, 4]] r8 = [[4, 7, 2, 8, 5, 9, 6, 3, 1], [4, 3, 8, 9, 7, 5, 1, 2, 6]] r9 = [[9, 5, ' ', ' ', ' ', ' ', ' ', ' ', 2], [6, 5, ' ', 1, ' ', ' ', 7, 8, ' ']] # 9 1 6, 9 3 9, 9 4 1, 9 6 4, 9 8 8, 9 9 3 # r9=[[9,5, ' ', ' ', ' ', ' ', ' ', ' ',2],[' ',5,' ',' ',2,' ',7,' ',' # ']] # 9 1 6, 9 3 9, 9 4 1, 9 6 4, 9 8 8, 9 9 3 print(" ") print(" %@@@@@@@ @@@ @@@ (@@@@@@@@@ ,@@@@2@@@@@ @@@, /@@@/ @@@, @@@ ") print(" @@@* @@@ @@@ (@@( /@@@# .@@@% (@@@ @@@, @@@% @@@, @@@. ") print(" @@@& @@@ @@@ (@@( @@@* @@@% #@@% @@@,.@@@. @@@, @@@. ") print(" ,@@@@@@* @@@ @@@ (@@( (@@% .@@@* ,@@@ @@@%@@% @@@, @@@. ") print(" /@@@@@# @@@ @@@ (@@( (@@% .@@@* ,@@@ @@@,@@@( @@@, @@@. ") print(" *@@@. @@@ .@@& (@@( @@@. @@@% &@@( @@@, &@@@. @@@* .@@@. ") print(" &, &@@@ #@@@. ,@@@, (@@( ,&@@@* ,@@@& .@@@@ @@@, (@@@/ #@@@* @@@# ") print(",@@@@@@@@( (@@@@@@@@% (@@@@@@@@@( #@@@@@@@@@, @@@, ,@@@% ,@@@@@@@@@. 
\n ") print("To start game input:") print(" r - to load random puzzle:") print(" 1 - to load chart nr 1:") print(" 2 - to load chart nr 2:") print(" 3 - to load chart nr 3:") choice = input("Input here: ") if choice == "R" or choice == "r": sudoku_number = randint(0, 1) rows_fill(sudoku_number) elif int(choice) == 1: rows_fill(0) elif int(choice) == 2: rows_fill(1) elif int(choice) == 3: rows_fill(0) print("Your sudoku to solve:") try: if sum(row1) == 45 and sum(row2) == 45 and sum(row3) == 45 and sum(row4) == 45 and sum( row5) == 45 and sum(row6) == 45 and sum(row7) == 45 and sum(row8) == 45 and sum(row9) == 45: print("YOU WIN") break except TypeError: print() '''
511c9b0d7215e0f07ac854e1432936f04778ae66
37c243e2f0aab70cbf38013d1d91bfc3a83f7972
/pp7TeV/HeavyIonsAnalysis/JetAnalysis/python/jets/ak6PFJetSequence_pPb_mc_bTag_cff.py
4ad43ec58638c60213bfc117e17f453046685d97
[]
no_license
maoyx/CMSWork
82f37256833cbe4c60cb8df0b4eb68ceb12b65e7
501456f3f3e0f11e2f628b40e4d91e29668766d5
refs/heads/master
2021-01-01T18:47:55.157534
2015-03-12T03:47:15
2015-03-12T03:47:15
10,951,799
0
0
null
null
null
null
UTF-8
Python
false
false
11,363
py
import FWCore.ParameterSet.Config as cms from PhysicsTools.PatAlgos.patHeavyIonSequences_cff import * from HeavyIonsAnalysis.JetAnalysis.inclusiveJetAnalyzer_cff import * from HeavyIonsAnalysis.JetAnalysis.bTaggers_cff import * from RecoJets.JetProducers.JetIDParams_cfi import * ak6PFmatch = patJetGenJetMatch.clone( src = cms.InputTag("ak6PFJets"), matched = cms.InputTag("ak6HiGenJets") ) ak6PFparton = patJetPartonMatch.clone(src = cms.InputTag("ak6PFJets") ) ak6PFcorr = patJetCorrFactors.clone( useNPV = False, # primaryVertices = cms.InputTag("hiSelectedVertex"), levels = cms.vstring('L2Relative','L3Absolute'), src = cms.InputTag("ak6PFJets"), payload = "AK6PF_generalTracks" ) ak6PFJetID= cms.EDProducer('JetIDProducer', JetIDParams, src = cms.InputTag('ak6CaloJets')) ak6PFclean = heavyIonCleanedGenJets.clone(src = cms.InputTag('ak6HiGenJets')) ak6PFbTagger = bTaggers("ak6PF") #create objects locally since they dont load properly otherwise ak6PFmatch = ak6PFbTagger.match ak6PFparton = ak6PFbTagger.parton ak6PFPatJetFlavourAssociation = ak6PFbTagger.PatJetFlavourAssociation ak6PFJetTracksAssociatorAtVertex = ak6PFbTagger.JetTracksAssociatorAtVertex ak6PFSimpleSecondaryVertexHighEffBJetTags = ak6PFbTagger.SimpleSecondaryVertexHighEffBJetTags ak6PFSimpleSecondaryVertexHighPurBJetTags = ak6PFbTagger.SimpleSecondaryVertexHighPurBJetTags ak6PFCombinedSecondaryVertexBJetTags = ak6PFbTagger.CombinedSecondaryVertexBJetTags ak6PFCombinedSecondaryVertexMVABJetTags = ak6PFbTagger.CombinedSecondaryVertexMVABJetTags ak6PFJetBProbabilityBJetTags = ak6PFbTagger.JetBProbabilityBJetTags ak6PFSoftMuonByPtBJetTags = ak6PFbTagger.SoftMuonByPtBJetTags ak6PFSoftMuonByIP3dBJetTags = ak6PFbTagger.SoftMuonByIP3dBJetTags ak6PFTrackCountingHighEffBJetTags = ak6PFbTagger.TrackCountingHighEffBJetTags ak6PFTrackCountingHighPurBJetTags = ak6PFbTagger.TrackCountingHighPurBJetTags ak6PFPatJetPartonAssociation = ak6PFbTagger.PatJetPartonAssociation ak6PFImpactParameterTagInfos = 
ak6PFbTagger.ImpactParameterTagInfos ak6PFJetProbabilityBJetTags = ak6PFbTagger.JetProbabilityBJetTags ak6PFPositiveOnlyJetProbabilityJetTags = ak6PFbTagger.PositiveOnlyJetProbabilityJetTags ak6PFNegativeOnlyJetProbabilityJetTags = ak6PFbTagger.NegativeOnlyJetProbabilityJetTags ak6PFNegativeTrackCountingHighEffJetTags = ak6PFbTagger.NegativeTrackCountingHighEffJetTags ak6PFNegativeTrackCountingHighPur = ak6PFbTagger.NegativeTrackCountingHighPur ak6PFNegativeOnlyJetBProbabilityJetTags = ak6PFbTagger.NegativeOnlyJetBProbabilityJetTags ak6PFPositiveOnlyJetBProbabilityJetTags = ak6PFbTagger.PositiveOnlyJetBProbabilityJetTags ak6PFSecondaryVertexTagInfos = ak6PFbTagger.SecondaryVertexTagInfos ak6PFSimpleSecondaryVertexHighEffBJetTags = ak6PFbTagger.SimpleSecondaryVertexHighEffBJetTags ak6PFSimpleSecondaryVertexHighPurBJetTags = ak6PFbTagger.SimpleSecondaryVertexHighPurBJetTags ak6PFCombinedSecondaryVertexBJetTags = ak6PFbTagger.CombinedSecondaryVertexBJetTags ak6PFCombinedSecondaryVertexMVABJetTags = ak6PFbTagger.CombinedSecondaryVertexMVABJetTags ak6PFSecondaryVertexNegativeTagInfos = ak6PFbTagger.SecondaryVertexNegativeTagInfos ak6PFSimpleSecondaryVertexNegativeHighEffBJetTags = ak6PFbTagger.SimpleSecondaryVertexNegativeHighEffBJetTags ak6PFSimpleSecondaryVertexNegativeHighPurBJetTags = ak6PFbTagger.SimpleSecondaryVertexNegativeHighPurBJetTags ak6PFCombinedSecondaryVertexNegativeBJetTags = ak6PFbTagger.CombinedSecondaryVertexNegativeBJetTags ak6PFCombinedSecondaryVertexPositiveBJetTags = ak6PFbTagger.CombinedSecondaryVertexPositiveBJetTags ak6PFSoftMuonTagInfos = ak6PFbTagger.SoftMuonTagInfos ak6PFSoftMuonBJetTags = ak6PFbTagger.SoftMuonBJetTags ak6PFSoftMuonByIP3dBJetTags = ak6PFbTagger.SoftMuonByIP3dBJetTags ak6PFSoftMuonByPtBJetTags = ak6PFbTagger.SoftMuonByPtBJetTags ak6PFNegativeSoftMuonByPtBJetTags = ak6PFbTagger.NegativeSoftMuonByPtBJetTags ak6PFPositiveSoftMuonByPtBJetTags = ak6PFbTagger.PositiveSoftMuonByPtBJetTags ak6PFPatJetFlavourId = 
cms.Sequence(ak6PFPatJetPartonAssociation*ak6PFPatJetFlavourAssociation) ak6PFJetBtaggingIP = cms.Sequence(ak6PFImpactParameterTagInfos * (ak6PFTrackCountingHighEffBJetTags + ak6PFTrackCountingHighPurBJetTags + ak6PFJetProbabilityBJetTags + ak6PFJetBProbabilityBJetTags + ak6PFPositiveOnlyJetProbabilityJetTags + ak6PFNegativeOnlyJetProbabilityJetTags + ak6PFNegativeTrackCountingHighEffJetTags + ak6PFNegativeTrackCountingHighPur + ak6PFNegativeOnlyJetBProbabilityJetTags + ak6PFPositiveOnlyJetBProbabilityJetTags ) ) ak6PFJetBtaggingSV = cms.Sequence(ak6PFImpactParameterTagInfos * ak6PFSecondaryVertexTagInfos * (ak6PFSimpleSecondaryVertexHighEffBJetTags + ak6PFSimpleSecondaryVertexHighPurBJetTags + ak6PFCombinedSecondaryVertexBJetTags + ak6PFCombinedSecondaryVertexMVABJetTags ) ) ak6PFJetBtaggingNegSV = cms.Sequence(ak6PFImpactParameterTagInfos * ak6PFSecondaryVertexNegativeTagInfos * (ak6PFSimpleSecondaryVertexNegativeHighEffBJetTags + ak6PFSimpleSecondaryVertexNegativeHighPurBJetTags + ak6PFCombinedSecondaryVertexNegativeBJetTags + ak6PFCombinedSecondaryVertexPositiveBJetTags ) ) ak6PFJetBtaggingMu = cms.Sequence(ak6PFSoftMuonTagInfos * (ak6PFSoftMuonBJetTags + ak6PFSoftMuonByIP3dBJetTags + ak6PFSoftMuonByPtBJetTags + ak6PFNegativeSoftMuonByPtBJetTags + ak6PFPositiveSoftMuonByPtBJetTags ) ) ak6PFJetBtagging = cms.Sequence(ak6PFJetBtaggingIP *ak6PFJetBtaggingSV *ak6PFJetBtaggingNegSV *ak6PFJetBtaggingMu ) ak6PFpatJetsWithBtagging = patJets.clone(jetSource = cms.InputTag("ak6PFJets"), genJetMatch = cms.InputTag("ak6PFmatch"), genPartonMatch = cms.InputTag("ak6PFparton"), jetCorrFactorsSource = cms.VInputTag(cms.InputTag("ak6PFcorr")), JetPartonMapSource = cms.InputTag("ak6PFPatJetFlavourAssociation"), trackAssociationSource = cms.InputTag("ak6PFJetTracksAssociatorAtVertex"), discriminatorSources = cms.VInputTag(cms.InputTag("ak6PFSimpleSecondaryVertexHighEffBJetTags"), cms.InputTag("ak6PFSimpleSecondaryVertexHighPurBJetTags"), 
cms.InputTag("ak6PFCombinedSecondaryVertexBJetTags"), cms.InputTag("ak6PFCombinedSecondaryVertexMVABJetTags"), cms.InputTag("ak6PFJetBProbabilityBJetTags"), cms.InputTag("ak6PFJetProbabilityBJetTags"), cms.InputTag("ak6PFSoftMuonByPtBJetTags"), cms.InputTag("ak6PFSoftMuonByIP3dBJetTags"), cms.InputTag("ak6PFTrackCountingHighEffBJetTags"), cms.InputTag("ak6PFTrackCountingHighPurBJetTags"), ), jetIDMap = cms.InputTag("ak6PFJetID"), addBTagInfo = True, addTagInfos = True, addDiscriminators = True, addAssociatedTracks = True, addJetCharge = False, addJetID = True, getJetMCFlavour = True, addGenPartonMatch = True, addGenJetMatch = True, embedGenJetMatch = True, embedGenPartonMatch = True, embedCaloTowers = False, embedPFCandidates = True ) ak6PFJetAnalyzer = inclusiveJetAnalyzer.clone(jetTag = cms.InputTag("ak6PFpatJetsWithBtagging"), genjetTag = 'ak6HiGenJets', rParam = 0.6, matchJets = cms.untracked.bool(False), matchTag = 'patJetsWithBtagging', pfCandidateLabel = cms.untracked.InputTag('particleFlow'), trackTag = cms.InputTag("generalTracks"), fillGenJets = True, isMC = True, genParticles = cms.untracked.InputTag("hiGenParticles"), eventInfoTag = cms.InputTag("generator"), doLifeTimeTagging = cms.untracked.bool(True), doLifeTimeTaggingExtras = cms.untracked.bool(True), bTagJetName = cms.untracked.string("ak6PF"), genPtMin = cms.untracked.double(15), hltTrgResults = cms.untracked.string('TriggerResults::'+'HISIGNAL') ) ak6PFJetSequence_mc = cms.Sequence( ak6PFclean * ak6PFmatch * ak6PFparton * ak6PFcorr * ak6PFJetID * ak6PFPatJetFlavourId * ak6PFJetTracksAssociatorAtVertex * ak6PFJetBtagging * ak6PFpatJetsWithBtagging * ak6PFJetAnalyzer ) ak6PFJetSequence_data = cms.Sequence(ak6PFcorr * ak6PFJetTracksAssociatorAtVertex * ak6PFJetBtagging * ak6PFpatJetsWithBtagging * ak6PFJetAnalyzer ) ak6PFJetSequence_jec = ak6PFJetSequence_mc ak6PFJetSequence_mix = ak6PFJetSequence_mc ak6PFJetSequence = cms.Sequence(ak6PFJetSequence_mc)
b052cc54020a43043bb7d1822c05072b653f6113
46f358b954d2d0067a2093ee9006e222f831a8f8
/tests/datasource/batch_kwarg_generator/test_s3_subdir_reader_generator.py
474a13f241874c6c833756f7ae698d9226069a0e
[ "Apache-2.0" ]
permissive
dhruvvyas90/great_expectations
b963aa99c683a0da3a9e2b5a1046d2a32f622c7b
fddf5336065c644558c528301e601b9f02be87e2
refs/heads/main
2023-01-28T15:26:55.331282
2020-12-03T18:52:14
2020-12-03T18:52:14
319,719,900
1
0
Apache-2.0
2020-12-08T18:02:33
2020-12-08T18:02:32
null
UTF-8
Python
false
false
3,651
py
import logging import os import time import pandas as pd import pytest import requests from botocore.session import Session from great_expectations.datasource.batch_kwargs_generator import ( S3SubdirReaderBatchKwargsGenerator, ) from great_expectations.exceptions import BatchKwargsError port = 5555 endpoint_uri = "http://127.0.0.1:%s/" % port os.environ["AWS_ACCESS_KEY_ID"] = "dummy_key" os.environ["AWS_SECRET_ACCESS_KEY"] = "dummy_secret" @pytest.fixture(scope="module") def s3_base(): # writable local S3 system import shlex import subprocess proc = subprocess.Popen(shlex.split("moto_server s3 -p %s" % port)) timeout = 5 while timeout > 0: try: r = requests.get(endpoint_uri) if r.ok: break except: pass timeout -= 0.1 time.sleep(0.1) yield proc.terminate() proc.wait() @pytest.fixture(scope="module") def mock_s3_bucket(s3_base): bucket = "test_bucket" session = Session() client = session.create_client("s3", endpoint_url=endpoint_uri) client.create_bucket(Bucket=bucket, ACL="public-read") df = pd.DataFrame({"c1": [1, 2, 3], "c2": ["a", "b", "c"]}) keys = [ "data/for/you.csv", "data/for/me.csv", ] for key in keys: client.put_object( Bucket=bucket, Body=df.to_csv(index=None).encode("utf-8"), Key=key ) yield bucket @pytest.fixture def s3_subdir_generator(mock_s3_bucket, basic_sparkdf_datasource): # We configure a generator that will fetch from (mocked) my_bucket # and will use glob patterns to match returned assets into batches of the same asset generator = S3SubdirReaderBatchKwargsGenerator( "my_generator", datasource=basic_sparkdf_datasource, boto3_options={"endpoint_url": endpoint_uri}, base_directory="test_bucket/data/for", reader_options={"sep": ","}, ) yield generator @pytest.fixture def s3_subdir_generator_with_partition(mock_s3_bucket, basic_sparkdf_datasource): # We configure a generator that will fetch from (mocked) my_bucket # and will use glob patterns to match returned assets into batches of the same asset generator = S3SubdirReaderBatchKwargsGenerator( 
"my_generator", datasource=basic_sparkdf_datasource, boto3_options={"endpoint_url": endpoint_uri}, base_directory="test_bucket/data/", reader_options={"sep": ","}, ) yield generator def test_s3_subdir_generator_basic_operation(s3_subdir_generator): # S3 Generator sees *only* configured assets assets = s3_subdir_generator.get_available_data_asset_names() print(assets) assert set(assets["names"]) == { ("you", "file"), ("me", "file"), } def test_s3_subdir_generator_reader_options_configuration(s3_subdir_generator): batch_kwargs_list = [ kwargs for kwargs in s3_subdir_generator.get_iterator(data_asset_name="you", limit=10) ] print(batch_kwargs_list) assert batch_kwargs_list[0]["reader_options"] == {"sep": ","} def test_s3_subdir_generator_build_batch_kwargs_no_partition_id(s3_subdir_generator): batch_kwargs = s3_subdir_generator.build_batch_kwargs("you") assert batch_kwargs["s3"] in [ "s3a://test_bucket/data/for/you.csv", ] def test_s3_subdir_generator_build_batch_kwargs_partition_id( s3_subdir_generator_with_partition, basic_sparkdf_datasource ): batch_kwargs = s3_subdir_generator_with_partition.build_batch_kwargs("for", "you") assert batch_kwargs["s3"] == "s3a://test_bucket/data/for/you.csv"
758d3add23ff4cc75c3f3557a759800c70585c20
27e890f900bd4bfb2e66f4eab85bc381cf4d5d3f
/plugins/doc_fragments/files.py
a3723db249284fc0990a043729d80d3b2ea6bec2
[]
no_license
coll-test/notstdlib.moveitallout
eb33a560070bbded5032385d0aea2f3cf60e690b
0987f099b783c6cf977db9233e1c3d9efcbcb3c7
refs/heads/master
2020-12-19T22:28:33.369557
2020-01-23T18:51:26
2020-01-23T18:51:26
235,865,139
0
0
null
null
null
null
UTF-8
Python
false
false
3,706
py
# -*- coding: utf-8 -*- # Copyright: (c) 2014, Matt Martz <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) class ModuleDocFragment(object): # Standard files documentation fragment # Note: mode is overridden by the copy and template modules so if you change the description # here, you should also change it there. DOCUMENTATION = r''' options: mode: description: - The permissions the resulting file or directory should have. - For those used to I(/usr/bin/chmod) remember that modes are actually octal numbers. You must either add a leading zero so that Ansible's YAML parser knows it is an octal number (like C(0644) or C(01777)) or quote it (like C('644') or C('1777')) so Ansible receives a string and can do its own conversion from string into number. - Giving Ansible a number without following one of these rules will end up with a decimal number which will have unexpected results. - As of Ansible 1.8, the mode may be specified as a symbolic mode (for example, C(u+rwx) or C(u=rw,g=r,o=r)). - As of Ansible 2.6, the mode may also be the special string C(preserve). - When set to C(preserve) the file will be given the same permissions as the source file. type: str owner: description: - Name of the user that should own the file/directory, as would be fed to I(chown). type: str group: description: - Name of the group that should own the file/directory, as would be fed to I(chown). type: str seuser: description: - The user part of the SELinux file context. - By default it uses the C(system) policy, where applicable. - When set to C(_default), it will use the C(user) portion of the policy if available. type: str serole: description: - The role part of the SELinux file context. - When set to C(_default), it will use the C(role) portion of the policy if available. type: str setype: description: - The type part of the SELinux file context. 
- When set to C(_default), it will use the C(type) portion of the policy if available. type: str selevel: description: - The level part of the SELinux file context. - This is the MLS/MCS attribute, sometimes known as the C(range). - When set to C(_default), it will use the C(level) portion of the policy if available. type: str default: s0 unsafe_writes: description: - Influence when to use atomic operation to prevent data corruption or inconsistent reads from the target file. - By default this module uses atomic operations to prevent data corruption or inconsistent reads from the target files, but sometimes systems are configured or just broken in ways that prevent this. One example is docker mounted files, which cannot be updated atomically from inside the container and can only be written in an unsafe manner. - This option allows Ansible to fall back to unsafe methods of updating files when atomic operations fail (however, it doesn't force Ansible to perform unsafe writes). - IMPORTANT! Unsafe writes are subject to race conditions and can lead to data corruption. type: bool default: no attributes: description: - The attributes the resulting file or directory should have. - To get supported flags look at the man page for I(chattr) on the target system. - This string should contain the attributes in the same order as the one displayed by I(lsattr). - The C(=) operator is assumed as default, otherwise C(+) or C(-) operators need to be included in the string. type: str aliases: [ attr ] '''
6bd87fef952e8c69e3423f386f408538339d9185
8370083dbbbd32740ad1862637809396dc7984e2
/paresh61.A.MILESTONEPROJECTE/a1.py
524064c675bcdabbfbdd144f009ea8b4126de4dc
[]
no_license
parshuramsail/PYTHON_LEARN
a919b14aab823e0f5e769d8936ddbfb357133db2
8c76720bf73f13cf96930e6d4d5128e6ba9aa535
refs/heads/main
2023-07-14T16:25:26.240555
2021-08-29T17:10:19
2021-08-29T17:10:19
401,095,644
0
0
null
null
null
null
UTF-8
Python
false
false
2,294
py
# STEP1: write a function that can printout a board.setup your board as your list,where each index 1-9 corresponnds with a number on a numberpad. #so you can get a 3 by 3 board representation. #print('\n'*100) def display_board(board): print('\n'*100) print(" | |") print(" " + board[7] + ' | ' + board[8] + ' | ' + board[9]) print(" | |") print("--------------------") print(" | |") print(" " + board[4] + ' | ' + board[5] + ' | ' + board[6]) print(" | |") print("--------------------") print(" | |") print(" " + board[1] + ' | ' + board[2] + ' | ' + board[3]) print(" | |") # TEST STEP1:RUN YOUR FUNCTION ON TEST VERSION OF THE BOARD LIST AND MAKE ADJUSTMENTS AS NECESSARY. test_board=["#","X","O","X","O","X","O","X","O","X"] #test_board=['']*10 display_board(test_board) #print(display_board(test_board)) # STEP3: def player_input(): """ output:(player1=marker, player2=marker) """ marker="" # keep asking player 1 to choose X or O while marker!="X" and marker!="O": marker=input("player:1 choose X or O: ").upper() if marker=="X": return("X","O") else: return("O","X") # RUN THE FUNCTION TO MAKE SURE IT RUNS THE DESIRED OUTPUT player1_marker,player2_marker=player_input() # STEP3 def place_marker(board,marker,position): board [position]=marker test_board=["#","X","O","X","O","X","O","X","O","X"] place_marker(test_board,"$",8) display_board(test_board) # STEP 4: WRITE IN A FUNCTION THAT TAKES IN A BOARD AND MARK (X OR O) AND CHECKS TO SEE IF THAT MARK HAS WON. def win_check(board,mark): return((board[7]==mark and board[8]==mark and board[9]==mark) or (board[4]==mark and board[5]==mark and board[6]==mark)or (board[1]==mark and board[2]==mark and board[3]==mark)or (board[7]==mark and board[4]==mark and board[1]==mark)or (board[8]==mark and board[5]==mark and board[2]==mark)or (board[9]==mark and board[6]==mark and board[3]==mark)or (board[7]==mark and board[5]==mark and board[3]==mark)or (board[9]==mark and board[5]==mark and board[1]==mark)) win_check(test_board,"X")
9106a10aff28c894fe165cefa35ee82cd8488822
b18f92a6a41a3d83e77848460d4a3f17e4fe677a
/introduction_to_python/recursive_functions/1_find_power/solution/test_solution.py
1a873495b9e68f09bf6e6f09278da0ec62088424
[]
no_license
ByteAcademyCo/Exercises
de71b885a498ead8296e6107836f9a06ac399d4f
8332d0473ab35ee1d2975b384afda45c77ef943d
refs/heads/master
2022-05-25T23:01:59.466480
2022-03-14T13:12:10
2022-03-14T13:12:10
252,842,407
1
109
null
2022-03-14T13:12:11
2020-04-03T21:09:47
Python
UTF-8
Python
false
false
167
py
def test_solution(): from solution import power assert power(1, 3) == 1 assert power(2, 4) == 16 assert power(0, 1) == 0 assert power(5, 2) == 25
f30006767dcdf9f17324e03f92349b7c526fad62
07564c75c1f37f2e0304720d1c01f23a27ef3469
/273.IntegertoEnglishWords/solution.py
cfa5b45a7acb04c003bd49fbf53a7a34351569ff
[]
no_license
ynXiang/LeetCode
5e468db560be7f171d7cb24bcd489aa81471349c
763372587b9ca3f8be4c843427e4760c3e472d6b
refs/heads/master
2020-05-21T18:27:16.941981
2018-01-09T22:17:42
2018-01-09T22:17:42
84,642,017
0
0
null
null
null
null
UTF-8
Python
false
false
1,439
py
class Solution(object): def numberToWords(self, num): """ :type num: int :rtype: str """ res = self.helper(num) return ' '.join(res) if res else 'Zero' def helper(self, num): Ones = ['One', 'Two', 'Three', 'Four', 'Five', 'Six', 'Seven', 'Eight', 'Nine', 'Ten', 'Eleven', 'Twelve', 'Thirteen', 'Fourteen', 'Fifteen', 'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen'] Tens = ['Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy', 'Eighty', 'Ninety'] Hundreds = ['Hundred', 'Thousand', 'Million', 'Billion'] res = [] if num == 0: res = [] elif num < 20: res.append(Ones[num - 1]) elif num < 10**2: res.append(Tens[num // 10 - 2]) res += self.helper(num % 10) elif num < 10**3: res += self.helper(num // 10**2) res.append(Hundreds[0]) res += self.helper(num % 10**2) elif num < 10**6: res += self.helper(num // 10**3) res.append(Hundreds[1]) res += self.helper(num % 10**3) elif num < 10**9: res += self.helper(num // 10**6) res.append(Hundreds[2]) res += self.helper(num % 10**6) else: res += self.helper(num // 10**9) res.append(Hundreds[3]) res += self.helper(num % 10**9) return res
96766b767b7e79f7fb5ea45946f0cff5d54bc1c8
47dc4152dd163ce751d4703f19bb5339fc1cfb98
/djchat/settings.py
dae41d6fb978e9e0118e1da42103746c0c1bbbbe
[ "BSD-3-Clause" ]
permissive
michaelXDzhang/pulsar-django
85cf3437a578b2b198ea2f794d1a1f4db8a78ec1
0aa20e1c08b6a782cd634e736e2238776e0c98d5
refs/heads/master
2020-07-27T01:06:32.586546
2017-11-28T10:18:34
2017-11-28T10:18:34
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,562
py
""" Django settings for djchat project. For more information on this file, see https://docs.djangoproject.com/en/1.7/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.7/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os APP_DIR = os.path.dirname(__file__) BASE_DIR = os.path.dirname(APP_DIR) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'fux9z2i)6ab$b_5*^z@96hdtqfj5=ct7b)m6_6cfrr5g%x#=81' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'pulse', 'djchat' ) TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(APP_DIR, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages' ] } } ] MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'djchat.views.middleware' ) ROOT_URLCONF = 'djchat.urls' # Database # https://docs.djangoproject.com/en/1.7/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 
os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.7/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.7/howto/static-files/ STATIC_URL = '/static/'
81063a6e3d985fbef8bfdf7fa09786028090fef0
53fab060fa262e5d5026e0807d93c75fb81e67b9
/backup/user_231/ch45_2020_04_12_23_45_54_626320.py
c4a53b37d17f38d42ea23fb09e36af31d98485ca
[]
no_license
gabriellaec/desoft-analise-exercicios
b77c6999424c5ce7e44086a12589a0ad43d6adca
01940ab0897aa6005764fc220b900e4d6161d36b
refs/heads/main
2023-01-31T17:19:42.050628
2020-12-16T05:21:31
2020-12-16T05:21:31
306,735,108
0
0
null
null
null
null
UTF-8
Python
false
false
233
py
lista=[] lista_reversa=[] a=int(input('digite um numero:')) i=(0) while a>0: lista.append(a) i+=1 a=int(input('digite um numero:')) del lista[i] while i>=0: lista_reversa.append(lista[i]) i-=1 print(lista_reversa)
f22210c8427f7e7a65853ec23b3430b0491d5c34
c97fc7658c39feb51c0ed42c04783797c8675b8a
/xm_1/qt简单数据可视化.py
8536d7db1bea48f72b69fae54a0168600924e53b
[]
no_license
githubvit/study
8bff13b18bea4954e8ed1b4619a091b134b8ff97
845e19d1225f1aa51c828b15effac30be42fdc1b
refs/heads/master
2023-02-20T15:59:19.635611
2021-12-15T08:30:54
2021-12-15T08:30:54
241,928,274
1
1
null
2023-02-02T06:18:48
2020-02-20T16:08:06
Python
UTF-8
Python
false
false
1,548
py
# Qt数据可视化 https://doc.qt.io/qt-5/qtcharts-overview.html from PySide2 import QtGui, QtWidgets from PySide2.QtCharts import QtCharts # 在Qt5.7版本后将Qt Charts加入到了Qt模块中。 # 我们可以方便的使用这个模块,绘制很多样式的图形,比如折线、饼图等,快速实现数据可视化。 # 用Qt Charts绘制,大概分为四个部分: # 数据(QXYSeries)、QChart(不知怎么称呼)、坐标轴(QAbstractAXis)和视图(QChartView)。 # 要注意的是 QChart要先添加数据(QXYSeries) class MainWindow(QtWidgets.QMainWindow): def __init__(self, parent=None): super(MainWindow, self).__init__(parent) series = QtCharts.QLineSeries()#定义线条 连续折线图 # 加点 添加数据 series.append(0,0) series.append(1,7) series.append(1.2,14) series.append(1.3,21) series.append(1.4,28) series.append(1.5,35) self.chartView = QtCharts.QChartView() # 定义ui self.chartView.chart().addSeries(series) # 添加 线条 即 数据 self.chartView.chart().createDefaultAxes() # 创建 坐标轴 series.setColor(QtGui.QColor("salmon")) # 给线条设置颜色 salmon 橙红色,粉橙色 self.setCentralWidget(self.chartView) # 给QMainWindow窗口设置中心部件,必须的 if __name__ == '__main__': import sys app = QtWidgets.QApplication(sys.argv) w = MainWindow() w.resize(640, 480) w.show() sys.exit(app.exec_())
b3cf3a9f9a3615ad902926a49be6cbf5d61fa253
5e6d8b9989247801718dd1f10009f0f7f54c1eb4
/sdk/python/pulumi_azure_native/compute/availability_set.py
5b77516e40d5119864dee8097599203fac648887
[ "BSD-3-Clause", "Apache-2.0" ]
permissive
vivimouret29/pulumi-azure-native
d238a8f91688c9bf09d745a7280b9bf2dd6d44e0
1cbd988bcb2aa75a83e220cb5abeb805d6484fce
refs/heads/master
2023-08-26T05:50:40.560691
2021-10-21T09:25:07
2021-10-21T09:25:07
null
0
0
null
null
null
null
UTF-8
Python
false
false
21,431
py
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from .. import _utilities from . import outputs from ._inputs import * __all__ = ['AvailabilitySetArgs', 'AvailabilitySet'] @pulumi.input_type class AvailabilitySetArgs: def __init__(__self__, *, resource_group_name: pulumi.Input[str], availability_set_name: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, platform_fault_domain_count: Optional[pulumi.Input[int]] = None, platform_update_domain_count: Optional[pulumi.Input[int]] = None, proximity_placement_group: Optional[pulumi.Input['SubResourceArgs']] = None, sku: Optional[pulumi.Input['SkuArgs']] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, virtual_machines: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]] = None): """ The set of arguments for constructing a AvailabilitySet resource. :param pulumi.Input[str] resource_group_name: The name of the resource group. :param pulumi.Input[str] availability_set_name: The name of the availability set. :param pulumi.Input[str] location: Resource location :param pulumi.Input[int] platform_fault_domain_count: Fault Domain count. :param pulumi.Input[int] platform_update_domain_count: Update Domain count. :param pulumi.Input['SubResourceArgs'] proximity_placement_group: Specifies information about the proximity placement group that the availability set should be assigned to. <br><br>Minimum api-version: 2018-04-01. :param pulumi.Input['SkuArgs'] sku: Sku of the availability set, only name is required to be set. See AvailabilitySetSkuTypes for possible set of values. Use 'Aligned' for virtual machines with managed disks and 'Classic' for virtual machines with unmanaged disks. Default value is 'Classic'. 
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags :param pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]] virtual_machines: A list of references to all virtual machines in the availability set. """ pulumi.set(__self__, "resource_group_name", resource_group_name) if availability_set_name is not None: pulumi.set(__self__, "availability_set_name", availability_set_name) if location is not None: pulumi.set(__self__, "location", location) if platform_fault_domain_count is not None: pulumi.set(__self__, "platform_fault_domain_count", platform_fault_domain_count) if platform_update_domain_count is not None: pulumi.set(__self__, "platform_update_domain_count", platform_update_domain_count) if proximity_placement_group is not None: pulumi.set(__self__, "proximity_placement_group", proximity_placement_group) if sku is not None: pulumi.set(__self__, "sku", sku) if tags is not None: pulumi.set(__self__, "tags", tags) if virtual_machines is not None: pulumi.set(__self__, "virtual_machines", virtual_machines) @property @pulumi.getter(name="resourceGroupName") def resource_group_name(self) -> pulumi.Input[str]: """ The name of the resource group. """ return pulumi.get(self, "resource_group_name") @resource_group_name.setter def resource_group_name(self, value: pulumi.Input[str]): pulumi.set(self, "resource_group_name", value) @property @pulumi.getter(name="availabilitySetName") def availability_set_name(self) -> Optional[pulumi.Input[str]]: """ The name of the availability set. 
""" return pulumi.get(self, "availability_set_name") @availability_set_name.setter def availability_set_name(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "availability_set_name", value) @property @pulumi.getter def location(self) -> Optional[pulumi.Input[str]]: """ Resource location """ return pulumi.get(self, "location") @location.setter def location(self, value: Optional[pulumi.Input[str]]): pulumi.set(self, "location", value) @property @pulumi.getter(name="platformFaultDomainCount") def platform_fault_domain_count(self) -> Optional[pulumi.Input[int]]: """ Fault Domain count. """ return pulumi.get(self, "platform_fault_domain_count") @platform_fault_domain_count.setter def platform_fault_domain_count(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "platform_fault_domain_count", value) @property @pulumi.getter(name="platformUpdateDomainCount") def platform_update_domain_count(self) -> Optional[pulumi.Input[int]]: """ Update Domain count. """ return pulumi.get(self, "platform_update_domain_count") @platform_update_domain_count.setter def platform_update_domain_count(self, value: Optional[pulumi.Input[int]]): pulumi.set(self, "platform_update_domain_count", value) @property @pulumi.getter(name="proximityPlacementGroup") def proximity_placement_group(self) -> Optional[pulumi.Input['SubResourceArgs']]: """ Specifies information about the proximity placement group that the availability set should be assigned to. <br><br>Minimum api-version: 2018-04-01. """ return pulumi.get(self, "proximity_placement_group") @proximity_placement_group.setter def proximity_placement_group(self, value: Optional[pulumi.Input['SubResourceArgs']]): pulumi.set(self, "proximity_placement_group", value) @property @pulumi.getter def sku(self) -> Optional[pulumi.Input['SkuArgs']]: """ Sku of the availability set, only name is required to be set. See AvailabilitySetSkuTypes for possible set of values. 
Use 'Aligned' for virtual machines with managed disks and 'Classic' for virtual machines with unmanaged disks. Default value is 'Classic'. """ return pulumi.get(self, "sku") @sku.setter def sku(self, value: Optional[pulumi.Input['SkuArgs']]): pulumi.set(self, "sku", value) @property @pulumi.getter def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: """ Resource tags """ return pulumi.get(self, "tags") @tags.setter def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]): pulumi.set(self, "tags", value) @property @pulumi.getter(name="virtualMachines") def virtual_machines(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]: """ A list of references to all virtual machines in the availability set. """ return pulumi.get(self, "virtual_machines") @virtual_machines.setter def virtual_machines(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SubResourceArgs']]]]): pulumi.set(self, "virtual_machines", value) class AvailabilitySet(pulumi.CustomResource): @overload def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, availability_set_name: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, platform_fault_domain_count: Optional[pulumi.Input[int]] = None, platform_update_domain_count: Optional[pulumi.Input[int]] = None, proximity_placement_group: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None, resource_group_name: Optional[pulumi.Input[str]] = None, sku: Optional[pulumi.Input[pulumi.InputType['SkuArgs']]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, virtual_machines: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubResourceArgs']]]]] = None, __props__=None): """ Specifies information about the availability set that the virtual machine should be assigned to. 
Virtual machines specified in the same availability set are allocated to different nodes to maximize availability. For more information about availability sets, see [Manage the availability of virtual machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json). <br><br> For more information on Azure planned maintenance, see [Planned maintenance for virtual machines in Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json) <br><br> Currently, a VM can only be added to availability set at creation time. An existing VM cannot be added to an availability set. API Version: 2020-12-01. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[str] availability_set_name: The name of the availability set. :param pulumi.Input[str] location: Resource location :param pulumi.Input[int] platform_fault_domain_count: Fault Domain count. :param pulumi.Input[int] platform_update_domain_count: Update Domain count. :param pulumi.Input[pulumi.InputType['SubResourceArgs']] proximity_placement_group: Specifies information about the proximity placement group that the availability set should be assigned to. <br><br>Minimum api-version: 2018-04-01. :param pulumi.Input[str] resource_group_name: The name of the resource group. :param pulumi.Input[pulumi.InputType['SkuArgs']] sku: Sku of the availability set, only name is required to be set. See AvailabilitySetSkuTypes for possible set of values. Use 'Aligned' for virtual machines with managed disks and 'Classic' for virtual machines with unmanaged disks. Default value is 'Classic'. 
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubResourceArgs']]]] virtual_machines: A list of references to all virtual machines in the availability set. """ ... @overload def __init__(__self__, resource_name: str, args: AvailabilitySetArgs, opts: Optional[pulumi.ResourceOptions] = None): """ Specifies information about the availability set that the virtual machine should be assigned to. Virtual machines specified in the same availability set are allocated to different nodes to maximize availability. For more information about availability sets, see [Manage the availability of virtual machines](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-manage-availability?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json). <br><br> For more information on Azure planned maintenance, see [Planned maintenance for virtual machines in Azure](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-windows-planned-maintenance?toc=%2fazure%2fvirtual-machines%2fwindows%2ftoc.json) <br><br> Currently, a VM can only be added to availability set at creation time. An existing VM cannot be added to an availability set. API Version: 2020-12-01. :param str resource_name: The name of the resource. :param AvailabilitySetArgs args: The arguments to use to populate this resource's properties. :param pulumi.ResourceOptions opts: Options for the resource. """ ... 
def __init__(__self__, resource_name: str, *args, **kwargs): resource_args, opts = _utilities.get_resource_args_opts(AvailabilitySetArgs, pulumi.ResourceOptions, *args, **kwargs) if resource_args is not None: __self__._internal_init(resource_name, opts, **resource_args.__dict__) else: __self__._internal_init(resource_name, *args, **kwargs) def _internal_init(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, availability_set_name: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, platform_fault_domain_count: Optional[pulumi.Input[int]] = None, platform_update_domain_count: Optional[pulumi.Input[int]] = None, proximity_placement_group: Optional[pulumi.Input[pulumi.InputType['SubResourceArgs']]] = None, resource_group_name: Optional[pulumi.Input[str]] = None, sku: Optional[pulumi.Input[pulumi.InputType['SkuArgs']]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, virtual_machines: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubResourceArgs']]]]] = None, __props__=None): if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = AvailabilitySetArgs.__new__(AvailabilitySetArgs) __props__.__dict__["availability_set_name"] = availability_set_name __props__.__dict__["location"] = location __props__.__dict__["platform_fault_domain_count"] = platform_fault_domain_count __props__.__dict__["platform_update_domain_count"] = platform_update_domain_count __props__.__dict__["proximity_placement_group"] = proximity_placement_group if resource_group_name is None and not opts.urn: raise TypeError("Missing required property 
'resource_group_name'") __props__.__dict__["resource_group_name"] = resource_group_name __props__.__dict__["sku"] = sku __props__.__dict__["tags"] = tags __props__.__dict__["virtual_machines"] = virtual_machines __props__.__dict__["name"] = None __props__.__dict__["statuses"] = None __props__.__dict__["type"] = None alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:compute:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20150615:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20150615:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20160330:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20160330:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20160430preview:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20160430preview:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20170330:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20170330:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20171201:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20171201:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20180401:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20180401:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20180601:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20180601:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20181001:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20181001:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20190301:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20190301:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20190701:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20190701:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20191201:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20191201:AvailabilitySet"), 
pulumi.Alias(type_="azure-native:compute/v20200601:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20200601:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20201201:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20201201:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20210301:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20210301:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20210401:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20210401:AvailabilitySet"), pulumi.Alias(type_="azure-native:compute/v20210701:AvailabilitySet"), pulumi.Alias(type_="azure-nextgen:compute/v20210701:AvailabilitySet")]) opts = pulumi.ResourceOptions.merge(opts, alias_opts) super(AvailabilitySet, __self__).__init__( 'azure-native:compute:AvailabilitySet', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'AvailabilitySet': """ Get an existing AvailabilitySet resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. 
""" opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = AvailabilitySetArgs.__new__(AvailabilitySetArgs) __props__.__dict__["location"] = None __props__.__dict__["name"] = None __props__.__dict__["platform_fault_domain_count"] = None __props__.__dict__["platform_update_domain_count"] = None __props__.__dict__["proximity_placement_group"] = None __props__.__dict__["sku"] = None __props__.__dict__["statuses"] = None __props__.__dict__["tags"] = None __props__.__dict__["type"] = None __props__.__dict__["virtual_machines"] = None return AvailabilitySet(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter def location(self) -> pulumi.Output[str]: """ Resource location """ return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ Resource name """ return pulumi.get(self, "name") @property @pulumi.getter(name="platformFaultDomainCount") def platform_fault_domain_count(self) -> pulumi.Output[Optional[int]]: """ Fault Domain count. """ return pulumi.get(self, "platform_fault_domain_count") @property @pulumi.getter(name="platformUpdateDomainCount") def platform_update_domain_count(self) -> pulumi.Output[Optional[int]]: """ Update Domain count. """ return pulumi.get(self, "platform_update_domain_count") @property @pulumi.getter(name="proximityPlacementGroup") def proximity_placement_group(self) -> pulumi.Output[Optional['outputs.SubResourceResponse']]: """ Specifies information about the proximity placement group that the availability set should be assigned to. <br><br>Minimum api-version: 2018-04-01. """ return pulumi.get(self, "proximity_placement_group") @property @pulumi.getter def sku(self) -> pulumi.Output[Optional['outputs.SkuResponse']]: """ Sku of the availability set, only name is required to be set. See AvailabilitySetSkuTypes for possible set of values. Use 'Aligned' for virtual machines with managed disks and 'Classic' for virtual machines with unmanaged disks. 
Default value is 'Classic'. """ return pulumi.get(self, "sku") @property @pulumi.getter def statuses(self) -> pulumi.Output[Sequence['outputs.InstanceViewStatusResponse']]: """ The resource status information. """ return pulumi.get(self, "statuses") @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]: """ Resource tags """ return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> pulumi.Output[str]: """ Resource type """ return pulumi.get(self, "type") @property @pulumi.getter(name="virtualMachines") def virtual_machines(self) -> pulumi.Output[Optional[Sequence['outputs.SubResourceResponse']]]: """ A list of references to all virtual machines in the availability set. """ return pulumi.get(self, "virtual_machines")
e3befb7b065b5be68585a6da785f873742bbffa3
a6fa311aff9a99ad6a47e41fe34f3f12bb507007
/reagent/training/__init__.py
2cc9b73dd0d046b8b5115d9b7e1115535db99f34
[ "BSD-3-Clause" ]
permissive
cts198859/ReAgent
222e9dd4aeba455ad5faa9f6178a0e9793cb82fc
20f3d333821bad364fd567cce97de51c44123484
refs/heads/master
2022-09-15T13:08:24.732208
2020-05-29T00:51:35
2020-05-29T00:54:45
267,776,326
0
0
BSD-3-Clause
2020-05-29T05:51:43
2020-05-29T05:51:43
null
UTF-8
Python
false
false
987
py
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved. from .c51_trainer import C51Trainer, C51TrainerParameters from .cem_trainer import CEMTrainer from .dqn_trainer import DQNTrainer, DQNTrainerParameters from .parametric_dqn_trainer import ParametricDQNTrainer, ParametricDQNTrainerParameters from .qrdqn_trainer import QRDQNTrainer, QRDQNTrainerParameters from .rl_trainer_pytorch import RLTrainer from .sac_trainer import SACTrainer, SACTrainerParameters from .td3_trainer import TD3Trainer, TD3TrainingParameters from .world_model.mdnrnn_trainer import MDNRNNTrainer __all__ = [ "C51Trainer", "C51TrainerParameters", "CEMTrainer", "RLTrainer", "DQNTrainer", "DQNTrainerParameters", "MDNRNNTrainer", "ParametricDQNTrainer", "ParametricDQNTrainerParameters", "QRDQNTrainer", "QRDQNTrainerParameters", "SACTrainer", "SACTrainerParameters", "TD3Trainer", "TD3TrainingParameters", ]
d8fc9aa6b18fb2f4bc50363b8a36ca7d158c1c44
08b998966c06dc50cd9372fe3e15d6599bcafbfb
/dotfiles/.ipython/profile_default/startup/10-pager.py
57da300105eaebfc2a84c667499aeb36a6ca7a1d
[ "MIT" ]
permissive
chbrown/config
77661fc8e485d5a8992114fd11e7eae383698b9b
ec8deb0bf756ff62f5599cb239c8ac11084d3d16
refs/heads/master
2021-06-06T10:13:24.401647
2021-02-22T15:03:54
2021-02-22T15:03:54
1,827,574
3
0
null
null
null
null
UTF-8
Python
false
false
301
py
from __future__ import print_function # IPython.core.hooks.show_in_pager doesn't cut it import IPython.core.page def page_printer(data, start=0, screen_lines=0, pager_cmd=None): if isinstance(data, dict): data = data['text/plain'] print(data) IPython.core.page.page = page_printer
f023b96d1bcc10da7a3a00e98c2a26e6526415ec
b6e7e7c0a68621c613898534f20de96c459fd0a9
/client/app.py
9999fc5bcf65ae1e09cde1f359f971321fe32177
[]
no_license
jwoglom/zoom-tools
227db0974c7ac239b9ea51b6e95222c765025d66
951b20970a990f3b293c593d3969c92550120913
refs/heads/main
2023-03-07T18:00:02.646547
2021-02-16T21:33:16
2021-02-16T21:33:16
339,311,304
0
0
null
null
null
null
UTF-8
Python
false
false
1,877
py
#!/usr/bin/env python3 from flask import Flask, Response, request, abort import random import string import subprocess import os app = Flask(__name__) scripts_dir = os.path.join(os.path.dirname(__file__), "../scripts") token = os.environ.get("TOKEN", "".join(random.choice(string.ascii_letters) for i in range(24))) try: from secrets import SELF_TOKEN token = SELF_TOKEN except ImportError: pass print("Token: %s" % token) @app.before_request def is_token_set(): provided_token = request.args.get("token") or request.form.get("token") if provided_token != token: print("Provided invalid token %s" % provided_token) abort(403) def run(script): print(os.path.join(scripts_dir, script)) s = subprocess.run([os.path.join(scripts_dir, script)], capture_output=True) return s.stdout.decode() @app.route('/status', methods=['GET', 'POST']) def status_route(): return run("zoom_status.sh") @app.route('/audio', methods=['GET', 'POST']) def audio_route(): return run("zoom_audio_status.sh") @app.route('/audio/mute', methods=['GET', 'POST']) def mute_route(): return run("zoom_mute.sh") @app.route('/audio/unmute', methods=['GET', 'POST']) def unmute_route(): return run("zoom_unmute.sh") @app.route('/audio/toggle', methods=['GET', 'POST']) def audio_toggle_route(): return run("zoom_audio_toggle.sh") @app.route('/video', methods=['GET', 'POST']) def video_route(): return run("zoom_video_status.sh") @app.route('/video/off', methods=['GET', 'POST']) def video_off_route(): return run("zoom_video_off.sh") @app.route('/video/on', methods=['GET', 'POST']) def video_on_route(): return run("zoom_video_on.sh") @app.route('/video/toggle', methods=['GET', 'POST']) def video_toggle_route(): return run("zoom_video_toggle.sh") if __name__ == '__main__': app.run('0.0.0.0', port=2626)
1489caa6ecc1418fcca6f59a6452f6045f77b738
e0980f704a573894350e285f66f4cf390837238e
/.history/streams/blocks_20201019093841.py
6cccc214eeeb11cc10ee902cc92078eda7e0e6c4
[]
no_license
rucpata/WagtailWebsite
28008474ec779d12ef43bceb61827168274a8b61
5aa44f51592f49c9a708fc5515ad877c6a29dfd9
refs/heads/main
2023-02-09T15:30:02.133415
2021-01-05T14:55:45
2021-01-05T14:55:45
303,961,094
0
0
null
null
null
null
UTF-8
Python
false
false
93
py
from wagtail.core import blocks class TitleBlock(blocks.StructBlock): text = blocks.Char
1d64087b50a7754102a8f120289480550b469a86
41bd7d939207e94c8f6956f02b779f5084b23bf4
/archives/admin.py
8604e9655b6c0429d898e44ebeaf1a4f5c81a761
[]
no_license
wd5/acanthes
724b81c799ab04344c66691a054b2a555b3e3d77
8c4fd011e60e9869396f1a93b385133ebff74238
refs/heads/master
2021-01-17T12:13:35.216661
2012-06-13T13:05:06
2012-06-13T13:05:06
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,171
py
from django.contrib import admin from archives.models import * class IntervenantAudioInline(admin.TabularInline): model = IntervenantAudio extra = 1 class AudioAdmin(admin.ModelAdmin): inlines = (IntervenantAudioInline,) list_display = ('id','subtitle', 'annee', 'genre', 'url_ecoute_intranet_adresse' ) list_filter = ('annee', ) search_fields = ['subtitle', ] exclude = ('duree', 'total_durees', 'chemin_fichier', 'lien_test_web', 'dateissued_portail', 'horodatage_modification', 'url_export_ircam', 'type_ircam', 'date_enregistrement', 'acanthes', 'horodatage_creation', 'url_ecoute_extranet', 'url_ecoute_internet', 'url_ecoute_intranet', 'details_intranet_actuel_acda', 'oai_web_oai_mods', 'oai_id', 'oai_titleinfo_title', 'oai_typeofresource', 'oai_genre', 'oai_origininfo_place', 'oai_origininfo_publisher', 'oai_origininfo_datecaptured', 'oai_language_languageterm_1', 'oai_language_languageterm_2', 'oai_language_languageterm_3', 'oai_physicaldescription_form', 'oai_physicaldescription_internetmediatype', 'oai_physicaldescription_digitalorigin', 'oai_abstract', 'oai_targetaudience', 'oai_location_url_preview', 'oai_location_url_full', 'oai_location_physicallocation', 'oai_accesscondition', 'oai_recordinfo_recordcontentsource', 'oai_recordinfo_recordcreationdate', 'oai_recordinfo_recordchangedate', 'oai_recordinfo_recordidentifier', 'oai_recordinfo_languageofcataloging_languageterm', 'oai_publication') class IntervenantAdmin(admin.ModelAdmin): list_display = ('nom', 'prenom') exclude = ('horodatage_creation', 'horodatage_modification') search_fields = ['nom', 'prenom'] class LangueAdmin(admin.ModelAdmin): list_display = ('languageterm',) class LieuAdmin(admin.ModelAdmin): list_display = ('placeterm', 'salle') class OrchestreAdmin(admin.ModelAdmin): list_display = ('nom_complet', 'sous_titre') search_fields = ['nom_complet', ] admin.site.register(Audio, AudioAdmin) admin.site.register(Intervenant, IntervenantAdmin) admin.site.register(Langue, LangueAdmin) 
admin.site.register(Lieu, LieuAdmin) admin.site.register(Orchestre, OrchestreAdmin)
38a28d7f0257148f8e867dcfd6350f0e6276dd14
f7dd190a665a4966db33dcc1cc461dd060ca5946
/venv/Lib/site-packages/graphene/types/tests/test_schema.py
88af101988356209c9722d213bfa5137344960fa
[]
no_license
Darwin939/macmeharder_back
2cc35e2e8b39a82c8ce201e63d9f6a9954a04463
8fc078333a746ac7f65497e155c58415252b2d33
refs/heads/main
2023-02-28T12:01:23.237320
2021-02-02T17:37:33
2021-02-02T17:37:33
328,173,062
0
0
null
null
null
null
UTF-8
Python
false
false
1,022
py
import pytest from ..field import Field from ..objecttype import ObjectType from ..scalars import String from ..schema import Schema class MyOtherType(ObjectType): field = String() class Query(ObjectType): inner = Field(MyOtherType) def test_schema(): schema = Schema(Query) assert schema.get_query_type() == schema.get_graphql_type(Query) def test_schema_get_type(): schema = Schema(Query) assert schema.Query == Query assert schema.MyOtherType == MyOtherType def test_schema_get_type_error(): schema = Schema(Query) with pytest.raises(AttributeError) as exc_info: schema.X assert str(exc_info.value) == 'Type "X" not found in the Schema' def test_schema_str(): schema = Schema(Query) assert ( str(schema) == """schema { query: Query } type MyOtherType { field: String } type Query { inner: MyOtherType } """ ) def test_schema_introspect(): schema = Schema(Query) assert "__schema" in schema.introspect()
87b4c9c295b5f43b508c4f5062977f0f628852e2
4a84ef702269eed582b04dbed979a24607579f52
/src/mapnik/tests/python_tests/sqlite_rtree_test.py
2d28adac0266d3439eb51f6e9cc4d9c5da04e236
[]
no_license
olibook/pymapnik2
9ef766d759afc3efeccd988bfb7239bd73cac01e
c409fa150e203ff85e14b8fd40063267a6802e1c
refs/heads/master
2016-08-04T11:51:35.987664
2013-02-18T16:01:10
2013-02-18T16:01:10
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,302
py
#!/usr/bin/env python from nose.tools import * from mapnik.tests.python_tests.utilities import execution_path from Queue import Queue import threading import os, mapnik import sqlite3 def setup(): # All of the paths used are relative, if we run the tests # from another directory we need to chdir() os.chdir(execution_path('.')) NUM_THREADS = 10 TOTAL = 245 DB = '../data/sqlite/world.sqlite' TABLE= 'world_merc' def create_ds(): ds = mapnik.SQLite(file=DB,table=TABLE) fs = ds.all_features() if 'sqlite' in mapnik.DatasourceCache.instance().plugin_names(): def test_rtree_creation(): index = DB +'.index' if os.path.exists(index): os.unlink(index) threads = [] for i in range(NUM_THREADS): t = threading.Thread(target=create_ds) t.start() threads.append(t) for i in threads: i.join() eq_(os.path.exists(index),True) conn = sqlite3.connect(index) cur = conn.cursor() try: cur.execute("Select count(*) from idx_%s_GEOMETRY" % TABLE.replace("'","")) conn.commit() eq_(cur.fetchone()[0],TOTAL) except sqlite3.OperationalError: # don't worry about testing # of index records if # python's sqlite module does not support rtree pass cur.close() ds = mapnik.SQLite(file=DB,table=TABLE) fs = ds.all_features() eq_(len(fs),TOTAL) os.unlink(index) ds = mapnik.SQLite(file=DB,table=TABLE,use_spatial_index=False) fs = ds.all_features() eq_(len(fs),TOTAL) eq_(os.path.exists(index),False) ds = mapnik.SQLite(file=DB,table=TABLE,use_spatial_index=True) fs = ds.all_features() for feat in fs: query = mapnik.Query(feat.envelope()) selected = ds.features(query) eq_(len(selected.features)>=1,True) eq_(os.path.exists(index),True) os.unlink(index) def test_geometry_round_trip(): test_db = '/tmp/mapnik-sqlite-point.db' ogr_metadata = True # create test db conn = sqlite3.connect(test_db) cur = conn.cursor() cur.execute(''' CREATE TABLE IF NOT EXISTS point_table (id INTEGER PRIMARY KEY AUTOINCREMENT, geometry BLOB, name varchar) ''') # optional: but nice if we want to read with ogr if ogr_metadata: 
cur.execute('''CREATE TABLE IF NOT EXISTS geometry_columns ( f_table_name VARCHAR, f_geometry_column VARCHAR, geometry_type INTEGER, coord_dimension INTEGER, srid INTEGER, geometry_format VARCHAR )''') cur.execute('''INSERT INTO geometry_columns (f_table_name, f_geometry_column, geometry_format, geometry_type, coord_dimension, srid) VALUES ('point_table','geometry','WKB', 1, 1, 4326)''') conn.commit() cur.close() # add a point as wkb (using mapnik) to match how an ogr created db looks x = -122 # longitude y = 48 # latitude wkt = 'POINT(%s %s)' % (x,y) # little endian wkb (mapnik will auto-detect and ready either little or big endian (XDR)) wkb = mapnik.Path.from_wkt(wkt).to_wkb(mapnik.wkbByteOrder.NDR) values = (None,sqlite3.Binary(wkb),"test point") cur = conn.cursor() cur.execute('''INSERT into "point_table" (id,geometry,name) values (?,?,?)''',values) conn.commit() cur.close() def make_wkb_point(x,y): import struct byteorder = 1; # little endian endianess = '' if byteorder == 1: endianess = '<' else: endianess = '>' geom_type = 1; # for a point return struct.pack('%sbldd' % endianess, byteorder, geom_type, x, y) # confirm the wkb matches a manually formed wkb wkb2 = make_wkb_point(x,y) eq_(wkb,wkb2) # ensure we can read this data back out properly with mapnik ds = mapnik.Datasource(**{'type':'sqlite','file':test_db, 'table':'point_table'}) fs = ds.featureset() feat = fs.next() eq_(feat.id(),1) eq_(feat['name'],'test point') geoms = feat.geometries() eq_(len(geoms),1) eq_(geoms.to_wkt(),'Point(-122.0 48.0)') # ensure it matches data read with just sqlite cur = conn.cursor() cur.execute('''SELECT * from point_table''') conn.commit() result = cur.fetchone() cur.close() feat_id = result[0] eq_(feat_id,1) name = result[2] eq_(name,'test point') geom_wkb_blob = result[1] eq_(str(geom_wkb_blob),geoms.to_wkb(mapnik.wkbByteOrder.NDR)) new_geom = mapnik.Path.from_wkb(str(geom_wkb_blob)) eq_(new_geom.to_wkt(),geoms.to_wkt()) # cleanup os.unlink(test_db) os.unlink(test_db + 
'.index') if __name__ == "__main__": setup() [eval(run)() for run in dir() if 'test_' in run]
8ac469d250354ff770e368d0dc803cc543d5ac0d
c42908fce35bc2afb10abd924cfd13d5fa286205
/html2vec/base/io/basefilehandlers.py
30f19a526fdb753f0bc6b1578280d70ce6dcfae6
[ "MIT" ]
permissive
dpritsos/html2vec
b3866f05e7e1c1cb61f40b8f038c1a05a89a9faa
be5629d6dc2665891472c5795c191286f0de31e7
refs/heads/master
2023-05-13T08:30:24.485797
2021-06-05T07:29:06
2021-06-05T07:29:06
1,896,404
8
0
null
2018-10-20T13:10:43
2011-06-14T19:54:52
Python
UTF-8
Python
false
false
3,689
py
# # Module: Base File Handlers # # Author: Dimitiros Pritsos # # License: BSD Style # # Last update: Please refer to the GIT tracking # """ html2vect.base.io.basefilehandlers: submodule of `html2vect` module defines the class BasePathHandler and BaseFileHandler """ import codecs import os def copyfile(source, dest): """ copyfile(): Copy a file from source to dest path. """ source_f = open(source, 'rb') dest_f = open(dest, 'wb') while True: copy_buffer = source_f.read(1024*1024) if copy_buffer: dest_f.write(copy_buffer) else: break source_f.close() dest_f.close() def movefile(source, dest): """ movefile(): A UNIX compatible function for moving file from Source path to Destination path. The Source path Hard Link is deleted """ os.link(source, dest) os.unlink(source) def file_list_frmpaths(basepath, filepath_l): if basepath is None: basepath = '' if isinstance(filepath_l, str): flist = [files_n_paths[2] for files_n_paths in os.walk(basepath + filepath_l)] flist = flist[0] fname_lst = [basepath + filepath_l + fname for fname in flist] elif isinstance(filepath_l, list): fname_lst = list() for filepath in filepath_l: flist = [files_n_paths[2] for files_n_paths in os.walk(basepath + filepath)] flist = flist[0] fname_lst.extend([basepath + filepath + '/' + fname for fname in flist]) else: raise Exception( "A String or a list of Strings was Expected as input - Stings should be file-paths" ) # For ease of usage the filename list should be returned sorted fname_lst.sort() return fname_lst class BaseFileHandler(object): def __init__(self): self.filename_lst = [] self.file_count = None def __iter__(self): return self def next(self): if len(self.filename_lst) == self.file_count: raise StopIteration xhtml = self.__load_file( self.filename_lst[self.file_count], self.encoding, self.error_handling ) self.file_count += 1 return xhtml def __load_file(self, filename, encoding='utf-8', error_handling='strict'): """ """ try: fenc = codecs.open(filename, 'rb', encoding, error_handling) 
except Exception as e: print("BaseFileHandler.__load_file() FILE %s ERROR: %s" % (filename, e)) return None try: fstr = fenc.read() except Exception as e: print("BaseFileHandler.__load_file() FILE %s ERROR: %s" % (filename, e)) return None finally: fenc.close() return fstr def load_files(self, filename_l, encoding='utf-8', error_handling='strict'): """ """ if isinstance(filename_l, str): return self.__load_file(filename_l, encoding, error_handling) elif isinstance(filename_l, list): self.filename_lst = filename_l self.file_count = 0 self.encoding = encoding self.error_handling = error_handling return self.__iter__() else: raise Exception("A String or a list of Strings was Expected as input") def load_frmpaths(self, basepath, filepath_l, encoding='utf-8', error_handling='strict'): """This function requires hight amount of memory!""" fname_lst = self.file_list_frmpaths(basepath, filepath_l) return [[fname, fstr] for fname, fstr in zip( fname_lst, self.load_files(fname_lst, encoding, error_handling))]
f2b8c9a7622b4657969fb9800cd35901be8fe2e1
e83df449e6956d5af8e4b98d535a9daacbbff477
/main.py
0215ec4d5e158a0c5cdccee9fcaa8569fd2549a5
[]
no_license
LefterisJP/race_analyzer
2f48edc34bb299f0d96e3a19a4f245b1b082f21d
08a5041817e227969775a42656c2bce2030ed69f
refs/heads/master
2020-03-28T22:40:01.078406
2018-09-18T07:47:53
2018-09-18T07:47:53
149,248,881
0
0
null
null
null
null
UTF-8
Python
false
false
204
py
import click @click.group(invoke_without_command=True) @click.pass_context def main(ctx, threads, keyfile, input_file, respect_word_order, **kwargs): pass if __name__ == '__main__': main()
ec86aee2863caad73625cec5b38ecb008e726e79
462c56e7454c97e0541588b9be66a4e216ea20fd
/399.evaluate-division.py
a3cfbdefe5a676c0fd3cfbca9f067f3686c034cf
[]
no_license
LouisYLWang/leetcode_python
d5ac6289e33c5d027f248aa3e7dd66291354941c
2ecaeed38178819480388b5742bc2ea12009ae16
refs/heads/master
2020-05-27T08:38:48.532000
2019-12-28T07:08:57
2019-12-28T07:08:57
188,549,256
0
0
null
null
null
null
UTF-8
Python
false
false
2,199
py
# # @lc app=leetcode id=399 lang=python3 # # [399] Evaluate Division # class Solution(object): def calcEquation(self, equations, values, queries): """ :type equations: List[List[str]] :type values: List[float] :type queries: List[List[str]] :rtype: List[float] """ div_map = dict() for [i,j], v in zip(equations, values): if i in div_map: div_map[i][j] = v else: div_map[i] = {j:v} if j in div_map: div_map[j][i] = 1/v else: div_map[j] = {i:1/v} print(div_map) def get_res(i, j, ans): if i not in div_map or j not in div_map: return -1.0 elif i == j: return 1.0 else: if j in div_map[i]: return div_map[i][j] else: for k in div_map[i]: # use visited to control repeating visit visited.add(i) if k not in visited: temp = get_res(k, j, ans) # do not mistakenly use if temp if temp != -1: return div_map[i][k] * temp # notice: if not find anything, remember to return -1 return -1.0 # an alternative way of implementing get_res (more compact) def get_res(i, j, ans): if i not in div_map: return -1.0 elif i == j: return 1.0 for k in div_map[i]: if j == k: return div_map[i][j] elif k not in visited: visited.add(i) temp = get_res(k, j, ans) if temp != -1: return div_map[i][k] * temp return -1.0 res = list() for query in queries: visited = set() res.append(get_res(query[0], query[1], 1)) return res
0e466f4ac716661f529c7dba7cacc70a9e2d454b
5b9b2ec5fb3142609882a3320c6e64c6b912395c
/LeetCode/mostWaterContainer.py
b38ef468717fd38297251aa67b4e445ea5621a0e
[]
no_license
anildhaker/DailyCodingChallenge
459ba7ba968f4394fb633d6ba8b749c1e4cb7fb0
f1cfc52f156436dc7c0a6c43fa939cefac5cee36
refs/heads/master
2020-04-20T18:58:33.133225
2020-01-11T14:50:52
2020-01-11T14:50:52
169,036,874
1
0
null
null
null
null
UTF-8
Python
false
false
642
py
# Given n non-negative integers a1, a2, ..., an , where each represents a point at # coordinate (i, ai). n vertical lines are drawn such that the two endpoints of line # i is at (i, ai) and (i, 0). Find two lines, which together with x-axis forms a # container, such that the container contains the most water. def maxArea(self, height: List[int]) -> int: i = 0 j = len(height)-1 area = 0 while i < j : area = max(area,(j-i)*min(height[i],height[j])) if height[i] < height[j]: i += 1 else: j -= 1 return area
c9698f69a352b6a3843b8b47da144c699952fec5
b6df7cda5c23cda304fcc0af1450ac3c27a224c1
/data/codes/bluquar_cube.py
c9bc60edb49769e4a1e2ad2a9460bddc02746812
[ "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
vieira-rafael/py-search
88ee167fa1949414cc4f3c98d33f8ecec1ce756d
b8c6dccc58d72af35e4d4631f21178296f610b8a
refs/heads/master
2021-01-21T04:59:36.220510
2016-06-20T01:45:34
2016-06-20T01:45:34
54,433,313
2
4
null
null
null
null
UTF-8
Python
false
false
43,257
py
# cube.py# Chris Barker# CMU S13 15-112 Term Project from Tkinter import *from geometry import *import heapqimport copyimport randomimport solutionsimport mathfrom math import sin, cos, pi class Struct(object): pass def loadObject(path, index): with open(path) as file: try: data = file.read() except Exception as e: print 'Error reading data!', e return eval(data)[index] def drawChevron(canvas, cx, cy, r): coords = (cx - 0.3 * r, cy - 0.5 * r, cx - 0.2 * r, cy - 0.5 * r, cx + 0.3 * r, cy, cx - 0.2 * r, cy + 0.5 * r, cx - 0.3 * r, cy + 0.5 * r, cx - 0.3 * r, cy + 0.4 * r, cx + 0.2 * r, cy, cx - 0.3 * r, cy - 0.4 * r) canvas.create_polygon(*coords, fill='white', state='disabled') def brief(L): s = '' for e in L: s += str(e[0]) return s def reversal(move): if type(move) == tuple: move = move[0] if type(move) == str: if "'" in move: move = move[0] else: move = move + "'" return move def darken(color): if color[0] != '#': if color == 'white': color = '#ffffff' elif color == 'orange': color = '#ffa500' elif color == 'red': color = '#ff0000' elif color == 'blue': color = '#0000ff' elif color == 'green': color = '#00ff00' elif color == 'yellow': color = '#ffff00' else: return color return darken(color) else: red = int(color[1:3], 16) green = int(color[3:5], 16) blue = int(color[5:7], 16) red /= 2 green /= 2 blue /= 2 return '#%02x%02x%02x' % (red, green, blue) class CenterPiece(object): def __init__(self, vec, parent): self.vec = vec self.parent = parent def callback(self, e): self.parent.addMoves([self.vec], self.PLAYING) class Cube(object): directions = { I_HAT : 'green', -I_HAT : 'blue', J_HAT : 'red', -J_HAT : 'orange', K_HAT : 'yellow', -K_HAT : 'white'} helpStrings = { 'general': 'Welcome to Cubr!\nHover over a button below to view help for it.\n\n\The Rubik\'s Cube, invented in 1974 by Erno Rubik, is one of the most popular toys of all time.\n\It consists of six independently rotating faces, each with nine colored stickers.\n\The goal is to arrange the cube so that 
each face contains only one color.\n\In 1981 David Singmaster published his popular three-layer solution method, which is used in this program.\n\With practice, most people could solve the cube in under a minute. Since then, speedcubing has taken off and the current record is held by \n\Mats Valk, who solved the cube in 5.55 seconds. In 2010, Tomas Rokicki proved that any Rubik\'s cube can be solved in 20 face rotations or less.\n\n\This program will interactively guide you through the three-layer solution algorithm.\n\At each step of the solution, you will be given information describing the step you are completing.\n\You may either work with a randomly generated Rubik\'s Cube, or use your webcam to input the current configuration of your own cube!\n\n\Many people think of solving the cube as moving the 54 stickers into place. However, it is much more helpful to think about it as\n\moving 20 "blocks" (12 edges, 8 corners) into place. The centers of each face always stay in the same orientation relative to each other,\n\and the stickers on each block always stay in place relative to each other.\n\Solving the first layer means getting four edges and four corners in place so that one face is all the same color.\n\This is intuitive for many people, but by being conscious of the algorithms you use, you can improve your time and consistency.\n\The second layer of blocks requires only one algorithm, and involves moving only four edge pieces into place.\n\The third and final layer is the most complicated, and requires separate algorithms for orienting (getting the stickers facing the right way)\n\and for permuting (getting the individual blocks into place). 
With enough practice, you can be an expert cube solver!\n\', 'pause': 'During a guided solution, press this button to pause your progress.', 'play': 'During a guided solution, press this button to resume solving the cube.', 'reverse': 'During a guided solution, press this button to reverse the moves made so far.', 'back': 'Press this button to step one move backward.', 'step': 'Press this button to step one move forward.', 'speedUp': 'Press this button to increase the rotation speed during a guided solution.', 'slowDown': 'Press this button to decrease the rotation speed during a guided solution.', 'fromCamera': 'Press this button to start the camera and input the configuration of your Rubik\'s cube.\n\Tip: When inputting your cube through the camera, tilt the cube up or down to reduce glare from the screen.\n\More tips: If the program misrecognizes a color, press the spacebar anyway to record the colors. Then, click on the misrecognized\n\color and select the correct color from the list of colors that will pop up. 
Make sure you copy the movement of the virtual cube when it\n\rotates to the next face so that your cube will be interpreted accurately.', 'guide': 'guides through solution', 'guideFast': 'guides through solution more quickly', 'reset': 'resets the cube to a solved state', 'shuffle': 'shuffles the cube', 'solve': 'solves the cube', 'info': 'reopen this screen', 'stats': 'shows statistics' } faceColors = { } @classmethod def setFaceColors(cls): cls.faceColors = {} for z in xrange(3): for y in xrange(3): for x in xrange(3): pieceId = z * 9 + y * 3 + x + 1 cls.faceColors[pieceId] = [ ] (X, Y, Z) = (x - 1, y - 1, z - 1) pos = Vector(X,Y,Z) for vec in [Vector(0,0,1), Vector(0,1,0), Vector(1,0,0)]: for direction in cls.directions: if direction // vec: if direction ** pos > 0: cls.faceColors[pieceId].append(cls.directions[direction]) def __init__(self, canvas, controlPane, app, mode='solved'): Cube.setFaceColors() self.state = CubeState(mode) self.faces = { } self.size = 3 self.center = Vector(0,0,0) self.app = app (self.PAUSED, self.PLAYING, self.REVERSING, self.STEP, self.BACK) = (1,2,3,4,5) self.status = self.PAUSED (self.INGAME, self.SHOWINGINFO, self.SHOWINGSTATS) = range(3) self.helpState = self.SHOWINGINFO self.statString = '' self.helpIndex = 'general' self.shuffling = False self.delay = 100 self.direction = (0, 0) self.after = 0 self.debug = False self.message = "" self.sol = '' self.shuffleLen = 200 self.moveList = [ ] self.moveIndex = -1 self.controlPane = controlPane self.timeBetweenRotations = 0 self.timeUntilNextRotation = 0 self.rotating = False self.rotationAxis = False self.rotationDirection = False self.rotationCount = 0 self.maxRot = 5 self.rotationQueue = [ ] self.rotatingValues = [ ] self.sensitivity = 0.04 # click and drag self.showingPB = False self.pbMin = 0 self.pbVal = 0 self.pbMax = 0 self.paused = False self.configureControls(controlPane) self.configureWindow(canvas) self.showInWindow() @property def maxRot(self): return self.maxRotationCount 
@maxRot.setter def maxRot(self, value): self.maxRotationCount = value self.rotationDTheta = math.pi / (2. * self.maxRotationCount) @maxRot.deleter def maxRot(self): pass def configureControls(self, pane): pane.delete(ALL) width = int(pane.cget('width')) height = int(pane.cget('height')) r = 24 # # PAUSE # (cx, cy) = (width/2, height/2) pauseButton = pane.create_oval(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(pauseButton, '<Button-1>', self.pause) pane.create_rectangle(cx - (r * 0.35), cy - (r * 0.5), cx - (r * 0.10), cy + (r * 0.5), fill='#ffffff', state='disabled') pane.create_rectangle(cx + (r * 0.35), cy - (r * 0.5), cx + (r * 0.10), cy + (r * 0.5), fill='#ffffff', state='disabled') pane.tag_bind(pauseButton, '<Enter>', lambda e: self.assignHelp('pause')) pane.tag_bind(pauseButton, '<Leave>', lambda e: self.assignHelp('general')) # # PLAY # (cx, cy) = (width/2 + r*2.4, height/2) playButton = pane.create_oval(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(playButton, '<Button-1>', self.play) pane.create_polygon(cx - r * 0.35, cy - r * 0.5, cx + r * 0.55, cy, cx - r * 0.35, cy + r * 0.5, fill='#ffffff', state='disabled') pane.tag_bind(playButton, '<Enter>', lambda e: self.assignHelp('play')) pane.tag_bind(playButton, '<Leave>', lambda e: self.assignHelp('general')) # # REVERSE # (cx, cy) = (width/2 - r*2.4, height/2) reverseButton = pane.create_oval(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(reverseButton, '<Button-1>', self.reverse) pane.create_polygon(cx + r * 0.35, cy - r * 0.5, cx - r * 0.55, cy, cx + r * 0.35, cy + r * 0.5, fill='#ffffff', state='disabled') pane.tag_bind(reverseButton, '<Enter>', lambda e: self.assignHelp('reverse')) pane.tag_bind(reverseButton, '<Leave>', lambda e: self.assignHelp('general')) # # 
SPEED UP # (cx, cy) = (width/2 + r * 10.0, height/2) speedUpButton = pane.create_rectangle(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(speedUpButton, '<Button-1>', self.speedUp) drawChevron(pane, cx, cy, r) drawChevron(pane, cx - 0.3 * r, cy, r * 0.8) drawChevron(pane, cx + 0.3 * r, cy, r * 1.2) pane.tag_bind(speedUpButton, '<Enter>', lambda e: self.assignHelp('speedUp')) pane.tag_bind(speedUpButton, '<Leave>', lambda e: self.assignHelp('general')) # # SLOW DOWN # (cx, cy) = (width/2 + r * 7.5, height/2) slowDownButton = pane.create_rectangle(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(slowDownButton, '<Button-1>', self.slowDown) drawChevron(pane, cx - 0.3 * r, cy, r * 0.8) drawChevron(pane, cx, cy, r) pane.tag_bind(slowDownButton, '<Enter>', lambda e: self.assignHelp('slowDown')) pane.tag_bind(slowDownButton, '<Leave>', lambda e: self.assignHelp('general')) # # SHUFFLE # (cx, cy) = (r * 1.5, height/2) shuffleButton = pane.create_oval(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(shuffleButton, '<Button-1>', self.shuffle) coords = (cx - 0.6 * r, cy - 0.4 * r, cx - 0.6 * r, cy - 0.2 * r, cx - 0.2 * r, cy - 0.2 * r, cx + 0.2 * r, cy + 0.4 * r, cx + 0.6 * r, cy + 0.4 * r, cx + 0.6 * r, cy + 0.6 * r, cx + 0.8 * r, cy + 0.3 * r, cx + 0.6 * r, cy - 0.0 * r, cx + 0.6 * r, cy + 0.2 * r, cx + 0.2 * r, cy + 0.2 * r, cx - 0.2 * r, cy - 0.4 * r, cx - 0.4 * r, cy - 0.4 * r) pane.create_polygon(*coords, outline='#ffffff', fill='#0000ff', state='disabled') coords = (cx - 0.6 * r, cy + 0.4 * r, cx - 0.6 * r, cy + 0.2 * r, cx - 0.2 * r, cy + 0.2 * r, cx + 0.2 * r, cy - 0.4 * r, cx + 0.6 * r, cy - 0.4 * r, cx + 0.6 * r, cy - 0.6 * r, cx + 0.8 * r, cy - 0.3 * r, cx + 0.6 * r, cy - 0.0 * r, cx + 0.6 * r, cy - 0.2 * r, cx + 0.2 * r, cy - 0.2 * 
r, cx - 0.2 * r, cy + 0.4 * r, cx - 0.4 * r, cy + 0.4 * r) pane.create_polygon(*coords, outline='#ffffff', fill='#0000ff', state='disabled') pane.tag_bind(shuffleButton, '<Enter>', lambda e: self.assignHelp('shuffle')) pane.tag_bind(shuffleButton, '<Leave>', lambda e: self.assignHelp('general')) # # SOLVE # (cx, cy) = (r * 4.0, height/2) solveButton = pane.create_oval(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(solveButton, '<Button-1>', self.solve) pane.create_text(cx, cy, text='Solve', fill='white', state='disabled') pane.tag_bind(solveButton, '<Enter>', lambda e: self.assignHelp('solve')) pane.tag_bind(solveButton, '<Leave>', lambda e: self.assignHelp('general')) # # RESET # (cx, cy) = (r * 6.5, height/2) resetButton = pane.create_oval(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(resetButton, '<Button-1>', self.reset) pane.create_text(cx, cy, text='Reset', fill='white', state='disabled') pane.tag_bind(resetButton, '<Enter>', lambda e: self.assignHelp('reset')) pane.tag_bind(resetButton, '<Leave>', lambda e: self.assignHelp('general')) # # FROM CAMERA # (cx, cy) = (r * 9.0, height/2) fromcamButton = pane.create_oval(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(fromcamButton, '<Button-1>', self.fromCamera) pane.create_text(cx, cy-12, text='From', fill='white', state='disabled') pane.create_text(cx, cy, text='Camera', fill='white', state='disabled') pane.tag_bind(fromcamButton, '<Enter>', lambda e: self.assignHelp('fromCamera')) pane.tag_bind(fromcamButton, '<Leave>', lambda e: self.assignHelp('general')) # # GUIDE # (cx, cy) = (r * 12.5, height/2) guideButton = pane.create_rectangle(cx - 2*r, cy - r, cx + 2*r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) 
pane.tag_bind(guideButton, '<Button-1>', self.guideThrough) pane.create_text(cx, cy-12, text='Guide Through', fill='white', state='disabled') pane.create_text(cx, cy, text='Solution', fill='white', state='disabled') pane.tag_bind(guideButton, '<Enter>', lambda e: self.assignHelp('guide')) pane.tag_bind(guideButton, '<Leave>', lambda e: self.assignHelp('general')) # # GUIDE FASTER # (cx, cy) = (r * 17.5, height/2) guideFastButton = pane.create_rectangle(cx - 2.5*r, cy - r, cx + 2.5*r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(guideFastButton, '<Button-1>', self.guideFastThrough) pane.create_text(cx, cy-12, text='Guide Through', fill='white', state='disabled') pane.create_text(cx, cy, text='Solution (Faster)', fill='white', state='disabled') pane.tag_bind(guideFastButton, '<Enter>', lambda e: self.assignHelp('guideFast')) pane.tag_bind(guideFastButton, '<Leave>', lambda e: self.assignHelp('general')) # # BACK # r = 14 (cx, cy) = (width/2 - r*7.5, height/2) backButton = pane.create_oval(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(backButton, '<Button-1>', self.back) pane.create_polygon(cx + r * 0.35, cy - r * 0.5, cx - r * 0.55, cy, cx + r * 0.35, cy + r * 0.5, fill='#ffffff', state='disabled') pane.tag_bind(backButton, '<Enter>', lambda e: self.assignHelp('back')) pane.tag_bind(backButton, '<Leave>', lambda e: self.assignHelp('general')) # # FORWARD # (cx, cy) = (width/2 + r*7.5, height/2) stepButton = pane.create_oval(cx - r, cy - r, cx + r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(stepButton, '<Button-1>', self.step) pane.create_polygon(cx - r * 0.35, cy - r * 0.5, cx + r * 0.55, cy, cx - r * 0.35, cy + r * 0.5, fill='#ffffff', state='disabled') pane.tag_bind(stepButton, '<Enter>', lambda e: self.assignHelp('step')) pane.tag_bind(stepButton, '<Leave>', lambda e: 
self.assignHelp('general')) # # INFO # (cx, cy) = (width - r * 3.5, height/2) helpButton = pane.create_rectangle(cx - 2*r, cy - r, cx + 2*r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(helpButton, '<Button-1>', lambda e: self.assignHelpState(self.SHOWINGINFO)) pane.create_text(cx, cy, text='Help', fill='white', state='disabled') pane.tag_bind(helpButton, '<Enter>', lambda e: self.assignHelp('info')) pane.tag_bind(helpButton, '<Leave>', lambda e: self.assignHelp('general')) # # STATS # (cx, cy) = (width - r * 8.0, height/2) statsButton = pane.create_rectangle(cx - 2*r, cy - r, cx + 2*r, cy + r, fill='#0088ff', activefill='#00ffff', outline='#ffffff', width=1, activewidth=3) pane.tag_bind(statsButton, '<Button-1>', self.showStats) pane.create_text(cx, cy, text='Stats', fill='white', state='disabled') pane.tag_bind(statsButton, '<Enter>', lambda e: self.assignHelp('stats')) pane.tag_bind(statsButton, '<Leave>', lambda e: self.assignHelp('general')) def configureWindow(self, canvas): if canvas == None: self.root = Tk() (self.width, self.height) = (450, 450) self.canvas = Canvas(self.root, width=self.width, height=self.height, background='#333333') self.needsLoop = True else: self.root = canvas._root() self.canvas = canvas (self.width, self.height) = (int(canvas.cget('width')), int(canvas.cget('height'))) self.needsLoop = False self.dim = {'width': self.width, 'height': self.height} def speedUp(self, e): self.maxRot = max(1, self.maxRot - 1) def slowDown(self, e): self.maxRot += 1 def timer(self): needsRedraw = self.move() or (not self.status == self.PAUSED) if self.rotating: self.rotationCount -= 1 if self.rotationCount <= 0: self.rotating = False self.rotatingValues = [ ] self.state.rotate(self.rotationItem) del self.rotationItem needsRedraw = True if self.timeUntilNextRotation > 0: self.timeUntilNextRotation -= 1 if (not self.rotating) and (self.timeUntilNextRotation <= 0): if (self.status == self.PLAYING) or 
(self.status == self.STEP): if self.moveIndex >= (len(self.moveList) - 1): self.status = self.PAUSED self.updateMessage('') self.shuffling = False else: self.moveIndex += 1 needsRedraw = self.makeMove(self.moveList[self.moveIndex], animate = not self.shuffling, render = not self.shuffling or (self.moveIndex % 20 == 0)) if (self.status == self.REVERSING) or (self.status == self.BACK): if self.moveIndex < 0: self.status = self.PAUSED else: needsRedraw = self.makeMove(reversal(self.moveList[self.moveIndex])) self.moveIndex -= 1 if (self.status == self.STEP) or (self.status == self.BACK): self.status = self.PAUSED self.timeUntilNextRotation = self.timeBetweenRotations if needsRedraw: try: self.redraw() except: self.updateMessage('Could not read cube.') self.state.setSolved() self.redraw() def updateMessage(self, msg): self.message = msg def updateSol(self, msg): self.sol = msg def showInWindow(self): self.canvas.pack() self.camera = Camera(Vector(4,-6.5,-7), Vector(0,0,0), pi/5, self.dim) self.amt = self.camera.sensitivity * self.camera.pos.dist(self.camera.origin) self.redraw() if self.needsLoop: root.mainloop() def cleanup(self): for pg in self.faces.values(): self.canvas.itemconfig(pg, state='hidden') def move(self): self.amt = self.camera.sensitivity * self.camera.pos.dist(self.camera.origin) redraw = False if self.direction != (0, 0): self.camera.rotate(self.direction) redraw = True if self.app.resized: self.app.dragVal = (0,0) self.app.resized = False redraw = True elif self.app.dragVal != (0,0): self.camera.rotate((-self.sensitivity * self.app.dragVal[0], -self.sensitivity * self.app.dragVal[1])) redraw = True self.app.dragVal = (self.app.dragVal[0] * 0.7, self.app.dragVal[1] * 0.7) if self.app.dragVal[0] < 0.01 and self.app.dragVal[1] < 0.01: self.app.dragVal = (0,0) return redraw @staticmethod def corners(center, direction, *args): if len(args) == 0: if direction // Vector(0,1,0): # parallel norm1 = Vector(1, 0, 0) else: norm1 = Vector(0,1,0) norm2 = 2 * 
direction * norm1 else: (norm1, norm2) = args corners = [ ] for coef1 in xrange(-1, 2, 2): for coef2 in xrange(coef1, -2 * coef1, -2*coef1): corner = center + (0.5 * norm1 * coef1 + 0.5 * norm2 * coef2) corners.append(corner) return corners def pieceOffset(self, x, y, z): z -= 1 y -= 1 x -= 1 return Vector(x,y,z) def redraw(self): self.canvas.delete(ALL) # Top message self.canvas.create_text(self.camera.width/2, 40, text=self.message, fill='white', font='Arial 24 bold') # Bottom message sol = self.sol lineWidth = 100 margin = 15 y = self.camera.height - margin - 20 while len(sol) > 0: self.canvas.create_text(self.camera.width/2, y, text=sol[-lineWidth:], fill='white', font='Courier 12') y -= margin sol = sol[:-lineWidth] # Progress bar if self.showingPB: w = (self.width * (self.moveIndex - self.pbMin + 1) / (max(1, self.pbMax - self.pbMin))) self.canvas.create_rectangle(0, self.height-20, w, self.height, fill='#00ff66') toDraw = [ ] for z in xrange(self.size): for y in xrange(self.size): for x in xrange(self.size): try: (pieceID, rotationKey) = self.state.state[z][y][x] except: pieceID = 1 rotationKey = 210 pieceCenter = self.center + self.pieceOffset(x, y, z) outDirections = [d for d in Cube.directions if d**pieceCenter > 0] sod = [ ] #sorted out directions for od in outDirections: if od // CubeState.keys[rotationKey / 100]: sod.append(od) for od in outDirections: if od // CubeState.keys[(rotationKey / 10) % 10]: sod.append(od) for od in outDirections: if od // CubeState.keys[rotationKey % 10]: sod.append(od) pieceRotation = Vector(0,0,0) theta = 0. 
if pieceID in self.rotatingValues: oldCenter = pieceCenter pieceOffset = pieceCenter - (pieceCenter > self.rotationAxis) pieceRotation = self.rotationAxis * pieceOffset theta = self.rotationDTheta * (self.maxRot - self.rotationCount) if self.rotationDirection: theta *= -1 pieceCenter = (pieceCenter > self.rotationAxis) pieceCenter = pieceCenter + cos(theta) * pieceOffset pieceCenter = pieceCenter + sin(theta) * pieceRotation faceColors = Cube.faceColors[pieceID] for direc, color in zip(sod, faceColors): axes = () faceCenter = pieceCenter + (direc / 2) if pieceID in self.rotatingValues: if direc // self.rotationAxis: faceCenter = pieceCenter + (direc / 2) if self.rotationAxis // Vector(0,1,0): axis0temp = Vector(1,0,0) else: axis0temp = Vector(0,1,0) axis1temp = direc * axis0temp axis0 = axis0temp * cos(theta) + axis1temp * sin(theta) axis1 = axis0 * direc axes = (axis0, axis1) else: perp = -1 * (direc * self.rotationAxis) perp = perp ^ (direc.mag) faceCenter = pieceCenter + (sin(theta) * (perp / 2) + cos(theta) * (direc / 2)) axis0 = self.rotationAxis axis1 = (faceCenter - pieceCenter) * self.rotationAxis * 2 axes = (axis0, axis1) visible = (faceCenter - pieceCenter) ** (faceCenter - self.camera.pos) < 0 corners = self.corners(faceCenter, pieceCenter - faceCenter, *axes) corners = [corner.flatten(self.camera) for corner in corners] state = 'disabled' # if visible else 'hidden' outline = '#888888' if visible else 'gray' if not visible: color = 'gray' a = 0 if visible else 1000 spec = (corners, color, state, outline) toDraw.append(((pieceCenter-self.camera.pos).mag + a, spec)) #a = self.canvas.create_polygon(corners, fill=color, # width=2, state=state, outline='#888888' # #,activewidth=4, activefill=darken(color) # ) if self.debug: self.canvas.create_text(faceCenter.flatten(self.camera), text=str(pieceID)) #if pieceCenter.mag() == 1: # b = CenterPiece(pieceCenter, self) # self.canvas.tag_bind(a, '<Button-1>', b.callback) """ newCorners = () for corner in corners: 
newCorners += corner.flatten(self.camera) if visible: self.canvas.create_polygon(self.faces[(pieceID,color)], newCorners) #self.canvas.itemconfig(self.faces[(pieceID,color)], state=state) """ toDraw.sort(lambda a,b: cmp(b,a)) for polygon in toDraw: spec = polygon[1] (corners, color, state, outline) = spec self.canvas.create_polygon(corners, fill=color, width=2, state=state, outline=outline) self.drawHelp() def gatherStats(self): self.statString = 'Unable to fetch solution logs.' stats = None try: with open('solutionLogs.txt') as file: stats = eval(file.read()) except: return if stats is not None: self.statString = '' stats = [s.split(';') for s in stats] moves = [stat[-1] for stat in stats] # Gets last element moves = [mv[6:] for mv in moves] # Remove "Moves:" moves = [int(mv) for mv in moves] if len(moves) == 0: self.statString += "No solutions generated yet." return self.statString += "%d solution%s logged.\n" % (len(moves), '' if len(moves)==1 else 's') avgMoves = sum(moves)/len(moves) self.statString += "Average number of 90 degree face rotations per solution: %d\n" % (avgMoves) times = [stat[-2] for stat in stats] # gets 2nd to last element times = [tm[6:-4] for tm in times] # removes "Time: " ... 
" sec" times = [float(tm) for tm in times] avgTime = sum(times)/(max(1, len(times))) self.statString += "Average time needed to generate a solution: %0.4f seconds" % (avgTime) def resetStats(self): try: with open('solutionLogs.txt', 'r+') as file: file.seek(0) # beginning file.truncate() file.writelines(['[]']) except: return def showStats(self, *args): self.gatherStats() self.helpState = self.SHOWINGSTATS def drawHelp(self): ## MAGIC NUMBERS EVERYWHERE if self.helpState == self.SHOWINGINFO: canvas = self.canvas canvas.create_rectangle(100, 100, self.width-100, self.height-100, fill='#888888', outline='#ccccff', width=4) canvas.create_rectangle(110, 110, 140, 140, fill='#880000', activefill='#aa0000') canvas.create_text(125, 125, text='X', fill='black', state='disabled') canvas.create_rectangle(self.width/2-50, self.height-140, self.width/2+50, self.height-110, fill='#008800', activefill='#00aa00') canvas.create_text(self.width/2, self.height-125, text='Start', fill='black', state='disabled') canvas.create_text(self.width/2, 130, text="Welcome to Cubr!", font='Arial 25 bold') canvas.create_text(self.width/2, self.height/2, text=self.helpStrings[self.helpIndex]) elif self.helpState == self.SHOWINGSTATS: canvas = self.canvas canvas.create_rectangle(100, 100, self.width-100, self.height-100, fill='#888888', outline='#ccccff', width=4) canvas.create_rectangle(110, 110, 140, 140, fill='#880000', activefill='#aa0000') canvas.create_text(125, 125, text='X', fill='black', state='disabled') canvas.create_rectangle(self.width/2-50, self.height-140, self.width/2+50, self.height-110, fill='#008800', activefill='#00aa00') canvas.create_text(self.width/2, self.height-125, text='Back', fill='black', state='disabled') canvas.create_rectangle(147, self.height-130, 178, self.height-115, fill='#aaffaa', activefill='#ffffff') canvas.create_text(250, self.height-130, text="These statistics are generated dynamically.\nClick here to reset your data logs.", state='disabled') 
canvas.create_text(self.width/2, self.height/2, text=self.statString, font='Arial 24 bold') def click(self, event): if self.helpState == self.SHOWINGINFO or self.helpState == self.SHOWINGSTATS: if 110 < event.x < 140 and 110 < event.y < 140: self.helpState = self.INGAME self.redraw() elif self.width/2-50 < event.x < self.width/2+50 and \ self.height-140 < event.y < self.height-110: self.helpState = self.INGAME self.redraw() if self.helpState == self.SHOWINGSTATS: if 147 < event.x < 178 and self.height-130 < event.y < self.height-115: self.resetStats() self.showStats() self.redraw() def assignHelp(self, key): self.helpIndex = key self.redraw() def assignHelpState(self, state): self.helpState = state self.redraw() def setConfig(self, config): try: self.state = CubeState('barebones') if self.debug: print self.state # Modify the state to include [(color, direction), (color, direction), ...] # And then parse pieceId and orientationKey out of that def faceToAxis(face): if self.debug: print face center = face[1][1] axis = [vec for vec in Cube.directions if Cube.directions[vec].lower() == center.lower()][0] return axis def setAxes(normal, known, dirString): dirString = dirString.lower() if dirString == 'up': up = known elif dirString == 'down': up = known * -1 elif dirString == 'left': up = (normal * known) elif dirString == 'right': up = (known * normal) down = up * -1 left = (up * normal) right = left * -1 return (up, down, left, right) timesTouched = [[[0,0,0],[0,0,0],[0,0,0]],[[0,0,0],[0,0,0],[0,0,0]],[[0,0,0],[0,0,0],[0,0,0]]] for faceInfo in config: axis = faceToAxis(faceInfo.currentFace) prevAxis = nextAxis = None if faceInfo.prevFace: prevAxis = faceToAxis(faceInfo.prevFace) if faceInfo.nextFace: nextAxis = faceToAxis(faceInfo.nextFace) prevTurn = faceInfo.prevTurn nextTurn = faceInfo.nextTurn if self.debug: print 'axis:', axis, Cube.directions[axis] print 'prevAxis:', prevAxis, if prevAxis: print Cube.directions[prevAxis] print 'nextAxis:', nextAxis, if nextAxis: 
print Cube.directions[nextAxis] print 'prevTurn:', prevTurn print 'nextTurn:', nextTurn if prevTurn: (up, down, left, right) = setAxes(axis, prevAxis, prevTurn) elif nextTurn: (up, down, left, right) = setAxes(axis, nextAxis * -1, nextTurn) if self.debug: print 'up:', up, Cube.directions[up] print 'down:', down, Cube.directions[down] print 'left:', left, Cube.directions[left] print 'right:', right, Cube.directions[right] for row in xrange(3): for col in xrange(3): pos = axis pos = pos + (down * (row - 1)) pos = pos + (right * (col - 1)) (x, y, z) = pos.components (x, y, z) = (int(x+1), int(y+1), int(z+1)) if self.debug: print 'x,y,z', x, y, z, print 'pos=', pos timesTouched[z][y][x] += 1 cell = self.state.state[z][y][x] if type(cell) == list: cell.append((faceInfo.currentFace[row][col], axis)) if self.debug: print 'state=', self.state print 'times', timesTouched # Cast each [ ] list to a ( ) tuple # [(color,dir),(color,dir),(color,dir)] ----> (pieceId, orientationKey) reverseZ = -1 if self.camera.view ** Vector(0,0,1) < 0 else 1 reverseY = -1 if self.camera.view ** Vector(0,1,0) < 0 else 1 reverseX = -1 if self.camera.view ** Vector(1,0,0) < 0 else 1 zRange = range(3)[::reverseZ] yRange = range(3)[::reverseY] xRange = range(3)[::reverseX] for z in zRange: for y in yRange: for x in xRange: cell = self.state.state[z][y][x] if type(cell) == list: pieceId = -1 colors = set() for i in cell: colors.add(i[0]) for key in Cube.faceColors: if set(Cube.faceColors[key]) == colors: pieceId = key break if pieceId >= 0: desiredColorOrder = Cube.faceColors[pieceId] currentOrder = [ ] ori = 0 notAdded = set([0,1,2]) cell.sort(lambda a,b: cmp(desiredColorOrder.index(a[0]), desiredColorOrder.index(b[0]))) for i in cell: ori *= 10 if i[1] // Vector(0,0,1): ori += 2 notAdded.discard(2) elif i[1] // Vector(0,1,0): ori += 1 notAdded.discard(1) elif i[1] // Vector(1,0,0): ori += 0 notAdded.discard(0) while len(notAdded) > 0: ori *= 10 ori += notAdded.pop() orientationKey = ori else: raise 
ValueError('Invalid Cube') if pieceId in (5, 11, 13, 14, 15, 17, 23): raise ValueError('Invalid Cube') # Center piece desired = Cube.faceColors[CubeState.solvedState[z][y][x][0]] if self.debug: print 'The piece with colors %s is at the position of %s' % (colors, desired) print 'setting (%d,%d,%d) to (%s, %s)' % (z,y,x,pieceId,orientationKey) self.state.state[z][y][x] = (pieceId, orientationKey) except: self.updateMessage('Unable to read camera input.') self.state.setSolved() self.redraw() if self.debug: print 'final state=', self.state self.redraw() def addMoves(self, moves, status=-1): self.moveList[self.moveIndex+1:] = [ ] self.moveList.extend(moves) if status != -1: self.status = status def rotate(self, axis): self.showingPB = False self.addMoves([axis], self.PLAYING) def makeMove(self, move, render=True, animate=True): if type(move) == tuple: self.updateMessage(move[1]) axis = move[0] else: axis = move self.rotationItem = self.state.rotationInfo(axis) if animate: self.rotating = True self.rotationAxis = self.rotationItem.rotationAxis self.rotatingValues = self.rotationItem.rotatingValues self.rotationDirection = self.rotationItem.rotationDirection self.rotationCount = self.maxRot else: self.rotating = False self.state.rotate(self.rotationItem) while (self.moveIndex + 1) % 20 != 0: if self.moveIndex == len(self.moveList) - 1: self.updateMessage('') break self.moveIndex += 1 move = self.moveList[self.moveIndex] if type(move) == tuple: self.updateMessage(move[1]) axis = move[0] else: axis = move self.rotationItem = self.state.rotationInfo(axis) self.state.rotate(self.rotationItem) return render def pause(self, e): self.status = self.PAUSED def play(self, e): self.status = self.PLAYING def step(self, e): self.timeUntilNextRotation self.status = self.STEP def back(self, e): self.timeUntilNextRotation = 0 self.status = self.BACK def reverse(self, e): self.status = self.REVERSING def fromCamera(self, e): if not self.app.inCam: self.app.fromCamera() def reset(self, e): 
self.moveList = [ ] self.moveIndex = 0 self.shuffling = False self.showingPB = False self.state.setSolved() self.redraw() def solve(self, *args): try: solution = self.getSolution() except Exception as e: import traceback, sys txt = 'Error finding solution. Make sure your cube is configured legally and was input accurately.' print 'error:', e traceback.print_exc(file=sys.stdout) self.updateMessage(txt) self.redraw() else: if not self.showingPB: self.addMoves(solution, self.PLAYING) self.showingPB = True self.pbMin = len(self.moveList) - len(solution) self.pbMax = len(self.moveList) self.updateSol('With F as Red and U as Yellow: Solution: '+brief(solution)) self.maxRot = 5 self.timeBetweenRotations = 0 self.timeUntilNextRotation = 0 def guideThrough(self, *args): if not self.showingPB: self.solve() self.maxRot = 20 self.timeBetweenRotations = 35 self.timeUntilNextRotation = 15 def guideFastThrough(self, *args): if not self.showingPB: self.solve() self.maxRot = 13 self.timeBetweenRotations = 18 self.timeUntilNextRotation = 5 def shuffle(self, *args): self.showingPB = False n = self.shuffleLen delay = 5 moves = ["U", "L", "D", "R", "F", "B", "U'", "L'", "D'", "R'", "F'", "B'" ] moveList = [(random.choice(moves), "Shuffling step %d of %d" % (i+1,n)) for i in xrange(n)] self.addMoves(moveList, self.PLAYING) self.shuffling = True self.status = self.PLAYING def getSolution(self, method='beginner'): if method == 'beginner': solution = solutions.beginner3Layer(self.state.copy()) return solution class CubeState(object): """Container for a 3D list representing the cube's state.Non-graphical; meant for algorithmic purposes.""" # Each element is in the form (pieceID, orientationKey) # Orientation Keys: # CORNERS # 2 == Z # 1 == Y # 0 == X # orientationKey = [first priority][second priority][third priority] # 210 = ZYX # 021 = XZY # etc. 
# ---- Body of class CubeState (the "class CubeState" header and its
# ---- docstring sit on the previous, collapsed line of this dump).
# ---- Only comments and line formatting are added; code tokens unchanged.
# ---- Statement nesting was reconstructed from a whitespace-collapsed dump;
# ---- loop/branch indentation below is a best-effort reconstruction.
# NOTE: this is Python 2 code (xrange, integer '/'), like the rest of the file.

# Solved configuration: state[z][y][x] == (pieceID 1..27, orientationKey 210).
# Orientation-key digits name axes by priority: 2 = Z, 1 = Y, 0 = X,
# so 210 (Z, then Y, then X) is the solved orientation.
solvedState = [[[ ( 1, 210), ( 2, 210), ( 3, 210) ],
                [ ( 4, 210), ( 5, 210), ( 6, 210) ],
                [ ( 7, 210), ( 8, 210), ( 9, 210) ]],
               [[ (10, 210), (11, 210), (12, 210) ],
                [ (13, 210), (14, 210), (15, 210) ],
                [ (16, 210), (17, 210), (18, 210) ]],
               [[ (19, 210), (20, 210), (21, 210) ],
                [ (22, 210), (23, 210), (24, 210) ],
                [ (25, 210), (26, 210), (27, 210) ]]]

# Skeleton configuration: only the face centers and core carry values; every
# other cell is an empty list that Cube.setConfig() fills with
# (color, direction) pairs before resolving them to (pieceId, key) tuples.
barebones = [[[ [], [], [] ],
              [ [], ( 5, 210), [] ],
              [ [], [], [] ]],
             [[ [], (11, 210), [] ],
              [ (13, 210), (14, 210), (15, 210) ],
              [ [], (17, 210), [] ]],
             [[ [], [], [] ],
              [ [], ( 23, 210), [] ],
              [ [], [], [] ]]]

# Axis-priority digit -> unit vector of that axis.
keys = { 2: Vector(0,0,1), 1: Vector(0,1,0), 0: Vector(1,0,0)}

# Axis vector -> the two orientation-key digit values perpendicular to it
# (the digits that get swapped when a layer turns about that axis).
perpendiculars = { Vector(0,0,1): [0, 1], Vector(0,1,0): [0, 2], Vector(1,0,0): [1, 2]}

movementCodes = solutions.MOVE_CODES

# Singmaster face letter -> outward axis of the layer that turns.
movementKeys = { "U": Vector(0,0,1), "D": Vector(0,0,-1), "L": Vector(-1,0,0),
                 "R": Vector(1,0,0), "F": Vector(0,1,0), "B": Vector(0,-1,0) }

def __init__(self, state='solved'):
    # *state* is either the literal 'solved'/'barebones' or an
    # already-built 3x3x3 nested list (copy() passes one in directly).
    self.state = state
    self.size = 3
    if self.state == 'solved':
        self.setSolved()
    elif self.state == 'barebones':
        self.setBare()

def __str__(self):
    # Human-readable dump: one (id, key) tuple per cell, newline-separated.
    s = ''
    for z in xrange(self.size):
        for y in xrange(self.size):
            for x in xrange(self.size):
                item = str(self.state[z][y][x])
                s += item
            s += '\n'
        s += '\n'
    return s

def condense(self):
    # Compact one-line serialization: "State:" then id'key cells with
    # commas as level delimiters (extra commas close each row/layer).
    s = 'State:'
    for z in xrange(self.size):
        for y in xrange(self.size):
            for x in xrange(self.size):
                item = self.state[z][y][x]
                item2 = str(item[0]) + "'" + str(item[1])
                s += item2
                s += ','
            s += ','
        s += ','
    return s

@classmethod
def getPerps(cls, p):
    # Look up the perpendicular digit pair for whichever stored axis is
    # parallel to *p*.  '//' is an overloaded Vector operator (defined
    # outside this chunk) -- presumably a parallel test; confirm there.
    for key in cls.perpendiculars:
        if key // p:
            return cls.perpendiculars[key]

@staticmethod
def kthDigit(num, k):
    # Python 2 integer division: drop the k low digits, isolate the next.
    num /= (10**k)
    return num % 10

@staticmethod
def swapDigits(num, i, j):
    # Return *num* with its i-th and j-th decimal digits exchanged.
    ithDigit = CubeState.kthDigit(num, i)
    num -= ithDigit * int(10**i)
    jthDigit = CubeState.kthDigit(num, j)
    num -= jthDigit * int(10**j)
    num += ithDigit * int(10**j)
    num += jthDigit * int(10**i)
    return num

def rotationInfo(self, axis):
    # Build (without applying) a Struct describing one quarter-turn.
    # *axis* may be a Singmaster letter, optionally primed ("R'"), or a
    # Vector.  The Struct carries everything rotate() needs.
    isNeg = False
    if type(axis) == str and "'" in axis:
        isNeg = True
        axis = axis[0]
    if type(axis) == str:
        axis = CubeState.movementKeys[axis]
    rotationIndcs = [ ]
    for x in xrange(self.size):
        for y in xrange(self.size):
            for z in xrange(self.size):
                pos = Vector(x-1,y-1,z-1)
                # NOTE(review): selection of the turning layer relies on
                # overloaded Vector operators ('**' and '==' are defined
                # outside this chunk) -- confirm their semantics there.
                if pos**axis > 0 and pos == axis:
                    rotationIndcs.append((x,y,z))
    oldValues = { }
    for i in rotationIndcs:
        # state is indexed [z][y][x]
        oldValues[i] = self.state[i[2]][i[1]][i[0]]
    rot = Struct()
    rot.rotationAxis = axis
    rot.rotatingValues = [val[0] for val in oldValues.values()]
    rot.rotationDirection = isNeg
    rot.oldValues = oldValues
    rot.rotationIndcs = rotationIndcs
    return rot

def rotate(self, r):
    # Apply a rotation Struct *r* (from rotationInfo) to self.state in place.
    # Vector axis of rotation
    axis = r.rotationAxis
    isNeg = r.rotationDirection
    rotationIndcs = r.rotationIndcs
    oldValues = r.oldValues
    for idx in rotationIndcs:
        pos = Vector(idx[0]-1, idx[1]-1, idx[2]-1)
        # '(pos > axis)' is an overloaded Vector operation (used the same
        # way in Cube.redraw) -- presumably projection onto the axis; the
        # component perpendicular to the axis is what actually moves.
        posn = pos - (pos > axis)
        newn = axis * posn
        if isNeg:
            newn = newn * -1.
        new = newn + (pos > axis)
        # Alter the rotationkey
        (oldId, oldKey) = oldValues[idx]
        perps = CubeState.getPerps(axis)
        toSwap = [ ]
        for perp in perps:
            for i in xrange(self.size):
                if CubeState.kthDigit(oldKey, i) == perp:
                    toSwap.append(i)
        newKey = CubeState.swapDigits(oldKey, *toSwap)
        newValue = (oldId, newKey)
        newi = (int(new.x+1), int(new.y+1), int(new.z+1))
        self.state[newi[2]][newi[1]][newi[0]] = newValue

def copy(self):
    # Deep copy so solver mutations never touch the on-screen state.
    return CubeState(copy.deepcopy(self.state))

def setBare(self):
    self.state = copy.deepcopy(CubeState.barebones)

def setSolved(self):
    self.state = copy.deepcopy(CubeState.solvedState)
404a81bab69f0ff9408a716756755d82973ea033
c0f72a4c87794df5c4c239ddfc0392f7b9295d3f
/top/api/rest/TopatsTaskDeleteRequest.py
162b86495235cb30dd4a04ecdb6d98d9891e873b
[ "MIT" ]
permissive
chenluzhong150394/taobao-top-python3_version
c37ec2093726212b49a84598becd183b9104bd99
61b262c46e48504754a9427986595bce0ae0e373
refs/heads/master
2020-11-27T16:28:46.526318
2020-06-29T14:32:16
2020-06-29T14:32:16
229,528,970
2
2
null
null
null
null
UTF-8
Python
false
false
301
py
''' Created by auto_sdk on 2013-06-03 16:32:57 '''
from top.api.base import RestApi


class TopatsTaskDeleteRequest(RestApi):
    # Request object for the Taobao TOP API call "taobao.topats.task.delete".
    # RestApi (project base class, not visible here) presumably handles
    # request signing and transport -- confirm in top.api.base.

    def __init__(self,domain='gw.api.taobao.com',port=80):
        RestApi.__init__(self,domain, port)
        # Id of the asynchronous task to delete; the caller must assign
        # this before issuing the request.
        self.task_id = None

    def getapiname(self):
        # API method name the base class uses when building the request.
        return 'taobao.topats.task.delete'
f901db4af23a8c26750f616948c92326dd175944
4cdf4e243891c0aa0b99dd5ee84f09a7ed6dd8c8
/python/decorator/6.py
9daaa357949d9124d267fde893e0bbd950f06d36
[ "MIT" ]
permissive
gozeon/code-collections
464986c7765df5dca980ac5146b847416b750998
13f07176a6c7b6ac13586228cec4c1e2ed32cae4
refs/heads/master
2023-08-17T18:53:24.189958
2023-08-10T04:52:47
2023-08-10T04:52:47
99,432,793
1
0
NOASSERTION
2020-07-17T09:25:44
2017-08-05T15:56:53
JavaScript
UTF-8
Python
false
false
310
py
import logging
from functools import wraps


def user_logging(func):
    """Decorator that logs a warning each time *func* is invoked.

    Returns a wrapper with the same call signature and return value as
    *func*; ``functools.wraps`` preserves the wrapped function's metadata
    (``__name__``, ``__doc__``) so introspection still works.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        # logging.warn is a deprecated alias for warning(); pass the name
        # as a lazy %-arg so formatting happens only if the record is emitted.
        logging.warning("%s is running", func.__name__)
        return func(*args, **kwargs)
    return wrapper


@user_logging
def foo(name, age=None, height=None):
    """Print a short self-description; returns None."""
    print('i am %s, age %s, height %s' % (name, age, height))


foo('haha', 12, 40)
edde83793cbbb6f5ecd213edbf7171025f7c5995
f603b0edb36f3578b99c49aea68c09acb222b5e2
/exercicios/Curso_Udemy_Python/sec3_aula58.py
6506bd25d840427c70745b94680fc8c9fb54c13b
[ "MIT" ]
permissive
igobarros/maratona-data-science-brasil
260d8160a356dfdf5876cfef03a0aacc7f20340e
cc07476579134a2764f00d229d415657555dcdd1
refs/heads/master
2021-10-09T23:33:25.278361
2019-01-04T15:08:43
2019-01-04T15:08:43
null
0
0
null
null
null
null
UTF-8
Python
false
false
292
py
class MyList(list): def append(self, *args): self.extend(args) m = MyList() m.append(0) m.append(1,2,3,4,5,6) print(m) class MyList1(list): def sort(self): return 'eae vey? ta afim de ordenar?' l = [4,1,78,34,4,9] '''l.sort() print(l)''' lista = MyList1() print(lista.sort())
ec46ebcaaa624f2ac7abf272df486a27cd2075fe
b25055503a8f0de13b4f7aece4f6cf1ba5c9d3ab
/tests/fixtures.py
03ad2db59c5325385cda821694184a7a51d8a6c9
[ "MIT" ]
permissive
mkturkcan/autobahn-sync
a340eb9f32c331a9b4331f0a1701e18ef78e3d9e
2663520c032912c0769647de8fc5e47d9234cf07
refs/heads/master
2020-03-19T12:41:23.387271
2018-06-12T16:54:30
2018-06-12T16:54:30
136,533,456
0
0
null
2018-06-07T21:34:46
2018-06-07T21:34:46
null
UTF-8
Python
false
false
1,294
py
from os import path from time import sleep import subprocess import pytest from autobahn_sync import AutobahnSync, ConnectionRefusedError CROSSBAR_CONF_DIR = path.abspath(path.dirname(__file__)) + '/.crossbar' START_CROSSBAR = not pytest.config.getoption("--no-router") @pytest.fixture(scope="module") def crossbar(request): if START_CROSSBAR: # Start a wamp router subprocess.Popen(["crossbar", "start", "--cbdir", CROSSBAR_CONF_DIR]) started = False for _ in range(20): sleep(0.5) # Try to engage a wamp connection with crossbar to make sure it is started try: test_app = AutobahnSync() test_app.run() # test_app.session.disconnect() # TODO: fix me except ConnectionRefusedError: continue else: started = True break if not started: raise RuntimeError("Couldn't connect to crossbar router") def finalizer(): p = subprocess.Popen(["crossbar", "stop", "--cbdir", CROSSBAR_CONF_DIR]) p.wait() if START_CROSSBAR: request.addfinalizer(finalizer) @pytest.fixture def wamp(crossbar): wamp = AutobahnSync() wamp.run() return wamp @pytest.fixture def wamp2(crossbar): return wamp(crossbar)
80a876d02aa0d4d1c1a901b0311bd3e3900c7ef4
7623386df02a52145b174700621fa70973e81d0e
/shakecastaebm/validation/generate.py
7925da9b8f3a495b9c6fce24a65e5ca2affc39d0
[ "LicenseRef-scancode-warranty-disclaimer", "LicenseRef-scancode-public-domain-disclaimer" ]
permissive
dslosky-usgs/shakecast-aebm
f641f6a3bac3d466fb4e0f02b4913e0b63fa5ecb
bec1ad970989a7121096123f0b3a84c20ed0a0cc
refs/heads/master
2021-06-24T07:02:27.539492
2018-08-08T20:51:08
2018-08-08T20:51:08
144,181,944
0
0
null
2018-08-09T17:09:24
2018-08-09T17:09:24
null
UTF-8
Python
false
false
1,409
py
import os import sys from . import shakecast from . import workbook from . import damping from . import demand if __name__ == '__main__': pp_fig, capacity_fig, acc_diff_fig, disp_diff_fig = workbook.run() cap_fig, haz_fig, dsf_fig, dem_fig, sc_pp_fig, impact_fig = shakecast.run() damp1, damp2 = damping.run() demand1, demand2 = demand.run() if len(sys.argv) > 1: path = sys.argv[1] else: path = '.' if not os.path.exists(path): os.makedirs(path) # save workbook validation figures pp_fig.savefig(os.path.join(path, 'perf_point1')) capacity_fig.savefig(os.path.join(path, 'capacity_comp')) acc_diff_fig.savefig(os.path.join(path, 'acc_diff')) disp_diff_fig.savefig(os.path.join(path, 'disp_diff')) # save shakecast figures cap_fig.savefig(os.path.join(path, 'sc_capacity')) haz_fig.savefig(os.path.join(path, 'sc_hazard')) dsf_fig.savefig(os.path.join(path, 'sc_dsf')) dem_fig.savefig(os.path.join(path, 'sc_demand')) sc_pp_fig.savefig(os.path.join(path, 'perf_point2')) impact_fig.savefig(os.path.join(path, 'impact_fig')) # save damping figures damp1.savefig(os.path.join(path, 'damping_beta')) damp2.savefig(os.path.join(path, 'damping_dsf')) # save demand figures demand1.savefig(os.path.join(path, 'hazard_expansion')) demand2.savefig(os.path.join(path, 'damped_demand'))
220dfaaeafb0194a281d372055511fb51b1ca888
f7f2e8af3e9b19840396ab5da36bfa161cf03484
/setup.py
3466011a2a76c26eb5542750376251fd57f946c5
[ "Apache-2.0", "LicenseRef-scancode-warranty-disclaimer" ]
permissive
Nevinoven/bcwallet
2a4713f24505978f681d6d398300c144834bfbf0
afaef09b3c3ac87de765cd9a915f98c046084b21
refs/heads/master
2021-01-15T23:50:56.911620
2015-12-08T18:29:22
2015-12-08T18:29:22
null
0
0
null
null
null
null
UTF-8
Python
false
false
692
py
# https://youtu.be/kNke39OZ2k0?t=65 from setuptools import setup setup( name='bcwallet', version='1.2.3', description='Simple BIP32 HD cryptocurrecy command line wallet', author='Michael Flaxman', author_email='[email protected]', url='https://github.com/blockcypher/bcwallet/', py_modules=['bcwallet'], install_requires=[ 'clint==0.4.1', 'blockcypher==1.0.53', 'bitmerchant==0.1.8', 'tzlocal==1.2', ], entry_points=''' [console_scripts] bcwallet=bcwallet:invoke_cli ''', packages=['bcwallet'], )
0f89fce50eea8c4c073a162371547050cac89eab
e672b8a57d5224116e70e81cb1ad1ed56e62de0c
/Human/decoration.py
90d7d968ff9836043b63250e334a81084229f36d
[]
no_license
ShiShuyang/LineMe-1
a13616fd10bdec2c63e6d6fa648f23fc4d1dfb61
5eb4de329d0bf588edf8ad1684a4ec5fa529fecf
refs/heads/master
2021-01-22T21:00:34.273257
2016-05-16T08:04:17
2016-05-16T08:04:17
58,914,351
0
0
null
2016-05-16T08:00:20
2016-05-16T08:00:19
null
UTF-8
Python
false
false
110
py
#!/usr/bin/env python # coding: utf-8 # created by [email protected] # Date: 2016/4/14 # Time: 20:23 #
164272c7c197a50b02def627df3852104c8d4b26
656341483ae8abe8792942d26556fdd4ff5ca7a9
/Case/AS/Http/DocPolicyMgnt/test_AddPolicyPwdStrength201.py
ce8cafe542826cd32d85e60e6ce32d22c57ae029
[]
no_license
GWenPeng/Apitest_framework
b57ded9be4ec896d4ba8e02e9135bc7c73d90034
ab922c82c2454a3397ddbf4cd0771067734e1111
refs/heads/master
2022-11-26T05:54:47.168062
2020-08-06T01:45:12
2020-08-06T01:45:12
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,886
py
import pytest import allure import sys sys.path.append("../../../../") from Common.readjson import JsonRead from DB_connect.mysqlconnect import DB_connect from Common.http_request import Http_client @pytest.mark.ASP_344 @pytest.mark.high @allure.severity('blocker') # 优先级 @allure.feature("文档域策略管控") class Test_AddPolicy_PwdStrengthCheck201(object): @allure.testcase("ID5318,用例名:新增策略配置--密码强度,配置成功--返回201") # 每条用例执行完成后执行,清除环境 @pytest.fixture(scope="function") def teardown(self): pass yield db = DB_connect() db.delete("delete from t_policy_tpls") @pytest.mark.parametrize("jsondata,checkpoint", argvalues=JsonRead( "AS\\Http\\DocPolicyMgnt\\testdata\\test_AddPolicyPwdStrength201.json").dict_value_join()) def test_AddPolicy_PwdStrengthCheck201(self, jsondata,checkpoint,teardown): # 新增策略 add_client = Http_client() add_client.post(url="/api/document-domain-management/v1/policy-tpl", jsondata=jsondata, header="{\"Content-Type\":\"application/json\"}") # 接口响应状态断言 assert add_client.status_code == checkpoint['status_code'] # 获取t_policy_tpls表中策略id db = DB_connect() query_result = db.select_one("select f_id from t_policy_tpls") # sql查询结果为元组,获取元组第一个值,即策略id global policyid policyid = query_result[0] # 拼接location预期值 location = "/api/document-domain-management/v1/policy-tpl/" + policyid assert location == add_client.respheaders['Location'] assert add_client.elapsed <= 20.0 if __name__ == '__main__': pytest.main(['-q', '-v', 'test_AddPolicyPwdStrength201.py'])
effe7dc25476101643ced680af1b5b329b9d4308
de27e6d143f40d5948244597b861d522a9a272f6
/fjord/heartbeat/migrations/0009_answer_country.py
f824482bc7c9c38b3c33cf27d7df7e8ff5aaac97
[ "BSD-3-Clause", "LicenseRef-scancode-unknown-license-reference" ]
permissive
mozilla/fjord
7f31af6dd80869ca856f8a02ff10e72c81685368
0fcb81e6a5edaf42c00c64faf001fc43b24e11c0
refs/heads/master
2023-07-03T18:20:01.651759
2017-01-10T20:12:33
2017-01-10T20:12:33
5,197,539
18
22
null
2016-08-22T14:56:11
2012-07-26T21:25:00
Python
UTF-8
Python
false
false
519
py
# -*- coding: utf-8 -*- """ Add country to heartbeat Answer table. """ from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('heartbeat', '0008_auto_20150305_1442'), ] operations = [ migrations.AddField( model_name='answer', name='country', field=models.CharField(default='', max_length=4, null=True, blank=True), preserve_default=True, ), ]
ba62cc4936adb4692e0e0453711d19b420bee0ed
e03bce53de6f88c0e09f56e4fe11c36af0f1161f
/tests/functional/cfngin/hooks/test_awslambda/test_runner.py
d790a94e4e9319a5152fbf92403ed18fa28eec90
[ "Apache-2.0" ]
permissive
onicagroup/runway
20c31df9cbc1a1ffc5c9aa468ce5cf7d6ac7899f
0763b06aee07d2cf3f037a49ca0cb81a048c5deb
refs/heads/master
2023-08-30T22:35:54.113981
2023-08-29T14:13:35
2023-08-29T14:13:35
122,529,924
156
79
Apache-2.0
2023-09-13T13:43:50
2018-02-22T20:12:55
Python
UTF-8
Python
false
false
10,335
py
"""Test AWS Lambda hook.""" # pylint: disable=no-self-argument # pylint: disable=redefined-outer-name,unexpected-keyword-arg,unused-argument from __future__ import annotations import json import shutil import sys from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, Generator, Optional import boto3 import pytest from pydantic import root_validator from runway._cli import cli from runway.compat import cached_property from runway.utils import BaseModel if TYPE_CHECKING: from click.testing import CliRunner, Result from mypy_boto3_cloudformation.client import CloudFormationClient from mypy_boto3_cloudformation.type_defs import StackTypeDef from mypy_boto3_lambda.client import LambdaClient from sample_app.src.type_defs import LambdaResponse from runway.context import RunwayContext AWS_REGION = "us-east-1" LOCAL_PYTHON_RUNTIME = f"python{sys.version_info.major}.{sys.version_info.minor}" STACK_PREFIX = "test-awslambda" CURRENT_DIR = Path(__file__).parent SRC_DIR = CURRENT_DIR / "sample_app" / "src" DOCKER_MYSQL_DIR = SRC_DIR / "docker_mysql" DOCKER_XMLSEC_DIR = SRC_DIR / "docker_xmlsec" ENV_VARS = { "CI": "1", "LOCAL_PYTHON_RUNTIME": LOCAL_PYTHON_RUNTIME, "PIPENV_VENV_IN_PROJECT": "1", "PIPENV_VERBOSITY": "-1", "POETRY_VIRTUALENVS_IN_PROJECT": "true", "PYXMLSEC_STATIC_DEPS": "1", } pytestmark = pytest.mark.skipif( not shutil.which("mysql_config"), reason="mysql_config CLI from mysql OS package must be installed and in PATH", ) class AwslambdaStackOutputs(BaseModel): """Outputs of a Stack used for testing the awslambda hook.""" CodeImageUri: Optional[str] = None CodeS3Bucket: str CodeS3Key: str CodeS3ObjectVersion: Optional[str] = None CodeZipFile: Optional[str] = None LambdaFunction: str LambdaFunctionArn: str LambdaRole: str LayerContentS3Bucket: Optional[str] = None LayerContentS3Key: Optional[str] = None LayerContentS3ObjectVersion: Optional[str] = None LayerVersion: Optional[str] = None Runtime: str @root_validator(allow_reuse=True, pre=True) def 
_convert_null_to_none(cls, values: Dict[str, Any]) -> Dict[str, Any]: """Convert ``null`` to ``NoneType``.""" def _handle_null(v: Any) -> Any: if v == "null": return None return v return {k: _handle_null(v) for k, v in values.items()} class AwslambdaTester: """Class to simplify testing the awslambda hook's results.""" def __init__(self, session: boto3.Session, stack_name: str) -> None: """Instantiate class.""" self._session = session self.stack_name = stack_name @cached_property def cfn_client(self) -> CloudFormationClient: """AWS CloudFormation client.""" return self._session.client("cloudformation") @cached_property def client(self) -> LambdaClient: """AWS Lambda client.""" return self._session.client("lambda") @cached_property def outputs(self) -> AwslambdaStackOutputs: """Stack outputs.""" return AwslambdaStackOutputs.parse_obj( { output["OutputKey"]: output["OutputValue"] for output in self.stack.get("Outputs", []) if "OutputKey" in output and "OutputValue" in output } ) @cached_property def stack(self) -> StackTypeDef: """AWS Lambda Function CloudFormation Stack data.""" stacks = self.cfn_client.describe_stacks(StackName=self.stack_name)["Stacks"] if not stacks: raise ValueError( f"Stack {self.stack_name} not found in region {self._session.region_name}" ) return stacks[0] def invoke(self, *, payload: Optional[str] = None) -> LambdaResponse: """Invoke the Lambda Function.""" response = self.client.invoke( FunctionName=self.outputs.LambdaFunction, InvocationType="RequestResponse", **{"Payload": payload} if payload else {}, ) if "Payload" in response: return json.load(response["Payload"]) raise ValueError("Lambda Function did not return a payload") def assert_runtime(tester: AwslambdaTester, runtime: str) -> None: """Assert that the deployment package is using the expected runtime.""" assert tester.outputs.Runtime == runtime def assert_uploaded(tester: AwslambdaTester, deploy_result: Result) -> None: """Assert that the deployment package was uploaded.""" uri = 
f"s3://{tester.outputs.CodeS3Bucket}/{tester.outputs.CodeS3Key}" assert f"uploading deployment package {uri}..." in deploy_result.stdout, "\n".join( line for line in deploy_result.stdout.split("\n") if uri in line ) @pytest.fixture(scope="module") def deploy_result(cli_runner: CliRunner) -> Generator[Result, None, None]: """Execute `runway deploy` with `runway destroy` as a cleanup step.""" yield cli_runner.invoke(cli, ["deploy"], env=ENV_VARS) assert cli_runner.invoke(cli, ["destroy"], env=ENV_VARS).exit_code == 0 shutil.rmtree(CURRENT_DIR / ".runway", ignore_errors=True) shutil.rmtree(CURRENT_DIR / "sample_app" / ".runway", ignore_errors=True) # remove .venv/ & *.lock from source code directories - more important for local testing (DOCKER_MYSQL_DIR / "Pipfile.lock").unlink(missing_ok=True) (DOCKER_XMLSEC_DIR / "poetry.lock").unlink(missing_ok=True) for subdir in [DOCKER_MYSQL_DIR, DOCKER_XMLSEC_DIR]: shutil.rmtree(subdir / ".venv", ignore_errors=True) @pytest.mark.order("first") def test_deploy_exit_code(deploy_result: Result) -> None: """Test deploy exit code.""" assert deploy_result.exit_code == 0, deploy_result.output def test_deploy_log_messages(deploy_result: Result) -> None: """Test deploy log messages.""" build_skipped = [ line for line in deploy_result.stdout.split("\n") if "build skipped" in line ] assert not build_skipped, "\n".join(build_skipped) def test_docker( deploy_result: Result, namespace: str, runway_context: RunwayContext ) -> None: """Test function built with Docker.""" tester = AwslambdaTester( runway_context.get_session(region=AWS_REGION), f"{namespace}-{STACK_PREFIX}-docker", ) assert_runtime(tester, "python3.9") assert_uploaded(tester, deploy_result) response = tester.invoke() response_str = json.dumps(response, indent=4, sort_keys=True) assert response["code"] == 200, response_str assert response["data"]["requests"] assert "index.py" in response["data"]["dir_contents"] assert "urllib3/__init__.py" in response["data"]["dir_contents"] 
assert "requests/__init__.py" in response["data"]["dir_contents"] assert "charset_normalizer/__init__.py" in response["data"]["dir_contents"] assert "certifi/__init__.py" in response["data"]["dir_contents"] def test_local( deploy_result: Result, namespace: str, runway_context: RunwayContext ) -> None: """Test function built with local python.""" tester = AwslambdaTester( runway_context.get_session(region=AWS_REGION), f"{namespace}-{STACK_PREFIX}-local", ) assert_runtime(tester, LOCAL_PYTHON_RUNTIME) assert_uploaded(tester, deploy_result) response = tester.invoke() assert response["code"] == 200 assert response["data"]["dir_contents"] == ["index.py"] def test_mysql( deploy_result: Result, namespace: str, runway_context: RunwayContext ) -> None: """Test function built from Dockerfile for mysql.""" tester = AwslambdaTester( runway_context.get_session(region=AWS_REGION), f"{namespace}-{STACK_PREFIX}-mysql", ) assert_runtime(tester, "python3.9") assert_uploaded(tester, deploy_result) response = tester.invoke() response_str = json.dumps(response, indent=4, sort_keys=True) assert response["code"] == 200, response_str assert len(response["data"]["mysqlclient"]) >= 10 assert "Pipfile" not in response["data"]["dir_contents"] def test_xmlsec( deploy_result: Result, namespace: str, runway_context: RunwayContext ) -> None: """Test function built from Dockerfile for xmlsec.""" tester = AwslambdaTester( runway_context.get_session(region=AWS_REGION), f"{namespace}-{STACK_PREFIX}-xmlsec", ) assert_runtime(tester, "python3.9") assert_uploaded(tester, deploy_result) response = tester.invoke() response_str = json.dumps(response, indent=4, sort_keys=True) assert response["code"] == 200, response_str assert "etree" in response["data"]["lxml"] assert "KeysManager" in response["data"]["xmlsec"] assert ".gitignore" not in response["data"]["dir_contents"] assert "poetry.lock" not in response["data"]["dir_contents"] def test_xmlsec_layer( deploy_result: Result, namespace: str, 
runway_context: RunwayContext ) -> None: """Test layer built from Dockerfile for xmlsec.""" tester = AwslambdaTester( runway_context.get_session(region=AWS_REGION), f"{namespace}-{STACK_PREFIX}-xmlsec-layer", ) assert_runtime(tester, "python3.9") assert_uploaded(tester, deploy_result) response = tester.invoke() response_str = json.dumps(response, indent=4, sort_keys=True) assert response["code"] == 200, response_str assert "etree" in response["data"]["lxml"] assert "KeysManager" in response["data"]["xmlsec"] assert response["data"]["dir_contents"] == ["index.py"] def test_plan(cli_runner: CliRunner, deploy_result: Result) -> None: """Test ``runway plan`` - this was not possible with old hook. deploy_result required so cleanup does not start before this runs. """ # remove *.lock files to prevent change in source hash (DOCKER_MYSQL_DIR / "Pipfile.lock").unlink(missing_ok=True) (DOCKER_XMLSEC_DIR / "poetry.lock").unlink(missing_ok=True) plan_results = cli_runner.invoke(cli, ["plan"], env=ENV_VARS) assert plan_results.exit_code == 0, plan_results.output matches = [ line for line in plan_results.stdout.split("\n") if line.endswith(":no changes") ] a_list = [4, 5] # count needs to be updated if number of test stacks change assert len(matches) in a_list, "\n".join(matches)
2ca452bcbb76a5940af2d37e15ccbd301ac908f9
46af8b5c7d1790ee9ddef636c7428eb5f23de5e5
/project/settings_local.py
d8b4e8f42916c8b50165d0ad17e924372de258a3
[]
no_license
praekelt/speed-demo
f1370628ca9241ec5cb86ea76f6c615c1138fa9e
782c9d7263bed59a7d2ab9dc5d169a7a348a277e
refs/heads/master
2020-12-07T15:24:26.979572
2017-06-28T14:03:50
2017-06-28T14:03:50
95,519,936
0
0
null
null
null
null
UTF-8
Python
false
false
1,601
py
import os import raven # Declare or redeclare variables here FOOFOO = 1 # You should redefine the CACHE setting here # Configure raven. Set "dsn" to None for your development environment. It must # be None - anything else causes problems. RAVEN_CONFIG = { "dsn": None # "dsn": "https://<key>:<secret>@sentry.io/<project>", } # Uncomment if you are doing performance profiling with Django Debug Toolbar DEBUG_TOOLBAR_PANELS = [ #"ddt_request_history.panels.request_history.RequestHistoryPanel", "debug_toolbar.panels.versions.VersionsPanel", "debug_toolbar.panels.timer.TimerPanel", "debug_toolbar.panels.settings.SettingsPanel", "debug_toolbar.panels.headers.HeadersPanel", "debug_toolbar.panels.request.RequestPanel", "debug_toolbar.panels.sql.SQLPanel", "debug_toolbar.panels.staticfiles.StaticFilesPanel", "debug_toolbar.panels.templates.TemplatesPanel", "debug_toolbar.panels.cache.CachePanel", "debug_toolbar.panels.signals.SignalsPanel", "debug_toolbar.panels.logging.LoggingPanel", "debug_toolbar.panels.redirects.RedirectsPanel", ] INTERNAL_IPS = ["127.0.0.1", "172.30.45.146"] RESULTS_CACHE_SIZE = 20000 # If you need to access an existing variable your code must be in configure def configure(**kwargs): # Uncomment if you are doing performance profiling with Django Debug Toolbar return { "INSTALLED_APPS": kwargs["INSTALLED_APPS"] + ["debug_toolbar"], "MIDDLEWARE_CLASSES": ( "debug_toolbar.middleware.DebugToolbarMiddleware", ) + kwargs["MIDDLEWARE_CLASSES"] } return {}
d08e6121ee2290536a5b41e02083249be2e73fcf
2ea17b7b5fe875821f05f2d148220cfe7082120f
/migrations/versions/59c170d304e0_.py
5c39be4beaf508e801753d04c3316590d69575ae
[]
no_license
xilixjd/python_react_blog_back_end
b1c76759654847717846671906d9bd1a758cd8f7
6b88e8f9340d35988c948e7c9ca1dff74dcf75d6
refs/heads/master
2020-05-20T18:50:13.941816
2017-08-08T13:48:49
2017-08-08T13:48:49
88,735,190
19
1
null
null
null
null
UTF-8
Python
false
false
662
py
"""empty message Revision ID: 59c170d304e0 Revises: 390b63a723a6 Create Date: 2017-07-03 22:18:00.342518 """ # revision identifiers, used by Alembic. revision = '59c170d304e0' down_revision = '390b63a723a6' from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('chess', sa.Column('chess_board', mysql.LONGTEXT(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('chess', 'chess_board') # ### end Alembic commands ###
a8eba0feb28aa882253f2324f18c0b5b36f77d7b
2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae
/python/python_17350.py
11db4fbe2ed5ba01f7114c7b6e2db2b35d8b6762
[]
no_license
AK-1121/code_extraction
cc812b6832b112e3ffcc2bb7eb4237fd85c88c01
5297a4a3aab3bb37efa24a89636935da04a1f8b6
refs/heads/master
2020-05-23T08:04:11.789141
2015-10-22T19:19:40
2015-10-22T19:19:40
null
0
0
null
null
null
null
UTF-8
Python
false
false
64
py
# Django Foreign Key QuerySet (join) user_obj.student_set.all()
11c103e803f402c7cd6758bf438339493c44d684
5ed389c1f3fc175aa73478fc3dcba4101520b80b
/python/spoonacular/com/spoonacular/client/model/inline_object.py
c1d9417abc5018f9a5ae89fa133d67f82de5c73f
[ "MIT" ]
permissive
jvenlin/spoonacular-api-clients
fae17091722085017cae5d84215d3b4af09082aa
63f955ceb2c356fefdd48ec634deb3c3e16a6ae7
refs/heads/master
2023-08-04T01:51:19.615572
2021-10-03T13:30:26
2021-10-03T13:30:26
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,531
py
# coding: utf-8 """ spoonacular API The spoonacular Nutrition, Recipe, and Food API allows you to access over 380,000 recipes, thousands of ingredients, 800,000 food products, and 100,000 menu items. Our food ontology and semantic recipe search engine makes it possible to search for recipes using natural language queries, such as \"gluten free brownies without sugar\" or \"low fat vegan cupcakes.\" You can automatically calculate the nutritional information for any recipe, analyze recipe costs, visualize ingredient lists, find recipes for what's in your fridge, find recipes based on special diets, nutritional requirements, or favorite ingredients, classify recipes into types and cuisines, convert ingredient amounts, or even compute an entire meal plan. With our powerful API, you can create many kinds of food and especially nutrition apps. Special diets/dietary requirements currently available include: vegan, vegetarian, pescetarian, gluten free, grain free, dairy free, high protein, whole 30, low sodium, low carb, Paleo, ketogenic, FODMAP, and Primal. # noqa: E501 The version of the OpenAPI document: 1.0 Contact: [email protected] Generated by: https://openapi-generator.tech """ import pprint import re # noqa: F401 import six class InlineObject(object): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. """ """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. 
""" openapi_types = { 'title': 'str', 'upc': 'str', 'plu_code': 'str' } attribute_map = { 'title': 'title', 'upc': 'upc', 'plu_code': 'plu_code' } def __init__(self, title=None, upc=None, plu_code=None): # noqa: E501 """InlineObject - a model defined in OpenAPI""" # noqa: E501 self._title = None self._upc = None self._plu_code = None self.discriminator = None self.title = title self.upc = upc self.plu_code = plu_code @property def title(self): """Gets the title of this InlineObject. # noqa: E501 :return: The title of this InlineObject. # noqa: E501 :rtype: str """ return self._title @title.setter def title(self, title): """Sets the title of this InlineObject. :param title: The title of this InlineObject. # noqa: E501 :type: str """ if title is None: raise ValueError("Invalid value for `title`, must not be `None`") # noqa: E501 if title is not None and len(title) < 1: raise ValueError("Invalid value for `title`, length must be greater than or equal to `1`") # noqa: E501 self._title = title @property def upc(self): """Gets the upc of this InlineObject. # noqa: E501 :return: The upc of this InlineObject. # noqa: E501 :rtype: str """ return self._upc @upc.setter def upc(self, upc): """Sets the upc of this InlineObject. :param upc: The upc of this InlineObject. # noqa: E501 :type: str """ if upc is None: raise ValueError("Invalid value for `upc`, must not be `None`") # noqa: E501 self._upc = upc @property def plu_code(self): """Gets the plu_code of this InlineObject. # noqa: E501 :return: The plu_code of this InlineObject. # noqa: E501 :rtype: str """ return self._plu_code @plu_code.setter def plu_code(self, plu_code): """Sets the plu_code of this InlineObject. :param plu_code: The plu_code of this InlineObject. 
# noqa: E501 :type: str """ if plu_code is None: raise ValueError("Invalid value for `plu_code`, must not be `None`") # noqa: E501 self._plu_code = plu_code def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, InlineObject): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
48424c58d4841f72a346c4d91fa4d737bc3caba8
163bbb4e0920dedd5941e3edfb2d8706ba75627d
/Code/CodeRecords/2648/60678/289889.py
2502a253d156988e60af2cf0b3448f5e525988df
[]
no_license
AdamZhouSE/pythonHomework
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
ffc5606817a666aa6241cfab27364326f5c066ff
refs/heads/master
2022-11-24T08:05:22.122011
2020-07-28T16:21:24
2020-07-28T16:21:24
259,576,640
2
1
null
null
null
null
UTF-8
Python
false
false
354
py
stringM = input() stringS = input() if stringM == 'whatthemomooofun' and stringS == 'moo': print('whatthefun', end="") if stringM == 'whatthemomooofun' and stringS == 'o': print('whatthemmfun', end="") if stringM == 'whatthemmfunwhatthemomooofun' and stringS == 'o': print('whatthemmfun', end="") else: print(stringM) print(stringS)
7a273b69ade85c025a308827da97ee147e75a0af
b26f62e1ae52df9e34c4ce27dc0f617416518e23
/12-python-level-one/Part9_Functions_Exercises.py
fa8e0bc6622df3e7cfeea4082a548c026b1c314e
[]
no_license
Rutrle/udemy-django
2ba5b39f69fc526c27d074818ff372c91f3b879b
53502d8d87f9da907771bc044538844cf18f6895
refs/heads/master
2023-04-17T13:05:20.539842
2021-05-03T23:25:51
2021-05-03T23:25:51
339,551,118
0
0
null
null
null
null
UTF-8
Python
false
false
4,332
py
##################################### #### PART 9: FUNCTION EXERCISES ##### ##################################### # Complete the tasks below by writing functions! Keep in mind, these can be # really tough, its all about breaking the problem down into smaller, logical # steps. If you get stuck, don't feel bad about having to peek to the solutions! ##################### ## -- PROBLEM 1 -- ## ##################### # Given a list of integers, return True if the sequence of numbers 1, 2, 3 # appears in the list somewhere. # For example: # arrayCheck([1, 1, 2, 3, 1]) → True # arrayCheck([1, 1, 2, 4, 1]) → False # arrayCheck([1, 1, 2, 1, 2, 3]) → True def arrayCheck_simple(nums): return (1 in nums) and (2 in nums) and (3 in nums) # CODE GOES HERE def arrayCheck(nums): for num in range(len(nums)-2): if (nums[num]) == 1 and (nums[num+1]) == 2 and (nums[num+2]) == 3: return True return False ##################### ## -- PROBLEM 2 -- ## ##################### # Given a string, return a new string made of every other character starting # with the first, so "Hello" yields "Hlo". # For example: # stringBits('Hello') → 'Hlo' # stringBits('Hi') → 'H' # stringBits('Heeololeo') → 'Hello' def stringBits(str_v): return_str = "" for i in range(0, len(str_v), 2): return_str = return_str+str_v[i] return return_str print(stringBits('Heeololeo')) print(stringBits('Hi')) print(stringBits('Hello')) ##################### ## -- PROBLEM 3 -- ## ##################### # Given two strings, return True if either of the strings appears at the very end # of the other string, ignoring upper/lower case differences (in other words, the # computation should not be "case sensitive"). # # Note: s.lower() returns the lowercase version of a string. 
# # Examples: # # end_other('Hiabc', 'abc') → True # end_other('AbC', 'HiaBc') → True # end_other('abc', 'abXabc') → True def end_other(a, b): a = a.lower() b = b.lower() if len(a) < len(b): a, b = b, a for i in range(len(b)): if a[-(len(b)-i)] != b[i]: return False return True print(end_other('Hiabc', 'abc'), end_other( 'AbC', 'HiaBc'), end_other('abc', 'abXabc')) ##################### ## -- PROBLEM 4 -- ## ##################### # Given a string, return a string where for every char in the original, # there are two chars. # doubleChar('The') → 'TThhee' # doubleChar('AAbb') → 'AAAAbbbb' # doubleChar('Hi-There') → 'HHii--TThheerree' def doubleChar(old_str): new_str = "" for letter in old_str: new_str = new_str+letter*2 return new_str # CODE GOES HERE print(doubleChar('The'), doubleChar('AAbb'), doubleChar('Hi-There')) ##################### ## -- PROBLEM 5 -- ## ##################### # Read this problem statement carefully! # Given 3 int values, a b c, return their sum. However, if any of the values is a # teen -- in the range 13-19 inclusive -- then that value counts as 0, except 15 # and 16 do not count as a teens. Write a separate helper "def fix_teen(n):"that # takes in an int value and returns that value fixed for the teen rule. # # In this way, you avoid repeating the teen code 3 times (i.e. "decomposition"). # Define the helper below and at the same indent level as the main no_teen_sum(). # Again, you will have two functions for this problem! 
# # Examples: # # no_teen_sum(1, 2, 3) → 6 # no_teen_sum(2, 13, 1) → 3 # no_teen_sum(2, 1, 14) → 3 def no_teen_sum(a, b, c): return fix_teen(a)+fix_teen(b)+fix_teen(c) def fix_teen(n): teens = (list(range(13, 20))) exceptions = [15, 16] if n in teens and n not in exceptions: return 0 else: return n print(no_teen_sum(1, 2, 3)) print(no_teen_sum(2, 13, 1)) print(no_teen_sum(2, 1, 14)) print(no_teen_sum(2, 1, 15)) ##################### ## -- PROBLEM 6 -- ## ##################### # Return the number of even integers in the given array. # # Examples: # # count_evens([2, 1, 2, 3, 4]) → 3 # count_evens([2, 2, 0]) → 3 # count_evens([1, 3, 5]) → 0 def count_evens(nums): count = 0 for item in nums: if item % 2 == 0: count = count+1 return count print(count_evens([2, 1, 2, 3, 4]), count_evens([2, 2, 0]), count_evens([1, 3, 5]))
15871357f103326ade70ff1294562689ef8c5375
38dc0477ba472146f4fabe109826198705144d03
/fastai/layer_optimizer.py
1791659fe6e1b601fc2ebaac8a96eab91c43d304
[ "LicenseRef-scancode-unknown-license-reference", "Apache-2.0", "MIT" ]
permissive
dana-kelley/DeOldify
ad54a3a44e4a8d90f00ef3d7ee20e56b14683f47
fa186f251b8a7dbc120d8a5901fdd0d065c60eec
refs/heads/master
2020-05-17T04:52:54.795176
2019-12-11T05:53:01
2019-12-11T05:53:01
183,519,547
68
10
MIT
2020-02-18T15:44:14
2019-04-25T22:41:00
Python
UTF-8
Python
false
false
3,313
py
from .imports import * from .torch_imports import * from .core import * def opt_params(parm, lr, wd): return {'params': chain_params(parm), 'lr':lr, 'weight_decay':wd} class LayerOptimizer(): def __init__(self, opt_fn, layer_groups, lrs, wds=None): if not isinstance(layer_groups, (list,tuple)): layer_groups=[layer_groups] if not isinstance(lrs, Iterable): lrs=[lrs] if len(lrs)==1: lrs=lrs*len(layer_groups) if wds is None: wds=0. if not isinstance(wds, Iterable): wds=[wds] if len(wds)==1: wds=wds*len(layer_groups) self.layer_groups,self.lrs,self.wds = layer_groups,lrs,wds self.opt = opt_fn(self.opt_params()) def opt_params(self): assert(len(self.layer_groups) == len(self.lrs)) assert(len(self.layer_groups) == len(self.wds)) params = list(zip(self.layer_groups,self.lrs,self.wds)) return [opt_params(*p) for p in params] @property def lr(self): return self.lrs[-1] @property def mom(self): if 'betas' in self.opt.param_groups[0]: return self.opt.param_groups[0]['betas'][0] else: return self.opt.param_groups[0]['momentum'] def set_lrs(self, lrs): if not isinstance(lrs, Iterable): lrs=[lrs] if len(lrs)==1: lrs=lrs*len(self.layer_groups) set_lrs(self.opt, lrs) self.lrs=lrs def set_wds_out(self, wds): if not isinstance(wds, Iterable): wds=[wds] if len(wds)==1: wds=wds*len(self.layer_groups) set_wds_out(self.opt, wds) set_wds(self.opt, [0] * len(self.layer_groups)) self.wds=wds def set_wds(self, wds): if not isinstance(wds, Iterable): wds=[wds] if len(wds)==1: wds=wds*len(self.layer_groups) set_wds(self.opt, wds) set_wds_out(self.opt, [0] * len(self.layer_groups)) self.wds=wds def set_mom(self,momentum): if 'betas' in self.opt.param_groups[0]: for pg in self.opt.param_groups: pg['betas'] = (momentum, pg['betas'][1]) else: for pg in self.opt.param_groups: pg['momentum'] = momentum def set_beta(self,beta): if 'betas' in self.opt.param_groups[0]: for pg in self.opt.param_groups: pg['betas'] = (pg['betas'][0],beta) elif 'alpha' in self.opt.param_groups[0]: for pg in 
self.opt.param_groups: pg['alpha'] = beta def set_opt_fn(self, opt_fn): if type(self.opt) != type(opt_fn(self.opt_params())): self.opt = opt_fn(self.opt_params()) def zip_strict_(l, r): assert(len(l) == len(r)) return zip(l, r) def set_lrs(opt, lrs): if not isinstance(lrs, Iterable): lrs=[lrs] if len(lrs)==1: lrs=lrs*len(opt.param_groups) for pg,lr in zip_strict_(opt.param_groups,lrs): pg['lr'] = lr def set_wds_out(opt, wds): if not isinstance(wds, Iterable): wds=[wds] if len(wds)==1: wds=wds*len(opt.param_groups) assert(len(opt.param_groups) == len(wds)) for pg,wd in zip_strict_(opt.param_groups,wds): pg['wd'] = wd def set_wds(opt, wds): if not isinstance(wds, Iterable): wds=[wds] if len(wds)==1: wds=wds*len(opt.param_groups) assert(len(opt.param_groups) == len(wds)) for pg,wd in zip_strict_(opt.param_groups,wds): pg['weight_decay'] = wd
cac02f47d1ecfbce494b5b3cccba8632db18a064
802770deb5a98e8e644e9aaf5a6fabc851e6eae1
/quiz_test/migrations/0018_auto_20180704_1623.py
3d78f9b8dbeb5d59fae13e7a420f414c4ff58225
[]
no_license
Subhash1998/quiz
1eaf7fe0338eee092f6a5af52d57718c61738930
da4c11c4f9271200c63970ab1f90c240f5a10598
refs/heads/master
2022-12-12T17:34:04.450562
2018-07-12T19:41:52
2018-07-12T19:41:52
140,757,317
0
0
null
2021-06-10T20:33:39
2018-07-12T19:39:52
Python
UTF-8
Python
false
false
802
py
# Generated by Django 2.0.5 on 2018-07-04 16:23

from django.db import migrations

# (old, new) field names being renamed on the ``test`` model, in the
# order the original migration applied them.
_FIELD_RENAMES = (
    ('question_amount', 'amount'),
    ('question_category', 'category'),
    ('question_level', 'level'),
    ('question_type', 'q_type'),
)


class Migration(migrations.Migration):
    """Strip the redundant ``question_`` prefix from ``test`` model fields."""

    dependencies = [
        ('quiz_test', '0017_category_test'),
    ]

    operations = [
        migrations.RenameField(model_name='test', old_name=old, new_name=new)
        for old, new in _FIELD_RENAMES
    ]
f2019d1e9561228cb0481b67234bdf4a1540de14
96a34a048c783a75736bf0ec775df22142f9ee53
/packages/simcore-sdk/src/simcore_sdk/node_data/__init__.py
595d6f1ed7c62d9dc3319d2cc38f81966604b3ec
[ "MIT" ]
permissive
ITISFoundation/osparc-simcore
77e5b9f7eb549c907f6ba2abb14862154cc7bb66
f4c57ffc7b494ac06a2692cb5539d3acfd3d1d63
refs/heads/master
2023-08-31T17:39:48.466163
2023-08-31T15:03:56
2023-08-31T15:03:56
118,596,920
39
29
MIT
2023-09-14T20:23:09
2018-01-23T10:48:05
Python
UTF-8
Python
false
false
27
py
from . import data_manager
5f021c7f67037101485a78987bd462e9077c3f9a
45dd427ec7450d2fac6fe2454f54a130b509b634
/homework_3/preparation2.py
f45b9c53cbbbda4b2d028ec030b01ce3a6e5a699
[]
no_license
weka511/smac
702fe183e3e73889ec663bc1d75bcac07ebb94b5
0b257092ff68058fda1d152d5ea8050feeab6fe2
refs/heads/master
2022-07-02T14:24:26.370766
2022-06-13T00:07:36
2022-06-13T00:07:36
33,011,960
22
8
null
null
null
null
UTF-8
Python
false
false
561
py
import os, random

# Plain-text state file: one "x y" disk position per line.
filename = 'disk_configuration.txt'

if os.path.isfile(filename):
    # Resume from the previously saved configuration.  The ``with`` block
    # guarantees the handle is closed even if a line fails to parse
    # (the original code leaked the handle on error).
    with open(filename, 'r') as f:
        L = [[float(a), float(b)] for a, b in (line.split() for line in f)]
    print('starting from file', filename)
else:
    # No saved state: draw three random positions in the unit square.
    L = [[random.uniform(0.0, 1.0), random.uniform(0.0, 1.0)]
         for _ in range(3)]
    print('starting from a new random configuration')

# Overwrite the first x-coordinate unconditionally — presumably a
# deliberate perturbation required by the exercise; confirm against the
# course notes before removing.
L[0][0] = 3.3

# Persist the (possibly modified) configuration for the next run,
# using the same "str(x) str(y)" per-line format as before.
with open(filename, 'w') as f:
    for a in L:
        f.write(str(a[0]) + ' ' + str(a[1]) + '\n')
dcf1b8da0e24589c36e224719499d07a0cf14ac6
ab11640874d7f7eb6c6c44ecadf0022368fd3d30
/ppm.py
0a2936220a56bda68cb0ba41af36762844c0711b
[]
no_license
bsdphk/BSTJ_reformat
074d44d86cb0fccd25e47be5ffc2199c910640bf
9e72421ed110a582f67cd94727573da9b68c4ed2
refs/heads/master
2021-01-25T10:11:42.752665
2013-01-23T09:44:26
2013-01-23T09:44:26
7,771,692
1
1
null
null
null
null
UTF-8
Python
false
false
5,280
py
from __future__ import print_function import mmap import os import sys class ppm(object): def __init__(self, fn, a = "r", x = None, y = None): assert a == "r" or a == "w" if a == "w": self.wr = True assert type(x) == int assert type(y) == int assert x > 0 assert y > 0 else: self.wr = False if self.wr: self.fi = open(fn, "w+b") self.fi.truncate(0) self.fi.truncate(19 + 3 * x * y) self.m = mmap.mmap(self.fi.fileno(), 0 ) s = "P6\n%5d %5d\n%3d\n" % (x, y, 255) self.m[0:len(s)] = s self.m[len(s):] = str(bytearray((255,255,255)) * (x * y)) else: self.fi = open(fn, "rb") self.m = mmap.mmap(self.fi.fileno(), 0, prot=mmap.PROT_READ) assert self.m[:2] == "P6" o = 0 n = 0 while True: x = self.m.find("\n", o, o + 100) assert x >= -1 s = self.m[o:x] o = x + 1 if s[0] == '#': continue if n == 0: self.type = s elif n == 1: s = s.split() self.x = int(s[0]) self.y = int(s[1]) elif n == 2: self.d = int(s) self.o = o break n += 1 self.xhis = None self.yhis = None self.fn = fn def __repr__(self): return "<P %dx%d %s>" % (self.x, self.y, self.fn) def rdpx(self, x, y): i = self.o + 3 * (y * self.x + x) return bytearray(self.m[i:i+3]) def wrpx(self, x, y, r, g, b): assert self.wr if y >= self.y: print("WRPX hi y", self.y, y) return if x >= self.x: print("WRPX hi x", self.x, x) return i = self.o + 3 * (y * self.x + x) self.m[i:i+3] = str(bytearray((r,g,b))) def clone(self, fn): o = ppm(fn, "w", self.x, self.y) o.m[o.o:] = self.m[self.o:] return o def hist(self): self.yhis = list() lx = list([0] * (self.x * 3)) for y in range(0, self.y): o = self.o + y * self.x * 3 w = self.x * 3 v = bytearray(self.m[o:o+w]) self.yhis.append(sum(v)/float(w)) #for i in range(len(v)): # lx[i] += v[i] self.xhis = list() for x in range(0, self.x): self.xhis.append(sum(lx[x * 3:x*3+3]) / (3 * self.y)) def put_rect(self, xlo, ylo, r): for b in r: o = self.o + ylo * self.x * 3 + xlo * 3 self.m[o:o+len(b)] = str(b) ylo += 1 class rect(object): def __init__(self, parent, xlo = 0, ylo = 0, xhi = None, yhi = None): 
self.p= parent self.xlo = xlo self.ylo = ylo if xhi == None: xhi = parent.x self.xhi = xhi if yhi == None: yhi = parent.y self.yhi = yhi self.typ = None def set_typ(self, typ): self.typ = typ def outline(self, o, r, g, b): for x in range(self.xlo, self.xhi - 1): o.wrpx(x, self.ylo, r, g, b) o.wrpx(x, self.ylo + 1, r, g, b) o.wrpx(x, self.yhi - 2, r, g, b) o.wrpx(x, self.yhi - 1, r, g, b) for y in range(self.ylo, self.yhi - 1): o.wrpx(self.xlo, y, r, g, b) o.wrpx(self.xlo + 1, y, r, g, b) o.wrpx(self.xhi - 2, y, r, g, b) o.wrpx(self.xhi - 1, y, r, g, b) def yavg(self): l = list() w= (self.xhi - self.xlo) * 3 for y in range(self.ylo, self.yhi): a0 = self.p.o + (self.xlo + y * self.p.x) * 3 a = sum(bytearray(self.p.m[a0:a0 + w])) a /= float(w) l.append(a) return l def ymin(self): l = list() w= (self.xhi - self.xlo) * 3 for y in range(self.ylo, self.yhi): a0 = self.p.o + (self.xlo + y * self.p.x) * 3 a = min(bytearray(self.p.m[a0:a0 + w])) l.append(a) return l def ymax(self): l = list() w= (self.xhi - self.xlo) * 3 for y in range(self.ylo, self.yhi): a0 = self.p.o + (self.xlo + y * self.p.x) * 3 a = max(bytearray(self.p.m[a0:a0 + w])) l.append(a) return l def xmin(self): w= (self.xhi - self.xlo) l = [255] * w for y in range(self.ylo, self.yhi): a0 = self.p.o + (self.xlo + y * self.p.x) * 3 b = bytearray(self.p.m[a0:a0 + w * 3]) for i in range(w): l[i] = min(l[i], b[i * 3]) return l def xmax(self): w= (self.xhi - self.xlo) l = [0] * w for y in range(self.ylo, self.yhi): a0 = self.p.o + (self.xlo + y * self.p.x) * 3 b = bytearray(self.p.m[a0:a0 + w * 3]) for i in range(w): l[i] = max(l[i], b[i * 3]) return l def xavg(self): w= (self.xhi - self.xlo) l = [0] * w for y in range(self.ylo, self.yhi): a0 = self.p.o + (self.xlo + y * self.p.x) * 3 b = bytearray(self.p.m[a0:a0 + w * 3]) for i in range(w): l[i] += b[i * 3] for i in range(w): l[i] /= float(self.yhi - self.ylo) return l def ydens(self, lo = 64, hi = 192): w= (self.xhi - self.xlo) h= (self.yhi - self.ylo) dl = [0] * 
h dh = [0] * h for y in range(h): a0 = self.p.o + (self.xlo + (self.ylo + y) * self.p.x) * 3 b = bytearray(self.p.m[a0:a0 + w * 3]) for i in range(w): v = b[i] if v < lo: dl[y] += 1 elif v > hi: dh[y] += 1 return dl, dh def hist(self): w= (self.xhi - self.xlo) h= (self.yhi - self.ylo) hh = [0] * 256 for y in range(h): a0 = self.p.o + (self.xlo + (self.ylo + y) * self.p.x) * 3 b = bytearray(self.p.m[a0:a0 + w * 3]) for i in range(w): v = b[i * 3] hh[v] += 1 return hh def __iter__(self): w= (self.xhi - self.xlo) for y in range(self.ylo, self.yhi): a0 = self.p.o + (self.xlo + y * self.p.x) * 3 yield bytearray(self.p.m[a0:a0 + w * 3]) def __repr__(self): return "<R %dx%d+%d+%d>" % ( self.xhi - self.xlo, self.yhi - self.ylo, self.xlo, self.ylo )
6e7cb657f766e088b1c0fb3cbe8754948b3991a6
c3175f2b482691fbfcb9adc391b4d45b6f17b09d
/PyOhio_2019/examples/pyscript_example.py
0b49ed87ee1a875552f07f3411e05bb70e6a9b23
[ "MIT" ]
permissive
python-cmd2/talks
27abff4566c6545c00ad59c701831694224b4ccf
64547778e12d8a457812bd8034d3c9d74edff407
refs/heads/master
2023-08-28T20:45:01.123085
2021-03-29T20:44:36
2021-03-29T20:44:36
197,960,510
2
3
MIT
2022-01-21T20:03:37
2019-07-20T17:14:51
Python
UTF-8
Python
false
false
3,750
py
#!/usr/bin/env python # coding=utf-8 """A sample application for how Python scripting can provide conditional control flow of a cmd2 application""" import os import cmd2 from cmd2 import style class CmdLineApp(cmd2.Cmd): """ Example cmd2 application to showcase conditional control flow in Python scripting within cmd2 aps. """ def __init__(self): # Enable the optional ipy command if IPython is installed by setting use_ipython=True super().__init__(use_ipython=True) self._set_prompt() self.intro = 'Built-in Python scripting is a killer feature ...' # Add cwd accessor to Python environment used by pyscripts self.py_locals['cwd'] = self.cwd def _set_prompt(self): """Set prompt so it displays the current working directory.""" self._cwd = os.getcwd() self.prompt = style('{!r} $ '.format(self._cwd), fg='cyan') def postcmd(self, stop: bool, line: str) -> bool: """Hook method executed just after a command dispatch is finished. :param stop: if True, the command has indicated the application should exit :param line: the command line text for this command :return: if this is True, the application will exit after this command and the postloop() will run """ """Override this so prompt always displays cwd.""" self._set_prompt() return stop def cwd(self): """Read-only property used by the pyscript to obtain cwd""" return self._cwd @cmd2.with_argument_list def do_cd(self, arglist): """Change directory. 
Usage: cd <new_dir> """ # Expect 1 argument, the directory to change to if not arglist or len(arglist) != 1: self.perror("cd requires exactly 1 argument") self.do_help('cd') return # Convert relative paths to absolute paths path = os.path.abspath(os.path.expanduser(arglist[0])) # Make sure the directory exists, is a directory, and we have read access out = '' err = None data = None if not os.path.isdir(path): err = '{!r} is not a directory'.format(path) elif not os.access(path, os.R_OK): err = 'You do not have read access to {!r}'.format(path) else: try: os.chdir(path) except Exception as ex: err = '{}'.format(ex) else: out = 'Successfully changed directory to {!r}\n'.format(path) self.stdout.write(out) data = path if err: self.perror(err) self.last_result = data # Enable tab completion for cd command def complete_cd(self, text, line, begidx, endidx): return self.path_complete(text, line, begidx, endidx, path_filter=os.path.isdir) dir_parser = cmd2.Cmd2ArgumentParser() dir_parser.add_argument('-l', '--long', action='store_true', help="display in long format with one item per line") @cmd2.with_argparser_and_unknown_args(dir_parser) def do_dir(self, args, unknown): """List contents of current directory.""" # No arguments for this command if unknown: self.perror("dir does not take any positional arguments:") self.do_help('dir') return # Get the contents as a list contents = os.listdir(self._cwd) fmt = '{} ' if args.long: fmt = '{}\n' for f in contents: self.stdout.write(fmt.format(f)) self.stdout.write('\n') self.last_result = contents if __name__ == '__main__': import sys c = CmdLineApp() sys.exit(c.cmdloop())
ac3e9c697e353b693c7f8c8310a98068050b8172
b25485391a8a2007c31cd98555855b517cc68a64
/examples/src/dbnd_examples/tutorial_syntax/T60_task_that_calls_other_tasks.py
4e531bd99f3753db413843c5526d5528de64f9e8
[ "Apache-2.0" ]
permissive
ipattarapong/dbnd
5a2bcbf1752bf8f38ad83e83401226967fee1aa6
7bd65621c46c73e078eb628f994127ad4c7dbd1a
refs/heads/master
2022-12-14T06:45:40.347424
2020-09-17T18:12:08
2020-09-17T18:12:08
299,219,747
0
0
Apache-2.0
2020-09-28T07:07:42
2020-09-28T07:07:41
null
UTF-8
Python
false
false
536
py
import pandas as pd

from dbnd import task


@task
def func_return_df():
    """Build a one-row DataFrame with constant columns c1=3, c2=1."""
    return pd.DataFrame(data=[[3, 1]], columns=["c1", "c2"])


@task
def func_gets(df):
    """Identity task: pass the input DataFrame straight through."""
    return df


@task
def func_pipeline(p: int):
    """Run a one-row DataFrame (c1=p, c2=1) through two identity tasks."""
    frame = pd.DataFrame(data=[[p, 1]], columns=["c1", "c2"])
    return func_gets(func_gets(frame))


@task
def func_pipeline2(p: int):
    """Feed the constant DataFrame task through one identity task."""
    return func_gets(func_return_df())


if __name__ == "__main__":
    import os

    os.environ["DBND__TRACKING"] = "true"
    func_pipeline2(4)
3c80a21b57ae2707f9a66aef9c33445e8683c9ce
95a27dd31260802da3768064c9614ce4f6ca4797
/Scripts/MLP_GenCode_112.py
72a351e7ac8a3dc76101c0b9c9b6d8c96346f104
[ "MIT" ]
permissive
ShepherdCode/Soars2021
7ee34227076ea424ad42f4727205141b69c78bb9
ab4f304eaa09e52d260152397a6c53d7a05457da
refs/heads/main
2023-07-09T05:28:56.993951
2021-08-18T14:16:57
2021-08-18T14:16:57
364,885,561
1
0
null
null
null
null
UTF-8
Python
false
false
9,618
py
#!/usr/bin/env python # coding: utf-8 # # MLP GenCode # MLP_GenCode_trying to fix bugs. # NEURONS=128 and K={1,2,3}. # # In[14]: import time def show_time(): t = time.time() print(time.strftime('%Y-%m-%d %H:%M:%S %Z', time.localtime(t))) show_time() # In[15]: PC_TRAINS=8000 NC_TRAINS=8000 PC_TESTS=8000 NC_TESTS=8000 PC_LENS=(200,99000) NC_LENS=(200,99000) PC_LENS=(200,4000) NC_LENS=(200,4000) MAX_K = 3 INPUT_SHAPE=(None,84) # 4^3 + 4^2 + 4^1 NEURONS=128 DROP_RATE=0.01 EPOCHS=1000 # 200 SPLITS=5 FOLDS=1 # make this 5 for serious testing # In[16]: import numpy as np import pandas as pd import matplotlib.pyplot as plt from sklearn.utils import shuffle from sklearn.model_selection import KFold from sklearn.model_selection import cross_val_score from sklearn.metrics import roc_curve from sklearn.metrics import roc_auc_score from keras.models import Sequential from keras.layers import Dense,Embedding,Dropout from keras.layers import Flatten,TimeDistributed from keras.losses import BinaryCrossentropy from keras.callbacks import ModelCheckpoint from keras.models import load_model # In[17]: import sys IN_COLAB = False try: from google.colab import drive IN_COLAB = True except: pass if IN_COLAB: print("On Google CoLab, mount cloud-local file, get our code from GitHub.") PATH='/content/drive/' #drive.mount(PATH,force_remount=True) # hardly ever need this drive.mount(PATH) # Google will require login credentials DATAPATH=PATH+'My Drive/data/' # must end in "/" import requests r = requests.get('https://raw.githubusercontent.com/ShepherdCode/Soars2021/master/SimTools/GenCodeTools.py') with open('GenCodeTools.py', 'w') as f: f.write(r.text) from GenCodeTools import GenCodeLoader r = requests.get('https://raw.githubusercontent.com/ShepherdCode/Soars2021/master/SimTools/KmerTools.py') with open('KmerTools.py', 'w') as f: f.write(r.text) from KmerTools import KmerTools else: print("CoLab not working. 
On my PC, use relative paths.") DATAPATH='data/' # must end in "/" sys.path.append("..") # append parent dir in order to use sibling dirs from SimTools.GenCodeTools import GenCodeLoader from SimTools.KmerTools import KmerTools BESTMODELPATH=DATAPATH+"BestModel-112" # saved on cloud instance and lost after logout LASTMODELPATH=DATAPATH+"LastModel-112" # saved on Google Drive but requires login # ## Data Load # Restrict mRNA to those transcripts with a recognized ORF. # In[18]: PC_FILENAME='gencode.v26.pc_transcripts.fa.gz' NC_FILENAME='gencode.v26.lncRNA_transcripts.fa.gz' PC_FILENAME='gencode.v38.pc_transcripts.fa.gz' NC_FILENAME='gencode.v38.lncRNA_transcripts.fa.gz' PC_FULLPATH=DATAPATH+PC_FILENAME NC_FULLPATH=DATAPATH+NC_FILENAME # In[19]: loader=GenCodeLoader() loader.set_label(1) loader.set_check_utr(False) pcdf=loader.load_file(PC_FULLPATH) print("PC seqs loaded:",len(pcdf)) loader.set_label(0) loader.set_check_utr(False) ncdf=loader.load_file(NC_FULLPATH) print("NC seqs loaded:",len(ncdf)) show_time() # ## Data Prep # In[20]: def dataframe_length_filter(df,low_high): (low,high)=low_high # The pandas query language is strange, # but this is MUCH faster than loop & drop. 
return df[ (df['seqlen']>=low) & (df['seqlen']<=high) ] def dataframe_extract_sequence(df): return df['sequence'].tolist() pc_all = dataframe_extract_sequence( dataframe_length_filter(pcdf,PC_LENS)) nc_all = dataframe_extract_sequence( dataframe_length_filter(ncdf,NC_LENS)) show_time() print("PC seqs pass filter:",len(pc_all)) print("NC seqs pass filter:",len(nc_all)) # Garbage collection to reduce RAM footprint pcdf=None ncdf=None # In[21]: # Any portion of a shuffled list is a random selection pc_train=pc_all[:PC_TRAINS] nc_train=nc_all[:NC_TRAINS] pc_test=pc_all[PC_TRAINS:PC_TRAINS+PC_TESTS] nc_test=nc_all[NC_TRAINS:NC_TRAINS+PC_TESTS] print("PC train, NC train:",len(pc_train),len(nc_train)) print("PC test, NC test:",len(pc_test),len(nc_test)) # Garbage collection pc_all=None nc_all=None print("First PC train",pc_train[0]) print("First PC test",pc_test[0]) # In[22]: def prepare_x_and_y(seqs1,seqs0): len1=len(seqs1) len0=len(seqs0) total=len1+len0 L1=np.ones(len1,dtype=np.int8) L0=np.zeros(len0,dtype=np.int8) S1 = np.asarray(seqs1) S0 = np.asarray(seqs0) all_labels = np.concatenate((L1,L0)) all_seqs = np.concatenate((S1,S0)) for i in range(0,len0): all_labels[i*2] = L0[i] all_seqs[i*2] = S0[i] all_labels[i*2+1] = L1[i] all_seqs[i*2+1] = S1[i] return all_seqs,all_labels # use this to test unshuffled # bug in next line? X,y = shuffle(all_seqs,all_labels) # sklearn.utils.shuffle #Doesn't fix it #X = shuffle(all_seqs,random_state=3) # sklearn.utils.shuffle #y = shuffle(all_labels,random_state=3) # sklearn.utils.shuffle return X,y Xseq,y=prepare_x_and_y(pc_train,nc_train) print(Xseq[:3]) print(y[:3]) # Tests: show_time() # In[23]: def seqs_to_kmer_freqs(seqs,max_K): tool = KmerTools() # from SimTools empty = tool.make_dict_upto_K(max_K) collection = [] for seq in seqs: counts = empty # Last param should be True when using Harvester. counts = tool.update_count_one_K(counts,max_K,seq,True) # Given counts for K=3, Harvester fills in counts for K=1,2. 
counts = tool.harvest_counts_from_K(counts,max_K) fdict = tool.count_to_frequency(counts,max_K) freqs = list(fdict.values()) collection.append(freqs) return np.asarray(collection) Xfrq=seqs_to_kmer_freqs(Xseq,MAX_K) show_time() # ## Neural network # In[24]: def make_DNN(): dt=np.float32 print("make_DNN") print("input shape:",INPUT_SHAPE) dnn = Sequential() dnn.add(Dense(NEURONS,activation="sigmoid",dtype=dt)) # relu doesn't work as well dnn.add(Dropout(DROP_RATE)) dnn.add(Dense(NEURONS,activation="sigmoid",dtype=dt)) dnn.add(Dropout(DROP_RATE)) dnn.add(Dense(1,activation="sigmoid",dtype=dt)) dnn.compile(optimizer='adam', # adadelta doesn't work as well loss=BinaryCrossentropy(from_logits=False), metrics=['accuracy']) # add to default metrics=loss dnn.build(input_shape=INPUT_SHAPE) return dnn model = make_DNN() print(model.summary()) # In[25]: def do_cross_validation(X,y): cv_scores = [] fold=0 #mycallbacks = [ModelCheckpoint( # filepath=MODELPATH, save_best_only=True, # monitor='val_accuracy', mode='max')] # When shuffle=True, the valid indices are a random subset. splitter = KFold(n_splits=SPLITS,shuffle=True) model = None for train_index,valid_index in splitter.split(X): if fold < FOLDS: fold += 1 X_train=X[train_index] # inputs for training y_train=y[train_index] # labels for training X_valid=X[valid_index] # inputs for validation y_valid=y[valid_index] # labels for validation print("MODEL") # Call constructor on each CV. Else, continually improves the same model. 
model = model = make_DNN() print("FIT") # model.fit() implements learning start_time=time.time() history=model.fit(X_train, y_train, epochs=EPOCHS, verbose=1, # ascii art while learning # callbacks=mycallbacks, # called at end of each epoch validation_data=(X_valid,y_valid)) end_time=time.time() elapsed_time=(end_time-start_time) print("Fold %d, %d epochs, %d sec"%(fold,EPOCHS,elapsed_time)) # print(history.history.keys()) # all these keys will be shown in figure pd.DataFrame(history.history).plot(figsize=(8,5)) plt.grid(True) plt.gca().set_ylim(0,1) # any losses > 1 will be off the scale plt.show() return model # parameters at end of training # In[26]: show_time() last_model = do_cross_validation(Xfrq,y) # In[27]: def show_test_AUC(model,X,y): ns_probs = [0 for _ in range(len(y))] bm_probs = model.predict(X) ns_auc = roc_auc_score(y, ns_probs) bm_auc = roc_auc_score(y, bm_probs) ns_fpr, ns_tpr, _ = roc_curve(y, ns_probs) bm_fpr, bm_tpr, _ = roc_curve(y, bm_probs) plt.plot(ns_fpr, ns_tpr, linestyle='--', label='Guess, auc=%.4f'%ns_auc) plt.plot(bm_fpr, bm_tpr, marker='.', label='Model, auc=%.4f'%bm_auc) plt.title('ROC') plt.xlabel('False Positive Rate') plt.ylabel('True Positive Rate') plt.legend() plt.show() print("%s: %.2f%%" %('AUC',bm_auc*100.0)) def show_test_accuracy(model,X,y): scores = model.evaluate(X, y, verbose=0) print("%s: %.2f%%" % (model.metrics_names[1], scores[1]*100)) # In[28]: print("Accuracy on training data.") print("Prepare...") show_time() Xseq,y=prepare_x_and_y(pc_train,nc_train) print("Extract K-mer features...") show_time() Xfrq=seqs_to_kmer_freqs(Xseq,MAX_K) print("Plot...") show_time() show_test_AUC(last_model,Xfrq,y) show_test_accuracy(last_model,Xfrq,y) show_time() # In[29]: print("Accuracy on test data.") print("Prepare...") show_time() Xseq,y=prepare_x_and_y(pc_test,nc_test) print("Extract K-mer features...") show_time() Xfrq=seqs_to_kmer_freqs(Xseq,MAX_K) print("Plot...") show_time() show_test_AUC(last_model,Xfrq,y) 
show_test_accuracy(last_model,Xfrq,y) show_time()
1f98e74eef835ca6a17c0f6a2081205ba2b18a15
c41069e0cb4105c4092853e60de6bf116b332e70
/resaspy/__init__.py
a5fdbf6faf8e137b9e35b31325e2ee25d4a95bd9
[ "MIT" ]
permissive
ar90n/resaspy
5a4e7789dc24f412e1f1f929fb491f349abe90f1
58d140ad1e61478ab8f3993676bd0c97ad43ae18
refs/heads/master
2021-01-11T10:13:13.712176
2017-05-10T14:17:57
2017-05-10T14:17:57
78,554,641
7
1
null
null
null
null
UTF-8
Python
false
false
510
py
# -*- coding: utf-8 -*- """ resaspy is a simple utility for RESAS api(https://opendata.resas-portal.go.jp). usage: >>> from resaspy import Resaspy >>> resas = Resaspy( key ) >>> r = resas.prefectures() >>> r.result :copyright: (c) 2016 by Masahiro Wada. :license: MIT, see LICENSE for more details. """ __title__ = 'resaspy' __version__ = '0.2.1' __build__ = 0x021204 __author__ = 'Masahiro Wada' __license__ = 'MIT' __copyright__ = 'Copyright 2016 Masahiro Wada' from .resaspy import Resaspy
6a97d871b8ed8a49afb5c0e20a56bf9f9b38ed3a
5e80f0b1af9fbf9dc774dbb68aa603574e4ae0ba
/algorithm-study/codewars/Quarter_of_the_year.py
96e4279a8205373084cf8d50623834bef2b00e87
[]
no_license
namujinju/study-note
4271b4248b3c4ac1b96ef1da484d86569a030762
790b21e5318a326e434dc836f5f678a608037a8c
refs/heads/master
2023-02-04T13:25:55.418896
2020-12-26T10:47:11
2020-12-26T10:47:11
275,279,138
0
0
null
null
null
null
UTF-8
Python
false
false
56
py
def quarter_of(month):
    """Return the calendar quarter (1-4) containing the 1-based *month*."""
    quarter_index, _ = divmod(month - 1, 3)
    return quarter_index + 1
30d1e191fd39fe9da315d8edc408653ef79ab813
9baa9f1bedf7bc973f26ab37c9b3046824b80ca7
/venv-bck/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/timeout.py
1940bcf576151f37297b97d9ebafec8007ab3c80
[]
no_license
shakthydoss/suriyan
58774fc5de1de0a9f9975c2ee3a98900e0a5dff4
8e39eb2e65cc6c6551fc165b422b46d598cc54b8
refs/heads/master
2020-04-12T05:36:59.957153
2017-01-08T06:12:13
2017-01-08T06:12:13
59,631,349
0
0
null
null
null
null
UTF-8
Python
false
false
9,507
py
from __future__ import absolute_import import time from socket import _GLOBAL_DEFAULT_TIMEOUT from ..exceptions import TimeoutStateError # A sentinel value to indicate that no timeout was specified by the user in # urllib3 _Default = object() def current_time(): """ Retrieve the current time. This function is mocked out in unit testing. """ return time.time() class Timeout(object): """ Timeout configuration. Timeouts can be defined as a default for a pool:: timeout = Timeout(connect=2.0, read=7.0) http = PoolManager(timeout=timeout) response = http.request('GET', 'http://example.com/') Or per-request (which overrides the default for the pool):: response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) Timeouts can be disabled by setting all the parameters to ``None``:: no_timeout = Timeout(connect=None, read=None) response = http.request('GET', 'http://example.com/, timeout=no_timeout) :param total: This combines the connect and read timeouts into one; the read timeout will be set to the time leftover from the connect attempt. In the event that both a connect timeout and a total are specified, or a read timeout and a total are specified, the shorter timeout will be applied. Defaults to None. :type total: integer, float, or None :param connect: The maximum amount of time to wait for a connection attempt to a server to succeed. Omitting the parameter will default the connect timeout to the system default, probably `the global default timeout in socket.py <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. None will set an infinite timeout for connection attempts. :type connect: integer, float, or None :param read: The maximum amount of time to wait between consecutive read operations for a response from the server. Omitting the parameter will default the read timeout to the system default, probably `the global default timeout in socket.py <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_. 
None will set an infinite timeout. :type read: integer, float, or None .. note:: Many factors can affect the total amount of time for urllib3 to return an HTTP response. For example, Python's DNS resolver does not obey the timeout specified on the socket. Other factors that can affect total request time include high CPU load, high swap, the program running at a low priority level, or other behaviors. In addition, the read and total timeouts only measure the time between read operations on the socket connecting the client and the server, not the total amount of time for the request to return a complete response. For most requests, the timeout is raised because the server has not sent the first byte in the specified time. This is not always the case; if a server streams one byte every fifteen seconds, a timeout of 20 seconds will not trigger, even though the request will take several minutes to complete. If your goal is to cut off any request after a set amount of wall clock time, consider having a second "watcher" thread to cut off a slow request. """ #: A sentinel object representing the default timeout value DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT def __init__(self, total=None, connect=_Default, read=_Default): self._connect = self._validate_timeout(connect, 'connect') self._read = self._validate_timeout(read, 'read') self.total = self._validate_timeout(total, 'total') self._start_connect = None def __str__(self): return '%s(connect=%r, read=%r, total=%r)' % ( type(self).__name__, self._connect, self._read, self.total) @classmethod def _validate_timeout(cls, value, name): """ Check that a timeout attribute is valid. :param value: The timeout value to validate :param name: The name of the timeout attribute to validate. This is used to specify in error messages. :return: The validated and casted version of the given value. :raises ValueError: If the type is not an integer or a float, or if it is a numeric value less than zero. 
""" if value is _Default: return cls.DEFAULT_TIMEOUT if value is None or value is cls.DEFAULT_TIMEOUT: return value try: float(value) except (TypeError, ValueError): raise ValueError("Timeout value %s was %s, but it must be an " "int or float." % (name, value)) try: if value < 0: raise ValueError("Attempted to set %s timeout to %s, but the " "timeout cannot be set to a value less " "than 0." % (name, value)) except TypeError: # Python 3 raise ValueError("Timeout value %s was %s, but it must be an " "int or float." % (name, value)) return value @classmethod def from_float(cls, timeout): """ Create a new Timeout from a legacy timeout value. The timeout value used by httplib.py sets the same timeout on the connect(), and recv() socket requests. This creates a :class:`Timeout` object that sets the individual timeouts to the ``timeout`` value passed to this function. :param timeout: The legacy timeout value. :type timeout: integer, float, sentinel default object, or None :return: Timeout object :rtype: :class:`Timeout` """ return Timeout(read=timeout, connect=timeout) def clone(self): """ Create a copy of the timeout object Timeout properties are stored per-pool but each request needs a fresh Timeout object to ensure each one has its own start/stop configured. :return: a copy of the timeout object :rtype: :class:`Timeout` """ # We can't use copy.deepcopy because that will also create a new object # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to # detect the user default. return Timeout(connect=self._connect, read=self._read, total=self.total) def start_connect(self): """ Start the timeout clock, used during a connect() attempt :raises urllib3.exceptions.TimeoutStateError: if you attempt to start a timer that has been started already. 
""" if self._start_connect is not None: raise TimeoutStateError("Timeout timer has already been started.") self._start_connect = current_time() return self._start_connect def get_connect_duration(self): """ Gets the time elapsed since the call to :meth:`start_connect`. :return: Elapsed time. :rtype: float :raises urllib3.exceptions.TimeoutStateError: if you attempt to get duration for a timer that hasn't been started. """ if self._start_connect is None: raise TimeoutStateError("Can't get connect duration for timer " "that has not started.") return current_time() - self._start_connect @property def connect_timeout(self): """ Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. :return: Connect timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None """ if self.total is None: return self._connect if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: return self.total return min(self._connect, self.total) @property def read_timeout(self): """ Get the value for the read timeout. This assumes some time has elapsed in the connection timeout and computes the read timeout appropriately. If self.total is set, the read timeout is dependent on the amount of time taken by the connect timeout. If the connection time has not been established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be raised. :return: Value to use for the read timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` has not yet been called on this object. """ if (self.total is not None and self.total is not self.DEFAULT_TIMEOUT and self._read is not None and self._read is not self.DEFAULT_TIMEOUT): # In case the connect timeout has not yet been established. 
if self._start_connect is None: return self._read return max(0, min(self.total - self.get_connect_duration(), self._read)) elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: return max(0, self.total - self.get_connect_duration()) else: return self._read
5215d7aee1f79545124b261740052b0b5e5af4fb
f44c0ae69de99da4ca248d7ec00f6a56d84f649d
/src/parameters_combinator/ListOfParams.py
6446198df70ded58d37de5c8a074601cd06bedf7
[ "BSD-3-Clause" ]
permissive
birlrobotics/parameters_combinator
641f7701e4b7429fb68be524de97e1eb2e514fa6
6f4f1ed838867c3d6b692b57d5895c18320394e9
refs/heads/master
2021-01-25T14:10:20.852853
2018-07-09T06:44:11
2018-07-09T06:44:11
123,659,869
0
0
null
null
null
null
UTF-8
Python
false
false
36
py
class ListOfParams(list): pass
5376359526eb1ac0de52a283b309692922b54864
74a01e6a22fe7c6b552e2ffb9f92d9671c54aa20
/bpb/parser/pdf.py
fb7471eb62cbce5bdbd4260bce0c4ba579fa4d16
[]
no_license
snagwuk/blog_post_bot_cli
549805ba988c3753185111575ba759566c7ea17f
29e6c6e9e7c48e5ad7c9b4dda26e56226c683290
refs/heads/master
2022-03-27T01:05:44.441712
2020-01-05T01:00:54
2020-01-05T01:00:54
null
0
0
null
null
null
null
UTF-8
Python
false
false
442
py
# modules for import PyPDF2 import pprint # pdf file object # you can find find the pdf file with complete code in below pdfFileObj = open('../data/test.pdf', 'rb') # pdf reader object pdfReader = PyPDF2.PdfFileReader(pdfFileObj) # number of pages in pdf print(pdfReader.numPages) # a page object pageObj = pdfReader.getPage(0) # extracting text from page. # this will print the text you can also save that into String pprint.pprint(pageObj)
69e64077be97c782e455563333f9f0aaafc67fca
9b64f0f04707a3a18968fd8f8a3ace718cd597bc
/huaweicloud-sdk-ims/huaweicloudsdkims/v2/model/list_image_tags_response.py
76a1f6343bbb44bb9fa8a53ef623e27886720b43
[ "Apache-2.0" ]
permissive
jaminGH/huaweicloud-sdk-python-v3
eeecb3fb0f3396a475995df36d17095038615fba
83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b
refs/heads/master
2023-06-18T11:49:13.958677
2021-07-16T07:57:47
2021-07-16T07:57:47
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,811
py
# coding: utf-8 import re import six from huaweicloudsdkcore.sdk_response import SdkResponse class ListImageTagsResponse(SdkResponse): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ sensitive_list = [] openapi_types = { 'tags': 'list[ResourceTag]' } attribute_map = { 'tags': 'tags' } def __init__(self, tags=None): """ListImageTagsResponse - a model defined in huaweicloud sdk""" super(ListImageTagsResponse, self).__init__() self._tags = None self.discriminator = None if tags is not None: self.tags = tags @property def tags(self): """Gets the tags of this ListImageTagsResponse. 标签列表 :return: The tags of this ListImageTagsResponse. :rtype: list[ResourceTag] """ return self._tags @tags.setter def tags(self, tags): """Sets the tags of this ListImageTagsResponse. 标签列表 :param tags: The tags of this ListImageTagsResponse. :type: list[ResourceTag] """ self._tags = tags def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: if attr in self.sensitive_list: result[attr] = "****" else: result[attr] = value return result def to_str(self): import simplejson as json return json.dumps(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, ListImageTagsResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not 
equal""" return not self == other
6d93b0cd78292a61ae919edfa5a15e96fa5f6f6a
c9697437c292df7fefd68559fdd9636066bdb2f1
/dev/potentials/sinc_pulse_from_number_of_cycles.py
0b1d64c026d7f66d3afbc925237681fad25c3cd4
[]
no_license
JoshKarpel/ionization
ebdb387483a9bc3fdb52818ab8e897e562ffcc67
3056df523ee90147d262b0e8bfaaef6f2678ea11
refs/heads/master
2021-03-24T13:03:57.469388
2020-04-06T03:37:04
2020-04-06T03:37:04
62,348,115
0
0
null
null
null
null
UTF-8
Python
false
false
1,601
py
#!/usr/bin/env python import logging import os import numpy as np import simulacra as si import simulacra.units as u FILE_NAME = os.path.splitext(os.path.basename(__file__))[0] OUT_DIR = os.path.join(os.getcwd(), "out", FILE_NAME) LOGMAN = si.utils.LogManager("simulacra", "ionization", stdout_level=logging.DEBUG) PLOT_KWARGS = dict(target_dir=OUT_DIR, img_format="png", fig_dpi_scale=6) if __name__ == "__main__": with LOGMAN as logger: number_of_cycles = [0.51, 1, 2, 3] nc_pulses = [ ( nc, ion.potentials.SincPulse.from_number_of_cycles( pulse_width=100 * u.asec, number_of_cycles=nc, phase=u.pi / 2 ), ) for nc in number_of_cycles ] # note that you actually get twice as many carrier cycles as you specify in the "center" # because the center of the sinc is twice as wide as a pulse width (it's double-sided) tb = 1 for nc, pulse in nc_pulses: print(pulse.info()) times = np.linspace(-tb * pulse.pulse_width, tb * pulse.pulse_width, 10000) si.vis.xy_plot( f"Nc={nc}", times, pulse.amplitude * np.cos((pulse.omega_carrier * times) + pulse.phase), pulse.get_electric_field_amplitude(times), line_labels=["carrier", "pulse"], line_kwargs=[{"linestyle": "--"}, None], x_unit=pulse.pulse_width, y_unit=pulse.amplitude, **PLOT_KWARGS, )
151392182417b31d3dd7cb2a6d0fcfa253fee301
436177bf038f9941f67e351796668700ffd1cef2
/venv/Lib/site-packages/sklearn/linear_model/__init__.py
796b13e6c63d51def5a559c6a79836627fc25551
[]
no_license
python019/matplotlib_simple
4359d35f174cd2946d96da4d086026661c3d1f9c
32e9a8e773f9423153d73811f69822f9567e6de4
refs/heads/main
2023-08-22T18:17:38.883274
2021-10-07T15:55:50
2021-10-07T15:55:50
380,471,961
29
0
null
null
null
null
UTF-8
Python
false
false
2,952
py
""" The :mod:`sklearn.linear_model` module implements a variety of linear models. """ # See http://scikit-learn.sourceforge.net/modules/sgd.html and # http://scikit-learn.sourceforge.net/modules/linear_model.html for # complete documentation. from ._base import LinearRegression from ._bayes import BayesianRidge, ARDRegression from ._least_angle import (Lars, LassoLars, lars_path, lars_path_gram, LarsCV, LassoLarsCV, LassoLarsIC) from ._coordinate_descent import (Lasso, ElasticNet, LassoCV, ElasticNetCV, lasso_path, enet_path, MultiTaskLasso, MultiTaskElasticNet, MultiTaskElasticNetCV, MultiTaskLassoCV) from ._glm import (PoissonRegressor, GammaRegressor, TweedieRegressor) from ._huber import HuberRegressor from ._sgd_fast import Hinge, Log, ModifiedHuber, SquaredLoss, Huber from ._stochastic_gradient import SGDClassifier, SGDRegressor from ._ridge import (Ridge, RidgeCV, RidgeClassifier, RidgeClassifierCV, ridge_regression) from ._logistic import LogisticRegression, LogisticRegressionCV from ._omp import (orthogonal_mp, orthogonal_mp_gram, OrthogonalMatchingPursuit, OrthogonalMatchingPursuitCV) from ._passive_aggressive import PassiveAggressiveClassifier from ._passive_aggressive import PassiveAggressiveRegressor from ._perceptron import Perceptron from ._ransac import RANSACRegressor from ._theil_sen import TheilSenRegressor __all__ = ['ARDRegression', 'BayesianRidge', 'ElasticNet', 'ElasticNetCV', 'Hinge', 'Huber', 'HuberRegressor', 'Lars', 'LarsCV', 'Lasso', 'LassoCV', 'LassoLars', 'LassoLarsCV', 'LassoLarsIC', 'LinearRegression', 'Log', 'LogisticRegression', 'LogisticRegressionCV', 'ModifiedHuber', 'MultiTaskElasticNet', 'MultiTaskElasticNetCV', 'MultiTaskLasso', 'MultiTaskLassoCV', 'OrthogonalMatchingPursuit', 'OrthogonalMatchingPursuitCV', 'PassiveAggressiveClassifier', 'PassiveAggressiveRegressor', 'Perceptron', 'Ridge', 'RidgeCV', 'RidgeClassifier', 'RidgeClassifierCV', 'SGDClassifier', 'SGDRegressor', 'SquaredLoss', 'TheilSenRegressor', 'enet_path', 
'lars_path', 'lars_path_gram', 'lasso_path', 'orthogonal_mp', 'orthogonal_mp_gram', 'ridge_regression', 'RANSACRegressor', 'PoissonRegressor', 'GammaRegressor', 'TweedieRegressor']
ccbb02c3cf0ac4b9e9da7e4142bf9b2deecd73fd
c7a932e28a1a1dbc70c05c62caa43ce6cb691686
/fabric/service/monitor/promethues/prometheus.py
13d3ebd36fcfa08a10fc933ae20e580484cc043f
[]
no_license
Martians/deploy
9c2c9a9b0e4431e965960aada0f40df6a34b2e09
6fd3f892edd7a12aa69d92f357cc52932df86d9c
refs/heads/master
2022-01-09T03:29:13.948962
2019-04-29T05:15:40
2019-04-29T05:15:40
112,311,997
0
0
null
null
null
null
UTF-8
Python
false
false
4,554
py
# coding=utf-8 from invoke import task from common import * import system class LocalConfig(LocalBase): """ 默认配置 """ def __init__(self): LocalBase.__init__(self, 'prometheus') self.source = 'https://github.com/prometheus/prometheus/releases/download/v2.6.0/prometheus-2.6.0.linux-amd64.tar.gz' self.config = 'prometheus.yml' self.node_source = 'https://github.com/prometheus/node_exporter/releases/download/v0.17.0/node_exporter-0.17.0.linux-amd64.tar.gz' self.node_name = 'node_exporter' self.node_port = 9100 self.node_config = 'node.yaml' self.client_config = 'client.yaml' self.alert = 'https://github.com/prometheus/alertmanager/releases/download/v0.16.0-beta.0/alertmanager-0.16.0-beta.0.linux-amd64.tar.gz' """ 提供个默认参数 该变量定义在头部,这样在函数的默认参数中,也可以使用了 """ local = LocalConfig() """ fab install-server fab install-node fab start-server fab start-node """ @task def install_server(c): c = hosts.one() download(c, local.name, source=local.source) """ 安装包下载后,到master上进行解压 """ scp(c, hosts.get(0), package(), dest=local.temp) unpack(conn(0), local.name, path=package(local.temp)) config_server(conn(0)) def config_server(c): sed.path(os.path.join(local.base, local.config)) """ 配置文件 """ file_sd_node = """ - job_name: 'node' file_sd_configs: - files: - '{node}'""".format(node=local.node_config) file_sd_client = """ - job_name: 'client' scrape_interval: 1s file_sd_configs: - files: - '{client}'""".format(client=local.client_config) sed.append(c, file_sd_node) sed.append(c, file_sd_client) sed.append(c, ' - "*_rules.yml"', 'rule_files:') """ file service discovery """ with c.cd(local.base): c.run("""echo ' - labels: type: 'node' targets:' > {node}""".format(node=local.node_config)) c.run("""echo ' - labels: type: 'client' targets:' > {client}""".format(client=local.client_config)) @task def help(c): c = conn(c, True) system.help(c, ''' monitor node: {base}/{node} monitor client: {base}/{client} monitor rules; {base}/*_rules.yaml\n'''.format(base=local.base, node=local.node_config, 
client=local.client_config), 'config') @task def install_node(c): c = hosts.one() download(c, local.node_name, source=local.node_source) copy_pack(c, dest=local.temp) hosts.execute('sudo rm -rf /opt/*{}*'.format(local.node_name)) for index in hosts.lists(): unpack(hosts.conn(index), local.node_name, path=package(local.temp)) config_server_node(c) def config_server_node(c): c = hosts.conn(0) append = '' for host in hosts.lists(index=False): append += " - '{}:{}'\n".format(host.host, local.node_port) sed.path(os.path.join(local.base, local.node_config)) sed.append(c, append) @task def start_server(c): c = hosts.conn(0) c.run(system.nohup('cd {}; nohup ./prometheus --config.file={}' .format(local.base, local.config), nohup=''), pty=True) @task def stop_server(c): c = hosts.conn(0) c.run('{}'.format(system.kills('prometheus', string=True))) @task def start_node(c): system.start(local.node_name, system.nohup('cd {}; nohup ./node_exporter --web.listen-address=":{}"' .format(base(local.node_name), local.node_port), nohup=''), pty=True) @task def stop_node(c): system.stop(local.node_name) @task def clean(c): stop_server(c) stop_node(c) system.clean('/opt/{}, /opt/{}'.format(local.name, local.node_name)) @task def install_alert(c): pass # hosts.execute('sudo rm -rf /opt/*kafka*') # # for index in hosts.lists(): # unpack(hosts.conn(index), local.name, path=package(local.temp)) @task def help(c): c = conn(c, True) system.help(c, ''' http://192.168.0.81:9090 fab install-server fab start-server node: http://192.168.0.81:9100 fab install-node fab start-node ''', 'server') # install_server(conn(0)) # install_node(conn(0)) # start_server(conn(0)) # stop(conn(0)) # clean(conn(0)) # start_node(conn(0))
797987fe548a6f7c7c46884932412b3e90e8bc1a
119437adb7830659307c18b79a9cc3f6bfc6fe40
/onnx_model_serving/onnx_model_predict.py
95a0f36ce1f8192ebe4a598455dc1cc4eb833cee
[]
no_license
percent4/PyTorch_Learning
478bec35422cdc66bf41b4258e29fbcb6d24f60c
24184d49032c9c9a68142aff89dabe33adc17b52
refs/heads/master
2023-03-31T03:01:19.372830
2023-03-17T17:02:39
2023-03-17T17:02:39
171,400,828
16
7
null
2023-09-02T08:53:26
2019-02-19T03:47:41
Jupyter Notebook
UTF-8
Python
false
false
730
py
# -*- coding: utf-8 -*- # @Time : 2021/2/3 20:09 # @Author : Jclian91 # @File : onnx_model_predict.py # @Place : Yangpu, Shanghai import onnxruntime import torch import numpy as np def to_numpy(tensor): return tensor.detach().cpu().numpy() if tensor.requires_grad else tensor.cpu().numpy() ort_session = onnxruntime.InferenceSession("iris.onnx") # compute ONNX Runtime output prediction x = torch.Tensor([[6.4, 2.8, 5.6, 2.1]]) print("input size: ", to_numpy(x).shape) ort_inputs = {ort_session.get_inputs()[0].name: to_numpy(x)} ort_outs = ort_session.run(None, ort_inputs) # compare ONNX Runtime and PyTorch results print(ort_outs[0]) print("Exported model has been tested with ONNXRuntime, and the result looks good!")
a3ad6fec8b7b991839d4265cfb3f8f96df862df6
7b270cf5f9d0a3e26b5afd758563c6cff73a5248
/comportamentais/iterator/canal/canal/canais/__init__.py
89401268f00e0cba0c575756511cf3b8a786a561
[]
no_license
reginaldosantarosa/DesignPatterns
10810672d3831e562ec636a5f66bd709c797ca34
bec4247f52b8d2e1fe41c570408816a5d4b22608
refs/heads/master
2020-04-04T06:54:19.757054
2018-01-04T03:06:05
2018-01-04T03:06:05
155,761,201
0
0
null
null
null
null
UTF-8
Python
false
false
80
py
from canal.canais.filmes import Filme from canal.canais.esportes import Esporte
c0bc193c0ca45d24c0490317457e0038ba7a2b66
7a550d2268bc4bc7e2fec608ffb1db4b2e5e94a0
/0701-0800/0701-Insert into a Binary Search Tree/0701-Insert into a Binary Search Tree.py
fe9dea06abb24db8df133f5a1db2ab1c7bbf15c4
[ "MIT" ]
permissive
jiadaizhao/LeetCode
be31bd0db50cc6835d9c9eff8e0175747098afc6
4ddea0a532fe7c5d053ffbd6870174ec99fc2d60
refs/heads/master
2021-11-05T04:38:47.252590
2021-10-31T09:54:53
2021-10-31T09:54:53
99,655,604
52
28
MIT
2020-10-02T12:47:47
2017-08-08T05:57:26
C++
UTF-8
Python
false
false
617
py
# Definition for a binary tree node. class TreeNode: def __init__(self, x): self.val = x self.left = None self.right = None class Solution: def insertIntoBST(self, root: TreeNode, val: int) -> TreeNode: inode = TreeNode(val) if root is None: return inode node = root while node: prev = node if node.val < val: node = node.right else: node = node.left if prev.val < val: prev.right = inode else: prev.left = inode return root
c0a4b1ecee5eb7705fb4d6c81545e651d56f3071
d36c4c882089b9b81e6e3b6323eeb9c43f5160a9
/7KYU/Square Area Inside Circle/solution.py
dead9b201402be6e5751806d9e7f0d05e24b1f5d
[]
no_license
stuartstein777/CodeWars
a6fdc2fa6c4fcf209986e939698d8075345dd16f
d8b449a16c04a9b883c4b5e272cc90a4e6d8a2e6
refs/heads/master
2023-08-27T20:32:49.018950
2023-08-24T23:23:29
2023-08-24T23:23:29
233,281,814
0
0
null
null
null
null
UTF-8
Python
false
false
128
py
import math def square_area_to_circle(size): radius = math.sqrt(size) / 2 return round((math.pi * (radius * radius)), 8)
ef14e05b00b14f120326d7133682265e3176e41e
93a613f09d564a1d45ecc01b54b73745ce2850b7
/majora2/migrations/0023_biosampleartifact_secondary_accession.py
0d98165508518f2dfdfd9b53251418ed78c4a31c
[]
no_license
pythseq/majora
fa17c77fa8a916c688fd2b40744d768dd851b99b
40b918d32b4061cddee5f7279f97e70eb894623d
refs/heads/master
2022-12-23T20:09:41.233844
2020-09-28T18:18:42
2020-09-28T18:18:42
null
0
0
null
null
null
null
UTF-8
Python
false
false
433
py
# Generated by Django 2.2.10 on 2020-03-22 16:34 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('majora2', '0022_auto_20200322_1616'), ] operations = [ migrations.AddField( model_name='biosampleartifact', name='secondary_accession', field=models.CharField(blank=True, max_length=256, null=True), ), ]