commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | diff
---|---|---|---|---|---|---|---|---|---|---|
b74c56b3999800917946378f20288407347710e6 | social/backends/gae.py | social/backends/gae.py | """
Google App Engine support using User API
"""
from __future__ import absolute_import
from google.appengine.api import users
from social.backends.base import BaseAuth
from social.exceptions import AuthException
class GoogleAppEngineAuth(BaseAuth):
"""GoogleAppengine authentication backend"""
name = 'google-appengine'
def get_user_id(self, details, response):
"""Return current user id."""
user = users.get_current_user()
if user:
return user.user_id()
def get_user_details(self, response):
"""Return user basic information (id and email only)."""
user = users.get_current_user()
return {'username': user.user_id(),
'email': user.email(),
'fullname': '',
'first_name': '',
'last_name': ''}
def auth_url(self):
"""Build and return complete URL."""
return users.create_login_url(self.redirect_uri)
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance."""
if not users.get_current_user():
raise AuthException('Authentication error')
kwargs.update({'response': '', 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
BACKENDS = {
'gae': GoogleAppEngineAuth
}
| """
Google App Engine support using User API
"""
from __future__ import absolute_import
from google.appengine.api import users
from social.backends.base import BaseAuth
from social.exceptions import AuthException
class GoogleAppEngineAuth(BaseAuth):
"""GoogleAppengine authentication backend"""
name = 'google-appengine'
def get_user_id(self, details, response):
"""Return current user id."""
user = users.get_current_user()
if user:
return user.user_id()
def get_user_details(self, response):
"""Return user basic information (id and email only)."""
user = users.get_current_user()
return {'username': user.user_id(),
'email': user.email(),
'fullname': '',
'first_name': '',
'last_name': ''}
def auth_url(self):
"""Build and return complete URL."""
return users.create_login_url(self.redirect_uri)
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance."""
if not users.get_current_user():
raise AuthException('Authentication error')
kwargs.update({'response': '', 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
BACKENDS = {
'google-appengine': GoogleAppEngineAuth
}
| Rename to be consistent with backend name | Rename to be consistent with backend name
| Python | bsd-3-clause | ononeor12/python-social-auth,barseghyanartur/python-social-auth,tutumcloud/python-social-auth,webjunkie/python-social-auth,cmichal/python-social-auth,henocdz/python-social-auth,mchdks/python-social-auth,falcon1kr/python-social-auth,cmichal/python-social-auth,contracode/python-social-auth,SeanHayes/python-social-auth,VishvajitP/python-social-auth,robbiet480/python-social-auth,duoduo369/python-social-auth,lamby/python-social-auth,mrwags/python-social-auth,tkajtoch/python-social-auth,nirmalvp/python-social-auth,mchdks/python-social-auth,JerzySpendel/python-social-auth,henocdz/python-social-auth,rsteca/python-social-auth,joelstanner/python-social-auth,mark-adams/python-social-auth,frankier/python-social-auth,mathspace/python-social-auth,ByteInternet/python-social-auth,drxos/python-social-auth,mathspace/python-social-auth,msampathkumar/python-social-auth,yprez/python-social-auth,frankier/python-social-auth,yprez/python-social-auth,VishvajitP/python-social-auth,SeanHayes/python-social-auth,python-social-auth/social-app-django,firstjob/python-social-auth,ononeor12/python-social-auth,falcon1kr/python-social-auth,michael-borisov/python-social-auth,tkajtoch/python-social-auth,JJediny/python-social-auth,ariestiyansyah/python-social-auth,jameslittle/python-social-auth,clef/python-social-auth,chandolia/python-social-auth,bjorand/python-social-auth,S01780/python-social-auth,mrwags/python-social-auth,muhammad-ammar/python-social-auth,jneves/python-social-auth,daniula/python-social-auth,cmichal/python-social-auth,wildtetris/python-social-auth,jneves/python-social-auth,bjorand/python-social-auth,contracode/python-social-auth,ByteInternet/python-social-auth,cjltsod/python-social-auth,S01780/python-social-auth,barseghyanartur/python-social-auth,daniula/python-social-auth,lneoe/python-social-auth,muhammad-ammar/python-social-auth,Andygmb/python-social-auth,webjunkie/python-social-auth,muhammad-ammar/python-social-auth,DhiaEddineSaidi/python-social-auth,garrett-schlesinger/python-social-auth,python-social-auth/social-storage-sqlalchemy,JJediny/python-social-auth,jameslittle/python-social-auth,JerzySpendel/python-social-auth,fearlessspider/python-social-auth,tobias47n9e/social-core,ononeor12/python-social-auth,msampathkumar/python-social-auth,henocdz/python-social-auth,san-mate/python-social-auth,jneves/python-social-auth,daniula/python-social-auth,mark-adams/python-social-auth,garrett-schlesinger/python-social-auth,iruga090/python-social-auth,wildtetris/python-social-auth,MSOpenTech/python-social-auth,alrusdi/python-social-auth,merutak/python-social-auth,lamby/python-social-auth,ariestiyansyah/python-social-auth,lneoe/python-social-auth,VishvajitP/python-social-auth,degs098/python-social-auth,imsparsh/python-social-auth,nvbn/python-social-auth,rsteca/python-social-auth,degs098/python-social-auth,JJediny/python-social-auth,cjltsod/python-social-auth,rsalmaso/python-social-auth,michael-borisov/python-social-auth,tkajtoch/python-social-auth,barseghyanartur/python-social-auth,tutumcloud/python-social-auth,robbiet480/python-social-auth,noodle-learns-programming/python-social-auth,falcon1kr/python-social-auth,jameslittle/python-social-auth,jeyraof/python-social-auth,san-mate/python-social-auth,python-social-auth/social-app-django,rsteca/python-social-auth,lamby/python-social-auth,chandolia/python-social-auth,imsparsh/python-social-auth,jeyraof/python-social-auth,Andygmb/python-social-auth,iruga090/python-social-auth,iruga090/python-social-auth,noodle-learns-programming/python-social-auth,msa
mpathkumar/python-social-auth,clef/python-social-auth,fearlessspider/python-social-auth,JerzySpendel/python-social-auth,hsr-ba-fs15-dat/python-social-auth,chandolia/python-social-auth,hsr-ba-fs15-dat/python-social-auth,DhiaEddineSaidi/python-social-auth,python-social-auth/social-core,rsalmaso/python-social-auth,robbiet480/python-social-auth,python-social-auth/social-docs,python-social-auth/social-app-cherrypy,drxos/python-social-auth,MSOpenTech/python-social-auth,firstjob/python-social-auth,contracode/python-social-auth,fearlessspider/python-social-auth,duoduo369/python-social-auth,alrusdi/python-social-auth,alrusdi/python-social-auth,mrwags/python-social-auth,mathspace/python-social-auth,hsr-ba-fs15-dat/python-social-auth,degs098/python-social-auth,python-social-auth/social-core,joelstanner/python-social-auth,webjunkie/python-social-auth,jeyraof/python-social-auth,san-mate/python-social-auth,clef/python-social-auth,Andygmb/python-social-auth,lawrence34/python-social-auth,drxos/python-social-auth,imsparsh/python-social-auth,michael-borisov/python-social-auth,noodle-learns-programming/python-social-auth,python-social-auth/social-app-django,lawrence34/python-social-auth,merutak/python-social-auth,DhiaEddineSaidi/python-social-auth,nirmalvp/python-social-auth,ariestiyansyah/python-social-auth,bjorand/python-social-auth,MSOpenTech/python-social-auth,yprez/python-social-auth,lawrence34/python-social-auth,ByteInternet/python-social-auth,nirmalvp/python-social-auth,nvbn/python-social-auth,lneoe/python-social-auth,S01780/python-social-auth,mark-adams/python-social-auth,firstjob/python-social-auth,joelstanner/python-social-auth,merutak/python-social-auth,mchdks/python-social-auth,wildtetris/python-social-auth | ---
+++
@@ -41,5 +41,5 @@
BACKENDS = {
- 'gae': GoogleAppEngineAuth
+ 'google-appengine': GoogleAppEngineAuth
} |
b5fe965315920cc10664b3e0ba52bc50bf717374 | sites/scrapy.org/site/urls.py | sites/scrapy.org/site/urls.py | from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from django.conf import settings
urlpatterns = patterns('',
(r"^$", direct_to_template, { "template": "home.html" }),
(r"^article/", include("article.urls")),
(r"^download/", include("download.urls")),
(r"^weblog/", include("blog.urls")),
(r"^admin/", include("django.contrib.admin.urls")),
)
if settings.DEBUG: # devel
urlpatterns += patterns('',
(r"^site-media/(?P<path>.*)$", "django.views.static.serve", { "document_root": settings.MEDIA_ROOT }),
)
| from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from django.conf import settings
urlpatterns = patterns('',
(r"^$", direct_to_template, { "template": "home.html" }),
(r"^article/", include("article.urls")),
(r"^download/", include("download.urls")),
(r"^link/", include("link.urls")),
(r"^weblog/", include("blog.urls")),
(r"^admin/", include("django.contrib.admin.urls")),
)
if settings.DEBUG: # devel
urlpatterns += patterns('',
(r"^site-media/(?P<path>.*)$", "django.views.static.serve", { "document_root": settings.MEDIA_ROOT }),
)
| Add link url to access position inc/dec views | Add link url to access position inc/dec views
--HG--
extra : convert_revision : svn%3Ab85faa78-f9eb-468e-a121-7cced6da292c%409
| Python | bsd-3-clause | zjuwangg/scrapy,famorted/scrapy,pranjalpatil/scrapy,yusofm/scrapy,Geeglee/scrapy,foromer4/scrapy,CodeJuan/scrapy,rahulsharma1991/scrapy,jeffreyjinfeng/scrapy,dhenyjarasandy/scrapy,emschorsch/scrapy,lacrazyboy/scrapy,webmakin/scrapy,cleydson/scrapy,Preetwinder/scrapy,olorz/scrapy,cyrixhero/scrapy,pfctdayelise/scrapy,farhan0581/scrapy,devGregA/scrapy,emschorsch/scrapy,nfunato/scrapy,Bourneer/scrapy,ndemir/scrapy,TarasRudnyk/scrapy,ssh-odoo/scrapy,pawelmhm/scrapy,GregoryVigoTorres/scrapy,elijah513/scrapy,hwsyy/scrapy,Lucifer-Kim/scrapy,redapple/scrapy,jdemaeyer/scrapy,rklabs/scrapy,agusc/scrapy,tliber/scrapy,Cnfc19932/scrapy,dacjames/scrapy,kalessin/scrapy,jeffreyjinfeng/scrapy,elacuesta/scrapy,Digenis/scrapy,mlyundin/scrapy,eliasdorneles/scrapy,dgillis/scrapy,yidongliu/scrapy,foromer4/scrapy,csalazar/scrapy,Ryezhang/scrapy,curita/scrapy,wujuguang/scrapy,sigma-random/scrapy,kazitanvirahsan/scrapy,wzyuliyang/scrapy,1yvT0s/scrapy,xiao26/scrapy,eliasdorneles/scrapy,Chenmxs/scrapy,cleydson/scrapy,tliber/scrapy,kalessin/scrapy,Preetwinder/scrapy,jorik041/scrapy,dhenyjarasandy/scrapy,scorphus/scrapy,nikgr95/scrapy,pawelmhm/scrapy,zhangtao11/scrapy,w495/scrapy,avtoritet/scrapy,dangra/scrapy,rahulsharma1991/scrapy,crasker/scrapy,github-account-because-they-want-it/scrapy,hansenDise/scrapy,finfish/scrapy,ramiro/scrapy,liyy7/scrapy,zorojean/scrapy,hyrole/scrapy,codebhendi/scrapy,URXtech/scrapy,github-account-because-they-want-it/scrapy,rahul-c1/scrapy,coderabhishek/scrapy,pranjalpatil/scrapy,jdemaeyer/scrapy,eLRuLL/scrapy,kmike/scrapy,songfj/scrapy,Allianzcortex/scrapy,hectoruelo/scrapy,agusc/scrapy,fafaman/scrapy,hectoruelo/scrapy,scorphus/scrapy,csalazar/scrapy,fpy171/scrapy,famorted/scrapy,jeffreyjinfeng/scrapy,yarikoptic/scrapy,olafdietsche/scrapy,famorted/scrapy,zjuwangg/scrapy,rahul-c1/scrapy,CENDARI/scrapy,crasker/scrapy,Adai0808/scrapy-1,pfctdayelise/scrapy,AaronTao1990/scrapy,jiezhu2007/scrapy,KublaikhanGeek/scrapy,foromer4/scrapy,songfj/scrapy,wenyu1001/scrapy,nguyenhongson03/scrapy,livepy/scrapy,Zephor5/scrapy,dangra/scrapy,ArturGaspar/scrapy,joshlk/scrapy,cyrixhero/scrapy,rolando-contrib/scrapy,amboxer21/scrapy,fontenele/scrapy,KublaikhanGeek/scrapy,jdemaeyer/scrapy,starrify/scrapy,kashyap32/scrapy,amboxer21/scrapy,Digenis/scrapy,rootAvish/scrapy,raphaelfruneaux/scrapy,dracony/scrapy,nikgr95/scrapy,zackslash/scrapy,YeelerG/scrapy,CodeJuan/scrapy,fqul/scrapy,crasker/scrapy,rdowinton/scrapy,yusofm/scrapy,legendtkl/scrapy,ramiro/scrapy,irwinlove/scrapy,sigma-random/scrapy,huoxudong125/scrapy,Chenmxs/scrapy,mouadino/scrapy,johnardavies/scrapy,chekunkov/scrapy,codebhendi/scrapy,WilliamKinaan/scrapy,moraesnicol/scrapy,hbwzhsh/scrapy,arush0311/scrapy,cyberplant/scrapy,scrapy/scrapy,Zephor5/scrapy,Slater-Victoroff/scrapy,elijah513/scrapy,cyberplant/scrapy,lacrazyboy/scrapy,Geeglee/scrapy,livepy/scrapy,cursesun/scrapy,nowopen/scrapy,JacobStevenR/scrapy,dracony/scrapy,1yvT0s/scrapy,Adai0808/scrapy-1,ssteo/scrapy,coderabhishek/scrapy,w495/scrapy,rklabs/scrapy,cyrixhero/scrapy,nett55/scrapy,irwinlove/scrapy,elacuesta/scrapy,olorz/scrapy,bmess/scrapy,nowopen/scrapy,codebhendi/scrapy,dhenyjarasandy/scrapy,dracony/scrapy,carlosp420/scrapy,beni55/scrapy,sardok/scrapy,lacrazyboy/scrapy,hbwzhsh/scrapy,Adai0808/scrapy-1,redapple/scrapy,zorojean/scrapy,Parlin-Galanodel/scrapy,tliber/scrapy,yarikoptic/scrapy,ssteo/scrapy,WilliamKinaan/scrapy,farhan0581/scrapy,hyrole/scrapy,raphaelfruneaux/scrapy,yarikoptic/scrapy,Ryezhang/scrapy,z-fork/scrapy,jamesblunt/scrapy,johnardavies/scrapy,gbirke
/scrapy,redapple/scrapy,avtoritet/scrapy,rolando-contrib/scrapy,URXtech/scrapy,snowdream1314/scrapy,rootAvish/scrapy,pombredanne/scrapy,nfunato/scrapy,nikgr95/scrapy,rootAvish/scrapy,liyy7/scrapy,Digenis/scrapy,mlyundin/scrapy,hyrole/scrapy,kashyap32/scrapy,zjuwangg/scrapy,dgillis/scrapy,dgillis/scrapy,shaform/scrapy,darkrho/scrapy-scrapy,rolando/scrapy,pablohoffman/scrapy,mgedmin/scrapy,rahul-c1/scrapy,Allianzcortex/scrapy,Timeship/scrapy,CodeJuan/scrapy,wangjun/scrapy,ENjOyAbLE1991/scrapy,legendtkl/scrapy,hansenDise/scrapy,starrify/scrapy,eLRuLL/scrapy,pawelmhm/scrapy,ssteo/scrapy,bmess/scrapy,irwinlove/scrapy,godfreyy/scrapy,stenskjaer/scrapy,JacobStevenR/scrapy,Ryezhang/scrapy,wenyu1001/scrapy,haiiiiiyun/scrapy,fpy171/scrapy,shaform/scrapy,eliasdorneles/scrapy,Djlavoy/scrapy,barraponto/scrapy,livepy/scrapy,agusc/scrapy,snowdream1314/scrapy,dacjames/scrapy,beni55/scrapy,taito/scrapy,coderabhishek/scrapy,kimimj/scrapy,OpenWhere/scrapy,jiezhu2007/scrapy,carlosp420/scrapy,tntC4stl3/scrapy,rdowinton/scrapy,rolando/scrapy,finfish/scrapy,joshlk/scrapy,profjrr/scrapy,eLRuLL/scrapy,rahulsharma1991/scrapy,mouadino/scrapy,curita/scrapy,kazitanvirahsan/scrapy,ENjOyAbLE1991/scrapy,tntC4stl3/scrapy,Lucifer-Kim/scrapy,kashyap32/scrapy,bmess/scrapy,cleydson/scrapy,zackslash/scrapy,pranjalpatil/scrapy,Partoo/scrapy,webmakin/scrapy,shaform/scrapy,ndemir/scrapy,Lucifer-Kim/scrapy,Cnfc19932/scrapy,mgedmin/scrapy,Timeship/scrapy,barraponto/scrapy,jc0n/scrapy,wangjun/scrapy,mgedmin/scrapy,olorz/scrapy,legendtkl/scrapy,OpenWhere/scrapy,nett55/scrapy,moraesnicol/scrapy,pablohoffman/scrapy,huoxudong125/scrapy,gnemoug/scrapy,z-fork/scrapy,CENDARI/scrapy,godfreyy/scrapy,emschorsch/scrapy,yidongliu/scrapy,rolando/scrapy,umrashrf/scrapy,fqul/scrapy,fpy171/scrapy,nowopen/scrapy,IvanGavran/scrapy,AaronTao1990/scrapy,Chenmxs/scrapy,Partoo/scrapy,taito/scrapy,stenskjaer/scrapy,olafdietsche/scrapy,beni55/scrapy,nett55/scrapy,smaty1/scrapy,avtoritet/scrapy,jorik041/scrapy,kmike/scrapy,starrify/scrapy,nguyenhongson03/scrapy,mlyundin/scrapy,Cnfc19932/scrapy,huoxudong125/scrapy,darkrho/scrapy-scrapy,Djlavoy/scrapy,IvanGavran/scrapy,CENDARI/scrapy,elacuesta/scrapy,ArturGaspar/scrapy,kmike/scrapy,devGregA/scrapy,smaty1/scrapy,URXtech/scrapy,Allianzcortex/scrapy,TarasRudnyk/scrapy,olafdietsche/scrapy,tagatac/scrapy,zackslash/scrapy,hwsyy/scrapy,jc0n/scrapy,Bourneer/scrapy,carlosp420/scrapy,yidongliu/scrapy,wzyuliyang/scrapy,github-account-because-they-want-it/scrapy,ashishnerkar1/scrapy,rdowinton/scrapy,cursesun/scrapy,w495/scrapy,fafaman/scrapy,jorik041/scrapy,wenyu1001/scrapy,Partoo/scrapy,wangjun/scrapy,chekunkov/scrapy,kalessin/scrapy,barraponto/scrapy,jc0n/scrapy,1yvT0s/scrapy,ylcolala/scrapy,ylcolala/scrapy,hectoruelo/scrapy,Djlavoy/scrapy,xiao26/scrapy,kimimj/scrapy,rklabs/scrapy,moraesnicol/scrapy,stenskjaer/scrapy,joshlk/scrapy,Geeglee/scrapy,pfctdayelise/scrapy,arush0311/scrapy,tntC4stl3/scrapy,OpenWhere/scrapy,taito/scrapy,tagatac/scrapy,nguyenhongson03/scrapy,heamon7/scrapy,zhangtao11/scrapy,JacobStevenR/scrapy,Slater-Victoroff/scrapy,zorojean/scrapy,curita/scrapy,elijah513/scrapy,aivarsk/scrapy,johnardavies/scrapy,jiezhu2007/scrapy,liyy7/scrapy,nfunato/scrapy,snowdream1314/scrapy,gbirke/scrapy,ssh-odoo/scrapy,umrashrf/scrapy,profjrr/scrapy,kazitanvirahsan/scrapy,kimimj/scrapy,YeelerG/scrapy,AaronTao1990/scrapy,ashishnerkar1/scrapy,raphaelfruneaux/scrapy,aivarsk/scrapy,scorphus/scrapy,rolando-contrib/scrapy,GregoryVigoTorres/scrapy,hansenDise/scrapy,Zephor5/scrapy,ENjOyAbLE1991/scrapy,pombredanne/scrapy,xiao26/scra
py,pombredanne/scrapy,hwsyy/scrapy,ylcolala/scrapy,chekunkov/scrapy,aivarsk/scrapy,farhan0581/scrapy,ssh-odoo/scrapy,wzyuliyang/scrapy,jamesblunt/scrapy,z-fork/scrapy,scrapy/scrapy,profjrr/scrapy,agreen/scrapy,Parlin-Galanodel/scrapy,cursesun/scrapy,fontenele/scrapy,ramiro/scrapy,amboxer21/scrapy,WilliamKinaan/scrapy,Bourneer/scrapy,agreen/scrapy,fafaman/scrapy,IvanGavran/scrapy,dacjames/scrapy,heamon7/scrapy,Parlin-Galanodel/scrapy,agreen/scrapy,KublaikhanGeek/scrapy,heamon7/scrapy,Timeship/scrapy,GregoryVigoTorres/scrapy,csalazar/scrapy,dangra/scrapy,zhangtao11/scrapy,Preetwinder/scrapy,tagatac/scrapy,devGregA/scrapy,scrapy/scrapy,cyberplant/scrapy,songfj/scrapy,hbwzhsh/scrapy,gnemoug/scrapy,wujuguang/scrapy,sardok/scrapy,TarasRudnyk/scrapy,umrashrf/scrapy,wujuguang/scrapy,ArturGaspar/scrapy,haiiiiiyun/scrapy,godfreyy/scrapy,smaty1/scrapy,finfish/scrapy,fontenele/scrapy,yusofm/scrapy,webmakin/scrapy,fqul/scrapy,YeelerG/scrapy,pablohoffman/scrapy,haiiiiiyun/scrapy,arush0311/scrapy,Slater-Victoroff/scrapy,darkrho/scrapy-scrapy | ---
+++
@@ -7,6 +7,7 @@
(r"^$", direct_to_template, { "template": "home.html" }),
(r"^article/", include("article.urls")),
(r"^download/", include("download.urls")),
+ (r"^link/", include("link.urls")),
(r"^weblog/", include("blog.urls")),
(r"^admin/", include("django.contrib.admin.urls")), |
b78c457d52702beb5067eb7c3067cb69af5e935d | itunes/exceptions.py | itunes/exceptions.py | """
exceptions.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file defines custom exceptions for the iTunes funcitonality.
"""
class ITunesError(Exception):
"""
Base exception class for iTunes interface.
"""
pass
class AppleScriptError(ITunesError):
"""
Represents an error received from AppleScript while running a script.
Parameters
----------
message : str
The message that the exception will hold.
script : str
The AppleScript that was running when this exception was raised (default
"").
Attributes
----------
script : str
The AppleScript that was running when this exception was raised, if one
was provided.
"""
def __init__(self, message, script=""):
super(AppleScriptError, self).__init__(message)
self.script = script
| """
exceptions.py
Copyright © 2015 Alex Danoff. All Rights Reserved.
2015-08-02
This file defines custom exceptions for the iTunes funcitonality.
"""
class ITunesError(Exception):
"""
Base exception class for iTunes interface.
"""
pass
class AppleScriptError(ITunesError):
"""
Represents an error received from AppleScript while running a script.
Parameters
----------
message : str
The message that the exception will hold.
script : str
The AppleScript that was running when this exception was raised (default
"").
Attributes
----------
script : str
The AppleScript that was running when this exception was raised, if one
was provided.
"""
def __init__(self, message, script=""):
super(AppleScriptError, self).__init__(message)
self.script = script
class TrackError(ITunesError):
"""
Represents an error in finding or playing a track.
Parameters
----------
message : str
The message that the exception will hold.
title : str
The title of the track that caused the error (default "").
Attributes
----------
title : str
The title of the track that caused the error.
"""
def __init__(self, message, title=""):
super(TrackError, self).__init__(message)
self.title = title
| Add custom exception for track-related errors | Add custom exception for track-related errors
The new exception type (`TrackError`) will be used when a track cannot
be played or found.
| Python | mit | adanoff/iTunesTUI | ---
+++
@@ -36,3 +36,25 @@
super(AppleScriptError, self).__init__(message)
self.script = script
+
+class TrackError(ITunesError):
+ """
+ Represents an error in finding or playing a track.
+
+ Parameters
+ ----------
+ message : str
+ The message that the exception will hold.
+ title : str
+ The title of the track that caused the error (default "").
+
+ Attributes
+ ----------
+ title : str
+ The title of the track that caused the error.
+ """
+
+ def __init__(self, message, title=""):
+
+ super(TrackError, self).__init__(message)
+ self.title = title |
b339c25068e849dbbf769f22893125b15325eb66 | figgypy/utils.py | figgypy/utils.py | import os
def env_or_default(var, default=None):
"""Get environment variable or provide default.
Args:
var (str): environment variable to search for
default (optional(str)): default to return
"""
if var in os.environ:
return os.environ[var]
return default
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from future.utils import bytes_to_native_str as n
from base64 import b64encode
import os
import boto3
def env_or_default(var, default=None):
"""Get environment variable or provide default.
Args:
var (str): environment variable to search for
default (optional(str)): default to return
"""
if var in os.environ:
return os.environ[var]
return default
def kms_encrypt(value, key, aws_config=None):
"""Encrypt and value with KMS key.
Args:
value (str): value to encrypt
key (str): key id or alias
aws_config (optional[dict]): aws credentials
dict of arguments passed into boto3 session
example:
aws_creds = {'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key,
'region_name': 'us-east-1'}
Returns:
str: encrypted cipher text
"""
aws_config = aws_config or {}
aws = boto3.session.Session(**aws_config)
client = aws.client('kms')
enc_res = client.encrypt(KeyId=key,
Plaintext=value)
return n(b64encode(enc_res['CiphertextBlob']))
| Add new helper function to encrypt for KMS | Add new helper function to encrypt for KMS
| Python | mit | theherk/figgypy | ---
+++
@@ -1,4 +1,11 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+from future.utils import bytes_to_native_str as n
+
+from base64 import b64encode
import os
+
+import boto3
def env_or_default(var, default=None):
@@ -11,3 +18,27 @@
if var in os.environ:
return os.environ[var]
return default
+
+
+def kms_encrypt(value, key, aws_config=None):
+ """Encrypt and value with KMS key.
+
+ Args:
+ value (str): value to encrypt
+ key (str): key id or alias
+ aws_config (optional[dict]): aws credentials
+ dict of arguments passed into boto3 session
+ example:
+ aws_creds = {'aws_access_key_id': aws_access_key_id,
+ 'aws_secret_access_key': aws_secret_access_key,
+ 'region_name': 'us-east-1'}
+
+ Returns:
+ str: encrypted cipher text
+ """
+ aws_config = aws_config or {}
+ aws = boto3.session.Session(**aws_config)
+ client = aws.client('kms')
+ enc_res = client.encrypt(KeyId=key,
+ Plaintext=value)
+ return n(b64encode(enc_res['CiphertextBlob'])) |
0f4ca12e524be7cbd82ac79e81a62015b47ca6ef | openfisca_core/tests/formula_helpers.py | openfisca_core/tests/formula_helpers.py | # -*- coding: utf-8 -*-
import numpy
from nose.tools import raises
from openfisca_core.formula_helpers import apply_threshold as apply_threshold
from openfisca_core.tools import assert_near
@raises(AssertionError)
def test_apply_threshold_with_too_many_thresholds():
input = numpy.array([10])
thresholds = [5]
outputs = [10]
return apply_threshold(input, thresholds, outputs)
@raises(AssertionError)
def test_apply_threshold_with_too_few_thresholds():
input = numpy.array([10])
thresholds = [5]
outputs = [10, 15, 20]
return apply_threshold(input, thresholds, outputs)
def test_apply_threshold():
input = numpy.array([4, 5, 6, 7, 8])
thresholds = [5, 7]
outputs = [10, 15, 20]
result = apply_threshold(input, thresholds, outputs)
assert_near(result, [10, 10, 15, 15, 20])
| # -*- coding: utf-8 -*-
import numpy
from nose.tools import raises
from openfisca_core.formula_helpers import apply_threshold as apply_threshold
from openfisca_core.tools import assert_near
@raises(AssertionError)
def test_apply_threshold_with_too_many_thresholds():
input = numpy.array([10])
thresholds = [5]
outputs = [10]
return apply_threshold(input, thresholds, outputs)
@raises(AssertionError)
def test_apply_threshold_with_too_few_thresholds():
input = numpy.array([10])
thresholds = [5]
outputs = [10, 15, 20]
return apply_threshold(input, thresholds, outputs)
def test_apply_threshold():
input = numpy.array([4, 5, 6, 7, 8])
thresholds = [5, 7]
outputs = [10, 15, 20]
result = apply_threshold(input, thresholds, outputs)
assert_near(result, [10, 10, 15, 15, 20])
def test_apply_threshold_with_variable_threshold():
input = numpy.array([1000, 1000, 1000])
thresholds = [numpy.array([500, 1500, 1000])] # Only one thresold, but varies with the person
outputs = [True, False] # True if input <= threshold, false otherwise
result = apply_threshold(input, thresholds, outputs)
assert_near(result, [False, True, True])
| Add more tricky case test for apply_threshold | Add more tricky case test for apply_threshold
| Python | agpl-3.0 | benjello/openfisca-core,openfisca/openfisca-core,benjello/openfisca-core,sgmap/openfisca-core,openfisca/openfisca-core | ---
+++
@@ -26,3 +26,10 @@
outputs = [10, 15, 20]
result = apply_threshold(input, thresholds, outputs)
assert_near(result, [10, 10, 15, 15, 20])
+
+def test_apply_threshold_with_variable_threshold():
+ input = numpy.array([1000, 1000, 1000])
+ thresholds = [numpy.array([500, 1500, 1000])] # Only one thresold, but varies with the person
+ outputs = [True, False] # True if input <= threshold, false otherwise
+ result = apply_threshold(input, thresholds, outputs)
+ assert_near(result, [False, True, True]) |
342e6134a63c5b575ae8e4348a54f61350bca2da | parser/crimeparser/pipelinesEnricher.py | parser/crimeparser/pipelinesEnricher.py | from geopy import Nominatim
from geopy.extra.rate_limiter import RateLimiter
class GeoCodePipeline(object):
def open_spider(self, spider):
geolocator = Nominatim(timeout=5)
self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)
def process_item(self, item, spider):
for crime in item["crimes"]:
place = crime["place"]
latitude, longitude = self.__geocode_address(place)
crime["latitude"] = latitude
crime["longitude"] = longitude
return item
def __geocode_address(self, place):
if place is None:
return None, None
location = self.__geocodeFunc(place)
if location is not None:
return location.latitude, location.longitude
else:
return None, None
| from geopy import Nominatim, Photon
from geopy.extra.rate_limiter import RateLimiter
class GeoCodePipeline(object):
def open_spider(self, spider):
geolocator = Photon(timeout=5)
self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)
def process_item(self, item, spider):
for crime in item["crimes"]:
place = crime["place"]
latitude, longitude = self.__geocode_address(place)
crime["latitude"] = latitude
crime["longitude"] = longitude
return item
def __geocode_address(self, place):
if place is None:
return None, None
location = self.__geocodeFunc(place)
if location is not None:
return location.latitude, location.longitude
else:
return None, None
| Use Phonon instead of Nominatim for geo coding | Use Phonon instead of Nominatim for geo coding
Phonon is more fault tolerant to spelling mistakes.
| Python | mit | aberklotz/crimereport,aberklotz/crimereport,aberklotz/crimereport | ---
+++
@@ -1,11 +1,11 @@
-from geopy import Nominatim
+from geopy import Nominatim, Photon
from geopy.extra.rate_limiter import RateLimiter
class GeoCodePipeline(object):
def open_spider(self, spider):
- geolocator = Nominatim(timeout=5)
+ geolocator = Photon(timeout=5)
self.__geocodeFunc = RateLimiter(geolocator.geocode, min_delay_seconds=2)
def process_item(self, item, spider): |
5398a864449db0a1d6ec106ddb839fff3b6afcda | mopidy_frontpanel/frontend.py | mopidy_frontpanel/frontend.py | from __future__ import unicode_literals
import logging
from mopidy.core import CoreListener
import pykka
import .menu import BrowseMenu
import .painter import Painter
logger = logging.getLogger(__name__)
class FrontPanel(pykka.ThreadingActor, CoreListener):
def __init__(self, config, core):
super(FrontPanel, self).__init__()
self.core = core
self.painter = Painter(core, self)
self.menu = BrowseMenu(core)
def on_start(self):
self.painter.start()
def handleInput(self, input):
self.menu.handleInput(input)
self.painter.update()
def track_playback_started(self, tl_track):
self.painter.update()
def track_playback_ended(self, tl_track, time_position):
self.painter.update()
| from __future__ import unicode_literals
import logging
from mopidy.core import CoreListener
import pykka
import .menu import BrowseMenu
import .painter import Painter
logger = logging.getLogger(__name__)
class FrontPanel(pykka.ThreadingActor, CoreListener):
def __init__(self, config, core):
super(FrontPanel, self).__init__()
self.core = core
self.painter = Painter(core, self)
self.menu = BrowseMenu(core)
def on_start(self):
self.painter.start()
def handleInput(self, input):
if (input == "play"):
pass
elif (input == "pause"):
pass
elif (input == "stop"):
pass
elif (input == "vol_up"):
pass
elif (input == "vol_down"):
pass
else:
self.menu.handleInput(input)
self.painter.update()
def track_playback_started(self, tl_track):
self.painter.update()
def track_playback_ended(self, tl_track, time_position):
self.painter.update()
| Handle playback changes in FrontPanel | Handle playback changes in FrontPanel
| Python | apache-2.0 | nick-bulleid/mopidy-frontpanel | ---
+++
@@ -22,7 +22,19 @@
self.painter.start()
def handleInput(self, input):
- self.menu.handleInput(input)
+ if (input == "play"):
+ pass
+ elif (input == "pause"):
+ pass
+ elif (input == "stop"):
+ pass
+ elif (input == "vol_up"):
+ pass
+ elif (input == "vol_down"):
+ pass
+ else:
+ self.menu.handleInput(input)
+
self.painter.update()
def track_playback_started(self, tl_track): |
8d52686ddf40266b0022b77816cf3899c726a834 | notifications/upcoming_match.py | notifications/upcoming_match.py | import calendar
import datetime
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
self._event_feed = event.key_name
self._district_feed = event.event_district_enum
@property
def _type(self):
return NotificationType.UPCOMING_MATCH
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
if self.match.time:
data['message_data']['scheduled_time'] = calendar.timegm(self.match.time.utctimetuple())
data['message_data']['predicted_time'] = calendar.timegm(self.match.time.utctimetuple()) # TODO Add in some time predictions
else:
data['message_data']['scheduled_time'] = None
data['message_data']['predicted_time'] = None
return data
| import calendar
import datetime
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
self._event_feed = event.key_name
self._district_feed = event.event_district_enum
@property
def _type(self):
return NotificationType.UPCOMING_MATCH
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event_key'] = self.event.key_name
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
if self.match.time:
data['message_data']['scheduled_time'] = calendar.timegm(self.match.time.utctimetuple())
data['message_data']['predicted_time'] = calendar.timegm(self.match.time.utctimetuple()) # TODO Add in some time predictions
else:
data['message_data']['scheduled_time'] = None
data['message_data']['predicted_time'] = None
return data
| Add event key to upcoming match notification | Add event key to upcoming match notification | Python | mit | fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,tsteward/the-blue-alliance | ---
+++
@@ -21,6 +21,7 @@
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
+ data['message_data']['event_key'] = self.event.key_name
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names |
77038432486071c9459c5ce43492905e158b7713 | Topo/LoopTopo.py | Topo/LoopTopo.py | '''
SDN project testing topo
s1
/ \
s2--s3
| |
host.. host...
'''
from mininet.topo import Topo
class LoopTopo( Topo ):
def __init__( self , n=2 ):
# Initialize topology
Topo.__init__( self)
# Add Host
h1 = self.addHost( 'h1' )
h2 = self.addHost( 'h2' )
h3 = self.addHost( 'h3' )
h4 = self.addHost( 'h4' )
h5 = self.addHost( 'h5' )
h6 = self.addHost( 'h6' )
# Add Switch
s1 = self.addSwitch( 's1' )
s2 = self.addSwitch( 's2' )
s3 = self.addSwitch( 's3' )
# Add Link
self.addLink( s1, s2 )
self.addLink( s1, s3 )
self.addLink( s2, s3 )
self.addLink( s2, h1 )
self.addLink( s2, h2 )
self.addLink( s2, h3 )
self.addLink( s3, h4 )
self.addLink( s3, h5 )
self.addLink( s3, h6 )
topos = { 'LoopTopo': ( lambda: LoopTopo() ) }
| '''
SDN project testing topo
s1
/ \
s2--s3
| |
host.. host...
'''
from mininet.topo import Topo
class LoopTopo( Topo ):
def __init__( self , n=2 ):
# Initialize topology
Topo.__init__( self)
# Add Host
h1 = self.addHost( 'h1' )
h2 = self.addHost( 'h2' )
h3 = self.addHost( 'h3' )
h4 = self.addHost( 'h4' )
h5 = self.addHost( 'h5' )
h6 = self.addHost( 'h6' )
# Add Switch
s1 = self.addSwitch( 's1' )
s2 = self.addSwitch( 's2' )
s3 = self.addSwitch( 's3' )
# Add Link
self.addLink( s1, s2 )
self.addLink( s1, s3 )
self.addLink( s2, s3 )
self.addLink( s2, h1 )
self.addLink( s2, h2 )
self.addLink( s2, h3 )
self.addLink( s3, h4 )
self.addLink( s3, h5 )
self.addLink( s3, h6 )
topos = { 'Loop': ( lambda: LoopTopo() ) }
| Rename the name of topo. | Rename the name of topo.
| Python | mit | ray6/sdn,ray6/sdn,ray6/sdn | ---
+++
@@ -41,4 +41,4 @@
self.addLink( s3, h5 )
self.addLink( s3, h6 )
-topos = { 'LoopTopo': ( lambda: LoopTopo() ) }
+topos = { 'Loop': ( lambda: LoopTopo() ) } |
35849633c3ce751445ffca92410001513d445ce9 | code2html/cli.py | code2html/cli.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from args import get_args
from subprocess import Popen, PIPE
if __name__ == '__main__':
# Get the arguments passed by user
args = get_args()
# TODO: Check whether Vim is available
# TODO: Arguments validation
# TODO: Test the inupt/output directories
# TODO: Call Vim to do the conversion
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from args import get_args
from subprocess import Popen, PIPE
if __name__ == '__main__':
# Get the arguments passed by user
args = get_args()
# Check whether Vim is available
p1 = Popen(["vim", "--version"], stdout=PIPE)
p2 = Popen(["grep", "IMproved"], stdin=p1.stdout, stdout=PIPE)
vim_header = p2.communicate()[0].strip('\n')
if vim_header:
pass # Vim detected
else:
sys.exit(u'ERROR: Vim is not yet installed on this system, aborted.')
# TODO: Arguments validation
# TODO: Test the inupt/output directories
# TODO: Call Vim to do the conversion
| Check whether Vim is available | Check whether Vim is available
| Python | mit | kfei/code2html | ---
+++
@@ -9,7 +9,14 @@
# Get the arguments passed by user
args = get_args()
- # TODO: Check whether Vim is available
+ # Check whether Vim is available
+ p1 = Popen(["vim", "--version"], stdout=PIPE)
+ p2 = Popen(["grep", "IMproved"], stdin=p1.stdout, stdout=PIPE)
+ vim_header = p2.communicate()[0].strip('\n')
+ if vim_header:
+ pass # Vim detected
+ else:
+ sys.exit(u'ERROR: Vim is not yet installed on this system, aborted.')
# TODO: Arguments validation
|
76b916c6f53d97b4658c16a85f10302e75794bcd | kitsune/upload/storage.py | kitsune/upload/storage.py | import hashlib
import itertools
import os
import time
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from storages.backends.s3boto3 import S3Boto3Storage
DjangoStorage = S3Boto3Storage if settings.AWS_ACCESS_KEY_ID else FileSystemStorage
class RenameFileStorage(DjangoStorage):
"""Subclass Django's file system storage to add our file naming
conventions."""
def get_available_name(self, name):
dir_name, file_name = os.path.split(name)
file_root, file_ext = os.path.splitext(file_name)
# Set file_root to something we like: clean and all ascii
md5_sub = hashlib.md5(file_root.encode('utf8')).hexdigest()[0:6]
file_root = time.strftime('%Y-%m-%d-%H-%M-%S-',
time.localtime()) + md5_sub
name = os.path.join(dir_name, file_root + file_ext)
# If the filename already exists, add an underscore and a number
# (before the file extension, if one exists) to the filename until
# the generated filename doesn't exist.
count = itertools.count(1)
while self.exists(name):
# file_ext includes the dot.
name = os.path.join(dir_name, "%s_%s%s" %
(file_root, count.next(), file_ext))
return name
| import hashlib
import itertools
import os
import time
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from storages.backends.s3boto3 import S3Boto3Storage
DjangoStorage = S3Boto3Storage if settings.AWS_ACCESS_KEY_ID else FileSystemStorage
class RenameFileStorage(DjangoStorage):
"""Subclass Django's file system storage to add our file naming
conventions."""
def get_available_name(self, name, max_length=None):
dir_name, file_name = os.path.split(name)
file_root, file_ext = os.path.splitext(file_name)
# Set file_root to something we like: clean and all ascii
md5_sub = hashlib.md5(file_root.encode('utf8')).hexdigest()[0:6]
file_root = time.strftime('%Y-%m-%d-%H-%M-%S-',
time.localtime()) + md5_sub
name = os.path.join(dir_name, file_root + file_ext)
# If the filename already exists, add an underscore and a number
# (before the file extension, if one exists) to the filename until
# the generated filename doesn't exist.
count = itertools.count(1)
while self.exists(name):
# file_ext includes the dot.
name = os.path.join(dir_name, "%s_%s%s" %
(file_root, count.next(), file_ext))
return name
| Update RenameFileStorage method to be 1.11 compatible | Update RenameFileStorage method to be 1.11 compatible
| Python | bsd-3-clause | mozilla/kitsune,anushbmx/kitsune,mozilla/kitsune,anushbmx/kitsune,anushbmx/kitsune,mozilla/kitsune,mozilla/kitsune,anushbmx/kitsune | ---
+++
@@ -16,7 +16,7 @@
"""Subclass Django's file system storage to add our file naming
conventions."""
- def get_available_name(self, name):
+ def get_available_name(self, name, max_length=None):
dir_name, file_name = os.path.split(name)
file_root, file_ext = os.path.splitext(file_name)
|
f0d16962b2b07e602cff15391cc44bfd199a4d43 | pywikibot/families/wikivoyage_family.py | pywikibot/families/wikivoyage_family.py | # -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2017
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
__version__ = '$Id$'
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
def __init__(self):
"""Constructor."""
self.languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'fi', 'es',
'zh', 'he', 'vi', 'sv', 'el', 'ro', 'uk',
]
super(Family, self).__init__()
self.category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
self.cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
| # -*- coding: utf-8 -*-
"""Family module for Wikivoyage."""
#
# (C) Pywikibot team, 2012-2017
#
# Distributed under the terms of the MIT license.
#
# The new wikivoyage family that is hosted at wikimedia
from __future__ import absolute_import, unicode_literals
from pywikibot import family
__version__ = '$Id$'
class Family(family.SubdomainFamily, family.WikimediaFamily):
"""Family class for Wikivoyage."""
name = 'wikivoyage'
def __init__(self):
"""Constructor."""
self.languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'fi', 'es',
'zh', 'he', 'vi', 'sv', 'el', 'ro', 'uk', 'hi',
]
super(Family, self).__init__()
self.category_redirect_templates = {
'_default': (),
'zh': ('分类重定向',),
}
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/BPI#Current_implementation
# & https://meta.wikimedia.org/wiki/Special:WikiSets/2
self.cross_allowed = [
'el', 'en', 'es', 'fa', 'ru',
]
| Add hi.wikivoyage to wikivoyage family | Add hi.wikivoyage to wikivoyage family
Bug: T173013
Change-Id: I82e708c0b55df2e48c83838a4db38e43bf0b5085
| Python | mit | PersianWikipedia/pywikibot-core,wikimedia/pywikibot-core,magul/pywikibot-core,wikimedia/pywikibot-core,magul/pywikibot-core | ---
+++
@@ -23,7 +23,7 @@
"""Constructor."""
self.languages_by_size = [
'en', 'de', 'fa', 'it', 'fr', 'pl', 'ru', 'nl', 'pt', 'fi', 'es',
- 'zh', 'he', 'vi', 'sv', 'el', 'ro', 'uk',
+ 'zh', 'he', 'vi', 'sv', 'el', 'ro', 'uk', 'hi',
]
super(Family, self).__init__() |
669325d6ca93f81c4635d7d3d57120d8e23e5251 | organizations/backends/forms.py | organizations/backends/forms.py | from django import forms
from django.contrib.auth.models import User
class InvitationRegistrationForm(forms.ModelForm):
first_name = forms.CharField(max_length=30)
last_name = forms.CharField(max_length=30)
password = forms.CharField(max_length=30, widget=forms.PasswordInput)
password_confirm = forms.CharField(max_length=30,
widget=forms.PasswordInput)
class Meta:
model = User
| from django import forms
from django.contrib.auth.models import User
class InvitationRegistrationForm(forms.ModelForm):
first_name = forms.CharField(max_length=30)
last_name = forms.CharField(max_length=30)
password = forms.CharField(max_length=30, widget=forms.PasswordInput)
password_confirm = forms.CharField(max_length=30,
widget=forms.PasswordInput)
def __init__(self, *args, **kwargs):
super(InvitationRegistrationForm, self).__init__(*args, **kwargs)
self.initial['username'] = ''
class Meta:
model = User
exclude = ('is_staff', 'is_superuser', 'is_active', 'last_login',
'date_joined', 'groups', 'user_permissions')
| Hide all unnecessary user info | Hide all unnecessary user info
Excludes all User fields save for useranme, first/last name, email, and
password. Also clears the username of its default data.
| Python | bsd-2-clause | aptivate/django-organizations,arteria/django-ar-organizations,GauthamGoli/django-organizations,aptivate/django-organizations,DESHRAJ/django-organizations,DESHRAJ/django-organizations,GauthamGoli/django-organizations,st8st8/django-organizations,bennylope/django-organizations,arteria/django-ar-organizations,aptivate/django-organizations,st8st8/django-organizations,bennylope/django-organizations | ---
+++
@@ -9,6 +9,12 @@
password_confirm = forms.CharField(max_length=30,
widget=forms.PasswordInput)
+ def __init__(self, *args, **kwargs):
+ super(InvitationRegistrationForm, self).__init__(*args, **kwargs)
+ self.initial['username'] = ''
+
class Meta:
model = User
+ exclude = ('is_staff', 'is_superuser', 'is_active', 'last_login',
+ 'date_joined', 'groups', 'user_permissions')
|
16fdad8ce40a539d732c8def4898aae0f2d58cd0 | foxybot/registrar.py | foxybot/registrar.py |
class CommandRegistrar():
"""A singleton to manage the command table and command execution"""
_instance = None
def __init__(self):
self.command_table = {}
@staticmethod
def instance():
"""Get the singleton, create an instance if needed"""
if not CommandRegistrar._instance:
CommandRegistrar._instance = CommandRegistrar()
return CommandRegistrar._instance
@staticmethod
async def execute_command(shards, shard, msg):
# !roll 100 -> 'roll'
instance = CommandRegistrar.instance()
command = msg.content[1:].split(' ')[0].lower()
if command in instance.command_table.keys():
await instance.command_table[command].execute(shards, shard, msg)
@property
def loaded_commands(self):
return [command.name for command in set(self.command_table.values())]
@property
def loaded_aliases(self):
return list(self.command_table.keys())
|
class CommandRegistrar():
"""A singleton to manage the command table and command execution"""
_instance = None
def __init__(self):
self.command_table = {}
@staticmethod
def instance():
"""Get the singleton, create an instance if needed"""
if not CommandRegistrar._instance:
CommandRegistrar._instance = CommandRegistrar()
return CommandRegistrar._instance
@staticmethod
async def execute_command(shards, shard, msg):
# !roll 100 -> 'roll'
instance = CommandRegistrar.instance()
command = msg.content[1:].split(' ')[0].lower()
if command in instance.command_table.keys():
await instance.command_table[command].execute(shards, shard, msg)
@property
def commands(self):
return self.command_table
@property
def loaded_commands(self):
return [command.name for command in set(self.command_table.values())]
@property
def loaded_aliases(self):
return list(self.command_table.keys())
| Add `commands` property to `CommandManager` to allow retrieving the command table | Add `commands` property to `CommandManager` to allow retrieving the command table
| Python | bsd-2-clause | 6180/foxybot | ---
+++
@@ -26,6 +26,10 @@
@property
+ def commands(self):
+ return self.command_table
+
+ @property
def loaded_commands(self):
return [command.name for command in set(self.command_table.values())]
|
5bb0259a747651290f91c0384ca93492a423c82d | IPython/utils/docs.py | IPython/utils/docs.py | # encoding: utf-8
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import os
GENERATING_DOCUMENTATION = os.environ.get("IN_SPHINX_RUN", None) == "True"
| import os
GENERATING_DOCUMENTATION = os.environ.get("IN_SPHINX_RUN", None) == "True"
| Remove outdated header as suggested | Remove outdated header as suggested
Co-authored-by: Matthias Bussonnier <[email protected]> | Python | bsd-3-clause | ipython/ipython,ipython/ipython | ---
+++
@@ -1,7 +1,3 @@
-# encoding: utf-8
-
-# Copyright (c) IPython Development Team.
-# Distributed under the terms of the Modified BSD License.
import os
GENERATING_DOCUMENTATION = os.environ.get("IN_SPHINX_RUN", None) == "True" |
bc50a924c50fb22a0ac03b3b696d6fba4efcd120 | src/main.py | src/main.py | #!/usr/bin/env python2
import sys
from direct.showbase.ShowBase import ShowBase
import panda3d.core as p3d
import ecs
from player import PlayerController
class NodePathComponent(ecs.Component):
__slots__ = [
"nodepath",
]
def __init__(self, modelpath=None):
if modelpath is not None:
self.nodepath = base.loader.loadModel(modelpath)
else:
self.nodepath = p3d.NodePath(p3d.PandaNode('node'))
class Sigurd(ShowBase):
def __init__(self):
ShowBase.__init__(self)
self.disableMouse()
self.ecsmanager = ecs.ECSManager()
def run_ecs(task):
self.ecsmanager.update(0)
task.cont
self.taskMgr.add(run_ecs, 'ECS')
level = ecs.Entity()
np_component = NodePathComponent('models/level')
np_component.nodepath.reparent_to(base.render)
self.ecsmanager.add_entity(level)
PlayerController(self.camera)
self.camLens.setFov(90)
self.accept('escape-up', sys.exit)
if __name__ == '__main__':
app = Sigurd()
app.run()
| #!/usr/bin/env python2
import math
import sys
from direct.showbase.ShowBase import ShowBase
import panda3d.core as p3d
import ecs
from player import PlayerController
class NodePathComponent(ecs.Component):
__slots__ = [
"nodepath",
]
def __init__(self, modelpath=None):
if modelpath is not None:
self.nodepath = base.loader.loadModel(modelpath)
else:
self.nodepath = p3d.NodePath(p3d.PandaNode('node'))
class Sigurd(ShowBase):
def __init__(self):
ShowBase.__init__(self)
self.disableMouse()
self.ecsmanager = ecs.ECSManager()
def run_ecs(task):
self.ecsmanager.update(0)
task.cont
self.taskMgr.add(run_ecs, 'ECS')
level = ecs.Entity()
np_component = NodePathComponent('models/level')
np_component.nodepath.reparent_to(base.render)
self.ecsmanager.add_entity(level)
PlayerController(self.camera)
self.accept('escape-up', sys.exit)
self.accept('aspectRatioChanged', self.cb_resize)
def cb_resize(self):
vfov = 70
aspect = self.camLens.get_aspect_ratio()
hfov = math.degrees(2 * math.atan(math.tan(math.radians(vfov)/2.0) * aspect))
print(hfov)
self.camLens.setFov(hfov, vfov)
if __name__ == '__main__':
app = Sigurd()
app.run()
| Change fov scaling to "Hor+". | Change fov scaling to "Hor+".
| Python | apache-2.0 | Moguri/sigurd | ---
+++
@@ -1,4 +1,5 @@
#!/usr/bin/env python2
+import math
import sys
from direct.showbase.ShowBase import ShowBase
@@ -36,9 +37,16 @@
self.ecsmanager.add_entity(level)
PlayerController(self.camera)
- self.camLens.setFov(90)
self.accept('escape-up', sys.exit)
+ self.accept('aspectRatioChanged', self.cb_resize)
+
+ def cb_resize(self):
+ vfov = 70
+ aspect = self.camLens.get_aspect_ratio()
+ hfov = math.degrees(2 * math.atan(math.tan(math.radians(vfov)/2.0) * aspect))
+ print(hfov)
+ self.camLens.setFov(hfov, vfov)
if __name__ == '__main__':
app = Sigurd() |
6d2d915d7bec4e4a8e733a073ec3dc79a1d06812 | src/stop.py | src/stop.py | import os
import json
from flask import Flask
from flask import request
from flask import json
import services
app = Flask(__name__)
digitransitAPIService = services.DigitransitAPIService()
@app.route('/')
def hello_world():
return 'Hello World!'
@app.route('/test')
def digitransit_test():
return json.dumps(digitransitAPIService.get_stops(60.203978, 24.9633573))
@app.route('/stops', methods=['GET'])
def stops():
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
print(result)
return json.dumps(result)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000'))
| import os
import json
from flask import Flask
from flask import make_response
from flask import request
from flask import json
import services
app = Flask(__name__)
digitransitAPIService = services.DigitransitAPIService()
@app.route('/')
def hello_world():
return 'Hello World!'
@app.route('/test')
def digitransit_test():
return json.dumps(digitransitAPIService.get_stops(60.203978, 24.9633573))
@app.route('/stops', methods=['GET'])
def stops():
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
resp = make_response(json.dumps(result))
resp.mimetype = 'application/json'
return resp
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000'))
| Set response content type of a json response to application/json | Set response content type of a json response to application/json
| Python | mit | STOP2/stop2.0-backend,STOP2/stop2.0-backend | ---
+++
@@ -1,6 +1,7 @@
import os
import json
from flask import Flask
+from flask import make_response
from flask import request
from flask import json
@@ -27,8 +28,9 @@
lat = float(request.args.get('lat'))
lon = float(request.args.get('lon'))
result = digitransitAPIService.get_stops(lat, lon)
- print(result)
- return json.dumps(result)
+ resp = make_response(json.dumps(result))
+ resp.mimetype = 'application/json'
+ return resp
if __name__ == '__main__':
app.run(host='0.0.0.0', port=os.getenv('PORT', '5000')) |
6327703fa1a0a7463facc9574bf74ef9dace7d15 | dthm4kaiako/config/__init__.py | dthm4kaiako/config/__init__.py | """Configuration for Django system."""
__version__ = "0.8.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| """Configuration for Django system."""
__version__ = "0.8.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| Increment version number to 0.8.2 | Increment version number to 0.8.2
| Python | mit | uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers | ---
+++
@@ -1,6 +1,6 @@
"""Configuration for Django system."""
-__version__ = "0.8.1"
+__version__ = "0.8.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num |
f40b42890fef78483825525dfbfdc226a50dc9c1 | scripts/master/factory/dart/channels.py | scripts/master/factory/dart/channels.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.1', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.2', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
| Update stable channel builders to the 1.2 branch | Update stable channel builders to the 1.2 branch
Review URL: https://codereview.chromium.org/179723002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@253135 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build | ---
+++
@@ -19,7 +19,7 @@
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 3),
Channel('dev', 'trunk', 1, '-dev', 2),
- Channel('stable', 'branches/1.1', 2, '-stable', 1),
+ Channel('stable', 'branches/1.2', 2, '-stable', 1),
]
CHANNELS_BY_NAME = {} |
7cfdf48bd04ba45a962901e1778ba05bab4699e6 | readthedocs/core/migrations/0005_migrate-old-passwords.py | readthedocs/core/migrations/0005_migrate-old-passwords.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-11 17:28
from __future__ import unicode_literals
from django.db import migrations
def forwards_func(apps, schema_editor):
User = apps.get_model('auth', 'User')
old_password_patterns = (
'sha1$',
# RTD's production database doesn't have any of these
# but they are included for completeness
'md5$',
'crypt$',
)
for pattern in old_password_patterns:
users = User.objects.filter(password__startswith=pattern)
for user in users:
user.set_unusable_password()
user.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0004_ad-opt-out'),
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.RunPython(forwards_func),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-11 17:28
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.hashers import make_password
def forwards_func(apps, schema_editor):
User = apps.get_model('auth', 'User')
old_password_patterns = (
'sha1$',
# RTD's production database doesn't have any of these
# but they are included for completeness
'md5$',
'crypt$',
)
for pattern in old_password_patterns:
users = User.objects.filter(password__startswith=pattern)
for user in users:
user.password = make_password(None)
user.save()
class Migration(migrations.Migration):
dependencies = [
('core', '0004_ad-opt-out'),
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.RunPython(forwards_func),
]
| Migrate old passwords without "set_unusable_password" | Migrate old passwords without "set_unusable_password"
| Python | mit | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | ---
+++
@@ -3,6 +3,7 @@
from __future__ import unicode_literals
from django.db import migrations
+from django.contrib.auth.hashers import make_password
def forwards_func(apps, schema_editor):
@@ -18,7 +19,7 @@
for pattern in old_password_patterns:
users = User.objects.filter(password__startswith=pattern)
for user in users:
- user.set_unusable_password()
+ user.password = make_password(None)
user.save()
|
673f4ad22ccd14f9feb68cfc3afc1f34580c0a51 | test/teeminus10_helpers_test.py | test/teeminus10_helpers_test.py | from teeminus10_helpers import *
import unittest
class TestInTimeOfDay(unittest.TestCase):
def setUp(self):
self.location = ephem.city('London')
self.location.date = datetime(2013, 03, 14, 9, 0, 0)
self.pass_day_time = datetime(2013, 03, 14, 12, 0, 0)
self.pass_night_time = datetime(2013, 03, 14, 0, 0, 0)
self.sun = ephem.Sun("2013/03/14")
def test_pass_in_whatever_time(self):
self.assertTrue(in_time_of_day(self.location, self.pass_day_time, "whatever"))
self.assertTrue(in_time_of_day(self.location, self.pass_night_time, "whatever"))
def test_pass_in_day_time(self):
self.assertTrue(in_time_of_day(self.location, self.pass_day_time, "day"))
self.assertFalse(in_time_of_day(self.location, self.pass_night_time, "day"))
def test_pass_in_night_time(self):
self.assertFalse(in_time_of_day(self.location, self.pass_day_time, "night"))
self.assertTrue(in_time_of_day(self.location, self.pass_night_time, "night"))
| from teeminus10_helpers import *
import unittest
class TestInTimeOfDay(unittest.TestCase):
def setUp(self):
self.location = ephem.city('London')
self.location.date = datetime(2013, 03, 14, 9, 0, 0)
self.pass_day_time = datetime(2013, 03, 14, 12, 0, 0)
self.pass_night_time = datetime(2013, 03, 14, 0, 0, 0)
def test_pass_in_whatever_time(self):
self.assertTrue(in_time_of_day(self.location, self.pass_day_time, "whatever"))
self.assertTrue(in_time_of_day(self.location, self.pass_night_time, "whatever"))
def test_pass_in_day_time(self):
self.assertTrue(in_time_of_day(self.location, self.pass_day_time, "day"))
self.assertFalse(in_time_of_day(self.location, self.pass_night_time, "day"))
def test_pass_in_night_time(self):
self.assertFalse(in_time_of_day(self.location, self.pass_day_time, "night"))
self.assertTrue(in_time_of_day(self.location, self.pass_night_time, "night"))
| Remove sun setup for now | Remove sun setup for now
| Python | mit | jpgneves/t-10_server,jpgneves/t-10_server | ---
+++
@@ -7,7 +7,6 @@
self.location.date = datetime(2013, 03, 14, 9, 0, 0)
self.pass_day_time = datetime(2013, 03, 14, 12, 0, 0)
self.pass_night_time = datetime(2013, 03, 14, 0, 0, 0)
- self.sun = ephem.Sun("2013/03/14")
def test_pass_in_whatever_time(self):
self.assertTrue(in_time_of_day(self.location, self.pass_day_time, "whatever")) |
848c3a8b754d7a359da94c211f58d16bdf34c804 | fabfile.py | fabfile.py | # -*- coding: utf-8 -*-
u"""
.. module:: fabfile
Be aware that, because fabric doesn't support py3k, you need to execute this
particular script using Python 2.
"""
import contextlib
from fabric.api import cd
from fabric.api import env
from fabric.api import prefix
from fabric.api import run
env.user = 'root'
env.hosts = ['wysadzulice.pl']
env.forward_agent = True
def update():
u"""Function defining all steps required to properly update application."""
with contextlib.nested(
cd('/var/www/wysadzulice_pl'),
prefix('workon wysadzulice_pl')
):
run('git pull')
run('git checkout master')
run('python manage.py migrate --traceback')
run('service apache2 restart')
| # -*- coding: utf-8 -*-
u"""
.. module:: fabfile
Be aware that, because fabric doesn't support py3k, you need to execute this
particular script using Python 2.
"""
import contextlib
from fabric.api import cd
from fabric.api import env
from fabric.api import prefix
from fabric.api import run
env.user = 'root'
env.hosts = ['wysadzulice.pl']
env.forward_agent = True
def update():
u"""Function defining all steps required to properly update application."""
with contextlib.nested(
cd('/var/www/wysadzulice_pl'),
prefix('workon wysadzulice_pl')
):
run('git pull')
run('git checkout master')
run('python manage.py migrate --traceback')
run('npm cache clear')
run('rm -rf ./node_modules')
run('npm install')
run('gulp build')
run('service apache2 restart')
| Add js tasks to fabric update | Add js tasks to fabric update
| Python | mit | komitywa/wysadzulice.pl,magul/wysadzulice.pl,magul/wysadzulice.pl,magul/wysadzulice.pl,komitywa/wysadzulice.pl,komitywa/wysadzulice.pl | ---
+++
@@ -28,5 +28,9 @@
run('git pull')
run('git checkout master')
run('python manage.py migrate --traceback')
+ run('npm cache clear')
+ run('rm -rf ./node_modules')
+ run('npm install')
+ run('gulp build')
run('service apache2 restart') |
db4ecaba64a4fbd9d432b461ca0df5b63dd11fb4 | marathon_acme/cli.py | marathon_acme/cli.py | import argparse
import sys
def main(raw_args=sys.argv[1:]):
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Marathon and served by marathon-lb.
"""
parser = argparse.ArgumentParser(
description='Automatically manage ACME certificates for Marathon apps')
parser.add_argument('-a', '--acme',
help='The address for the ACME Directory Resource '
'(default: %(default)s)',
default=(
'https://acme-v01.api.letsencrypt.org/directory'))
parser.add_argument('-m', '--marathon', nargs='+',
help='The address for the Marathon HTTP API (default: '
'%(default)s)',
default='http://marathon.mesos:8080')
parser.add_argument('-l', '--lb', nargs='+',
help='The address for the marathon-lb HTTP API '
'(default: %(default)s)',
default='http://marathon-lb.marathon.mesos:9090')
parser.add_argument('storage-dir',
help='Path to directory for storing certificates')
args = parser.parse_args(raw_args) # noqa
if __name__ == '__main__':
main()
| import argparse
import sys
def main(raw_args=sys.argv[1:]):
"""
A tool to automatically request, renew and distribute Let's Encrypt
certificates for apps running on Marathon and served by marathon-lb.
"""
parser = argparse.ArgumentParser(
description='Automatically manage ACME certificates for Marathon apps')
parser.add_argument('-a', '--acme',
help='The address for the ACME Directory Resource '
'(default: %(default)s)',
default=(
'https://acme-v01.api.letsencrypt.org/directory'))
parser.add_argument('-m', '--marathon', nargs='+',
help='The address for the Marathon HTTP API (default: '
'%(default)s)',
default='http://marathon.mesos:8080')
parser.add_argument('-l', '--lb', nargs='+',
help='The address for the marathon-lb HTTP API '
'(default: %(default)s)',
default='http://marathon-lb.marathon.mesos:9090')
parser.add_argument('-g', '--group',
help='The marathon-lb group to issue certificates for '
'(default: %(default)s)',
default='external')
parser.add_argument('storage-dir',
help='Path to directory for storing certificates')
args = parser.parse_args(raw_args) # noqa
if __name__ == '__main__':
main()
| Add --group option to CLI | Add --group option to CLI
| Python | mit | praekeltfoundation/certbot,praekeltfoundation/certbot | ---
+++
@@ -22,6 +22,10 @@
help='The address for the marathon-lb HTTP API '
'(default: %(default)s)',
default='http://marathon-lb.marathon.mesos:9090')
+ parser.add_argument('-g', '--group',
+ help='The marathon-lb group to issue certificates for '
+ '(default: %(default)s)',
+ default='external')
parser.add_argument('storage-dir',
help='Path to directory for storing certificates')
|
51d7414e94981dc530d9f8be427ac9942ee263d7 | spacy/tests/regression/test_issue704.py | spacy/tests/regression/test_issue704.py | # coding: utf8
from __future__ import unicode_literals
import pytest
@pytest.mark.xfail
@pytest.mark.models('en')
def test_issue704(EN):
"""Test that sentence boundaries are detected correctly."""
text = '“Atticus said to Jem one day, “I’d rather you shot at tin cans in the backyard, but I know you’ll go after birds. Shoot all the blue jays you want, if you can hit ‘em, but remember it’s a sin to kill a mockingbird.”'
doc = EN(text)
sents = [sent for sent in doc.sents]
assert len(sents) == 3
| # coding: utf8
from __future__ import unicode_literals
import pytest
@pytest.mark.xfail
@pytest.mark.models('en')
def test_issue704(EN):
"""Test that sentence boundaries are detected correctly."""
text = '“Atticus said to Jem one day, “I’d rather you shot at tin cans in the backyard, but I know you’ll go after birds. Shoot all the blue jays you want, if you can hit ‘em, but remember it’s a sin to kill a mockingbird.”'
doc = EN(text)
sents = list([sent for sent in doc.sents])
assert len(sents) == 3
| Make sure sents are a list | Make sure sents are a list
| Python | mit | aikramer2/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,aikramer2/spaCy | ---
+++
@@ -11,5 +11,5 @@
text = '“Atticus said to Jem one day, “I’d rather you shot at tin cans in the backyard, but I know you’ll go after birds. Shoot all the blue jays you want, if you can hit ‘em, but remember it’s a sin to kill a mockingbird.”'
doc = EN(text)
- sents = [sent for sent in doc.sents]
+ sents = list([sent for sent in doc.sents])
assert len(sents) == 3 |
316533b3d0864c3cf3dba7ae7a3a83e30a02f33a | scrape-10k.py | scrape-10k.py | import csv
import time
import requests
import lxml.html
top10k = {}
for page_index in range(1, 201):
print('Requesting page {}'.format(page_index))
url = 'https://osu.ppy.sh/p/pp/'
payload = {
'm': 0, # osu! standard gamemode
'o': 1, # descending order
'page': page_index,
}
page = requests.get(url, params=payload)
tree = lxml.html.document_fromstring(page.text)
print('Processing page {}'.format(page_index))
rows = tree.cssselect('tr a')
for row in rows:
user_name = row.text
user_id = row.attrib['href'][3:]
top10k[user_id] = user_name
print(user_name, user_id)
time.sleep(1) # Be nice and slow down
with open('names.csv', 'a', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
for user_id, user_name in top10k.items():
writer.writerow([user_id, user_name])
| import csv
import time
import collections
import requests
import lxml.html
top10k = collections.OrderedDict()
for page_index in range(1, 201):
print('Requesting page {}'.format(page_index))
url = 'https://osu.ppy.sh/p/pp/'
payload = {
'm': 0, # osu! standard gamemode
'o': 1, # descending order
'page': page_index,
}
page = requests.get(url, params=payload)
tree = lxml.html.document_fromstring(page.text)
print('Processing page {}'.format(page_index))
rows = tree.cssselect('tr a')
for row in rows:
user_name = row.text
user_id = row.attrib['href'][3:]
top10k[user_id] = user_name
print(user_name, user_id)
time.sleep(1) # Be nice and slow down
with open('names.csv', 'a', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
for user_id, user_name in top10k.items():
writer.writerow([user_id, user_name])
| Maintain top 10k order when writing into file | Maintain top 10k order when writing into file
| Python | mit | Cyanogenoid/osu-modspecific-rank | ---
+++
@@ -1,11 +1,12 @@
import csv
import time
+import collections
import requests
import lxml.html
-top10k = {}
+top10k = collections.OrderedDict()
for page_index in range(1, 201):
print('Requesting page {}'.format(page_index))
url = 'https://osu.ppy.sh/p/pp/' |
89b9bb45b17d457f6cf158330dfde6fe00e78cf4 | core/storage/statistics/models.py | core/storage/statistics/models.py | # coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy model file to keep django happy."""
__author__ = 'Tarashish Mishra'
from core.storage.statistics import django_models
StateCounterModel = django_models.StateCounterModel
StateFeedbackFromReaderModel = django_models.StateFeedbackFromReaderModel
StateRuleAnswerLogModel = django_models.StateRuleAnswerLogModel
| # coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy model file to keep django happy."""
__author__ = 'Tarashish Mishra'
from core.storage.statistics import django_models
StateCounterModel = django_models.StateCounterModel
StateRuleAnswerLogModel = django_models.StateRuleAnswerLogModel
FeedbackItemModel = django_models.FeedbackItemModel
| Fix omission in previous commit. | Fix omission in previous commit.
| Python | apache-2.0 | leandrotoledo/oppia,rackstar17/oppia,zgchizi/oppia-uc,nagyistoce/oppia,kennho/oppia,MAKOSCAFEE/oppia,raju249/oppia,hazmatzo/oppia,Atlas-Sailed-Co/oppia,Atlas-Sailed-Co/oppia,mit0110/oppia,brylie/oppia,nagyistoce/oppia,BenHenning/oppia,dippatel1994/oppia,VictoriaRoux/oppia,CMDann/oppia,kennho/oppia,cleophasmashiri/oppia,aldeka/oppia,sunu/oh-missions-oppia-beta,sbhowmik89/oppia,sbhowmik89/oppia,michaelWagner/oppia,raju249/oppia,wangsai/oppia,prasanna08/oppia,sarahfo/oppia,sanyaade-teachings/oppia,anthkris/oppia,BenHenning/oppia,aldeka/oppia,google-code-export/oppia,oppia/oppia,rackstar17/oppia,whygee/oppia,sdulal/oppia,sdulal/oppia,hazmatzo/oppia,virajprabhu/oppia,MaximLich/oppia,kingctan/oppia,Cgruppo/oppia,kevinlee12/oppia,miyucy/oppia,AllanYangZhou/oppia,michaelWagner/oppia,CMDann/oppia,kingctan/oppia,wangsai/oppia,DewarM/oppia,sanyaade-teachings/oppia,terrameijar/oppia,asandyz/oppia,souravbadami/oppia,asandyz/oppia,oppia/oppia,miyucy/oppia,aldeka/oppia,shaz13/oppia,wangsai/oppia,openhatch/oh-missions-oppia-beta,souravbadami/oppia,virajprabhu/oppia,virajprabhu/oppia,Cgruppo/oppia,miyucy/oppia,dippatel1994/oppia,felipecocco/oppia,kaffeel/oppia,bjvoth/oppia,won0089/oppia,whygee/oppia,jestapinski/oppia,nagyistoce/oppia,kevinlee12/oppia,Atlas-Sailed-Co/oppia,sunu/oh-missions-oppia-beta,amgowano/oppia,BenHenning/oppia,dippatel1994/oppia,oulan/oppia,sanyaade-teachings/oppia,kaffeel/oppia,anggorodewanto/oppia,zgchizi/oppia-uc,rackstar17/oppia,brianrodri/oppia,sdulal/oppia,shaz13/oppia,oppia/oppia,whygee/oppia,gale320/oppia,dippatel1994/oppia,MAKOSCAFEE/oppia,Atlas-Sailed-Co/oppia,won0089/oppia,toooooper/oppia,CMDann/oppia,kingctan/oppia,danieljjh/oppia,danieljjh/oppia,VictoriaRoux/oppia,oulan/oppia,oulan/oppia,won0089/oppia,himanshu-dixit/oppia,mindpin/mindpin_oppia,himanshu-dixit/oppia,toooooper/oppia,cleophasmashiri/oppia,MaximLich/oppia,kevinlee12/oppia,won0089/oppia,fernandopinhati/oppia,brylie/oppia,MaximLich/oppia,mit0110/oppia,leandrotoledo/oppia,shaz13/oppia,MAKOSCAFEE/oppia,asandyz/oppia,CMDann/oppia,hazmatzo/oppia,DewarM/oppia,toooooper/oppia,directorlive/oppia,kennho/oppia,Dev4X/oppia,mindpin/mindpin_oppia,google-code-export/oppia,brylie/oppia,miyucy/oppia,wangsai/oppia,sarahfo/oppia,mit0110/oppia,hazmatzo/oppia,danieljjh/oppia,anggorodewanto/oppia,mindpin/mindpin_oppia,kingctan/oppia,google-code-export/oppia,anggorodewanto/oppia,virajprabhu/oppia,hazmatzo/oppia,zgchizi/oppia-uc,cleophasmashiri/oppia,gale320/oppia,infinyte/oppia,won0089/oppia,Cgruppo/oppia,kennho/oppia,openhatch/oh-missions-oppia-beta,Cgruppo/oppia,himanshu-dixit/oppia,leandrotoledo/oppia,jestapinski/oppia,felipecocco/oppia,Dev4X/oppia,AllanYangZhou/oppia,sdulal/oppia,amitdeutsch/oppia,kevinlee12/oppia,aldeka/oppia,mit0110/oppia,Cgruppo/oppia,cleophasmashiri/oppia,MaximLich/oppia,Atlas-Sailed-Co/oppia,souravbadami/oppia,jestapinski/oppia,google-code-export/oppia,DewarM/oppia,edallison/oppia,fernandopinhati/oppia,infinyte/oppia,prasanna08/oppia,leandrotoledo/oppia,felipecocco/oppia,directorlive/oppia,sunu/oppia,sunu/oppia,raju249/oppia,michaelWagner/oppia,kingctan/oppia,amitdeutsch/oppia,prasanna08/oppia,sbhowmik89/oppia,VictoriaRoux/oppia,sarahfo/oppia,rackstar17/oppia,sanyaade-teachings/oppia,bjvoth/oppia,AllanYangZhou/oppia,sunu/oh-missions-oppia-beta,gale320/oppia,sunu/oppia,gale320/oppia,directorlive/oppia,himanshu-dixit/oppia,openhatch/oh-missions-oppia-beta,infinyte/oppia,google-code-export/oppia,dippatel1994/oppia,brylie/oppia,sunu/oppia,danieljjh/oppia,sarahfo/oppia,anthkris/oppia,direct
orlive/oppia,CMDann/oppia,souravbadami/oppia,amgowano/oppia,BenHenning/oppia,brianrodri/oppia,edallison/oppia,terrameijar/oppia,wangsai/oppia,bjvoth/oppia,amitdeutsch/oppia,prasanna08/oppia,asandyz/oppia,leandrotoledo/oppia,michaelWagner/oppia,VictoriaRoux/oppia,oppia/oppia,VictoriaRoux/oppia,kevinlee12/oppia,amitdeutsch/oppia,amitdeutsch/oppia,sarahfo/oppia,sdulal/oppia,michaelWagner/oppia,asandyz/oppia,zgchizi/oppia-uc,DewarM/oppia,toooooper/oppia,mit0110/oppia,whygee/oppia,Dev4X/oppia,kaffeel/oppia,anggorodewanto/oppia,fernandopinhati/oppia,bjvoth/oppia,sbhowmik89/oppia,danieljjh/oppia,oulan/oppia,nagyistoce/oppia,whygee/oppia,openhatch/oh-missions-oppia-beta,bjvoth/oppia,BenHenning/oppia,amgowano/oppia,directorlive/oppia,edallison/oppia,toooooper/oppia,anthkris/oppia,fernandopinhati/oppia,terrameijar/oppia,oppia/oppia,souravbadami/oppia,brylie/oppia,kennho/oppia,AllanYangZhou/oppia,DewarM/oppia,sbhowmik89/oppia,amgowano/oppia,virajprabhu/oppia,sunu/oh-missions-oppia-beta,sunu/oppia,infinyte/oppia,edallison/oppia,prasanna08/oppia,infinyte/oppia,shaz13/oppia,raju249/oppia,jestapinski/oppia,Dev4X/oppia,sanyaade-teachings/oppia,brianrodri/oppia,nagyistoce/oppia,cleophasmashiri/oppia,Dev4X/oppia,brianrodri/oppia,felipecocco/oppia,fernandopinhati/oppia,mindpin/mindpin_oppia,kaffeel/oppia,felipecocco/oppia,terrameijar/oppia,anthkris/oppia,kaffeel/oppia,MAKOSCAFEE/oppia,gale320/oppia,oulan/oppia,brianrodri/oppia | ---
+++
@@ -21,5 +21,5 @@
from core.storage.statistics import django_models
StateCounterModel = django_models.StateCounterModel
-StateFeedbackFromReaderModel = django_models.StateFeedbackFromReaderModel
StateRuleAnswerLogModel = django_models.StateRuleAnswerLogModel
+FeedbackItemModel = django_models.FeedbackItemModel |
805f92a6aee1d9b9514818459809a5230fa7f7eb | svenv/blog/settings.py | svenv/blog/settings.py | BASE_URL = 'http://svenv.nl'
REST_FRAMEWORK = {
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.OrderingFilter',),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 3,
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
],
} | BASE_URL = 'http://svenv.nl'
REST_FRAMEWORK = {
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.OrderingFilter',),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 12,
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
],
} | Set page size to 12 | Set page size to 12
| Python | mit | svenvandescheur/svenv.nl-app,svenvandescheur/svenv.nl-app,svenvandescheur/svenv.nl-app,svenvandescheur/svenv.nl-app | ---
+++
@@ -3,7 +3,7 @@
REST_FRAMEWORK = {
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.OrderingFilter',),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
- 'PAGE_SIZE': 3,
+ 'PAGE_SIZE': 12,
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
], |
6ea521e022a40a7abb7ed43b28907d7085bbf423 | muzicast/web/main.py | muzicast/web/main.py | from flask import Module, render_template, url_for, redirect, session, escape, request
from muzicast.web.util import is_first_run
from muzicast.meta import Track
main = Module(__name__)
def top_tracks(n):
"""
Returns the top n tracks
"""
#TODO(nikhil) fix this to use statistics
return [Track.get(i) for i in range(1, n+1)]
def recently_played(n):
"""
Returns n latest played tracks
"""
#TODO(nikhil) fix this to use statistics
return [Track.get(i) for i in range(1, n+1)]
@main.route('/')
def index():
#just do first run check
if is_first_run():
return redirect(url_for('admin.index'))
# TODO: will need attributes for template
return render_template('home.html', top_tracks=top_tracks, recently_played=recently_played)
| from flask import Module, render_template, url_for, redirect, session, escape, request
from sqlobject.main import SQLObjectNotFound
from muzicast.web.util import is_first_run
from muzicast.meta import Track
main = Module(__name__)
def top_tracks(n):
"""
Returns the top n tracks
"""
#TODO(nikhil) fix this to use statistics
try:
return [Track.get(i) for i in range(1, n+1)]
except SQLObjectNotFound:
return []
def recently_played(n):
"""
Returns n latest played tracks
"""
#TODO(nikhil) fix this to use statistics
try:
return [Track.get(i) for i in range(1, n+1)]
except SQLObjectNotFound:
return []
@main.route('/')
def index():
#just do first run check
if is_first_run():
return redirect(url_for('admin.index'))
# TODO: will need attributes for template
return render_template('home.html', top_tracks=top_tracks, recently_played=recently_played)
| Return 0 tracks in case of error | Return 0 tracks in case of error
| Python | mit | nikhilm/muzicast,nikhilm/muzicast | ---
+++
@@ -1,4 +1,6 @@
from flask import Module, render_template, url_for, redirect, session, escape, request
+
+from sqlobject.main import SQLObjectNotFound
from muzicast.web.util import is_first_run
from muzicast.meta import Track
@@ -10,14 +12,20 @@
Returns the top n tracks
"""
#TODO(nikhil) fix this to use statistics
- return [Track.get(i) for i in range(1, n+1)]
+ try:
+ return [Track.get(i) for i in range(1, n+1)]
+ except SQLObjectNotFound:
+ return []
def recently_played(n):
"""
Returns n latest played tracks
"""
#TODO(nikhil) fix this to use statistics
- return [Track.get(i) for i in range(1, n+1)]
+ try:
+ return [Track.get(i) for i in range(1, n+1)]
+ except SQLObjectNotFound:
+ return []
@main.route('/')
def index(): |
f9be8cce93134dc4e5592591beb605c0a75c813c | chunks/redsolution_setup/make.py | chunks/redsolution_setup/make.py | from redsolutioncms.make import BaseMake
from redsolutioncms.models import CMSSettings
class Make(BaseMake):
def make(self):
super(Make, self).make()
cms_settings = CMSSettings.objects.get_settings()
cms_settings.render_to('settings.py', 'chunks/redsolutioncms/settings.pyt')
cms_settings.render_to(['..', 'templates', 'base_chunks.html'],
'chunks/redsolutioncms/base_chunks.html', {
}, 'w')
cms_settings.render_to('urls.py', 'chunks/redsolutioncms/urls.pyt')
cms_settings.render_to(['..', 'templates', 'robots.txt'],
'chunks/redsolutioncms/robots.txt')
cms_settings.base_template = 'base_chunks.html'
cms_settings.save()
make = Make()
| from redsolutioncms.make import BaseMake
from redsolutioncms.models import CMSSettings
class Make(BaseMake):
def make(self):
super(Make, self).make()
cms_settings = CMSSettings.objects.get_settings()
cms_settings.render_to('settings.py', 'chunks/redsolutioncms/settings.pyt')
cms_settings.render_to(['..', 'templates', 'base_chunks.html'],
'chunks/redsolutioncms/base_chunks.html', {
}, 'w')
cms_settings.render_to('urls.py', 'chunks/redsolutioncms/urls.pyt')
cms_settings.render_to(['..', 'templates', 'robots.txt'],
'chunks/redsolutioncms/robots.txt', {}, 'w')
cms_settings.base_template = 'base_chunks.html'
cms_settings.save()
make = Make()
| Rewrite robots.txt file in setup | Rewrite robots.txt file in setup
| Python | bsd-3-clause | redsolution/django-chunks,redsolution/django-chunks | ---
+++
@@ -11,7 +11,7 @@
}, 'w')
cms_settings.render_to('urls.py', 'chunks/redsolutioncms/urls.pyt')
cms_settings.render_to(['..', 'templates', 'robots.txt'],
- 'chunks/redsolutioncms/robots.txt')
+ 'chunks/redsolutioncms/robots.txt', {}, 'w')
cms_settings.base_template = 'base_chunks.html'
cms_settings.save()
|
d68935dfb34f7c5fc463f94e49f0c060717b17b8 | cmsplugin_contact_plus/checks.py | cmsplugin_contact_plus/checks.py | # -*- coding: utf-8 -*-
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
warn_1_3_changes,
]:
register(check)
| # -*- coding: utf-8 -*-
from django.core.checks import Warning, register
def warn_1_3_changes(app_configs, **kwargs):
return [
Warning(
'cmsplugin-contact-plus >= 1.3 has renamed the "input" field. Do not forget to migrate your '
'database and update your templates',
hint=None,
obj=None,
id='cmsplugin_contact_plus.W001',
)
]
def register_checks():
for check in [
# warn_1_3_changes, # Might be more annoying than useful
]:
register(check)
| Comment out warning for renamed field | Comment out warning for renamed field
| Python | bsd-3-clause | arteria/cmsplugin-contact-plus,arteria/cmsplugin-contact-plus,worthwhile/cmsplugin-remote-form,worthwhile/cmsplugin-remote-form | ---
+++
@@ -16,6 +16,6 @@
def register_checks():
for check in [
- warn_1_3_changes,
+ # warn_1_3_changes, # Might be more annoying than useful
]:
register(check) |
f0f66aa917d9ec85cfbe2a0460b2d4b4d5ffe0eb | middleware/hat_manager.py | middleware/hat_manager.py | class HatManager(object):
def __init__(self, sense):
self.sense = sense
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
def refresh_state(self):
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
@property
def get_humidity(self):
return self._humidity
@property
def get_temperature(self):
return self._temperature
@property
def get_pressure(self):
return self._pressure
| class HatManager(object):
def __init__(self, sense):
self.sense = sense
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
def refresh_state(self):
self._pressure = self.sense.get_pressure()
self._temperature = self.sense.get_temperature()
self._humidity = self.sense.get_humidity()
@property
def get_humidity(self):
return self._humidity
@property
def get_temperature(self):
return self._temperature
@property
def get_pressure(self):
return self._pressure
def set_message(self, msg):
self.sense.show_message(msg, scroll_speed=0.05)
| Add a method to print a message on the sense hat | Add a method to print a message on the sense hat
| Python | mit | ylerjen/pir-hat,ylerjen/pir-hat,ylerjen/pir-hat | ---
+++
@@ -23,3 +23,6 @@
@property
def get_pressure(self):
return self._pressure
+
+ def set_message(self, msg):
+ self.sense.show_message(msg, scroll_speed=0.05) |
5b4208712d5f5f64774e033d0e831863822cb90f | libraries/log.py | libraries/log.py | # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.core.mail import send_mail
def cronosDebug(msg, logfile):
logging.basicConfig(level = logging.DEBUG, format = '%(asctime)s: %(message)s', filename = settings.LOGDIR + logfile, filemode = 'a+')
logging.debug(msg)
def mailCronosAdmin(title, message):
try:
send_mail(title, message, '[email protected]', ['[email protected]'])
except:
pass
class CronosError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
| # -*- coding: utf-8 -*-
import logging
from django.conf import settings
from django.core.mail import send_mail
def cronos_debug(msg, logfile):
logging.basicConfig(level = logging.DEBUG, format = '%(asctime)s: %(message)s', filename = settings.LOGDIR + logfile, filemode = 'a+')
logging.debug(msg)
def mail_cronos_admin(title, message):
try:
send_mail(title, message, '[email protected]', ['[email protected]'])
except:
pass
class CronosError(Exception):
def __init__(self, value):
self.value = value
def __unicode__(self):
return repr(self.value)
| Rename libraries, I'll prefer underscores instead of stuck names Prefer unicodes in CronosError | Rename libraries, I'll prefer underscores instead of stuck names
Prefer unicodes in CronosError
| Python | agpl-3.0 | LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr | ---
+++
@@ -3,11 +3,11 @@
from django.conf import settings
from django.core.mail import send_mail
-def cronosDebug(msg, logfile):
+def cronos_debug(msg, logfile):
logging.basicConfig(level = logging.DEBUG, format = '%(asctime)s: %(message)s', filename = settings.LOGDIR + logfile, filemode = 'a+')
logging.debug(msg)
-def mailCronosAdmin(title, message):
+def mail_cronos_admin(title, message):
try:
send_mail(title, message, '[email protected]', ['[email protected]'])
except:
@@ -16,5 +16,5 @@
class CronosError(Exception):
def __init__(self, value):
self.value = value
- def __str__(self):
+ def __unicode__(self):
return repr(self.value) |
60173acbecf1239872411b2ca0dd9eb75b543843 | tests/sentry/web/frontend/test_organization_stats.py | tests/sentry/web/frontend/test_organization_stats.py | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_org_member_cannot_load(self):
self.assert_org_member_cannot_access(self.path)
def test_org_admin_can_load(self):
self.assert_org_admin_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
| from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_org_member_can_load(self):
self.assert_org_member_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
| Correct permission tests for organization stats | Correct permission tests for organization stats
| Python | bsd-3-clause | looker/sentry,alexm92/sentry,gg7/sentry,zenefits/sentry,vperron/sentry,ifduyue/sentry,imankulov/sentry,JamesMura/sentry,daevaorn/sentry,mitsuhiko/sentry,JackDanger/sentry,ewdurbin/sentry,BuildingLink/sentry,daevaorn/sentry,kevinlondon/sentry,songyi199111/sentry,TedaLIEz/sentry,kevinlondon/sentry,wujuguang/sentry,mitsuhiko/sentry,argonemyth/sentry,hongliang5623/sentry,ifduyue/sentry,kevinlondon/sentry,JamesMura/sentry,nicholasserra/sentry,boneyao/sentry,ewdurbin/sentry,TedaLIEz/sentry,vperron/sentry,looker/sentry,ifduyue/sentry,1tush/sentry,BuildingLink/sentry,Kryz/sentry,kevinastone/sentry,gencer/sentry,mvaled/sentry,looker/sentry,boneyao/sentry,jean/sentry,JTCunning/sentry,wong2/sentry,songyi199111/sentry,ngonzalvez/sentry,imankulov/sentry,felixbuenemann/sentry,TedaLIEz/sentry,jean/sentry,JackDanger/sentry,jean/sentry,jean/sentry,ngonzalvez/sentry,BuildingLink/sentry,zenefits/sentry,ewdurbin/sentry,daevaorn/sentry,fuziontech/sentry,JackDanger/sentry,argonemyth/sentry,hongliang5623/sentry,mvaled/sentry,JTCunning/sentry,nicholasserra/sentry,ifduyue/sentry,fotinakis/sentry,korealerts1/sentry,boneyao/sentry,kevinastone/sentry,Natim/sentry,beeftornado/sentry,drcapulet/sentry,gg7/sentry,gencer/sentry,llonchj/sentry,Kryz/sentry,drcapulet/sentry,llonchj/sentry,BayanGroup/sentry,korealerts1/sentry,fotinakis/sentry,vperron/sentry,BayanGroup/sentry,fuziontech/sentry,looker/sentry,drcapulet/sentry,felixbuenemann/sentry,fotinakis/sentry,wong2/sentry,zenefits/sentry,beeftornado/sentry,mvaled/sentry,Natim/sentry,beeftornado/sentry,Kryz/sentry,imankulov/sentry,pauloschilling/sentry,BuildingLink/sentry,gencer/sentry,mvaled/sentry,fuziontech/sentry,alexm92/sentry,Natim/sentry,1tush/sentry,kevinastone/sentry,korealerts1/sentry,JamesMura/sentry,BuildingLink/sentry,ngonzalvez/sentry,pauloschilling/sentry,songyi199111/sentry,wong2/sentry,JamesMura/sentry,zenefits/sentry,mvaled/sentry,wujuguang/sentry,fotinakis/sentry,gencer/sentry,hongliang5623/sentry,gencer/sentry,daevaorn/sentry,pauloschilling/sentry,nicholasserra/sentry,BayanGroup/sentry,jean/sentry,wujuguang/sentry,1tush/sentry,mvaled/sentry,llonchj/sentry,alexm92/sentry,JamesMura/sentry,argonemyth/sentry,zenefits/sentry,gg7/sentry,ifduyue/sentry,felixbuenemann/sentry,looker/sentry,JTCunning/sentry | ---
+++
@@ -13,11 +13,8 @@
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
- def test_org_member_cannot_load(self):
- self.assert_org_member_cannot_access(self.path)
-
- def test_org_admin_can_load(self):
- self.assert_org_admin_can_access(self.path)
+ def test_org_member_can_load(self):
+ self.assert_org_member_can_access(self.path)
class OrganizationStatsTest(TestCase): |
54e5ee0cb6df1f47a1a6edd114c65ad62fd0c517 | node/floor_divide.py | node/floor_divide.py | #!/usr/bin/env python
from nodes import Node
class FloorDiv(Node):
char = "f"
args = 2
results = 1
@Node.test_func([3,2], [1])
@Node.test_func([6,-3], [-2])
def func(self, a:Node.number,b:Node.number):
"""a/b. Rounds down, returns an int."""
return a//b
@Node.test_func(["test", "e"], [["t", "e", "st"]])
def partition(self, string:str, sep:str):
"""Split the string at the first occurrence of sep,
return a 3-list containing the part before the separator,
the separator itself, and the part after the separator.
If the separator is not found,
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))] | #!/usr/bin/env python
from nodes import Node
class FloorDiv(Node):
char = "f"
args = 2
results = 1
@Node.test_func([3,2], [1])
@Node.test_func([6,-3], [-2])
def func(self, a:Node.number,b:Node.number):
"""a/b. Rounds down, returns an int."""
return a//b
@Node.test_func(["test", "e"], [["t", "e", "st"]])
def partition(self, string:str, sep:str):
"""Split the string at the first occurrence of sep,
return a 3-list containing the part before the separator,
the separator itself, and the part after the separator.
If the separator is not found,
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))]
@Node.test_func(["134", 1], [["134"]])
@Node.test_func(["1234", 2], [["12", "34"]])
@Node.test_func(["1234", 3], [["1", "2", "34"]])
@Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]])
def chunk(self, inp:Node.indexable, num:Node.number):
"""Return inp seperated into num groups"""
rtn = []
last = 0
size = len(inp)//num
for i in range(size, len(inp), size):
rtn.append(inp[last:i])
last = i
if len(rtn) != num:
rtn.append(inp[last:])
else:
rtn[-1] += inp[last:]
if len(rtn):
if isinstance(inp, str):
rtn[-1] = "".join(rtn[-1])
else:
rtn[-1] = type(inp)(rtn[-1])
return [rtn] | Add a group chunk, chunks a list into N groups | Add a group chunk, chunks a list into N groups
| Python | mit | muddyfish/PYKE,muddyfish/PYKE | ---
+++
@@ -22,3 +22,26 @@
return a 3-list containing the string itself,
followed by two empty strings."""
return [list(string.partition(sep))]
+
+ @Node.test_func(["134", 1], [["134"]])
+ @Node.test_func(["1234", 2], [["12", "34"]])
+ @Node.test_func(["1234", 3], [["1", "2", "34"]])
+ @Node.test_func([[4,8,15,16,23,42], 5], [[[4],[8],[15],[16],[23,42]]])
+ def chunk(self, inp:Node.indexable, num:Node.number):
+ """Return inp seperated into num groups"""
+ rtn = []
+ last = 0
+ size = len(inp)//num
+ for i in range(size, len(inp), size):
+ rtn.append(inp[last:i])
+ last = i
+ if len(rtn) != num:
+ rtn.append(inp[last:])
+ else:
+ rtn[-1] += inp[last:]
+ if len(rtn):
+ if isinstance(inp, str):
+ rtn[-1] = "".join(rtn[-1])
+ else:
+ rtn[-1] = type(inp)(rtn[-1])
+ return [rtn] |
2477bc7e4cb39c8fc34efec75143f42b6fee7dda | bot.py | bot.py | import zirc
import ssl
import socket
import utils
import commands
debug = False
class Bot(zirc.Client):
def __init__(self):
self.connection = zirc.Socket(family=socket.AF_INET6,
wrapper=ssl.wrap_socket)
self.config = zirc.IRCConfig(host="chat.freenode.net",
port=6697,
nickname="zIRCBot2",
ident="zirc",
realname="A zIRC bot",
channels=["##wolfy1339", "##powder-bots"],
sasl_user="BigWolfy1339",
sasl_pass="")
self.connect(self.config)
self.start()
@staticmethod
def on_ctcp(irc, raw):
utils.print_(raw, flush=True)
@classmethod
def on_privmsg(self, event, irc, arguments):
if " ".join(arguments).startswith("?"):
utils.call_command(self, event, irc, arguments)
@classmethod
def on_all(self, event, irc):
if debug:
utils.print_(event.raw, flush=True)
Bot()
| import zirc
import ssl
import socket
import utils
import commands
debug = False
class Bot(zirc.Client):
def __init__(self):
self.connection = zirc.Socket(family=socket.AF_INET6,
wrapper=ssl.wrap_socket)
self.config = zirc.IRCConfig(host="chat.freenode.net",
port=6697,
nickname="zIRCBot2",
ident="zirc",
realname="A zIRC bot",
channels=["##wolfy1339", "##powder-bots"],
sasl_user="BigWolfy1339",
sasl_pass="")
self.connect(self.config)
self.start()
@staticmethod
def on_ctcp(irc, raw):
utils.print_(raw, flush=True)
@classmethod
def on_privmsg(self, event, irc, arguments):
if " ".join(arguments).startswith("?"):
utils.call_command(self, event, irc, arguments)
@classmethod
def on_all(self, event, irc):
if debug:
utils.print_(event.raw, flush=True)
@classmethod
def on_nicknameinuse(self, event, irc):
irc.nick(bot.config['nickname'] + "_")
Bot()
| Add handler for 433 Nickname already in use | Add handler for 433 Nickname already in use
| Python | mit | wolfy1339/Python-IRC-Bot | ---
+++
@@ -37,4 +37,8 @@
if debug:
utils.print_(event.raw, flush=True)
+ @classmethod
+ def on_nicknameinuse(self, event, irc):
+ irc.nick(bot.config['nickname'] + "_")
+
Bot() |
80737e5de2ca3f0f039c9d4fbbf3df4ac8b59193 | run.py | run.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import twitter_rss
import time
import subprocess
import config
# Launch web server
p = subprocess.Popen(['/usr/bin/python2', config.INSTALL_DIR + 'server.py'])
# Update the feeds
try:
while 1:
print 'Updating ALL THE FEEDS!'
try:
with open(config.XML_DIR + 'user/user.txt', 'r') as usernames:
for user in usernames:
twitter_rss.UserTweetGetter(user)
usernames.close()
with open(config.XML_DIR + 'htag/htag.txt', 'r') as hashtags:
for htag in hashtags:
twitter_rss.HashtagTweetGetter(user)
hashtags.close()
except IOError:
print 'File could not be read'
time.sleep(config.TIMER)
except (KeyboardInterrupt, SystemExit):
p.kill() # kill the subprocess
print '\nKeyboardInterrupt catched -- Finishing program.' | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import twitter_rss
import time
import subprocess
import config
import sys
# Launch web server
p = subprocess.Popen([sys.executable, config.INSTALL_DIR + 'server.py'])
# Update the feeds
try:
while 1:
print 'Updating ALL THE FEEDS!'
try:
with open(config.XML_DIR + 'user/user.txt', 'r') as usernames:
for user in usernames:
twitter_rss.UserTweetGetter(user)
usernames.close()
with open(config.XML_DIR + 'htag/htag.txt', 'r') as hashtags:
for htag in hashtags:
twitter_rss.HashtagTweetGetter(user)
hashtags.close()
except IOError:
print 'File could not be read'
time.sleep(config.TIMER)
except (KeyboardInterrupt, SystemExit):
p.kill() # kill the subprocess
print '\nKeyboardInterrupt catched -- Finishing program.'
 | Use sys.executable instead of hardcoded python path | Use sys.executable instead of hardcoded python path
Fixes issue when running in a virtualenv and in non-standard python
installations.
| Python | mit | Astalaseven/twitter-rss,Astalaseven/twitter-rss | ---
+++
@@ -5,9 +5,10 @@
import time
import subprocess
import config
+import sys
# Launch web server
-p = subprocess.Popen(['/usr/bin/python2', config.INSTALL_DIR + 'server.py'])
+p = subprocess.Popen([sys.executable, config.INSTALL_DIR + 'server.py'])
# Update the feeds
try: |
d7d1e2937c9f09189aad713db1f5ee5d2d6a64bd | run.py | run.py | # -*- coding: utf-8 -*-
"""
This script generates all the relevant figures from the experiment.
"""
from Modules.processing import *
from Modules.plotting import *
set_sns()
save = True
savetype = ".eps"
show = True
def main():
plot_perf_curves(save=save, savetype=savetype)
plot_perf_curves(subplots=False, save=save, savetype=savetype)
plot_perf_re_dep(save=save, savetype=savetype, errorbars=False,
dual_xaxes=True)
PerfCurve(1.0).plotcp(save=save, savetype=savetype, show=False)
wm = WakeMap()
wm.plot_meancontquiv(save=save, savetype=savetype)
wm.plot_k(save=save, savetype=savetype)
plot_no_blades_all(save=save, savetype=savetype)
plot_cp_covers(save=save, savetype=savetype, add_strut_torque=False)
plot_cp_covers(save=save, savetype=savetype, add_strut_torque=True)
if show:
plt.show()
if __name__ == "__main__":
if not os.path.isdir("Figures"):
os.mkdir("Figures")
main() | # -*- coding: utf-8 -*-
"""
This script generates all the relevant figures from the experiment.
"""
from Modules.processing import *
from Modules.plotting import *
set_sns()
save = True
savetype = ".eps"
show = True
def main():
plot_perf_curves(save=save, savetype=savetype)
plot_perf_curves(subplots=False, save=save, savetype=savetype)
plot_perf_re_dep(save=save, savetype=savetype, errorbars=False,
dual_xaxes=True)
PerfCurve(1.0).plotcp(save=save, savetype=savetype, show=False)
wm = WakeMap()
wm.plot_meancontquiv(save=save, savetype=savetype)
wm.plot_k(save=save, savetype=savetype)
wm.make_K_bar_graph(save=save, savetype=savetype)
plot_no_blades_all(save=save, savetype=savetype)
plot_cp_covers(save=save, savetype=savetype, add_strut_torque=False)
plot_cp_covers(save=save, savetype=savetype, add_strut_torque=True)
if show:
plt.show()
if __name__ == "__main__":
if not os.path.isdir("Figures"):
os.mkdir("Figures")
main() | Make K transport bar graph | Make K transport bar graph
| Python | mit | UNH-CORE/RM2-tow-tank | ---
+++
@@ -21,6 +21,7 @@
wm = WakeMap()
wm.plot_meancontquiv(save=save, savetype=savetype)
wm.plot_k(save=save, savetype=savetype)
+ wm.make_K_bar_graph(save=save, savetype=savetype)
plot_no_blades_all(save=save, savetype=savetype)
plot_cp_covers(save=save, savetype=savetype, add_strut_torque=False)
plot_cp_covers(save=save, savetype=savetype, add_strut_torque=True) |
5f14b7217f81b6d7653f94065d1a3305204cf83b | ddcz/templatetags/creations.py | ddcz/templatetags/creations.py | from django import template
from django.contrib.staticfiles.storage import staticfiles_storage
from ..creations import RATING_DESCRIPTIONS
register = template.Library()
@register.inclusion_tag('creations/rating.html')
def creation_rating(rating, skin):
return {
'rating_description': RATING_DESCRIPTIONS[round(rating)],
'rating': range(rating),
'skin': skin,
'skin_rating_star_url': staticfiles_storage.url("skins/%s/img/rating-star.gif" % skin),
}
| from django import template
from django.contrib.staticfiles.storage import staticfiles_storage
from ..creations import RATING_DESCRIPTIONS
register = template.Library()
@register.inclusion_tag('creations/rating.html')
def creation_rating(rating, skin):
return {
'rating_description': "Hodnocení: %s" % RATING_DESCRIPTIONS[round(rating)],
'rating': range(rating),
'skin': skin,
'skin_rating_star_url': staticfiles_storage.url("skins/%s/img/rating-star.gif" % skin),
}
| Add explicit rating word to rating alt | Add explicit rating word to rating alt
| Python | mit | dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard | ---
+++
@@ -8,7 +8,7 @@
@register.inclusion_tag('creations/rating.html')
def creation_rating(rating, skin):
return {
- 'rating_description': RATING_DESCRIPTIONS[round(rating)],
+ 'rating_description': "Hodnocení: %s" % RATING_DESCRIPTIONS[round(rating)],
'rating': range(rating),
'skin': skin,
'skin_rating_star_url': staticfiles_storage.url("skins/%s/img/rating-star.gif" % skin), |
6b59d17aa06741f40bb99dde6c10950de3a142e6 | utils/load.py | utils/load.py | #!/usr/local/bin/python
from website import carts
import urllib2
import json
def load():
carts.remove_all()
host = 'http://data.cityofnewyork.us/resource/xfyi-uyt5.json'
for i in range(0, 7000, 1000):
query = 'permit_type_description=MOBILE+FOOD+UNIT&$offset=%d' % i
request = host + '?' + query
data = urllib2.urlopen(request)
results = json.loads(data.read())
data.close()
required_keys = ['longitude_wgs84', 'latitude_wgs84', 'street', 'address', 'zip_code', 'borough', 'license_permit_holder']
for r in results:
for k in required_keys:
if not r.has_key(k):
r[k] = ''
carts.insert(lat=r['latitude_wgs84'], lng=r['longitude_wgs84'],
address=r['address'] + ' ' + r['street'],
zip_code=r['zip_code'], borough=r['borough'],
name=r['license_permit_holder'])
out = [c for c in carts.find()]
print len(out)
| #!/usr/local/bin/python
from website import carts
import urllib2
import json
def load():
carts.remove_all()
request = 'http://data.cityofnewyork.us/resource/akqf-qv4n.json'
for i in range(0, 24000, 1000):
query = '?$offset=%d' % i
data = urllib2.urlopen(request + query)
results = json.loads(data.read())
data.close()
required_keys = ['license_permit_holder', 'license_permit_holder_name',
'license_permit_number', 'permit_issuance_date',
'permit_expiration_date', 'longitude_wgs84', 'latitude_wgs84',
'zip_code', 'borough']
for r in results:
for k in required_keys:
if not r.has_key(k):
r[k] = ''
carts.insert(name=r['license_permit_holder'],
owner=r['license_permit_holder_name'],
permit_number=r['license_permit_number'],
issuance=r['permit_issuance_date'],
expiration=r['permit_expiration_date'],
loc=[ float(r['longitude_wgs84']),
float(r['latitude_wgs84']) ],
zip_code=r['zip_code'], borough=r['borough'])
out = [c for c in carts.find()]
print len(out)
| Change cart structure and url endpoint for getting cart data | Change cart structure and url endpoint for getting cart data
| Python | bsd-3-clause | stuycs-softdev-fall-2013/proj3-7-cartwheels,stuycs-softdev-fall-2013/proj3-7-cartwheels | ---
+++
@@ -6,26 +6,32 @@
def load():
carts.remove_all()
- host = 'http://data.cityofnewyork.us/resource/xfyi-uyt5.json'
+ request = 'http://data.cityofnewyork.us/resource/akqf-qv4n.json'
- for i in range(0, 7000, 1000):
- query = 'permit_type_description=MOBILE+FOOD+UNIT&$offset=%d' % i
- request = host + '?' + query
- data = urllib2.urlopen(request)
+ for i in range(0, 24000, 1000):
+ query = '?$offset=%d' % i
+ data = urllib2.urlopen(request + query)
results = json.loads(data.read())
data.close()
- required_keys = ['longitude_wgs84', 'latitude_wgs84', 'street', 'address', 'zip_code', 'borough', 'license_permit_holder']
+ required_keys = ['license_permit_holder', 'license_permit_holder_name',
+ 'license_permit_number', 'permit_issuance_date',
+ 'permit_expiration_date', 'longitude_wgs84', 'latitude_wgs84',
+ 'zip_code', 'borough']
for r in results:
for k in required_keys:
if not r.has_key(k):
r[k] = ''
- carts.insert(lat=r['latitude_wgs84'], lng=r['longitude_wgs84'],
- address=r['address'] + ' ' + r['street'],
- zip_code=r['zip_code'], borough=r['borough'],
- name=r['license_permit_holder'])
+ carts.insert(name=r['license_permit_holder'],
+ owner=r['license_permit_holder_name'],
+ permit_number=r['license_permit_number'],
+ issuance=r['permit_issuance_date'],
+ expiration=r['permit_expiration_date'],
+ loc=[ float(r['longitude_wgs84']),
+ float(r['latitude_wgs84']) ],
+ zip_code=r['zip_code'], borough=r['borough'])
out = [c for c in carts.find()]
print len(out) |
de1c2842d7f07025f23e9b12efc7dd52e4d0efbf | device_notifications/tests/model_tests.py | device_notifications/tests/model_tests.py | from mock import patch
from django.test.testcases import TestCase
from device_notifications import settings
from device_notifications.models import AbstractBaseDevice
from device_notifications.models import InvalidDeviceType
class ConcreteTestDevice(AbstractBaseDevice):
pass
@patch.object(settings, 'get_device_model', return_value=ConcreteTestDevice)
class AbstractBaseDeviceTests(TestCase):
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='android')
message = 'Hello World'
device.send_message(message)
gcm_send_message_task.apply_async.assert_called_with(
args=[device.pk, message])
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message_bad_device_type(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='windows_phone')
self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
| from mock import patch
from django.test.testcases import TestCase
from device_notifications import settings
from device_notifications.models import AbstractBaseDevice
from device_notifications.models import InvalidDeviceType
class ConcreteTestDevice(AbstractBaseDevice):
pass
class AbstractBaseDeviceTests(TestCase):
def setUp(self):
self.get_device_model_patcher = patch.object(
settings,
'get_device_model',
return_value=ConcreteTestDevice)
self.get_device_model_patcher.start()
super(AbstractBaseDeviceTests, self).setUp()
def tearDown(self):
super(AbstractBaseDeviceTests, self).tearDown()
self.get_device_model_patcher.stop()
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='android')
message = 'Hello World'
device.send_message(message)
gcm_send_message_task.apply_async.assert_called_with(
args=[device.pk, message])
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message_bad_device_type(self, gcm_send_message_task):
device = ConcreteTestDevice(
pk=1,
device_type='windows_phone')
self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
| Patch get_device_model in the setUp and tearDown methods so that we don't send the mock object to each test method. | Patch get_device_model in the setUp and tearDown methods so that we don't send the mock object to each test method. | Python | bsd-3-clause | roverdotcom/django-device-notifications | ---
+++
@@ -12,8 +12,19 @@
pass
[email protected](settings, 'get_device_model', return_value=ConcreteTestDevice)
class AbstractBaseDeviceTests(TestCase):
+ def setUp(self):
+ self.get_device_model_patcher = patch.object(
+ settings,
+ 'get_device_model',
+ return_value=ConcreteTestDevice)
+ self.get_device_model_patcher.start()
+ super(AbstractBaseDeviceTests, self).setUp()
+
+ def tearDown(self):
+ super(AbstractBaseDeviceTests, self).tearDown()
+ self.get_device_model_patcher.stop()
+
@patch('device_notifications.models.gcm_send_message_task')
def test_send_message(self, gcm_send_message_task):
device = ConcreteTestDevice( |
b6dcb4029d3bf4b402a6874c942c9e4a105f2a62 | tracker_project/tracker_project/urls.py | tracker_project/tracker_project/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout'),
url(r'^', 'tracker_project.views.home', name='home')
)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.core.urlresolvers import reverse_lazy
urlpatterns = patterns(
'',
url(r'^$', 'tracker_project.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
url(
r'^logout/$',
'django.contrib.auth.views.logout',
{'next_page': reverse_lazy('home')},
name='logout'
),
url(r'^tracker/', include('tracker.urls', 'tracker')),
)
| Fix login and logout URLs | Fix login and logout URLs
| Python | mit | abarto/tracker_project,abarto/tracker_project,abarto/tracker_project,vivek8943/tracker_project,vivek8943/tracker_project,vivek8943/tracker_project | ---
+++
@@ -1,11 +1,18 @@
from django.conf.urls import patterns, include, url
from django.contrib import admin
+from django.core.urlresolvers import reverse_lazy
urlpatterns = patterns(
'',
+ url(r'^$', 'tracker_project.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^accounts/', include('django.contrib.auth.urls')),
- url(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/'}, name='logout'),
- url(r'^', 'tracker_project.views.home', name='home')
+ url(
+ r'^logout/$',
+ 'django.contrib.auth.views.logout',
+ {'next_page': reverse_lazy('home')},
+ name='logout'
+ ),
+ url(r'^tracker/', include('tracker.urls', 'tracker')),
) |
02140561a29a2b7fe50f7bf2402da566e60be641 | bluebottle/organizations/serializers.py | bluebottle/organizations/serializers.py | from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
| from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
| Make the name of an organization required | Make the name of an organization required
| Python | bsd-3-clause | jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle | ---
+++
@@ -14,7 +14,7 @@
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
- name = serializers.CharField(required=True, allow_blank=True)
+ name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
|
33f2075396ded90e3cf17033985f29d262965500 | dariah_static_data/management/commands/import_tadirah_vcc.py | dariah_static_data/management/commands/import_tadirah_vcc.py | from dariah_static_data.models import VCC
from dariah_static_data.management.commands._private_helper import Command as SuperCommand
class Command(SuperCommand):
filename = 'tadirah_vcc.csv'
fieldnames = ['uri', 'name', 'description']
mapping = [('name', 'name', 1), ('uri', 'uri', 1), ('description', 'description', 1)] # [('model_fieldname', 'csv_fieldname', required?),...], omit fields that are not in the model
model = VCC
| from dariah_static_data.models import TADIRAHVCC
from dariah_static_data.management.commands._private_helper import Command as SuperCommand
class Command(SuperCommand):
filename = 'tadirah_vcc.csv'
fieldnames = ['uri', 'name', 'description']
mapping = [('name', 'name', 1), ('uri', 'uri', 1), ('description', 'description', 1)] # [('model_fieldname', 'csv_fieldname', required?),...], omit fields that are not in the model
model = TADIRAHVCC
| Fix incorrect import after refactor of dariah_static_data models. | Fix incorrect import after refactor of dariah_static_data models.
| Python | apache-2.0 | DANS-KNAW/dariah-contribute,DANS-KNAW/dariah-contribute | ---
+++
@@ -1,4 +1,4 @@
-from dariah_static_data.models import VCC
+from dariah_static_data.models import TADIRAHVCC
from dariah_static_data.management.commands._private_helper import Command as SuperCommand
@@ -6,4 +6,4 @@
filename = 'tadirah_vcc.csv'
fieldnames = ['uri', 'name', 'description']
mapping = [('name', 'name', 1), ('uri', 'uri', 1), ('description', 'description', 1)] # [('model_fieldname', 'csv_fieldname', required?),...], omit fields that are not in the model
- model = VCC
+ model = TADIRAHVCC |
add508b780d16fd2da2fd0639304935b762c001f | tests/cupy_tests/binary_tests/test_packing.py | tests/cupy_tests/binary_tests/test_packing.py | import unittest
from cupy import testing
@testing.gpu
class TestPacking(unittest.TestCase):
_multiprocess_can_split_ = True
| import numpy
import unittest
from cupy import testing
@testing.gpu
class TestPacking(unittest.TestCase):
_multiprocess_can_split_ = True
@testing.for_int_dtypes()
@testing.numpy_cupy_array_equal()
def check_packbits(self, data, xp, dtype):
a = xp.array(data, dtype=dtype)
return xp.packbits(a)
@testing.numpy_cupy_array_equal()
def check_unpackbits(self, data, xp):
a = xp.array(data, dtype=xp.uint8)
return xp.unpackbits(a)
def test_packbits(self):
self.check_packbits([])
self.check_packbits([0])
self.check_packbits([1])
self.check_packbits([0, 1])
self.check_packbits([1, 0, 1, 1, 0, 1, 1, 1])
self.check_packbits([1, 0, 1, 1, 0, 1, 1, 1, 1])
self.check_packbits(numpy.arange(24).reshape((2, 3, 4)) % 2)
def test_unpackbits(self):
self.check_unpackbits([])
self.check_unpackbits([0])
self.check_unpackbits([1])
self.check_unpackbits([255])
self.check_unpackbits([100, 200, 123, 213])
| Add tests for packbits and unpackbits | Add tests for packbits and unpackbits
| Python | mit | okuta/chainer,niboshi/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,jnishi/chainer,ysekky/chainer,pfnet/chainer,wkentaro/chainer,keisuke-umezawa/chainer,ktnyt/chainer,chainer/chainer,hvy/chainer,jnishi/chainer,anaruse/chainer,wkentaro/chainer,keisuke-umezawa/chainer,ronekko/chainer,niboshi/chainer,chainer/chainer,kashif/chainer,okuta/chainer,rezoo/chainer,keisuke-umezawa/chainer,cupy/cupy,chainer/chainer,hvy/chainer,jnishi/chainer,ktnyt/chainer,niboshi/chainer,cupy/cupy,keisuke-umezawa/chainer,okuta/chainer,okuta/chainer,jnishi/chainer,hvy/chainer,wkentaro/chainer,delta2323/chainer,cupy/cupy,kiyukuta/chainer,tkerola/chainer,cupy/cupy,niboshi/chainer,hvy/chainer,aonotas/chainer,wkentaro/chainer | ---
+++
@@ -1,3 +1,4 @@
+import numpy
import unittest
from cupy import testing
@@ -7,3 +8,30 @@
class TestPacking(unittest.TestCase):
_multiprocess_can_split_ = True
+
+ @testing.for_int_dtypes()
+ @testing.numpy_cupy_array_equal()
+ def check_packbits(self, data, xp, dtype):
+ a = xp.array(data, dtype=dtype)
+ return xp.packbits(a)
+
+ @testing.numpy_cupy_array_equal()
+ def check_unpackbits(self, data, xp):
+ a = xp.array(data, dtype=xp.uint8)
+ return xp.unpackbits(a)
+
+ def test_packbits(self):
+ self.check_packbits([])
+ self.check_packbits([0])
+ self.check_packbits([1])
+ self.check_packbits([0, 1])
+ self.check_packbits([1, 0, 1, 1, 0, 1, 1, 1])
+ self.check_packbits([1, 0, 1, 1, 0, 1, 1, 1, 1])
+ self.check_packbits(numpy.arange(24).reshape((2, 3, 4)) % 2)
+
+ def test_unpackbits(self):
+ self.check_unpackbits([])
+ self.check_unpackbits([0])
+ self.check_unpackbits([1])
+ self.check_unpackbits([255])
+ self.check_unpackbits([100, 200, 123, 213]) |
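For context, a small NumPy-only sketch of the packbits/unpackbits behaviour these new tests exercise (the premise of numpy_cupy_array_equal is that CuPy mirrors the NumPy result here); the sample values are illustrative.
import numpy as np

bits = np.array([1, 0, 1, 1, 0, 1, 1, 1, 1], dtype=np.uint8)
packed = np.packbits(bits)   # trailing bits are zero-padded up to a full byte
print(packed)                # [183 128]  i.e. 0b10110111, 0b10000000
print(np.unpackbits(np.array([255], dtype=np.uint8)))   # [1 1 1 1 1 1 1 1]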
09ef3ba394faf9edc941e30e5c3f86bffa96d645 | plugins/eightball.py | plugins/eightball.py | # Copyright (c) 2013-2014 Molly White
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from plugins.util import command
from random import choice
@command("8ball", "8-ball")
def eightball(m):
"""Returns 8-ball advice."""
with open(m.bot.base_path + '/plugins/responses/8ball.txt', 'r') as replies:
lines = replies.read().splitlines()
m.bot.private_message(m.location, choice(lines)) | # Copyright (c) 2013-2014 Molly White
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from plugins.util import command
from random import choice
@command("8ball", "8-ball")
def eightball(m):
"""Returns 8-ball advice."""
#- !8ball [question]
#-
#- ```irc
#- < GorillaWarfare> !8ball
#- < GorillaBot> Most likely.
#- ```
#-
#- Returns a magic 8 ball response.
with open(m.bot.base_path + '/plugins/responses/8ball.txt', 'r') as replies:
lines = replies.read().splitlines()
m.bot.private_message(m.location, choice(lines)) | Add documentation for 8ball command | Add documentation for 8ball command
| Python | mit | quanticle/GorillaBot,molly/GorillaBot,quanticle/GorillaBot,molly/GorillaBot | ---
+++
@@ -21,6 +21,16 @@
@command("8ball", "8-ball")
def eightball(m):
"""Returns 8-ball advice."""
+
+ #- !8ball [question]
+ #-
+ #- ```irc
+ #- < GorillaWarfare> !8ball
+ #- < GorillaBot> Most likely.
+ #- ```
+ #-
+ #- Returns a magic 8 ball response.
+
with open(m.bot.base_path + '/plugins/responses/8ball.txt', 'r') as replies:
lines = replies.read().splitlines()
m.bot.private_message(m.location, choice(lines)) |
ee6df56514c2d592a527719b60abf42028e07420 | tests/test_japanese.py | tests/test_japanese.py | from nose.tools import eq_, assert_almost_equal
from wordfreq import tokenize, word_frequency
def test_tokens():
eq_(tokenize('おはようございます', 'ja'),
['おはよう', 'ござい', 'ます'])
def test_combination():
ohayou_freq = word_frequency('おはよう', 'ja')
gozai_freq = word_frequency('ござい', 'ja')
masu_freq = word_frequency('ます', 'ja')
assert_almost_equal(
word_frequency('おはようおはよう', 'ja'),
ohayou_freq / 2
)
assert_almost_equal(
1.0 / word_frequency('おはようございます', 'ja'),
(1.0 / ohayou_freq + 1.0 / gozai_freq + 1.0 / masu_freq)
)
| from nose.tools import eq_, assert_almost_equal
from wordfreq import tokenize, word_frequency
def test_tokens():
eq_(tokenize('おはようございます', 'ja'),
['おはよう', 'ござい', 'ます'])
def test_combination():
ohayou_freq = word_frequency('おはよう', 'ja')
gozai_freq = word_frequency('ござい', 'ja')
masu_freq = word_frequency('ます', 'ja')
assert_almost_equal(
word_frequency('おはようおはよう', 'ja'),
ohayou_freq / 2
)
assert_almost_equal(
1.0 / word_frequency('おはようございます', 'ja'),
1.0 / ohayou_freq + 1.0 / gozai_freq + 1.0 / masu_freq
)
| Revert a small syntax change introduced by a circular series of changes. | Revert a small syntax change introduced by a circular series of changes.
Former-commit-id: 09597b7cf33f4c1692f48d08a535bdbc45042cde | Python | mit | LuminosoInsight/wordfreq | ---
+++
@@ -18,6 +18,6 @@
)
assert_almost_equal(
1.0 / word_frequency('おはようございます', 'ja'),
- (1.0 / ohayou_freq + 1.0 / gozai_freq + 1.0 / masu_freq)
+ 1.0 / ohayou_freq + 1.0 / gozai_freq + 1.0 / masu_freq
)
|
421ce72069c269be3207fbc32022c3f4471654ba | examples/sentiment/download.py | examples/sentiment/download.py | #!/usr/bin/env python
import os
import os.path
import urllib
import zipfile
urllib.urlretrieve(
'http://nlp.stanford.edu/sentiment/trainDevTestTrees_PTB.zip',
'trainDevTestTrees_PTB.zip')
zf = zipfile.ZipFile('trainDevTestTrees_PTB.zip')
for name in zf.namelist():
(dirname, filename) = os.path.split(name)
if not filename == '':
zf.extract(name, '.')
| #!/usr/bin/env python
import os
import os.path
import urllib
import zipfile
urllib.urlretrieve(
'http://nlp.stanford.edu/sentiment/trainDevTestTrees_PTB.zip',
'trainDevTestTrees_PTB.zip')
zf = zipfile.ZipFile('trainDevTestTrees_PTB.zip')
for name in zf.namelist():
(dirname, filename) = os.path.split(name)
if not filename == '':
zf.extract(name, '.')
| Add one more empty line to end of import to adjust to H306 | Add one more empty line to end of import to adjust to H306
| Python | mit | keisuke-umezawa/chainer,cemoody/chainer,woodshop/complex-chainer,ikasumi/chainer,kiyukuta/chainer,cupy/cupy,anaruse/chainer,delta2323/chainer,t-abe/chainer,t-abe/chainer,hvy/chainer,wkentaro/chainer,ronekko/chainer,wkentaro/chainer,kuwa32/chainer,1986ks/chainer,hidenori-t/chainer,benob/chainer,cupy/cupy,chainer/chainer,bayerj/chainer,cupy/cupy,pfnet/chainer,chainer/chainer,niboshi/chainer,hvy/chainer,elviswf/chainer,tereka114/chainer,AlpacaDB/chainer,wkentaro/chainer,chainer/chainer,keisuke-umezawa/chainer,rezoo/chainer,jnishi/chainer,tkerola/chainer,ysekky/chainer,niboshi/chainer,minhpqn/chainer,jnishi/chainer,muupan/chainer,chainer/chainer,ktnyt/chainer,kashif/chainer,masia02/chainer,ytoyama/yans_chainer_hackathon,okuta/chainer,sou81821/chainer,keisuke-umezawa/chainer,laysakura/chainer,wkentaro/chainer,sinhrks/chainer,umitanuki/chainer,Kaisuke5/chainer,truongdq/chainer,kikusu/chainer,jnishi/chainer,ktnyt/chainer,benob/chainer,muupan/chainer,okuta/chainer,wavelets/chainer,AlpacaDB/chainer,niboshi/chainer,sinhrks/chainer,woodshop/chainer,jnishi/chainer,hvy/chainer,ktnyt/chainer,aonotas/chainer,yanweifu/chainer,truongdq/chainer,ktnyt/chainer,okuta/chainer,jfsantos/chainer,cupy/cupy,tscohen/chainer,keisuke-umezawa/chainer,kikusu/chainer,niboshi/chainer,okuta/chainer,hvy/chainer,tigerneil/chainer | ---
+++
@@ -3,6 +3,7 @@
import os.path
import urllib
import zipfile
+
urllib.urlretrieve(
'http://nlp.stanford.edu/sentiment/trainDevTestTrees_PTB.zip', |
7c5061e4fbf0737ce07f13cb9102cdbbacf73115 | pyethapp/tests/test_genesis.py | pyethapp/tests/test_genesis.py | import pytest
from ethereum import blocks
from ethereum.db import DB
from ethereum.config import Env
from pyethapp.utils import merge_dict
from pyethapp.utils import update_config_from_genesis_json
import pyethapp.config as konfig
from pyethapp.profiles import PROFILES
def check_genesis(profile):
config = dict(eth=dict())
# Set config values based on profile selection
merge_dict(config, PROFILES[profile])
# Load genesis config
update_config_from_genesis_json(config, config['eth']['genesis'])
konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}})
print config['eth'].keys()
bc = config['eth']['block']
print bc.keys()
env = Env(DB(), bc)
genesis = blocks.genesis(env)
print 'genesis.hash', genesis.hash.encode('hex')
print 'expected', config['eth']['genesis_hash']
assert genesis.hash == config['eth']['genesis_hash'].decode('hex')
@pytest.mark.xfail # FIXME
def test_olympic():
check_genesis('olympic')
def test_frontier():
check_genesis('frontier')
if __name__ == '__main__':
test_genesis()
| from pprint import pprint
import pytest
from ethereum import blocks
from ethereum.db import DB
from ethereum.config import Env
from pyethapp.utils import merge_dict
from pyethapp.utils import update_config_from_genesis_json
import pyethapp.config as konfig
from pyethapp.profiles import PROFILES
@pytest.mark.parametrize('profile', PROFILES.keys())
def test_profile(profile):
config = dict(eth=dict())
konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}})
# Set config values based on profile selection
merge_dict(config, PROFILES[profile])
# Load genesis config
update_config_from_genesis_json(config, config['eth']['genesis'])
bc = config['eth']['block']
pprint(bc)
env = Env(DB(), bc)
genesis = blocks.genesis(env)
assert genesis.hash.encode('hex') == config['eth']['genesis_hash']
| Fix & cleanup profile genesis tests | Fix & cleanup profile genesis tests
| Python | mit | ethereum/pyethapp,gsalgado/pyethapp,gsalgado/pyethapp,changwu-tw/pyethapp,RomanZacharia/pyethapp,changwu-tw/pyethapp,RomanZacharia/pyethapp,ethereum/pyethapp | ---
+++
@@ -1,3 +1,4 @@
+from pprint import pprint
import pytest
from ethereum import blocks
from ethereum.db import DB
@@ -8,8 +9,11 @@
from pyethapp.profiles import PROFILES
-def check_genesis(profile):
+@pytest.mark.parametrize('profile', PROFILES.keys())
+def test_profile(profile):
config = dict(eth=dict())
+
+ konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}})
# Set config values based on profile selection
merge_dict(config, PROFILES[profile])
@@ -17,27 +21,9 @@
# Load genesis config
update_config_from_genesis_json(config, config['eth']['genesis'])
- konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}})
-
- print config['eth'].keys()
bc = config['eth']['block']
- print bc.keys()
+ pprint(bc)
env = Env(DB(), bc)
genesis = blocks.genesis(env)
- print 'genesis.hash', genesis.hash.encode('hex')
- print 'expected', config['eth']['genesis_hash']
- assert genesis.hash == config['eth']['genesis_hash'].decode('hex')
-
-
-@pytest.mark.xfail  # FIXME
-def test_olympic():
- check_genesis('olympic')
-
-
-def test_frontier():
- check_genesis('frontier')
-
-
-if __name__ == '__main__':
- test_genesis()
+ assert genesis.hash.encode('hex') == config['eth']['genesis_hash'] |
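A minimal, self-contained sketch of the pytest.mark.parametrize pattern the rewritten test uses, where one test function is expanded into one case per profile key; the PROFILES dict here is invented for illustration.
import pytest

PROFILES = {'frontier': {'genesis_hash': 'aa'}, 'olympic': {'genesis_hash': 'bb'}}  # illustrative

@pytest.mark.parametrize('profile', PROFILES.keys())
def test_profile_has_genesis_hash(profile):
    # collected as test_profile_has_genesis_hash[frontier] and ...[olympic]
    assert 'genesis_hash' in PROFILES[profile]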
71d42d763bdb2d0c1bd8474a4da99695d5b77f91 | alg_selection_sort.py | alg_selection_sort.py | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(nums):
"""Selection Sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
# Start from the last num, iteratively select next max num to swap them.
for i in reversed(range(len(nums))):
max_i = 0
for j in range(1, i + 1):
# Update max pos max_i to get max num in loop i.
if nums[j] > nums[max_i]:
max_i = j
nums[max_i], nums[i] = nums[i], nums[max_i]
def main():
nums = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('By selection sort: ')
selection_sort(nums)
print(nums)
if __name__ == '__main__':
main()
| from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(nums):
"""Selection Sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
# Start from the last num, iteratively select max num to swap.
for i in reversed(range(len(nums))):
i_max = 0
for j in range(1, i + 1):
if nums[j] > nums[i_max]:
i_max = j
nums[i_max], nums[i] = nums[i], nums[i_max]
def main():
nums = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('By selection sort: ')
selection_sort(nums)
print(nums)
if __name__ == '__main__':
main()
| Revise to i_max and enhance comments | Revise to i_max and enhance comments
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | ---
+++
@@ -9,15 +9,13 @@
Time complexity: O(n^2).
Space complexity: O(1).
"""
- # Start from the last num, iteratively select next max num to swap them.
+ # Start from the last num, iteratively select max num to swap.
for i in reversed(range(len(nums))):
- max_i = 0
+ i_max = 0
for j in range(1, i + 1):
- # Update max pos max_i to get max num in loop i.
- if nums[j] > nums[max_i]:
- max_i = j
-
- nums[max_i], nums[i] = nums[i], nums[max_i]
+ if nums[j] > nums[i_max]:
+ i_max = j
+ nums[i_max], nums[i] = nums[i], nums[i_max]
def main(): |
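A short worked example of the revised selection_sort on the list used in main(), spelling out the invariant that each outer pass moves the current maximum to the end of the unsorted prefix; the intermediate state shown below is hand-checked, not output captured from the repository.
nums = [54, 26, 93, 17, 77, 31, 44, 55, 20]
# First outer pass (i = 8): i_max ends up at index 2 (value 93), which is
# swapped with index 8, giving [54, 26, 20, 17, 77, 31, 44, 55, 93].
# Each later pass repeats this on a prefix one element shorter, ending with:
expected = [17, 20, 26, 31, 44, 54, 55, 77, 93]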
a8d3790e4ef539c2a833fa493aeef4456b4a5dbb | unchecked_repos.py | unchecked_repos.py | #!/usr/bin/env python
"""List repos missing from repos.yaml."""
from __future__ import print_function
import yaml
from helpers import paginated_get
REPOS_URL = "https://api.github.com/orgs/{org}/repos"
# This is hacky; you need to have repo-tools-data cloned locally one dir up.
# To do this properly, you should use yamldata.py
with open("../repo-tools-data/repos.yaml") as repos_yaml:
tracked_repos = yaml.load(repos_yaml)
repos = list(paginated_get(REPOS_URL.format(org="edX")))
shown_any = False
for r in repos:
if not r['private'] and not r['fork']:
if r['full_name'] not in tracked_repos:
if not shown_any:
print("\n### Untracked repos:")
print("{r[full_name]}: {r[description]}".format(r=r))
shown_any = True
shown_any = False
actual_repos = set(r['full_name'] for r in repos)
for tracked in tracked_repos:
if tracked not in actual_repos:
if not shown_any:
print("\n### Disappeared repos:")
print(tracked)
shown_any = True
| #!/usr/bin/env python
"""List repos missing from repos.yaml."""
from __future__ import print_function
import yaml
from helpers import paginated_get
REPOS_URL = "https://api.github.com/orgs/{org}/repos"
# This is hacky; you need to have repo-tools-data cloned locally one dir up.
# To do this properly, you should use yamldata.py
with open("../repo-tools-data/repos.yaml") as repos_yaml:
tracked_repos = yaml.load(repos_yaml)
ORGS = ["edX", "edx-solutions"]
repos = []
for org in ORGS:
repos.extend(paginated_get(REPOS_URL.format(org=org)))
shown_any = False
for r in repos:
if not r['private'] and not r['fork']:
if r['full_name'] not in tracked_repos:
if not shown_any:
print("\n### Untracked repos:")
print("{r[full_name]}: {r[description]}".format(r=r))
shown_any = True
shown_any = False
actual_repos = set(r['full_name'] for r in repos)
for tracked in tracked_repos:
if tracked not in actual_repos:
if not shown_any:
print("\n### Disappeared repos:")
print(tracked)
shown_any = True
| Check for unchecked repos in more than just the edx org | Check for unchecked repos in more than just the edx org
| Python | apache-2.0 | edx/repo-tools,edx/repo-tools | ---
+++
@@ -14,7 +14,10 @@
with open("../repo-tools-data/repos.yaml") as repos_yaml:
tracked_repos = yaml.load(repos_yaml)
-repos = list(paginated_get(REPOS_URL.format(org="edX")))
+ORGS = ["edX", "edx-solutions"]
+repos = []
+for org in ORGS:
+ repos.extend(paginated_get(REPOS_URL.format(org=org)))
shown_any = False
for r in repos: |
4a9d1a373b5a460f1e793dd94d0c248e81b75f40 | website/addons/box/settings/defaults.py | website/addons/box/settings/defaults.py | # OAuth app keys
BOX_KEY = None
BOX_SECRET = None
BOX_AUTH_CSRF_TOKEN = 'box-auth-csrf-token'
| # OAuth app keys
BOX_KEY = None
BOX_SECRET = None
BOX_OAUTH_TOKEN_ENDPOINT = 'https://www.box.com/api/oauth2/token'
BOX_OAUTH_AUTH_ENDPOINT = 'https://www.box.com/api/oauth2/authorize'
| Add oauth endpoints to settings | Add oauth endpoints to settings
| Python | apache-2.0 | caneruguz/osf.io,GageGaskins/osf.io,GageGaskins/osf.io,ticklemepierce/osf.io,ticklemepierce/osf.io,kwierman/osf.io,samchrisinger/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,DanielSBrown/osf.io,zkraime/osf.io,MerlinZhang/osf.io,mfraezz/osf.io,pattisdr/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,fabianvf/osf.io,RomanZWang/osf.io,brandonPurvis/osf.io,barbour-em/osf.io,erinspace/osf.io,alexschiller/osf.io,abought/osf.io,baylee-d/osf.io,zamattiac/osf.io,petermalcolm/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,ZobairAlijan/osf.io,barbour-em/osf.io,billyhunt/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,caseyrygt/osf.io,cosenal/osf.io,jnayak1/osf.io,ZobairAlijan/osf.io,lyndsysimon/osf.io,jinluyuan/osf.io,monikagrabowska/osf.io,cldershem/osf.io,Ghalko/osf.io,TomHeatwole/osf.io,ckc6cz/osf.io,leb2dg/osf.io,GaryKriebel/osf.io,jolene-esposito/osf.io,cosenal/osf.io,alexschiller/osf.io,caseyrollins/osf.io,aaxelb/osf.io,Nesiehr/osf.io,binoculars/osf.io,cwisecarver/osf.io,KAsante95/osf.io,felliott/osf.io,barbour-em/osf.io,adlius/osf.io,himanshuo/osf.io,SSJohns/osf.io,doublebits/osf.io,himanshuo/osf.io,petermalcolm/osf.io,lamdnhan/osf.io,lamdnhan/osf.io,mluo613/osf.io,zachjanicki/osf.io,ckc6cz/osf.io,KAsante95/osf.io,kwierman/osf.io,emetsger/osf.io,ZobairAlijan/osf.io,sbt9uc/osf.io,felliott/osf.io,mluke93/osf.io,reinaH/osf.io,cslzchen/osf.io,samanehsan/osf.io,asanfilippo7/osf.io,jeffreyliu3230/osf.io,DanielSBrown/osf.io,samanehsan/osf.io,ticklemepierce/osf.io,cldershem/osf.io,ZobairAlijan/osf.io,icereval/osf.io,brandonPurvis/osf.io,danielneis/osf.io,baylee-d/osf.io,amyshi188/osf.io,icereval/osf.io,reinaH/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,kch8qx/osf.io,dplorimer/osf,cslzchen/osf.io,jnayak1/osf.io,samchrisinger/osf.io,zkraime/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,sbt9uc/osf.io,billyhunt/osf.io,crcresearch/osf.io,mluo613/osf.io,chrisseto/osf.io,doublebits/osf.io,jeffreyliu3230/osf.io,pattisdr/osf.io,MerlinZhang/osf.io,danielneis/osf.io,dplorimer/osf,danielneis/osf.io,chrisseto/osf.io,abought/osf.io,zamattiac/osf.io,hmoco/osf.io,kushG/osf.io,chrisseto/osf.io,jolene-esposito/osf.io,reinaH/osf.io,abought/osf.io,aaxelb/osf.io,kch8qx/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,SSJohns/osf.io,njantrania/osf.io,Johnetordoff/osf.io,haoyuchen1992/osf.io,acshi/osf.io,doublebits/osf.io,mluo613/osf.io,jeffreyliu3230/osf.io,Johnetordoff/osf.io,sbt9uc/osf.io,ckc6cz/osf.io,hmoco/osf.io,cslzchen/osf.io,chennan47/osf.io,amyshi188/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,erinspace/osf.io,RomanZWang/osf.io,mfraezz/osf.io,bdyetton/prettychart,zamattiac/osf.io,caneruguz/osf.io,leb2dg/osf.io,KAsante95/osf.io,cwisecarver/osf.io,rdhyee/osf.io,barbour-em/osf.io,GaryKriebel/osf.io,MerlinZhang/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,rdhyee/osf.io,Nesiehr/osf.io,jnayak1/osf.io,chrisseto/osf.io,Ghalko/osf.io,crcresearch/osf.io,dplorimer/osf,HarryRybacki/osf.io,mattclark/osf.io,cosenal/osf.io,caseyrollins/osf.io,fabianvf/osf.io,wearpants/osf.io,HarryRybacki/osf.io,chennan47/osf.io,caneruguz/osf.io,Ghalko/osf.io,Nesiehr/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,emetsger/osf.io,SSJohns/osf.io,jeffreyliu3230/osf.io,HarryRybacki/osf.io,RomanZWang/osf.io,caseyrollins/osf.io,kushG/osf.io,GageGaskins/osf.io,TomBaxter/osf.io,MerlinZhang/osf.io,crcresearch/osf.io,haoyuchen1992/osf.io,DanielSBrown/osf.io,lyndsysimon/osf.io,wearpants/osf.io,sloria/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,icereval/osf.io,mluke93/osf.io,njantrania/osf.io,fabianvf/o
sf.io,revanthkolli/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,jinluyuan/osf.io,kwierman/osf.io,jmcarp/osf.io,zkraime/osf.io,kwierman/osf.io,aaxelb/osf.io,TomHeatwole/osf.io,TomHeatwole/osf.io,Johnetordoff/osf.io,billyhunt/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,acshi/osf.io,HarryRybacki/osf.io,lyndsysimon/osf.io,billyhunt/osf.io,mattclark/osf.io,mluke93/osf.io,Ghalko/osf.io,hmoco/osf.io,petermalcolm/osf.io,njantrania/osf.io,acshi/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,mluo613/osf.io,amyshi188/osf.io,acshi/osf.io,cldershem/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,billyhunt/osf.io,jolene-esposito/osf.io,jmcarp/osf.io,jmcarp/osf.io,wearpants/osf.io,RomanZWang/osf.io,jinluyuan/osf.io,caseyrygt/osf.io,arpitar/osf.io,pattisdr/osf.io,danielneis/osf.io,aaxelb/osf.io,saradbowman/osf.io,bdyetton/prettychart,doublebits/osf.io,baylee-d/osf.io,cosenal/osf.io,njantrania/osf.io,kushG/osf.io,emetsger/osf.io,laurenrevere/osf.io,amyshi188/osf.io,sloria/osf.io,GaryKriebel/osf.io,samanehsan/osf.io,bdyetton/prettychart,revanthkolli/osf.io,GaryKriebel/osf.io,himanshuo/osf.io,hmoco/osf.io,monikagrabowska/osf.io,revanthkolli/osf.io,binoculars/osf.io,ckc6cz/osf.io,asanfilippo7/osf.io,rdhyee/osf.io,wearpants/osf.io,zachjanicki/osf.io,himanshuo/osf.io,mattclark/osf.io,caneruguz/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,abought/osf.io,jmcarp/osf.io,fabianvf/osf.io,kch8qx/osf.io,revanthkolli/osf.io,alexschiller/osf.io,CenterForOpenScience/osf.io,kushG/osf.io,jnayak1/osf.io,RomanZWang/osf.io,brandonPurvis/osf.io,arpitar/osf.io,chennan47/osf.io,dplorimer/osf,CenterForOpenScience/osf.io,arpitar/osf.io,bdyetton/prettychart,mfraezz/osf.io,rdhyee/osf.io,HalcyonChimera/osf.io,zkraime/osf.io,KAsante95/osf.io,leb2dg/osf.io,mluo613/osf.io,TomBaxter/osf.io,sloria/osf.io,adlius/osf.io,petermalcolm/osf.io,caseyrygt/osf.io,arpitar/osf.io,laurenrevere/osf.io,lyndsysimon/osf.io,samanehsan/osf.io,samchrisinger/osf.io,mfraezz/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,adlius/osf.io,jolene-esposito/osf.io,jinluyuan/osf.io,alexschiller/osf.io,lamdnhan/osf.io,DanielSBrown/osf.io,kch8qx/osf.io,brianjgeiger/osf.io,doublebits/osf.io,saradbowman/osf.io,KAsante95/osf.io,adlius/osf.io,erinspace/osf.io,felliott/osf.io,zamattiac/osf.io,sbt9uc/osf.io,GageGaskins/osf.io,reinaH/osf.io,monikagrabowska/osf.io,cldershem/osf.io,brandonPurvis/osf.io,acshi/osf.io,mluke93/osf.io,caseyrygt/osf.io,lamdnhan/osf.io | ---
+++
@@ -2,4 +2,5 @@
BOX_KEY = None
BOX_SECRET = None
-BOX_AUTH_CSRF_TOKEN = 'box-auth-csrf-token'
+BOX_OAUTH_TOKEN_ENDPOINT = 'https://www.box.com/api/oauth2/token'
+BOX_OAUTH_AUTH_ENDPOINT = 'https://www.box.com/api/oauth2/authorize' |
62d7c94968d70564839b32375fac6608720c2a67 | backend/pycon/urls.py | backend/pycon/urls.py | from api.views import GraphQLView
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
path("admin/", admin.site.urls),
path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"),
path("user/", include("users.urls")),
path("", include("social_django.urls", namespace="social")),
path("", include("payments.urls")),
]
| from api.views import GraphQLView
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
urlpatterns = [
path("admin/", admin.site.urls),
path("graphql", csrf_exempt(GraphQLView.as_view()), name="graphql"),
path("user/", include("users.urls")),
path("", include("social_django.urls", namespace="social")),
path("", include("payments.urls")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| Add media url when running in debug mode | Add media url when running in debug mode
| Python | mit | patrick91/pycon,patrick91/pycon | ---
+++
@@ -1,4 +1,6 @@
from api.views import GraphQLView
+from django.conf import settings
+from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
from django.views.decorators.csrf import csrf_exempt
@@ -9,4 +11,4 @@
path("user/", include("users.urls")),
path("", include("social_django.urls", namespace="social")),
path("", include("payments.urls")),
-]
+] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) |
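For context, a hedged sketch of why appending static() is usually a development-only concern: Django's static() helper returns URL patterns only when settings.DEBUG is true, so the addition is effectively a no-op in production. The settings values below are illustrative.
# settings.py (illustrative)
DEBUG = True
MEDIA_URL = '/media/'
MEDIA_ROOT = '/var/app/media'

# urls.py
from django.conf import settings
from django.conf.urls.static import static

urlpatterns = [
    # ... project routes ...
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)  # [] unless DEBUG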
7185c5ef58757949197081808bf237f0111e7a86 | packages/mono.py | packages/mono.py | class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '2.10.6',
sources = [
'http://download.mono-project.com/sources/%{name}/%{name}-%{version}.tar.bz2',
'patches/mono-runtime-relocation.patch'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-quiet-build'
]
)
# Mono (in libgc) likes to fail to build randomly
self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'
# def prep (self):
# Package.prep (self)
# self.sh ('patch -p1 < "%{sources[1]}"')
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
| class MonoPackage (Package):
def __init__ (self):
Package.__init__ (self, 'mono', '2.10.6',
sources = [
'http://download.mono-project.com/sources/%{name}/%{name}-%{version}.tar.bz2',
'patches/mono-runtime-relocation.patch'
],
configure_flags = [
'--with-jit=yes',
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
'--enable-quiet-build',
]
)
if Package.profile.name == 'darwin':
self.configure_flags.extend ([
# fix build on lion, it uses 64-bit host even with -m32
'--build=i386-apple-darwin11.2.0',
])
# Mono (in libgc) likes to fail to build randomly
self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done'
# def prep (self):
# Package.prep (self)
# self.sh ('patch -p1 < "%{sources[1]}"')
def install (self):
Package.install (self)
if Package.profile.name == 'darwin':
self.sh ('sed -ie "s/libcairo.so.2/libcairo.2.dylib/" "%{prefix}/etc/mono/config"')
MonoPackage ()
| Fix building Mono 32-bit with Mac 10.7 SDK | Fix building Mono 32-bit with Mac 10.7 SDK | Python | mit | mono/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,bl8/bockbuild,bl8/bockbuild,mono/bockbuild | ---
+++
@@ -10,9 +10,14 @@
'--with-ikvm=no',
'--with-mcs-docs=no',
'--with-moonlight=no',
- '--enable-quiet-build'
+ '--enable-quiet-build',
]
)
+ if Package.profile.name == 'darwin':
+ self.configure_flags.extend ([
+ # fix build on lion, it uses 64-bit host even with -m32
+ '--build=i386-apple-darwin11.2.0',
+ ])
# Mono (in libgc) likes to fail to build randomly
self.make = 'for i in 1 2 3 4 5 6 7 8 9 10; do make && break; done' |
6246c26365b2df4cbb91142969aa857c7187e094 | app/test_base.py | app/test_base.py | from flask.ext.testing import TestCase
import unittest
from app import app, db
class BaseTestCase(TestCase):
def create_app(self):
app.config.from_object('config.TestingConfiguration')
return app
def setUp(self):
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def login(self, username, password):
return self.client.post('/login', data=dict(
username=username,
password=password
), follow_redirects=True)
def logout(self):
return self.client.get('/logout', follow_redirects=True)
if __name__ == '__main__':
unittest.main()
| from flask.ext.testing import TestCase
import unittest
from app import create_app, db
class BaseTestCase(TestCase):
def create_app(self):
return create_app('config.TestingConfiguration')
def setUp(self):
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def login(self, username, password):
return self.client.post('/login', data=dict(
username=username,
password=password
), follow_redirects=True)
def logout(self):
return self.client.get('/logout', follow_redirects=True)
if __name__ == '__main__':
unittest.main()
| Update tests to leverage factory pattern | Update tests to leverage factory pattern
| Python | mit | rtfoley/scorepy,rtfoley/scorepy,rtfoley/scorepy | ---
+++
@@ -1,11 +1,10 @@
from flask.ext.testing import TestCase
import unittest
-from app import app, db
+from app import create_app, db
class BaseTestCase(TestCase):
def create_app(self):
- app.config.from_object('config.TestingConfiguration')
- return app
+ return create_app('config.TestingConfiguration')
def setUp(self):
db.create_all() |
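A generic sketch of the application-factory pattern the updated test depends on, with invented config names: create_app() builds and configures a fresh Flask instance per call instead of mutating a module-level app.
from flask import Flask

class TestingConfiguration(object):  # stand-in for config.TestingConfiguration
    TESTING = True

def create_app(config_object):
    app = Flask(__name__)
    app.config.from_object(config_object)   # accepts an object or an import string
    # extensions would be bound here via their init_app(app) hooks
    return app

app = create_app(TestingConfiguration)
assert app.config['TESTING'] is True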
fcd98cc714b5a790eaf2e946c492ab4e14700568 | scripts/award_badge_to_user.py | scripts/award_badge_to_user.py | #!/usr/bin/env python
"""Award a badge to a user.
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.services.user_badge import service as badge_service
from byceps.util.system import get_config_filename_from_env_or_exit
from bootstrap.validators import validate_user_screen_name
from bootstrap.util import app_context
@click.command()
@click.argument('badge_slug')
@click.argument('user', callback=validate_user_screen_name)
def execute(badge_slug, user):
badge = badge_service.find_badge_by_slug(badge_slug)
if badge is None:
raise click.BadParameter('Unknown badge slug "{}".'.format(badge_slug))
click.echo('Awarding badge "{}" to user "{}" ... '
.format(badge.label, user.screen_name), nl=False)
badge_service.award_badge_to_user(badge.id, user.id)
click.secho('done.', fg='green')
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
| #!/usr/bin/env python
"""Award a badge to a user.
:Copyright: 2006-2018 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import click
from byceps.database import db
from byceps.services.user_badge.models.badge import Badge, BadgeID
from byceps.services.user_badge import service as badge_service
from byceps.util.system import get_config_filename_from_env_or_exit
from bootstrap.validators import validate_user_screen_name
from bootstrap.util import app_context
@click.command()
@click.argument('badge_slug')
@click.argument('user', callback=validate_user_screen_name)
def execute(badge_slug, user):
badge_id = find_badge_id_for_badge_slug(badge_slug)
click.echo('Awarding badge "{}" to user "{}" ... '
.format(badge_slug, user.screen_name), nl=False)
badge_service.award_badge_to_user(badge_id, user.id)
click.secho('done.', fg='green')
def find_badge_id_for_badge_slug(slug: str) -> BadgeID:
"""Finde the badge with that slug and return its ID, or raise an
error if not found.
"""
badge_id = db.session \
.query(Badge.id) \
.filter_by(slug=slug) \
.scalar()
if badge_id is None:
raise click.BadParameter('Unknown badge slug "{}".'.format(slug))
return badge_id
if __name__ == '__main__':
config_filename = get_config_filename_from_env_or_exit()
with app_context(config_filename):
execute()
| Change script to avoid creation of badge URLs to make it work outside of a *party-specific* app context | Change script to avoid creation of badge URLs to make it work outside of a *party-specific* app context
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps | ---
+++
@@ -8,6 +8,8 @@
import click
+from byceps.database import db
+from byceps.services.user_badge.models.badge import Badge, BadgeID
from byceps.services.user_badge import service as badge_service
from byceps.util.system import get_config_filename_from_env_or_exit
@@ -19,17 +21,29 @@
@click.argument('badge_slug')
@click.argument('user', callback=validate_user_screen_name)
def execute(badge_slug, user):
- badge = badge_service.find_badge_by_slug(badge_slug)
-
- if badge is None:
- raise click.BadParameter('Unknown badge slug "{}".'.format(badge_slug))
+ badge_id = find_badge_id_for_badge_slug(badge_slug)
click.echo('Awarding badge "{}" to user "{}" ... '
- .format(badge.label, user.screen_name), nl=False)
+ .format(badge_slug, user.screen_name), nl=False)
- badge_service.award_badge_to_user(badge.id, user.id)
+ badge_service.award_badge_to_user(badge_id, user.id)
click.secho('done.', fg='green')
+
+
+def find_badge_id_for_badge_slug(slug: str) -> BadgeID:
+ """Finde the badge with that slug and return its ID, or raise an
+ error if not found.
+ """
+ badge_id = db.session \
+ .query(Badge.id) \
+ .filter_by(slug=slug) \
+ .scalar()
+
+ if badge_id is None:
+ raise click.BadParameter('Unknown badge slug "{}".'.format(slug))
+
+ return badge_id
if __name__ == '__main__': |
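A self-contained sketch of the single-column .scalar() lookup the new helper performs, using a throwaway in-memory SQLite model rather than the project's Badge table; it assumes SQLAlchemy 1.4 or newer.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Badge(Base):  # stand-in model, not the project's
    __tablename__ = 'badges'
    id = Column(Integer, primary_key=True)
    slug = Column(String, unique=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Badge(id=1, slug='first-post'))
    session.commit()
    # .scalar() returns the single selected column value, or None when no row matches
    assert session.query(Badge.id).filter_by(slug='first-post').scalar() == 1
    assert session.query(Badge.id).filter_by(slug='missing').scalar() is None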
8332dc01c3c743543f4c3faff44da84436ae5da2 | planner/forms.py | planner/forms.py | from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
from .models import PoolingUser
from users.forms import UserCreationForm
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
"""
Pay attention that id fields are meant to be hidden, since we suppose they come from
an autocomplete AJAX request via an another CharField.
"""
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
class PoolingUserForm(forms.ModelForm):
class Meta:
model = PoolingUser
# Exclude the one-to-one relation with User
fields = ['birth_date', 'driving_license', 'cellphone_number']
class UserForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
fields = ('email', 'first_name', 'last_name')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field_name in self.Meta.fields:
self[field_name].field.required = True
self['password1'].field.validators = [MinLengthValidator(6)]
| from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
from .models import PoolingUser, Trip, Step
from users.forms import UserCreationForm
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
"""
Pay attention that id fields are meant to be hidden, since we suppose they come from
an autocomplete AJAX request via an another CharField.
"""
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
class PoolingUserForm(forms.ModelForm):
class Meta:
model = PoolingUser
# Exclude the one-to-one relation with User
fields = ['birth_date', 'driving_license', 'cellphone_number']
class TripForm(forms.ModelForm):
class Meta:
model = Trip
fields = ['date_origin', 'max_num_passengers']
class StepForm(forms.ModelForm):
class Meta:
model = Step
fields = ['origin', 'destination', 'hour_origin', 'hour_destination', 'max_price']
class UserForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
fields = ('email', 'first_name', 'last_name')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field_name in self.Meta.fields:
self[field_name].field.required = True
self['password1'].field.validators = [MinLengthValidator(6)]
| Add Trip and Step ModelForms | Add Trip and Step ModelForms
| Python | mit | livingsilver94/getaride,livingsilver94/getaride,livingsilver94/getaride | ---
+++
@@ -1,7 +1,7 @@
from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
-from .models import PoolingUser
+from .models import PoolingUser, Trip, Step
from users.forms import UserCreationForm
@@ -31,6 +31,18 @@
fields = ['birth_date', 'driving_license', 'cellphone_number']
+class TripForm(forms.ModelForm):
+ class Meta:
+ model = Trip
+ fields = ['date_origin', 'max_num_passengers']
+
+
+class StepForm(forms.ModelForm):
+ class Meta:
+ model = Step
+ fields = ['origin', 'destination', 'hour_origin', 'hour_destination', 'max_price']
+
+
class UserForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
fields = ('email', 'first_name', 'last_name') |
bd4e1c3f511ac1163e39d99fdc8e70f261023c44 | setup/create_player_seasons.py | setup/create_player_seasons.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import concurrent.futures
from db.common import session_scope
from db.player import Player
from utils.player_data_retriever import PlayerDataRetriever
def create_player_seasons(simulation=False):
data_retriever = PlayerDataRetriever()
with session_scope() as session:
players = session.query(Player).all()[:25]
with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads:
future_tasks = {
threads.submit(
data_retriever.retrieve_player_seasons,
player.player_id, simulation
): player for player in players
}
for future in concurrent.futures.as_completed(future_tasks):
try:
plr_seasons = future.result()
print(len(plr_seasons))
except Exception as e:
print("Concurrent task generated an exception: %s" % e)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import concurrent.futures
from db.common import session_scope
from db.player import Player
from utils.player_data_retriever import PlayerDataRetriever
def create_player_seasons(simulation=False):
data_retriever = PlayerDataRetriever()
with session_scope() as session:
players = session.query(Player).all()[:]
with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads:
future_tasks = {
threads.submit(
data_retriever.retrieve_player_seasons,
player.player_id, simulation
): player for player in players
}
for future in concurrent.futures.as_completed(future_tasks):
try:
plr_seasons = future.result()
except Exception as e:
print("Concurrent task generated an exception: %s" % e)
| Update player season retrieval function | Update player season retrieval function
| Python | mit | leaffan/pynhldb | ---
+++
@@ -14,7 +14,7 @@
with session_scope() as session:
- players = session.query(Player).all()[:25]
+ players = session.query(Player).all()[:]
with concurrent.futures.ThreadPoolExecutor(max_workers=8) as threads:
future_tasks = {
@@ -26,7 +26,6 @@
for future in concurrent.futures.as_completed(future_tasks):
try:
plr_seasons = future.result()
- print(len(plr_seasons))
except Exception as e:
print("Concurrent task generated an exception: %s" % e)
|
f2a88e4849876970c29b568b897dff88ffe09306 | djrichtextfield/urls.py | djrichtextfield/urls.py | from django.conf.urls import url
from djrichtextfield.views import InitView
urlpatterns = [
url('^init.js$', InitView.as_view(), name='djrichtextfield_init')
]
| from django.urls import path
from djrichtextfield.views import InitView
urlpatterns = [
path('init.js', InitView.as_view(), name='djrichtextfield_init')
]
| Use path instead of soon to be deprecated url | Use path instead of soon to be deprecated url
| Python | mit | jaap3/django-richtextfield,jaap3/django-richtextfield | ---
+++
@@ -1,7 +1,7 @@
-from django.conf.urls import url
+from django.urls import path
from djrichtextfield.views import InitView
urlpatterns = [
- url('^init.js$', InitView.as_view(), name='djrichtextfield_init')
+ path('init.js', InitView.as_view(), name='djrichtextfield_init')
] |
c080865fdb36da2718774ddff436325d947be323 | test/test_fit_allocator.py | test/test_fit_allocator.py | from support import lib,ffi
from qcgc_test import QCGCTest
class FitAllocatorTest(QCGCTest):
def test_macro_consistency(self):
self.assertEqual(2**lib.QCGC_LARGE_FREE_LIST_FIRST_EXP, lib.qcgc_small_free_lists + 1)
last_exp = lib.QCGC_LARGE_FREE_LIST_FIRST_EXP + lib.qcgc_large_free_lists - 1
self.assertLess(2**last_exp, 2**lib.QCGC_ARENA_SIZE_EXP)
self.assertEqual(2**(last_exp + 1), 2**lib.QCGC_ARENA_SIZE_EXP)
def test_small_free_list_index(self):
for i in range(1, lib.qcgc_small_free_lists + 1):
self.assertTrue(lib.is_small(i))
self.assertEqual(lib.small_index(i), i - 1);
def test_large_free_list_index(self):
index = -1;
for i in range(2**lib.QCGC_LARGE_FREE_LIST_FIRST_EXP, 2**lib.QCGC_ARENA_SIZE_EXP):
if (i & (i - 1) == 0):
# Check for power of two
index = index + 1
self.assertFalse(lib.is_small(i))
self.assertEqual(index, lib.large_index(i));
| from support import lib,ffi
from qcgc_test import QCGCTest
class FitAllocatorTest(QCGCTest):
def test_macro_consistency(self):
self.assertEqual(2**lib.QCGC_LARGE_FREE_LIST_FIRST_EXP, lib.qcgc_small_free_lists + 1)
last_exp = lib.QCGC_LARGE_FREE_LIST_FIRST_EXP + lib.qcgc_large_free_lists - 1
self.assertLess(2**last_exp, 2**lib.QCGC_ARENA_SIZE_EXP)
self.assertEqual(2**(last_exp + 1), 2**lib.QCGC_ARENA_SIZE_EXP)
def test_small_free_list_index(self):
for i in range(1, lib.qcgc_small_free_lists + 1):
self.assertTrue(lib.is_small(i))
self.assertEqual(lib.small_index(i), i - 1);
self.assertTrue(lib.small_index_to_cells(i - 1), i);
def test_large_free_list_index(self):
index = -1;
for i in range(2**lib.QCGC_LARGE_FREE_LIST_FIRST_EXP, 2**lib.QCGC_ARENA_SIZE_EXP):
if (i & (i - 1) == 0):
# Check for power of two
index = index + 1
self.assertFalse(lib.is_small(i))
self.assertEqual(index, lib.large_index(i));
| Add test for index to cells | Add test for index to cells
| Python | mit | ntruessel/qcgc,ntruessel/qcgc,ntruessel/qcgc | ---
+++
@@ -12,6 +12,7 @@
for i in range(1, lib.qcgc_small_free_lists + 1):
self.assertTrue(lib.is_small(i))
self.assertEqual(lib.small_index(i), i - 1);
+ self.assertTrue(lib.small_index_to_cells(i - 1), i);
def test_large_free_list_index(self):
index = -1; |
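As an aside, test_large_free_list_index detects power-of-two sizes with the n & (n - 1) trick; a tiny standalone check of that expression (in Python, & binds tighter than ==, so no extra parentheses are needed):
for n in [1, 2, 3, 4, 6, 8, 16, 24]:
    print(n, n & (n - 1) == 0)   # True only for 1, 2, 4, 8 and 16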
b0a6652a11236409ec3e2606e04621f714a3ab63 | test/test_jobs/__init__.py | test/test_jobs/__init__.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
| # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from pybossa.jobs import create_dict_jobs
from default import Test, with_context
class TestJobs(Test):
@with_context
def test_create_dict_jobs(self):
"""Test JOB create_dict_jobs works."""
data = [{'id': 1, 'short_name': 'app'}]
jobs = create_dict_jobs(data, 'function')
assert len(jobs) == 1
assert jobs[0]['name'] == 'function'
| Test generic creator of jobs. | Test generic creator of jobs.
| Python | agpl-3.0 | jean/pybossa,Scifabric/pybossa,geotagx/pybossa,OpenNewsLabs/pybossa,jean/pybossa,inteligencia-coletiva-lsd/pybossa,geotagx/pybossa,stefanhahmann/pybossa,stefanhahmann/pybossa,PyBossa/pybossa,Scifabric/pybossa,inteligencia-coletiva-lsd/pybossa,OpenNewsLabs/pybossa,PyBossa/pybossa | ---
+++
@@ -15,3 +15,15 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
+from pybossa.jobs import create_dict_jobs
+from default import Test, with_context
+
+class TestJobs(Test):
+
+ @with_context
+ def test_create_dict_jobs(self):
+ """Test JOB create_dict_jobs works."""
+ data = [{'id': 1, 'short_name': 'app'}]
+ jobs = create_dict_jobs(data, 'function')
+ assert len(jobs) == 1
+ assert jobs[0]['name'] == 'function' |
1e5f8299dfa0e724933bb565359650a5dc579bb6 | aybu/core/dispatch.py | aybu/core/dispatch.py | import os
import logging
from paste.deploy import loadapp
from paste.script.util.logging_config import fileConfig
log = logging.getLogger(__name__)
def get_pylons_app(global_conf):
pyramid_config = os.path.realpath(global_conf['__file__'])
dir_, conf = os.path.split(pyramid_config)
config_file = os.path.join(dir_, "pylons-%s" % (conf))
logging.debug("Pyramid config from :%s, pylons config: %s",
pyramid_config, config_file)
fileConfig(config_file)
log.info("Loading application from %s", config_file)
app = loadapp("config:%s" % (config_file))
if not hasattr(app, "__name__"):
app.__name__ = "Wrapped Pylons app"
return app
| import os
import logging
import pyramid.paster
from paste.script.util.logging_config import fileConfig
log = logging.getLogger(__name__)
def get_pylons_app(global_conf):
pyramid_config = os.path.realpath(global_conf['__file__'])
dir_, conf = os.path.split(pyramid_config)
config_file = os.path.join(dir_, "pylons-%s" % (conf))
logging.debug("Pyramid config from :%s, pylons config: %s",
pyramid_config, config_file)
fileConfig(config_file)
log.info("Loading application from %s", config_file)
app = pyramid.paster.get_app(config_file, 'main')
if not hasattr(app, "__name__"):
app.__name__ = "Wrapped Pylons app"
return app
 | Use pyramid.paster instead of paste.deploy | Use pyramid.paster instead of paste.deploy
--HG--
extra : convert_revision : g.bagnoli%40asidev.com-20110509152058-7sd3ek2lvqrdksuw
| Python | apache-2.0 | asidev/aybu-core | ---
+++
@@ -1,6 +1,6 @@
import os
import logging
-from paste.deploy import loadapp
+import pyramid.paster
from paste.script.util.logging_config import fileConfig
log = logging.getLogger(__name__)
@@ -15,7 +15,7 @@
fileConfig(config_file)
log.info("Loading application from %s", config_file)
- app = loadapp("config:%s" % (config_file))
+ app = pyramid.paster.get_app(config_file, 'main')
if not hasattr(app, "__name__"):
app.__name__ = "Wrapped Pylons app"
return app |
b14ca021e93c40b02c5fef4972795796bddeb5ba | boris/utils/management/load_static_fixtures.py | boris/utils/management/load_static_fixtures.py | from django.conf import settings
from django.core.management import call_command
from south.signals import post_migrate
__author__ = 'xaralis'
APPS_TO_WAIT_FOR = ['clients', 'services']
def load_static_data(app, **kwargs):
global APPS_TO_WAIT_FOR
APPS_TO_WAIT_FOR.remove(app)
if len(APPS_TO_WAIT_FOR) == 0:
print "Loading static fixtures (Groups)."
call_command('loaddata', 'groups.json')
post_migrate.connect(load_static_data)
| from django.conf import settings
from django.core.management import call_command
from south.signals import post_migrate
__author__ = 'xaralis'
APPS_TO_WAIT_FOR = ['clients', 'services']
def load_static_data(app, **kwargs):
global APPS_TO_WAIT_FOR
APPS_TO_WAIT_FOR.remove(app)
if len(APPS_TO_WAIT_FOR) == 0:
print "Loading static fixtures (Groups)."
call_command('loaddata', 'groups.json')
#post_migrate.connect(load_static_data)
| Disable groups.json loading until it's fixed. | Disable groups.json loading until it's fixed.
| Python | mit | fragaria/BorIS,fragaria/BorIS,fragaria/BorIS | ---
+++
@@ -18,4 +18,4 @@
call_command('loaddata', 'groups.json')
-post_migrate.connect(load_static_data)
+#post_migrate.connect(load_static_data) |
af31c71e49b7d63c24ab7d7c04a5e908451263e2 | iati/core/tests/test_utilities.py | iati/core/tests/test_utilities.py | """A module containing tests for the library implementation of accessing utilities."""
from lxml import etree
import iati.core.resources
import iati.core.utilities
class TestUtilities(object):
"""A container for tests relating to utilities"""
def test_convert_to_schema(self):
"""Check that an etree can be converted to a schema."""
path = iati.core.resources.path_schema('iati-activities-schema')
tree = iati.core.resources.load_as_tree(path)
if not tree:
assert False
schema = iati.core.utilities.convert_to_schema(tree)
assert isinstance(schema, etree.XMLSchema)
def test_log(self):
pass
def test_log_error(self):
pass
| """A module containing tests for the library implementation of accessing utilities."""
from lxml import etree
import iati.core.resources
import iati.core.utilities
class TestUtilities(object):
"""A container for tests relating to utilities"""
def test_convert_to_schema(self):
"""Check that an etree can be converted to a schema."""
path = iati.core.resources.path_schema('iati-activities-schema')
tree = iati.core.resources.load_as_tree(path)
if not tree:
assert False
schema = iati.core.utilities.convert_to_schema(tree)
assert isinstance(schema, etree.XMLSchema)
def test_log(self):
pass
def test_log_error(self):
pass
def test_log_exception(self):
pass
def test_log_warning(self):
pass
| Add more logging test stubs | Add more logging test stubs
| Python | mit | IATI/iati.core,IATI/iati.core | ---
+++
@@ -23,3 +23,9 @@
def test_log_error(self):
pass
+
+ def test_log_exception(self):
+ pass
+
+ def test_log_warning(self):
+ pass |
0deac2fe49d1240a1d5fee1b9c47313bde84d609 | seleniumlogin/__init__.py | seleniumlogin/__init__.py | from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.split(':')[-2].split('/')[-1]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
| from importlib import import_module
from django.contrib.auth import SESSION_KEY, BACKEND_SESSION_KEY, HASH_SESSION_KEY
def force_login(user, driver, base_url):
from django.conf import settings
SessionStore = import_module(settings.SESSION_ENGINE).SessionStore
selenium_login_start_page = getattr(settings, 'SELENIUM_LOGIN_START_PAGE', '/page_404/')
driver.get('{}{}'.format(base_url, selenium_login_start_page))
session = SessionStore()
session[SESSION_KEY] = user.id
session[BACKEND_SESSION_KEY] = settings.AUTHENTICATION_BACKENDS[0]
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
domain = base_url.rpartition('://')[2].split('/')[0].split(':')[0]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key,
'path': '/',
'domain': domain
}
driver.add_cookie(cookie)
driver.refresh()
| Change how the base_url is turned into a domain | Change how the base_url is turned into a domain
| Python | mit | feffe/django-selenium-login,feffe/django-selenium-login | ---
+++
@@ -14,7 +14,7 @@
session[HASH_SESSION_KEY] = user.get_session_auth_hash()
session.save()
- domain = base_url.split(':')[-2].split('/')[-1]
+ domain = base_url.rpartition('://')[2].split('/')[0].split(':')[0]
cookie = {
'name': settings.SESSION_COOKIE_NAME,
'value': session.session_key, |
53b17a3b576a90e5d5bb773ca868a3dd160b3273 | roamer/record.py | roamer/record.py | """
argh
"""
import os
import json
from roamer.entry import Entry
entries_path = os.path.expanduser('~/.roamer-data/entries.json')
class Record(object):
def __init__(self):
self.entries = {}
# TODO: Load saved directory json
if os.path.exists(entries_path):
with open(entries_path) as data_file:
data = json.load(data_file)
for digest, entry_data in data.iteritems():
entry = Entry(entry_data['name'], entry_data['directory'], digest)
self.entries[entry.digest] = entry
# TODO: load trash json
def add_dir(self, directory):
# TODO: Create parent dirs if they don't exist
with open(entries_path, 'w') as outfile:
entries = {}
for digest, entry in directory.entries.iteritems():
entries[entry.digest] = {'name': entry.name, 'directory': entry.directory.path}
json.dump(entries, outfile)
def add_trash(self, path, digest):
pass
# TODO: add trash
| """
argh
"""
import os
import json
from roamer.entry import Entry
entries_path = os.path.expanduser('~/.roamer-data/entries.json')
trash_path = os.path.expanduser('~/.roamer-data/trash.json')
class Record(object):
def __init__(self):
self.entries = self._load(entries_path)
self.trash_entries = self._load(trash_path)
@staticmethod
def _load(path):
dictionary = {}
if os.path.exists(path):
with open(path) as data_file:
data = json.load(data_file)
for digest, entry_data in data.iteritems():
entry = Entry(entry_data['name'], entry_data['directory'], digest)
dictionary[entry.digest] = entry
return dictionary
@staticmethod
def add_dir(directory):
# TODO: Create parent dirs if they don't exist
with open(entries_path, 'w') as outfile:
entries = {}
for digest, entry in directory.entries.iteritems():
entries[entry.digest] = {'name': entry.name, 'directory': entry.directory.path}
json.dump(entries, outfile)
@staticmethod
def add_trash(path, digest):
pass
# TODO: add trash
| Add trash loading. Refactor shared load method | Add trash loading. Refactor shared load method
| Python | mit | abaldwin88/roamer | ---
+++
@@ -6,29 +6,35 @@
from roamer.entry import Entry
entries_path = os.path.expanduser('~/.roamer-data/entries.json')
+trash_path = os.path.expanduser('~/.roamer-data/trash.json')
class Record(object):
def __init__(self):
- self.entries = {}
- # TODO: Load saved directory json
- if os.path.exists(entries_path):
- with open(entries_path) as data_file:
+ self.entries = self._load(entries_path)
+ self.trash_entries = self._load(trash_path)
+
+ @staticmethod
+ def _load(path):
+ dictionary = {}
+ if os.path.exists(path):
+ with open(path) as data_file:
data = json.load(data_file)
for digest, entry_data in data.iteritems():
entry = Entry(entry_data['name'], entry_data['directory'], digest)
- self.entries[entry.digest] = entry
- # TODO: load trash json
+ dictionary[entry.digest] = entry
+ return dictionary
- def add_dir(self, directory):
+ @staticmethod
+ def add_dir(directory):
# TODO: Create parent dirs if they don't exist
-
with open(entries_path, 'w') as outfile:
entries = {}
for digest, entry in directory.entries.iteritems():
entries[entry.digest] = {'name': entry.name, 'directory': entry.directory.path}
json.dump(entries, outfile)
-
- def add_trash(self, path, digest):
+ @staticmethod
+ def add_trash(path, digest):
pass
# TODO: add trash
+ |
56e764835e75035452a6a1ea06c386ec61dbe872 | src/rinoh/stylesheets/__init__.py | src/rinoh/stylesheets/__init__.py | # This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
import inspect
import os
import sys
from .. import DATA_PATH
from ..style import StyleSheetFile
from .matcher import matcher
__all__ = ['matcher', 'sphinx', 'sphinx_base14']
STYLESHEETS_PATH = os.path.join(DATA_PATH, 'stylesheets')
def path(filename):
return os.path.join(STYLESHEETS_PATH, filename)
sphinx = StyleSheetFile(path('sphinx.rts'))
sphinx_article = StyleSheetFile(path('sphinx_article.rts'))
sphinx_base14 = StyleSheetFile(path('base14.rts'))
# generate docstrings for the StyleSheet instances
for name, stylesheet in inspect.getmembers(sys.modules[__name__]):
if not isinstance(stylesheet, StyleSheetFile):
continue
stylesheet.__doc__ = (':entry point name: ``{}``\n\n{}'
.format(stylesheet, stylesheet.description))
| # This file is part of rinohtype, the Python document preparation system.
#
# Copyright (c) Brecht Machiels.
#
# Use of this source code is subject to the terms of the GNU Affero General
# Public License v3. See the LICENSE file or http://www.gnu.org/licenses/.
import inspect
import os
import sys
from .. import DATA_PATH
from ..style import StyleSheetFile
from .matcher import matcher
__all__ = ['matcher', 'sphinx', 'sphinx_base14']
STYLESHEETS_PATH = os.path.join(DATA_PATH, 'stylesheets')
def path(filename):
return os.path.join(STYLESHEETS_PATH, filename)
sphinx = StyleSheetFile(path('sphinx.rts'))
sphinx_article = StyleSheetFile(path('sphinx_article.rts'))
sphinx_base14 = StyleSheetFile(path('base14.rts'))
# generate docstrings for the StyleSheet instances
for name, stylesheet in inspect.getmembers(sys.modules[__name__]):
if not isinstance(stylesheet, StyleSheetFile):
continue
stylesheet.__doc__ = ('{}\n\nEntry point name: ``{}``'
.format(stylesheet.description, stylesheet))
| Fix the auto-generated docstrings of style sheets | Fix the auto-generated docstrings of style sheets
| Python | agpl-3.0 | brechtm/rinohtype,brechtm/rinohtype,brechtm/rinohtype | ---
+++
@@ -38,5 +38,5 @@
for name, stylesheet in inspect.getmembers(sys.modules[__name__]):
if not isinstance(stylesheet, StyleSheetFile):
continue
- stylesheet.__doc__ = (':entry point name: ``{}``\n\n{}'
- .format(stylesheet, stylesheet.description))
+ stylesheet.__doc__ = ('{}\n\nEntry point name: ``{}``'
+ .format(stylesheet.description, stylesheet)) |
e9df17b0319e50351cdaa39abb504dfafc77de6d | glitch/config.py | glitch/config.py | # Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 8889 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
| # Basic config for stuff that can be easily changed, but which is git-managed.
# See also apikeys_sample.py for the configs which are _not_ git-managed.
#server_domain = "http://www.infiniteglitch.net"
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
renderer_port = 81 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400
min_track_length = 90
| Use port 81 instead of 8889. | Use port 81 instead of 8889.
| Python | artistic-2.0 | MikeiLL/appension,MikeiLL/appension,MikeiLL/appension,MikeiLL/appension | ---
+++
@@ -4,7 +4,7 @@
server_domain = "http://50.116.55.59"
http_port = 8888 # Port for the main web site, in debug mode
-renderer_port = 8889 # Port for the renderer (/all.mp3 and friends)
+renderer_port = 81 # Port for the renderer (/all.mp3 and friends)
# Track limits in seconds
max_track_length = 400 |
27deb367c8fb080ad3a05c684920d2a90fb207f5 | hoomd/analyze/python_analyzer.py | hoomd/analyze/python_analyzer.py | from hoomd.python_action import _PythonAction, _InternalPythonAction
from hoomd.custom_action import _CustomAction, _InternalCustomAction
class CustomAnalyzer(_CustomAction):
def analyze(self, timestep):
return self.act(timestep)
class _InternalCustomAnayzer(_InternalCustomAction):
def analyze(self, timestep):
return self.act(timestep)
class _PythonAnalyzer(_PythonAction):
_cpp_list_name = 'analyzers'
_cpp_class_name = 'PythonAnalyzer'
_cpp_action = 'analyze'
@property
def analyzer(self):
return self._action
@analyzer.setter
def analyzer(self, analyzer):
if isinstance(analyzer, _CustomAction):
self._action = analyzer
else:
raise ValueError("analyzer must be an instance of _CustomAction")
class _InternalPythonAnalyzer(_InternalPythonAction):
_cpp_list_name = 'analyzers'
_cpp_class_name = 'PythonAnalyzer'
_cpp_action = 'analyze'
@property
def analyzer(self):
return self._action
@analyzer.setter
def analyzer(self, analyzer):
if isinstance(analyzer, _CustomAction):
self._action = analyzer
else:
raise ValueError("analyzer must be an instance of _CustomAction")
| from hoomd.python_action import _PythonAction, _InternalPythonAction
from hoomd.custom_action import _CustomAction, _InternalCustomAction
class _CustomAnalyzer(_CustomAction):
def analyze(self, timestep):
return self.act(timestep)
class _InternalCustomAnayzer(_InternalCustomAction):
def analyze(self, timestep):
return self.act(timestep)
class _PythonAnalyzer(_PythonAction):
_cpp_list_name = 'analyzers'
_cpp_class_name = 'PythonAnalyzer'
_cpp_action = 'analyze'
@property
def analyzer(self):
return self._action
@analyzer.setter
def analyzer(self, analyzer):
if isinstance(analyzer, _CustomAction):
self._action = analyzer
else:
raise ValueError("analyzer must be an instance of _CustomAction")
class _InternalPythonAnalyzer(_InternalPythonAction):
_cpp_list_name = 'analyzers'
_cpp_class_name = 'PythonAnalyzer'
_cpp_action = 'analyze'
@property
def analyzer(self):
return self._action
@analyzer.setter
def analyzer(self, analyzer):
if isinstance(analyzer, _CustomAction):
self._action = analyzer
else:
raise ValueError("analyzer must be an instance of _CustomAction")
| Make _CustomAnalyzer a private class | Make _CustomAnalyzer a private class
| Python | bsd-3-clause | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | ---
+++
@@ -2,7 +2,7 @@
from hoomd.custom_action import _CustomAction, _InternalCustomAction
-class CustomAnalyzer(_CustomAction):
+class _CustomAnalyzer(_CustomAction):
def analyze(self, timestep):
return self.act(timestep)
|
b448d52e5a30346633dd20e52431af39eb6859ec | importer/importer/connections.py | importer/importer/connections.py | import aioes
from .utils import wait_for_all_services
from .settings import ELASTICSEARCH_ENDPOINTS
async def connect_to_elasticsearch():
print("Connecting to Elasticsearch...")
await wait_for_all_services(ELASTICSEARCH_ENDPOINTS, timeout=10)
elastic = aioes.Elasticsearch(ELASTICSEARCH_ENDPOINTS)
await elastic.cluster.health(wait_for_status='yellow', timeout='5s')
return elastic
| import aioes
from .utils import wait_for_all_services
from .settings import ELASTICSEARCH_ENDPOINTS
async def connect_to_elasticsearch():
print("Connecting to Elasticsearch...")
await wait_for_all_services(ELASTICSEARCH_ENDPOINTS, timeout=10)
elastic = aioes.Elasticsearch(ELASTICSEARCH_ENDPOINTS)
return elastic
| Remove the pointless cluster health check | Remove the pointless cluster health check
| Python | mit | despawnerer/theatrics,despawnerer/theatrics,despawnerer/theatrics | ---
+++
@@ -9,5 +9,4 @@
await wait_for_all_services(ELASTICSEARCH_ENDPOINTS, timeout=10)
elastic = aioes.Elasticsearch(ELASTICSEARCH_ENDPOINTS)
- await elastic.cluster.health(wait_for_status='yellow', timeout='5s')
return elastic |
70d435e1176a1132db6a04c34c04567df354d1d9 | cla_backend/apps/reports/management/commands/mi_cb1_report.py | cla_backend/apps/reports/management/commands/mi_cb1_report.py | # coding=utf-8
import logging
from django.core.management.base import BaseCommand
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "This runs the MCCB1sSLA report"
def handle(self, *args, **options):
self.create_report()
def create_report():
print("stuff goes here")
# '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "08/05/2021", "date_to": "10/05/2021"}'
# report_data = json_stuff_goes_here
# ExportTask().delay(user_person.pk, filename_of_report, mi_cb1_extract_agilisys, report_data)
| # coding=utf-8
import logging
from django.core.management.base import BaseCommand
from reports.tasks import ExportTask
from core.models import get_web_user
from django.views.decorators.csrf import csrf_exempt
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "This runs the MCCB1sSLA report"
def handle(self, *args, **options):
self.create_report()
@csrf_exempt
def create_report(self):
report_data = '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "2021-05-08", "date_to": "2021-05-10"}'
# report_data = json_stuff_goes_here
web_user = get_web_user()
filename_of_report = "WEEKLY-REPORT-TEST.csv"
ExportTask().delay(web_user.pk, filename_of_report, "MICB1Extract", report_data)
| Send weekly report to aws | Send weekly report to aws
| Python | mit | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend | ---
+++
@@ -1,7 +1,9 @@
# coding=utf-8
import logging
from django.core.management.base import BaseCommand
-
+from reports.tasks import ExportTask
+from core.models import get_web_user
+from django.views.decorators.csrf import csrf_exempt
logger = logging.getLogger(__name__)
@@ -12,10 +14,11 @@
def handle(self, *args, **options):
self.create_report()
- def create_report():
- print("stuff goes here")
-
- # '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "08/05/2021", "date_to": "10/05/2021"}'
+ @csrf_exempt
+ def create_report(self):
+ report_data = '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "2021-05-08", "date_to": "2021-05-10"}'
# report_data = json_stuff_goes_here
- # ExportTask().delay(user_person.pk, filename_of_report, mi_cb1_extract_agilisys, report_data)
+ web_user = get_web_user()
+ filename_of_report = "WEEKLY-REPORT-TEST.csv"
+ ExportTask().delay(web_user.pk, filename_of_report, "MICB1Extract", report_data) |
28803e4669f4c7b2b84e53e39e3a0a99ff57572d | skyfield/__main__.py | skyfield/__main__.py | # -*- coding: utf-8 -*-
import pkg_resources
import skyfield
from skyfield.api import load
from skyfield.functions import load_bundled_npy
def main():
print('Skyfield version: {0}'.format(skyfield.__version__))
print('jplephem version: {0}'.format(version_of('jplephem')))
print('sgp4 version: {0}'.format(version_of('sgp4')))
ts = load.timescale()
fmt = '%Y-%m-%d'
final_leap = (ts._leap_tai[-1] - 1) / (24 * 60 * 60)
print('Built-in leap seconds table ends with leap second at: {0}'
.format(ts.tai_jd(final_leap).utc_strftime()))
arrays = load_bundled_npy('iers.npz')
tt, delta_t = arrays['delta_t_recent']
start = ts.tt_jd(tt[0])
end = ts.tt_jd(tt[-1])
print('Built-in ∆T table from finals2000A.all covers: {0} to {1}'
.format(start.utc_strftime(fmt), end.utc_strftime(fmt)))
def version_of(distribution):
try:
d = pkg_resources.get_distribution(distribution)
except pkg_resources.DistributionNotFound:
return 'Unknown'
else:
return d.version
main()
| # -*- coding: utf-8 -*-
import pkg_resources
import numpy as np
import skyfield
from skyfield.api import load
from skyfield.functions import load_bundled_npy
def main():
print('Skyfield version: {0}'.format(skyfield.__version__))
print('jplephem version: {0}'.format(version_of('jplephem')))
print('sgp4 version: {0}'.format(version_of('sgp4')))
ts = load.timescale()
fmt = '%Y-%m-%d'
final_leap = (ts._leap_tai[-1] - 1) / (24 * 60 * 60)
print('Built-in leap seconds table ends with leap second at: {0}'
.format(ts.tai_jd(final_leap).utc_strftime()))
arrays = load_bundled_npy('iers.npz')
daily_tt = arrays['tt_jd_minus_arange']
daily_tt += np.arange(len(daily_tt))
start = ts.tt_jd(daily_tt[0])
end = ts.tt_jd(daily_tt[-1])
print('Built-in ∆T table from finals2000A.all covers: {0} to {1}'
.format(start.utc_strftime(fmt), end.utc_strftime(fmt)))
def version_of(distribution):
try:
d = pkg_resources.get_distribution(distribution)
except pkg_resources.DistributionNotFound:
return 'Unknown'
else:
return d.version
main()
| Fix “python -m skyfield” following ∆T array rename | Fix “python -m skyfield” following ∆T array rename
| Python | mit | skyfielders/python-skyfield,skyfielders/python-skyfield | ---
+++
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import pkg_resources
+import numpy as np
import skyfield
from skyfield.api import load
from skyfield.functions import load_bundled_npy
@@ -18,9 +19,10 @@
.format(ts.tai_jd(final_leap).utc_strftime()))
arrays = load_bundled_npy('iers.npz')
- tt, delta_t = arrays['delta_t_recent']
- start = ts.tt_jd(tt[0])
- end = ts.tt_jd(tt[-1])
+ daily_tt = arrays['tt_jd_minus_arange']
+ daily_tt += np.arange(len(daily_tt))
+ start = ts.tt_jd(daily_tt[0])
+ end = ts.tt_jd(daily_tt[-1])
print('Built-in ∆T table from finals2000A.all covers: {0} to {1}'
.format(start.utc_strftime(fmt), end.utc_strftime(fmt)))
|
b812a8da81ec9943d11b8cb9f709e234c90a2282 | stylo/utils.py | stylo/utils.py | from uuid import uuid4
class MessageBus:
"""A class that is used behind the scenes to coordinate events and timings of
animations.
"""
def __init__(self):
self.subs = {}
def new_id(self):
"""Use this to get a name to use for your events."""
return str(uuid4())
def register(self, event, obj):
"""Register to receive notifications of an event.
:param event: The name of the kind of event to receive
:param obj: The object to receive that kind of message.
"""
if event not in self.subs:
self.subs[event] = [obj]
return
self.subs[event].append(obj)
def send(self, event, **kwargs):
"""Send a message to whoever may be listening."""
if event not in self.subs:
return
for obj in self.subs[event]:
params = get_parameters(obj)
values = {k: v for k, v in kwargs.items() if k in params}
obj(**values)
_message_bus = MessageBus()
def get_message_bus():
"""A function that returns an instance of the message bus to ensure everyone uses
the same instance."""
return _message_bus
| import inspect
from uuid import uuid4
def get_parameters(f):
return list(inspect.signature(f).parameters.keys())
class MessageBus:
"""A class that is used behind the scenes to coordinate events and timings of
animations.
"""
def __init__(self):
self.subs = {}
def new_id(self):
"""Use this to get a name to use for your events."""
return str(uuid4())
def register(self, event, obj):
"""Register to receive notifications of an event.
:param event: The name of the kind of event to receive
:param obj: The object to receive that kind of message.
"""
if event not in self.subs:
self.subs[event] = [obj]
return
self.subs[event].append(obj)
def send(self, event, **kwargs):
"""Send a message to whoever may be listening."""
if event not in self.subs:
return
for obj in self.subs[event]:
params = get_parameters(obj)
values = {k: v for k, v in kwargs.items() if k in params}
obj(**values)
_message_bus = MessageBus()
def get_message_bus():
"""A function that returns an instance of the message bus to ensure everyone uses
the same instance."""
return _message_bus
| Add the function back for now | Add the function back for now
| Python | mit | alcarney/stylo,alcarney/stylo | ---
+++
@@ -1,4 +1,9 @@
+import inspect
from uuid import uuid4
+
+
+def get_parameters(f):
+ return list(inspect.signature(f).parameters.keys())
class MessageBus: |
2e631e8e3b3fe738d9f41ab46f138769f688d00f | dthm4kaiako/config/__init__.py | dthm4kaiako/config/__init__.py | """Configuration for Django system."""
__version__ = "0.17.4"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| """Configuration for Django system."""
__version__ = "0.17.5"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
| Increment version number to 0.17.5 | Increment version number to 0.17.5
| Python | mit | uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers | ---
+++
@@ -1,6 +1,6 @@
"""Configuration for Django system."""
-__version__ = "0.17.4"
+__version__ = "0.17.5"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num |
eb4d480d8bf7807f7a0d38f445a5779331f8d3f3 | aids/stack/stack.py | aids/stack/stack.py | '''
Implementation of Stack data structure
'''
class Stack:
def __init__(self):
'''
Initialize stack
'''
self.items = []
def is_empty(self):
'''
Return True if stack if empty else False
'''
return self.item == []
def push(self, item):
'''
Push item to stack
'''
self.items.append(item)
def pop(self):
'''
Pop item from stack
'''
return self.items.pop()
def peek(self):
'''
Return value of item on top of stack
'''
return self.items[-1]
def size(self):
'''
Return number of items in stack
'''
return len(self.items)
| '''
Implementation of Stack data structure
'''
class Stack(object):
def __init__(self):
'''
Initialize stack
'''
self.items = []
def is_empty(self):
'''
Return True if stack if empty else False
'''
return self.item == []
def push(self, item):
'''
Push item to stack
'''
self.items.append(item)
def pop(self):
'''
Pop item from stack
'''
return self.items.pop()
def peek(self):
'''
Return value of item on top of stack
'''
return self.items[-1]
def size(self):
'''
Return number of items in stack
'''
return len(self.items)
| Convert old-style class to new-style class | Convert old-style class to new-style class
| Python | mit | ueg1990/aids | ---
+++
@@ -4,7 +4,7 @@
'''
-class Stack:
+class Stack(object):
def __init__(self):
''' |
602184794c3f38bf6307cf68f4d61294b523c009 | examples/LKE_example.py | examples/LKE_example.py | from pygraphc.misc.LKE import *
from pygraphc.evaluation.ExternalEvaluation import *
# set input and output path
ip_address = '161.166.232.17'
standard_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1/' + ip_address
standard_file = standard_path + 'auth.log.anon.labeled'
analyzed_file = 'auth.log.anon'
prediction_file = 'lke-result-' + ip_address + '.txt'
OutputPath = './results'
para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
# run LKE method
myparser = LKE(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# perform evaluation
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
prediction_file)
# print evaluation result
print homogeneity_completeness_vmeasure
print ('The running time of LKE is', time)
| from pygraphc.misc.LKE import *
from pygraphc.evaluation.ExternalEvaluation import *
# set input and output path
dataset_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1_perday/'
groundtruth_file = dataset_path + 'Dec 1.log.labeled'
analyzed_file = 'Dec 1.log'
OutputPath = '/home/hudan/Git/pygraphc/result/misc/'
prediction_file = OutputPath + 'Dec 1.log.perline'
para = Para(path=dataset_path, logname=analyzed_file, save_path=OutputPath)
# run LKE method
myparser = LKE(para)
time = myparser.main_process()
clusters = myparser.get_clusters()
original_logs = myparser.logs
# perform evaluation
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
# get evaluation of clustering performance
ar = ExternalEvaluation.get_adjusted_rand(groundtruth_file, prediction_file)
ami = ExternalEvaluation.get_adjusted_mutual_info(groundtruth_file, prediction_file)
nmi = ExternalEvaluation.get_normalized_mutual_info(groundtruth_file, prediction_file)
h = ExternalEvaluation.get_homogeneity(groundtruth_file, prediction_file)
c = ExternalEvaluation.get_completeness(groundtruth_file, prediction_file)
v = ExternalEvaluation.get_vmeasure(groundtruth_file, prediction_file)
# print evaluation result
print ar, ami, nmi, h, c, v
print ('The running time of LKE is', time)
| Edit path and external evaluation | Edit path and external evaluation
| Python | mit | studiawan/pygraphc | ---
+++
@@ -2,13 +2,12 @@
from pygraphc.evaluation.ExternalEvaluation import *
# set input and output path
-ip_address = '161.166.232.17'
-standard_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1/' + ip_address
-standard_file = standard_path + 'auth.log.anon.labeled'
-analyzed_file = 'auth.log.anon'
-prediction_file = 'lke-result-' + ip_address + '.txt'
-OutputPath = './results'
-para = Para(path=standard_path, logname=analyzed_file, save_path=OutputPath)
+dataset_path = '/home/hudan/Git/labeled-authlog/dataset/Hofstede2014/dataset1_perday/'
+groundtruth_file = dataset_path + 'Dec 1.log.labeled'
+analyzed_file = 'Dec 1.log'
+OutputPath = '/home/hudan/Git/pygraphc/result/misc/'
+prediction_file = OutputPath + 'Dec 1.log.perline'
+para = Para(path=dataset_path, logname=analyzed_file, save_path=OutputPath)
# run LKE method
myparser = LKE(para)
@@ -18,8 +17,15 @@
# perform evaluation
ExternalEvaluation.set_cluster_label_id(None, clusters, original_logs, prediction_file)
-homogeneity_completeness_vmeasure = ExternalEvaluation.get_homogeneity_completeness_vmeasure(standard_file,
- prediction_file)
+
+# get evaluation of clustering performance
+ar = ExternalEvaluation.get_adjusted_rand(groundtruth_file, prediction_file)
+ami = ExternalEvaluation.get_adjusted_mutual_info(groundtruth_file, prediction_file)
+nmi = ExternalEvaluation.get_normalized_mutual_info(groundtruth_file, prediction_file)
+h = ExternalEvaluation.get_homogeneity(groundtruth_file, prediction_file)
+c = ExternalEvaluation.get_completeness(groundtruth_file, prediction_file)
+v = ExternalEvaluation.get_vmeasure(groundtruth_file, prediction_file)
+
# print evaluation result
-print homogeneity_completeness_vmeasure
+print ar, ami, nmi, h, c, v
print ('The running time of LKE is', time) |
2739999c6fa0628e7cfe7a918e3cde3b7d791d66 | tests/astroplpython/data/test_Timeseries.py | tests/astroplpython/data/test_Timeseries.py | '''
Created on Jul 16, 2014
@author: thomas
'''
import unittest
class TestTimeseries (unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_strToXTArray (self):
import astroplpython.data.Timeseries as t
strarr = ['(1,1.)', '(2,2.)', '(2.1,3.)', '(2.018,4.)']
x_t_list = t.x_t.strToXTArray(strarr)
print (str(x_t_list))
self.assertEqual(4, len(x_t_list), "list has right number of elements")
x = [1, 2, 2.1, 2.018]
t = [1., 2., 3., 4.]
i = 0
while (i < 4):
self.assertEquals(x[i], x_t_list[i].value) #, "x value is correct")
self.assertEquals(t[i], x_t_list[i].time, "t value is correct")
i += 1
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | '''
Created on Jul 16, 2014
@author: thomas
'''
import unittest
class TestTimeseries (unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_strToXTArray (self):
import astroplpython.data.Timeseries as Timeseries
# test data strarr has combination of integer, floats.
#
strarr = ['(1,1)', '(2,2.)', '(2.1,3.)', '(2.018,4.)']
x_t_list = Timeseries.x_t.strToXTArray(strarr)
#print (str(x_t_list))
self.assertEqual(4, len(x_t_list), "list has right number of elements")
# Check class, return values. In checking values be sure
# to check that we cast back to float
x = [1., 2.0, 2.1, 2.018]
t = [1.000, 2.0, 3.0, 4.0]
i = 0
while (i < 4):
#print (str(x_t_list[i]))
#print (str(x_t_list[i].value))
self.assertIsInstance(x_t_list[i], Timeseries.x_t, "is class of Timeseries")
self.assertEquals(x[i], x_t_list[i].value) #, "x value is correct")
self.assertEquals(t[i], x_t_list[i].time, "t value is correct")
i += 1
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | Modify test to match changes in class | Modify test to match changes in class
| Python | mit | brianthomas/astroplpython,brianthomas/astroplpython | ---
+++
@@ -17,16 +17,26 @@
def test_strToXTArray (self):
- import astroplpython.data.Timeseries as t
- strarr = ['(1,1.)', '(2,2.)', '(2.1,3.)', '(2.018,4.)']
- x_t_list = t.x_t.strToXTArray(strarr)
- print (str(x_t_list))
+
+ import astroplpython.data.Timeseries as Timeseries
+
+ # test data strarr has combination of integer, floats.
+ #
+ strarr = ['(1,1)', '(2,2.)', '(2.1,3.)', '(2.018,4.)']
+ x_t_list = Timeseries.x_t.strToXTArray(strarr)
+ #print (str(x_t_list))
+
self.assertEqual(4, len(x_t_list), "list has right number of elements")
- x = [1, 2, 2.1, 2.018]
- t = [1., 2., 3., 4.]
+ # Check class, return values. In checking values be sure
+ # to check that we cast back to float
+ x = [1., 2.0, 2.1, 2.018]
+ t = [1.000, 2.0, 3.0, 4.0]
i = 0
while (i < 4):
+ #print (str(x_t_list[i]))
+ #print (str(x_t_list[i].value))
+ self.assertIsInstance(x_t_list[i], Timeseries.x_t, "is class of Timeseries")
self.assertEquals(x[i], x_t_list[i].value) #, "x value is correct")
self.assertEquals(t[i], x_t_list[i].time, "t value is correct")
i += 1 |
5b7789d519be7251c58b68879f013d5f3bf0c950 | tests/thread/thread_stacksize1.py | tests/thread/thread_stacksize1.py | # test setting the thread stack size
#
# MIT license; Copyright (c) 2016 Damien P. George on behalf of Pycom Ltd
import sys
import _thread
# different implementations have different minimum sizes
if sys.implementation == 'micropython':
sz = 2 * 1024
else:
sz = 32 * 1024
def foo():
pass
def thread_entry():
foo()
with lock:
global n_finished
n_finished += 1
# test set/get of stack size
print(_thread.stack_size())
print(_thread.stack_size(sz))
print(_thread.stack_size() == sz)
print(_thread.stack_size())
lock = _thread.allocate_lock()
n_thread = 2
n_finished = 0
# set stack size and spawn a few threads
_thread.stack_size(sz)
for i in range(n_thread):
_thread.start_new_thread(thread_entry, ())
# busy wait for threads to finish
while n_finished < n_thread:
pass
print('done')
| # test setting the thread stack size
#
# MIT license; Copyright (c) 2016 Damien P. George on behalf of Pycom Ltd
import sys
import _thread
# different implementations have different minimum sizes
if sys.implementation.name == 'micropython':
sz = 2 * 1024
else:
sz = 32 * 1024
def foo():
pass
def thread_entry():
foo()
with lock:
global n_finished
n_finished += 1
# reset stack size to default
_thread.stack_size()
# test set/get of stack size
print(_thread.stack_size())
print(_thread.stack_size(sz))
print(_thread.stack_size() == sz)
print(_thread.stack_size())
lock = _thread.allocate_lock()
n_thread = 2
n_finished = 0
# set stack size and spawn a few threads
_thread.stack_size(sz)
for i in range(n_thread):
_thread.start_new_thread(thread_entry, ())
# busy wait for threads to finish
while n_finished < n_thread:
pass
print('done')
| Make stack-size test run correctly and reliable on uPy. | tests/thread: Make stack-size test run correctly and reliable on uPy.
| Python | mit | mhoffma/micropython,HenrikSolver/micropython,ryannathans/micropython,dmazzella/micropython,mhoffma/micropython,PappaPeppar/micropython,MrSurly/micropython,tuc-osg/micropython,oopy/micropython,oopy/micropython,SHA2017-badge/micropython-esp32,AriZuu/micropython,toolmacher/micropython,redbear/micropython,mhoffma/micropython,tobbad/micropython,selste/micropython,adafruit/circuitpython,toolmacher/micropython,alex-robbins/micropython,emfcamp/micropython,Timmenem/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,dmazzella/micropython,mhoffma/micropython,dxxb/micropython,blazewicz/micropython,PappaPeppar/micropython,adafruit/circuitpython,pramasoul/micropython,kerneltask/micropython,lowRISC/micropython,pfalcon/micropython,puuu/micropython,dxxb/micropython,tuc-osg/micropython,kerneltask/micropython,tobbad/micropython,MrSurly/micropython,SHA2017-badge/micropython-esp32,AriZuu/micropython,selste/micropython,ganshun666/micropython,hiway/micropython,redbear/micropython,lowRISC/micropython,turbinenreiter/micropython,tralamazza/micropython,pramasoul/micropython,oopy/micropython,SHA2017-badge/micropython-esp32,Peetz0r/micropython-esp32,hosaka/micropython,chrisdearman/micropython,MrSurly/micropython,bvernoux/micropython,TDAbboud/micropython,tuc-osg/micropython,Peetz0r/micropython-esp32,Timmenem/micropython,MrSurly/micropython-esp32,deshipu/micropython,ryannathans/micropython,pozetroninc/micropython,infinnovation/micropython,chrisdearman/micropython,adafruit/circuitpython,adafruit/circuitpython,chrisdearman/micropython,infinnovation/micropython,matthewelse/micropython,swegener/micropython,hiway/micropython,MrSurly/micropython-esp32,adafruit/micropython,tralamazza/micropython,alex-march/micropython,swegener/micropython,toolmacher/micropython,PappaPeppar/micropython,puuu/micropython,adafruit/circuitpython,cwyark/micropython,hiway/micropython,deshipu/micropython,deshipu/micropython,cwyark/micropython,blazewicz/micropython,pozetroninc/micropython,SHA2017-badge/micropython-esp32,tuc-osg/micropython,micropython/micropython-esp32,alex-march/micropython,adafruit/micropython,alex-march/micropython,pozetroninc/micropython,henriknelson/micropython,jmarcelino/pycom-micropython,henriknelson/micropython,turbinenreiter/micropython,MrSurly/micropython,kerneltask/micropython,emfcamp/micropython,swegener/micropython,ganshun666/micropython,pozetroninc/micropython,kerneltask/micropython,PappaPeppar/micropython,Peetz0r/micropython-esp32,tuc-osg/micropython,Timmenem/micropython,AriZuu/micropython,dxxb/micropython,matthewelse/micropython,toolmacher/micropython,chrisdearman/micropython,SHA2017-badge/micropython-esp32,dmazzella/micropython,HenrikSolver/micropython,lowRISC/micropython,HenrikSolver/micropython,swegener/micropython,MrSurly/micropython-esp32,bvernoux/micropython,MrSurly/micropython-esp32,pfalcon/micropython,torwag/micropython,henriknelson/micropython,blazewicz/micropython,TDAbboud/micropython,bvernoux/micropython,hosaka/micropython,oopy/micropython,alex-robbins/micropython,jmarcelino/pycom-micropython,deshipu/micropython,Peetz0r/micropython-esp32,TDAbboud/micropython,Peetz0r/micropython-esp32,turbinenreiter/micropython,redbear/micropython,bvernoux/micropython,mhoffma/micropython,alex-march/micropython,adafruit/micropython,dxxb/micropython,blazewicz/micropython,micropython/micropython-esp32,emfcamp/micropython,tobbad/micropython,pramasoul/micropython,puuu/micropython,torwag/micropython,pozetroninc/micropython,toolmacher/micropython,hosaka/micropython,adafruit/micropython,henriknelson/mi
cropython,bvernoux/micropython,infinnovation/micropython,turbinenreiter/micropython,HenrikSolver/micropython,oopy/micropython,adafruit/circuitpython,alex-robbins/micropython,matthewelse/micropython,PappaPeppar/micropython,micropython/micropython-esp32,ryannathans/micropython,ganshun666/micropython,selste/micropython,deshipu/micropython,torwag/micropython,ryannathans/micropython,MrSurly/micropython,hiway/micropython,redbear/micropython,tralamazza/micropython,AriZuu/micropython,alex-march/micropython,tralamazza/micropython,ryannathans/micropython,trezor/micropython,henriknelson/micropython,alex-robbins/micropython,trezor/micropython,tobbad/micropython,trezor/micropython,puuu/micropython,TDAbboud/micropython,pfalcon/micropython,turbinenreiter/micropython,alex-robbins/micropython,emfcamp/micropython,TDAbboud/micropython,hosaka/micropython,kerneltask/micropython,redbear/micropython,HenrikSolver/micropython,matthewelse/micropython,pfalcon/micropython,jmarcelino/pycom-micropython,emfcamp/micropython,adafruit/micropython,tobbad/micropython,AriZuu/micropython,ganshun666/micropython,dmazzella/micropython,torwag/micropython,chrisdearman/micropython,pfalcon/micropython,selste/micropython,selste/micropython,matthewelse/micropython,Timmenem/micropython,trezor/micropython,pramasoul/micropython,swegener/micropython,matthewelse/micropython,jmarcelino/pycom-micropython,lowRISC/micropython,pramasoul/micropython,cwyark/micropython,infinnovation/micropython,puuu/micropython,ganshun666/micropython,cwyark/micropython,dxxb/micropython,hiway/micropython,blazewicz/micropython,lowRISC/micropython,jmarcelino/pycom-micropython,Timmenem/micropython,torwag/micropython,infinnovation/micropython,hosaka/micropython,trezor/micropython,cwyark/micropython,micropython/micropython-esp32 | ---
+++
@@ -6,7 +6,7 @@
import _thread
# different implementations have different minimum sizes
-if sys.implementation == 'micropython':
+if sys.implementation.name == 'micropython':
sz = 2 * 1024
else:
sz = 32 * 1024
@@ -19,6 +19,9 @@
with lock:
global n_finished
n_finished += 1
+
+# reset stack size to default
+_thread.stack_size()
# test set/get of stack size
print(_thread.stack_size()) |
7a68eaf67ce57be832e37922c52f1b8bc73a5e69 | indra/tests/test_kappa_util.py | indra/tests/test_kappa_util.py | from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import json
from os import path
from indra.util.kappa_util import im_json_to_graph
def test_kappy_influence_json_to_graph():
with open(path.join(path.dirname(path.abspath(__file__)),
'kappy_influence.json'), 'r') as f:
imap = json.load(f)
graph = im_json_to_graph(imap)
assert graph is not None, 'No graph produced.'
n_nodes = len(graph.nodes)
n_edges = len(graph.edges)
assert n_nodes == 13, \
'Wrong number (%d vs. %d) of nodes on the graph.' % (n_nodes, 13)
assert n_edges == 6, \
"Wrong number (%d vs. %d) of edges on graph." % (n_edges, 4)
| from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import json
from os import path
from indra.util.kappa_util import im_json_to_graph
def test_kappy_influence_json_to_graph():
with open(path.join(path.dirname(path.abspath(__file__)),
'kappy_influence.json'), 'r') as f:
imap = json.load(f)
graph = im_json_to_graph(imap)
assert graph is not None, 'No graph produced.'
n_nodes = len(graph.nodes())
n_edges = len(graph.edges())
assert n_nodes == 13, \
'Wrong number (%d vs. %d) of nodes on the graph.' % (n_nodes, 13)
assert n_edges == 6, \
"Wrong number (%d vs. %d) of edges on graph." % (n_edges, 4)
| Fix networkx 1.11 compatibility issue | Fix networkx 1.11 compatibility issue
| Python | bsd-2-clause | pvtodorov/indra,bgyori/indra,sorgerlab/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,johnbachman/indra,johnbachman/belpy,pvtodorov/indra,johnbachman/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,sorgerlab/belpy | ---
+++
@@ -11,8 +11,8 @@
imap = json.load(f)
graph = im_json_to_graph(imap)
assert graph is not None, 'No graph produced.'
- n_nodes = len(graph.nodes)
- n_edges = len(graph.edges)
+ n_nodes = len(graph.nodes())
+ n_edges = len(graph.edges())
assert n_nodes == 13, \
'Wrong number (%d vs. %d) of nodes on the graph.' % (n_nodes, 13)
assert n_edges == 6, \ |
ec9b1f0ebda55e3e02e597e10ac28d62286b922f | SimPEG/EM/NSEM/Utils/__init__.py | SimPEG/EM/NSEM/Utils/__init__.py | """ module SimPEG.EM.NSEM.Utils
Collection of utilities that are usefull for the NSEM problem
NOTE: These utilities are not well test, use with care
"""
from __future__ import absolute_import
from .MT1Dsolutions import get1DEfields # Add the names of the functions
from .MT1Danalytic import getEHfields, getImpedance
from .dataUtils import (getAppRes, appResPhs, rec_to_ndarr, rotateData,
skindepth, makeAnalyticSolution, plotMT1DModelData,
plotImpAppRes, printTime, convert3Dto1Dobject,
resampleNSEMdataAtFreq, extract_data_info)
from .ediFilesUtils import (EDIimporter, _findLatLong, _findLine, _findEDIcomp)
from .testUtils import (getAppResPhs, setup1DSurvey, setupSimpegNSEM_ePrimSec,
random, halfSpace, blockInhalfSpace, twoLayer)
| """ module SimPEG.EM.NSEM.Utils
Collection of utilities that are usefull for the NSEM problem
NOTE: These utilities are not well test, use with care
"""
from __future__ import absolute_import
from .MT1Dsolutions import get1DEfields # Add the names of the functions
from .MT1Danalytic import getEHfields, getImpedance
from .dataUtils import (getAppRes, appResPhs, rec_to_ndarr, rotateData,
skindepth, makeAnalyticSolution, plotMT1DModelData,
plotImpAppRes, printTime, convert3Dto1Dobject,
resample_data, extract_data_info)
from .ediFilesUtils import (EDIimporter, _findLatLong, _findLine, _findEDIcomp)
from .testUtils import (getAppResPhs, setup1DSurvey, setupSimpegNSEM_ePrimSec,
random, halfSpace, blockInhalfSpace, twoLayer)
| Fix import issue due to name changes | Fix import issue due to name changes
| Python | mit | simpeg/simpeg | ---
+++
@@ -12,7 +12,7 @@
from .dataUtils import (getAppRes, appResPhs, rec_to_ndarr, rotateData,
skindepth, makeAnalyticSolution, plotMT1DModelData,
plotImpAppRes, printTime, convert3Dto1Dobject,
- resampleNSEMdataAtFreq, extract_data_info)
+ resample_data, extract_data_info)
from .ediFilesUtils import (EDIimporter, _findLatLong, _findLine, _findEDIcomp)
from .testUtils import (getAppResPhs, setup1DSurvey, setupSimpegNSEM_ePrimSec,
random, halfSpace, blockInhalfSpace, twoLayer) |
3a470c02a1a171f876200258897d6e277a1aab91 | tournamentcontrol/competition/signals/__init__.py | tournamentcontrol/competition/signals/__init__.py | from django.db import models
from tournamentcontrol.competition.signals.custom import match_forfeit # noqa
from tournamentcontrol.competition.signals.ladders import ( # noqa
changed_points_formula,
scale_ladder_entry,
team_ladder_entry_aggregation,
)
from tournamentcontrol.competition.signals.matches import ( # noqa
match_saved_handler,
notify_match_forfeit_email,
)
from tournamentcontrol.competition.signals.places import ( # noqa
set_ground_latlng,
set_ground_timezone,
)
from tournamentcontrol.competition.signals.teams import delete_team # noqa
def delete_related(sender, instance, *args, **kwargs):
for ro, __ in instance._meta.get_all_related_objects_with_model():
name = ro.get_accessor_name()
if isinstance(ro.field, models.ManyToManyField):
continue
if isinstance(instance, ro.model):
continue
manager = getattr(instance, name)
for obj in manager.all():
obj.delete()
| from django.db import models
from tournamentcontrol.competition.signals.custom import match_forfeit # noqa
from tournamentcontrol.competition.signals.ladders import ( # noqa
changed_points_formula,
scale_ladder_entry,
team_ladder_entry_aggregation,
)
from tournamentcontrol.competition.signals.matches import ( # noqa
match_saved_handler,
notify_match_forfeit_email,
)
from tournamentcontrol.competition.signals.places import ( # noqa
set_ground_latlng,
set_ground_timezone,
)
from tournamentcontrol.competition.signals.teams import delete_team # noqa
def delete_related(sender, instance, *args, **kwargs):
for ro, __ in [
(f, f.model)
for f in instance._meta.get_fields()
if (f.one_to_many or f.one_to_one)
and f.auto_created and not f.concrete]:
name = ro.get_accessor_name()
if isinstance(ro.field, models.ManyToManyField):
continue
if isinstance(instance, ro.model):
continue
manager = getattr(instance, name)
for obj in manager.all():
obj.delete()
| Stop using the undocumented get_all_related_objects_with_model API | Stop using the undocumented get_all_related_objects_with_model API
| Python | bsd-3-clause | goodtune/vitriolic,goodtune/vitriolic,goodtune/vitriolic,goodtune/vitriolic | ---
+++
@@ -18,7 +18,11 @@
def delete_related(sender, instance, *args, **kwargs):
- for ro, __ in instance._meta.get_all_related_objects_with_model():
+ for ro, __ in [
+ (f, f.model)
+ for f in instance._meta.get_fields()
+ if (f.one_to_many or f.one_to_one)
+ and f.auto_created and not f.concrete]:
name = ro.get_accessor_name()
if isinstance(ro.field, models.ManyToManyField):
continue |
cbb90d03b83a495b1c46514a583538f2cfc0d29c | test/functional/test_manager.py | test/functional/test_manager.py | from osmviz.manager import PILImageManager, OSMManager
import PIL.Image as Image
def test_pil():
imgr = PILImageManager("RGB")
osm = OSMManager(image_manager=imgr)
image, bnds = osm.createOSMImage((30, 35, -117, -112), 9)
wh_ratio = float(image.size[0]) / image.size[1]
image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS)
del image
image2.show()
if __name__ == "__main__":
test_pil()
# End of file
| from osmviz.manager import PILImageManager, OSMManager
import PIL.Image as Image
def test_pil():
image_manager = PILImageManager("RGB")
osm = OSMManager(image_manager=image_manager)
image, bounds = osm.createOSMImage((30, 31, -117, -116), 9)
wh_ratio = float(image.size[0]) / image.size[1]
image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS)
del image
image2.show()
if __name__ == "__main__":
test_pil()
# End of file
| Reduce number of tiles downloaded | Reduce number of tiles downloaded
| Python | mit | hugovk/osmviz,hugovk/osmviz | ---
+++
@@ -3,9 +3,9 @@
def test_pil():
- imgr = PILImageManager("RGB")
- osm = OSMManager(image_manager=imgr)
- image, bnds = osm.createOSMImage((30, 35, -117, -112), 9)
+ image_manager = PILImageManager("RGB")
+ osm = OSMManager(image_manager=image_manager)
+ image, bounds = osm.createOSMImage((30, 31, -117, -116), 9)
wh_ratio = float(image.size[0]) / image.size[1]
image2 = image.resize((int(800 * wh_ratio), 800), Image.ANTIALIAS)
del image |
a1e90125199a12beaa132c4d1eba4c09be3f9ba0 | fileupload/serialize.py | fileupload/serialize.py | # encoding: utf-8
import mimetypes
import re
from django.core.urlresolvers import reverse
def order_name(name):
"""order_name -- Limit a text to 20 chars length, if necessary strips the
middle of the text and substitute it for an ellipsis.
name -- text to be limited.
"""
name = re.sub(r'^.*/', '', name)
if len(name) <= 20:
return name
return name[:10] + "..." + name[-7:]
def serialize(instance, file_attr='file'):
"""serialize -- Serialize a Picture instance into a dict.
instance -- Picture instance
file_attr -- attribute name that contains the FileField or ImageField
"""
obj = getattr(instance, file_attr)
return {
'resource_id': instance.pk,
'url': obj.url,
'file_type': obj.file_type,
'name': order_name(obj.name),
'type': mimetypes.guess_type(obj.path)[0] or 'image/png',
'thumbnailUrl': obj.url,
'size': obj.size,
'deleteUrl': reverse('upload-delete', args=[instance.pk]),
'deleteType': 'DELETE',
}
| # encoding: utf-8
import mimetypes
import re
from django.core.urlresolvers import reverse
def order_name(name):
"""order_name -- Limit a text to 20 chars length, if necessary strips the
middle of the text and substitute it for an ellipsis.
name -- text to be limited.
"""
name = re.sub(r'^.*/', '', name)
if len(name) <= 20:
return name
return name[:10] + "..." + name[-7:]
def serialize(instance, file_attr='file'):
"""serialize -- Serialize a Picture instance into a dict.
instance -- Picture instance
file_attr -- attribute name that contains the FileField or ImageField
"""
obj = getattr(instance, file_attr)
return {
'resource_id': instance.pk,
'url': obj.url,
'file_type': instance.file_type,
'name': order_name(obj.name),
'type': mimetypes.guess_type(obj.path)[0] or 'image/png',
'thumbnailUrl': obj.url,
'size': obj.size,
'deleteUrl': reverse('upload-delete', args=[instance.pk]),
'deleteType': 'DELETE',
}
| FIX add links to reports generated v2 | FIX add links to reports generated v2
| Python | mit | semitki/canales,semitki/canales,semitki/canales | ---
+++
@@ -28,7 +28,7 @@
return {
'resource_id': instance.pk,
'url': obj.url,
- 'file_type': obj.file_type,
+ 'file_type': instance.file_type,
'name': order_name(obj.name),
'type': mimetypes.guess_type(obj.path)[0] or 'image/png',
'thumbnailUrl': obj.url, |
40c15fb4300dfe729461e0bc09066f8176830188 | src/cutecoin/main.py | src/cutecoin/main.py | """
Created on 1 févr. 2014
@author: inso
"""
import signal
import sys
import asyncio
from quamash import QEventLoop
from PyQt5.QtWidgets import QApplication
from cutecoin.gui.mainwindow import MainWindow
from cutecoin.core.app import Application
if __name__ == '__main__':
# activate ctrl-c interrupt
signal.signal(signal.SIGINT, signal.SIG_DFL)
cutecoin = QApplication(sys.argv)
loop = QEventLoop(cutecoin)
asyncio.set_event_loop(loop)
with loop:
app = Application.startup(sys.argv, cutecoin, loop)
window = MainWindow(app)
window.startup()
loop.run_forever()
sys.exit()
| """
Created on 1 févr. 2014
@author: inso
"""
import signal
import sys
import asyncio
import logging
import os
from quamash import QEventLoop
from PyQt5.QtWidgets import QApplication
from cutecoin.gui.mainwindow import MainWindow
from cutecoin.core.app import Application
def async_exception_handler(loop, context):
"""
An exception handler which exists the program if the exception
was not catch
:param loop: the asyncio loop
:param context: the exception context
"""
logging.debug('Exception handler executing')
message = context.get('message')
if not message:
message = 'Unhandled exception in event loop'
try:
exception = context['exception']
except KeyError:
exc_info = False
else:
exc_info = (type(exception), exception, exception.__traceback__)
log_lines = [message]
for key in [k for k in sorted(context) if k not in {'message', 'exception'}]:
log_lines.append('{}: {!r}'.format(key, context[key]))
logging.error('\n'.join(log_lines), exc_info=exc_info)
os._exit(1)
if __name__ == '__main__':
# activate ctrl-c interrupt
signal.signal(signal.SIGINT, signal.SIG_DFL)
cutecoin = QApplication(sys.argv)
loop = QEventLoop(cutecoin)
loop.set_exception_handler(async_exception_handler)
asyncio.set_event_loop(loop)
with loop:
app = Application.startup(sys.argv, cutecoin, loop)
window = MainWindow(app)
window.startup()
loop.run_forever()
sys.exit()
| Exit process when exception not catch in async task | Exit process when exception not catch in async task
| Python | mit | ucoin-io/cutecoin,ucoin-io/cutecoin,ucoin-io/cutecoin | ---
+++
@@ -6,11 +6,41 @@
import signal
import sys
import asyncio
+import logging
+import os
from quamash import QEventLoop
from PyQt5.QtWidgets import QApplication
from cutecoin.gui.mainwindow import MainWindow
from cutecoin.core.app import Application
+
+
+def async_exception_handler(loop, context):
+ """
+ An exception handler which exists the program if the exception
+ was not catch
+ :param loop: the asyncio loop
+ :param context: the exception context
+ """
+ logging.debug('Exception handler executing')
+ message = context.get('message')
+ if not message:
+ message = 'Unhandled exception in event loop'
+
+ try:
+ exception = context['exception']
+ except KeyError:
+ exc_info = False
+ else:
+ exc_info = (type(exception), exception, exception.__traceback__)
+
+ log_lines = [message]
+ for key in [k for k in sorted(context) if k not in {'message', 'exception'}]:
+ log_lines.append('{}: {!r}'.format(key, context[key]))
+
+ logging.error('\n'.join(log_lines), exc_info=exc_info)
+ os._exit(1)
+
if __name__ == '__main__':
# activate ctrl-c interrupt
@@ -18,6 +48,7 @@
cutecoin = QApplication(sys.argv)
loop = QEventLoop(cutecoin)
+ loop.set_exception_handler(async_exception_handler)
asyncio.set_event_loop(loop)
with loop: |
2b89b28b2a5900d8e5be5509b6f01054e2bfdb23 | app.py | app.py | import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.heroku import Heroku
# Initialize Application
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(
use_debugger=True,
use_reloader=True)
)
heroku = Heroku()
app.config.from_object('config.Development')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
@app.route('/')
def test():
return "lala"
# Load Controllers
# Load Endpoints
# app.register_blueprint(users, url_prefix='/users')
#
heroku.init_app(app)
db.init_app(app)
#
# if __name__ == '__main__':
# port = int(os.environ.get('PORT', 5000))
# app.run(host='0.0.0.0', port=port, debug=True)
| import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
from flask.ext.heroku import Heroku
# Initialize Application
app = Flask(__name__)
manager = Manager(app)
manager.add_command("runserver", Server(
host='0.0.0.0',
port = int(os.environ.get('PORT', 5000)),
use_debugger=True,
use_reloader=True)
)
heroku = Heroku()
app.config.from_object('config.Development')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
@app.route('/')
def test():
return "lala"
# Load Controllers
# Load Endpoints
# app.register_blueprint(users, url_prefix='/users')
#
heroku.init_app(app)
db.init_app(app)
#
# if __name__ == '__main__':
# port = int(os.environ.get('PORT', 5000))
# app.run(host='0.0.0.0', port=port, debug=True)
| Fix host and port for Heroku | Fix host and port for Heroku
| Python | mpl-2.0 | xeBuz/CompanyDK,xeBuz/CompanyDK,xeBuz/CompanyDK,xeBuz/CompanyDK | ---
+++
@@ -10,6 +10,8 @@
manager = Manager(app)
manager.add_command("runserver", Server(
+ host='0.0.0.0',
+ port = int(os.environ.get('PORT', 5000)),
use_debugger=True,
use_reloader=True)
) |
257686cfa72318c0b476d4623731080f848c4942 | app.py | app.py | import requests
from flask import Flask, render_template
app = Flask(__name__)
BBC_id= "bbc-news"
API_KEY = "c4002216fa5446d582b5f31d73959d36"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={API_KEY}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run() | import requests
from flask import Flask, render_template
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile("appconfig.py")
BBC_id= "bbc-news"
@app.route("/")
def index():
r = requests.get(
f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={app.config['API_KEY']}"
)
return render_template("index.html", articles=r.json().get("articles"))
if __name__ == "__main__":
app.run() | Use instance folder to load configuration file. | Use instance folder to load configuration file.
| Python | mit | alchermd/headlines,alchermd/headlines | ---
+++
@@ -1,15 +1,15 @@
import requests
from flask import Flask, render_template
-app = Flask(__name__)
+app = Flask(__name__, instance_relative_config=True)
+app.config.from_pyfile("appconfig.py")
BBC_id= "bbc-news"
-API_KEY = "c4002216fa5446d582b5f31d73959d36"
@app.route("/")
def index():
r = requests.get(
- f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={API_KEY}"
+ f"https://newsapi.org/v1/articles?source={BBC_id}&sortBy=top&apiKey={app.config['API_KEY']}"
)
return render_template("index.html", articles=r.json().get("articles"))
|
2a8a50528a36525f37ea26dbccefd153d4cdcc8c | go/apps/dialogue/definition.py | go/apps/dialogue/definition.py | from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class SendDialogueAction(ConversationAction):
action_name = 'send_dialogue'
action_display_name = 'Send Dialogue'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting(generic_sends=True):
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_survey', batch_id=self._conv.get_latest_batch_key(),
msg_options={}, is_client_initiated=False,
delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
redirect_to = 'user_data'
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'dialogue'
actions = (
SendDialogueAction,
DownloadUserDataAction,
)
| from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class SendDialogueAction(ConversationAction):
action_name = 'send_dialogue'
action_display_name = 'Send Dialogue'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting(generic_sends=True):
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'send_dialogue', batch_id=self._conv.get_latest_batch_key(),
msg_options={}, is_client_initiated=False,
delivery_class=self._conv.delivery_class)
class DownloadUserDataAction(ConversationAction):
action_name = 'download_user_data'
action_display_name = 'Download User Data'
redirect_to = 'user_data'
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'dialogue'
actions = (
SendDialogueAction,
DownloadUserDataAction,
)
| Fix name of command sent by send_dialogue action. | Fix name of command sent by send_dialogue action.
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go | ---
+++
@@ -19,7 +19,7 @@
def perform_action(self, action_data):
return self.send_command(
- 'send_survey', batch_id=self._conv.get_latest_batch_key(),
+ 'send_dialogue', batch_id=self._conv.get_latest_batch_key(),
msg_options={}, is_client_initiated=False,
delivery_class=self._conv.delivery_class)
|
d1ccd3e93043d11a22e873e7ccdb76d749746151 | api/app/app.py | api/app/app.py | import os
import logging
from flask import Flask
from model.base import db
from route.base import blueprint
# Register models and routes
import model
import route
logging.basicConfig(level=logging.INFO)
app = Flask(__name__)
# app.config['PROPAGATE_EXCEPTIONS'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://' +\
os.environ['USER'] + ':' +\
os.environ['PASSWORD'] + '@' +\
'db/' + os.environ['SCHEMA']
db.init_app(app)
with app.test_request_context():
db.create_all()
db.session.commit()
app.register_blueprint(blueprint)
| import os
import logging
from uwsgidecorators import postfork
from flask import Flask
from model.base import db
from route.base import blueprint
# Register models and routes
import model
import route
logging.basicConfig(level=logging.INFO)
app = Flask(__name__)
# app.config['PROPAGATE_EXCEPTIONS'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://' +\
os.environ['USER'] + ':' +\
os.environ['PASSWORD'] + '@' +\
'db/' + os.environ['SCHEMA']
db.init_app(app)
with app.test_request_context():
db.create_all()
db.session.commit()
app.register_blueprint(blueprint)
@postfork
def refresh_db():
db.session.remove()
db.init_app(app)
| Refresh db connections on uwsgi fork | Refresh db connections on uwsgi fork
| Python | mit | hexa4313/velov-companion-server,hexa4313/velov-companion-server | ---
+++
@@ -1,5 +1,6 @@
import os
import logging
+from uwsgidecorators import postfork
from flask import Flask
from model.base import db
from route.base import blueprint
@@ -24,3 +25,9 @@
db.session.commit()
app.register_blueprint(blueprint)
+
+
+@postfork
+def refresh_db():
+ db.session.remove()
+ db.init_app(app) |
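A hypothetical sketch of the same idea as the commit above, written with plain SQLAlchemy instead of Flask-SQLAlchemy; the connection URL and function name are made up. Connections opened in the uWSGI master before fork() must not be shared, so each worker discards the inherited pool right after forking.

    # illustrative only; uwsgidecorators is importable only when running under uWSGI
    from uwsgidecorators import postfork
    from sqlalchemy import create_engine

    engine = create_engine("postgresql://user:password@db/schema")  # placeholder URL

    @postfork
    def reset_pool():
        # drop any connections inherited from the parent process;
        # each worker lazily opens fresh ones afterwards
        engine.dispose()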
08fbfa49129a42821b128913e4aa9fbacf966f20 | grizzly-jersey/setup.py | grizzly-jersey/setup.py | import subprocess
import sys
import setup_util
import os
def start(args):
try:
subprocess.check_call("mvn clean package shade:shade", shell=True, cwd="grizzly-jersey")
subprocess.Popen("java -jar target/grizzly-jersey-example-0.1.jar".rsplit(" "), cwd="grizzly-jersey")
return 0
except subprocess.CalledProcessError:
return 1
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'grizzly-jersey' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
return 0
| import subprocess
import sys
import setup_util
import os
def start(args):
try:
subprocess.check_call("mvn clean package", shell=True, cwd="grizzly-jersey")
subprocess.Popen("java -jar target/grizzly-jersey-example-0.1.jar".rsplit(" "), cwd="grizzly-jersey")
return 0
except subprocess.CalledProcessError:
return 1
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'grizzly-jersey' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
return 0
| Fix the build so it no longer double-shades. This removes all the warnings it printed. | Fix the build so it no longer double-shades. This removes all the warnings it printed.
| Python | bsd-3-clause | yunspace/FrameworkBenchmarks,grob/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,grob/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,testn/FrameworkBenchmarks,dmacd/FB-try1,k-r-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sgml/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,joshk/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,methane/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,grob/FrameworkBenchmarks,herloct/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,doom369/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jamming/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sgml/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,valyala/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,actframework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,methane/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,dmacd/FB-try1,sgml/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zloster/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks
,martin-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,grob/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Verber/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,actframework/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sxend/FrameworkBenchmarks,testn/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,methane/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,torhve/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,dmacd/FB-try1,sxend/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jamming/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,testn/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,joshk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,khellang/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,joshk/FrameworkBenchmarks,valyala/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zloster/FrameworkBenchmarks,leafo/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jamming/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,denkab/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,grob/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,lcp0578
/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,testn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,methane/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,herloct/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,torhve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,doom369/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,dmacd/FB-try1,Jesterovskiy/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Verber/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,denkab/FrameworkBenchmarks,testn/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,doom369/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sgml/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,leafo/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sgml/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sxend/FrameworkBenchmarks,testn/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,methane/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Verber/FrameworkBenchmarks,joshk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,denkab/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jamming/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,doom369/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,herloct/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,valyala/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,dmacd/FB-try1,kbrock/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,mfirry/Framewo
rkBenchmarks,s-ludwig/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,doom369/FrameworkBenchmarks,leafo/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,khellang/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,khellang/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sgml/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,denkab/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sxend/FrameworkBenchmarks,valyala/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,denkab/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zloster/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,torhve/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,dmacd/FB-try1,knewmanTE/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,actframework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,valyala/FrameworkBenchmarks,grob/FrameworkBenchmarks,leafo/FrameworkBenchmarks,jamming/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,torhve/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zapov/FrameworkBenchmarks,valyala/FrameworkBenchmarks,grob/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kellab
yte/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,actframework/FrameworkBenchmarks,actframework/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sxend/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,doom369/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,actframework/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,valyala/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,torhve/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,khellang/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,methane/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,testn/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,testn/FrameworkBenchmarks,methane/FrameworkBenchmarks,methane/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Verber/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zloster/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,actframework/FrameworkBenchmarks,grob/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,valyala/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,testn/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,leafo/FrameworkBenchmarks,methane/FrameworkBenchmarks,denkab/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sgml/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kostya-sh/FrameworkBenchmar
ks,lcp0578/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,khellang/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zapov/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zloster/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Verber/FrameworkBenchmarks,dmacd/FB-try1,Rayne/FrameworkBenchmarks,sxend/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,torhve/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,leafo/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Verber/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zloster/FrameworkBenchmarks,methane/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,grob/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sxend/FrameworkBenchmarks,testn/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,methane/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,khellang/FrameworkBenchmarks,methane/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,doom369/FrameworkBenchmarks,denkab/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,herloct/FrameworkBe
nchmarks,Jesterovskiy/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zloster/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sxend/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,herloct/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,dmacd/FB-try1,leafo/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,methane/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,leafo/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sgml/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,methane/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,dmacd/FB-try1,circlespainter/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,doom369/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zapov/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,valyala/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zapov/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hamiltont/FrameworkB
enchmarks,jebbstewart/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sxend/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,denkab/FrameworkBenchmarks,grob/FrameworkBenchmarks,dmacd/FB-try1,youprofit/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,actframework/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,valyala/FrameworkBenchmarks,doom369/FrameworkBenchmarks,grob/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jamming/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,khellang/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Verber/FrameworkBenchmarks,leafo/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,joshk/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Verber/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,denkab/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,dmacd/FB-try1,zane-techempower/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Verber/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,denkab/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,diab
lonhn/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,herloct/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sgml/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,leafo/FrameworkBenchmarks,actframework/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,torhve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,testn/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,doom369/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,grob/FrameworkBenchmarks,grob/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,doom369/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zapov/FrameworkBenchmarks,valyala/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zapov/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,khellang/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jamming/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jett
y-project/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,leafo/FrameworkBenchmarks,herloct/FrameworkBenchmarks,leafo/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sxend/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,denkab/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,sxend/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zloster/FrameworkBenchmarks,dmacd/FB-try1,hamiltont/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,grob/FrameworkBenchmarks,herloct/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,testn/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,denkab/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks | ---
+++
@@ -5,7 +5,7 @@
def start(args):
try:
- subprocess.check_call("mvn clean package shade:shade", shell=True, cwd="grizzly-jersey")
+ subprocess.check_call("mvn clean package", shell=True, cwd="grizzly-jersey")
subprocess.Popen("java -jar target/grizzly-jersey-example-0.1.jar".rsplit(" "), cwd="grizzly-jersey")
return 0
except subprocess.CalledProcessError: |
8ae763c69bbba11a264f8404b8189a53c63d4f40 | marathon_itests/environment.py | marathon_itests/environment.py | import time
from itest_utils import wait_for_marathon
from itest_utils import print_container_logs
def before_all(context):
wait_for_marathon()
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
if scenario.status != 'passed':
print "Zookeeper container logs:"
print_container_logs('zookeeper')
print "Marathon container logs:"
print_container_logs('marathon')
if context.client:
while True:
apps = context.client.list_apps()
if not apps:
break
for app in apps:
context.client.delete_app(app.id, force=True)
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
| import time
from itest_utils import wait_for_marathon
from itest_utils import print_container_logs
def before_all(context):
wait_for_marathon()
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
if context.client:
while True:
apps = context.client.list_apps()
if not apps:
break
for app in apps:
context.client.delete_app(app.id, force=True)
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
def after_step(context, step):
if step.status == "failed":
print "Zookeeper container logs:"
print_container_logs('zookeeper')
print "Marathon container logs:"
print_container_logs('marathon')
| Move log print to after_step | Move log print to after_step
| Python | apache-2.0 | Yelp/paasta,gstarnberger/paasta,gstarnberger/paasta,Yelp/paasta,somic/paasta,somic/paasta | ---
+++
@@ -10,11 +10,6 @@
def after_scenario(context, scenario):
"""If a marathon client object exists in our context, delete any apps in Marathon and wait until they die."""
- if scenario.status != 'passed':
- print "Zookeeper container logs:"
- print_container_logs('zookeeper')
- print "Marathon container logs:"
- print_container_logs('marathon')
if context.client:
while True:
apps = context.client.list_apps()
@@ -25,3 +20,11 @@
time.sleep(0.5)
while context.client.list_deployments():
time.sleep(0.5)
+
+
+def after_step(context, step):
+ if step.status == "failed":
+ print "Zookeeper container logs:"
+ print_container_logs('zookeeper')
+ print "Marathon container logs:"
+ print_container_logs('marathon') |
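A hypothetical, stripped-down sketch of the behave hook pattern used above; older behave releases expose step.status as the plain string "failed", which is what the comparison assumes, and the printed fields are illustrative.

    # environment.py - illustrative only
    def after_step(context, step):
        # runs after every step; only failed steps trigger the diagnostics
        if step.status == "failed":
            print("step failed: %s %s" % (step.keyword, step.name))

    def after_scenario(context, scenario):
        # runs once per scenario, whether it passed or failed
        pass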
605443886582d13c2b45b19fad86854bf4e8ddbd | backend/catalogue/serializers.py | backend/catalogue/serializers.py | from rest_framework import serializers
from .models import Release, Track, Comment
class CommentSerializer(serializers.ModelSerializer):
class Meta:
model = Comment
fields = ('id', 'comment')
class TrackSerializer(serializers.ModelSerializer):
cdid = serializers.StringRelatedField(
read_only=True
)
class Meta:
model = Track
fields = ('trackid', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'cdid')
class ReleaseSerializer(serializers.ModelSerializer):
tracks = serializers.HyperlinkedIdentityField(view_name='release-tracks')
comments = serializers.HyperlinkedIdentityField(view_name='release-comments')
class Meta:
model = Release
fields = ('id', 'arrivaldate', 'artist', 'title', 'year', 'local', 'cpa', 'compilation', 'female', 'tracks', 'comments')
| from rest_framework import serializers
from .models import Release, Track, Comment
class CommentSerializer(serializers.ModelSerializer):
class Meta:
model = Comment
fields = ('id', 'comment')
class TrackSerializer(serializers.ModelSerializer):
class Meta:
model = Track
fields = ('trackid', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'release')
class ReleaseSerializer(serializers.ModelSerializer):
tracks = serializers.HyperlinkedIdentityField(view_name='release-tracks')
comments = serializers.HyperlinkedIdentityField(view_name='release-comments')
class Meta:
model = Release
fields = ('id', 'arrivaldate', 'artist', 'title', 'year','company','genre','format', 'local', 'cpa', 'compilation', 'female', 'tracks', 'comments')
| Add more fields to Release serializer. | Add more fields to Release serializer.
| Python | mit | ThreeDRadio/playlists,ThreeDRadio/playlists,ThreeDRadio/playlists | ---
+++
@@ -8,13 +8,10 @@
fields = ('id', 'comment')
class TrackSerializer(serializers.ModelSerializer):
- cdid = serializers.StringRelatedField(
- read_only=True
- )
class Meta:
model = Track
- fields = ('trackid', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'cdid')
+ fields = ('trackid', 'tracknum', 'trackartist', 'tracktitle', 'tracklength', 'release')
class ReleaseSerializer(serializers.ModelSerializer):
tracks = serializers.HyperlinkedIdentityField(view_name='release-tracks')
@@ -22,4 +19,4 @@
class Meta:
model = Release
- fields = ('id', 'arrivaldate', 'artist', 'title', 'year', 'local', 'cpa', 'compilation', 'female', 'tracks', 'comments')
+ fields = ('id', 'arrivaldate', 'artist', 'title', 'year','company','genre','format', 'local', 'cpa', 'compilation', 'female', 'tracks', 'comments') |
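A hypothetical illustration of how the widened ReleaseSerializer might be used and what its output could roughly look like; every value and URL below is invented. HyperlinkedIdentityField needs the request in the serializer context to build absolute links to the named routes.

    # serializer = ReleaseSerializer(release, context={'request': request})
    # serializer.data  ->  roughly:
    # {
    #     "id": 42, "arrivaldate": "2016-01-01", "artist": "Some Band",
    #     "title": "Some Album", "year": 1999, "company": "Some Label",
    #     "genre": "Rock", "format": "CD", "local": True, "cpa": False,
    #     "compilation": False, "female": False,
    #     "tracks": "http://testserver/releases/42/tracks/",
    #     "comments": "http://testserver/releases/42/comments/"
    # }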
1118541b1cdea7f6079bb63d000ba54f69dfa119 | books/views.py | books/views.py | from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render
from books import models
from books import forms
@login_required
def receipt_list(request, user_id):
user = User.objects.get(id=user_id)
ctx = {}
ctx['user'] = user
ctx['receipts'] = models.Receipt.objects.filter(user=user).order_by('-id')
return render(request, 'receipt_list.html', context=ctx)
@login_required
def receipt_create(request, user_id):
if request.method == "POST":
form = forms.ReceiptForm(request.POST)
if form.is_valid():
data = form.cleaned_data
models.Receipt.objects.create(title=data.get("title"),
price=data.get("price"),
user=request.user)
return HttpResponseRedirect(reverse('receipt_list',
args=[request.user.id]))
else:
form = forms.ReceiptForm()
return render(request, 'receipt_create.html', {'form': form})
| from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.shortcuts import render
from books import models
from books import forms
@login_required
def receipt_list(request, user_id):
user = User.objects.get(id=user_id)
ctx = {}
ctx['user'] = user
ctx['receipts'] = models.Receipt.objects.filter(user=user).order_by('-id')
return render(request, 'receipt_list.html', context=ctx)
@login_required
def receipt_create(request, user_id):
if request.method == "POST":
form = forms.ReceiptForm(request.POST)
if form.is_valid():
form.instance.user = request.user
form.save()
return redirect(reverse('receipt_list', args=[request.user.id]))
else:
form = forms.ReceiptForm()
return render(request, 'receipt_create.html', {'form': form})
| Use form.save for receipt creation | Use form.save for receipt creation
| Python | mit | trimailov/finance,trimailov/finance,trimailov/finance | ---
+++
@@ -1,7 +1,7 @@
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
-from django.http import HttpResponseRedirect
+from django.shortcuts import redirect
from django.shortcuts import render
from books import models
@@ -22,12 +22,9 @@
if request.method == "POST":
form = forms.ReceiptForm(request.POST)
if form.is_valid():
- data = form.cleaned_data
- models.Receipt.objects.create(title=data.get("title"),
- price=data.get("price"),
- user=request.user)
- return HttpResponseRedirect(reverse('receipt_list',
- args=[request.user.id]))
+ form.instance.user = request.user
+ form.save()
+ return redirect(reverse('receipt_list', args=[request.user.id]))
else:
form = forms.ReceiptForm()
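A hypothetical sketch of the closely related commit=False idiom, for comparison with the form.instance approach adopted above; ReceiptForm is assumed to be a ModelForm over a model with a user foreign key.

    if form.is_valid():
        receipt = form.save(commit=False)  # build the instance without writing to the DB
        receipt.user = request.user        # fill in fields the form does not cover
        receipt.save()                     # persist once everything is set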
|
1e01e66f23f7a2ca541a29d29658749f95352c41 | generate-key.py | generate-key.py | #!/usr/bin/python
import os
import sqlite3
import sys
import time
if len(sys.argv) < 3:
raise ValueError('Usage: %s "Firstnam Lastname" [email protected]' % sys.argv[0])
db = sqlite3.connect('/var/lib/zon-api/data.db')
api_key = str(os.urandom(26).encode('hex'))
tier = 'free'
name = sys.argv[1]
email = sys.argv[2]
requests = 0
reset = int(time.time())
query = 'INSERT INTO client VALUES (?, ?, ?, ?, ?, ?)'
db.execute(query, (api_key, tier, name, email, requests, reset))
db.commit()
db.close()
print api_key
| #!/usr/bin/python
import os
import sqlite3
import sys
import time
db = sqlite3.connect('/var/lib/zon-api/data.db')
if len(sys.argv) < 3:
print('Usage: %s "Firstname Lastname" [email protected]' % sys.argv[0])
print('\nLast keys:')
query = 'SELECT * FROM client ORDER by reset DESC limit 10'
for client in db.execute(query):
print('{0}: "{2}" {3}'.format(*client))
sys.exit(1)
api_key = str(os.urandom(26).encode('hex'))
tier = 'free'
name = sys.argv[1]
email = sys.argv[2]
requests = 0
reset = int(time.time())
query = 'INSERT INTO client VALUES (?, ?, ?, ?, ?, ?)'
db.execute(query, (api_key, tier, name, email, requests, reset))
db.commit()
db.close()
print api_key
| Print last 10 generated keys when no arguments were given. | Print last 10 generated keys when no arguments were given.
| Python | bsd-3-clause | ZeitOnline/content-api,ZeitOnline/content-api | ---
+++
@@ -5,10 +5,16 @@
import sys
import time
+db = sqlite3.connect('/var/lib/zon-api/data.db')
+
if len(sys.argv) < 3:
- raise ValueError('Usage: %s "Firstnam Lastname" [email protected]' % sys.argv[0])
+ print('Usage: %s "Firstname Lastname" [email protected]' % sys.argv[0])
+ print('\nLast keys:')
+ query = 'SELECT * FROM client ORDER by reset DESC limit 10'
+ for client in db.execute(query):
+ print('{0}: "{2}" {3}'.format(*client))
+ sys.exit(1)
-db = sqlite3.connect('/var/lib/zon-api/data.db')
api_key = str(os.urandom(26).encode('hex'))
tier = 'free'
name = sys.argv[1] |
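A hypothetical illustration of the row layout the listing above assumes; the tuple mirrors the INSERT column order (api_key, tier, name, email, requests, reset) and every value is invented.

    client = ("abc123", "free", "Ada Lovelace", "[email protected]", 0, 1700000000)
    print('{0}: "{2}" {3}'.format(*client))
    # -> abc123: "Ada Lovelace" [email protected]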
8c01d9c3b7d1938e78a52a86dfb4d8536987c223 | hackfmi/urls.py | hackfmi/urls.py | from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^search/(?P<name>\w+)/$', 'members.views.search', name='search'),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT}),
)
| from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^search/(?P<name>.*)/$', 'members.views.search', name='search'),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT}),
)
| Change search url regexp to match all characters | Change search url regexp to match all characters
| Python | mit | Hackfmi/Diaphanum,Hackfmi/Diaphanum | ---
+++
@@ -20,7 +20,7 @@
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
- url(r'^search/(?P<name>\w+)/$', 'members.views.search', name='search'),
+ url(r'^search/(?P<name>.*)/$', 'members.views.search', name='search'),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'), |
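A hypothetical, self-contained illustration of why the pattern above was widened: \w+ only matches word characters, so names containing spaces or other punctuation never reached the search view, while .* accepts them.

    import re

    old = re.compile(r'^search/(?P<name>\w+)/$')
    new = re.compile(r'^search/(?P<name>.*)/$')

    print(bool(old.match('search/ivan/')))         # True
    print(bool(old.match('search/ivan petrov/')))  # False - space is not a word character
    print(bool(new.match('search/ivan petrov/')))  # True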
2af5eff46cbae0927aeee135c22304e108519659 | server/python_django/file_uploader/__init__.py | server/python_django/file_uploader/__init__.py | """
@author: Ferdinand E. Silva
@email: [email protected]
@website: http://ferdinandsilva.com
"""
import os
from django.utils import simplejson as json
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
#read the file content, if it is not read when the request is multi part then the client get an error
fileContent = uploaded(fileSize)
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(fileContent)
file.close()
return json.dumps({"success": True})
else:
return json.dumps({"error": "File is too large."})
else:
return json.dumps({"error": "File has an invalid extension."})
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
| """
@author: Ferdinand E. Silva
@email: [email protected]
@website: http://ferdinandsilva.com
"""
import os
from django.conf import settings
from django.utils import simplejson as json
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=None):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit or settings.FILE_UPLOAD_MAX_MEMORY_SIZE
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
#read the file content, if it is not read when the request is multi part then the client get an error
fileContent = uploaded(fileSize)
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(fileContent)
file.close()
return json.dumps({"success": True})
else:
return json.dumps({"error": "File is too large."})
else:
return json.dumps({"error": "File has an invalid extension."})
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
| Use the default file upload max memory size | Use the default file upload max memory size
| Python | mit | SimonWaldherr/uploader,SimonWaldherr/uploader,FineUploader/fine-uploader,SimonWaldherr/uploader,FineUploader/fine-uploader,SimonWaldherr/uploader,SimonWaldherr/uploader,SimonWaldherr/uploader,SimonWaldherr/uploader,FineUploader/fine-uploader | ---
+++
@@ -4,14 +4,15 @@
@website: http://ferdinandsilva.com
"""
import os
+from django.conf import settings
from django.utils import simplejson as json
class qqFileUploader(object):
- def __init__(self, allowedExtensions=None, sizeLimit=1024):
+ def __init__(self, allowedExtensions=None, sizeLimit=None):
self.allowedExtensions = allowedExtensions or []
- self.sizeLimit = sizeLimit
+ self.sizeLimit = sizeLimit or settings.FILE_UPLOAD_MAX_MEMORY_SIZE
def handleUpload(self, request, uploadDirectory):
#read file info from stream |
5c874677cc978e1cdd563a563d62bae162d3b7ac | mycroft/skills/audioservice.py | mycroft/skills/audioservice.py | import time
from mycroft.messagebus.message import Message
class AudioService():
def __init__(self, emitter):
self.emitter = emitter
self.emitter.on('MycroftAudioServiceTrackInfoReply', self._track_info)
self.info = None
def _track_info(self, message=None):
self.info = message.data
def play(self, tracks=[], utterance=''):
self.emitter.emit(Message('MycroftAudioServicePlay',
data={'tracks': tracks,
'utterance': utterance}))
def track_info(self):
self.info = None
self.emitter.emit(Message('MycroftAudioServiceTrackInfo'))
while self.info is None:
time.sleep(0.1)
return self.info
| import time
from mycroft.messagebus.message import Message
class AudioService():
def __init__(self, emitter):
self.emitter = emitter
self.emitter.on('MycroftAudioServiceTrackInfoReply', self._track_info)
self.info = None
def _track_info(self, message=None):
self.info = message.data
def play(self, tracks=[], utterance=''):
if isinstance(tracks, basestring):
tracks = [tracks]
elif not isinstance(tracks, list):
raise ValueError
self.emitter.emit(Message('MycroftAudioServicePlay',
data={'tracks': tracks,
'utterance': utterance}))
def track_info(self):
self.info = None
self.emitter.emit(Message('MycroftAudioServiceTrackInfo'))
while self.info is None:
time.sleep(0.1)
return self.info
| Add check for valid type of tracks | Add check for valid type of tracks
| Python | apache-2.0 | aatchison/mycroft-core,MycroftAI/mycroft-core,MycroftAI/mycroft-core,aatchison/mycroft-core,linuxipho/mycroft-core,forslund/mycroft-core,linuxipho/mycroft-core,Dark5ide/mycroft-core,Dark5ide/mycroft-core,forslund/mycroft-core | ---
+++
@@ -13,6 +13,11 @@
self.info = message.data
def play(self, tracks=[], utterance=''):
+ if isinstance(tracks, basestring):
+ tracks = [tracks]
+ elif not isinstance(tracks, list):
+ raise ValueError
+
self.emitter.emit(Message('MycroftAudioServicePlay',
data={'tracks': tracks,
'utterance': utterance})) |
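A hypothetical, Python 3 flavoured sketch of the normalisation added above; the original guard uses basestring, which exists only on Python 2, and the function name here is invented.

    def normalise_tracks(tracks):
        if isinstance(tracks, str):
            return [tracks]          # a single URI becomes a one-element list
        if not isinstance(tracks, list):
            raise ValueError("tracks must be a string or a list of strings")
        return tracks

    print(normalise_tracks("file:///music/a.mp3"))   # ['file:///music/a.mp3']
    print(normalise_tracks(["a.mp3", "b.mp3"]))      # ['a.mp3', 'b.mp3']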
73fb23af2db047c8012382c03abbf48f392f63b9 | dimod/package_info.py | dimod/package_info.py | __version__ = '1.0.0.dev2'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = '[email protected]'
__description__ = 'A shared API for binary quadratic model samplers.'
| __version__ = '1.0.0.dev3'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = '[email protected]'
__description__ = 'A shared API for binary quadratic model samplers.'
 | Update version 1.0.0.dev2 -> 1.0.0.dev3 | Update version 1.0.0.dev2 -> 1.0.0.dev3 | Python | apache-2.0 | oneklc/dimod,oneklc/dimod | ---
+++
@@ -1,4 +1,4 @@
-__version__ = '1.0.0.dev2'
+__version__ = '1.0.0.dev3'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = '[email protected]'
__description__ = 'A shared API for binary quadratic model samplers.' |
77ef9f4a7ccd51d7b070da31ff4c30768653bb7b | tools/build_modref_templates.py | tools/build_modref_templates.py | #!/usr/bin/env python
"""Script to auto-generate our API docs.
"""
# stdlib imports
import os
# local imports
from apigen import ApiDocWriter
#*****************************************************************************
if __name__ == '__main__':
package = 'nipype'
outdir = os.path.join('api','generated')
docwriter = ApiDocWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='api')
print '%d files written' % len(docwriter.written_modules)
| #!/usr/bin/env python
"""Script to auto-generate our API docs.
"""
# stdlib imports
import os
# local imports
from apigen import ApiDocWriter
#*****************************************************************************
if __name__ == '__main__':
package = 'nipype'
outdir = os.path.join('api','generated')
docwriter = ApiDocWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='api')
print '%d files written' % len(docwriter.written_modules)
| Remove alloy and s3 from generated docs, just for 0.1 release. | Remove alloy and s3 from generated docs, just for 0.1 release.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@496 ead46cd0-7350-4e37-8683-fc4c6f79bf00
| Python | bsd-3-clause | blakedewey/nipype,gerddie/nipype,FCP-INDI/nipype,rameshvs/nipype,blakedewey/nipype,satra/NiPypeold,FCP-INDI/nipype,FredLoney/nipype,arokem/nipype,dmordom/nipype,pearsonlab/nipype,pearsonlab/nipype,Leoniela/nipype,glatard/nipype,mick-d/nipype_source,mick-d/nipype,dmordom/nipype,arokem/nipype,carlohamalainen/nipype,mick-d/nipype_source,sgiavasis/nipype,FredLoney/nipype,arokem/nipype,mick-d/nipype,FredLoney/nipype,carolFrohlich/nipype,JohnGriffiths/nipype,JohnGriffiths/nipype,glatard/nipype,gerddie/nipype,mick-d/nipype,sgiavasis/nipype,blakedewey/nipype,fprados/nipype,glatard/nipype,carlohamalainen/nipype,carolFrohlich/nipype,pearsonlab/nipype,grlee77/nipype,blakedewey/nipype,wanderine/nipype,mick-d/nipype,carolFrohlich/nipype,dmordom/nipype,rameshvs/nipype,JohnGriffiths/nipype,sgiavasis/nipype,fprados/nipype,FCP-INDI/nipype,iglpdc/nipype,grlee77/nipype,iglpdc/nipype,glatard/nipype,grlee77/nipype,wanderine/nipype,wanderine/nipype,dgellis90/nipype,JohnGriffiths/nipype,FCP-INDI/nipype,rameshvs/nipype,rameshvs/nipype,satra/NiPypeold,wanderine/nipype,christianbrodbeck/nipype,gerddie/nipype,gerddie/nipype,dgellis90/nipype,iglpdc/nipype,fprados/nipype,dgellis90/nipype,pearsonlab/nipype,sgiavasis/nipype,arokem/nipype,carolFrohlich/nipype,carlohamalainen/nipype,iglpdc/nipype,grlee77/nipype,dgellis90/nipype,Leoniela/nipype,mick-d/nipype_source,Leoniela/nipype,christianbrodbeck/nipype | ---
+++
@@ -19,6 +19,8 @@
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
+ '\.pipeline\.alloy$',
+ '\.pipeline\.s3_node_wrapper$',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='api') |
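The entries appended to module_skip_patterns in this commit are ordinary regular expressions matched against dotted module paths, which is why the dots are escaped and each pattern is anchored with a trailing $. The fragment below only mimics that matching; is_skipped is a hypothetical helper, not ApiDocWriter's real implementation.

import re

skip_patterns = [r'\.version$', r'\.interfaces\.afni$',
                 r'\.pipeline\.alloy$', r'\.pipeline\.s3_node_wrapper$']

def is_skipped(module_name):
    # A module is excluded if any pattern matches inside its dotted path.
    return any(re.search(p, module_name) for p in skip_patterns)

print(is_skipped('nipype.pipeline.alloy'))   # True  -> left out of the generated docs
print(is_skipped('nipype.pipeline.engine'))  # False -> still documented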
0f3c33de86d38cf47f84df97a79e838d37264b7c | sugar/session/LogWriter.py | sugar/session/LogWriter.py | import os
import sys
import dbus
class LogWriter:
def __init__(self, application):
self._application = application
bus = dbus.SessionBus()
proxy_obj = bus.get_object('com.redhat.Sugar.Logger', '/com/redhat/Sugar/Logger')
self._logger = dbus.Interface(proxy_obj, 'com.redhat.Sugar.Logger')
def start(self):
if os.environ.has_key('SUGAR_USE_CONSOLE'):
sys.stdout = self
sys.stderr = self
def write(self, s):
self._logger.log(self._application, s)
| import os
import sys
import dbus
import gobject
class LogWriter:
def __init__(self, application):
self._application = application
bus = dbus.SessionBus()
proxy_obj = bus.get_object('com.redhat.Sugar.Logger', '/com/redhat/Sugar/Logger')
self._logger = dbus.Interface(proxy_obj, 'com.redhat.Sugar.Logger')
def start(self):
if os.environ.has_key('SUGAR_USE_CONSOLE'):
sys.stdout = self
sys.stderr = self
def write(self, s):
gobject.idle_add(self._write, s)
def _write(self, s):
self._logger.log(self._application, s)
return False
| Add messages on idle so that we don't break | Add messages on idle so that we don't break
| Python | lgpl-2.1 | sugarlabs/sugar-toolkit,ceibal-tatu/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,manuq/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,gusDuarte/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,puneetgkaur/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,puneetgkaur/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,samdroid-apps/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,manuq/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit | ---
+++
@@ -1,6 +1,7 @@
import os
import sys
import dbus
+import gobject
class LogWriter:
def __init__(self, application):
@@ -15,4 +16,8 @@
sys.stderr = self
def write(self, s):
- self._logger.log(self._application, s)
+ gobject.idle_add(self._write, s)
+
+ def _write(self, s):
+ self._logger.log(self._application, s)
+ return False |
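The point of routing write() through gobject.idle_add in this commit is that the D-Bus log call runs later, from the main loop, rather than inside the stdout/stderr write itself. The stand-alone sketch below shows the same pattern with the modern GLib bindings (PyGObject) since the legacy gobject module is assumed unavailable today; the function name and message text are invented for the example.

from gi.repository import GLib  # requires PyGObject

loop = GLib.MainLoop()

def deferred_log(message):
    # In LogWriter this is where the D-Bus logger call would happen.
    print("logged:", message)
    loop.quit()
    return False  # returning False removes the idle callback, as in _write()

GLib.idle_add(deferred_log, "hello from the main loop")
loop.run()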
2022357fd0f81be6f3ca91718a6c8c1d1d46ac1b | examples/olfaction/config_files/gen_olf_stimuli.py | examples/olfaction/config_files/gen_olf_stimuli.py | """
Create odorant stimuli in hd5 format
"""
"""
Create the gexf configuration based on E. Hallem's cell paper on 2006
"""
import numpy as np
import h5py
osn_num = 1375;
f = h5py.File("al.hdf5","w")
dt = 1e-4 # time step
Ot = 2000 # number of data point during reset period
Rt = 1000 # number of data point during odor delivery period
Nt = 4*Ot + 3*Rt # number of data point
t = np.arange(0,dt*Nt,dt)
I = -1.*0.0195 # amplitude of the onset odorant concentration
u_on = I*np.ones( Ot, dtype=np.float64)
u_off = np.zeros( Ot, dtype=np.float64)
u_reset = np.zeros( Rt, dtype=np.float64)
u = np.concatenate((u_off,u_reset,u_on,u_reset,u_off,u_reset,u_on))
u_all = np.transpose( np.kron( np.ones((osn_num,1)), u))
# create the dataset
dset = f.create_dataset("acetone_on_off.hdf5",(Nt, osn_num), dtype=np.float64,\
data = u_all)
f.close()
| """
Create odorant stimuli in hd5 format
"""
"""
Create the gexf configuration based on E. Hallem's cell paper on 2006
"""
import numpy as np
import h5py
osn_num = 1375;
f = h5py.File("olfactory_stimulus.h5","w")
dt = 1e-4 # time step
Ot = 2000 # number of data point during reset period
Rt = 1000 # number of data point during odor delivery period
Nt = 4*Ot + 3*Rt # number of data point
t = np.arange(0,dt*Nt,dt)
I = -1.*0.0195 # amplitude of the onset odorant concentration
u_on = I*np.ones( Ot, dtype=np.float64)
u_off = np.zeros( Ot, dtype=np.float64)
u_reset = np.zeros( Rt, dtype=np.float64)
u = np.concatenate((u_off,u_reset,u_on,u_reset,u_off,u_reset,u_on))
u_all = np.transpose( np.kron( np.ones((osn_num,1)), u))
# create the dataset
dset = f.create_dataset("real",(Nt, osn_num), dtype=np.float64,\
data = u_all)
f.close()
| Rename olfactory stimulus file and internal array. | Rename olfactory stimulus file and internal array.
--HG--
branch : LPU
| Python | bsd-3-clause | cerrno/neurokernel | ---
+++
@@ -11,7 +11,7 @@
import h5py
osn_num = 1375;
-f = h5py.File("al.hdf5","w")
+f = h5py.File("olfactory_stimulus.h5","w")
dt = 1e-4 # time step
Ot = 2000 # number of data point during reset period
@@ -27,7 +27,7 @@
u_all = np.transpose( np.kron( np.ones((osn_num,1)), u))
# create the dataset
-dset = f.create_dataset("acetone_on_off.hdf5",(Nt, osn_num), dtype=np.float64,\
+dset = f.create_dataset("real",(Nt, osn_num), dtype=np.float64,\
data = u_all)
f.close() |
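After this rename the stimulus is written to olfactory_stimulus.h5 under a dataset named "real". The short reader below is a hypothetical companion snippet (it is not in the repository) and assumes the script above has already been run in the current directory.

import h5py

with h5py.File("olfactory_stimulus.h5", "r") as f:
    u_all = f["real"][:]  # dataset name introduced by the diff above
    print(u_all.shape)    # expected (11000, 1375): Nt = 4*2000 + 3*1000, osn_num = 1375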
58d131e8aceb1adbbcdce2e1d4a86f5fb4615196 | Lib/xml/__init__.py | Lib/xml/__init__.py | """Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
if __name__ == "xml":
try:
import _xmlplus
except ImportError:
pass
else:
import sys
sys.modules[__name__] = _xmlplus
| """Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
try:
import _xmlplus
except ImportError:
pass
else:
import sys
sys.modules[__name__] = _xmlplus
| Remove the outer test for __name__; not necessary. | Remove the outer test for __name__; not necessary.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | ---
+++
@@ -13,11 +13,10 @@
"""
-if __name__ == "xml":
- try:
- import _xmlplus
- except ImportError:
- pass
- else:
- import sys
- sys.modules[__name__] = _xmlplus
+try:
+ import _xmlplus
+except ImportError:
+ pass
+else:
+ import sys
+ sys.modules[__name__] = _xmlplus |
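The idiom this file relies on is replacing an entry in sys.modules so that later imports transparently receive a drop-in implementation. The toy example below demonstrates that mechanism with made-up module names; it is not part of the stdlib change itself.

import sys, types

_plus = types.ModuleType("fancy_impl")   # stands in for _xmlplus
_plus.FEATURE = "extended implementation"

sys.modules["plain_pkg"] = _plus         # later `import plain_pkg` now yields _plus

import plain_pkg
print(plain_pkg.FEATURE)                 # -> extended implementation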