commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
785a9cccafab36ef1eb49d9379aed9aef2f0f87f | tools/dataset_rst.py | tools/dataset_rst.py | #! /usr/bin/env python
"""
Run this script to convert dataset documentation to ReST files. Relies
on the meta-information from the datasets of the currently installed version.
Ie., it imports the datasets package to scrape the meta-information.
"""
import statsmodels.api as sm
import os
from os.path import join
import inspect
from string import Template
datasets = dict(inspect.getmembers(sm.datasets, inspect.ismodule))
datasets.pop('datautils')
datasets.pop('nile') #TODO: fix docstring in nile
doc_template = Template(u"""$TITLE
$title_
Description
-----------
$DESCRIPTION
Notes
-----
$NOTES
Source
------
$SOURCE
Copyright
---------
$COPYRIGHT
""")
for dataset in datasets:
write_pth = join('../docs/source/datasets/generated',
dataset+'.rst')
data_mod = datasets[dataset]
with open(os.path.realpath(write_pth), 'w') as rst_file:
title = getattr(data_mod,'TITLE')
descr = getattr(data_mod, 'DESCRLONG')
copyr = getattr(data_mod, 'COPYRIGHT')
notes = getattr(data_mod, 'NOTE')
source = getattr(data_mod, 'SOURCE')
write_file = doc_template.substitute(TITLE=title,
title_='='*len(title),
DESCRIPTION=descr, NOTES=notes,
SOURCE=source, COPYRIGHT=copyr)
rst_file.write(write_file)
| #! /usr/bin/env python
"""
Run this script to convert dataset documentation to ReST files. Relies
on the meta-information from the datasets of the currently installed version.
Ie., it imports the datasets package to scrape the meta-information.
"""
import statsmodels.api as sm
import os
from os.path import join
import inspect
from string import Template
datasets = dict(inspect.getmembers(sm.datasets, inspect.ismodule))
datasets.pop('utils')
datasets.pop('nile') #TODO: fix docstring in nile
doc_template = Template(u"""$TITLE
$title_
Description
-----------
$DESCRIPTION
Notes
-----
$NOTES
Source
------
$SOURCE
Copyright
---------
$COPYRIGHT
""")
for dataset in datasets:
write_pth = join('../docs/source/datasets/generated',
dataset+'.rst')
data_mod = datasets[dataset]
with open(os.path.realpath(write_pth), 'w') as rst_file:
title = getattr(data_mod,'TITLE')
descr = getattr(data_mod, 'DESCRLONG')
copyr = getattr(data_mod, 'COPYRIGHT')
notes = getattr(data_mod, 'NOTE')
source = getattr(data_mod, 'SOURCE')
write_file = doc_template.substitute(TITLE=title,
title_='='*len(title),
DESCRIPTION=descr, NOTES=notes,
SOURCE=source, COPYRIGHT=copyr)
rst_file.write(write_file)
| Update tools script for file renaming | ENH: Update tools script for file renaming
| Python | bsd-3-clause | kiyoto/statsmodels,yl565/statsmodels,edhuckle/statsmodels,Averroes/statsmodels,huongttlan/statsmodels,saketkc/statsmodels,hlin117/statsmodels,alekz112/statsmodels,waynenilsen/statsmodels,yarikoptic/pystatsmodels,bzero/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,alekz112/statsmodels,Averroes/statsmodels,huongttlan/statsmodels,wkfwkf/statsmodels,adammenges/statsmodels,gef756/statsmodels,astocko/statsmodels,gef756/statsmodels,waynenilsen/statsmodels,yl565/statsmodels,wdurhamh/statsmodels,yarikoptic/pystatsmodels,jstoxrocky/statsmodels,phobson/statsmodels,bashtage/statsmodels,bzero/statsmodels,bashtage/statsmodels,wwf5067/statsmodels,jseabold/statsmodels,nguyentu1602/statsmodels,bzero/statsmodels,jstoxrocky/statsmodels,statsmodels/statsmodels,musically-ut/statsmodels,YihaoLu/statsmodels,yarikoptic/pystatsmodels,bashtage/statsmodels,jstoxrocky/statsmodels,wzbozon/statsmodels,josef-pkt/statsmodels,musically-ut/statsmodels,bert9bert/statsmodels,huongttlan/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,nvoron23/statsmodels,wwf5067/statsmodels,bsipocz/statsmodels,bert9bert/statsmodels,wdurhamh/statsmodels,statsmodels/statsmodels,YihaoLu/statsmodels,wzbozon/statsmodels,hainm/statsmodels,ChadFulton/statsmodels,gef756/statsmodels,cbmoore/statsmodels,nguyentu1602/statsmodels,wwf5067/statsmodels,cbmoore/statsmodels,bavardage/statsmodels,rgommers/statsmodels,gef756/statsmodels,ChadFulton/statsmodels,ChadFulton/statsmodels,huongttlan/statsmodels,jseabold/statsmodels,kiyoto/statsmodels,detrout/debian-statsmodels,wdurhamh/statsmodels,gef756/statsmodels,nguyentu1602/statsmodels,kiyoto/statsmodels,rgommers/statsmodels,wkfwkf/statsmodels,bavardage/statsmodels,adammenges/statsmodels,yl565/statsmodels,hlin117/statsmodels,rgommers/statsmodels,cbmoore/statsmodels,yl565/statsmodels,musically-ut/statsmodels,wwf5067/statsmodels,hainm/statsmodels,wdurhamh/statsmodels,bashtage/statsmodels,phobson/statsmodels,wzbozon/statsmodels,bavardage/statsmodels,DonBeo/statsmodels,detrout/debian-statsmodels,ChadFulton/statsmodels,bsipocz/statsmodels,jseabold/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,josef-pkt/statsmodels,bert9bert/statsmodels,rgommers/statsmodels,astocko/statsmodels,hlin117/statsmodels,yl565/statsmodels,DonBeo/statsmodels,josef-pkt/statsmodels,nvoron23/statsmodels,wzbozon/statsmodels,saketkc/statsmodels,hainm/statsmodels,saketkc/statsmodels,edhuckle/statsmodels,detrout/debian-statsmodels,DonBeo/statsmodels,josef-pkt/statsmodels,YihaoLu/statsmodels,jseabold/statsmodels,waynenilsen/statsmodels,bsipocz/statsmodels,edhuckle/statsmodels,adammenges/statsmodels,nvoron23/statsmodels,alekz112/statsmodels,bashtage/statsmodels,bert9bert/statsmodels,saketkc/statsmodels,statsmodels/statsmodels,hainm/statsmodels,nvoron23/statsmodels,musically-ut/statsmodels,astocko/statsmodels,adammenges/statsmodels,josef-pkt/statsmodels,jseabold/statsmodels,Averroes/statsmodels,waynenilsen/statsmodels,statsmodels/statsmodels,detrout/debian-statsmodels,wkfwkf/statsmodels,bzero/statsmodels,rgommers/statsmodels,alekz112/statsmodels,ChadFulton/statsmodels,wdurhamh/statsmodels,jstoxrocky/statsmodels,nguyentu1602/statsmodels,phobson/statsmodels,bert9bert/statsmodels,YihaoLu/statsmodels,kiyoto/statsmodels,hlin117/statsmodels,wkfwkf/statsmodels,kiyoto/statsmodels,phobson/statsmodels,DonBeo/statsmodels,bavardage/statsmodels,bashtage/statsmodels,josef-pkt/statsmodels,bzero/statsmodels,saketkc/statsmodels,cbmoore/statsmodels,statsmodels/statsmodels,bsipocz/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,DonBeo/statsmodels,bavardage/statsmodels,edhuckle/statsmodels,astocko/statsmodels,nvoron23/statsmodels,wzbozon/statsmodels |
e985163d189883a2419e34021971709c9c7498c0 | request/__init__.py | request/__init__.py | __version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
| __version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, Jannis Leidel (jezdez), krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
| Add jezdez to the authors | Add jezdez to the authors
| Python | bsd-2-clause | gnublade/django-request,kylef/django-request,kylef/django-request,kylef/django-request,gnublade/django-request,gnublade/django-request |
5881436bea688ee49175192452dec18fad4ba9b2 | airflow/executors/__init__.py | airflow/executors/__init__.py | import logging
from airflow import configuration
from airflow.executors.base_executor import BaseExecutor
from airflow.executors.local_executor import LocalExecutor
from airflow.executors.sequential_executor import SequentialExecutor
# TODO Fix this emergency fix
try:
from airflow.executors.celery_executor import CeleryExecutor
except:
pass
try:
from airflow.contrib.executors.mesos_executor import MesosExecutor
except:
pass
from airflow.utils import AirflowException
_EXECUTOR = configuration.get('core', 'EXECUTOR')
if _EXECUTOR == 'LocalExecutor':
DEFAULT_EXECUTOR = LocalExecutor()
elif _EXECUTOR == 'CeleryExecutor':
DEFAULT_EXECUTOR = CeleryExecutor()
elif _EXECUTOR == 'SequentialExecutor':
DEFAULT_EXECUTOR = SequentialExecutor()
elif _EXECUTOR == 'MesosExecutor':
DEFAULT_EXECUTOR = MesosExecutor()
else:
# Loading plugins
from airflow.plugins_manager import executors as _executors
for _executor in _executors:
globals()[_executor.__name__] = _executor
if _EXECUTOR in globals():
DEFAULT_EXECUTOR = globals()[_EXECUTOR]()
else:
raise AirflowException("Executor {0} not supported.".format(_EXECUTOR))
logging.info("Using executor " + _EXECUTOR)
| import logging
from airflow import configuration
from airflow.executors.base_executor import BaseExecutor
from airflow.executors.local_executor import LocalExecutor
from airflow.executors.sequential_executor import SequentialExecutor
from airflow.utils import AirflowException
_EXECUTOR = configuration.get('core', 'EXECUTOR')
if _EXECUTOR == 'LocalExecutor':
DEFAULT_EXECUTOR = LocalExecutor()
elif _EXECUTOR == 'CeleryExecutor':
from airflow.executors.celery_executor import CeleryExecutor
DEFAULT_EXECUTOR = CeleryExecutor()
elif _EXECUTOR == 'SequentialExecutor':
DEFAULT_EXECUTOR = SequentialExecutor()
elif _EXECUTOR == 'MesosExecutor':
from airflow.contrib.executors.mesos_executor import MesosExecutor
DEFAULT_EXECUTOR = MesosExecutor()
else:
# Loading plugins
from airflow.plugins_manager import executors as _executors
for _executor in _executors:
globals()[_executor.__name__] = _executor
if _EXECUTOR in globals():
DEFAULT_EXECUTOR = globals()[_EXECUTOR]()
else:
raise AirflowException("Executor {0} not supported.".format(_EXECUTOR))
logging.info("Using executor " + _EXECUTOR)
| Remove hack by only importing when configured | Remove hack by only importing when configured
| Python | apache-2.0 | asnir/airflow,DEVELByte/incubator-airflow,yati-sagade/incubator-airflow,OpringaoDoTurno/airflow,yk5/incubator-airflow,spektom/incubator-airflow,owlabs/incubator-airflow,preete-dixit-ck/incubator-airflow,malmiron/incubator-airflow,alexvanboxel/airflow,wndhydrnt/airflow,bolkedebruin/airflow,dhuang/incubator-airflow,ledsusop/airflow,mylons/incubator-airflow,easytaxibr/airflow,lxneng/incubator-airflow,hgrif/incubator-airflow,modsy/incubator-airflow,vineet-rh/incubator-airflow,sergiohgz/incubator-airflow,andyxhadji/incubator-airflow,wooga/airflow,ProstoMaxim/incubator-airflow,artwr/airflow,jesusfcr/airflow,yoziru-desu/airflow,adrpar/incubator-airflow,Acehaidrey/incubator-airflow,adrpar/incubator-airflow,hgrif/incubator-airflow,sdiazb/airflow,forevernull/incubator-airflow,skudriashev/incubator-airflow,mtustin-handy/airflow,moritzpein/airflow,bolkedebruin/airflow,stverhae/incubator-airflow,dgies/incubator-airflow,apache/incubator-airflow,CloverHealth/airflow,mtdewulf/incubator-airflow,vineet-rh/incubator-airflow,brandsoulmates/incubator-airflow,andrewmchen/incubator-airflow,nathanielvarona/airflow,mrares/incubator-airflow,dud225/incubator-airflow,rishibarve/incubator-airflow,btallman/incubator-airflow,kerzhner/airflow,vijaysbhat/incubator-airflow,juvoinc/airflow,Twistbioscience/incubator-airflow,sdiazb/airflow,mistercrunch/airflow,malmiron/incubator-airflow,ledsusop/airflow,ronfung/incubator-airflow,d-lee/airflow,danielvdende/incubator-airflow,mtustin-handy/airflow,mylons/incubator-airflow,caseyching/incubator-airflow,asnir/airflow,DEVELByte/incubator-airflow,jfantom/incubator-airflow,moritzpein/airflow,holygits/incubator-airflow,skudriashev/incubator-airflow,asnir/airflow,lyft/incubator-airflow,ronfung/incubator-airflow,spektom/incubator-airflow,opensignal/airflow,bolkedebruin/airflow,owlabs/incubator-airflow,jwi078/incubator-airflow,jiwang576/incubator-airflow,andrewmchen/incubator-airflow,ProstoMaxim/incubator-airflow,jlowin/airflow,sid88in/incubator-airflow,OpringaoDoTurno/airflow,modsy/incubator-airflow,modsy/incubator-airflow,NielsZeilemaker/incubator-airflow,yk5/incubator-airflow,RealImpactAnalytics/airflow,wolfier/incubator-airflow,dgies/incubator-airflow,hamedhsn/incubator-airflow,N3da/incubator-airflow,janczak10/incubator-airflow,Chedi/airflow,apache/airflow,DinoCow/airflow,andyxhadji/incubator-airflow,apache/airflow,easytaxibr/airflow,mistercrunch/airflow,hamedhsn/incubator-airflow,danielvdende/incubator-airflow,hamedhsn/incubator-airflow,ronfung/incubator-airflow,wndhydrnt/airflow,gilt/incubator-airflow,Acehaidrey/incubator-airflow,plypaul/airflow,RealImpactAnalytics/airflow,yk5/incubator-airflow,yiqingj/airflow,janczak10/incubator-airflow,ProstoMaxim/incubator-airflow,mrkm4ntr/incubator-airflow,nathanielvarona/airflow,yoziru-desu/airflow,griffinqiu/airflow,dud225/incubator-airflow,sekikn/incubator-airflow,gtoonstra/airflow,alexvanboxel/airflow,jhsenjaliya/incubator-airflow,nathanielvarona/airflow,fenglu-g/incubator-airflow,DinoCow/airflow,Acehaidrey/incubator-airflow,N3da/incubator-airflow,danielvdende/incubator-airflow,Fokko/incubator-airflow,neovintage/airflow,Acehaidrey/incubator-airflow,cjqian/incubator-airflow,mylons/incubator-airflow,cfei18/incubator-airflow,ty707/airflow,ty707/airflow,mattuuh7/incubator-airflow,d-lee/airflow,Twistbioscience/incubator-airflow,wileeam/airflow,janczak10/incubator-airflow,N3da/incubator-airflow,NielsZeilemaker/incubator-airflow,yoziru-desu/airflow,apache/incubator-airflow,jwi078/incubator-airflow,vineet-rh/incubator-airflow,airbnb/airflow,danielvdende/incubator-airflow,opensignal/airflow,cjqian/incubator-airflow,fenglu-g/incubator-airflow,sergiohgz/incubator-airflow,wndhydrnt/airflow,mistercrunch/airflow,jgao54/airflow,gritlogic/incubator-airflow,fenglu-g/incubator-airflow,dgies/incubator-airflow,r39132/airflow,btallman/incubator-airflow,mattuuh7/incubator-airflow,dmitry-r/incubator-airflow,kerzhner/airflow,holygits/incubator-airflow,hamedhsn/incubator-airflow,MortalViews/incubator-airflow,btallman/incubator-airflow,mrkm4ntr/incubator-airflow,zack3241/incubator-airflow,owlabs/incubator-airflow,cfei18/incubator-airflow,AllisonWang/incubator-airflow,adamhaney/airflow,dgies/incubator-airflow,wolfier/incubator-airflow,sergiohgz/incubator-airflow,r39132/airflow,jesusfcr/airflow,cfei18/incubator-airflow,MetrodataTeam/incubator-airflow,airbnb/airflow,andyxhadji/incubator-airflow,yiqingj/airflow,yati-sagade/incubator-airflow,preete-dixit-ck/incubator-airflow,cfei18/incubator-airflow,holygits/incubator-airflow,sid88in/incubator-airflow,holygits/incubator-airflow,forevernull/incubator-airflow,CloverHealth/airflow,yati-sagade/incubator-airflow,fenglu-g/incubator-airflow,ronfung/incubator-airflow,zoyahav/incubator-airflow,cjqian/incubator-airflow,cjqian/incubator-airflow,lxneng/incubator-airflow,forevernull/incubator-airflow,sergiohgz/incubator-airflow,DinoCow/airflow,akosel/incubator-airflow,bolkedebruin/airflow,nathanielvarona/airflow,AllisonWang/incubator-airflow,wndhydrnt/airflow,rishibarve/incubator-airflow,edgarRd/incubator-airflow,jhsenjaliya/incubator-airflow,jesusfcr/airflow,lxneng/incubator-airflow,neovintage/airflow,adamhaney/airflow,jgao54/airflow,sdiazb/airflow,cfei18/incubator-airflow,MortalViews/incubator-airflow,brandsoulmates/incubator-airflow,gritlogic/incubator-airflow,mtustin-handy/airflow,stverhae/incubator-airflow,saguziel/incubator-airflow,jfantom/incubator-airflow,malmiron/incubator-airflow,Chedi/airflow,saguziel/incubator-airflow,neovintage/airflow,KL-WLCR/incubator-airflow,OpringaoDoTurno/airflow,wileeam/airflow,aminghadersohi/airflow,jwi078/incubator-airflow,DinoCow/airflow,mrkm4ntr/incubator-airflow,jfantom/incubator-airflow,Tagar/incubator-airflow,wxiang7/airflow,kerzhner/airflow,dmitry-r/incubator-airflow,Fokko/incubator-airflow,jiwang576/incubator-airflow,andrewmchen/incubator-airflow,N3da/incubator-airflow,mrkm4ntr/incubator-airflow,mattuuh7/incubator-airflow,Chedi/airflow,jhsenjaliya/incubator-airflow,NielsZeilemaker/incubator-airflow,d-lee/airflow,zack3241/incubator-airflow,vijaysbhat/incubator-airflow,nathanielvarona/airflow,wooga/airflow,preete-dixit-ck/incubator-airflow,skudriashev/incubator-airflow,wolfier/incubator-airflow,preete-dixit-ck/incubator-airflow,zodiac/incubator-airflow,MetrodataTeam/incubator-airflow,wooga/airflow,easytaxibr/airflow,caseyching/incubator-airflow,malmiron/incubator-airflow,dmitry-r/incubator-airflow,jhsenjaliya/incubator-airflow,ledsusop/airflow,KL-WLCR/incubator-airflow,DEVELByte/incubator-airflow,jiwang576/incubator-airflow,d-lee/airflow,bolkedebruin/airflow,griffinqiu/airflow,wxiang7/airflow,MetrodataTeam/incubator-airflow,sekikn/incubator-airflow,criccomini/airflow,akosel/incubator-airflow,Tagar/incubator-airflow,DEVELByte/incubator-airflow,airbnb/airflow,opensignal/airflow,mtustin-handy/airflow,danielvdende/incubator-airflow,subodhchhabra/airflow,yiqingj/airflow,plypaul/airflow,mtagle/airflow,rishibarve/incubator-airflow,caseyching/incubator-airflow,rishibarve/incubator-airflow,gtoonstra/airflow,wileeam/airflow,wxiang7/airflow,apache/airflow,plypaul/airflow,CloverHealth/airflow,biln/airflow,hgrif/incubator-airflow,AllisonWang/incubator-airflow,dhuang/incubator-airflow,gilt/incubator-airflow,sid88in/incubator-airflow,ty707/airflow,edgarRd/incubator-airflow,mrares/incubator-airflow,adrpar/incubator-airflow,KL-WLCR/incubator-airflow,zack3241/incubator-airflow,Tagar/incubator-airflow,Acehaidrey/incubator-airflow,ProstoMaxim/incubator-airflow,wolfier/incubator-airflow,edgarRd/incubator-airflow,mistercrunch/airflow,dud225/incubator-airflow,griffinqiu/airflow,zoyahav/incubator-airflow,spektom/incubator-airflow,mtdewulf/incubator-airflow,andrewmchen/incubator-airflow,sekikn/incubator-airflow,alexvanboxel/airflow,Fokko/incubator-airflow,jbhsieh/incubator-airflow,r39132/airflow,jwi078/incubator-airflow,mtagle/airflow,mrares/incubator-airflow,hgrif/incubator-airflow,btallman/incubator-airflow,yati-sagade/incubator-airflow,stverhae/incubator-airflow,apache/airflow,Acehaidrey/incubator-airflow,dhuang/incubator-airflow,jbhsieh/incubator-airflow,modsy/incubator-airflow,AllisonWang/incubator-airflow,Tagar/incubator-airflow,jgao54/airflow,gritlogic/incubator-airflow,opensignal/airflow,sekikn/incubator-airflow,yiqingj/airflow,zodiac/incubator-airflow,artwr/airflow,ledsusop/airflow,vijaysbhat/incubator-airflow,vineet-rh/incubator-airflow,caseyching/incubator-airflow,Twistbioscience/incubator-airflow,artwr/airflow,artwr/airflow,mattuuh7/incubator-airflow,easytaxibr/airflow,andyxhadji/incubator-airflow,adamhaney/airflow,jbhsieh/incubator-airflow,Twistbioscience/incubator-airflow,owlabs/incubator-airflow,lxneng/incubator-airflow,apache/airflow,vijaysbhat/incubator-airflow,adrpar/incubator-airflow,MetrodataTeam/incubator-airflow,sid88in/incubator-airflow,jfantom/incubator-airflow,dud225/incubator-airflow,brandsoulmates/incubator-airflow,nathanielvarona/airflow,gilt/incubator-airflow,KL-WLCR/incubator-airflow,subodhchhabra/airflow,cademarkegard/airflow,Fokko/incubator-airflow,stverhae/incubator-airflow,jlowin/airflow,gtoonstra/airflow,ty707/airflow,apache/incubator-airflow,criccomini/airflow,adamhaney/airflow,jesusfcr/airflow,MortalViews/incubator-airflow,yk5/incubator-airflow,mtagle/airflow,mtdewulf/incubator-airflow,airbnb/airflow,aminghadersohi/airflow,aminghadersohi/airflow,dhuang/incubator-airflow,lyft/incubator-airflow,juvoinc/airflow,gritlogic/incubator-airflow,juvoinc/airflow,wooga/airflow,sdiazb/airflow,zoyahav/incubator-airflow,juvoinc/airflow,mrares/incubator-airflow,subodhchhabra/airflow,brandsoulmates/incubator-airflow,cademarkegard/airflow,cademarkegard/airflow,saguziel/incubator-airflow,skudriashev/incubator-airflow,alexvanboxel/airflow,jiwang576/incubator-airflow,dmitry-r/incubator-airflow,subodhchhabra/airflow,zodiac/incubator-airflow,wxiang7/airflow,biln/airflow,cfei18/incubator-airflow,gtoonstra/airflow,mtdewulf/incubator-airflow,biln/airflow,lyft/incubator-airflow,spektom/incubator-airflow,gilt/incubator-airflow,akosel/incubator-airflow,mtagle/airflow,saguziel/incubator-airflow,yoziru-desu/airflow,kerzhner/airflow,jlowin/airflow,griffinqiu/airflow,RealImpactAnalytics/airflow,asnir/airflow,wileeam/airflow,jbhsieh/incubator-airflow,cademarkegard/airflow,criccomini/airflow,apache/incubator-airflow,zodiac/incubator-airflow,plypaul/airflow,RealImpactAnalytics/airflow,akosel/incubator-airflow,mylons/incubator-airflow,aminghadersohi/airflow,r39132/airflow,NielsZeilemaker/incubator-airflow,apache/airflow,lyft/incubator-airflow,criccomini/airflow,zoyahav/incubator-airflow,moritzpein/airflow,MortalViews/incubator-airflow,forevernull/incubator-airflow,jgao54/airflow,OpringaoDoTurno/airflow,janczak10/incubator-airflow,edgarRd/incubator-airflow,neovintage/airflow,Chedi/airflow,jlowin/airflow,CloverHealth/airflow,biln/airflow,moritzpein/airflow,danielvdende/incubator-airflow,zack3241/incubator-airflow |
3036533d25ce9955568efb4ba119d6e32af78ace | fabfile.py | fabfile.py | from __future__ import with_statement
import os.path
from fabric.api import *
from fabric.contrib.project import *
env.user = 'root'
env.hosts = ['80.169.183.93']
env.remote_dir = '/mnt/persist/www/helloper.com'
def deploy(where=None):
rsync_project(
env.remote_dir,
'dist/',
['.git', '.git*', '.DS_Store', '.sass-cache*'],
True
)
| from __future__ import with_statement
import os.path
from fabric.api import *
from fabric.contrib.project import *
env.user = 'root'
env.hosts = ['146.185.132.96']
env.remote_dir = '/mnt/persist/www/helloper.com'
def deploy(where=None):
rsync_project(
env.remote_dir,
'dist/',
['.git', '.git*', '.DS_Store', '.sass-cache*'],
True
)
| Use new IP in deployment | Use new IP in deployment
| Python | mit | persand/helloper,persand/helloper.com,persand/helloper,persand/helloper.com,persand/helloper,persand/helloper.com |
04085c781621b47cfd47632531341fa7b2e9a956 | raven/contrib/django/apps.py | raven/contrib/django/apps.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.apps import AppConfig
class RavenConfig(AppConfig):
name = 'raven.contrib.django'
label = 'raven.contrib.django'
verbose_name = 'Raven'
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.apps import AppConfig
class RavenConfig(AppConfig):
name = 'raven.contrib.django'
label = 'raven_contrib_django'
verbose_name = 'Raven'
| Remove periods in label value | Remove periods in label value
Fixes https://github.com/getsentry/raven-python/issues/594 | Python | bsd-3-clause | hzy/raven-python,hzy/raven-python,percipient/raven-python,smarkets/raven-python,lepture/raven-python,johansteffner/raven-python,Photonomie/raven-python,akalipetis/raven-python,recht/raven-python,ewdurbin/raven-python,someonehan/raven-python,dbravender/raven-python,ronaldevers/raven-python,smarkets/raven-python,johansteffner/raven-python,akalipetis/raven-python,akalipetis/raven-python,dbravender/raven-python,akheron/raven-python,ewdurbin/raven-python,lepture/raven-python,jmagnusson/raven-python,jmagnusson/raven-python,percipient/raven-python,getsentry/raven-python,jmp0xf/raven-python,lepture/raven-python,jmp0xf/raven-python,akheron/raven-python,jmagnusson/raven-python,ronaldevers/raven-python,johansteffner/raven-python,danriti/raven-python,percipient/raven-python,ewdurbin/raven-python,Photonomie/raven-python,recht/raven-python,smarkets/raven-python,getsentry/raven-python,someonehan/raven-python,Photonomie/raven-python,dbravender/raven-python,recht/raven-python,smarkets/raven-python,jmp0xf/raven-python,hzy/raven-python,danriti/raven-python,someonehan/raven-python,getsentry/raven-python,ronaldevers/raven-python,akheron/raven-python,danriti/raven-python |
1e62898d02ae5187ce078a2bb699eefd6bc184ef | s2v2.py | s2v2.py | from s2v1 import *
def number_of_records(data_sample):
return len(data_sample)
number_of_ties = number_of_records(data_from_csv) - 1 # minus header row
print(number_of_ties, "ties in our data sample")
def number_of_records2(data_sample):
return data_sample.size
number_of_ties_my_csv = number_of_records2(my_csv)
print(number_of_ties_my_csv, "ties in our data sample") | from s2v1 import *
def number_of_records(data_sample):
return len(data_sample)
number_of_ties = number_of_records(data_from_csv) - 1 # minus header row
# print(number_of_ties, "ties in our data sample")
def number_of_records2(data_sample):
return data_sample.size
number_of_ties_my_csv = number_of_records2(my_csv)
# print(number_of_ties_my_csv, "ties in our data sample") | Comment out print statements for total number of ties | Comment out print statements for total number of ties
| Python | mit | alexmilesyounger/ds_basics |
4a5bc9439840db3197610d67a5ad885849e8312b | urls.py | urls.py | from django.conf.urls import patterns, include, url
from contact.views import contactForm
urlpatterns = patterns('',
url(r'^contact/$', contactForm),
)
| from django.conf.urls import patterns, include, url
from .views import contactForm
urlpatterns = patterns('',
url(r'^contact/$', contactForm),
)
| Update for more generic url pattern and import | Update for more generic url pattern and import
| Python | mit | brob/simple-django-contact |
093e51ea520400faef4f2f64f926135786b652a3 | src/serve.py | src/serve.py | """
Transit Status
"""
from flask import Flask, render_template
import json
import settings
app = Flask(__name__)
app.debug = settings.DEBUG
@app.route("/")
def root():
wifi = {'WIFI_NAME': settings.WIFI_NAME, 'WIFI_PASSWORD': settings.WIFI_PASSWORD}
return render_template('home.html', stops = json.dumps(settings.STOPS), wifi=wifi)
if __name__ == "__main__":
app.run(host='0.0.0.0', port=9001)
| """
Transit Status
"""
from flask import Flask, render_template
import json
import mlb_schedule
import settings
app = Flask(__name__)
app.debug = settings.DEBUG
@app.route("/")
def root():
wifi = {'WIFI_NAME': settings.WIFI_NAME, 'WIFI_PASSWORD': settings.WIFI_PASSWORD}
return render_template('home.html', stops = json.dumps(settings.STOPS), wifi=wifi)
@app.route("/giants_schedule")
def giants_schedule():
return json.dumps(mlb_schedule.get_todays_game())
if __name__ == "__main__":
app.run(host='0.0.0.0', port=9001)
| Add endpoint for getting today's Giants game | Add endpoint for getting today's Giants game
| Python | mit | albertyw/wilo,albertyw/wilo,albertyw/wilo,albertyw/wilo,albertyw/wilo |
5ac310b7c5cee4a8c5f247ae117fda17fc4cb61a | pypocketexplore/jobs.py | pypocketexplore/jobs.py | from datetime import datetime
import requests as req
from pymongo import MongoClient
from pypocketexplore.config import MONGO_URI
from time import sleep
def extract_topic_items(topic):
db = MongoClient(MONGO_URI).get_default_database()
resp = req.get('http://localhost:5000/api/topic/{}'.format(topic))
data = resp.json()
related_topics = data.get('related_topics')
items = data.get('items')
if items:
res = db['items'].insert(items)
db['topics'].update_many({'topic': topic}, {'$set': {'topic': topic,
'is_scraped': True,
'datetime_scraped': datetime.utcnow(),
'queued': True}},
upsert=True)
for related_topic in related_topics:
req.get('http://localhost:5000/api/topic/{}?async=true'.format(related_topic)).json()
print("Rate limit! Going to sleep for 2 mins!")
sleep(2 * 60)
print("Wakey wakey eggs and bakey!")
return res
elif resp.ok and not items:
return
else:
raise Exception
if __name__ == '__main__':
extract_topic_items('finance')
| from datetime import datetime
import requests as req
from pymongo import MongoClient
from pypocketexplore.config import MONGO_URI
from time import sleep
from redis import StrictRedis
import rq
def extract_topic_items(topic):
r = StrictRedis()
def topic_in_queue(topic):
q = rq.Queue('topics', connection=StrictRedis())
if any(job.kwargs.get('topic') for job in q.get_jobs()):
return True
else:
return False
db = MongoClient(MONGO_URI).get_default_database()
resp = req.get('http://localhost:5000/api/topic/{}'.format(topic))
data = resp.json()
related_topics = data.get('related_topics')
items = data.get('items')
if resp.ok:
print('Inserting {} items for topic {}'.format(len(items)), topic)
res = db['items'].insert(items)
r.sadd('scraped_topics', topic)
for related_topic in related_topics:
if not topic_in_queue(related_topic) and not r.sismember('scraped_topics', related_topic):
print('Enqueuing related topic'.format(related_topic))
req.get('http://localhost:5000/api/topic/{}?async=true'.format(related_topic)).json()
print("Rate limit! Going to sleep for 2 mins!")
sleep(2 * 60)
print("Wakey wakey eggs and bakey!")
return res
else:
raise Exception('Something went wrong with topic {}. /api/explore/{} returned {}'.format(topic, topic, resp))
if __name__ == '__main__':
extract_topic_items('finance')
| Fix bug to avoid duplicating topics | Fix bug to avoid duplicating topics
| Python | mit | Florents-Tselai/PyPocketExplore |
edec2186f5a83789a5d6a5dbd112c9ff716c3d46 | src/python/datamodels/output_models.py | src/python/datamodels/output_models.py | import hashlib
class Store(object):
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "%s,%s,%s" % (self.name, self.location.zipcode, self.location.coords)
class Customer(object):
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "(%s, %s, %s)" % \
(self.id, self.name, self.location.zipcode)
class Transaction(object):
def __init__(self, customer=None, trans_time=None, purchased_items=None, store=None,
trans_count=None):
self.store = store
self.customer = customer
self.trans_time = trans_time
self.purchased_items = purchased_items
self.trans_count = trans_count
def transaction_id(self):
return hashlib.md5(repr(self)).hexdigest()
def __repr__(self):
return "(%s, %s, %s, %s)" % (self.store.id,
self.customer.id,
self.trans_time,
self.trans_count)
| import hashlib
class Store(object):
"""
Record for stores.
id -- integer
name -- string
location -- ZipcodeRecord
"""
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "%s,%s,%s" % (self.name, self.location.zipcode, self.location.coords)
class Customer(object):
"""
Record for customers.
id -- integer
name -- string
location -- ZipcodeRecord
"""
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "(%s, %s, %s)" % \
(self.id, self.name, self.location.zipcode)
class Transaction(object):
"""
Record for transactions
store -- Store
customer -- Customer
trans_time -- transaction time in days since start of simulation. int or long
purchased_items -- list of products purchased
trans_count -- hidden transaction id
"""
def __init__(self, customer=None, trans_time=None, purchased_items=None, store=None,
trans_count=None):
self.store = store
self.customer = customer
self.trans_time = trans_time
self.purchased_items = purchased_items
self.trans_count = trans_count
def transaction_id(self):
"""
Compute transaction id as a hash of the transaction.
Returns a string
"""
return hashlib.md5(repr(self)).hexdigest()
def __repr__(self):
return "(%s, %s, %s, %s)" % (self.store.id,
self.customer.id,
self.trans_time,
self.trans_count)
| Add docstrings to output models | Add docstrings to output models
| Python | apache-2.0 | rnowling/bigpetstore-data-generator,rnowling/bigpetstore-data-generator,rnowling/bigpetstore-data-generator |
fb4aa211f64ed6fdc0443d03dd02dc52fc882978 | server/dummy/dummy_server.py | server/dummy/dummy_server.py | #!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print('\n--- %s%s\n%s' % (self.command, self.path, self.headers))
print content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
| #!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def _transaction_string(self, command, path, headers, content):
return '%s %s\n%s%s\n' % (command, path, headers, content)
def _print_request(self, *request):
print('--> %s' % self._transaction_string(*request))
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
self._print_request(self.command, self.path, self.headers, content)
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
| Clean up and refactor printing of request | Clean up and refactor printing of request
| Python | mit | jonspeicher/Puddle,jonspeicher/Puddle,jonspeicher/Puddle |
abe46e145114805a2ad69c27f601b5da3f5e9959 | api/armory_service.py | api/armory_service.py | import urlparse
import os, sys, re, random,pybitcointools, bitcoinrpc, math
from decimal import Decimal
from flask import Flask, request, jsonify, abort, json, make_response
from msc_apps import *
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
data_dir_root = os.environ.get('DATADIR')
sys.path.append("/usr/lib/armory/")
from armoryengine.ALL import *
app = Flask(__name__)
app.debug = True
@app.route('/getunsigned', methods=['POST'])
def generate_unsigned():
unsigned_hex = request.form['unsigned_hex']
pubkey = request.form['pubkey']
#Translate raw txn
pytx = PyTx()
print("Encoding raw txn: %s" % unsigned_hex)
binTxn = hex_to_binary(unsigned_hex)
pytx.unserialize(binTxn)
tx = PyTxDistProposal(pytx)
print("\n\nOutput is:\n%s" % tx.serializeAscii())
return jsonify({'armoryUnsigned':tx.serializeAscii()}) | import urlparse
import os, sys, re, random,pybitcointools, bitcoinrpc, math
from decimal import Decimal
from flask import Flask, request, jsonify, abort, json, make_response
from msc_apps import *
tools_dir = os.environ.get('TOOLSDIR')
lib_path = os.path.abspath(tools_dir)
sys.path.append(lib_path)
data_dir_root = os.environ.get('DATADIR')
sys.path.append("/usr/lib/armory/")
from armoryengine.ALL import *
app = Flask(__name__)
app.debug = True
@app.route('/getunsigned', methods=['POST'])
def generate_unsigned():
unsigned_hex = request.form['unsigned_hex']
pubkey = request.form['pubkey']
#Translate raw txn
pytx = PyTx()
print("Encoding raw txn: %s" % unsigned_hex)
try:
unsigned_tx_bin = hex_to_binary(unsigned_tx_hex)
pytx = PyTx().unserialize(unsigned_tx_bin)
utx = UnsignedTransaction(pytx=pytx, pubKeyMap=hex_to_binary(public_key_hex))
unsigned_tx_ascii = utx.serializeAscii()
except Exception, e:
abort("Error serializing transaction: %s" % e)
print("\n\nOutput is:\n%s" % unsigned_tx_ascii)
return jsonify({'armoryUnsigned':unsigned_tx_ascii}) | Refactor Armory API to use UnsignedTransaction class | Refactor Armory API to use UnsignedTransaction class
| Python | agpl-3.0 | achamely/omniwallet,Nevtep/omniwallet,OmniLayer/omniwallet,habibmasuro/omniwallet,Nevtep/omniwallet,habibmasuro/omniwallet,VukDukic/omniwallet,habibmasuro/omniwallet,OmniLayer/omniwallet,OmniLayer/omniwallet,achamely/omniwallet,achamely/omniwallet,VukDukic/omniwallet,VukDukic/omniwallet,Nevtep/omniwallet,achamely/omniwallet,OmniLayer/omniwallet,Nevtep/omniwallet,habibmasuro/omniwallet |
3e3f7b827e226146ec7d3efe523f1f900ac4e99a | sjconfparts/type.py | sjconfparts/type.py | class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true":
return True
elif str_object == "no" or str_object == "off" or str_object == "false":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
| class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true" or str_object == "enabled" or str_object == "enable":
return True
elif str_object == "no" or str_object == "off" or str_object == "false" or str_object == "disabled" or str_object == "disable":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
| Allow “enabled“, “enable”, “disabled“, “disable” as boolean values | Allow “enabled“, “enable”, “disabled“, “disable” as boolean values
| Python | lgpl-2.1 | SmartJog/sjconf,SmartJog/sjconf |
ba0a4aff1ea21670712b35061570805e62bb4159 | Instanssi/admin_blog/forms.py | Instanssi/admin_blog/forms.py | # -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
from Instanssi.ext_blog.models import BlogEntry
class BlogEntryForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(BlogEntryForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'Blogientry',
'title',
'text',
'public',
ButtonHolder (
Submit('submit', 'Tallenna')
)
)
)
class Meta:
model = BlogEntry
fields = ('title','text','public') | # -*- coding: utf-8 -*-
from django import forms
from uni_form.helper import FormHelper
from uni_form.layout import Submit, Layout, Fieldset, ButtonHolder
from Instanssi.ext_blog.models import BlogEntry
class BlogEntryForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(BlogEntryForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'Blogientry',
'title',
'text',
'date',
'public',
ButtonHolder (
Submit('submit', 'Tallenna')
)
)
)
class Meta:
model = BlogEntry
fields = ('title','text','public','date') | Add date field to edit form. | admin_blog: Add date field to edit form.
| Python | mit | Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org,Instanssi/Instanssi.org |
1c2e17e31c00a52661706a3c90efbb3c93d6fbef | app/initialization.py | app/initialization.py | import sys
import os
import shutil
import composer
import configuration
import downloader
def run():
project_dir = os.getcwd()+'/'
execution_dir = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0]+'/'
if len(sys.argv) == 2:
project_dir = sys.argv[1]
os.chdir(execution_dir)
print '>>> Execution dir: '+execution_dir
print '>>> Project dir: '+project_dir
build_dir = project_dir+'build/'
configuration.load(project_dir)
configuration.add('project-dir', project_dir)
configuration.add('build-dir', build_dir)
composer.initialization()
downloader.initialization()
def update():
php_bin = 'php'
if len(sys.argv) == 2:
php_bin = sys.argv[1]
print '>>> PHP version is: '+php_bin
configuration.add('php', php_bin)
composer.initialization()
composer.update()
downloader.update()
def prepare_dir(path):
if os.path.isdir(path):
shutil.rmtree(path)
os.makedirs(path)
| import sys
import os
import shutil
import composer
import configuration
import downloader
def run():
try:
project_dir = configuration.get_value('project-dir')
except:
project_dir = os.getcwd()+'/'
execution_dir = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0]+'/'
if len(sys.argv) == 2:
project_dir = sys.argv[1]
os.chdir(execution_dir)
print '>>> Execution dir: '+execution_dir
print '>>> Project dir: '+project_dir
build_dir = project_dir+'build/'
configuration.load(project_dir)
configuration.add('project-dir', project_dir)
configuration.add('build-dir', build_dir)
composer.initialization()
downloader.initialization()
def update():
php_bin = 'php'
if len(sys.argv) == 2:
php_bin = sys.argv[1]
print '>>> PHP version is: '+php_bin
configuration.add('php', php_bin)
composer.initialization()
composer.update()
downloader.update()
def prepare_dir(path):
if os.path.isdir(path):
shutil.rmtree(path)
os.makedirs(path)
| Fix issue with duplicated call | Fix issue with duplicated call
| Python | mit | mi-schi/php-code-checker |
b9fcd270f520f49fcbe85bcbc53940326f556fdf | Lib/test/test_import.py | Lib/test/test_import.py | from test_support import TESTFN
import os
import random
source = TESTFN + ".py"
pyc = TESTFN + ".pyc"
pyo = TESTFN + ".pyo"
f = open(source, "w")
print >> f, "# This will test Python's ability to import a .py file"
a = random.randrange(1000)
b = random.randrange(1000)
print >> f, "a =", a
print >> f, "b =", b
f.close()
try:
try:
mod = __import__(TESTFN)
except ImportError, err:
raise ValueError, "import from .py failed: %s" % err
if mod.a != a or mod.b != b:
print a, "!=", mod.a
print b, "!=", mod.b
raise ValueError, "module loaded (%s) but contents invalid" % mod
finally:
os.unlink(source)
try:
try:
reload(mod)
except ImportError, err:
raise ValueError, "import from .pyc/.pyo failed: %s" % err
finally:
try:
os.unlink(pyc)
except os.error:
pass
try:
os.unlink(pyo)
except os.error:
pass
| from test_support import TESTFN
import os
import random
import sys
sys.path.insert(0, os.curdir)
source = TESTFN + ".py"
pyc = TESTFN + ".pyc"
pyo = TESTFN + ".pyo"
f = open(source, "w")
print >> f, "# This will test Python's ability to import a .py file"
a = random.randrange(1000)
b = random.randrange(1000)
print >> f, "a =", a
print >> f, "b =", b
f.close()
try:
try:
mod = __import__(TESTFN)
except ImportError, err:
raise ValueError, "import from .py failed: %s" % err
if mod.a != a or mod.b != b:
print a, "!=", mod.a
print b, "!=", mod.b
raise ValueError, "module loaded (%s) but contents invalid" % mod
finally:
os.unlink(source)
try:
try:
reload(mod)
except ImportError, err:
raise ValueError, "import from .pyc/.pyo failed: %s" % err
finally:
try:
os.unlink(pyc)
except os.error:
pass
try:
os.unlink(pyo)
except os.error:
pass
del sys.path[0]
| Insert the current directory to the front of sys.path -- and remove it at the end. This fixes a problem where | Insert the current directory to the front of sys.path -- and remove it
at the end. This fixes a problem where
python Lib/test/test_import.py
failed while "make test" succeeded.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator |
2951520fab9f213322584327c9e5841fe13fc993 | tests/run.py | tests/run.py | #! /usr/bin/env python3
import sys
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
INSTALLED_APPS=(
# Put contenttypes before auth to work around test issue.
# See: https://code.djangoproject.com/ticket/10827#comment:12
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.admin',
'django-admin-sso',
'django-crispy-forms',
'incuna_auth',
),
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',),
AUTH_USER_MODEL='tests.User',
ROOT_URLCONF='incuna_auth.urls',
REST_FRAMEWORK={
'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',),
'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',),
},
)
from django.test.runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
| import sys
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
INSTALLED_APPS=(
# Put contenttypes before auth to work around test issue.
# See: https://code.djangoproject.com/ticket/10827#comment:12
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.admin',
'django-admin-sso',
'django-crispy-forms',
'incuna_auth',
),
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',),
AUTH_USER_MODEL='tests.User',
ROOT_URLCONF='incuna_auth.urls',
REST_FRAMEWORK={
'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',),
'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',),
},
)
from django.test.runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
| Remove unnecessary Python 3 declaration. | Remove unnecessary Python 3 declaration.
| Python | bsd-2-clause | incuna/incuna-auth,incuna/incuna-auth,ghickman/incuna-auth,ghickman/incuna-auth |
8edd3879c87727b7d6b0808227d32d9bd3072c90 | server/resources.py | server/resources.py | from flask_restful import Resource, Api, abort
from .models import Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lectures = Lecture.query.filter(Lecture.id == lecture_id).all()
if not db_lectures:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
| from flask_restful import Resource, Api, abort
from .models import Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
| Use first() when querying one lecture from DB | Use first() when querying one lecture from DB
| Python | mit | MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS |
00aad9bc179aa4a090f703db9669e8ba49ff8f3c | bibliopixel/main/arguments.py | bibliopixel/main/arguments.py | from .. project import project
"""Common command line arguments for run and demo."""
def add_to_parser(parser):
parser.add_argument(
'-d', '--driver', default='simpixel',
help='Default driver type if no driver is specified')
parser.add_argument(
'-l', '--layout', default='matrix',
help='Default layout class if no layout is specified')
parser.add_argument(
'-t', '--ledtype', default=None,
help='Default LED type if no LED type is specified')
parser.add_argument(
'-a', '--animation', default=None,
help='Default animation type if no animation is specified')
parser.add_argument(
'-s', action='store_true', help='Run SimPixel at the default URL')
parser.add_argument('--simpixel', help='Run SimPixel at a specific URL')
def get_dict(args):
result = {}
for name in 'driver', 'layout', 'animation':
value = args and getattr(args, name)
result[name] = {'typename': value} if value else {}
if args and args.ledtype:
result['driver']['ledtype'] = args.ledtype
return result
def make_animation(args, desc):
return project.project_to_animation(desc, get_dict(args))
| import json
from .. project import project
"""Common command line arguments for run and demo."""
COMPONENTS = 'driver', 'layout', 'animation'
def add_to_parser(parser):
parser.add_argument(
'-d', '--driver', default='simpixel',
help='Default driver type if no driver is specified')
parser.add_argument(
'-l', '--layout', default='matrix',
help='Default layout class if no layout is specified')
parser.add_argument(
'-t', '--ledtype', default=None,
help='Default LED type if no LED type is specified')
parser.add_argument(
'-a', '--animation', default=None,
help='Default animation type if no animation is specified')
parser.add_argument(
'-s', action='store_true', help='Run SimPixel at the default URL')
parser.add_argument('--simpixel', help='Run SimPixel at a specific URL')
def get_dict(args):
def get_value(name):
value = args and getattr(args, name)
if not value:
return {}
if '{' in value:
return json.loads(value)
return {'typename': value}
result = {name: get_value(name) for name in COMPONENTS}
if args and args.ledtype:
result['driver']['ledtype'] = args.ledtype
return result
def make_animation(args, desc):
return project.project_to_animation(desc, get_dict(args))
| Allow json in component flags. | Allow json in component flags.
| Python | mit | ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel |
618bcd64fa23e1bd8868c06ce38e30b7ef47b7e1 | bin/create_traveltime_data.py | bin/create_traveltime_data.py | #!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
from nsmaps.station import StationType
DATA_DIR = './website/nsmaps-data'
MAX_STATIONS = 60
def main():
stations = nsmaps.station.Stations(DATA_DIR)
major_station_types = (
StationType.intercitystation,
StationType.knooppuntIntercitystation,
StationType.megastation,
StationType.knooppuntSneltreinstation,
StationType.sneltreinstation,
StationType.knooppuntStoptreinstation,
StationType.stoptreinstation
)
stations_options = stations.get_stations_for_types(major_station_types)
stations_todo = []
n_stations = 0
for station in stations_options:
if n_stations >= MAX_STATIONS:
break
if not station.has_travel_time_data() and station.get_country_code() == 'NL':
print(station.get_travel_time_filepath())
stations_todo.append(station)
n_stations += 1
print(station)
timestamp = "19-04-2016 08:00"
stations.create_traveltimes_data(stations_todo, timestamp)
stations.recreate_missing_destinations(DATA_DIR, timestamp, False)
if __name__ == "__main__":
main() | #!/usr/bin/env python3
import sys
import os
sys.path.append('../nsmaps')
import nsmaps
from nsmaps.station import StationType
DATA_DIR = './website/nsmaps-data'
MAX_STATIONS = 60
def main():
stations = nsmaps.station.Stations(DATA_DIR)
major_station_types = (
StationType.intercitystation,
StationType.knooppuntIntercitystation,
StationType.megastation,
StationType.knooppuntSneltreinstation,
StationType.sneltreinstation,
StationType.knooppuntStoptreinstation,
StationType.stoptreinstation
)
stations_options = stations.get_stations_for_types(major_station_types)
stations_todo = []
n_stations = 0
for station in stations_options:
if n_stations >= MAX_STATIONS:
break
if not station.has_travel_time_data() and station.get_country_code() == 'NL':
print(station.get_travel_time_filepath())
stations_todo.append(station)
n_stations += 1
print(station)
timestamp = "19-04-2016 08:00"
stations.create_traveltimes_data(stations_todo, timestamp)
stations.recreate_missing_destinations(timestamp, False)
if __name__ == "__main__":
main() | Fix create missing station data function arguments | Fix create missing station data function arguments
| Python | mit | bartromgens/nsmaps,bartromgens/nsmaps,bartromgens/nsmaps |
5b9e2849c6ee49d68968fdc2588fefd5a25e7bac | contrib/migrateticketmodel.py | contrib/migrateticketmodel.py | #!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
| #!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import os
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
| Fix missing import in contrib script added in [2630]. | Fix missing import in contrib script added in [2630].
git-svn-id: 0d96b0c1a6983ccc08b3732614f4d6bfcf9cbb42@2631 af82e41b-90c4-0310-8c96-b1721e28e2e2
| Python | bsd-3-clause | rbaumg/trac,rbaumg/trac,rbaumg/trac,rbaumg/trac |
97ffd9f5271ffb93b04da06866591f6e6650d76b | bluebottle/settings/travis.py | bluebottle/settings/travis.py | SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
| # NOTE: local.py must be an empty file when using this configuration.
from .defaults import *
# Put the travis-ci environment specific overrides below.
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
| Fix Travis config so that the test run. | Fix Travis config so that the test run.
| Python | bsd-3-clause | onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site |
802d030087d7f15add5ccfa5d305555632575642 | changes/jobs/cleanup_tasks.py | changes/jobs/cleanup_tasks.py | from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.experimental.stats import RCount
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=60)
@tracked_task
def cleanup_tasks():
with RCount('cleanup_tasks'):
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if necessary.
"""
now = datetime.utcnow()
cutoff = now - CHECK_TIME
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < cutoff,
)
for task in pending_tasks:
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
| from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.experimental.stats import RCount, incr
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=60)
@tracked_task
def cleanup_tasks():
with RCount('cleanup_tasks'):
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if necessary.
"""
now = datetime.utcnow()
cutoff = now - CHECK_TIME
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < cutoff,
)
for task in pending_tasks:
incr('cleanup_unfinished')
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
| Add counter for cleanup tasks not following the decorator | Add counter for cleanup tasks not following the decorator
| Python | apache-2.0 | bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,dropbox/changes |
c69b9519c2984154dd15d31395d9590e00d689b5 | allauth/socialaccount/providers/trello/provider.py | allauth/socialaccount/providers/trello/provider.py | from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
class TrelloAccount(ProviderAccount):
def get_profile_url(self):
return None
def get_avatar_url(self):
return None
class TrelloProvider(OAuthProvider):
id = 'trello'
name = 'Trello'
account_class = TrelloAccount
def get_default_scope(self):
return ['read']
def extract_uid(self, data):
return data['id']
def get_auth_params(self, request, action):
data = super(TrelloProvider, self).get_auth_params(request, action)
app = self.get_app(request)
data['type'] = 'web_server'
data['name'] = app.name
# define here for how long it will be, this can be configured on the
# social app
data['expiration'] = 'never'
return data
provider_classes = [TrelloProvider]
| from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
class TrelloAccount(ProviderAccount):
def get_profile_url(self):
return None
def get_avatar_url(self):
return None
class TrelloProvider(OAuthProvider):
id = 'trello'
name = 'Trello'
account_class = TrelloAccount
def get_default_scope(self):
return ['read']
def extract_uid(self, data):
return data['id']
def get_auth_params(self, request, action):
data = super(TrelloProvider, self).get_auth_params(request, action)
app = self.get_app(request)
data['type'] = 'web_server'
data['name'] = app.name
data['scope'] = self.get_scope(request)
# define here for how long it will be, this can be configured on the
# social app
data['expiration'] = 'never'
return data
provider_classes = [TrelloProvider]
| Use 'scope' in TrelloProvider auth params. Allows overriding from django settings. | feat(TrelloProvider): Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
| Python | mit | lukeburden/django-allauth,rsalmaso/django-allauth,lukeburden/django-allauth,bittner/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth,bittner/django-allauth,bittner/django-allauth,pennersr/django-allauth,lukeburden/django-allauth,pennersr/django-allauth,rsalmaso/django-allauth |
b6db7abfd59a1b97fbb4d1b867e3316c029c94ff | spec/Report_S06_spec.py | spec/Report_S06_spec.py | from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
with description('Report S06 example'):
with before.all:
self.data_filenames = [
'spec/data/S06.xml',
# 'spec/data/S06_empty.xml'
]
self.report = []
for data_filename in self.data_filenames:
with open(data_filename) as data_file:
self.report.append(Report(data_file))
with it('generates the expected results for the whole report'):
result_filenames = []
for data_filename in self.data_filenames:
result_filenames.append('{}_result.txt'.format(data_filename))
for key, result_filename in enumerate(result_filenames):
with open(result_filename) as result_file:
result_string = result_file.read()
expected_result = literal_eval(result_string)
result = self.report[key].values
expect(result).to(equal(expected_result))
# result_filename = '{}_result.txt'.format(self.data_filename)
#
# with open(result_filename) as result_file:
# result_string = result_file.read()
# self.expected_result = literal_eval(result_string)
#
# result = self.report.values
#
# expect(result).to(equal(self.expected_result))
| from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
with description('Report S06 example'):
with before.all:
self.data_filenames = [
'spec/data/S06.xml',
'spec/data/S06_with_error.xml',
# 'spec/data/S06_empty.xml'
]
self.report = []
for data_filename in self.data_filenames:
with open(data_filename) as data_file:
self.report.append(Report(data_file))
with it('generates the expected results for the whole report'):
result_filenames = []
warnings = []
for data_filename in self.data_filenames:
result_filenames.append('{}_result.txt'.format(data_filename))
for key, result_filename in enumerate(result_filenames):
result = []
with open(result_filename) as result_file:
result_string = result_file.read()
expected_result = literal_eval(result_string)
for cnc in self.report[key].concentrators:
if cnc.meters:
for meter in cnc.meters:
for value in meter.values:
result.append(value)
warnings.append(meter.warnings)
print('Result: {} \n Expected result: {} \n Warnings: {}'.format(
result, expected_result, warnings))
expect(result).to(equal(expected_result))
expected_warnings = [[], ["ERROR: Cnc(CIR4621704174), "
"Meter(ZIV42553686). Thrown exception: "
"object of type 'NoneType' has no len()"], []]
expect(warnings).to(equal(expected_warnings))
| TEST for correct an with errors S06 report | TEST for correct an with errors S06 report
| Python | agpl-3.0 | gisce/primestg |
a2c13df57f2db1721c656200c1c37cf0e52b22c9 | dashboard/views.py | dashboard/views.py | # -*- coding: utf-8 -*-
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from babybuddy.mixins import PermissionRequired403Mixin
from core.models import Child
class Dashboard(LoginRequiredMixin, TemplateView):
# TODO: Use .card-deck in this template once BS4 is finalized.
template_name = 'dashboard/dashboard.html'
# Show the overall dashboard or a child dashboard if one Child instance.
def get(self, request, *args, **kwargs):
children = Child.objects.count()
if children == 0:
return HttpResponseRedirect(reverse('babybuddy:welcome'))
elif children == 1:
return HttpResponseRedirect(
reverse(
'dashboard:dashboard-child',
args={Child.objects.first().slug}
)
)
return super(Dashboard, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(Dashboard, self).get_context_data(**kwargs)
context['objects'] = Child.objects.all().order_by('last_name')
return context
class ChildDashboard(PermissionRequired403Mixin, DetailView):
model = Child
permission_required = ('core.view_child',)
raise_exception = True
template_name = 'dashboard/child.html'
| # -*- coding: utf-8 -*-
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from babybuddy.mixins import PermissionRequired403Mixin
from core.models import Child
class Dashboard(LoginRequiredMixin, TemplateView):
# TODO: Use .card-deck in this template once BS4 is finalized.
template_name = 'dashboard/dashboard.html'
# Show the overall dashboard or a child dashboard if one Child instance.
def get(self, request, *args, **kwargs):
children = Child.objects.count()
if children == 0:
return HttpResponseRedirect(reverse('babybuddy:welcome'))
elif children == 1:
return HttpResponseRedirect(
reverse(
'dashboard:dashboard-child',
args={Child.objects.first().slug}
)
)
return super(Dashboard, self).get(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(Dashboard, self).get_context_data(**kwargs)
context['objects'] = Child.objects.all() \
.order_by('last_name', 'first_name', 'id')
return context
class ChildDashboard(PermissionRequired403Mixin, DetailView):
model = Child
permission_required = ('core.view_child',)
raise_exception = True
template_name = 'dashboard/child.html'
| Add dashboard sort clauses: first name and id | Add dashboard sort clauses: first name and id
I have seen the dashboard sorting be inconsistent. Last name then
first name then id should produce a consistent and predictable
sort order.
| Python | bsd-2-clause | cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy |
d7ea1e9c7728b5e98e6c798ab3d5ef5b9066463c | barrage/basetestcases.py | barrage/basetestcases.py | from .baselauncher import BaseLauncher
class BaseTestCases(BaseLauncher):
def handle_problem_set(self, name, problems):
for i, prob in enumerate(problems):
answer_got = self.get_answer(prob, name, i, len(problems))
if not answer_got:
return False
if not prob.validate(answer_got):
try:
answer_expected = prob.Answer().for_problem(prob)
except NotImplementedError:
print("\nFAILED. STDIN:\n{}\nGOT:\n{}"
.format(prob.to_stdin(), stdout))
else:
print("\nFAILED. STDIN:\n{}\nEXPECTED:\n{}\nGOT:\n{}"
.format(prob.to_stdin(), answer_expected.to_stdout(), stdout))
return False
print("")
return True
| from .baselauncher import BaseLauncher
class BaseTestCases(BaseLauncher):
def handle_problem_set(self, name, problems):
for i, prob in enumerate(problems):
answer_got = self.get_answer(prob, name, i, len(problems))
if not answer_got:
return False
if not prob.validate(answer_got):
try:
answer_expected = prob.Answer().for_problem(prob)
except NotImplementedError:
print("\nFAILED. STDIN:\n{}\nGOT:\n{}"
.format(prob.to_stdin(), answer_got.to_stdout()))
else:
print("\nFAILED. STDIN:\n{}\nEXPECTED:\n{}\nGOT:\n{}"
.format(prob.to_stdin(), answer_expected.to_stdout(), answer_got.to_stdout()))
return False
print("")
return True
| Fix a bug with application stdout print | Fix a bug with application stdout print
| Python | mit | vnetserg/barrage |
8a6bc4a46141b42d4457fdc4d63df234f788253d | django_nose/plugin.py | django_nose/plugin.py |
class ResultPlugin(object):
"""
Captures the TestResult object for later inspection.
nose doesn't return the full test result object from any of its runner
methods. Pass an instance of this plugin to the TestProgram and use
``result`` after running the tests to get the TestResult object.
"""
name = "result"
enabled = True
def finalize(self, result):
self.result = result
class DjangoSetUpPlugin(object):
"""
Configures Django to setup and tear down the environment.
This allows coverage to report on all code imported and used during the
initialisation of the test runner.
"""
name = "django setup"
enabled = True
# We need this to run before the coverage plugin (which has a score
# of 500), so that we still have a stdout for the user interaction
# Django sometimes wants to do during test database setup.
score = 700
def __init__(self, runner):
super(DjangoSetUpPlugin, self).__init__()
self.runner = runner
def begin(self):
"""Setup the environment"""
self.runner.setup_test_environment()
self.old_names = self.runner.setup_databases()
def finalize(self, result):
"""Destroy the environment"""
self.runner.teardown_databases(self.old_names)
self.runner.teardown_test_environment()
| import sys
class ResultPlugin(object):
"""
Captures the TestResult object for later inspection.
nose doesn't return the full test result object from any of its runner
methods. Pass an instance of this plugin to the TestProgram and use
``result`` after running the tests to get the TestResult object.
"""
name = "result"
enabled = True
def finalize(self, result):
self.result = result
class DjangoSetUpPlugin(object):
"""
Configures Django to setup and tear down the environment.
This allows coverage to report on all code imported and used during the
initialisation of the test runner.
"""
name = "django setup"
enabled = True
def __init__(self, runner):
super(DjangoSetUpPlugin, self).__init__()
self.runner = runner
self.sys_stdout = sys.stdout
def begin(self):
"""Setup the environment"""
sys_stdout = sys.stdout
sys.stdout = self.sys_stdout
self.runner.setup_test_environment()
self.old_names = self.runner.setup_databases()
sys.stdout = sys_stdout
def finalize(self, result):
"""Destroy the environment"""
self.runner.teardown_databases(self.old_names)
self.runner.teardown_test_environment()
| Allow coverage to work and keep stdout and be activated before initial imports. | Allow coverage to work and keep stdout and be activated before initial imports.
| Python | bsd-3-clause | aristiden7o/django-nose,harukaeru/django-nose,disqus/django-nose,dgladkov/django-nose,mzdaniel/django-nose,sociateru/django-nose,krinart/django-nose,alexhayes/django-nose,daineX/django-nose,harukaeru/django-nose,mzdaniel/django-nose,Deepomatic/django-nose,krinart/django-nose,fabiosantoscode/django-nose-123-fix,alexhayes/django-nose,daineX/django-nose,dgladkov/django-nose,sociateru/django-nose,aristiden7o/django-nose,millerdev/django-nose,Deepomatic/django-nose,franciscoruiz/django-nose,360youlun/django-nose,disqus/django-nose,franciscoruiz/django-nose,fabiosantoscode/django-nose-123-fix,millerdev/django-nose,brilliant-org/django-nose,360youlun/django-nose,brilliant-org/django-nose |
9c037ed3ebe7353b419562311bbc1f07875ab358 | django_su/forms.py | django_su/forms.py | # -*- coding: utf-8 -*-
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from . import get_user_model
class UserSuForm(forms.Form):
user = forms.ModelChoiceField(
label=_('Users'), queryset=get_user_model()._default_manager.order_by(
'username'), required=True) # pylint: disable=W0212
use_ajax_select = False
def __init__(self, *args, **kwargs):
super(UserSuForm, self).__init__(*args, **kwargs)
if 'ajax_select' in settings.INSTALLED_APPS and getattr(
settings, 'AJAX_LOOKUP_CHANNELS', None):
from ajax_select.fields import AutoCompleteSelectField
lookup = settings.AJAX_LOOKUP_CHANNELS.get('django_su', None)
if lookup is not None:
old_field = self.fields['user']
self.fields['user'] = AutoCompleteSelectField(
'django_su',
required=old_field.required,
label=old_field.label,
)
self.use_ajax_select = True
def get_user(self):
return self.cleaned_data.get('user', None)
def __str__(self):
if 'formadmin' in settings.INSTALLED_APPS:
try:
from formadmin.forms import as_django_admin
return as_django_admin(self)
except ImportError:
pass
return super(UserSuForm, self).__str__()
| # -*- coding: utf-8 -*-
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from . import get_user_model
class UserSuForm(forms.Form):
username_field = get_user_model().USERNAME_FIELD
user = forms.ModelChoiceField(
label=_('Users'), queryset=get_user_model()._default_manager.order_by(
username_field), required=True) # pylint: disable=W0212
use_ajax_select = False
def __init__(self, *args, **kwargs):
super(UserSuForm, self).__init__(*args, **kwargs)
if 'ajax_select' in settings.INSTALLED_APPS and getattr(
settings, 'AJAX_LOOKUP_CHANNELS', None):
from ajax_select.fields import AutoCompleteSelectField
lookup = settings.AJAX_LOOKUP_CHANNELS.get('django_su', None)
if lookup is not None:
old_field = self.fields['user']
self.fields['user'] = AutoCompleteSelectField(
'django_su',
required=old_field.required,
label=old_field.label,
)
self.use_ajax_select = True
def get_user(self):
return self.cleaned_data.get('user', None)
def __str__(self):
if 'formadmin' in settings.INSTALLED_APPS:
try:
from formadmin.forms import as_django_admin
return as_django_admin(self)
except ImportError:
pass
return super(UserSuForm, self).__str__()
| Update UserSuForm to enhance compatibility with custom user models. | Update UserSuForm to enhance compatibility with custom user models.
In custom user models, we cannot rely on there being a 'username'
field. Instead, we should use whichever field has been specified as
the username field.
| Python | mit | adamcharnock/django-su,PetrDlouhy/django-su,adamcharnock/django-su,PetrDlouhy/django-su |
f100faade749d86597e1c8c52b88d55261e7a4dc | suorganizer/wsgi.py | suorganizer/wsgi.py | """
WSGI config for suorganizer project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "suorganizer.settings")
application = get_wsgi_application()
| """
WSGI config for suorganizer project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "suorganizer.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
| Use WhiteNoise for static content. | Ch29: Use WhiteNoise for static content.
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 |
4412a59bfe8228698e5b5bbe8bb21c8e8a70d357 | test/functional/feature_shutdown.py | test/functional/feature_shutdown.py | #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoind shutdown."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, get_rpc_proxy
from threading import Thread
def test_long_call(node):
block = node.waitfornewblock()
assert_equal(block['height'], 0)
class ShutdownTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir)
Thread(target=test_long_call, args=(node,)).start()
# wait 1 second to ensure event loop waits for current connections to close
self.stop_node(0, wait=1000)
if __name__ == '__main__':
ShutdownTest().main()
| #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoind shutdown."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, get_rpc_proxy, wait_until
from threading import Thread
def test_long_call(node):
block = node.waitfornewblock()
assert_equal(block['height'], 0)
class ShutdownTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir)
# Force connection establishment by executing a dummy command.
node.getblockcount()
Thread(target=test_long_call, args=(node,)).start()
# Wait until the server is executing the above `waitfornewblock`.
wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2)
# Wait 1 second after requesting shutdown but not before the `stop` call
# finishes. This is to ensure event loop waits for current connections
# to close.
self.stop_node(0, wait=1000)
if __name__ == '__main__':
ShutdownTest().main()
| Remove race between connecting and shutdown on separate connections | qa: Remove race between connecting and shutdown on separate connections
| Python | mit | fujicoin/fujicoin,myriadteam/myriadcoin,apoelstra/bitcoin,prusnak/bitcoin,namecoin/namecore,midnightmagic/bitcoin,jamesob/bitcoin,fujicoin/fujicoin,pataquets/namecoin-core,r8921039/bitcoin,lateminer/bitcoin,DigitalPandacoin/pandacoin,Sjors/bitcoin,sipsorcery/bitcoin,bitcoin/bitcoin,AkioNak/bitcoin,bespike/litecoin,particl/particl-core,EthanHeilman/bitcoin,ajtowns/bitcoin,ahmedbodi/vertcoin,namecoin/namecoin-core,dscotese/bitcoin,GroestlCoin/GroestlCoin,domob1812/bitcoin,qtumproject/qtum,sipsorcery/bitcoin,monacoinproject/monacoin,instagibbs/bitcoin,litecoin-project/litecoin,jonasschnelli/bitcoin,jambolo/bitcoin,rnicoll/bitcoin,DigitalPandacoin/pandacoin,prusnak/bitcoin,droark/bitcoin,vertcoin/vertcoin,myriadcoin/myriadcoin,rnicoll/bitcoin,rnicoll/bitcoin,mruddy/bitcoin,ElementsProject/elements,namecoin/namecore,GroestlCoin/GroestlCoin,bespike/litecoin,droark/bitcoin,wellenreiter01/Feathercoin,jtimon/bitcoin,jonasschnelli/bitcoin,bitcoin/bitcoin,vertcoin/vertcoin,vertcoin/vertcoin,dscotese/bitcoin,jonasschnelli/bitcoin,monacoinproject/monacoin,nikkitan/bitcoin,ElementsProject/elements,DigitalPandacoin/pandacoin,domob1812/namecore,MarcoFalke/bitcoin,Sjors/bitcoin,FeatherCoin/Feathercoin,rnicoll/dogecoin,EthanHeilman/bitcoin,andreaskern/bitcoin,dscotese/bitcoin,CryptArc/bitcoin,droark/bitcoin,afk11/bitcoin,jamesob/bitcoin,litecoin-project/litecoin,gjhiggins/vcoincore,apoelstra/bitcoin,instagibbs/bitcoin,bitcoinsSG/bitcoin,sstone/bitcoin,GroestlCoin/bitcoin,GroestlCoin/bitcoin,mitchellcash/bitcoin,MarcoFalke/bitcoin,lateminer/bitcoin,afk11/bitcoin,kallewoof/bitcoin,anditto/bitcoin,andreaskern/bitcoin,AkioNak/bitcoin,domob1812/namecore,MeshCollider/bitcoin,achow101/bitcoin,tjps/bitcoin,pataquets/namecoin-core,CryptArc/bitcoin,peercoin/peercoin,bitcoin/bitcoin,ajtowns/bitcoin,namecoin/namecoin-core,myriadcoin/myriadcoin,alecalve/bitcoin,achow101/bitcoin,jamesob/bitcoin,CryptArc/bitcoin,domob1812/bitcoin,jamesob/bitcoin,wellenreiter01/Feathercoin,wellenreiter01/Feathercoin,JeremyRubin/bitcoin,pstratem/bitcoin,CryptArc/bitcoin,tjps/bitcoin,achow101/bitcoin,prusnak/bitcoin,dscotese/bitcoin,pstratem/bitcoin,Xekyo/bitcoin,bitcoinknots/bitcoin,kallewoof/bitcoin,jtimon/bitcoin,jtimon/bitcoin,bitcoinsSG/bitcoin,untrustbank/litecoin,MarcoFalke/bitcoin,cdecker/bitcoin,MeshCollider/bitcoin,GroestlCoin/GroestlCoin,untrustbank/litecoin,myriadcoin/myriadcoin,apoelstra/bitcoin,pstratem/bitcoin,Sjors/bitcoin,nikkitan/bitcoin,ElementsProject/elements,monacoinproject/monacoin,rnicoll/bitcoin,Xekyo/bitcoin,mm-s/bitcoin,cdecker/bitcoin,OmniLayer/omnicore,MeshCollider/bitcoin,pataquets/namecoin-core,jlopp/statoshi,litecoin-project/litecoin,bitcoinsSG/bitcoin,mitchellcash/bitcoin,anditto/bitcoin,midnightmagic/bitcoin,anditto/bitcoin,bespike/litecoin,jambolo/bitcoin,qtumproject/qtum,yenliangl/bitcoin,FeatherCoin/Feathercoin,n1bor/bitcoin,ahmedbodi/vertcoin,MarcoFalke/bitcoin,OmniLayer/omnicore,afk11/bitcoin,tjps/bitcoin,bitcoinknots/bitcoin,jlopp/statoshi,namecoin/namecore,alecalve/bitcoin,kallewoof/bitcoin,MarcoFalke/bitcoin,jlopp/statoshi,yenliangl/bitcoin,bitcoinknots/bitcoin,FeatherCoin/Feathercoin,pstratem/bitcoin,qtumproject/qtum,jnewbery/bitcoin,ElementsProject/elements,bitcoinknots/bitcoin,MeshCollider/bitcoin,monacoinproject/monacoin,gjhiggins/vcoincore,myriadcoin/myriadcoin,instagibbs/bitcoin,EthanHeilman/bitcoin,n1bor/bitcoin,pstratem/bitcoin,peercoin/peercoin,anditto/bitcoin,ajtowns/bitcoin,andreaskern/bitcoin,mitchellcash/bitcoin,CryptArc/bitcoin,tecnovert/particl-core,n1bor/bitcoin,bespike/litecoin,OmniLayer/omnicore,untrustbank/litecoin,DigitalPandacoin/pandacoin,tecnovert/particl-core,particl/particl-core,ahmedbodi/vertcoin,andreaskern/bitcoin,OmniLayer/omnicore,DigitalPandacoin/pandacoin,JeremyRubin/bitcoin,midnightmagic/bitcoin,sipsorcery/bitcoin,jambolo/bitcoin,lateminer/bitcoin,qtumproject/qtum,GroestlCoin/bitcoin,namecoin/namecoin-core,pataquets/namecoin-core,bespike/litecoin,jamesob/bitcoin,practicalswift/bitcoin,MeshCollider/bitcoin,myriadteam/myriadcoin,mruddy/bitcoin,OmniLayer/omnicore,instagibbs/bitcoin,jnewbery/bitcoin,FeatherCoin/Feathercoin,jnewbery/bitcoin,jlopp/statoshi,dscotese/bitcoin,midnightmagic/bitcoin,AkioNak/bitcoin,n1bor/bitcoin,cdecker/bitcoin,apoelstra/bitcoin,fanquake/bitcoin,rnicoll/dogecoin,instagibbs/bitcoin,wellenreiter01/Feathercoin,mitchellcash/bitcoin,afk11/bitcoin,alecalve/bitcoin,prusnak/bitcoin,qtumproject/qtum,ajtowns/bitcoin,sstone/bitcoin,rnicoll/bitcoin,peercoin/peercoin,namecoin/namecore,gjhiggins/vcoincore,prusnak/bitcoin,untrustbank/litecoin,rnicoll/dogecoin,bitcoin/bitcoin,apoelstra/bitcoin,GroestlCoin/GroestlCoin,achow101/bitcoin,jambolo/bitcoin,jtimon/bitcoin,bitcoin/bitcoin,domob1812/namecore,namecoin/namecoin-core,myriadteam/myriadcoin,prusnak/bitcoin,sstone/bitcoin,DigitalPandacoin/pandacoin,JeremyRubin/bitcoin,mm-s/bitcoin,practicalswift/bitcoin,practicalswift/bitcoin,namecoin/namecore,qtumproject/qtum,jonasschnelli/bitcoin,bitcoin/bitcoin,afk11/bitcoin,ElementsProject/elements,sipsorcery/bitcoin,sstone/bitcoin,rnicoll/dogecoin,cdecker/bitcoin,ahmedbodi/vertcoin,domob1812/bitcoin,jonasschnelli/bitcoin,apoelstra/bitcoin,fujicoin/fujicoin,jambolo/bitcoin,sstone/bitcoin,jtimon/bitcoin,tjps/bitcoin,GroestlCoin/bitcoin,jnewbery/bitcoin,litecoin-project/litecoin,peercoin/peercoin,pataquets/namecoin-core,ahmedbodi/vertcoin,mruddy/bitcoin,fujicoin/fujicoin,namecoin/namecore,JeremyRubin/bitcoin,wellenreiter01/Feathercoin,andreaskern/bitcoin,mm-s/bitcoin,fanquake/bitcoin,nikkitan/bitcoin,bitcoinsSG/bitcoin,lateminer/bitcoin,jnewbery/bitcoin,lateminer/bitcoin,CryptArc/bitcoin,JeremyRubin/bitcoin,myriadteam/myriadcoin,domob1812/bitcoin,Xekyo/bitcoin,myriadteam/myriadcoin,fanquake/bitcoin,vertcoin/vertcoin,midnightmagic/bitcoin,alecalve/bitcoin,Sjors/bitcoin,droark/bitcoin,FeatherCoin/Feathercoin,gjhiggins/vcoincore,mitchellcash/bitcoin,midnightmagic/bitcoin,monacoinproject/monacoin,particl/particl-core,AkioNak/bitcoin,FeatherCoin/Feathercoin,afk11/bitcoin,andreaskern/bitcoin,tecnovert/particl-core,GroestlCoin/bitcoin,practicalswift/bitcoin,domob1812/namecore,sstone/bitcoin,practicalswift/bitcoin,myriadcoin/myriadcoin,n1bor/bitcoin,tecnovert/particl-core,jambolo/bitcoin,bitcoinsSG/bitcoin,Sjors/bitcoin,domob1812/namecore,tecnovert/particl-core,qtumproject/qtum,cdecker/bitcoin,yenliangl/bitcoin,nikkitan/bitcoin,sipsorcery/bitcoin,particl/particl-core,mruddy/bitcoin,nikkitan/bitcoin,untrustbank/litecoin,mm-s/bitcoin,anditto/bitcoin,instagibbs/bitcoin,rnicoll/dogecoin,namecoin/namecoin-core,ahmedbodi/vertcoin,jlopp/statoshi,monacoinproject/monacoin,n1bor/bitcoin,achow101/bitcoin,myriadteam/myriadcoin,pstratem/bitcoin,jamesob/bitcoin,MeshCollider/bitcoin,bitcoinsSG/bitcoin,fanquake/bitcoin,r8921039/bitcoin,Xekyo/bitcoin,ElementsProject/elements,AkioNak/bitcoin,jtimon/bitcoin,untrustbank/litecoin,GroestlCoin/GroestlCoin,fanquake/bitcoin,particl/particl-core,kallewoof/bitcoin,EthanHeilman/bitcoin,JeremyRubin/bitcoin,achow101/bitcoin,cdecker/bitcoin,mm-s/bitcoin,tecnovert/particl-core,namecoin/namecoin-core,sipsorcery/bitcoin,droark/bitcoin,gjhiggins/vcoincore,tjps/bitcoin,bitcoinknots/bitcoin,domob1812/bitcoin,peercoin/peercoin,kallewoof/bitcoin,litecoin-project/litecoin,yenliangl/bitcoin,lateminer/bitcoin,mruddy/bitcoin,alecalve/bitcoin,anditto/bitcoin,nikkitan/bitcoin,gjhiggins/vcoincore,particl/particl-core,r8921039/bitcoin,fanquake/bitcoin,peercoin/peercoin,fujicoin/fujicoin,r8921039/bitcoin,vertcoin/vertcoin,yenliangl/bitcoin,myriadcoin/myriadcoin,rnicoll/bitcoin,Xekyo/bitcoin,yenliangl/bitcoin,r8921039/bitcoin,domob1812/namecore,bespike/litecoin,domob1812/bitcoin,GroestlCoin/GroestlCoin,mm-s/bitcoin,OmniLayer/omnicore,kallewoof/bitcoin,vertcoin/vertcoin,fujicoin/fujicoin,droark/bitcoin,alecalve/bitcoin,mitchellcash/bitcoin,Xekyo/bitcoin,tjps/bitcoin,wellenreiter01/Feathercoin,dscotese/bitcoin,pataquets/namecoin-core,mruddy/bitcoin,MarcoFalke/bitcoin,jlopp/statoshi,ajtowns/bitcoin,litecoin-project/litecoin,GroestlCoin/bitcoin,EthanHeilman/bitcoin,r8921039/bitcoin,EthanHeilman/bitcoin,practicalswift/bitcoin,ajtowns/bitcoin,AkioNak/bitcoin |
b455b545779836e6fb2ff69717e6b0a26e23e2cc | mopidy/utils/path.py | mopidy/utils/path.py | import logging
import os
import sys
import urllib
logger = logging.getLogger('mopidy.utils.path')
def get_or_create_folder(folder):
folder = os.path.expanduser(folder)
if not os.path.isdir(folder):
logger.info(u'Creating dir %s', folder)
os.mkdir(folder, 0755)
return folder
def get_or_create_file(filename):
filename = os.path.expanduser(filename)
if not os.path.isfile(filename):
logger.info(u'Creating file %s', filename)
open(filename, 'w')
return filename
def path_to_uri(*paths):
path = os.path.join(*paths)
#path = os.path.expanduser(path) # FIXME Waiting for test case?
path = path.encode('utf-8')
if sys.platform == 'win32':
return 'file:' + urllib.pathname2url(path)
return 'file://' + urllib.pathname2url(path)
| import logging
import os
import sys
import urllib
logger = logging.getLogger('mopidy.utils.path')
def get_or_create_folder(folder):
folder = os.path.expanduser(folder)
if not os.path.isdir(folder):
logger.info(u'Creating dir %s', folder)
os.mkdir(folder, 0755)
return folder
def get_or_create_file(filename):
filename = os.path.expanduser(filename)
if not os.path.isfile(filename):
logger.info(u'Creating file %s', filename)
open(filename, 'w')
return filename
def path_to_uri(*paths):
path = os.path.join(*paths)
#path = os.path.expanduser(path) # FIXME Waiting for test case?
path = path.encode('utf-8')
if sys.platform == 'win32':
return 'file:' + urllib.pathname2url(path)
return 'file://' + urllib.pathname2url(path)
def find_files(folder):
for dirpath, dirnames, filenames in os.walk(folder):
for filename in filenames:
dirpath = os.path.abspath(dirpath)
yield os.path.join(dirpath, filename)
| Add helper for finding files in folder | Add helper for finding files in folder
| Python | apache-2.0 | priestd09/mopidy,pacificIT/mopidy,kingosticks/mopidy,rawdlite/mopidy,woutervanwijk/mopidy,jcass77/mopidy,dbrgn/mopidy,diandiankan/mopidy,abarisain/mopidy,mopidy/mopidy,adamcik/mopidy,pacificIT/mopidy,hkariti/mopidy,swak/mopidy,mokieyue/mopidy,dbrgn/mopidy,jmarsik/mopidy,bencevans/mopidy,jcass77/mopidy,dbrgn/mopidy,jmarsik/mopidy,tkem/mopidy,mokieyue/mopidy,ali/mopidy,priestd09/mopidy,bencevans/mopidy,vrs01/mopidy,rawdlite/mopidy,tkem/mopidy,swak/mopidy,pacificIT/mopidy,rawdlite/mopidy,jmarsik/mopidy,adamcik/mopidy,glogiotatidis/mopidy,ali/mopidy,diandiankan/mopidy,quartz55/mopidy,hkariti/mopidy,vrs01/mopidy,bacontext/mopidy,bacontext/mopidy,adamcik/mopidy,bencevans/mopidy,jodal/mopidy,swak/mopidy,quartz55/mopidy,SuperStarPL/mopidy,tkem/mopidy,mokieyue/mopidy,bacontext/mopidy,quartz55/mopidy,kingosticks/mopidy,quartz55/mopidy,mopidy/mopidy,woutervanwijk/mopidy,swak/mopidy,jcass77/mopidy,vrs01/mopidy,ali/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,jodal/mopidy,ZenithDK/mopidy,bencevans/mopidy,kingosticks/mopidy,diandiankan/mopidy,jodal/mopidy,hkariti/mopidy,ZenithDK/mopidy,diandiankan/mopidy,ali/mopidy,liamw9534/mopidy,bacontext/mopidy,ZenithDK/mopidy,mokieyue/mopidy,jmarsik/mopidy,liamw9534/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,vrs01/mopidy,rawdlite/mopidy,priestd09/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,abarisain/mopidy,hkariti/mopidy,glogiotatidis/mopidy,mopidy/mopidy,tkem/mopidy,ZenithDK/mopidy |
8ae27080b8ff9fe124733005a8006261a3d22266 | migrate/crud/versions/001_create_initial_tables.py | migrate/crud/versions/001_create_initial_tables.py | from sqlalchemy import *
from migrate import *
metadata = MetaData()
table = Table('crud_versions', metadata,
Column('id', Integer, primary_key=True),
Column('object_type', Text, nullable=False),
Column('object_id', Integer, nullable=False),
Column('commit_time', DateTime, nullable=False),
Column('data', Blob, nullable=False),
Column('blame', Text, nullable=False),
Column('comment', Text, nullable=False),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
metadata.bind = migrate_engine
table.create()
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
table.drop()
| from sqlalchemy import *
from migrate import *
metadata = MetaData()
table = Table('crud_versions', metadata,
Column('id', Integer, primary_key=True),
Column('object_type', Text, nullable=False),
Column('object_id', Integer, nullable=False),
Column('commit_time', DateTime, nullable=False),
Column('data', LargeBinary, nullable=False),
Column('blame', Text, nullable=False),
Column('comment', Text, nullable=False),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
metadata.bind = migrate_engine
table.create()
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
table.drop()
| Fix some of the schema. | Fix some of the schema. | Python | bsd-3-clause | mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen |
c535d9e105284bb469d10003ee0f5533b8d8d5db | auditlog/__openerp__.py | auditlog/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 ABF OSIELL (<http://osiell.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Audit Log",
'version': "1.0",
'author': "ABF OSIELL",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'views/auditlog_view.xml',
],
'application': True,
'installable': True,
'pre_init_hook': 'pre_init_hook',
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 ABF OSIELL (<http://osiell.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Audit Log",
'version': "1.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'views/auditlog_view.xml',
],
'application': True,
'installable': True,
'pre_init_hook': 'pre_init_hook',
}
| Add OCA as author of OCA addons | Add OCA as author of OCA addons
In order to get visibility on https://www.odoo.com/apps the OCA board has
decided to add the OCA as author of all the addons maintained as part of the
association.
| Python | agpl-3.0 | brain-tec/server-tools,bmya/server-tools,bmya/server-tools,brain-tec/server-tools,brain-tec/server-tools,bmya/server-tools |
f7a9074f7096b820b5342108560b90efce619405 | tests/test_histogram.py | tests/test_histogram.py | import logging
import json
import py
from fields import Namespace
from pytest_benchmark.plugin import BenchmarkSession
class MockSession(BenchmarkSession):
def __init__(self):
self.histogram = True
me = py.path.local(__file__)
self.storage = me.dirpath(me.purebasename)
self.benchmarks = []
self.sort = u"min"
self.compare = self.storage.join('0001_b692275e28a23b5d4aae70f453079ba593e60290_20150811_052350.json')
self.logger = logging.getLogger(__name__)
for bench_file in self.storage.listdir("[0-9][0-9][0-9][0-9]_*.json"):
with bench_file.open('rb') as fh:
data = json.load(fh)
self.benchmarks.extend(
Namespace(
json=lambda: bench['stats'],
fullname=bench['fullname'],
**bench['stats']
)
for bench in data['benchmarks']
)
break
def test_rendering():
sess = MockSession()
sess.handle_histogram()
| import logging
import json
import py
from fields import Namespace
from pytest_benchmark.plugin import BenchmarkSession
class MockSession(BenchmarkSession):
def __init__(self):
self.histogram = True
me = py.path.local(__file__)
self.storage = me.dirpath(me.purebasename)
self.benchmarks = []
self.sort = u"min"
self.compare = self.storage.join('0001_b692275e28a23b5d4aae70f453079ba593e60290_20150811_052350.json')
self.logger = logging.getLogger(__name__)
for bench_file in self.storage.listdir("[0-9][0-9][0-9][0-9]_*.json"):
with bench_file.open() as fh:
data = json.load(fh)
self.benchmarks.extend(
Namespace(
json=lambda: bench['stats'],
fullname=bench['fullname'],
**bench['stats']
)
for bench in data['benchmarks']
)
break
def test_rendering():
sess = MockSession()
sess.handle_histogram()
| Use whatever is default open mode. | Use whatever is default open mode.
| Python | bsd-2-clause | thedrow/pytest-benchmark,aldanor/pytest-benchmark,SectorLabs/pytest-benchmark,ionelmc/pytest-benchmark |
5b94ce3796eb37301f2ac6928bfe0a0426bcf31e | docs/config/all.py | docs/config/all.py | # Global configuration information used across all the
# translations of documentation.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = '1.x'
# The search index version.
search_version = 'chronos-1'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Chronos'
# Other versions that display in the version picker menu.
version_list = [
{'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x', 'current': True},
]
# Languages available.
languages = ['en', 'fr', 'ja', 'pt']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = 'master'
# Current version being built
version = '1.x'
# Language in use for this directory.
language = 'en'
show_root_link = True
repository = 'cakephp/chronos'
source_path = 'docs/'
| # Global configuration information used across all the
# translations of documentation.
#
# Import the base theme configuration
from cakephpsphinx.config.all import *
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = '2.x'
# The search index version.
search_version = 'chronos-2'
# The marketing display name for the book.
version_name = ''
# Project name shown in the black header bar
project = 'Chronos'
# Other versions that display in the version picker menu.
version_list = [
{'name': '1.x', 'number': '/chronos/1.x', 'title': '1.x'},
{'name': '2.x', 'number': '/chronos/2.x', 'title': '2.x', 'current': True},
]
# Languages available.
languages = ['en', 'fr', 'ja', 'pt']
# The GitHub branch name for this version of the docs
# for edit links to point at.
branch = '2.x'
# Current version being built
version = '2.x'
# Language in use for this directory.
language = 'en'
show_root_link = True
repository = 'cakephp/chronos'
source_path = 'docs/'
is_prerelease = True
| Update docs versions for 2.x | Update docs versions for 2.x
| Python | mit | cakephp/chronos |
90ef9d9a8c5b02d32868da1236ea063f6abd7cd0 | src/sentry/api/serializers/models/auditlogentry.py | src/sentry/api/serializers/models/auditlogentry.py | from __future__ import absolute_import
import six
from sentry.api.serializers import Serializer, register, serialize
from sentry.models import AuditLogEntry
@register(AuditLogEntry)
class AuditLogEntrySerializer(Serializer):
def get_attrs(self, item_list, user):
# TODO(dcramer); assert on relations
actors = {
d['id']: d for d in serialize(set(i.actor for i in item_list if i.actor_id), user)
}
return {
item: {
'actor': actors[six.text_type(item.actor_id)] if item.actor_id else {
'name': item.get_actor_name(),
},
} for item in item_list
}
def serialize(self, obj, attrs, user):
return {
'id': six.text_type(obj.id),
'actor': attrs['actor'],
'event': obj.get_event_display(),
'ipAddress': obj.ip_address,
'note': obj.get_note(),
'dateCreated': obj.datetime,
}
| from __future__ import absolute_import
import six
from sentry.api.serializers import Serializer, register, serialize
from sentry.models import AuditLogEntry
def fix(data):
# There was a point in time where full Team objects
# got serialized into our AuditLogEntry.data, so these
# values need to be stripped and reduced down to integers
if 'teams' not in data:
return data
if not data['teams']:
return data
if not hasattr(data['teams'][0], 'id'):
return data
data['teams'] = [t.id for t in data['teams']]
return data
@register(AuditLogEntry)
class AuditLogEntrySerializer(Serializer):
def get_attrs(self, item_list, user):
# TODO(dcramer); assert on relations
users = {
d['id']: d for d in serialize(
set(i.actor for i in item_list if i.actor_id) |
set(i.target_user for i in item_list if i.target_user_id),
user,
)
}
return {
item: {
'actor': users[six.text_type(item.actor_id)] if item.actor_id else {
'name': item.get_actor_name(),
},
'targetUser': users.get(six.text_type(item.target_user_id)) or item.target_user_id
} for item in item_list
}
def serialize(self, obj, attrs, user):
return {
'id': six.text_type(obj.id),
'actor': attrs['actor'],
'event': obj.get_event_display(),
'ipAddress': obj.ip_address,
'note': obj.get_note(),
'targetObject': obj.target_object,
'targetUser': attrs['targetUser'],
'data': fix(obj.data),
'dateCreated': obj.datetime,
}
| Expand AuditLogEntry serializer to return back all of the data | feat(api): Expand AuditLogEntry serializer to return back all of the data
Fixes GH-6521
| Python | bsd-3-clause | ifduyue/sentry,ifduyue/sentry,mvaled/sentry,mvaled/sentry,looker/sentry,mvaled/sentry,ifduyue/sentry,looker/sentry,gencer/sentry,gencer/sentry,ifduyue/sentry,mvaled/sentry,gencer/sentry,mvaled/sentry,looker/sentry,beeftornado/sentry,beeftornado/sentry,ifduyue/sentry,looker/sentry,gencer/sentry,gencer/sentry,beeftornado/sentry,mvaled/sentry,looker/sentry |
91acec032abeb942bf90d6522a4d9d38ad624d46 | tests/test_buffs.py | tests/test_buffs.py | import unittest
from buffs import *
class StatusEffectTests(unittest.TestCase):
"""
StatusEffect is the base class for buffs
"""
def test_init(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
self.assertEqual(st_ef.name, test_name)
self.assertEqual(st_ef.duration, test_duration)
def test_str(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
expected_str = "Default Status Effect"
self.assertEqual(str(st_ef), "Default Status Effect")
if __name__ == '__main__':
unittest.main()
| import unittest
from buffs import *
class StatusEffectTests(unittest.TestCase):
"""
StatusEffect is the base class for buffs
"""
def test_init(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
self.assertEqual(st_ef.name, test_name)
self.assertEqual(st_ef.duration, test_duration)
def test_str(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
expected_str = "Default Status Effect"
self.assertEqual(str(st_ef), "Default Status Effect")
class BeneficialBuffTests(unittest.TestCase):
def test_init(self):
name = 'BMW'
stats_amounts = [('strength', 10), ('armor', 20), ('health', 30)]
duration = 10
buff = BeneficialBuff(name=name, buff_stats_and_amounts=stats_amounts, duration=duration)
self.assertEqual(buff.name, name)
self.assertEqual(buff.buff_stats_and_amounts, stats_amounts)
self.assertEqual(buff.duration, duration)
if __name__ == '__main__':
unittest.main()
| Test for the BeneficialBuff class | Test for the BeneficialBuff class
| Python | mit | Enether/python_wow |
c90fd7d026cdeeff7d073c1d15ff550cc937f961 | dusty/daemon.py | dusty/daemon.py | import sys
import logging
from .preflight import preflight_check
from .notifier import notify
def configure_logging():
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logging.captureWarnings(True)
def main():
notify('Dusty initializing...')
configure_logging()
preflight_check()
if __name__ == '__main__':
main()
| import os
import sys
import logging
import socket
from .preflight import preflight_check
from .notifier import notify
SOCKET_PATH = '/var/run/dusty/dusty.sock'
def _configure_logging():
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logging.captureWarnings(True)
def _clean_up_existing_socket():
try:
os.unlink(SOCKET_PATH)
except OSError:
if os.path.exists(SOCKET_PATH):
raise
def _listen_on_socket():
_clean_up_existing_socket()
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.bind(SOCKET_PATH)
sock.listen(1)
notify('Dusty is listening for commands')
while True:
connection, client_address = sock.accept()
try:
while True:
data = connection.recv(1024)
if not data:
break
print data
finally:
connection.close()
def main():
notify('Dusty initializing...')
_configure_logging()
preflight_check()
_listen_on_socket()
if __name__ == '__main__':
main()
| Set up a Unix socket we can use for input | Set up a Unix socket we can use for input
| Python | mit | gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty |
9a74f11d4adfafbddec2e86251ecef17c4196bf2 | tests/test_suite.py | tests/test_suite.py | #! /usr/bin/env python
from __future__ import absolute_import
import unittest
from . import unittest_neos
from . import unittest_sedumi_writer
def main():
""" The main function.
"""
loader = unittest.TestLoader()
suite = unittest.TestSuite()
suite.addTest(loader.loadTestsFromModule(unittest_neos))
suite.addTest(loader.loadTestsFromModule(unittest_sedumi_writer))
unittest.TextTestRunner(verbosity=2).run(suite)
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
print "Test canceled."
| #! /usr/bin/env python
""" Test suite.
"""
from __future__ import absolute_import
import sys
import unittest
from . import unittest_neos
from . import unittest_sedumi_writer
def main():
""" The main function.
Returns:
True if all tests are successful.
"""
loader = unittest.TestLoader()
suite = unittest.TestSuite()
suite.addTest(loader.loadTestsFromModule(unittest_neos))
suite.addTest(loader.loadTestsFromModule(unittest_sedumi_writer))
res = unittest.TextTestRunner(verbosity=2).run(suite)
return res.wasSuccessful()
if __name__ == "__main__":
try:
sys.exit(0 if main() else 1)
except KeyboardInterrupt:
print "Test canceled."
sys.exit(-1)
| Fix a bug to return error status code when tests are failed. | Fix a bug to return error status code when tests are failed.
| Python | mit | TrishGillett/pysdpt3glue,discardthree/PySDPT3glue,TrishGillett/pysdpt3glue,discardthree/PySDPT3glue,TrishGillett/pysdpt3glue |
6430785e60fcef9bbac3cf4e7c70981f5af6affa | fluent_contents/plugins/sharedcontent/models.py | fluent_contents/plugins/sharedcontent/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField
class SharedContent(TranslatableModel):
"""
The parent hosting object for shared content
"""
translations = TranslatedFields(
title = models.CharField(_("Title"), max_length=200)
)
slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
class Meta:
verbose_name = _("Shared content")
verbose_name_plural = _("Shared content")
def __unicode__(self):
return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
| from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField, ContentItemRelation
class SharedContent(TranslatableModel):
"""
The parent hosting object for shared content
"""
translations = TranslatedFields(
title = models.CharField(_("Title"), max_length=200)
)
slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
# Adding the reverse relation for ContentItem objects
# causes the admin to list these objects when moving the shared content
contentitem_set = ContentItemRelation()
class Meta:
verbose_name = _("Shared content")
verbose_name_plural = _("Shared content")
def __unicode__(self):
return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
| Add ContentItemRelation to SharedContent model | Add ContentItemRelation to SharedContent model
Displays objects in the admin delete screen.
| Python | apache-2.0 | jpotterm/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,ixc/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents,django-fluent/django-fluent-contents,pombredanne/django-fluent-contents,jpotterm/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents |
fc21802b68cf9a907218dab5b0e22cd8f1dc75d0 | djcelery/backends/database.py | djcelery/backends/database.py | from celery.backends.base import BaseDictBackend
from djcelery.models import TaskMeta, TaskSetMeta
class DatabaseBackend(BaseDictBackend):
"""The database backends. Using Django models to store task metadata."""
def _store_result(self, task_id, result, status, traceback=None):
"""Store return value and status of an executed task."""
TaskMeta.objects.store_result(task_id, result, status,
traceback=traceback)
return result
def _save_taskset(self, taskset_id, result):
"""Store the result of an executed taskset."""
TaskSetMeta.objects.store_result(taskset_id, result)
return result
def _get_task_meta_for(self, task_id):
"""Get task metadata for a task by id."""
meta = TaskMeta.objects.get_task(task_id)
if meta:
return meta.to_dict()
def _restore_taskset(self, taskset_id):
"""Get taskset metadata for a taskset by id."""
meta = TaskSetMeta.objects.restore_taskset(taskset_id)
if meta:
return meta.to_dict()
def cleanup(self):
"""Delete expired metadata."""
TaskMeta.objects.delete_expired()
TaskSetMeta.objects.delete_expired()
| from celery.backends.base import BaseDictBackend
from djcelery.models import TaskMeta, TaskSetMeta
class DatabaseBackend(BaseDictBackend):
"""The database backends. Using Django models to store task metadata."""
TaskModel = TaskMeta
TaskSetModel = TaskSetMeta
def _store_result(self, task_id, result, status, traceback=None):
"""Store return value and status of an executed task."""
self.TaskModel._default_manager.store_result(task_id, result, status,
traceback=traceback)
return result
def _save_taskset(self, taskset_id, result):
"""Store the result of an executed taskset."""
self.TaskModel._default_manager.store_result(taskset_id, result)
return result
def _get_task_meta_for(self, task_id):
"""Get task metadata for a task by id."""
meta = self.TaskModel._default_manager.get_task(task_id)
if meta:
return meta.to_dict()
def _restore_taskset(self, taskset_id):
"""Get taskset metadata for a taskset by id."""
meta = self.TaskSetModel._default_manager.restore_taskset(taskset_id)
if meta:
return meta.to_dict()
def cleanup(self):
"""Delete expired metadata."""
for model in self.TaskModel, self.TaskSetModel:
model._default_manager.delete_expired()
| Make it possible to override the models used to store task/taskset state | DatabaseBackend: Make it possible to override the models used to store task/taskset state
| Python | bsd-3-clause | Amanit/django-celery,kanemra/django-celery,axiom-data-science/django-celery,celery/django-celery,alexhayes/django-celery,digimarc/django-celery,tkanemoto/django-celery,iris-edu-int/django-celery,CloudNcodeInc/django-celery,Amanit/django-celery,CloudNcodeInc/django-celery,iris-edu-int/django-celery,CloudNcodeInc/django-celery,celery/django-celery,planorama/django-celery,nadios/django-celery,ask/django-celery,nadios/django-celery,georgewhewell/django-celery,iris-edu-int/django-celery,celery/django-celery,digimarc/django-celery,planorama/django-celery,georgewhewell/django-celery,ask/django-celery,digimarc/django-celery,tkanemoto/django-celery,georgewhewell/django-celery,alexhayes/django-celery,Amanit/django-celery,tkanemoto/django-celery,kanemra/django-celery,axiom-data-science/django-celery,axiom-data-science/django-celery,kanemra/django-celery |
370676a21ff43ae25b75b890870613ebd6dbcf03 | topiary/__init__.py | topiary/__init__.py | import commandline_args
from .mutant_epitope_predictor import MutantEpitopePredictor
from .epitope_helpers import (
epitopes_to_dataframe,
epitopes_to_csv
)
from .predict_epitopes import predict_epitopes, predict_epitopes_from_args
__all__ = [
"MutantEpitopePredictor",
"commandline_args",
"epitopes_to_dataframe",
"epitopes_to_csv",
"predict_epitopes",
"predict_epitopes_from_args",
]
| import .commandline_args
from .mutant_epitope_predictor import MutantEpitopePredictor
from .epitope_helpers import (
epitopes_to_dataframe,
epitopes_to_csv
)
from .predict_epitopes import predict_epitopes, predict_epitopes_from_args
__all__ = [
"MutantEpitopePredictor",
"commandline_args",
"epitopes_to_dataframe",
"epitopes_to_csv",
"predict_epitopes",
"predict_epitopes_from_args",
]
| Add relative import to fix Python3 tests | Add relative import to fix Python3 tests
| Python | apache-2.0 | hammerlab/topiary,hammerlab/topiary |
97535245f7da3d7e54d64dc384d6cd81caa9a689 | tests/test_story.py | tests/test_story.py | from py101 import Story
from py101 import variables
from py101 import lists
import unittest
class TestStory(unittest.TestCase):
def test_name(self):
self.assertEqual(Story().name, 'py101', "name should be py101")
class TestAdventureVariables(unittest.TestCase):
good_solution = """
myinteger = 4
mystring = 'Python String Here'
print(myinteger)
print(mystring)
"""
def test_solution(self):
test = variables.TestOutput(self.good_solution)
test.setUp()
try:
test.runTest()
finally:
test.tearDown()
class TestAdventureLists(unittest.TestCase):
good_solution = """
languages = ["ADA", "Pascal", "Fortran", "Smalltalk"]
print(languages)
"""
def test_solution(self):
test = lists.TestOutput(self.good_solution)
test.setUp()
try:
test.runTest()
finally:
test.tearDown()
| import py101
import py101.boilerplate
import py101.introduction
import py101.lists
import py101.variables
import unittest
class TestStory(unittest.TestCase):
def test_name(self):
self.assertEqual(py101.Story().name, 'py101', "name should be py101")
class AdventureData(object):
def __init__(self, test_module, good_solution):
self.module = test_module
self.good_solution = good_solution
class TestAdventures(unittest.TestCase):
adventures = [
AdventureData(
py101.boilerplate,
""
),
AdventureData(
py101.introduction,
"""print('Hello World')"""
),
AdventureData(
py101.variables,
"""myinteger = 4; mystring = 'Python String Here'; print(myinteger); print(mystring)"""
),
AdventureData(
py101.lists,
"""languages = ["ADA", "Pascal", "Fortran", "Smalltalk"]; print(languages)"""
)
]
def test_solution(self):
for adventure in self.adventures:
with self.subTest(adventure=adventure.module.__name__):
test = adventure.module.TestOutput(adventure.good_solution)
test.setUp()
try:
test.runTest()
finally:
test.tearDown()
| Refactor tests to remove duplicate code | Refactor tests to remove duplicate code
| Python | mit | sophilabs/py101 |
6db806c28f5e6e207e4f3a690f57f09ee1e3e7dd | tilemill/tilemill.py | tilemill/tilemill.py | #!/usr/bin/env python
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.escape
import tornado.template
from tornado.options import define, options
define("port", default=8888, help="run on the given port", type=int)
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
class ProjectHandler(tornado.web.RequestHandler):
def get(self, project_id):
self.write("You requested the project " + project_id)
def main():
tornado.options.parse_command_line()
application = tornado.web.Application([
(r"/", MainHandler),
(r"/(\w+)", ProjectHandler),
])
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.escape
import tornado.template
from tornado.options import define, options
define("port", default=8888, help="run on the given port", type=int)
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
class ProjectHandler(tornado.web.RequestHandler):
def get(self, project_id):
self.write("You requested the project " + project_id)
def main():
tornado.options.parse_command_line()
application = tornado.web.Application([
(r"/", MainHandler),
(r"/(\w+)", ProjectHandler),
])
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
| Indent properly, get helloworld working | Indent properly, get helloworld working
| Python | bsd-3-clause | fxtentacle/tilemill,nyimbi/tilemill,MappingKat/tilemill,paulovieira/tilemill-clima,tizzybec/tilemill,MappingKat/tilemill,fxtentacle/tilemill,Teino1978-Corp/Teino1978-Corp-tilemill,tizzybec/tilemill,makinacorpus/tilemill,paulovieira/tilemill-clima,paulovieira/tilemill-clima,Zhao-Qi/tilemill,Zhao-Qi/tilemill,tizzybec/tilemill,fxtentacle/tilemill,isaacs/tilemill,mbrukman/tilemill,mbrukman/tilemill,Zhao-Qi/tilemill,tilemill-project/tilemill,mbrukman/tilemill,makinacorpus/tilemill,florianf/tileoven,tizzybec/tilemill,tizzybec/tilemill,nyimbi/tilemill,fxtentacle/tilemill,mbrukman/tilemill,Zhao-Qi/tilemill,MappingKat/tilemill,Teino1978-Corp/Teino1978-Corp-tilemill,Teino1978-Corp/Teino1978-Corp-tilemill,nyimbi/tilemill,nyimbi/tilemill,isaacs/tilemill,florianf/tileoven,isaacs/tilemill,nyimbi/tilemill,paulovieira/tilemill-clima,MappingKat/tilemill,Zhao-Qi/tilemill,tilemill-project/tilemill,Teino1978-Corp/Teino1978-Corp-tilemill,MappingKat/tilemill,Teino1978-Corp/Teino1978-Corp-tilemill,florianf/tileoven,fxtentacle/tilemill,paulovieira/tilemill-clima,mbrukman/tilemill,tilemill-project/tilemill |
510afd0c93c333e86511fb6f6b9e96a434d54d00 | zerver/migrations/0174_userprofile_delivery_email.py | zerver/migrations/0174_userprofile_delivery_email.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-05 17:57
from __future__ import unicode_literals
from django.db import migrations, models
from django.apps import apps
from django.db.models import F
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def copy_email_field(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model('zerver', 'UserProfile')
UserProfile.objects.all().update(delivery_email=F('email'))
class Migration(migrations.Migration):
dependencies = [
('zerver', '0173_support_seat_based_plans'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='delivery_email',
field=models.EmailField(db_index=True, default='', max_length=254),
preserve_default=False,
),
migrations.RunPython(copy_email_field,
reverse_code=migrations.RunPython.noop),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-07-05 17:57
from __future__ import unicode_literals
from django.db import migrations, models
from django.apps import apps
from django.db.models import F
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def copy_email_field(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model('zerver', 'UserProfile')
UserProfile.objects.all().update(delivery_email=F('email'))
class Migration(migrations.Migration):
atomic = False
dependencies = [
('zerver', '0173_support_seat_based_plans'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='delivery_email',
field=models.EmailField(db_index=True, default='', max_length=254),
preserve_default=False,
),
migrations.RunPython(copy_email_field,
reverse_code=migrations.RunPython.noop),
]
| Disable atomic for delivery_email migration. | migrations: Disable atomic for delivery_email migration.
I'm not sure theoretically why this should be required only for some
installations, but these articles all suggest the root problem is
doing these two migrations together atomically (creating the field and
setting a value for it), so the right answer is to declare the
migration as not atomic:
https://stackoverflow.com/questions/12838111/django-db-migrations-cannot-alter-table-because-it-has-pending-trigger-events
https://confluence.atlassian.com/confkb/upgrade-failed-with-the-error-message-error-cannot-alter-table-content-because-it-has-pending-trigger-events-747606853.html
| Python | apache-2.0 | dhcrzf/zulip,zulip/zulip,zulip/zulip,showell/zulip,dhcrzf/zulip,hackerkid/zulip,jackrzhang/zulip,eeshangarg/zulip,tommyip/zulip,brainwane/zulip,tommyip/zulip,synicalsyntax/zulip,tommyip/zulip,shubhamdhama/zulip,rht/zulip,dhcrzf/zulip,timabbott/zulip,shubhamdhama/zulip,rht/zulip,brainwane/zulip,hackerkid/zulip,synicalsyntax/zulip,andersk/zulip,dhcrzf/zulip,brainwane/zulip,timabbott/zulip,dhcrzf/zulip,punchagan/zulip,rht/zulip,zulip/zulip,shubhamdhama/zulip,rishig/zulip,kou/zulip,showell/zulip,rht/zulip,jackrzhang/zulip,zulip/zulip,hackerkid/zulip,punchagan/zulip,andersk/zulip,rishig/zulip,kou/zulip,punchagan/zulip,brainwane/zulip,dhcrzf/zulip,rishig/zulip,synicalsyntax/zulip,timabbott/zulip,hackerkid/zulip,jackrzhang/zulip,timabbott/zulip,timabbott/zulip,zulip/zulip,jackrzhang/zulip,hackerkid/zulip,kou/zulip,kou/zulip,shubhamdhama/zulip,brainwane/zulip,kou/zulip,rishig/zulip,zulip/zulip,synicalsyntax/zulip,zulip/zulip,eeshangarg/zulip,shubhamdhama/zulip,andersk/zulip,timabbott/zulip,rishig/zulip,brainwane/zulip,shubhamdhama/zulip,rht/zulip,punchagan/zulip,punchagan/zulip,timabbott/zulip,brainwane/zulip,eeshangarg/zulip,showell/zulip,showell/zulip,tommyip/zulip,rishig/zulip,punchagan/zulip,jackrzhang/zulip,eeshangarg/zulip,punchagan/zulip,rht/zulip,jackrzhang/zulip,andersk/zulip,eeshangarg/zulip,tommyip/zulip,hackerkid/zulip,tommyip/zulip,synicalsyntax/zulip,rishig/zulip,synicalsyntax/zulip,kou/zulip,andersk/zulip,andersk/zulip,kou/zulip,andersk/zulip,synicalsyntax/zulip,eeshangarg/zulip,showell/zulip,dhcrzf/zulip,showell/zulip,eeshangarg/zulip,tommyip/zulip,shubhamdhama/zulip,hackerkid/zulip,showell/zulip,jackrzhang/zulip,rht/zulip |
80ff7429d3a68f99e66cd4cfae9478b3f34e435c | exhibitions/models.py | exhibitions/models.py | from django.db import models
class Exhibition(models.Model):
title = models.CharField( "Название", max_length=1024 )
begin = models.DateField( "Дата начала" )
end = models.DateField( "Дата окончания" )
showroom = models.CharField( "Выставочный зал", max_length=1024 )
showroom_url = models.CharField( "Ссылка", blank=True, null=True, max_length=255 )
image = models.ImageField( "Картинка", blank=True, null=True, max_length=500, upload_to="images/exhibitions/" )
class Meta:
ordering = ['-begin']
verbose_name = 'Выставка'
verbose_name_plural = 'Выставки'
def save(self):
if self.showroom_url:
if self.showroom_url[:4] != 'http':
self.showroom_url = 'http://' + self.showroom_url
super(Exhibition, self).save()
def __str__(self):
return (self.title)
| from django.db import models
class Exhibition(models.Model):
title = models.CharField( "Название", max_length=1024 )
begin = models.DateField( "Дата начала" )
end = models.DateField( "Дата окончания" )
showroom = models.CharField( "Выставочный зал", max_length=1024 )
showroom_url = models.CharField( "Ссылка", blank=True, null=True, max_length=255 )
image = models.ImageField( "Картинка", blank=True, null=True, max_length=500, upload_to="images/exhibitions/" )
class Meta:
ordering = ['-end']
verbose_name = 'Выставка'
verbose_name_plural = 'Выставки'
def save(self):
if self.showroom_url:
if self.showroom_url[:4] != 'http':
self.showroom_url = 'http://' + self.showroom_url
super(Exhibition, self).save()
def __str__(self):
return (self.title)
| Order exhibition by end date | Order exhibition by end date
| Python | mit | hombit/olgart,hombit/olgart,hombit/olgart,hombit/olgart |
ad477285f4458145bca378b74dcb8cfe3abeaf06 | froide/bounce/apps.py | froide/bounce/apps.py | import json
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class BounceConfig(AppConfig):
name = 'froide.bounce'
verbose_name = _('Bounce')
def ready(self):
from froide.account import account_canceled
from froide.account.export import registry
account_canceled.connect(cancel_user)
registry.register(export_user_data)
def cancel_user(sender, user=None, **kwargs):
from .models import Bounce
if user is None:
return
Bounce.objects.filter(user=user).delete()
def export_user_data(user):
from .models import Bounce
bounces = Bounce.objects.filter(user=user)
if not bounces:
return
yield ('bounces.json', json.dumps([
{
'last_update': (
b.last_update.isoformat() if b.last_update else None
),
'bounces': b.bounces,
'email': b.email,
}
for b in bounces]).encode('utf-8')
)
| import json
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class BounceConfig(AppConfig):
name = 'froide.bounce'
verbose_name = _('Bounce')
def ready(self):
from froide.account import account_canceled
from froide.account.export import registry
from froide.helper.email_sending import mail_middleware_registry
account_canceled.connect(cancel_user)
registry.register(export_user_data)
mail_middleware_registry.register(UnsubscribeReferenceMailMiddleware())
def cancel_user(sender, user=None, **kwargs):
from .models import Bounce
if user is None:
return
Bounce.objects.filter(user=user).delete()
def export_user_data(user):
from .models import Bounce
bounces = Bounce.objects.filter(user=user)
if not bounces:
return
yield ('bounces.json', json.dumps([
{
'last_update': (
b.last_update.isoformat() if b.last_update else None
),
'bounces': b.bounces,
'email': b.email,
}
for b in bounces]).encode('utf-8')
)
class UnsubscribeReferenceMailMiddleware:
'''
Moves unsubscribe_reference from mail render context
to email sending kwargs
'''
def enhance_email_kwargs(self, mail_intent, context, email_kwargs):
unsubscribe_reference = context.get('unsubscribe_reference')
if unsubscribe_reference is None:
return
return {
'unsubscribe_reference': unsubscribe_reference
}
| Add unsubscribe reference to mails through context | Add unsubscribe reference to mails through context | Python | mit | stefanw/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide |
5d67def658f0b1bd206fdefe100d32269f1eb34e | falcom/api/uri/api_querier.py | falcom/api/uri/api_querier.py | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from time import sleep
class APIQuerier:
def __init__ (self, uri, url_opener, sleep_time=300, max_tries=0):
self.uri = uri
self.url_opener = url_opener
self.sleep_time = sleep_time
self.max_tries = max_tries
def get (self, **kwargs):
class SpecialNull: pass
result = SpecialNull
i = 1
while result is SpecialNull:
try:
result = self.__open_uri(kwargs)
except ConnectionError:
sleep(self.sleep_time)
if i == self.max_tries:
result = b""
else:
i += 1
return result
@staticmethod
def utf8 (str_or_bytes):
if isinstance(str_or_bytes, bytes):
return str_or_bytes.decode("utf_8")
else:
return str_or_bytes
def __open_uri (self, kwargs):
with self.url_opener(self.uri(**kwargs)) as response:
result = self.utf8(response.read())
return result
| # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from time import sleep
class APIQuerier:
def __init__ (self, uri, url_opener, sleep_time=300, max_tries=0):
self.uri = uri
self.url_opener = url_opener
self.sleep_time = sleep_time
self.max_tries = max_tries
def get (self, **kwargs):
class SpecialNull: pass
self.result = SpecialNull
self.attempt_number = 1
while self.result is SpecialNull:
try:
self.result = self.__open_uri(kwargs)
except ConnectionError:
sleep(self.sleep_time)
if self.attempt_number == self.max_tries:
self.result = b""
else:
self.attempt_number += 1
return self.result
@staticmethod
def utf8 (str_or_bytes):
if isinstance(str_or_bytes, bytes):
return str_or_bytes.decode("utf_8")
else:
return str_or_bytes
def __open_uri (self, kwargs):
with self.url_opener(self.uri(**kwargs)) as response:
result = self.utf8(response.read())
return result
| Replace local variables with class variables | Replace local variables with class variables
| Python | bsd-3-clause | mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation |
50fa164c4b09845bfa262c2f6959a3c5dfd6f76b | fluentcheck/classes/is_cls.py | fluentcheck/classes/is_cls.py | from typing import Any
from ..assertions_is.booleans import __IsBool
from ..assertions_is.collections import __IsCollections
from ..assertions_is.dicts import __IsDicts
from ..assertions_is.emptiness import __IsEmptiness
from ..assertions_is.geo import __IsGeo
from ..assertions_is.numbers import __IsNumbers
from ..assertions_is.sequences import __IsSequences
from ..assertions_is.strings import __IsStrings
from ..assertions_is.types import __IsTypes
from ..assertions_is.uuids import __IsUUIDs
class Is(__IsBool, __IsCollections, __IsDicts, __IsEmptiness, __IsGeo,
__IsNumbers, __IsSequences, __IsStrings, __IsTypes, __IsUUIDs):
def __init__(self, object_under_test: Any):
super().__init__(object_under_test)
| from typing import Any
from ..assertions_is.booleans import __IsBool
from ..assertions_is.collections import __IsCollections
from ..assertions_is.dicts import __IsDicts
from ..assertions_is.emptiness import __IsEmptiness
from ..assertions_is.geo import __IsGeo
from ..assertions_is.numbers import __IsNumbers
from ..assertions_is.sequences import __IsSequences
from ..assertions_is.strings import __IsStrings
from ..assertions_is.types import __IsTypes
from ..assertions_is.uuids import __IsUUIDs
class Is(__IsBool, __IsCollections, __IsDicts, __IsEmptiness, __IsGeo,
__IsNumbers, __IsSequences, __IsStrings, __IsTypes, __IsUUIDs):
pass
| Remove methods with unnecessary super delegation. | Remove methods with unnecessary super delegation. | Python | mit | csparpa/check |
a15d2956cfd48e0d46d5d4cf567af05641b4c8e6 | yunity/api/utils.py | yunity/api/utils.py | from django.http import JsonResponse
class ApiBase(object):
@classmethod
def success(cls, data, status=200):
"""
:type data: dict
:type status: int
:rtype JsonResponse
"""
return JsonResponse(data, status=status)
@classmethod
def error(cls, error, status=400):
"""
:type error: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'error': error}, status=status)
| from functools import wraps
from json import loads as load_json
from django.http import JsonResponse
class ApiBase(object):
@classmethod
def validation_failure(cls, message, status=400):
"""
:type message: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'validation_failure': message}, status=status)
@classmethod
def success(cls, data, status=200):
"""
:type data: dict
:type status: int
:rtype JsonResponse
"""
return JsonResponse(data, status=status)
@classmethod
def error(cls, error, status=400):
"""
:type error: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'error': error}, status=status)
def json_request(expected_keys=None):
"""Decorator to validate that a request is in JSON and (optionally) has some specific keys in the JSON object.
"""
expected_keys = expected_keys or []
def decorator(func):
@wraps(func)
def wrapper(cls, request, *args, **kwargs):
data = load_json(request.body.decode('utf8'))
for expected_key in expected_keys:
value = data.get(expected_key)
if not value:
return ApiBase.validation_failure('missing key: {}'.format(expected_key))
return func(cls, data, request, *args, **kwargs)
return wrapper
return decorator
| Implement JSON request validation decorator | Implement JSON request validation decorator
with @NerdyProjects
| Python | agpl-3.0 | yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core |
798bd79ddc2e9b212a82a7a8455428b3d32cfab4 | bin/pymodules/apitest/jscomponent.py | bin/pymodules/apitest/jscomponent.py | import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
| import json
import rexviewer as r
import naali
import urllib2
from componenthandler import DynamiccomponentHandler
class JavascriptHandler(DynamiccomponentHandler):
GUINAME = "Javascript Handler"
def __init__(self):
DynamiccomponentHandler.__init__(self)
self.jsloaded = False
def onChanged(self):
print "-----------------------------------"
ent = r.getEntity(self.comp.GetParentEntityId())
datastr = self.comp.GetAttribute()
#print "GetAttr got:", datastr
data = json.loads(datastr)
js_src = data.get('js_src', None)
if not self.jsloaded and js_src is not None:
jscode = self.loadjs(js_src)
print jscode
ctx = {
#'entity'/'this': self.entity
'component': self.comp
}
try:
ent.touchable
except AttributeError:
pass
else:
ctx['touchable'] = ent.touchable
try:
ent.placeable
except:
pass
else:
ctx['placeable'] = ent.placeable
naali.runjs(jscode, ctx)
print "-- done with js"
self.jsloaded = True
def loadjs(self, srcurl):
print "js source url:", srcurl
f = urllib2.urlopen(srcurl)
return f.read()
| Add placeable to javascript context | Add placeable to javascript context
| Python | apache-2.0 | BogusCurry/tundra,antont/tundra,pharos3d/tundra,antont/tundra,AlphaStaxLLC/tundra,jesterKing/naali,pharos3d/tundra,antont/tundra,pharos3d/tundra,BogusCurry/tundra,BogusCurry/tundra,antont/tundra,realXtend/tundra,BogusCurry/tundra,AlphaStaxLLC/tundra,jesterKing/naali,BogusCurry/tundra,pharos3d/tundra,antont/tundra,pharos3d/tundra,jesterKing/naali,AlphaStaxLLC/tundra,jesterKing/naali,jesterKing/naali,antont/tundra,realXtend/tundra,BogusCurry/tundra,jesterKing/naali,pharos3d/tundra,antont/tundra,AlphaStaxLLC/tundra,jesterKing/naali,AlphaStaxLLC/tundra,AlphaStaxLLC/tundra,realXtend/tundra,realXtend/tundra,realXtend/tundra,realXtend/tundra |
31f887979d2129bec80311e94b91cf0f77772f26 | zou/app/utils/fs.py | zou/app/utils/fs.py | import os
import shutil
import errno
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def rm_rf(path):
if os.path.exists(path):
shutil.rmtree(path)
| import os
import shutil
import errno
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def rm_rf(path):
if os.path.exists(path):
shutil.rmtree(path)
def copyfile(src, dest):
shutil.copyfile(src, dest)
| Add a new copy file util function | Add a new copy file util function
| Python | agpl-3.0 | cgwire/zou |
463fa89c143cd4493ea3704f177c5aba0ebb2af7 | idiokit/xmpp/_resolve.py | idiokit/xmpp/_resolve.py | from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
def _add_port_and_count(port):
count = 0
while True:
try:
family, ip = yield idiokit.next()
except StopIteration:
idiokit.stop(count)
yield idiokit.send(family, ip, port)
count += 1
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port_and_count(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except dns.ResponseError:
srv_records = []
srv_count = 0
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
srv_count += yield _resolve_host(srv_record.target, port)
if srv_count == 0:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
| from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
def _add_port(port):
while True:
family, ip = yield idiokit.next()
yield idiokit.send(family, ip, port)
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except (dns.ResponseError, dns.DNSTimeout):
srv_records = []
if not srv_records:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
return
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
yield _resolve_host(srv_record.target, port)
| Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data. | idiokit.xmpp: Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data.
| Python | mit | abusesa/idiokit |
b955c4bed4024681f41076977605111abe4577e4 | skan/io.py | skan/io.py | import pandas as pd
def _params_dict_to_dataframe(d):
s = pd.Series(d)
s.index.name = 'parameters'
f = pd.DataFrame({'values': s})
return f
def write_excel(filename, **kwargs):
"""Write data tables to an Excel file, using kwarg names as sheet names.
Parameters
----------
filename : str
The filename to write to.
kwargs : dict
Mapping from sheet names to data.
"""
writer = pd.ExcelWriter(filename)
for sheet_name, obj in kwargs.items():
if isinstance(obj, dict):
obj = _params_dict_to_dataframe(obj)
if isinstance(obj, pd.DataFrame):
obj.to_excel(writer, sheet_name=sheet_name)
writer.save()
writer.close()
| import pandas as pd
def _params_dict_to_dataframe(d):
s = pd.Series(d)
s.index.name = 'parameters'
f = pd.DataFrame({'values': s})
return f
def write_excel(filename, **kwargs):
"""Write data tables to an Excel file, using kwarg names as sheet names.
Parameters
----------
filename : str
The filename to write to.
kwargs : dict
Mapping from sheet names to data.
"""
writer = pd.ExcelWriter(filename)
for sheet_name, obj in kwargs.items():
if isinstance(obj, dict):
obj = _params_dict_to_dataframe(obj)
if isinstance(obj, pd.DataFrame):
obj.to_excel(writer, sheetname=sheet_name)
writer.save()
writer.close()
| Update deprecated excel kwarg in pandas | Update deprecated excel kwarg in pandas
| Python | bsd-3-clause | jni/skan |
7e71e21734abb2b12e309ea37910c90f7b837651 | go/base/tests/test_decorators.py | go/base/tests/test_decorators.py | """Test for go.base.decorators."""
from go.vumitools.tests.helpers import djangotest_imports
with djangotest_imports(globals()):
from go.base.tests.helpers import GoDjangoTestCase
from go.base.decorators import render_exception
from django.template.response import TemplateResponse
class CatchableDummyError(Exception):
"""Error that will be caught by DummyView.post."""
class UncatchableDummyError(Exception):
"""Error that will not be caught by DummyView.post."""
class DummyView(object):
@render_exception(CatchableDummyError, 400, "Meep.")
def post(self, request, err=None):
if err is None:
return "Success"
raise err
class TestRenderException(GoDjangoTestCase):
def test_no_exception(self):
d = DummyView()
self.assertEqual(d.post("request"), "Success")
def test_expected_exception(self):
d = DummyView()
self.assertRaises(
UncatchableDummyError, d.post, "request", UncatchableDummyError())
def test_other_exception(self):
d = DummyView()
response = d.post("request", CatchableDummyError("foo"))
self.assertTrue(isinstance(response, TemplateResponse))
self.assertEqual(response.template_name, 'error.html')
self.assertEqual(response.status_code, 400)
| """Test for go.base.decorators."""
from go.vumitools.tests.helpers import djangotest_imports
with djangotest_imports(globals()):
from go.base.tests.helpers import GoDjangoTestCase
from go.base.decorators import render_exception
from django.template.response import TemplateResponse
class CatchableDummyError(Exception):
"""Error that will be caught by DummyView.post."""
class UncatchableDummyError(Exception):
"""Error that will not be caught by DummyView.post."""
class DummyView(object):
@render_exception(CatchableDummyError, 400, "Meep.")
def post(self, request, err=None):
if err is None:
return "Success"
raise err
class TestRenderException(GoDjangoTestCase):
def test_no_exception(self):
d = DummyView()
self.assertEqual(d.post("request"), "Success")
def test_expected_exception(self):
d = DummyView()
self.assertRaises(
UncatchableDummyError, d.post, "request", UncatchableDummyError())
def test_other_exception(self):
d = DummyView()
response = d.post("request", CatchableDummyError("foo"))
self.assertTrue(isinstance(response, TemplateResponse))
self.assertEqual(response.template_name, 'error.html')
self.assertEqual(response.status_code, 400)
| Move Django-specific pieces into the django_imports block. | Move Django-specific pieces into the django_imports block.
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go |
a50aeb81a588f8297f194d793cb8f8cf0e15a411 | lambda/list_member.py | lambda/list_member.py | from __future__ import print_function
from enum import IntEnum
import yaml
MemberFlag = IntEnum('MemberFlag', [
'digest',
'digest2',
'modPost',
'preapprove',
'noPost',
'diagnostic',
'moderator',
'myopic',
'superadmin',
'admin',
'protected',
'ccErrors',
'reports',
'vacation',
'ackPost',
'echoPost',
'hidden',
])
def member_flag_representer(dumper, data):
return dumper.represent_scalar(u'!flag', data.name)
yaml.add_representer(MemberFlag, member_flag_representer)
def member_flag_constructor(loader, node):
value = loader.construct_scalar(node)
return MemberFlag[value]
yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor)
class ListMember(yaml.YAMLObject):
yaml_tag = u'!Member'
yaml_loader = yaml.SafeLoader
def __init__(self, address, *args, **kwargs):
self.address = address
self.flags = set(a for a in args if isinstance(a, MemberFlag))
def __repr__(self):
return u'{}({}, flags: {})'.format(
self.__class__.__name__,
self.address,
', '.join(
map(lambda f: f.name,
self.flags)
),
)
| from __future__ import print_function
from enum import IntEnum
import yaml
MemberFlag = IntEnum('MemberFlag', [
'digest',
'digest2',
'modPost',
'preapprove',
'noPost',
'diagnostic',
'moderator',
'myopic',
'superadmin',
'admin',
'protected',
'ccErrors',
'reports',
'vacation',
'ackPost',
'echoPost',
'hidden',
])
def member_flag_representer(dumper, data):
return dumper.represent_scalar(u'!flag', data.name)
yaml.add_representer(MemberFlag, member_flag_representer)
def member_flag_constructor(loader, node):
value = loader.construct_scalar(node)
return MemberFlag[value]
yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor)
class ListMember(yaml.YAMLObject):
yaml_tag = u'!Member'
yaml_loader = yaml.SafeLoader
def __init__(self, address, *args, **kwargs):
if isinstance(address, unicode):
# Attempt to down-convert unicode-string addresses to plain strings
try:
address = str(address)
except UnicodeEncodeError:
pass
self.address = address
self.flags = set(a for a in args if isinstance(a, MemberFlag))
def __repr__(self):
return u'{}({}, flags: {})'.format(
self.__class__.__name__,
self.address,
', '.join(
map(lambda f: f.name,
self.flags)
),
)
| Convert list member addresses to non-unicode strings when possible. | Convert list member addresses to non-unicode strings when possible.
| Python | mit | ilg/LambdaMLM |
bd59db76bb81218d04224e44773eae9d3d9dfc21 | rplugin/python3/denite/source/toc.py | rplugin/python3/denite/source/toc.py | # -*- coding: utf-8 -*-
from .base import Base
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = 'vimtex_toc'
self.kind = 'file'
@staticmethod
def format_number(n):
if not n or n['frontmatter'] or n['backmatter']:
return ''
num = [str(n[k]) for k in [
'part',
'chapter',
'section',
'subsection',
'subsubsection',
'subsubsubsection'] if n[k] is not 0]
if n['appendix']:
num[0] = chr(int(num[0]) + 64)
fnum = '.'.join(num)
return fnum
@staticmethod
def create_candidate(e, depth):
indent = (' ' * 2*(depth - e['level']) + e['title'])[:60]
number = Source.format_number(e['number'])
abbr = '{:65}{:10}'.format(indent, number)
return {'word': e['title'],
'abbr': abbr,
'action__path': e['file'],
'action__line': e.get('line', 0)}
def gather_candidates(self, context):
entries = self.vim.eval('vimtex#toc#get_entries()')
depth = max([e['level'] for e in entries])
return [Source.create_candidate(e, depth) for e in entries]
| # -*- coding: utf-8 -*-
from .base import Base
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = 'vimtex_toc'
self.kind = 'file'
@staticmethod
def format_number(n):
if not n or not type(n) is dict or n['frontmatter'] or n['backmatter']:
return ''
num = [str(n[k]) for k in [
'part',
'chapter',
'section',
'subsection',
'subsubsection',
'subsubsubsection'] if n[k] is not 0]
if n['appendix']:
num[0] = chr(int(num[0]) + 64)
fnum = '.'.join(num)
return fnum
@staticmethod
def create_candidate(e, depth):
indent = (' ' * 2*(int(depth) - int(e['level'])) + e['title'])[:60]
number = Source.format_number(e['number'])
abbr = '{:65}{:10}'.format(indent, number)
return {'word': e['title'],
'abbr': abbr,
'action__path': e['file'],
'action__line': e.get('line', 0)}
def gather_candidates(self, context):
entries = self.vim.eval('vimtex#toc#get_entries()')
depth = max([int(e['level']) for e in entries])
return [Source.create_candidate(e, depth) for e in entries]
| Fix Denite support for vim8. | Fix Denite support for vim8.
| Python | mit | lervag/vimtex,Aster89/vimtex,Aster89/vimtex,kmarius/vimtex,lervag/vimtex,kmarius/vimtex |
f4406d21546922363cd67f53d5697bc324306f2b | orders/views.py | orders/views.py | from django.http import HttpResponse
from django.shortcuts import render
from django.utils import timezone
from orders.models import Order
def order_details(request, order_pk):
return HttpResponse("Hello, world!")
def not_executed(request):
orders = Order.objects.filter(valid_until__gt=timezone.now())
return render(request, 'orders/not_executed.html', {'orders': orders})
def outdated(request):
orders = Order.objects.filter(valid_until__lte=timezone.now())
return render(request, 'orders/outdated.html', {'orders': orders})
def executed(request):
return render(request, 'orders/executed.html')
| from django.db.models import Sum
from django.db.models.query import QuerySet
from django.http import HttpResponse
from django.shortcuts import render
from django.utils import timezone
from orders.models import Order
def order_details(request, order_pk):
return HttpResponse("Hello, world!")
def not_executed(request):
orders = get_orders().filter(valid_until__gt=timezone.now(), sold_count=0)
return render(request, 'orders/not_executed.html', {'orders': orders})
def outdated(request):
orders = get_orders().filter(valid_until__lte=timezone.now(), sold_count=0)
return render(request, 'orders/outdated.html', {'orders': orders})
def executed(request):
orders = get_orders().exclude(sold_count=0)
return render(request, 'orders/executed.html', {'orders': orders})
def get_orders() -> QuerySet:
"""
The function returns QuerySet of Order model with all necessary values for displaying also selected/prefetched.
:return: the QuerySet of Order model
"""
return Order.objects.select_related('user').prefetch_related('books').annotate(sold_count=Sum('books__sold')) | Implement actual filtering (not) executed Orders | Implement actual filtering (not) executed Orders
| Python | agpl-3.0 | m4tx/egielda,m4tx/egielda,m4tx/egielda |
1443d24ad1548497b76eb9d90b5f79627cf81b0c | whoarder/whoarder.py | whoarder/whoarder.py | #!/usr/bin/python3
import argparse
if __name__ == '__main__':
# Boilerplate to allow running as script directly. Avoids error below:
# SystemError: Parent module '' not loaded, cannot perform relative import
# See http://stackoverflow.com/questions/2943847/
if __package__ is None:
import sys
import os
abspath = os.path.abspath(__file__)
parent_dir = os.path.dirname(os.path.dirname(abspath))
sys.path.insert(0, parent_dir)
from whoarder.clippings import Clippings
del sys, os
parser = argparse.ArgumentParser(description="whoarder converts Kindle \
'My Clippings.txt' files to more pleasant HTML.")
parser.add_argument('source',
help='Path to the source file, stored by Kindle in \
/Media/Kindle/documents/My Clippings.txt.')
parser.add_argument('destination',
help='Target HTML file. If omitted, a .html bearing \
the same name as the input .txt file will be used.',
nargs='?', default=None)
args = parser.parse_args()
clippings = Clippings(args.source, args.destination)
clippings.export_clippings()
print('Successfully wrote ' + clippings.dest + "\n")
| #!/usr/bin/env python3
import argparse
if __name__ == '__main__':
# Boilerplate to allow running as script directly. Avoids error below:
# SystemError: Parent module '' not loaded, cannot perform relative import
# See http://stackoverflow.com/questions/2943847/
if __package__ is None:
import sys
import os
abspath = os.path.abspath(__file__)
parent_dir = os.path.dirname(os.path.dirname(abspath))
sys.path.insert(0, parent_dir)
from whoarder.clippings import Clippings
del sys, os
parser = argparse.ArgumentParser(description="whoarder converts Kindle \
'My Clippings.txt' files to more pleasant HTML.")
parser.add_argument('source',
help='Path to the source file, stored by Kindle in \
/Media/Kindle/documents/My Clippings.txt.')
parser.add_argument('destination',
help='Target HTML file. If omitted, a .html bearing \
the same name as the input .txt file will be used.',
nargs='?', default=None)
args = parser.parse_args()
clippings = Clippings(args.source, args.destination)
clippings.export_clippings()
print('Successfully wrote ' + clippings.dest + "\n")
| Replace hashbang with /usr/bin/env python3 for better portability | Replace hashbang with /usr/bin/env python3 for better portability
| Python | mit | ronjouch/whoarder,rejuvyesh/whoarder,rejuvyesh/whoarder,ronjouch/whoarder |
a4d2782ad902bde5229def1b3de35107a3918800 | opps/article/views.py | opps/article/views.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
return 'channel/{0}.html'.format(self.kwargs['channel__long_slug'])
@property
def queryset(self):
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug'],
slug=self.kwargs['slug']).all()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from opps.article.models import Post
class OppsList(ListView):
context_object_name = "context"
@property
def template_name(self):
return 'channel/{0}.html'.format(self.kwargs['channel__long_slug'])
@property
def queryset(self):
if not self.kwargs.get('channel__long_slug'):
return Post.objects.filter(channel__homepage=True).all()
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug']).all()
class OppsDetail(DetailView):
context_object_name = "context"
@property
def template_name(self):
return 'article/{0}/{1}.html'.format(
self.kwargs['channel__long_slug'], self.kwargs['slug'])
@property
def queryset(self):
return Post.objects.filter(
channel__long_slug=self.kwargs['channel__long_slug'],
slug=self.kwargs['slug']).all()
| Fix queryset on entry home page (/) on list page | Fix queryset on entry home page (/) on list page
| Python | mit | YACOWS/opps,williamroot/opps,jeanmask/opps,williamroot/opps,opps/opps,opps/opps,YACOWS/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,opps/opps,opps/opps |
fc92c557232a3202e53c5d998183d91d5ed82809 | python/setup.py | python/setup.py | # -*- coding: utf-8 -*-
"""The setup script."""
from setuptools import setup, find_packages # noqa: H301
NAME = "looker_sdk"
VERSION = "0.1.3b20"
REQUIRES = [
"requests >= 2.22",
"attrs >= 20.1.0",
"cattrs >= 1.0.0",
"python-dateutil;python_version<'3.7'",
"typing-extensions;python_version<'3.8'",
]
setup(
author="Looker Data Sciences, Inc.",
author_email="[email protected]",
description="Looker API 3.1",
install_requires=REQUIRES,
license="MIT",
long_description=open("README.rst").read(),
long_description_content_type="text/x-rst",
keywords=["Looker", "Looker API", "looker_sdk", "Looker API 3.1"],
name=NAME,
package_data={"looker_sdk": ["py.typed", "looker_sdk/looker-sample.ini"]},
packages=find_packages(),
python_requires=">=3.6, <3.9",
url="https://pypi.python.org/pypi/looker_sdk",
version=VERSION,
)
| # -*- coding: utf-8 -*-
"""The setup script."""
from setuptools import setup, find_packages # noqa: H301
NAME = "looker_sdk"
VERSION = "0.1.3b20"
REQUIRES = [
"requests >= 2.22",
# Python 3.6
"attrs >= 18.2.0;python_version<'3.7'",
"cattrs < 1.1.0;python_version<'3.7'",
"python-dateutil;python_version<'3.7'",
# Python 3.7+
"attrs >= 20.1.0;python_version>='3.7'",
"cattrs >= 1.1.0;python_version>='3.7'",
"typing-extensions;python_version<'3.8'",
]
setup(
author="Looker Data Sciences, Inc.",
author_email="[email protected]",
description="Looker API 3.1",
install_requires=REQUIRES,
license="MIT",
long_description=open("README.rst").read(),
long_description_content_type="text/x-rst",
keywords=["Looker", "Looker API", "looker_sdk", "Looker API 3.1"],
name=NAME,
package_data={"looker_sdk": ["py.typed", "looker_sdk/looker-sample.ini"]},
packages=find_packages(),
python_requires=">=3.6, <3.9",
url="https://pypi.python.org/pypi/looker_sdk",
version=VERSION,
)
| Split attrs, cattrs versions for py36, py37 | Split attrs, cattrs versions for py36, py37
| Python | mit | looker-open-source/sdk-codegen,looker-open-source/sdk-codegen,looker-open-source/sdk-codegen,looker-open-source/sdk-codegen,looker-open-source/sdk-codegen,looker-open-source/sdk-codegen,looker-open-source/sdk-codegen,looker-open-source/sdk-codegen,looker-open-source/sdk-codegen |
888f6b07174943ba7f3b9d187348ceeebecc4a42 | utils/00-cinspect.py | utils/00-cinspect.py | """ A startup script for IPython to patch it to 'inspect' using cinspect. """
# Place this file in ~/.ipython/<PROFILE_DIR>/startup to patch your IPython to
# use cinspect for the code inspection.
import inspect
from cinspect import getsource, getfile
import IPython.core.oinspect as OI
from IPython.utils.py3compat import cast_unicode
old_find_file = OI.find_file
old_getsource = inspect.getsource
inspect.getsource = getsource
def patch_find_file(obj):
fname = old_find_file(obj)
if fname is None:
try:
fname = cast_unicode(getfile(obj))
except:
pass
return fname
OI.find_file = patch_find_file
ipy = get_ipython()
old_format = ipy.inspector.format
def c_format(raw, *args, **kwargs):
return raw
def my_format(raw, out = None, scheme = ''):
try:
output = old_format(raw, out, scheme)
except:
output = raw
return output
ipy.inspector.format = my_format
| """ A startup script for IPython to patch it to 'inspect' using cinspect. """
# Place this file in ~/.ipython/<PROFILE_DIR>/startup to patch your IPython to
# use cinspect for the code inspection.
from cinspect import getsource, getfile
import IPython.core.oinspect as OI
from IPython.utils.py3compat import cast_unicode
old_find_file = OI.find_file
old_getsource = OI.getsource
def patch_find_file(obj):
fname = old_find_file(obj)
if fname is None:
try:
fname = cast_unicode(getfile(obj))
except:
pass
return fname
def patch_getsource(obj, is_binary=False):
if is_binary:
return cast_unicode(getsource(obj))
else:
return old_getsource(obj, is_binary)
OI.find_file = patch_find_file
OI.getsource = patch_getsource
| Update the IPython startup script for master. | Update the IPython startup script for master.
| Python | bsd-3-clause | punchagan/cinspect,punchagan/cinspect |
dc461956408ffa35e2391fccf4231d60144985f7 | yunity/groups/api.py | yunity/groups/api.py | from rest_framework import filters
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
class GroupViewSet(viewsets.ModelViewSet):
queryset = GroupModel.objects.all()
serializer_class = GroupSerializer
filter_fields = ('members',)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
permission_classes = (IsAuthenticatedOrReadOnly,)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def join(self, request, pk=None):
group = self.get_object()
group.members.add(request.user)
return Response(status=status.HTTP_200_OK)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def leave(self, request, pk=None):
group = self.get_object()
if not group.members.filter(id=request.user.id).exists():
return Response("User not member of group",
status=status.HTTP_400_BAD_REQUEST)
group.members.remove(request.user)
return Response(status=status.HTTP_200_OK)
| from rest_framework import filters
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly, BasePermission
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
class IsMember(BasePermission):
message = 'You are not a member.'
def has_object_permission(self, request, view, obj):
return request.user in obj.members.all()
class GroupViewSet(viewsets.ModelViewSet):
queryset = GroupModel.objects.all()
serializer_class = GroupSerializer
filter_fields = ('members',)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
def get_permissions(self):
if self.action in ('update', 'partial_update', 'destroy'):
self.permission_classes = (IsMember,)
else:
self.permission_classes = (IsAuthenticatedOrReadOnly,)
return super().get_permissions()
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def join(self, request, pk=None):
group = self.get_object()
group.members.add(request.user)
return Response(status=status.HTTP_200_OK)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def leave(self, request, pk=None):
group = self.get_object()
if not group.members.filter(id=request.user.id).exists():
return Response("User not member of group",
status=status.HTTP_400_BAD_REQUEST)
group.members.remove(request.user)
return Response(status=status.HTTP_200_OK)
| Fix permissions for groups endpoint | Fix permissions for groups endpoint
| Python | agpl-3.0 | yunity/yunity-core,yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend |
0f7ebec0442da08b12cd88f2558146d5c5a551ad | K2fov/tests/test_plot.py | K2fov/tests/test_plot.py | """Tests K2fov.plot"""
from .. import plot
def test_basics():
"""Make sure this runs without exception."""
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
| """Tests K2fov.plot"""
from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
| Simplify plot test for now | Simplify plot test for now
| Python | mit | KeplerGO/K2fov,mrtommyb/K2fov |
5187cd61c8063091304b8ee8e086c7474bbda71c | angus/client/version.py | angus/client/version.py | # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
__version__ = "0.0.14"
| # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
__version__ = "0.0.15rc1"
| Prepare first rc for 0.0.15 | Prepare first rc for 0.0.15
| Python | apache-2.0 | angus-ai/angus-sdk-python |
3427b2583c38ed7ec5239c36faa82536f3f95a3b | automata/pda/stack.py | automata/pda/stack.py | #!/usr/bin/env python3
"""Classes and methods for working with PDA stacks."""
class PDAStack(object):
"""A PDA stack."""
def __init__(self, stack, **kwargs):
"""Initialize the new PDA stack."""
if isinstance(stack, PDAStack):
self._init_from_stack_obj(stack)
else:
self.stack = list(stack)
def _init_from_stack_obj(self, stack_obj):
"""Initialize this Stack as a deep copy of the given Stack."""
self.__init__(stack_obj.stack)
def top(self):
"""Return the symbol at the top of the stack."""
if self.stack:
return self.stack[-1]
else:
return ''
def pop(self):
"""Pop the stack top from the stack."""
self.stack.pop()
def replace(self, symbols):
"""
Replace the top of the stack with the given symbols.
The first symbol in the given sequence becomes the new stack top.
"""
self.stack.pop()
self.stack.extend(reversed(symbols))
def copy(self):
"""Return a deep copy of the stack."""
return self.__class__(self)
def __len__(self):
"""Return the number of symbols on the stack."""
return len(self.stack)
def __iter__(self):
"""Return an interator for the stack."""
return iter(self.stack)
def __repr__(self):
"""Return a string representation of the stack."""
return '{}({})'.format(self.__class__.__name__, self.stack)
def __eq__(self, other):
"""Check if two stacks are equal."""
return self.__dict__ == other.__dict__
| #!/usr/bin/env python3
"""Classes and methods for working with PDA stacks."""
class PDAStack(object):
"""A PDA stack."""
def __init__(self, stack):
"""Initialize the new PDA stack."""
self.stack = list(stack)
def top(self):
"""Return the symbol at the top of the stack."""
if self.stack:
return self.stack[-1]
else:
return ''
def pop(self):
"""Pop the stack top from the stack."""
self.stack.pop()
def replace(self, symbols):
"""
Replace the top of the stack with the given symbols.
The first symbol in the given sequence becomes the new stack top.
"""
self.stack.pop()
self.stack.extend(reversed(symbols))
def copy(self):
"""Return a deep copy of the stack."""
return self.__class__(**self.__dict__)
def __len__(self):
"""Return the number of symbols on the stack."""
return len(self.stack)
def __iter__(self):
"""Return an interator for the stack."""
return iter(self.stack)
def __repr__(self):
"""Return a string representation of the stack."""
return '{}({})'.format(self.__class__.__name__, self.stack)
def __eq__(self, other):
"""Check if two stacks are equal."""
return self.__dict__ == other.__dict__
| Remove copy constructor for PDAStack | Remove copy constructor for PDAStack
The copy() method is already sufficient.
| Python | mit | caleb531/automata |
3990e3aa64cff288def07ee36e24026cc15282c0 | taiga/projects/issues/serializers.py | taiga/projects/issues/serializers.py | # -*- coding: utf-8 -*-
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
comment = serializers.SerializerMethodField("get_comment")
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
def get_comment(self, obj):
return ""
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
def serialize_neighbor(self, neighbor):
return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
class Meta:
model = models.Issue
fields = ("id", "ref", "subject")
depth = 0
| # -*- coding: utf-8 -*-
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
def serialize_neighbor(self, neighbor):
return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
class Meta:
model = models.Issue
fields = ("id", "ref", "subject")
depth = 0
| Remove unnecessary field from IssueSerializer | Remove unnecessary field from IssueSerializer
| Python | agpl-3.0 | forging2012/taiga-back,EvgeneOskin/taiga-back,xdevelsistemas/taiga-back-community,seanchen/taiga-back,bdang2012/taiga-back-casting,Rademade/taiga-back,crr0004/taiga-back,dayatz/taiga-back,rajiteh/taiga-back,dycodedev/taiga-back,crr0004/taiga-back,obimod/taiga-back,Zaneh-/bearded-tribble-back,seanchen/taiga-back,gauravjns/taiga-back,joshisa/taiga-back,19kestier/taiga-back,jeffdwyatt/taiga-back,taigaio/taiga-back,WALR/taiga-back,joshisa/taiga-back,astronaut1712/taiga-back,taigaio/taiga-back,coopsource/taiga-back,gam-phon/taiga-back,Rademade/taiga-back,obimod/taiga-back,obimod/taiga-back,CMLL/taiga-back,frt-arch/taiga-back,dycodedev/taiga-back,bdang2012/taiga-back-casting,Tigerwhit4/taiga-back,19kestier/taiga-back,EvgeneOskin/taiga-back,EvgeneOskin/taiga-back,astagi/taiga-back,bdang2012/taiga-back-casting,Zaneh-/bearded-tribble-back,dayatz/taiga-back,CoolCloud/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back,crr0004/taiga-back,WALR/taiga-back,gam-phon/taiga-back,CMLL/taiga-back,seanchen/taiga-back,astagi/taiga-back,gauravjns/taiga-back,gam-phon/taiga-back,WALR/taiga-back,jeffdwyatt/taiga-back,Tigerwhit4/taiga-back,Zaneh-/bearded-tribble-back,seanchen/taiga-back,xdevelsistemas/taiga-back-community,coopsource/taiga-back,astagi/taiga-back,EvgeneOskin/taiga-back,obimod/taiga-back,gam-phon/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,rajiteh/taiga-back,dycodedev/taiga-back,bdang2012/taiga-back-casting,19kestier/taiga-back,astronaut1712/taiga-back,forging2012/taiga-back,CMLL/taiga-back,frt-arch/taiga-back,astagi/taiga-back,WALR/taiga-back,forging2012/taiga-back,rajiteh/taiga-back,frt-arch/taiga-back,Rademade/taiga-back,xdevelsistemas/taiga-back-community,taigaio/taiga-back,joshisa/taiga-back,gauravjns/taiga-back,Rademade/taiga-back,crr0004/taiga-back,forging2012/taiga-back,joshisa/taiga-back,CMLL/taiga-back,dycodedev/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,Rademade/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back,CoolCloud/taiga-back,gauravjns/taiga-back,rajiteh/taiga-back,dayatz/taiga-back,Tigerwhit4/taiga-back,Tigerwhit4/taiga-back |
85e853a63d7fed79b931b337bb9e6678077cf8d5 | tests/integration/ssh/test_grains.py | tests/integration/ssh/test_grains.py | # -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.case import SSHCase
from tests.support.unit import skipIf
# Import Salt Libs
import salt.utils
@skipIf(salt.utils.is_windows(), 'salt-ssh not available on Windows')
class SSHGrainsTest(SSHCase):
'''
testing grains with salt-ssh
'''
def test_grains_items(self):
'''
test grains.items with salt-ssh
'''
ret = self.run_function('grains.items')
self.assertEqual(ret['kernel'], 'Linux')
self.assertTrue(isinstance(ret, dict))
| # -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.case import SSHCase
from tests.support.unit import skipIf
# Import Salt Libs
import salt.utils
@skipIf(salt.utils.is_windows(), 'salt-ssh not available on Windows')
class SSHGrainsTest(SSHCase):
'''
testing grains with salt-ssh
'''
def test_grains_items(self):
'''
test grains.items with salt-ssh
'''
ret = self.run_function('grains.items')
grain = 'Linux'
if salt.utils.platform.is_darwin():
grain = 'Darwin'
self.assertEqual(ret['kernel'], grain)
self.assertTrue(isinstance(ret, dict))
| Add darwin value for ssh grain items tests on MacOSX | Add darwin value for ssh grain items tests on MacOSX
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt |
79bbc95abd2c1b41bcbd19d9ce1ffa330bd76b7a | source/views.py | source/views.py | from multiprocessing.pool import ThreadPool
from django.shortcuts import render
from .forms import SearchForm
from source import view_models
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('movie_title').__str__()
pool = ThreadPool(processes=5)
async_rt_rating = pool.apply_async(view_models.get_rt_rating, (title,))
async_bluray_rating = pool.apply_async(view_models.get_bluray_rating, (title,))
async_tech_specs = pool.apply_async(view_models.get_tech_spec, (title,))
async_price = pool.apply_async(view_models.get_price, (title,))
async_artwork = pool.apply_async(view_models.get_artwork, (title,))
rt_rating = async_rt_rating.get()
bluray_rating = async_bluray_rating.get()
tech_specs = async_tech_specs.get()
price = async_price.get()
artwork = async_artwork.get()
return render(request, 'index.html', {'form': form, 'rt_rating': rt_rating, 'bluray_rating': bluray_rating, 'tech_specs': tech_specs, 'price': price, 'artwork': artwork})
else:
form = SearchForm()
return render(request, 'index.html', {'form': form})
| from multiprocessing.pool import ThreadPool
from django.shortcuts import render
from .forms import SearchForm
from source import view_models
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('movie_title').__str__()
pool = ThreadPool(processes=5)
async_rt_rating = pool.apply_async(view_models.get_rt_rating, (title,))
async_bluray_rating = pool.apply_async(view_models.get_bluray_rating, (title,))
async_tech_specs = pool.apply_async(view_models.get_tech_spec, (title,))
async_price = pool.apply_async(view_models.get_price, (title,))
async_artwork = pool.apply_async(view_models.get_artwork, (title,))
pool.close()
rt_rating = async_rt_rating.get()
bluray_rating = async_bluray_rating.get()
tech_specs = async_tech_specs.get()
price = async_price.get()
artwork = async_artwork.get()
pool.join()
return render(request, 'index.html', {'form': form, 'rt_rating': rt_rating, 'bluray_rating': bluray_rating, 'tech_specs': tech_specs, 'price': price, 'artwork': artwork})
else:
form = SearchForm()
return render(request, 'index.html', {'form': form})
| Join threads or else the number of running threads increments by 5 at each request and will never stop until main process is killed | Join threads or else the number of running threads increments by 5 at each request and will never stop until main process is killed
| Python | mit | jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu |
9d29e4f41c076950d28f602773a3c4672a9191b1 | pyluos/io/__init__.py | pyluos/io/__init__.py | import json
import logging
class IOHandler(object):
@classmethod
def is_host_compatible(cls, host):
return False
def __init__(self, host):
raise NotImplementedError
def is_ready(self):
raise NotImplementedError
def read(self, trials=5):
try:
data = self.recv()
return self.loads(data)
except Exception as e:
logging.getLogger(__name__).debug('Msg read failed: {}'.format(str(e)))
if trials > 0:
return self.read(trials - 1)
else:
raise e
def recv(self):
raise NotImplementedError
def send(self, msg):
self.write(self.dumps(msg))
def write(self, data):
raise NotImplementedError
def loads(self, data):
if type(data) == bytes:
data = data.decode()
return json.loads(data)
def dumps(self, msg):
return json.dumps(msg).encode()
from .ws import Ws
from .serial_io import Serial
IOs = [Serial, Ws]
def io_from_host(host, *args, **kwargs):
for cls in IOs:
if cls.is_host_compatible(host):
return cls(host=host, *args, **kwargs)
raise ValueError('No corresponding IO found (among {}).'.format(discover_hosts))
def discover_hosts():
return sum([io.available_hosts() for io in IOs], [])
| import json
import logging
class IOHandler(object):
@classmethod
def is_host_compatible(cls, host):
return False
def __init__(self, host):
raise NotImplementedError
def is_ready(self):
raise NotImplementedError
def read(self, trials=5):
try:
data = self.recv()
return self.loads(data)
except Exception as e:
logging.getLogger(__name__).debug('Msg read failed: {}'.format(str(e)))
if trials == 0:
raise e
return self.read(trials)
def recv(self):
raise NotImplementedError
def send(self, msg):
self.write(self.dumps(msg))
def write(self, data):
raise NotImplementedError
def loads(self, data):
if type(data) == bytes:
data = data.decode()
return json.loads(data)
def dumps(self, msg):
return json.dumps(msg).encode()
from .ws import Ws
from .serial_io import Serial
IOs = [Serial, Ws]
def io_from_host(host, *args, **kwargs):
for cls in IOs:
if cls.is_host_compatible(host):
return cls(host=host, *args, **kwargs)
raise ValueError('No corresponding IO found (among {}).'.format(discover_hosts))
def discover_hosts():
return sum([io.available_hosts() for io in IOs], [])
| Remove retries limit in read ftm. | Remove retries limit in read ftm.
| Python | mit | pollen/pyrobus |
26a6da62dc81720ea13645589719dcbae6dadacc | pynexus/api_client.py | pynexus/api_client.py | import requests
class ApiClient:
def __init__(self, host, username, password):
self.host = host
self.username = username
self.password = password
def get_all_repositories(self):
r = requests.get(self.host+'/nexus/service/local/repositories', headers={'Accept': 'application/json'})
return r
def get_status(self):
r = requests.get(self.host+'/nexus/service/local/status', headers={'Accept': 'application/json'})
return r
| import requests
class ApiClient:
def __init__(self, host, username, password):
self.uri = host + '/nexus/service/local/'
self.username = username
self.password = password
def get_all_repositories(self):
r = requests.get(self.uri + 'all_repositories', headers={'Accept': 'application/json'})
return r
def get_status(self):
r = requests.get(self.uri + 'status', headers={'Accept': 'application/json'})
return r
| Refactor url attribute to uri | Refactor url attribute to uri
It's better to construct the uri in the class constructor, instead
of constructing it in every single REST method
| Python | apache-2.0 | rcarrillocruz/pynexus |
68f4d883eb9dd59b3a4560f53657d80cf572104e | pfasst/__init__.py | pfasst/__init__.py |
from pfasst import PFASST
__all__ = []
|
try:
from pfasst import PFASST
except:
print 'WARNING: Unable to import PFASST.'
__all__ = []
| Add warning when unable to import PFASST. | PFASST: Add warning when unable to import PFASST.
| Python | bsd-2-clause | memmett/PyPFASST,memmett/PyPFASST |
2cb385ab85257562547759c1d192993c258ebdff | wger/utils/tests/test_capitalizer.py | wger/utils/tests/test_capitalizer.py | # This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from wger.core.tests.base_testcase import WorkoutManagerTestCase
from wger.utils.helpers import smart_capitalize
class CapitalizerTestCase(WorkoutManagerTestCase):
'''
Tests the "intelligent" capitalizer
'''
def test_capitalizer(self):
'''
Tests different combinations of input strings
'''
self.assertEqual(smart_capitalize("some long words"), "Some Long Words")
self.assertEqual(smart_capitalize("Here a short one"), "Here a Short One")
self.assertEqual(smart_capitalize("meine gym AG"), "Meine Gym AG")
self.assertEqual(smart_capitalize("ßpecial case"), "ßpecial Case")
self.assertEqual(smart_capitalize("fIRST lettER only"), "FIRST LettER Only")
| # -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from wger.core.tests.base_testcase import WorkoutManagerTestCase
from wger.utils.helpers import smart_capitalize
class CapitalizerTestCase(WorkoutManagerTestCase):
'''
Tests the "intelligent" capitalizer
'''
def test_capitalizer(self):
'''
Tests different combinations of input strings
'''
self.assertEqual(smart_capitalize("some long words"), "Some Long Words")
self.assertEqual(smart_capitalize("Here a short one"), "Here a Short One")
self.assertEqual(smart_capitalize("meine gym AG"), "Meine Gym AG")
self.assertEqual(smart_capitalize("ßpecial case"), "ßpecial Case")
self.assertEqual(smart_capitalize("fIRST lettER only"), "FIRST LettER Only")
| Add coding for python 2.7 compatibility | Add coding for python 2.7 compatibility
| Python | agpl-3.0 | wger-project/wger,petervanderdoes/wger,wger-project/wger,kjagoo/wger_stark,rolandgeider/wger,petervanderdoes/wger,rolandgeider/wger,kjagoo/wger_stark,wger-project/wger,wger-project/wger,kjagoo/wger_stark,kjagoo/wger_stark,petervanderdoes/wger,petervanderdoes/wger,rolandgeider/wger,rolandgeider/wger |
93926a9986ab4ba7704cd564d0052b6e60ff38cb | casepro/pods/base.py | casepro/pods/base.py | import json
from confmodel import fields, Config as ConfmodelConfig
from django.apps import AppConfig
class PodConfig(ConfmodelConfig):
'''
This is the config that all pods should use as the base for their own
config.
'''
index = fields.ConfigInt(
"A unique identifier for the specific instance of this pod."
"Automatically determined and set in the pod registry.",
required=True)
title = fields.ConfigText(
"The title to show in the UI for this pod",
default=None)
class Pod(object):
'''
The base class for all pod plugins.
'''
def __init__(self, pod_type, config):
self.pod_type = pod_type
self.config = config
@property
def config_json(self):
return json.dumps(self.config._config_data)
def read_data(self, params):
'''Should return the data that should be used to create the display
for the pod.'''
return {}
def perform_action(self, params):
'''Should perform the action specified by params.'''
return {}
class PodPlugin(AppConfig):
name = 'casepro.pods'
label = 'base_pod'
pod_class = Pod
config_class = PodConfig
title = 'Pod'
controller = None
directive = None
| import json
from confmodel import fields, Config as ConfmodelConfig
from django.apps import AppConfig
class PodConfig(ConfmodelConfig):
'''
This is the config that all pods should use as the base for their own
config.
'''
index = fields.ConfigInt(
"A unique identifier for the specific instance of this pod."
"Automatically determined and set in the pod registry.",
required=True)
title = fields.ConfigText(
"The title to show in the UI for this pod",
default=None)
class Pod(object):
'''
The base class for all pod plugins.
'''
def __init__(self, pod_type, config):
self.pod_type = pod_type
self.config = config
@property
def config_json(self):
return json.dumps(self.config._config_data)
def read_data(self, params):
'''Should return the data that should be used to create the display
for the pod.'''
return {}
def perform_action(self, params):
'''Should perform the action specified by params.'''
return {}
class PodPlugin(AppConfig):
name = 'casepro.pods'
pod_class = Pod
config_class = PodConfig
# django application label, used to determine which pod type to use when
# loading pods configured in `settings.PODS`
label = 'base_pod'
# default title to use when configuring each pod
title = 'Pod'
# override to use a different angular controller
controller = 'PodController'
# override to use a different angular directive
directive = 'pod'
# override with paths to custom scripts that the pod needs
scripts = ()
# override with paths to custom styles that the pod needs
styles = ()
| Add the class-level vars we need for pod angular components to PodPlugin | Add the class-level vars we need for pod angular components to PodPlugin
| Python | bsd-3-clause | rapidpro/casepro,praekelt/casepro,xkmato/casepro,rapidpro/casepro,praekelt/casepro,xkmato/casepro,praekelt/casepro,rapidpro/casepro |
aceeac7e9dd2735add937bc7141cfdb29b6201c7 | pywatson/watson.py | pywatson/watson.py | from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
class Watson:
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
q = question.to_dict()
else:
q = Question(question_text).to_dict()
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
| from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
class Watson(object):
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
q = question.__dict__
else:
q = Question(question_text).__dict__
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
| Use __dict__ instead of to_dict() | Use __dict__ instead of to_dict()
| Python | mit | sherlocke/pywatson |
d7c9bcbf25a6b45a462216f426608474aa66ceb0 | mysite/missions/models.py | mysite/missions/models.py | from django.db import models
class MissionStep(models.Model):
pass
class MissionStepCompletion(models.Model):
person = models.ForeignKey('profile.Person')
step = models.ForeignKey('MissionStep')
class Meta:
unique_together = ('person', 'step')
| from django.db import models
class Step(models.Model):
pass
class StepCompletion(models.Model):
person = models.ForeignKey('profile.Person')
step = models.ForeignKey('Step')
class Meta:
unique_together = ('person', 'step')
| Remove the redundant "Mission" prefix from the mission model names. | Remove the redundant "Mission" prefix from the mission model names.
| Python | agpl-3.0 | heeraj123/oh-mainline,vipul-sharma20/oh-mainline,sudheesh001/oh-mainline,willingc/oh-mainline,jledbetter/openhatch,jledbetter/openhatch,moijes12/oh-mainline,openhatch/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,jledbetter/openhatch,waseem18/oh-mainline,waseem18/oh-mainline,SnappleCap/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,Changaco/oh-mainline,Changaco/oh-mainline,SnappleCap/oh-mainline,jledbetter/openhatch,onceuponatimeforever/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,openhatch/oh-mainline,eeshangarg/oh-mainline,nirmeshk/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch,heeraj123/oh-mainline,waseem18/oh-mainline,sudheesh001/oh-mainline,waseem18/oh-mainline,heeraj123/oh-mainline,vipul-sharma20/oh-mainline,campbe13/openhatch,willingc/oh-mainline,SnappleCap/oh-mainline,campbe13/openhatch,vipul-sharma20/oh-mainline,campbe13/openhatch,moijes12/oh-mainline,eeshangarg/oh-mainline,onceuponatimeforever/oh-mainline,moijes12/oh-mainline,SnappleCap/oh-mainline,willingc/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,moijes12/oh-mainline,ehashman/oh-mainline,moijes12/oh-mainline,Changaco/oh-mainline,vipul-sharma20/oh-mainline,Changaco/oh-mainline,onceuponatimeforever/oh-mainline,ojengwa/oh-mainline,onceuponatimeforever/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,heeraj123/oh-mainline,willingc/oh-mainline,SnappleCap/oh-mainline,sudheesh001/oh-mainline,jledbetter/openhatch,ehashman/oh-mainline,eeshangarg/oh-mainline,openhatch/oh-mainline,sudheesh001/oh-mainline,waseem18/oh-mainline,mzdaniel/oh-mainline,eeshangarg/oh-mainline,willingc/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch,vipul-sharma20/oh-mainline,ojengwa/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,ojengwa/oh-mainline,nirmeshk/oh-mainline,openhatch/oh-mainline |
a2e3f0590d5bd25993be5291c058c722896aa773 | tests/test_utils.py | tests/test_utils.py | import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main() | import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
np.random.seed(1)
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main() | Fix broken utils test with seed | Fix broken utils test with seed
| Python | apache-2.0 | HazyResearch/metal,HazyResearch/metal |
df5e6bdd03ad666afdd9b61745eec95afc08e9cb | tests/test_views.py | tests/test_views.py | """ Tests for the main server file. """
from unittest import TestCase
from unittest.mock import patch
from app import views
class ViewsTestCase(TestCase):
""" Our main server testcase. """
def test_ping(self):
self.assertEqual(views.ping(None, None), 'pong')
@patch('app.views.notify_recipient')
@patch('app.views.is_valid_pull_request')
def test_valid_pull_request(self, validator, notifier):
validator.return_value = True
notifier.return_value = True
result = views.pull_request({}, None)
self.assertEqual(result, 'Recipient Notified')
@patch('app.views.is_valid_pull_request')
def test_invalid_pull_request(self, validator):
validator.return_value = False
result = views.pull_request({}, None)
self.assertRegex(result, 'ignored')
| """ Tests for the main server file. """
from unittest import TestCase
from unittest.mock import patch
from app import views
class ViewsTestCase(TestCase):
""" Our main server testcase. """
def test_ping(self):
self.assertEqual(views.ping(None, None), 'pong')
@patch('app.views.notify_recipient')
@patch('app.views.is_valid_pull_request')
def test_valid_pull_request(self, validator, notifier):
""" Should notify upon a valid pull request. """
validator.return_value = True
notifier.return_value = True
result = views.pull_request({}, None)
self.assertEqual(result, 'Recipient Notified')
@patch('app.views.is_valid_pull_request')
def test_invalid_pull_request(self, validator):
""" Should ignore an invalid pull request. """
validator.return_value = False
result = views.pull_request({}, None)
self.assertRegex(result, 'ignored')
| Fix last code quality issues | Fix last code quality issues
| Python | mit | DobaTech/github-review-slack-notifier |
23d50e82212eb02a3ba467ae323736e4f03f7293 | tof_server/views.py | tof_server/views.py | """This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
try:
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
return jsonify({
'id' : 'somestubid',
'pin' : new_pin
})
except Exception as er_msg:
return make_response(jsonify({
'error' : str(er_msg)
}), 500)
finally:
cursor.close()
| """This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
try:
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES ('%s')"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
except Exception as er_msg:
return make_response(jsonify({
'error' : str(er_msg)
}), 500)
finally:
cursor.close()
| Insert new player data into db | Insert new player data into db
| Python | mit | P1X-in/Tanks-of-Freedom-Server |
39091c3390d121d48097d64526f40d0a09702673 | src/zeit/today/tests.py | src/zeit/today/tests.py | import pkg_resources
import zeit.cms.testing
product_config = """\
<product-config zeit.today>
today-xml-url file://{base}/today.xml
</product-config>
""".format(base=pkg_resources.resource_filename(__name__, '.'))
TodayLayer = zeit.cms.testing.ZCMLLayer('ftesting.zcml', product_config=(
product_config +
zeit.cms.testing.cms_product_config))
def test_suite():
return zeit.cms.testing.FunctionalDocFileSuite(
'README.txt',
'yesterday.txt',
layer=TodayLayer
)
| import pkg_resources
import zeit.cms.testing
product_config = """\
<product-config zeit.today>
today-xml-url file://{base}/today.xml
</product-config>
""".format(base=pkg_resources.resource_filename(__name__, '.'))
CONFIG_LAYER = zeit.cms.testing.ProductConfigLayer(product_config, bases=(
zeit.cms.testing.CONFIG_LAYER,))
ZCML_LAYER = zeit.cms.testing.ZCMLLayer(bases=(CONFIG_LAYER,))
ZOPE_LAYER = zeit.cms.testing.ZopeLayer(bases=(ZCML_LAYER,))
def test_suite():
return zeit.cms.testing.FunctionalDocFileSuite(
'README.txt',
'yesterday.txt',
layer=ZOPE_LAYER)
| Update to new testlayer API | ZON-5241: Update to new testlayer API
| Python | bsd-3-clause | ZeitOnline/zeit.today |
81f7b2bdd0e916a001b954ce9bac24ebe4600150 | roboime/options.py | roboime/options.py | # -*- coding: utf-8 -*-
"""
General options during execution
"""
#Position Log filename. Use None to disable.
position_log_filename = "math/pos_log.txt"
#position_log_filename = None
#Position Log with Noise filename. Use None to disable.
position_log_noise_filename = "math/pos_log_noise.txt"
#position_log_filename = None
#Command and Update Log filename. Use None to disable.
cmdupd_filename = "math/commands.txt"
#cmdupd_filename = None
#Gaussian noise addition variances
noise_var_x = 3.
noise_var_y = 3.
noise_var_angle = 0.05 | # -*- coding: utf-8 -*-
"""
General options during execution
"""
#Position Log filename. Use None to disable.
position_log_filename = "math/pos_log.txt"
#position_log_filename = None
#Command and Update Log filename. Use None to disable.
cmdupd_filename = "math/commands.txt"
#cmdupd_filename = None
#Gaussian noise addition variances
noise_var_x = 3.E-5
noise_var_y = 3.E-5
noise_var_angle = 1.
# Process error estimate. The lower (higher negative exponent), more the filter
# becomes like a Low-Pass Filter (higher confidence in the model prediction).
Q = 1e-5
# Measurement error variances (for the R matrix).
# The higher (lower negative exponent), more the filter becomes like a
# Low-Pass Filter (higher possible measurement error).
R_var_x = 3.E-5
R_var_y = 3.E-5
R_var_angle = 3
| Add Q (generic) and R (3 values) to get more precise Kalman results | Add Q (generic) and R (3 values) to get more precise Kalman results
| Python | agpl-3.0 | roboime/pyroboime |
d6ce218b0da869f6b4319751c1fe59ef02fba6b6 | kremlin/imgutils.py | kremlin/imgutils.py | """
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
"""docstring for mkthumb"""
size = (h, w)
f, ext = os.path.splitext(fp)
im = Image.open(fp)
im.thumbnail(size, Image.ANTIALIAS)
im.save(f + ".thumbnail" + ext)
| """
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
"""docstring for mkthumb"""
size = (h, w)
f, ext = os.path.splitext(fp)
im = Image.open(fp)
im.thumbnail(size, Image.ANTIALIAS)
im.save('.thumbnail'.join([f, ext]))
| Use better string concatenation in mkthumb() | Use better string concatenation in mkthumb()
| Python | bsd-2-clause | glasnost/kremlin,glasnost/kremlin,glasnost/kremlin |
aa196b79102959a9fc5e8837c068307791b76d32 | lib/matrix_parser.py | lib/matrix_parser.py | #!/usr/bin/python
# Import code for parsing a matrix into a sympy object
from quantum_simulation import parse_matrix
from sympy import latex
import json, sys, pipes, urllib
# If the file's being run, rather than loaded as a library
if __name__ == "__main__":
# Load the matrix from json passed as cli argument
matrix = parse_matrix(json.loads(sys.argv[1])['matrix'])
# Generate latex for the matix, using the pmatrix matrix env.
tex = latex(matrix).replace("smallmatrix", "pmatrix").rpartition("\\right]")[0].partition("\\left[")[2]
# Print out a JSONified version of the latex for the matrix
# in a URL encoded version
print pipes.quote(json.dumps({
'matrix': urllib.quote(tex)
}))
| #!/usr/bin/python
# Import code for parsing a matrix into a sympy object
from quantum_simulation import parse_matrix
from sympy import latex
import json, sys, pipes, urllib, re
# If the file's being run, rather than loaded as a library
if __name__ == "__main__":
# Load the matrix from json passed as cli argument
matrix = parse_matrix(json.loads(sys.argv[1])['matrix'])
# Generate latex for the matix, using the pmatrix matrix env.
tex = latex(matrix).replace("smallmatrix", "pmatrix")
tex = re.sub(r'\\right[\]\)]$', '', tex)
tex = re.sub(r'^\\left[\[\(]', '', tex)
# Print out a JSONified version of the latex for the matrix
# in a URL encoded version
print pipes.quote(json.dumps({
'matrix': urllib.quote(tex)
}))
| Fix with latexising the matrix of an operator | Fix with latexising the matrix of an operator
| Python | mit | hrickards/shors_circuits,hrickards/shors_circuits,hrickards/shors_circuits |
09f65ff2a21cd00355193bcdee22a2289ead2d24 | tests/test_arguments.py | tests/test_arguments.py | from __future__ import print_function
import unittest
import wrapt
class TestArguments(unittest.TestCase):
def test_getcallargs(self):
def function(a, b=2, c=3, d=4, e=5, *args, **kwargs):
pass
expected = {'a': 10, 'c': 3, 'b': 20, 'e': 5, 'd': 40,
'args': (), 'kwargs': {'f': 50}}
calculated = wrapt.getcallargs(function, 10, 20, d=40, f=50)
self.assertEqual(expected, calculated)
expected = {'a': 10, 'c': 30, 'b': 20, 'e': 50, 'd': 40,
'args': (60,), 'kwargs': {}}
calculated = wrapt.getcallargs(function, 10, 20, 30, 40, 50, 60)
self.assertEqual(expected, calculated)
| from __future__ import print_function
import unittest
import wrapt
class TestArguments(unittest.TestCase):
def test_getcallargs(self):
def function(a, b=2, c=3, d=4, e=5, *args, **kwargs):
pass
expected = {'a': 10, 'c': 3, 'b': 20, 'e': 5, 'd': 40,
'args': (), 'kwargs': {'f': 50}}
calculated = wrapt.getcallargs(function, 10, 20, d=40, f=50)
self.assertEqual(expected, calculated)
expected = {'a': 10, 'c': 30, 'b': 20, 'e': 50, 'd': 40,
'args': (60,), 'kwargs': {}}
calculated = wrapt.getcallargs(function, 10, 20, 30, 40, 50, 60)
self.assertEqual(expected, calculated)
def test_unexpected_unicode_keyword(self):
def function(a=2):
pass
kwargs = { u'b': 40 }
self.assertRaises(TypeError, wrapt.getcallargs, function, **kwargs)
| Add test for unexpected unicode kwargs. | Add test for unexpected unicode kwargs.
| Python | bsd-2-clause | GrahamDumpleton/wrapt,GrahamDumpleton/wrapt |
397eb3ee376acec005a8d7b5a4c2b2e0193a938d | tests/test_bookmarks.py | tests/test_bookmarks.py | import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
def register(self, username, name, email, password):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
confirm=password
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
| import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
def register(self, username, name, email, password, confirm=None):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
confirm=confirm
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
| Add param for confirm field on register test func | Add param for confirm field on register test func
| Python | apache-2.0 | byanofsky/bookmarks,byanofsky/bookmarks,byanofsky/bookmarks |
95fbbe9bac94e171424cb8ee23a675a70607fb62 | tests/test_constants.py | tests/test_constants.py | from __future__ import absolute_import, unicode_literals
import unittest
from draftjs_exporter.constants import Enum, BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
| from __future__ import absolute_import, unicode_literals
import unittest
from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES, Enum
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
| Fix import order picked up by isort | Fix import order picked up by isort
| Python | mit | springload/draftjs_exporter,springload/draftjs_exporter,springload/draftjs_exporter |
9519b619c9a2c30ea2a5bf5559675c1d926ec5a4 | clouder_template_bind/__openerp__.py | clouder_template_bind/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron
# Copyright 2013 Yannick Buron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Clouder Template Bind',
'version': '1.0',
'category': 'Community',
'depends': ['clouder'],
'author': 'Yannick Buron',
'license': 'AGPL-3',
'website': 'https://github.com/YannickB',
'description': """
Clouder Template Bind
""",
'demo': [],
'data': ['clouder_template_bind_data.xml'],
'installable': True,
'application': True,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Yannick Buron
# Copyright 2013 Yannick Buron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Clouder Template Bind',
'version': '1.0',
'category': 'Community',
'depends': ['clouder','clouder_template_shinken'],
'author': 'Yannick Buron',
'license': 'AGPL-3',
'website': 'https://github.com/YannickB',
'description': """
Clouder Template Bind
""",
'demo': [],
'data': ['clouder_template_bind_data.xml'],
'installable': True,
'application': True,
}
| Add shinken in bind dependancy | Add shinken in bind dependancy
| Python | agpl-3.0 | YannickB/odoo-hosting |
c2fb81dceddd4fd2ea5416565bbd4c7eb4e39bd0 | common/helpers/malicious_requests.py | common/helpers/malicious_requests.py | import re
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.http import HttpResponseBadRequest
class MaliciousRequestsMiddleware:
def __init__(self, get_response):
self.get_response = get_response
# One-time configuration and initialization.
if settings.MALICIOUS_URL_PATTERNS is not None:
patterns = settings.MALICIOUS_URL_PATTERNS.split(',')
self.malicious_url_patterns = list(map(lambda pattern: re.compile(pattern, re.IGNORECASE), patterns))
else:
raise MiddlewareNotUsed
@staticmethod
def log_filter_action(path, log_msg):
prefix = f'[MaliciousRequestsMiddleware] Filtering malicious url "{path}": '
print(f'{prefix}: {log_msg}')
def __call__(self, request):
# Code to be executed for each request before
# the view (and later middleware) are called.
path = request.get_full_path()
for pattern in self.malicious_url_patterns:
if pattern.search(path) is not None:
self.log_filter_action(path, f'Matched pattern "{pattern.pattern}"')
# TODO: Fix exception that triggers after this
return HttpResponseBadRequest
response = self.get_response(request)
# Code to be executed for each request/response after
# the view is called.
return response
| import re
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed, SuspiciousOperation
class MaliciousRequestsMiddleware:
def __init__(self, get_response):
self.get_response = get_response
# One-time configuration and initialization.
if settings.MALICIOUS_URL_PATTERNS is not None:
patterns = settings.MALICIOUS_URL_PATTERNS.split(',')
self.malicious_url_patterns = list(map(lambda pattern: re.compile(pattern, re.IGNORECASE), patterns))
else:
raise MiddlewareNotUsed
@staticmethod
def log_filter_action(path, log_msg):
prefix = f'[MaliciousRequestsMiddleware] Filtering malicious url "{path}": '
print(f'{prefix}: {log_msg}')
def __call__(self, request):
# Code to be executed for each request before
# the view (and later middleware) are called.
path = request.get_full_path()
for pattern in self.malicious_url_patterns:
if pattern.search(path) is not None:
self.log_filter_action(path, f'Matched pattern "{pattern.pattern}"')
# TODO: Fix exception that triggers after this
raise SuspiciousOperation("Malicious url detected")
response = self.get_response(request)
# Code to be executed for each request/response after
# the view is called.
return response
| Use exception rather than Bad Request view | MaliciousRequestsMiddleware: Use exception rather than Bad Request view
| Python | mit | DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange |
aa6a74abc382bb6be86fa4a91132a9be51f365a5 | tests/test_data_checksums.py | tests/test_data_checksums.py | """ test data_checksums"""
from nose.tools import assert_equal
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be') | """ test data_checksums and hashing functions"""
import os
from nose.tools import assert_equal, assert_true
import pyne
# These tests require nuc_data
if not os.path.isfile(pyne.nuc_data):
raise RuntimeError("Tests require nuc_data.h5. Please run nuc_data_make.")
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
def test_internal_hashes():
from pyne.dbgen import hashtools
hashtools.set_internal_hashes(pyne.nuc_data)
for item, val in hashtools.check_internal_hashes(pyne.nuc_data):
assert_true(val)
| Add test of internal hashes and guarded pyne.nuc_data use | Add test of internal hashes and guarded pyne.nuc_data use
| Python | bsd-3-clause | pyne/simplesim |
01641071c177e7b41f935a1683738f059e333d0b | tests/test_group.py | tests/test_group.py | import unittest
import requests
import requests_mock
import settings
from util import register_uris
from pycanvas import Canvas
from pycanvas.course import Course, CourseNickname, Page
class TestGroup(unittest.TestCase):
"""
Tests Group functionality
"""
@classmethod
def setUpClass(self):
requires = {
'course': ['get_by_id', 'show_front_page'],
'generic': ['not_found']
}
adapter = requests_mock.Adapter()
self.canvas = Canvas(settings.BASE_URL, settings.API_KEY, adapter)
register_uris(settings.BASE_URL, requires, adapter)
self.course = self.canvas.get_course(1)
self.group = self.group.get_single_group(1)
| Set up test group file | Set up test group file
| Python | mit | ucfopen/canvasapi,ucfopen/canvasapi,ucfopen/canvasapi |
|
698732f1276f92a94143b0531906caf37e885c28 | trello_notifications.py | trello_notifications.py | try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all():
pass
def noop():
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output) | try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
self.connection = connection
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all(self):
pass
def noop(self):
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output) | Store connection and missing self | Store connection and missing self
| Python | mit | NicoSantangelo/sublime-text-trello |
66c1b353a7fce078fc9c4209e453906b098a22e8 | tests/common.py | tests/common.py | from pprint import pprint, pformat
import datetime
import os
import itertools
from sgmock import Fixture
from sgmock import TestCase
if 'USE_SHOTGUN' in os.environ:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server='testing')
else:
from sgmock import Shotgun, ShotgunError, Fault
from sgsession import Session, Entity
from sgfs import SGFS
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
| from pprint import pprint, pformat
import datetime
import itertools
import os
from sgmock import Fixture
from sgmock import TestCase
_shotgun_server = os.environ.get('SHOTGUN', 'mock')
if _shotgun_server == 'mock':
from sgmock import Shotgun, ShotgunError, Fault
else:
from shotgun_api3 import ShotgunError, Fault
import shotgun_api3_registry
def Shotgun():
return shotgun_api3_registry.connect('sgsession.tests', server=_shotgun_server)
from sgsession import Session, Entity
from sgfs import SGFS
def mini_uuid():
return os.urandom(4).encode('hex')
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
def minimal(entity):
return dict(type=entity['type'], id=entity['id'])
| Change the way we test the real Shotgun server | Change the way we test the real Shotgun server | Python | bsd-3-clause | westernx/sgfs,westernx/sgfs |
9796e60975474006940af723a6cb8b16bc632ae0 | tz_app/context_processors.py | tz_app/context_processors.py | from django.conf import settings
from django.utils import timezone
try:
import pytz
except ImportError:
pytz = None
def timezones(request):
alt_timezone = request.session.get('alt_timezone', pytz.utc)
return {
'pytz': pytz,
'default_timezone_name': settings.TIME_ZONE,
'timezones': pytz.common_timezones if pytz else [],
'alt_timezone': alt_timezone if pytz else timezone.utc,
'alt_timezone_name': alt_timezone.zone if pytz else 'UTC',
}
| from django.conf import settings
from django.utils import timezone
try:
import pytz
except ImportError:
pytz = None
def timezones(request):
alt_timezone = request.session.get('alt_timezone', (pytz or timezone).utc)
return {
'pytz': pytz,
'default_timezone_name': settings.TIME_ZONE,
'timezones': pytz.common_timezones if pytz else [],
'alt_timezone': alt_timezone if pytz else timezone.utc,
'alt_timezone_name': alt_timezone.zone if pytz else 'UTC',
}
| Fix a bug when pytz isn't installed. | Fix a bug when pytz isn't installed.
| Python | bsd-3-clause | aaugustin/django-tz-demo |
1ce7f82fd76bca735c3e164cb6a67c9a8656af3b | trade_client.py | trade_client.py | import json
import socket
from orderbook import create_confirm
def send_msg(ip, port, message):
'''Sends a raw string to the given ip and port. Closes the socket and returns the response.'''
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
'''Sends an offer in JSON form to the given ip and port. offer parameter should be a dictionary.'''
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
try:
response = json.loads(response)
if response and isinstance(response, basestring):
return None
if response and response['type'] == 'trade':
return handle_trade(response)
except ValueError:
return None
def handle_trade(trade):
# id is not yet properly implemented so we use this ugly hack for now
id = trade['trade-id'].split(';')[0]
# Cancel messages are not yet implemented. See issue #7.
return create_confirm(
id=id,
trade_id=trade['trade-id']
)
| import json
import socket
from crypto import retrieve_key
from orderbook import create_confirm
def send_msg(ip, port, message):
'''Sends a raw string to the given ip and port. Closes the socket and returns the response.'''
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((ip, port))
try:
sock.sendall(message)
response = sock.recv(1024)
finally:
sock.close()
return response
def send_offer(ip, port, offer):
'''Sends an offer in JSON form to the given ip and port. offer parameter should be a dictionary.'''
message = json.dumps(offer)
return send_msg(ip, port, message)
def handle_response(response):
try:
response = json.loads(response)
if response and isinstance(response, basestring):
return None
if response and response['type'] == 'trade':
return handle_trade(response)
except ValueError:
return None
def handle_trade(trade):
# id is not yet properly implemented so we use this ugly hack for now
# Cancel messages are not yet implemented. See issue #7.
return create_confirm(
trade_id=trade['trade-id']
)
| Use public key as id. | Use public key as id.
| Python | mit | Tribler/decentral-market |
e875856b544446dc9e0ac0368b1661e6a7af6c6d | tsparser/imu.py | tsparser/imu.py | from tsparser.parser import BaseParser
class IMUParser(BaseParser):
def __init__(self):
self.gyro = None
self.accel = None
self.magnet = None
self.pressure = None
def parse(self, line, data_id, *values):
values = [int(x) for x in values]
if data_id == '$GYRO':
self.gyro = values
elif data_id == '$ACCEL':
self.accel = values
elif data_id == '$MAGNET':
self.magnet = values
elif data_id == '$MBAR':
self.pressure = values[0]
else:
return False
if all([self.gyro, self.accel, self.magnet, self.pressure]):
# todo send it instead of just printing
print(self.generate_data())
self.gyro = self.accel = self.magnet = self.pressure = None
return True
def generate_data(self):
return {
'timestamp': BaseParser.timestamp,
'gyro_x': self.gyro[0],
'gyro_y': self.gyro[1],
'gyro_z': self.gyro[2],
'accel_x': self.accel[0],
'accel_y': self.accel[1],
'accel_z': self.accel[2],
'magnet_x': self.magnet[0],
'magnet_y': self.magnet[1],
'magnet_z': self.magnet[2],
'pressure': self.pressure
}
| from tsparser.parser import BaseParser
class IMUParser(BaseParser):
def __init__(self):
self.gyro = None
self.accel = None
self.magnet = None
self.pressure = None
def parse(self, line, data_id, *values):
if data_id == '$GYRO':
self.gyro = [int(x) for x in values]
elif data_id == '$ACCEL':
self.accel = [int(x) for x in values]
elif data_id == '$MAGNET':
self.magnet = [int(x) for x in values]
elif data_id == '$MBAR':
self.pressure = int(values[0])
else:
return False
if all([self.gyro, self.accel, self.magnet, self.pressure]):
# todo send it instead of just printing
print(self.generate_data())
self.gyro = self.accel = self.magnet = self.pressure = None
return True
def generate_data(self):
return {
'timestamp': BaseParser.timestamp,
'gyro_x': self.gyro[0],
'gyro_y': self.gyro[1],
'gyro_z': self.gyro[2],
'accel_x': self.accel[0],
'accel_y': self.accel[1],
'accel_z': self.accel[2],
'magnet_x': self.magnet[0],
'magnet_y': self.magnet[1],
'magnet_z': self.magnet[2],
'pressure': self.pressure
}
| Fix ValueError in IMUParser with non-ints in input | Fix ValueError in IMUParser with non-ints in input
| Python | mit | m4tx/techswarm-receiver |
c8b86afc53af25c845c8303111a6e7b17d8c26b4 | ciscripts/check/psqcppconan/check.py | ciscripts/check/psqcppconan/check.py | # /ciscripts/check/psqcppconan/check.py
#
# Run tests and static analysis checks on a polysquare conan c++ project.
#
# See /LICENCE.md for Copyright information
"""Run tests and static analysis checks on a polysquare conan c++ project."""
import argparse
import os
def run(cont, util, shell, argv=None):
"""Run checks on this conan project."""
parser = argparse.ArgumentParser(description="""Run conan checks""")
parser.add_argument("--run-test-binaries",
nargs="*",
type=str,
help="""Files relative to the build dir to run""")
result, remainder = parser.parse_known_args(argv or list())
conan_check_script = "check/conan/check.py"
conan_check = cont.fetch_and_import(conan_check_script)
def _during_test(cont, executor, util, build):
"""Run the specified test binaries with the --tap switch.
We then pipe the output into tap-mocha-reporter.
"""
del build
for binary in result.run_test_binaries or list():
executor(cont,
util.running_output,
os.path.join(os.getcwd(), binary))
util.print_message(binary)
kwargs = {
"kind": "polysquare conan c++",
"during_test": _during_test
}
return conan_check.run(cont,
util,
shell,
argv=remainder,
override_kwargs=kwargs)
| # /ciscripts/check/psqcppconan/check.py
#
# Run tests and static analysis checks on a polysquare conan c++ project.
#
# See /LICENCE.md for Copyright information
"""Run tests and static analysis checks on a polysquare conan c++ project."""
import argparse
import os
def run(cont, util, shell, argv=None):
"""Run checks on this conan project."""
parser = argparse.ArgumentParser(description="""Run conan checks""")
parser.add_argument("--run-test-binaries",
nargs="*",
type=str,
help="""Files relative to the build dir to run""")
result, remainder = parser.parse_known_args(argv or list())
conan_check_script = "check/conan/check.py"
conan_check = cont.fetch_and_import(conan_check_script)
def _during_test(cont, executor, util, build):
"""Run the specified test binaries with the --tap switch.
We then pipe the output into tap-mocha-reporter.
"""
del build
for binary in result.run_test_binaries or list():
if not os.path.exists(binary) and os.path.exists(binary + ".exe"):
binary = binary + ".exe"
executor(cont,
util.running_output,
os.path.join(os.getcwd(), binary))
util.print_message(binary)
kwargs = {
"kind": "polysquare conan c++",
"during_test": _during_test
}
return conan_check.run(cont,
util,
shell,
argv=remainder,
override_kwargs=kwargs)
| Allow the use of .exe | psqcppconan: Allow the use of .exe
| Python | mit | polysquare/polysquare-ci-scripts,polysquare/polysquare-ci-scripts |
e3cb7ad226e3c26cbfa6f9f322ebdb4fde7e7d60 | coop_cms/apps/coop_bootstrap/templatetags/coop_bs.py | coop_cms/apps/coop_bootstrap/templatetags/coop_bs.py | # -*- coding: utf-8 -*-
"""
Some tools for templates
"""
from __future__ import unicode_literals
from django import template
from coop_cms.templatetags.coop_utils import is_checkbox as _is_checkbox
from coop_cms.templatetags.coop_navigation import NavigationAsNestedUlNode
register = template.Library()
# Just for compatibility
@register.filter(name='is_checkbox')
def is_checkbox(field):
"""returns true if field is a checkbox"""
return _is_checkbox(field)
@register.tag
def navigation_bootstrap(parser, token):
"""returns the bootstrap-friendly navigation"""
return NavigationAsNestedUlNode(li_node="coop_bootstrap/li_node.html")
| # -*- coding: utf-8 -*-
"""
Some tools for templates
"""
from __future__ import unicode_literals
from django import template
from coop_cms.templatetags.coop_utils import is_checkbox as _is_checkbox
from coop_cms.templatetags.coop_navigation import NavigationAsNestedUlNode, extract_kwargs
register = template.Library()
# Just for compatibility
@register.filter(name='is_checkbox')
def is_checkbox(field):
"""returns true if field is a checkbox"""
return _is_checkbox(field)
@register.tag
def navigation_bootstrap(parser, token):
"""returns the bootstrap-friendly navigation"""
kwargs = dict(li_node="coop_bootstrap/li_node.html")
args = token.contents.split()
kwargs.update(extract_kwargs(args))
return NavigationAsNestedUlNode(**kwargs)
| Fix "navigation_bootstrap" templatetag : arguments were ignored | Fix "navigation_bootstrap" templatetag : arguments were ignored
| Python | bsd-3-clause | ljean/coop_cms,ljean/coop_cms,ljean/coop_cms |
8a4b576d6df4ef1f174c8698ff9a86dbf2f5bd4a | workshops/models.py | workshops/models.py | from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
return int(self.price / 7.5)
| from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
return int(self.price / 7.5) if self.price else None
| Check price exists before using it | Check price exists before using it
| Python | bsd-3-clause | WebCampZg/conference-web,WebCampZg/conference-web,WebCampZg/conference-web |
ea3660bcc1a9f7be619def8e26dd7b0ab4a873cf | estmator_project/est_client/forms.py | estmator_project/est_client/forms.py | from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
| from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
'first_name': TextInput(attrs={'required': True}),
'last_name': TextInput(attrs={'required': True}),
'title': TextInput(attrs={'required': True}),
'cell': TextInput(attrs={'required': True}),
'email': TextInput(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
'phone': TextInput(attrs={'required': True}),
'address': TextInput(attrs={'required': True}),
'city': TextInput(attrs={'required': True}),
'postal': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
| Make fields required on new client and company | Make fields required on new client and company
| Python | mit | Estmator/EstmatorApp,Estmator/EstmatorApp,Estmator/EstmatorApp |
Subsets and Splits