| column | dtype |
| --- | --- |
| commit | stringlengths 40..40 |
| old_file | stringlengths 4..150 |
| new_file | stringlengths 4..150 |
| old_contents | stringlengths 0..3.26k |
| new_contents | stringlengths 1..4.43k |
| subject | stringlengths 15..501 |
| message | stringlengths 15..4.06k |
| lang | stringclasses, 4 values |
| license | stringclasses, 13 values |
| repos | stringlengths 5..91.5k |
| diff | stringlengths 0..4.35k |
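Each row below is one record of this schema: a file's contents before and after a single commit, together with the commit subject, full message, language, license, the repositories the file appears in, and the unified diff. As a rough illustration of how a table with these columns could be consumed, the sketch below loads and filters such a dataset; the dataset id `user/commit-diffs` and the split name are placeholders, not taken from this page.

```python
# Sketch only: "user/commit-diffs" is a hypothetical dataset id used for illustration.
from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train")

# Keep Python records that actually carry a diff, then inspect one of them.
py_rows = ds.filter(lambda row: row["lang"] == "Python" and len(row["diff"]) > 0)
record = py_rows[0]
print(record["commit"], record["old_file"])
print(record["subject"])
print(record["diff"])
```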
200f3a0bd0edd5a7409f36769cc8401b468bdb64 | puzzlehunt_server/settings/travis_settings.py | puzzlehunt_server/settings/travis_settings.py | from .base_settings import *
import os
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
SECRET_KEY = '$1B&VUf$OdUEfMJXd40qdakA36@%2NE_41Dz9tFs6l=z4v_3P-'
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'puzzlehunt_db',
        'HOST': '127.0.0.1',
        'USER': 'root',
        'PASSWORD': '',
        'OPTIONS': {'charset': 'utf8mb4'},
    }
}
INTERNAL_IPS = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
ALLOWED_HOSTS = ['*']
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['console'],
            'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
        },
    },
} | from .base_settings import *
import os
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
SECRET_KEY = '$1B&VUf$OdUEfMJXd40qdakA36@%2NE_41Dz9tFs6l=z4v_3P-'
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'puzzlehunt_db',
        'HOST': '127.0.0.1',
        'USER': 'root',
        'PASSWORD': '',
    }
}
INTERNAL_IPS = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
ALLOWED_HOSTS = ['*']
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
        },
    },
    'loggers': {
        'django': {
            'handlers': ['console'],
            'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
        },
    },
}
| Fix database for testing environment | Fix database for testing environment
| Python | mit | dlareau/puzzlehunt_server,dlareau/puzzlehunt_server,dlareau/puzzlehunt_server,dlareau/puzzlehunt_server | ---
+++
@@ -7,12 +7,11 @@
SECRET_KEY = '$1B&VUf$OdUEfMJXd40qdakA36@%2NE_41Dz9tFs6l=z4v_3P-'
DATABASES = {
'default': {
- 'ENGINE': 'django.db.backends.mysql',
+ 'ENGINE': 'django.db.backends.postgresql',
'NAME': 'puzzlehunt_db',
'HOST': '127.0.0.1',
'USER': 'root',
'PASSWORD': '',
- 'OPTIONS': {'charset': 'utf8mb4'},
}
}
INTERNAL_IPS = '' |
fc1e05658eb7e1fb2722467b5da5df622145eece | server/lib/python/cartodb_services/setup.py | server/lib/python/cartodb_services/setup.py | """
CartoDB Services Python Library
See:
https://github.com/CartoDB/geocoder-api
"""
from setuptools import setup, find_packages
setup(
    name='cartodb_services',
    version='0.13.0',
    description='CartoDB Services API Python Library',
    url='https://github.com/CartoDB/dataservices-api',
    author='Data Services Team - CartoDB',
    author_email='[email protected]',
    license='MIT',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Mapping comunity',
        'Topic :: Maps :: Mapping Tools',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7',
    ],
    keywords='maps api mapping tools geocoder routing',
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),
    extras_require={
        'dev': ['unittest'],
        'test': ['unittest', 'nose', 'mockredispy', 'mock'],
    }
)
| """
CartoDB Services Python Library
See:
https://github.com/CartoDB/geocoder-api
"""
from setuptools import setup, find_packages
setup(
    name='cartodb_services',
    version='0.14.0',
    description='CartoDB Services API Python Library',
    url='https://github.com/CartoDB/dataservices-api',
    author='Data Services Team - CartoDB',
    author_email='[email protected]',
    license='MIT',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Mapping comunity',
        'Topic :: Maps :: Mapping Tools',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7',
    ],
    keywords='maps api mapping tools geocoder routing',
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),
    extras_require={
        'dev': ['unittest'],
        'test': ['unittest', 'nose', 'mockredispy', 'mock'],
    }
)
| Bump version of python library to 0.14.0 | Bump version of python library to 0.14.0
| Python | bsd-3-clause | CartoDB/geocoder-api,CartoDB/dataservices-api,CartoDB/geocoder-api,CartoDB/dataservices-api,CartoDB/dataservices-api,CartoDB/geocoder-api,CartoDB/dataservices-api,CartoDB/geocoder-api | ---
+++
@@ -10,7 +10,7 @@
setup(
name='cartodb_services',
- version='0.13.0',
+ version='0.14.0',
description='CartoDB Services API Python Library',
|
022f2cc6d067769a6c8e56601c0238aac69ec9ab | jfr_playoff/settings.py | jfr_playoff/settings.py | import glob, json, os, readline, sys
def complete_filename(text, state):
    return (glob.glob(text+'*')+[None])[state]
class PlayoffSettings:
    def __init__(self):
        self.interactive = False
        self.settings_file = None
        if len(sys.argv) > 1:
            self.settings_file = sys.argv[1]
        else:
            self.interactive = True
    def load(self):
        if self.settings_file is None:
            readline.set_completer_delims(' \t\n;')
            readline.parse_and_bind("tab: complete")
            readline.set_completer(complete_filename)
            self.settings_file = raw_input('JSON settings file: ')
        self.settings = json.load(open(self.settings_file))
    def has_section(self, key):
        self.load()
        return key in self.settings
    def get(self, *keys):
        self.load()
        section = self.settings
        for key in keys:
            section = section[key]
        return section
| import glob, json, os, readline, sys
def complete_filename(text, state):
    return (glob.glob(text+'*')+[None])[state]
class PlayoffSettings:
    def __init__(self):
        self.settings = None
        self.interactive = False
        self.settings_file = None
        if len(sys.argv) > 1:
            self.settings_file = sys.argv[1]
        else:
            self.interactive = True
    def load(self):
        if self.settings_file is None:
            readline.set_completer_delims(' \t\n;')
            readline.parse_and_bind("tab: complete")
            readline.set_completer(complete_filename)
            self.settings_file = raw_input('JSON settings file: ')
        if self.settings is None:
            self.settings = json.load(open(self.settings_file))
    def has_section(self, key):
        self.load()
        return key in self.settings
    def get(self, *keys):
        self.load()
        section = self.settings
        for key in keys:
            section = section[key]
        return section
| Load config file only once | Load config file only once
| Python | bsd-2-clause | emkael/jfrteamy-playoff,emkael/jfrteamy-playoff | ---
+++
@@ -6,6 +6,7 @@
class PlayoffSettings:
def __init__(self):
+ self.settings = None
self.interactive = False
self.settings_file = None
if len(sys.argv) > 1:
@@ -20,7 +21,8 @@
readline.set_completer(complete_filename)
self.settings_file = raw_input('JSON settings file: ')
- self.settings = json.load(open(self.settings_file))
+ if self.settings is None:
+ self.settings = json.load(open(self.settings_file))
def has_section(self, key):
self.load() |
19faa280c924254b960a8b9fcb716017e51db09f | pymks/tests/test_mksRegressionModel.py | pymks/tests/test_mksRegressionModel.py | from pymks import MKSRegressionModel
import numpy as np
def test():
    Nbin = 2
    Nspace = 81
    Nsample = 400
    def filter(x):
        return np.where(x < 10,
                        np.exp(-abs(x)) * np.cos(x * np.pi),
                        np.exp(-abs(x - 20)) * np.cos((x - 20) * np.pi))
    coeff = np.linspace(1, 0, Nbin)[None,:] * filter(np.linspace(0, 20, Nspace))[:,None]
    Fcoeff = np.fft.fft(coeff, axis=0)
    np.random.seed(2)
    X = np.random.random((Nsample, Nspace))
    H = np.linspace(0, 1, Nbin)
    X_ = np.maximum(1 - abs(X[:,:,None] - H) / (H[1] - H[0]), 0)
    FX = np.fft.fft(X_, axis=1)
    Fy = np.sum(Fcoeff[None] * FX, axis=-1)
    y = np.fft.ifft(Fy, axis=1).real
    model = MKSRegressionModel(Nbin=Nbin)
    model.fit(X, y)
    model.coeff = np.fft.ifft(model.Fcoeff, axis=0)
    assert np.allclose(coeff, model.coeff)
if __name__ == '__main__':
    test()
| from pymks import MKSRegressionModel
import numpy as np
def test():
    Nbin = 2
    Nspace = 81
    Nsample = 400
    def filter(x):
        return np.where(x < 10,
                        np.exp(-abs(x)) * np.cos(x * np.pi),
                        np.exp(-abs(x - 20)) * np.cos((x - 20) * np.pi))
    coeff = np.linspace(1, 0, Nbin)[None,:] * filter(np.linspace(0, 20, Nspace))[:,None]
    Fcoeff = np.fft.fft(coeff, axis=0)
    np.random.seed(2)
    X = np.random.random((Nsample, Nspace))
    H = np.linspace(0, 1, Nbin)
    X_ = np.maximum(1 - abs(X[:,:,None] - H) / (H[1] - H[0]), 0)
    FX = np.fft.fft(X_, axis=1)
    Fy = np.sum(Fcoeff[None] * FX, axis=-1)
    y = np.fft.ifft(Fy, axis=1).real
    model = MKSRegressionModel(Nbin=Nbin)
    model.fit(X, y)
    assert np.allclose(np.fft.fftshift(coeff, axes=(0,)), model.coeff)
if __name__ == '__main__':
    test()
| Fix test due to addition of coeff property | Fix test due to addition of coeff property
Address #49
Add fftshift to test coefficients as model.coeff now returns the
shifted real versions.
| Python | mit | davidbrough1/pymks,XinyiGong/pymks,awhite40/pymks,davidbrough1/pymks,fredhohman/pymks | ---
+++
@@ -26,9 +26,8 @@
model = MKSRegressionModel(Nbin=Nbin)
model.fit(X, y)
- model.coeff = np.fft.ifft(model.Fcoeff, axis=0)
- assert np.allclose(coeff, model.coeff)
+ assert np.allclose(np.fft.fftshift(coeff, axes=(0,)), model.coeff)
if __name__ == '__main__':
test() |
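The fix in the record above relies on `np.fft.fftshift` along axis 0: the expected coefficients are re-ordered so that the zero-frequency entry sits in the middle of the spatial axis before the comparison. A minimal NumPy-only illustration of that reordering (independent of pymks):

```python
import numpy as np

# fftshift moves the zero-frequency element to the centre of the chosen axis.
a = np.arange(6).reshape(6, 1)           # stand-in for coefficients of shape (Nspace, Nbin)
shifted = np.fft.fftshift(a, axes=(0,))  # only axis 0 is reordered
print(a.ravel())        # [0 1 2 3 4 5]
print(shifted.ravel())  # [3 4 5 0 1 2]
```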
0336651c6538d756eb40babe086975a0f7fcabd6 | qual/tests/test_historical_calendar.py | qual/tests/test_historical_calendar.py | from test_calendar import CalendarTest
from qual.calendars import EnglishHistoricalCalendar
class TestHistoricalCalendar(object):
    def setUp(self):
        self.calendar = self.calendar_type()
    def test_before_switch(self):
        for triplet in self.julian_triplets:
            self.check_valid_date(*triplet)
    def test_after_switch(self):
        for triplet in self.gregorian_triplets:
            self.check_valid_date(*triplet)
    def test_during_switch(self):
        for triplet in self.transition_triplets:
            self.check_invalid_date(*triplet)
class TestEnglishHistoricalCalendar(TestHistoricalCalendar, CalendarTest):
    calendar_type = EnglishHistoricalCalendar
    gregorian_triplets = [(1752, 9, 13)]
    julian_triplets = [(1752, 9, 1)]
    transition_triplets = [(1752, 9, 6)]
| from test_calendar import CalendarTest
from qual.calendars import EnglishHistoricalCalendar
class TestHistoricalCalendar(object):
    def setUp(self):
        self.calendar = self.calendar_type()
    def test_before_switch(self):
        for triplet in self.julian_triplets:
            self.check_valid_date(*triplet)
    def test_after_switch(self):
        for triplet in self.gregorian_triplets:
            self.check_valid_date(*triplet)
    def test_during_switch(self):
        for triplet in self.transition_triplets:
            self.check_invalid_date(*triplet)
class TestEnglishHistoricalCalendar(TestHistoricalCalendar, CalendarTest):
    calendar_type = EnglishHistoricalCalendar
    gregorian_triplets = [(1752, 9, 14)]
    julian_triplets = [(1752, 9, 1), (1752, 9, 2)]
    transition_triplets = [(1752, 9, 3), (1752, 9, 6), (1752, 9, 13)]
| Correct test for the right missing days and present days. | Correct test for the right missing days and present days.
1st and 2nd of September 1752 happened, so did 14th. 3rd to 13th did not.
| Python | apache-2.0 | jwg4/qual,jwg4/calexicon | ---
+++
@@ -20,6 +20,6 @@
class TestEnglishHistoricalCalendar(TestHistoricalCalendar, CalendarTest):
calendar_type = EnglishHistoricalCalendar
- gregorian_triplets = [(1752, 9, 13)]
- julian_triplets = [(1752, 9, 1)]
- transition_triplets = [(1752, 9, 6)]
+ gregorian_triplets = [(1752, 9, 14)]
+ julian_triplets = [(1752, 9, 1), (1752, 9, 2)]
+ transition_triplets = [(1752, 9, 3), (1752, 9, 6), (1752, 9, 13)] |
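The corrected triplets in the record above encode the British switch of September 1752, where 2 September was followed directly by 14 September. Python's standard library uses the proleptic Gregorian calendar and therefore accepts the skipped days, which is exactly why a dedicated historical calendar class is being tested; a quick standard-library check (not part of the project under test):

```python
from datetime import date, timedelta

# The proleptic Gregorian calendar has no gap in September 1752.
d = date(1752, 9, 2)
print(d + timedelta(days=1))  # 1752-09-03, a date that never existed in England
```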
d595f953a8993afd94f1616fbf815afe0b85a646 | scripts/master/factory/dart/channels.py | scripts/master/factory/dart/channels.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
  def __init__(self, name, branch, position, category_postfix, priority):
    self.branch = branch
    self.builder_postfix = '-' + name
    self.category_postfix = category_postfix
    self.name = name
    self.position = position
    self.priority = priority
    self.all_deps_path = '/' + branch + '/deps/all.deps'
    self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
    self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
  Channel('be', 'branches/bleeding_edge', 0, '', 4),
  Channel('dev', 'trunk', 1, '-dev', 2),
  Channel('stable', 'branches/1.7', 2, '-stable', 1),
  Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
  CHANNELS_BY_NAME[c.name] = c
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
  def __init__(self, name, branch, position, category_postfix, priority):
    self.branch = branch
    self.builder_postfix = '-' + name
    self.category_postfix = category_postfix
    self.name = name
    self.position = position
    self.priority = priority
    self.all_deps_path = '/' + branch + '/deps/all.deps'
    self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
    self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
  Channel('be', 'branches/bleeding_edge', 0, '', 4),
  Channel('dev', 'trunk', 1, '-dev', 2),
  Channel('stable', 'branches/1.8', 2, '-stable', 1),
  Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
  CHANNELS_BY_NAME[c.name] = c
| Make stable builders pull from 1.8 | Make stable builders pull from 1.8
Review URL: https://codereview.chromium.org/760053002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@293121 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build | ---
+++
@@ -19,7 +19,7 @@
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
- Channel('stable', 'branches/1.7', 2, '-stable', 1),
+ Channel('stable', 'branches/1.8', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
|
2eff8e08e1e16463e526503db46ae5c21138d776 | tests/storage/servers/fastmail/__init__.py | tests/storage/servers/fastmail/__init__.py | import os
import pytest
class ServerMixin:
    @pytest.fixture
    def get_storage_args(self, item_type, slow_create_collection):
        if item_type == "VTODO":
            # Fastmail has non-standard support for TODOs
            # See https://github.com/pimutils/vdirsyncer/issues/824
            pytest.skip("Fastmail has non-standard VTODO support.")
        async def inner(collection="test"):
            args = {
                "username": os.environ["FASTMAIL_USERNAME"],
                "password": os.environ["FASTMAIL_PASSWORD"],
            }
            if self.storage_class.fileext == ".ics":
                args["url"] = "https://caldav.fastmail.com/"
            elif self.storage_class.fileext == ".vcf":
                args["url"] = "https://carddav.fastmail.com/"
            else:
                raise RuntimeError()
            if collection is not None:
                args = slow_create_collection(self.storage_class, args, collection)
            return args
        return inner
| import os
import pytest
class ServerMixin:
    @pytest.fixture
    def get_storage_args(self, item_type, slow_create_collection, aio_connector):
        if item_type == "VTODO":
            # Fastmail has non-standard support for TODOs
            # See https://github.com/pimutils/vdirsyncer/issues/824
            pytest.skip("Fastmail has non-standard VTODO support.")
        async def inner(collection="test"):
            args = {
                "username": os.environ["FASTMAIL_USERNAME"],
                "password": os.environ["FASTMAIL_PASSWORD"],
                "connector": aio_connector,
            }
            if self.storage_class.fileext == ".ics":
                args["url"] = "https://caldav.fastmail.com/"
            elif self.storage_class.fileext == ".vcf":
                args["url"] = "https://carddav.fastmail.com/"
            else:
                raise RuntimeError()
            if collection is not None:
                args = await slow_create_collection(
                    self.storage_class,
                    args,
                    collection,
                )
            return args
        return inner
| Fix breakage in Fastmail tests | Fix breakage in Fastmail tests
Some code that wasn't updated with the switch to asyncio.
| Python | mit | untitaker/vdirsyncer,untitaker/vdirsyncer,untitaker/vdirsyncer | ---
+++
@@ -5,7 +5,7 @@
class ServerMixin:
@pytest.fixture
- def get_storage_args(self, item_type, slow_create_collection):
+ def get_storage_args(self, item_type, slow_create_collection, aio_connector):
if item_type == "VTODO":
# Fastmail has non-standard support for TODOs
# See https://github.com/pimutils/vdirsyncer/issues/824
@@ -15,6 +15,7 @@
args = {
"username": os.environ["FASTMAIL_USERNAME"],
"password": os.environ["FASTMAIL_PASSWORD"],
+ "connector": aio_connector,
}
if self.storage_class.fileext == ".ics":
@@ -25,7 +26,12 @@
raise RuntimeError()
if collection is not None:
- args = slow_create_collection(self.storage_class, args, collection)
+ args = await slow_create_collection(
+ self.storage_class,
+ args,
+ collection,
+ )
+
return args
return inner |
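The breakage fixed in the record above is a common pitfall after a helper becomes a coroutine: calling it without `await` returns a coroutine object rather than its result. A minimal standard-library sketch of the failure mode (the `make_args` helper here is illustrative, not vdirsyncer's API):

```python
import asyncio

async def make_args(collection):
    # Stand-in for an async helper such as the collection-creating fixture above.
    return {"collection": collection}

async def main():
    broken = make_args("test")       # missing await: a coroutine object, not a dict
    print(type(broken).__name__)     # "coroutine"
    broken.close()                   # avoid the "never awaited" warning
    fixed = await make_args("test")  # awaited: the actual arguments
    print(fixed)                     # {'collection': 'test'}

asyncio.run(main())
```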
6833865ff35d451a8215803b9fa74cd57167ed82 | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'aa4874a6bcc51fdd87ca7ae0928514ce83645988'
| #!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '9c654df782c77449e7d8fa741843143145260aeb'
| Update libchromiumcontent: Contain linux symbols. | Update libchromiumcontent: Contain linux symbols.
| Python | mit | trankmichael/electron,gabrielPeart/electron,felixrieseberg/electron,seanchas116/electron,bitemyapp/electron,xfstudio/electron,leolujuyi/electron,pombredanne/electron,astoilkov/electron,gabriel/electron,mubassirhayat/electron,smczk/electron,Jacobichou/electron,medixdev/electron,yalexx/electron,natgolov/electron,rsvip/electron,Neron-X5/electron,tomashanacek/electron,systembugtj/electron,webmechanicx/electron,fomojola/electron,felixrieseberg/electron,mubassirhayat/electron,cqqccqc/electron,wan-qy/electron,gerhardberger/electron,LadyNaggaga/electron,rsvip/electron,vaginessa/electron,Floato/electron,nagyistoce/electron-atom-shell,gbn972/electron,Rokt33r/electron,xiruibing/electron,joaomoreno/atom-shell,kazupon/electron,oiledCode/electron,stevekinney/electron,miniak/electron,fabien-d/electron,fritx/electron,ankitaggarwal011/electron,RIAEvangelist/electron,arturts/electron,joaomoreno/atom-shell,jtburke/electron,dkfiresky/electron,tomashanacek/electron,tomashanacek/electron,synaptek/electron,deed02392/electron,chrisswk/electron,saronwei/electron,lrlna/electron,lzpfmh/electron,michaelchiche/electron,abhishekgahlot/electron,simongregory/electron,smczk/electron,farmisen/electron,Jacobichou/electron,subblue/electron,BionicClick/electron,jannishuebl/electron,deed02392/electron,seanchas116/electron,DivyaKMenon/electron,shennushi/electron,Rokt33r/electron,GoooIce/electron,shockone/electron,neutrous/electron,mattdesl/electron,dahal/electron,mattotodd/electron,michaelchiche/electron,aliib/electron,simonfork/electron,seanchas116/electron,roadev/electron,simongregory/electron,saronwei/electron,SufianHassan/electron,pirafrank/electron,Jonekee/electron,trigrass2/electron,fabien-d/electron,systembugtj/electron,rhencke/electron,tincan24/electron,dongjoon-hyun/electron,voidbridge/electron,gabrielPeart/electron,mhkeller/electron,minggo/electron,bwiggs/electron,tincan24/electron,arusakov/electron,shaundunne/electron,ervinb/electron,stevemao/electron,fffej/electron,egoist/electron,icattlecoder/electron,wan-qy/electron,kikong/electron,bobwol/electron,Neron-X5/electron,takashi/electron,rreimann/electron,jlord/electron,renaesop/electron,wolfflow/electron,rajatsingla28/electron,bitemyapp/electron,bpasero/electron,joneit/electron,synaptek/electron,dahal/electron,aichingm/electron,anko/electron,bpasero/electron,wan-qy/electron,webmechanicx/electron,setzer777/electron,vaginessa/electron,simonfork/electron,soulteary/electron,icattlecoder/electron,aliib/electron,Gerhut/electron,RIAEvangelist/electron,tomashanacek/electron,faizalpribadi/electron,renaesop/electron,gstack/infinium-shell,nagyistoce/electron-atom-shell,vHanda/electron,baiwyc119/electron,fritx/electron,fomojola/electron,bright-sparks/electron,jonatasfreitasv/electron,nicholasess/electron,ankitaggarwal011/electron,JussMee15/electron,bright-sparks/electron,jlord/electron,DivyaKMenon/electron,oiledCode/electron,mattotodd/electron,micalan/electron,yan-foto/electron,BionicClick/electron,bruce/electron,maxogden/atom-shell,RobertJGabriel/electron,medixdev/electron,michaelchiche/electron,rsvip/electron,wan-qy/electron,brave/muon,Rokt33r/electron,carsonmcdonald/electron,trankmichael/electron,ankitaggarwal011/electron,eriser/electron,rhencke/electron,Neron-X5/electron,coderhaoxin/electron,shaundunne/electron,aliib/electron,deed02392/electron,ankitaggarwal011/electron,meowlab/electron,ervinb/electron,simongregory/electron,digideskio/electron,tonyganch/electron,cos2004/electron,aecca/electron,LadyNaggaga/electron,Faiz7412/electron,maxogden/atom-shell,sky7sea/e
lectron,aaron-goshine/electron,gamedevsam/electron,hokein/atom-shell,kenmozi/electron,matiasinsaurralde/electron,shaundunne/electron,pandoraui/electron,stevekinney/electron,pirafrank/electron,yan-foto/electron,MaxGraey/electron,zhakui/electron,RIAEvangelist/electron,preco21/electron,Faiz7412/electron,kostia/electron,beni55/electron,arturts/electron,synaptek/electron,brave/electron,noikiy/electron,oiledCode/electron,simonfork/electron,meowlab/electron,preco21/electron,tinydew4/electron,benweissmann/electron,fritx/electron,sky7sea/electron,ervinb/electron,synaptek/electron,nekuz0r/electron,sircharleswatson/electron,jlhbaseball15/electron,Andrey-Pavlov/electron,kcrt/electron,rsvip/electron,leethomas/electron,roadev/electron,lzpfmh/electron,greyhwndz/electron,vaginessa/electron,farmisen/electron,coderhaoxin/electron,biblerule/UMCTelnetHub,mrwizard82d1/electron,zhakui/electron,trigrass2/electron,mjaniszew/electron,bpasero/electron,Jonekee/electron,bwiggs/electron,bpasero/electron,bobwol/electron,dongjoon-hyun/electron,JesselJohn/electron,SufianHassan/electron,digideskio/electron,MaxWhere/electron,bbondy/electron,thomsonreuters/electron,edulan/electron,Neron-X5/electron,fomojola/electron,leethomas/electron,tylergibson/electron,arusakov/electron,dongjoon-hyun/electron,twolfson/electron,renaesop/electron,Evercoder/electron,etiktin/electron,the-ress/electron,jsutcodes/electron,kikong/electron,Jonekee/electron,minggo/electron,sshiting/electron,xiruibing/electron,rreimann/electron,deepak1556/atom-shell,deed02392/electron,arturts/electron,bright-sparks/electron,RobertJGabriel/electron,saronwei/electron,electron/electron,cqqccqc/electron,mirrh/electron,biblerule/UMCTelnetHub,the-ress/electron,pombredanne/electron,d-salas/electron,jacksondc/electron,pirafrank/electron,JesselJohn/electron,rhencke/electron,chriskdon/electron,takashi/electron,Gerhut/electron,bobwol/electron,the-ress/electron,yan-foto/electron,MaxGraey/electron,thingsinjars/electron,icattlecoder/electron,edulan/electron,jlord/electron,shaundunne/electron,iftekeriba/electron,gerhardberger/electron,Zagorakiss/electron,aichingm/electron,sky7sea/electron,John-Lin/electron,lzpfmh/electron,stevemao/electron,thomsonreuters/electron,howmuchcomputer/electron,sircharleswatson/electron,greyhwndz/electron,Floato/electron,thingsinjars/electron,matiasinsaurralde/electron,chrisswk/electron,adamjgray/electron,brave/electron,jsutcodes/electron,mirrh/electron,RIAEvangelist/electron,shiftkey/electron,cqqccqc/electron,the-ress/electron,fritx/electron,joneit/electron,trankmichael/electron,joaomoreno/atom-shell,soulteary/electron,anko/electron,GoooIce/electron,felixrieseberg/electron,brenca/electron,benweissmann/electron,mjaniszew/electron,bbondy/electron,tincan24/electron,neutrous/electron,howmuchcomputer/electron,jtburke/electron,voidbridge/electron,bbondy/electron,eric-seekas/electron,MaxWhere/electron,gabriel/electron,Ivshti/electron,mattotodd/electron,sshiting/electron,aecca/electron,shiftkey/electron,zhakui/electron,MaxWhere/electron,evgenyzinoviev/electron,sshiting/electron,xiruibing/electron,dkfiresky/electron,kenmozi/electron,dongjoon-hyun/electron,leethomas/electron,cqqccqc/electron,natgolov/electron,sshiting/electron,jacksondc/electron,nicobot/electron,smczk/electron,jaanus/electron,tincan24/electron,shiftkey/electron,jlord/electron,the-ress/electron,webmechanicx/electron,xfstudio/electron,mattdesl/electron,coderhaoxin/electron,pombredanne/electron,bright-sparks/electron,xiruibing/electron,jjz/electron,mirrh/electron,noikiy/electron,nicobot/electron,le
ftstick/electron,preco21/electron,jjz/electron,eric-seekas/electron,anko/electron,darwin/electron,fritx/electron,jonatasfreitasv/electron,nicholasess/electron,bright-sparks/electron,electron/electron,John-Lin/electron,aaron-goshine/electron,SufianHassan/electron,shiftkey/electron,rajatsingla28/electron,rreimann/electron,natgolov/electron,natgolov/electron,Gerhut/electron,shockone/electron,electron/electron,trankmichael/electron,aaron-goshine/electron,bpasero/electron,JussMee15/electron,SufianHassan/electron,yalexx/electron,kcrt/electron,jsutcodes/electron,abhishekgahlot/electron,deed02392/electron,zhakui/electron,zhakui/electron,christian-bromann/electron,egoist/electron,benweissmann/electron,kcrt/electron,Andrey-Pavlov/electron,kikong/electron,adcentury/electron,fffej/electron,darwin/electron,BionicClick/electron,jiaz/electron,Floato/electron,IonicaBizauKitchen/electron,webmechanicx/electron,pombredanne/electron,jsutcodes/electron,systembugtj/electron,beni55/electron,fomojola/electron,destan/electron,vaginessa/electron,eric-seekas/electron,rprichard/electron,bbondy/electron,Zagorakiss/electron,JesselJohn/electron,gamedevsam/electron,joneit/electron,gabriel/electron,bruce/electron,tincan24/electron,jacksondc/electron,micalan/electron,edulan/electron,brave/electron,wolfflow/electron,kazupon/electron,felixrieseberg/electron,darwin/electron,fffej/electron,mjaniszew/electron,kokdemo/electron,DivyaKMenon/electron,etiktin/electron,IonicaBizauKitchen/electron,voidbridge/electron,trigrass2/electron,chriskdon/electron,arusakov/electron,stevekinney/electron,rajatsingla28/electron,davazp/electron,greyhwndz/electron,neutrous/electron,Andrey-Pavlov/electron,fomojola/electron,d-salas/electron,tonyganch/electron,abhishekgahlot/electron,darwin/electron,icattlecoder/electron,subblue/electron,hokein/atom-shell,takashi/electron,wan-qy/electron,systembugtj/electron,jiaz/electron,vipulroxx/electron,d-salas/electron,xiruibing/electron,jiaz/electron,jacksondc/electron,shockone/electron,robinvandernoord/electron,etiktin/electron,fireball-x/atom-shell,davazp/electron,dongjoon-hyun/electron,biblerule/UMCTelnetHub,nagyistoce/electron-atom-shell,jhen0409/electron,twolfson/electron,ianscrivener/electron,mattdesl/electron,gamedevsam/electron,nicobot/electron,soulteary/electron,Evercoder/electron,rreimann/electron,preco21/electron,tylergibson/electron,medixdev/electron,faizalpribadi/electron,faizalpribadi/electron,adamjgray/electron,tinydew4/electron,SufianHassan/electron,icattlecoder/electron,nicholasess/electron,lrlna/electron,baiwyc119/electron,brenca/electron,leolujuyi/electron,tonyganch/electron,fomojola/electron,hokein/atom-shell,nicholasess/electron,micalan/electron,oiledCode/electron,kokdemo/electron,GoooIce/electron,fffej/electron,adamjgray/electron,gstack/infinium-shell,kcrt/electron,bruce/electron,dahal/electron,tylergibson/electron,lrlna/electron,soulteary/electron,kcrt/electron,cqqccqc/electron,gbn972/electron,iftekeriba/electron,abhishekgahlot/electron,adamjgray/electron,jjz/electron,egoist/electron,faizalpribadi/electron,ianscrivener/electron,deepak1556/atom-shell,bruce/electron,webmechanicx/electron,the-ress/electron,farmisen/electron,nagyistoce/electron-atom-shell,destan/electron,jannishuebl/electron,shockone/electron,joaomoreno/atom-shell,bwiggs/electron,jlhbaseball15/electron,chrisswk/electron,d-salas/electron,michaelchiche/electron,adamjgray/electron,twolfson/electron,soulteary/electron,bwiggs/electron,carsonmcdonald/electron,jaanus/electron,gerhardberger/electron,kenmozi/electron,xfstudio/electron,ho
wmuchcomputer/electron,leolujuyi/electron,coderhaoxin/electron,Zagorakiss/electron,roadev/electron,xfstudio/electron,carsonmcdonald/electron,thomsonreuters/electron,yan-foto/electron,shennushi/electron,anko/electron,vHanda/electron,jhen0409/electron,Floato/electron,electron/electron,fabien-d/electron,bbondy/electron,carsonmcdonald/electron,faizalpribadi/electron,pandoraui/electron,Faiz7412/electron,smczk/electron,subblue/electron,kenmozi/electron,noikiy/electron,nicholasess/electron,tylergibson/electron,nekuz0r/electron,rsvip/electron,cos2004/electron,brenca/electron,christian-bromann/electron,jaanus/electron,astoilkov/electron,greyhwndz/electron,mhkeller/electron,Zagorakiss/electron,yan-foto/electron,jsutcodes/electron,deepak1556/atom-shell,carsonmcdonald/electron,iftekeriba/electron,mattotodd/electron,MaxGraey/electron,aaron-goshine/electron,stevekinney/electron,dahal/electron,thompsonemerson/electron,lzpfmh/electron,jlhbaseball15/electron,d-salas/electron,benweissmann/electron,voidbridge/electron,kazupon/electron,Jonekee/electron,vipulroxx/electron,vHanda/electron,brenca/electron,tincan24/electron,pombredanne/electron,Zagorakiss/electron,chriskdon/electron,biblerule/UMCTelnetHub,thompsonemerson/electron,rprichard/electron,subblue/electron,noikiy/electron,jtburke/electron,chriskdon/electron,howmuchcomputer/electron,gerhardberger/electron,eriser/electron,coderhaoxin/electron,MaxGraey/electron,gbn972/electron,joneit/electron,neutrous/electron,eriser/electron,astoilkov/electron,John-Lin/electron,mhkeller/electron,nicholasess/electron,felixrieseberg/electron,benweissmann/electron,kazupon/electron,mjaniszew/electron,bobwol/electron,mirrh/electron,gstack/infinium-shell,pirafrank/electron,leftstick/electron,jcblw/electron,fireball-x/atom-shell,RobertJGabriel/electron,mjaniszew/electron,pombredanne/electron,GoooIce/electron,gstack/infinium-shell,jjz/electron,nicobot/electron,gbn972/electron,anko/electron,tylergibson/electron,jonatasfreitasv/electron,faizalpribadi/electron,beni55/electron,ianscrivener/electron,timruffles/electron,mubassirhayat/electron,ervinb/electron,stevemao/electron,mattotodd/electron,IonicaBizauKitchen/electron,Jacobichou/electron,arusakov/electron,RobertJGabriel/electron,matiasinsaurralde/electron,bitemyapp/electron,davazp/electron,mjaniszew/electron,adcentury/electron,Evercoder/electron,tonyganch/electron,setzer777/electron,simongregory/electron,pandoraui/electron,Floato/electron,simongregory/electron,leftstick/electron,kcrt/electron,arusakov/electron,rreimann/electron,howmuchcomputer/electron,adamjgray/electron,John-Lin/electron,noikiy/electron,oiledCode/electron,roadev/electron,aecca/electron,dkfiresky/electron,bitemyapp/electron,preco21/electron,Evercoder/electron,benweissmann/electron,the-ress/electron,saronwei/electron,mattdesl/electron,darwin/electron,BionicClick/electron,jhen0409/electron,Andrey-Pavlov/electron,systembugtj/electron,mrwizard82d1/electron,RobertJGabriel/electron,astoilkov/electron,jiaz/electron,gamedevsam/electron,aichingm/electron,jtburke/electron,jhen0409/electron,greyhwndz/electron,mrwizard82d1/electron,mirrh/electron,minggo/electron,John-Lin/electron,destan/electron,brenca/electron,tomashanacek/electron,thompsonemerson/electron,leftstick/electron,greyhwndz/electron,ianscrivener/electron,kostia/electron,JussMee15/electron,sky7sea/electron,jcblw/electron,cos2004/electron,tinydew4/electron,maxogden/atom-shell,gerhardberger/electron,fritx/electron,etiktin/electron,IonicaBizauKitchen/electron,thompsonemerson/electron,lrlna/electron,roadev/electron,kokdem
o/electron,brave/muon,rajatsingla28/electron,shiftkey/electron,MaxGraey/electron,SufianHassan/electron,timruffles/electron,twolfson/electron,setzer777/electron,brave/electron,medixdev/electron,seanchas116/electron,saronwei/electron,edulan/electron,rhencke/electron,fireball-x/atom-shell,IonicaBizauKitchen/electron,shaundunne/electron,smczk/electron,MaxWhere/electron,chriskdon/electron,simonfork/electron,eriser/electron,jtburke/electron,chrisswk/electron,trigrass2/electron,brave/electron,nagyistoce/electron-atom-shell,digideskio/electron,joaomoreno/atom-shell,trankmichael/electron,synaptek/electron,gstack/infinium-shell,DivyaKMenon/electron,John-Lin/electron,mrwizard82d1/electron,systembugtj/electron,fireball-x/atom-shell,beni55/electron,jhen0409/electron,deepak1556/atom-shell,kostia/electron,eriser/electron,trigrass2/electron,Ivshti/electron,wolfflow/electron,gabriel/electron,dkfiresky/electron,JesselJohn/electron,thompsonemerson/electron,RIAEvangelist/electron,bpasero/electron,aaron-goshine/electron,hokein/atom-shell,mirrh/electron,jcblw/electron,gabrielPeart/electron,simongregory/electron,evgenyzinoviev/electron,matiasinsaurralde/electron,fffej/electron,matiasinsaurralde/electron,Andrey-Pavlov/electron,JussMee15/electron,fabien-d/electron,gabrielPeart/electron,renaesop/electron,dkfiresky/electron,ankitaggarwal011/electron,arturts/electron,LadyNaggaga/electron,LadyNaggaga/electron,jonatasfreitasv/electron,bwiggs/electron,leethomas/electron,kazupon/electron,d-salas/electron,mrwizard82d1/electron,farmisen/electron,timruffles/electron,vaginessa/electron,bright-sparks/electron,bobwol/electron,Rokt33r/electron,eric-seekas/electron,rajatsingla28/electron,setzer777/electron,baiwyc119/electron,xfstudio/electron,JussMee15/electron,Ivshti/electron,JesselJohn/electron,seanchas116/electron,biblerule/UMCTelnetHub,digideskio/electron,Ivshti/electron,thomsonreuters/electron,evgenyzinoviev/electron,egoist/electron,edulan/electron,BionicClick/electron,neutrous/electron,brave/muon,etiktin/electron,maxogden/atom-shell,maxogden/atom-shell,jcblw/electron,jtburke/electron,Evercoder/electron,nekuz0r/electron,xfstudio/electron,gbn972/electron,jannishuebl/electron,zhakui/electron,miniak/electron,pandoraui/electron,neutrous/electron,rhencke/electron,arturts/electron,natgolov/electron,jannishuebl/electron,Gerhut/electron,bruce/electron,Gerhut/electron,ianscrivener/electron,wan-qy/electron,takashi/electron,digideskio/electron,farmisen/electron,thingsinjars/electron,baiwyc119/electron,roadev/electron,leethomas/electron,adcentury/electron,voidbridge/electron,beni55/electron,coderhaoxin/electron,jonatasfreitasv/electron,saronwei/electron,jannishuebl/electron,bruce/electron,bpasero/electron,rprichard/electron,renaesop/electron,sircharleswatson/electron,Floato/electron,egoist/electron,jaanus/electron,JussMee15/electron,shockone/electron,jhen0409/electron,gabriel/electron,gabrielPeart/electron,electron/electron,leolujuyi/electron,christian-bromann/electron,BionicClick/electron,shockone/electron,rhencke/electron,kokdemo/electron,tonyganch/electron,trankmichael/electron,leftstick/electron,pirafrank/electron,mhkeller/electron,nekuz0r/electron,kenmozi/electron,aecca/electron,dongjoon-hyun/electron,IonicaBizauKitchen/electron,kokdemo/electron,Faiz7412/electron,leolujuyi/electron,stevekinney/electron,jjz/electron,kenmozi/electron,gbn972/electron,Jacobichou/electron,setzer777/electron,JesselJohn/electron,kikong/electron,aliib/electron,minggo/electron,shiftkey/electron,astoilkov/electron,pandoraui/electron,abhishekgahlot/electron,j
jz/electron,lzpfmh/electron,yalexx/electron,fffej/electron,miniak/electron,yalexx/electron,mattdesl/electron,matiasinsaurralde/electron,wolfflow/electron,gabrielPeart/electron,Jacobichou/electron,christian-bromann/electron,simonfork/electron,aaron-goshine/electron,miniak/electron,chrisswk/electron,robinvandernoord/electron,thingsinjars/electron,davazp/electron,electron/electron,Zagorakiss/electron,ankitaggarwal011/electron,aichingm/electron,aecca/electron,jlord/electron,joneit/electron,wolfflow/electron,tinydew4/electron,takashi/electron,micalan/electron,adcentury/electron,medixdev/electron,posix4e/electron,leethomas/electron,trigrass2/electron,etiktin/electron,lzpfmh/electron,sshiting/electron,brave/muon,leftstick/electron,abhishekgahlot/electron,tylergibson/electron,michaelchiche/electron,dahal/electron,davazp/electron,brave/muon,kokdemo/electron,noikiy/electron,Rokt33r/electron,jannishuebl/electron,mhkeller/electron,michaelchiche/electron,shaundunne/electron,rprichard/electron,jcblw/electron,carsonmcdonald/electron,aliib/electron,jaanus/electron,LadyNaggaga/electron,meowlab/electron,miniak/electron,jiaz/electron,simonfork/electron,shennushi/electron,thompsonemerson/electron,joneit/electron,meowlab/electron,jacksondc/electron,kikong/electron,egoist/electron,cos2004/electron,thomsonreuters/electron,timruffles/electron,minggo/electron,posix4e/electron,icattlecoder/electron,chriskdon/electron,aichingm/electron,RobertJGabriel/electron,micalan/electron,kostia/electron,Faiz7412/electron,sshiting/electron,farmisen/electron,vHanda/electron,renaesop/electron,digideskio/electron,kostia/electron,jonatasfreitasv/electron,gamedevsam/electron,LadyNaggaga/electron,posix4e/electron,aichingm/electron,twolfson/electron,sky7sea/electron,kostia/electron,lrlna/electron,cos2004/electron,wolfflow/electron,arusakov/electron,mhkeller/electron,anko/electron,jsutcodes/electron,ervinb/electron,micalan/electron,mattotodd/electron,natgolov/electron,pandoraui/electron,Andrey-Pavlov/electron,robinvandernoord/electron,jiaz/electron,timruffles/electron,smczk/electron,pirafrank/electron,mattdesl/electron,evgenyzinoviev/electron,webmechanicx/electron,RIAEvangelist/electron,miniak/electron,twolfson/electron,christian-bromann/electron,tinydew4/electron,sircharleswatson/electron,christian-bromann/electron,DivyaKMenon/electron,Gerhut/electron,fabien-d/electron,mrwizard82d1/electron,biblerule/UMCTelnetHub,nicobot/electron,rreimann/electron,evgenyzinoviev/electron,gabriel/electron,Rokt33r/electron,vaginessa/electron,aecca/electron,nekuz0r/electron,jacksondc/electron,ervinb/electron,mubassirhayat/electron,baiwyc119/electron,iftekeriba/electron,sircharleswatson/electron,vHanda/electron,arturts/electron,shennushi/electron,cos2004/electron,astoilkov/electron,tonyganch/electron,stevemao/electron,takashi/electron,tinydew4/electron,setzer777/electron,stevemao/electron,thomsonreuters/electron,robinvandernoord/electron,gamedevsam/electron,deed02392/electron,felixrieseberg/electron,brave/muon,nekuz0r/electron,MaxWhere/electron,yan-foto/electron,destan/electron,Neron-X5/electron,davazp/electron,yalexx/electron,brenca/electron,Ivshti/electron,jcblw/electron,aliib/electron,GoooIce/electron,bitemyapp/electron,lrlna/electron,rajatsingla28/electron,shennushi/electron,posix4e/electron,ianscrivener/electron,bobwol/electron,xiruibing/electron,subblue/electron,shennushi/electron,posix4e/electron,oiledCode/electron,robinvandernoord/electron,Evercoder/electron,vipulroxx/electron,evgenyzinoviev/electron,yalexx/electron,bitemyapp/electron,brave/electron
,thingsinjars/electron,sircharleswatson/electron,fireball-x/atom-shell,meowlab/electron,hokein/atom-shell,stevekinney/electron,dahal/electron,cqqccqc/electron,vHanda/electron,robinvandernoord/electron,dkfiresky/electron,Neron-X5/electron,eric-seekas/electron,MaxWhere/electron,vipulroxx/electron,howmuchcomputer/electron,eric-seekas/electron,iftekeriba/electron,destan/electron,thingsinjars/electron,adcentury/electron,GoooIce/electron,jlhbaseball15/electron,adcentury/electron,joaomoreno/atom-shell,leolujuyi/electron,gerhardberger/electron,jlhbaseball15/electron,preco21/electron,kazupon/electron,nicobot/electron,tomashanacek/electron,soulteary/electron,stevemao/electron,seanchas116/electron,mubassirhayat/electron,Jonekee/electron,jaanus/electron,eriser/electron,meowlab/electron,edulan/electron,vipulroxx/electron,subblue/electron,baiwyc119/electron,beni55/electron,Jacobichou/electron,minggo/electron,gerhardberger/electron,DivyaKMenon/electron,electron/electron,iftekeriba/electron,sky7sea/electron,Jonekee/electron,jlhbaseball15/electron,medixdev/electron,posix4e/electron,bwiggs/electron,bbondy/electron,synaptek/electron,voidbridge/electron,deepak1556/atom-shell,vipulroxx/electron,destan/electron | ---
+++
@@ -2,4 +2,4 @@
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
-LIBCHROMIUMCONTENT_COMMIT = 'aa4874a6bcc51fdd87ca7ae0928514ce83645988'
+LIBCHROMIUMCONTENT_COMMIT = '9c654df782c77449e7d8fa741843143145260aeb' |
c56480fd8905332e54649dac0ade95c825e8ba23 | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'b27290717c08f8c6a58067d3c3725d68b4e6a2e5'
| #!/usr/bin/env python
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'fe05f53f3080889ced2696b2741d93953e654b49'
| Update libchromiumcontent to use the thin version. | Update libchromiumcontent to use the thin version.
| Python | mit | cos2004/electron,rajatsingla28/electron,bobwol/electron,tonyganch/electron,gbn972/electron,deed02392/electron,natgolov/electron,felixrieseberg/electron,bright-sparks/electron,adcentury/electron,jhen0409/electron,soulteary/electron,pandoraui/electron,subblue/electron,matiasinsaurralde/electron,jlord/electron,rreimann/electron,shockone/electron,mattdesl/electron,subblue/electron,eriser/electron,gabriel/electron,destan/electron,adamjgray/electron,shennushi/electron,aliib/electron,mattdesl/electron,vaginessa/electron,yalexx/electron,jsutcodes/electron,Gerhut/electron,kokdemo/electron,JussMee15/electron,subblue/electron,DivyaKMenon/electron,vaginessa/electron,aaron-goshine/electron,shennushi/electron,leolujuyi/electron,davazp/electron,stevemao/electron,Evercoder/electron,cos2004/electron,bitemyapp/electron,arturts/electron,smczk/electron,pombredanne/electron,brave/electron,yalexx/electron,jlord/electron,felixrieseberg/electron,evgenyzinoviev/electron,SufianHassan/electron,dahal/electron,bruce/electron,nekuz0r/electron,jcblw/electron,leolujuyi/electron,miniak/electron,RIAEvangelist/electron,lzpfmh/electron,brave/muon,carsonmcdonald/electron,simonfork/electron,fireball-x/atom-shell,benweissmann/electron,jtburke/electron,xiruibing/electron,Jonekee/electron,RIAEvangelist/electron,mirrh/electron,jcblw/electron,timruffles/electron,the-ress/electron,pirafrank/electron,xiruibing/electron,xiruibing/electron,coderhaoxin/electron,twolfson/electron,chriskdon/electron,deepak1556/atom-shell,sky7sea/electron,saronwei/electron,bright-sparks/electron,mubassirhayat/electron,deepak1556/atom-shell,benweissmann/electron,kcrt/electron,arusakov/electron,kikong/electron,nagyistoce/electron-atom-shell,matiasinsaurralde/electron,icattlecoder/electron,saronwei/electron,thomsonreuters/electron,wan-qy/electron,aliib/electron,joaomoreno/atom-shell,gerhardberger/electron,gstack/infinium-shell,greyhwndz/electron,fomojola/electron,smczk/electron,stevekinney/electron,abhishekgahlot/electron,robinvandernoord/electron,bruce/electron,rsvip/electron,carsonmcdonald/electron,gstack/infinium-shell,Floato/electron,voidbridge/electron,cos2004/electron,kazupon/electron,webmechanicx/electron,simongregory/electron,beni55/electron,takashi/electron,beni55/electron,leftstick/electron,dkfiresky/electron,shennushi/electron,twolfson/electron,shennushi/electron,matiasinsaurralde/electron,christian-bromann/electron,pandoraui/electron,shockone/electron,vHanda/electron,kokdemo/electron,soulteary/electron,arturts/electron,faizalpribadi/electron,systembugtj/electron,biblerule/UMCTelnetHub,aichingm/electron,tonyganch/electron,brenca/electron,hokein/atom-shell,dongjoon-hyun/electron,minggo/electron,jlhbaseball15/electron,anko/electron,Gerhut/electron,etiktin/electron,eric-seekas/electron,etiktin/electron,thompsonemerson/electron,leethomas/electron,twolfson/electron,soulteary/electron,tinydew4/electron,coderhaoxin/electron,trigrass2/electron,dahal/electron,sircharleswatson/electron,medixdev/electron,davazp/electron,micalan/electron,farmisen/electron,ianscrivener/electron,hokein/atom-shell,nicholasess/electron,roadev/electron,matiasinsaurralde/electron,bwiggs/electron,stevekinney/electron,MaxGraey/electron,rajatsingla28/electron,jannishuebl/electron,shaundunne/electron,seanchas116/electron,carsonmcdonald/electron,webmechanicx/electron,nicholasess/electron,aecca/electron,preco21/electron,faizalpribadi/electron,rsvip/electron,systembugtj/electron,rajatsingla28/electron,yan-foto/electron,the-ress/electron,mjaniszew/electron,d-salas/electron,s
oulteary/electron,jaanus/electron,brenca/electron,mrwizard82d1/electron,noikiy/electron,Jonekee/electron,adcentury/electron,darwin/electron,kikong/electron,iftekeriba/electron,nagyistoce/electron-atom-shell,fomojola/electron,chrisswk/electron,DivyaKMenon/electron,rhencke/electron,dahal/electron,Ivshti/electron,kazupon/electron,anko/electron,aecca/electron,jhen0409/electron,vHanda/electron,bwiggs/electron,darwin/electron,voidbridge/electron,tonyganch/electron,bobwol/electron,howmuchcomputer/electron,jsutcodes/electron,JesselJohn/electron,nekuz0r/electron,Ivshti/electron,Gerhut/electron,benweissmann/electron,minggo/electron,mirrh/electron,BionicClick/electron,Evercoder/electron,icattlecoder/electron,Faiz7412/electron,ankitaggarwal011/electron,fireball-x/atom-shell,vipulroxx/electron,davazp/electron,setzer777/electron,digideskio/electron,renaesop/electron,cos2004/electron,xfstudio/electron,SufianHassan/electron,subblue/electron,ervinb/electron,bobwol/electron,darwin/electron,Floato/electron,rajatsingla28/electron,BionicClick/electron,eriser/electron,jonatasfreitasv/electron,jlhbaseball15/electron,mrwizard82d1/electron,arturts/electron,MaxGraey/electron,jaanus/electron,shockone/electron,jacksondc/electron,evgenyzinoviev/electron,dkfiresky/electron,lzpfmh/electron,jsutcodes/electron,robinvandernoord/electron,smczk/electron,cos2004/electron,leethomas/electron,electron/electron,ankitaggarwal011/electron,setzer777/electron,gabrielPeart/electron,Rokt33r/electron,digideskio/electron,michaelchiche/electron,Gerhut/electron,rreimann/electron,digideskio/electron,wolfflow/electron,RobertJGabriel/electron,electron/electron,natgolov/electron,leolujuyi/electron,astoilkov/electron,gamedevsam/electron,fireball-x/atom-shell,icattlecoder/electron,rsvip/electron,jcblw/electron,iftekeriba/electron,ianscrivener/electron,faizalpribadi/electron,RIAEvangelist/electron,eric-seekas/electron,tonyganch/electron,jtburke/electron,pombredanne/electron,sshiting/electron,aliib/electron,Neron-X5/electron,icattlecoder/electron,DivyaKMenon/electron,takashi/electron,trankmichael/electron,zhakui/electron,leethomas/electron,d-salas/electron,eric-seekas/electron,fffej/electron,Jacobichou/electron,adcentury/electron,egoist/electron,arusakov/electron,faizalpribadi/electron,pandoraui/electron,RIAEvangelist/electron,evgenyzinoviev/electron,takashi/electron,twolfson/electron,medixdev/electron,electron/electron,fffej/electron,shockone/electron,chriskdon/electron,wan-qy/electron,edulan/electron,shiftkey/electron,bbondy/electron,oiledCode/electron,lzpfmh/electron,maxogden/atom-shell,renaesop/electron,brave/muon,pirafrank/electron,matiasinsaurralde/electron,adamjgray/electron,webmechanicx/electron,jlord/electron,JussMee15/electron,kokdemo/electron,webmechanicx/electron,seanchas116/electron,gabriel/electron,farmisen/electron,jacksondc/electron,howmuchcomputer/electron,electron/electron,oiledCode/electron,arusakov/electron,simonfork/electron,rhencke/electron,IonicaBizauKitchen/electron,greyhwndz/electron,baiwyc119/electron,tincan24/electron,shiftkey/electron,sshiting/electron,vaginessa/electron,d-salas/electron,miniak/electron,tomashanacek/electron,adcentury/electron,zhakui/electron,smczk/electron,cqqccqc/electron,RobertJGabriel/electron,jlhbaseball15/electron,JesselJohn/electron,biblerule/UMCTelnetHub,davazp/electron,wan-qy/electron,meowlab/electron,evgenyzinoviev/electron,noikiy/electron,mhkeller/electron,JussMee15/electron,jonatasfreitasv/electron,gabriel/electron,synaptek/electron,farmisen/electron,DivyaKMenon/electron,natgolov/electron,min
iak/electron,benweissmann/electron,jannishuebl/electron,christian-bromann/electron,eric-seekas/electron,MaxGraey/electron,wolfflow/electron,fomojola/electron,LadyNaggaga/electron,maxogden/atom-shell,rajatsingla28/electron,lrlna/electron,mrwizard82d1/electron,leftstick/electron,meowlab/electron,wolfflow/electron,electron/electron,Jacobichou/electron,tincan24/electron,mjaniszew/electron,xfstudio/electron,minggo/electron,rreimann/electron,simonfork/electron,sshiting/electron,shiftkey/electron,RobertJGabriel/electron,kenmozi/electron,roadev/electron,matiasinsaurralde/electron,benweissmann/electron,baiwyc119/electron,d-salas/electron,icattlecoder/electron,gerhardberger/electron,fomojola/electron,brenca/electron,nicobot/electron,joaomoreno/atom-shell,Rokt33r/electron,BionicClick/electron,synaptek/electron,Andrey-Pavlov/electron,mattotodd/electron,micalan/electron,kcrt/electron,digideskio/electron,fomojola/electron,miniak/electron,icattlecoder/electron,Jacobichou/electron,LadyNaggaga/electron,bpasero/electron,posix4e/electron,minggo/electron,Neron-X5/electron,mattdesl/electron,thompsonemerson/electron,jtburke/electron,saronwei/electron,mattotodd/electron,shaundunne/electron,jcblw/electron,tonyganch/electron,yan-foto/electron,rhencke/electron,abhishekgahlot/electron,jjz/electron,nagyistoce/electron-atom-shell,MaxWhere/electron,gstack/infinium-shell,seanchas116/electron,jjz/electron,tylergibson/electron,SufianHassan/electron,nicholasess/electron,MaxWhere/electron,voidbridge/electron,tinydew4/electron,jtburke/electron,mubassirhayat/electron,Evercoder/electron,simongregory/electron,egoist/electron,jannishuebl/electron,LadyNaggaga/electron,synaptek/electron,astoilkov/electron,thingsinjars/electron,Ivshti/electron,fabien-d/electron,pombredanne/electron,astoilkov/electron,BionicClick/electron,tinydew4/electron,setzer777/electron,Neron-X5/electron,chrisswk/electron,RobertJGabriel/electron,edulan/electron,MaxWhere/electron,evgenyzinoviev/electron,deed02392/electron,pandoraui/electron,mrwizard82d1/electron,Floato/electron,sky7sea/electron,bbondy/electron,sircharleswatson/electron,ervinb/electron,jonatasfreitasv/electron,thingsinjars/electron,renaesop/electron,GoooIce/electron,baiwyc119/electron,kokdemo/electron,rprichard/electron,pirafrank/electron,kazupon/electron,thingsinjars/electron,biblerule/UMCTelnetHub,MaxGraey/electron,bruce/electron,christian-bromann/electron,robinvandernoord/electron,shennushi/electron,RobertJGabriel/electron,gamedevsam/electron,pombredanne/electron,Neron-X5/electron,jiaz/electron,eric-seekas/electron,vipulroxx/electron,stevemao/electron,gstack/infinium-shell,carsonmcdonald/electron,bwiggs/electron,Floato/electron,seanchas116/electron,etiktin/electron,bright-sparks/electron,farmisen/electron,davazp/electron,tylergibson/electron,eriser/electron,Jacobichou/electron,mirrh/electron,takashi/electron,felixrieseberg/electron,thingsinjars/electron,leethomas/electron,jonatasfreitasv/electron,vipulroxx/electron,anko/electron,ianscrivener/electron,stevekinney/electron,thomsonreuters/electron,mirrh/electron,mattotodd/electron,gerhardberger/electron,Evercoder/electron,Neron-X5/electron,twolfson/electron,posix4e/electron,subblue/electron,gerhardberger/electron,jjz/electron,mjaniszew/electron,simongregory/electron,renaesop/electron,John-Lin/electron,micalan/electron,GoooIce/electron,destan/electron,kikong/electron,leolujuyi/electron,yan-foto/electron,yan-foto/electron,aaron-goshine/electron,biblerule/UMCTelnetHub,robinvandernoord/electron,John-Lin/electron,aichingm/electron,tylergibson/electron,
ankitaggarwal011/electron,timruffles/electron,edulan/electron,maxogden/atom-shell,tincan24/electron,howmuchcomputer/electron,seanchas116/electron,shennushi/electron,simonfork/electron,Rokt33r/electron,yalexx/electron,aecca/electron,bpasero/electron,carsonmcdonald/electron,mattdesl/electron,kikong/electron,fireball-x/atom-shell,preco21/electron,cqqccqc/electron,jhen0409/electron,jaanus/electron,iftekeriba/electron,nekuz0r/electron,kcrt/electron,leftstick/electron,GoooIce/electron,fffej/electron,egoist/electron,RobertJGabriel/electron,egoist/electron,jannishuebl/electron,sshiting/electron,xfstudio/electron,GoooIce/electron,robinvandernoord/electron,gerhardberger/electron,meowlab/electron,christian-bromann/electron,tomashanacek/electron,coderhaoxin/electron,pirafrank/electron,evgenyzinoviev/electron,DivyaKMenon/electron,takashi/electron,kokdemo/electron,hokein/atom-shell,brave/electron,bpasero/electron,edulan/electron,GoooIce/electron,fritx/electron,gbn972/electron,shaundunne/electron,lzpfmh/electron,joneit/electron,carsonmcdonald/electron,sky7sea/electron,mubassirhayat/electron,faizalpribadi/electron,mattotodd/electron,medixdev/electron,IonicaBizauKitchen/electron,xfstudio/electron,rreimann/electron,IonicaBizauKitchen/electron,aaron-goshine/electron,arturts/electron,eriser/electron,jsutcodes/electron,thompsonemerson/electron,iftekeriba/electron,iftekeriba/electron,etiktin/electron,kostia/electron,wolfflow/electron,minggo/electron,soulteary/electron,howmuchcomputer/electron,trankmichael/electron,GoooIce/electron,jhen0409/electron,micalan/electron,miniak/electron,trankmichael/electron,simonfork/electron,maxogden/atom-shell,chrisswk/electron,kikong/electron,brave/muon,thingsinjars/electron,anko/electron,kazupon/electron,aliib/electron,aecca/electron,deed02392/electron,sircharleswatson/electron,tincan24/electron,jjz/electron,nicholasess/electron,christian-bromann/electron,davazp/electron,Ivshti/electron,jjz/electron,nicobot/electron,trigrass2/electron,jaanus/electron,deed02392/electron,mubassirhayat/electron,tylergibson/electron,gbn972/electron,mhkeller/electron,neutrous/electron,nekuz0r/electron,John-Lin/electron,adamjgray/electron,RIAEvangelist/electron,coderhaoxin/electron,Andrey-Pavlov/electron,Andrey-Pavlov/electron,leethomas/electron,shiftkey/electron,eric-seekas/electron,baiwyc119/electron,mrwizard82d1/electron,felixrieseberg/electron,astoilkov/electron,fabien-d/electron,twolfson/electron,kostia/electron,jtburke/electron,smczk/electron,Ivshti/electron,brenca/electron,thomsonreuters/electron,thingsinjars/electron,kostia/electron,setzer777/electron,bitemyapp/electron,d-salas/electron,micalan/electron,oiledCode/electron,tonyganch/electron,thompsonemerson/electron,mhkeller/electron,mattdesl/electron,shockone/electron,etiktin/electron,brave/electron,Andrey-Pavlov/electron,chrisswk/electron,John-Lin/electron,saronwei/electron,pandoraui/electron,xfstudio/electron,vipulroxx/electron,bbondy/electron,farmisen/electron,aaron-goshine/electron,thomsonreuters/electron,SufianHassan/electron,dkfiresky/electron,pirafrank/electron,aliib/electron,JesselJohn/electron,rsvip/electron,bbondy/electron,dongjoon-hyun/electron,JussMee15/electron,mhkeller/electron,destan/electron,MaxWhere/electron,noikiy/electron,trigrass2/electron,dongjoon-hyun/electron,michaelchiche/electron,Evercoder/electron,posix4e/electron,synaptek/electron,lrlna/electron,yan-foto/electron,ervinb/electron,Jacobichou/electron,JussMee15/electron,ankitaggarwal011/electron,zhakui/electron,brave/electron,kenmozi/electron,posix4e/electron,pandoraui/e
lectron,nagyistoce/electron-atom-shell,stevekinney/electron,eriser/electron,shaundunne/electron,darwin/electron,stevemao/electron,bitemyapp/electron,kenmozi/electron,wan-qy/electron,Zagorakiss/electron,mubassirhayat/electron,ankitaggarwal011/electron,sky7sea/electron,gabrielPeart/electron,jacksondc/electron,felixrieseberg/electron,kazupon/electron,vHanda/electron,zhakui/electron,Zagorakiss/electron,fabien-d/electron,howmuchcomputer/electron,vaginessa/electron,Neron-X5/electron,renaesop/electron,systembugtj/electron,tinydew4/electron,rprichard/electron,cqqccqc/electron,jtburke/electron,meowlab/electron,shaundunne/electron,trankmichael/electron,rreimann/electron,bruce/electron,neutrous/electron,the-ress/electron,bitemyapp/electron,leftstick/electron,synaptek/electron,bruce/electron,roadev/electron,brave/muon,Gerhut/electron,Jonekee/electron,bbondy/electron,jlord/electron,Faiz7412/electron,Jacobichou/electron,gamedevsam/electron,leftstick/electron,kcrt/electron,wan-qy/electron,Floato/electron,bright-sparks/electron,kenmozi/electron,bwiggs/electron,shaundunne/electron,christian-bromann/electron,IonicaBizauKitchen/electron,dongjoon-hyun/electron,natgolov/electron,brenca/electron,Faiz7412/electron,tomashanacek/electron,fffej/electron,simongregory/electron,neutrous/electron,ervinb/electron,maxogden/atom-shell,simonfork/electron,posix4e/electron,trigrass2/electron,abhishekgahlot/electron,pirafrank/electron,greyhwndz/electron,aichingm/electron,rreimann/electron,vHanda/electron,Faiz7412/electron,the-ress/electron,trigrass2/electron,neutrous/electron,lrlna/electron,brenca/electron,nagyistoce/electron-atom-shell,neutrous/electron,John-Lin/electron,jcblw/electron,MaxGraey/electron,preco21/electron,JesselJohn/electron,yan-foto/electron,mjaniszew/electron,coderhaoxin/electron,Jonekee/electron,mjaniszew/electron,jaanus/electron,Rokt33r/electron,edulan/electron,arturts/electron,jonatasfreitasv/electron,xiruibing/electron,voidbridge/electron,Jonekee/electron,jacksondc/electron,yalexx/electron,chriskdon/electron,noikiy/electron,bitemyapp/electron,LadyNaggaga/electron,ianscrivener/electron,kazupon/electron,stevekinney/electron,bwiggs/electron,timruffles/electron,digideskio/electron,biblerule/UMCTelnetHub,fritx/electron,tomashanacek/electron,MaxWhere/electron,bpasero/electron,Rokt33r/electron,joneit/electron,Evercoder/electron,brave/electron,preco21/electron,fireball-x/atom-shell,dkfiresky/electron,saronwei/electron,tylergibson/electron,aaron-goshine/electron,dahal/electron,JesselJohn/electron,voidbridge/electron,arusakov/electron,neutrous/electron,leftstick/electron,Jonekee/electron,joneit/electron,aichingm/electron,bobwol/electron,darwin/electron,xiruibing/electron,deepak1556/atom-shell,fffej/electron,jjz/electron,Zagorakiss/electron,miniak/electron,Zagorakiss/electron,lzpfmh/electron,nicobot/electron,tinydew4/electron,nicholasess/electron,vaginessa/electron,fabien-d/electron,cqqccqc/electron,vaginessa/electron,tomashanacek/electron,tomashanacek/electron,ervinb/electron,abhishekgahlot/electron,JesselJohn/electron,SufianHassan/electron,gerhardberger/electron,etiktin/electron,rprichard/electron,oiledCode/electron,medixdev/electron,posix4e/electron,michaelchiche/electron,edulan/electron,beni55/electron,kostia/electron,bbondy/electron,rhencke/electron,jcblw/electron,rhencke/electron,sky7sea/electron,fabien-d/electron,the-ress/electron,fomojola/electron,adamjgray/electron,astoilkov/electron,rsvip/electron,vHanda/electron,aliib/electron,vipulroxx/electron,jlhbaseball15/electron,BionicClick/electron,bright-sparks/el
ectron,renaesop/electron,baiwyc119/electron,meowlab/electron,kostia/electron,medixdev/electron,dkfiresky/electron,mhkeller/electron,tinydew4/electron,fritx/electron,chrisswk/electron,jiaz/electron,baiwyc119/electron,kostia/electron,kenmozi/electron,dongjoon-hyun/electron,lrlna/electron,JussMee15/electron,aecca/electron,greyhwndz/electron,lrlna/electron,mattotodd/electron,pombredanne/electron,MaxWhere/electron,Faiz7412/electron,gamedevsam/electron,kcrt/electron,chriskdon/electron,gbn972/electron,trigrass2/electron,kokdemo/electron,gstack/infinium-shell,setzer777/electron,jiaz/electron,sshiting/electron,leolujuyi/electron,Zagorakiss/electron,systembugtj/electron,adamjgray/electron,bwiggs/electron,wolfflow/electron,sshiting/electron,arusakov/electron,rprichard/electron,dahal/electron,subblue/electron,jlhbaseball15/electron,joaomoreno/atom-shell,Zagorakiss/electron,benweissmann/electron,roadev/electron,d-salas/electron,joneit/electron,jiaz/electron,xfstudio/electron,soulteary/electron,the-ress/electron,nicobot/electron,eriser/electron,xiruibing/electron,bobwol/electron,yalexx/electron,jacksondc/electron,anko/electron,egoist/electron,timruffles/electron,preco21/electron,dkfiresky/electron,deepak1556/atom-shell,destan/electron,joaomoreno/atom-shell,shockone/electron,stevemao/electron,leethomas/electron,michaelchiche/electron,medixdev/electron,farmisen/electron,stevekinney/electron,jonatasfreitasv/electron,jsutcodes/electron,nekuz0r/electron,adcentury/electron,tincan24/electron,yalexx/electron,gabriel/electron,abhishekgahlot/electron,egoist/electron,fritx/electron,rajatsingla28/electron,simongregory/electron,aecca/electron,fritx/electron,trankmichael/electron,bobwol/electron,stevemao/electron,seanchas116/electron,chriskdon/electron,aaron-goshine/electron,gabriel/electron,gamedevsam/electron,greyhwndz/electron,Andrey-Pavlov/electron,aichingm/electron,DivyaKMenon/electron,thomsonreuters/electron,cos2004/electron,mirrh/electron,synaptek/electron,zhakui/electron,John-Lin/electron,deed02392/electron,lrlna/electron,rhencke/electron,bpasero/electron,ankitaggarwal011/electron,vHanda/electron,simongregory/electron,natgolov/electron,mhkeller/electron,adamjgray/electron,brave/electron,ianscrivener/electron,biblerule/UMCTelnetHub,gamedevsam/electron,joaomoreno/atom-shell,chriskdon/electron,michaelchiche/electron,fritx/electron,Floato/electron,IonicaBizauKitchen/electron,arturts/electron,Gerhut/electron,Andrey-Pavlov/electron,noikiy/electron,LadyNaggaga/electron,BionicClick/electron,jaanus/electron,setzer777/electron,jannishuebl/electron,trankmichael/electron,nicholasess/electron,kenmozi/electron,bruce/electron,jhen0409/electron,jannishuebl/electron,dongjoon-hyun/electron,roadev/electron,astoilkov/electron,arusakov/electron,destan/electron,wolfflow/electron,iftekeriba/electron,brave/muon,Rokt33r/electron,IonicaBizauKitchen/electron,smczk/electron,shiftkey/electron,voidbridge/electron,deed02392/electron,coderhaoxin/electron,deepak1556/atom-shell,noikiy/electron,oiledCode/electron,micalan/electron,preco21/electron,takashi/electron,fffej/electron,mirrh/electron,jacksondc/electron,webmechanicx/electron,lzpfmh/electron,jsutcodes/electron,electron/electron,thomsonreuters/electron,zhakui/electron,natgolov/electron,ianscrivener/electron,nicobot/electron,beni55/electron,digideskio/electron,the-ress/electron,gabrielPeart/electron,LadyNaggaga/electron,oiledCode/electron,beni55/electron,pombredanne/electron,thompsonemerson/electron,mattdesl/electron,gbn972/electron,timruffles/electron,ervinb/electron,RIAEvangelist/electro
n,joaomoreno/atom-shell,howmuchcomputer/electron,jiaz/electron,gabrielPeart/electron,minggo/electron,beni55/electron,shiftkey/electron,hokein/atom-shell,michaelchiche/electron,jiaz/electron,anko/electron,stevemao/electron,meowlab/electron,thompsonemerson/electron,sircharleswatson/electron,faizalpribadi/electron,bright-sparks/electron,gabrielPeart/electron,cqqccqc/electron,leolujuyi/electron,felixrieseberg/electron,bitemyapp/electron,jlhbaseball15/electron,webmechanicx/electron,gabrielPeart/electron,abhishekgahlot/electron,tincan24/electron,sky7sea/electron,destan/electron,SufianHassan/electron,saronwei/electron,adcentury/electron,gbn972/electron,bpasero/electron,bpasero/electron,cqqccqc/electron,hokein/atom-shell,mjaniszew/electron,systembugtj/electron,jlord/electron,sircharleswatson/electron,nekuz0r/electron,dahal/electron,wan-qy/electron,gabriel/electron,joneit/electron,mattotodd/electron,tylergibson/electron,vipulroxx/electron,greyhwndz/electron,joneit/electron,gerhardberger/electron,roadev/electron,mrwizard82d1/electron,jhen0409/electron,electron/electron,systembugtj/electron,kcrt/electron,robinvandernoord/electron,nicobot/electron,sircharleswatson/electron,brave/muon,aichingm/electron | ---
+++
@@ -2,4 +2,4 @@
NODE_VERSION = 'v0.11.10'
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
-LIBCHROMIUMCONTENT_COMMIT = 'b27290717c08f8c6a58067d3c3725d68b4e6a2e5'
+LIBCHROMIUMCONTENT_COMMIT = 'fe05f53f3080889ced2696b2741d93953e654b49' |
1c32b17bd4c85165f91fbb188b22471a296c6176 | kajiki/i18n.py | kajiki/i18n.py | # -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .ir import TranslatableTextNode
def gettext(s):
return s
def extract(fileobj, keywords, comment_tags, options):
'''Babel entry point that extracts translation strings from XML templates.
'''
from .xml_template import _Parser, _Compiler, expand
doc = _Parser(filename='<string>', source=fileobj.read()).parse()
expand(doc)
compiler = _Compiler(filename='<string>', doc=doc,
mode=options.get('mode', 'xml'),
is_fragment=options.get('is_fragment', False))
ir = compiler.compile()
for node in ir:
if isinstance(node, TranslatableTextNode):
if node.text.strip():
for line in node.text.split('\n'):
yield (node.lineno, '_', line, [])
| # -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .ir import TranslatableTextNode
def gettext(s):
return s
def extract(fileobj, keywords, comment_tags, options):
'''Babel entry point that extracts translation strings from XML templates.
'''
from .xml_template import _Parser, _Compiler, expand
source = fileobj.read()
if isinstance(source, bytes):
source = source.decode('utf-8')
doc = _Parser(filename='<string>', source=source).parse()
expand(doc)
compiler = _Compiler(filename='<string>', doc=doc,
mode=options.get('mode', 'xml'),
is_fragment=options.get('is_fragment', False))
ir = compiler.compile()
for node in ir:
if isinstance(node, TranslatableTextNode):
if node.text.strip():
for line in node.text.split('\n'):
yield (node.lineno, '_', line, [])
| Fix issue with message extractor on Py2 | Fix issue with message extractor on Py2
| Python | mit | ollyc/kajiki,ollyc/kajiki,ollyc/kajiki | ---
+++
@@ -13,7 +13,10 @@
'''Babel entry point that extracts translation strings from XML templates.
'''
from .xml_template import _Parser, _Compiler, expand
- doc = _Parser(filename='<string>', source=fileobj.read()).parse()
+ source = fileobj.read()
+ if isinstance(source, bytes):
+ source = source.decode('utf-8')
+ doc = _Parser(filename='<string>', source=source).parse()
expand(doc)
compiler = _Compiler(filename='<string>', doc=doc,
mode=options.get('mode', 'xml'), |
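A side note on the fix above, which boils down to decoding a byte stream before parsing: the same decode-if-bytes idiom can be exercised on its own. The helper below is a hypothetical sketch, not part of Kajiki.

import io

def read_text(fileobj, encoding='utf-8'):
    # Accept either a binary or a text file object and always return text.
    data = fileobj.read()
    if isinstance(data, bytes):
        data = data.decode(encoding)
    return data

# Behaves the same for byte and text buffers.
assert read_text(io.BytesIO(u'<p>caf\u00e9</p>'.encode('utf-8'))) == u'<p>caf\u00e9</p>'
assert read_text(io.StringIO(u'<p>caf\u00e9</p>')) == u'<p>caf\u00e9</p>'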
c1785e0713a5af6b849baaa1b314a13ac777f3f5 | tests/test_str_py3.py | tests/test_str_py3.py | from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
import fastavro
from fastavro.compat import BytesIO
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
| # -*- coding: utf-8 -*-
"""Python3 string tests for fastavro"""
from __future__ import absolute_import
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
import fastavro
letters = ascii_uppercase + digits
id_size = 100
seed('str_py3') # Repeatable results
def gen_id():
return ''.join(choice(letters) for _ in range(id_size))
keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]
schema = {
"fields": [{'name': key, 'type': 'string'} for key in keys],
"namespace": "namespace",
"name": "zerobyte",
"type": "record"
}
def test_str_py3():
buf = BytesIO()
fastavro.writer(buf, schema, testdata)
buf.seek(0, SEEK_SET)
for i, rec in enumerate(fastavro.iter_avro(buf), 1):
pass
size = len(testdata)
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
if __name__ == '__main__':
test_str_py3()
| Test files shouldn't import 'fastavro.compat'. Just import BytesIO manually. | Test files shouldn't import 'fastavro.compat'. Just import BytesIO
manually.
| Python | mit | e-heller/fastavro,e-heller/fastavro | ---
+++
@@ -1,9 +1,19 @@
+# -*- coding: utf-8 -*-
+"""Python3 string tests for fastavro"""
+
+from __future__ import absolute_import
+
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits
+try:
+ from cStringIO import StringIO as BytesIO
+except ImportError:
+ from io import BytesIO
+
import fastavro
-from fastavro.compat import BytesIO
+
letters = ascii_uppercase + digits
id_size = 100
@@ -40,5 +50,6 @@
assert i == size, 'bad number of records'
assert rec == testdata[-1], 'bad last record'
+
if __name__ == '__main__':
test_str_py3() |
1d839a0207058bd08de4be9a821337c9bdb1bcf8 | rock/utils.py | rock/utils.py | import StringIO
import os
class shell(object):
def __enter__(self):
self.stdin = StringIO.StringIO()
return self
def __exit__(self, type, value, traceback):
os.execl('/usr/bin/bash', 'bash', '-c', self.stdin.getvalue())
def write(self, text):
self.stdin.write(text + '\n')
| import StringIO
import os
class shell(object):
def __enter__(self):
self.stdin = StringIO.StringIO()
return self
def __exit__(self, type, value, traceback):
os.execl('/usr/bin/env', 'bash', 'bash', '-c', self.stdin.getvalue())
def write(self, text):
self.stdin.write(text + '\n')
| Use env instead of hard coding bash path | Use env instead of hard coding bash path
| Python | mit | silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock | ---
+++
@@ -9,7 +9,7 @@
return self
def __exit__(self, type, value, traceback):
- os.execl('/usr/bin/bash', 'bash', '-c', self.stdin.getvalue())
+ os.execl('/usr/bin/env', 'bash', 'bash', '-c', self.stdin.getvalue())
def write(self, text):
self.stdin.write(text + '\n') |
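For context on the one-line change above: os.execl(path, arg0, arg1, ...) replaces the current process, and everything after the executable path becomes its argv. Routing through /usr/bin/env lets bash be resolved from PATH instead of a hard-coded location. A minimal illustrative sketch (the run_script name is invented here):

import os

def run_script(script):
    # env receives argv ['bash', 'bash', '-c', script]; the first 'bash' is just
    # env's argv[0], the second is the program env looks up on PATH and runs
    # with the remaining arguments ['-c', script].
    os.execl('/usr/bin/env', 'bash', 'bash', '-c', script)

# run_script('echo hello')  # never returns: the current process is replaced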
df1397dcf6fe849b87db139e8ea3087a5f73649a | tests/graphics/toolbuttons.py | tests/graphics/toolbuttons.py | from gi.repository import Gtk
from sugar3.graphics.toolbarbox import ToolbarBox
from sugar3.graphics.colorbutton import ColorToolButton
from sugar3.graphics.radiotoolbutton import RadioToolButton
from sugar3.graphics.toggletoolbutton import ToggleToolButton
import common
test = common.Test()
test.show()
vbox = Gtk.VBox()
test.pack_start(vbox, True, True, 0)
vbox.show()
toolbar_box = ToolbarBox()
vbox.pack_start(toolbar_box, False, False, 0)
toolbar_box.show()
radial_button = RadioToolButton(named_icon='view-radial')
toolbar_box.toolbar.insert(radial_button, -1)
radial_button.show()
list_button = RadioToolButton(named_icon='view-list')
list_button.props.group = radial_button
toolbar_box.toolbar.insert(list_button, -1)
list_button.show()
separator = Gtk.SeparatorToolItem()
toolbar_box.toolbar.insert(separator, -1)
separator.show()
color_button = ColorToolButton()
toolbar_box.toolbar.insert(color_button, -1)
color_button.show()
favorite_button = ToggleToolButton('emblem-favorite')
toolbar_box.toolbar.insert(favorite_button, -1)
favorite_button.show()
if __name__ == '__main__':
common.main(test)
| from gi.repository import Gtk
from sugar3.graphics.toolbarbox import ToolbarBox
from sugar3.graphics.colorbutton import ColorToolButton
from sugar3.graphics.radiotoolbutton import RadioToolButton
from sugar3.graphics.toggletoolbutton import ToggleToolButton
import common
test = common.Test()
test.show()
vbox = Gtk.VBox()
test.pack_start(vbox, True, True, 0)
vbox.show()
toolbar_box = ToolbarBox()
vbox.pack_start(toolbar_box, False, False, 0)
toolbar_box.show()
radial_button = RadioToolButton(icon_name='view-radial')
toolbar_box.toolbar.insert(radial_button, -1)
radial_button.show()
list_button = RadioToolButton(icon_name='view-list')
list_button.props.group = radial_button
toolbar_box.toolbar.insert(list_button, -1)
list_button.show()
separator = Gtk.SeparatorToolItem()
toolbar_box.toolbar.insert(separator, -1)
separator.show()
color_button = ColorToolButton()
toolbar_box.toolbar.insert(color_button, -1)
color_button.show()
favorite_button = ToggleToolButton('emblem-favorite')
toolbar_box.toolbar.insert(favorite_button, -1)
favorite_button.show()
if __name__ == '__main__':
common.main(test)
| Update toolbar buttons testcase with API change for the icon name | Update toolbar buttons testcase with API change for the icon name
Follow-up of fe11a3aa23c0e7fbc3c0c498e147b0a20348cc12.
Signed-off-by: Manuel Quiñones <[email protected]>
| Python | lgpl-2.1 | i5o/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,sugarlabs/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,samdroid-apps/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,samdroid-apps/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3 | ---
+++
@@ -19,11 +19,11 @@
vbox.pack_start(toolbar_box, False, False, 0)
toolbar_box.show()
-radial_button = RadioToolButton(named_icon='view-radial')
+radial_button = RadioToolButton(icon_name='view-radial')
toolbar_box.toolbar.insert(radial_button, -1)
radial_button.show()
-list_button = RadioToolButton(named_icon='view-list')
+list_button = RadioToolButton(icon_name='view-list')
list_button.props.group = radial_button
toolbar_box.toolbar.insert(list_button, -1)
list_button.show() |
e907c7de622c54556df10155caddbb05c8235d19 | resolwe_bio/__about__.py | resolwe_bio/__about__.py | """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe-bio'
__summary__ = 'Bioinformatics pipelines for the Resolwe platform'
__url__ = 'https://github.com/genialis/resolwe-bio'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '12.0.0a2'
__author__ = 'Genialis d.o.o.'
__email__ = '[email protected]'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
"__title__", "__summary__", "__url__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
| """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe-bio'
__summary__ = 'Bioinformatics pipelines for the Resolwe platform'
__url__ = 'https://github.com/genialis/resolwe-bio'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '12.0.0a2'
__author__ = 'Genialis, Inc.'
__email__ = '[email protected]'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
"__title__", "__summary__", "__url__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
)
| Change author in about file | Change author in about file
| Python | apache-2.0 | genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio | ---
+++
@@ -10,7 +10,7 @@
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '12.0.0a2'
-__author__ = 'Genialis d.o.o.'
+__author__ = 'Genialis, Inc.'
__email__ = '[email protected]'
__license__ = 'Apache License (2.0)' |
2f56d481c05e28a4434a038a356f521b4ea5cbca | tests/test_simple_features.py | tests/test_simple_features.py | from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_monotonic_down_per_second():
datapoints = time_values(10.0 - i for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_tent_map():
values = list(float(i) for i in range(10))
values.append(11.0)
values+= list(10.0 - i for i in range(10))
datapoints = time_values(values)
features = wordgraph.describe(datapoints)
assert "" in features
| from wordgraph.points import Point
import wordgraph
EPOCH_START = 1407109280
def time_values(values, start=EPOCH_START, increment=1):
datapoints = []
for index, value in enumerate(values):
datapoints.append(Point(x=value, y=start + (increment * index)))
return datapoints
def test_monotonic_up_per_second():
datapoints = time_values(float(i) for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_monotonic_down_per_second():
datapoints = time_values(10.0 - i for i in range(10))
features = wordgraph.describe(datapoints)
assert "" in features
def test_tent_map():
values = [float(i) for i in range(10)]
values.append(11.0)
values += [10.0 - i for i in range(10)]
datapoints = time_values(values)
features = wordgraph.describe(datapoints)
assert "" in features
def test_step_function():
values = [1.0] * 10
values += [2.0] * 10
datapoints = time_values(values)
features = wordgraph.describe(datapoints)
assert "" in features
| Test case for step function in time series | Test case for step function in time series
Test a graph function that is linear in two time periods, jumping
between linear values.
| Python | apache-2.0 | tleeuwenburg/wordgraph,tleeuwenburg/wordgraph | ---
+++
@@ -20,9 +20,16 @@
assert "" in features
def test_tent_map():
- values = list(float(i) for i in range(10))
+ values = [float(i) for i in range(10)]
values.append(11.0)
- values+= list(10.0 - i for i in range(10))
+ values += [10.0 - i for i in range(10)]
datapoints = time_values(values)
features = wordgraph.describe(datapoints)
assert "" in features
+
+def test_step_function():
+ values = [1.0] * 10
+ values += [2.0] * 10
+ datapoints = time_values(values)
+ features = wordgraph.describe(datapoints)
+ assert "" in features |
9051fc68b2c542f7a201a969340b1f1f5d0f660c | test_openfolder.py | test_openfolder.py | import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception):
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception):
result = open_folder("/")
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
| import pytest
from mock import patch, MagicMock
from open_folder import *
def test_folder_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
result = open_folder(".")
assert result == None
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
with pytest.raises(Exception) as excinfo:
open_folder("/")
assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
with patch('platform.system', MagicMock(return_value="Linux")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Darwin")):
result = open_folder("/")
assert result == None
with patch('platform.system', MagicMock(return_value="Windows")):
result = open_folder("/")
assert result == None
| Check to ensure the exceptions return the text we expect. | Check to ensure the exceptions return the text we expect.
| Python | mit | golliher/dg-tickler-file | ---
+++
@@ -10,14 +10,17 @@
def test_folder_does_not_exists():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
- with pytest.raises(Exception):
+ with pytest.raises(Exception) as excinfo:
open_folder("it_is_very_unlikely_that_this_file_exists_20150718")
+ assert str(excinfo.value) == ('Folder does not exist.')
def test_unsupported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")):
+
with patch('platform.system', MagicMock(return_value="NotDarwinWindowsLinux")):
- with pytest.raises(Exception):
- result = open_folder("/")
+ with pytest.raises(Exception) as excinfo:
+ open_folder("/")
+ assert str(excinfo.value).startswith('Your operating system was not recognized.')
def test_supported_os():
with patch('subprocess.check_call', MagicMock(return_value="NOOP")): |
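The pattern adopted in the commit above, capturing the exception object with pytest.raises(...) as excinfo and asserting on str(excinfo.value), works in any pytest suite. A self-contained sketch with a hypothetical stand-in function:

import pytest

def open_missing_folder(path):
    # Stand-in that mirrors the behaviour under test; not the project's code.
    raise Exception('Folder does not exist.')

def test_error_message():
    with pytest.raises(Exception) as excinfo:
        open_missing_folder('nowhere')
    # excinfo.value is the raised exception instance.
    assert str(excinfo.value) == 'Folder does not exist.'
    assert str(excinfo.value).startswith('Folder')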
e5fa10e27d9c5911b0238d23fc13acc081accc79 | utils/dates.py | utils/dates.py | # This file is part of e-Giełda.
# Copyright (C) 2014-2015 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime, timedelta
DT_FORMAT = '%Y-%m-%d %H:%M:%S%z'
def datetime_html_format(date):
return date.strftime("%Y-%m-%dT%H:%M")
def datetime_to_string(datetime):
return datetime.strftime(datetime, DT_FORMAT)
def string_to_datetime(date):
return datetime.strptime(date, DT_FORMAT)
def date_range(start_date, end_date):
return list(start_date + timedelta(x) for x in range((end_date - start_date).days + 1))
| # This file is part of e-Giełda.
# Copyright (C) 2014-2015 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda. If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime, timedelta
DT_FORMAT = '%Y-%m-%d %H:%M:%S%z'
def datetime_html_format(date):
return date.strftime("%Y-%m-%dT%H:%M")
def datetime_to_string(date):
return date.strftime(DT_FORMAT)
def string_to_datetime(date):
return datetime.strptime(date, DT_FORMAT)
def date_range(start_date, end_date):
return list(start_date + timedelta(x) for x in range((end_date - start_date).days + 1))
| Fix error on date save | Fix error on date save
| Python | agpl-3.0 | m4tx/egielda,m4tx/egielda,m4tx/egielda | ---
+++
@@ -18,8 +18,8 @@
return date.strftime("%Y-%m-%dT%H:%M")
-def datetime_to_string(datetime):
- return datetime.strftime(datetime, DT_FORMAT)
+def datetime_to_string(date):
+ return date.strftime(DT_FORMAT)
def string_to_datetime(date): |
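The error fixed above appears to stem from the parameter named datetime shadowing the imported datetime class, so the old code called the bound strftime of the instance with an extra positional argument. A minimal reproduction, with invented function names, assuming that reading of the bug:

from datetime import datetime

DT_FORMAT = '%Y-%m-%d %H:%M:%S%z'

def broken_to_string(datetime):
    # 'datetime' here is the argument (an instance), not the class, so this
    # passes two arguments to a method that takes only the format string.
    return datetime.strftime(datetime, DT_FORMAT)

def fixed_to_string(date):
    return date.strftime(DT_FORMAT)

now = datetime.now()
print(fixed_to_string(now))
# broken_to_string(now)  # raises TypeError: strftime() expects a single format argument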
ca430300c08f78b7c2de4153e08c1645996f85b7 | tests/test_parsers.py | tests/test_parsers.py | import unittest
from brew.parsers import JSONDataLoader
class TestJSONDataLoader(unittest.TestCase):
def setUp(self):
self.parser = JSONDataLoader('./')
def test_format_name(self):
name_list = [('pale malt 2-row us', 'pale_malt_2_row_us'),
('caramel crystal malt 20l', 'caramel_crystal_malt_20l'),
('centennial', 'centennial'),
('cascade us', 'cascade_us'),
('Wyeast 1056', 'wyeast_1056'),
]
for name, expected in name_list:
out = self.parser.format_name(name)
self.assertEquals(out, expected)
| import unittest
from brew.parsers import DataLoader
from brew.parsers import JSONDataLoader
class TestDataLoader(unittest.TestCase):
def setUp(self):
self.parser = DataLoader('./')
def test_read_data_raises(self):
with self.assertRaises(NotImplementedError):
self.parser.read_data('filename')
class TestJSONDataLoader(unittest.TestCase):
def setUp(self):
self.parser = JSONDataLoader('./')
def test_format_name(self):
name_list = [('pale malt 2-row us', 'pale_malt_2_row_us'),
('caramel crystal malt 20l', 'caramel_crystal_malt_20l'),
('centennial', 'centennial'),
('cascade us', 'cascade_us'),
('Wyeast 1056', 'wyeast_1056'),
]
for name, expected in name_list:
out = self.parser.format_name(name)
self.assertEquals(out, expected)
| Add test to DataLoader base class | Add test to DataLoader base class
| Python | mit | chrisgilmerproj/brewday,chrisgilmerproj/brewday | ---
+++
@@ -1,6 +1,17 @@
import unittest
+from brew.parsers import DataLoader
from brew.parsers import JSONDataLoader
+
+
+class TestDataLoader(unittest.TestCase):
+
+ def setUp(self):
+ self.parser = DataLoader('./')
+
+ def test_read_data_raises(self):
+ with self.assertRaises(NotImplementedError):
+ self.parser.read_data('filename')
class TestJSONDataLoader(unittest.TestCase): |
17d2d4eaf58011ceb33a4d5944253578c2b5edd1 | pmdarima/preprocessing/endog/tests/test_log.py | pmdarima/preprocessing/endog/tests/test_log.py | # -*- coding: utf-8 -*-
import numpy as np
from numpy.testing import assert_array_almost_equal
from scipy import stats
import pytest
from pmdarima.preprocessing import LogEndogTransformer
from pmdarima.preprocessing import BoxCoxEndogTransformer
def test_same():
y = [1, 2, 3]
trans = BoxCoxEndogTransformer()
log_trans = LogEndogTransformer()
y_t, _ = trans.fit_transform(y)
log_y_t, _ = log_trans.fit_transform(y)
assert_array_almost_equal(log_y_t, y_t)
| # -*- coding: utf-8 -*-
import numpy as np
from numpy.testing import assert_array_almost_equal
from scipy import stats
import pytest
from pmdarima.preprocessing import LogEndogTransformer
from pmdarima.preprocessing import BoxCoxEndogTransformer
def test_same():
y = [1, 2, 3]
trans = BoxCoxEndogTransformer(lmbda=0)
log_trans = LogEndogTransformer()
y_t, _ = trans.fit_transform(y)
log_y_t, _ = log_trans.fit_transform(y)
assert_array_almost_equal(log_y_t, y_t)
def test_invertible():
y = [1, 2, 3]
trans = LogEndogTransformer()
y_t, _ = trans.fit_transform(y)
y_prime, _ = trans.inverse_transform(y_t)
assert_array_almost_equal(y, y_prime)
| Add test_invertible to log transformer test | Add test_invertible to log transformer test
| Python | mit | alkaline-ml/pmdarima,tgsmith61591/pyramid,tgsmith61591/pyramid,alkaline-ml/pmdarima,alkaline-ml/pmdarima,tgsmith61591/pyramid | ---
+++
@@ -8,10 +8,19 @@
from pmdarima.preprocessing import LogEndogTransformer
from pmdarima.preprocessing import BoxCoxEndogTransformer
+
def test_same():
y = [1, 2, 3]
- trans = BoxCoxEndogTransformer()
+ trans = BoxCoxEndogTransformer(lmbda=0)
log_trans = LogEndogTransformer()
y_t, _ = trans.fit_transform(y)
log_y_t, _ = log_trans.fit_transform(y)
assert_array_almost_equal(log_y_t, y_t)
+
+
+def test_invertible():
+ y = [1, 2, 3]
+ trans = LogEndogTransformer()
+ y_t, _ = trans.fit_transform(y)
+ y_prime, _ = trans.inverse_transform(y_t)
+ assert_array_almost_equal(y, y_prime) |
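Background on why lmbda=0 makes the two transformers in test_same agree: the Box-Cox transform is (y**lmbda - 1) / lmbda for lmbda != 0 and log(y) in the limit lmbda = 0. A quick check with scipy alone, independent of pmdarima's classes:

import numpy as np
from scipy import stats

y = np.array([1.0, 2.0, 3.0])

# Box-Cox with a fixed lambda of 0 reduces to the natural log.
np.testing.assert_array_almost_equal(stats.boxcox(y, lmbda=0), np.log(y))

# General case for comparison: (y**lam - 1) / lam when lam != 0.
lam = 0.5
np.testing.assert_array_almost_equal(stats.boxcox(y, lmbda=lam), (y ** lam - 1) / lam)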
5c1f9b0a70fe47bbfa7d3813a47e2da81cd81506 | tests/runalldoctests.py | tests/runalldoctests.py | import doctest
import glob
import pkg_resources
try:
pkg_resources.require('OWSLib')
except (ImportError, pkg_resources.DistributionNotFound):
pass
testfiles = glob.glob('*.txt')
for file in testfiles:
doctest.testfile(file)
| import doctest
import getopt
import glob
import sys
import pkg_resources
try:
pkg_resources.require('OWSLib')
except (ImportError, pkg_resources.DistributionNotFound):
pass
def run(pattern):
if pattern is None:
testfiles = glob.glob('*.txt')
else:
testfiles = glob.glob(pattern)
for file in testfiles:
doctest.testfile(file)
if __name__ == "__main__":
try:
opts, args = getopt.getopt(sys.argv[1:], "t:v")
except getopt.GetoptError:
print "Usage: python runalldoctests.py [-t GLOB_PATTERN]"
sys.exit(2)
pattern = None
for o, a in opts:
if o == '-t':
pattern = a
run(pattern)
| Add option to pick single test file from the runner | Add option to pick single test file from the runner
git-svn-id: 8e0fbe17d71f9a07a4f24b82f5b9fb44b438f95e@620 b426a367-1105-0410-b9ff-cdf4ab011145
| Python | bsd-3-clause | sabman/OWSLib,monoid/owslib,monoid/owslib | ---
+++
@@ -1,5 +1,8 @@
import doctest
+import getopt
import glob
+import sys
+
import pkg_resources
try:
@@ -7,8 +10,23 @@
except (ImportError, pkg_resources.DistributionNotFound):
pass
-testfiles = glob.glob('*.txt')
+def run(pattern):
+ if pattern is None:
+ testfiles = glob.glob('*.txt')
+ else:
+ testfiles = glob.glob(pattern)
+ for file in testfiles:
+ doctest.testfile(file)
-for file in testfiles:
- doctest.testfile(file)
+if __name__ == "__main__":
+ try:
+ opts, args = getopt.getopt(sys.argv[1:], "t:v")
+ except getopt.GetoptError:
+ print "Usage: python runalldoctests.py [-t GLOB_PATTERN]"
+ sys.exit(2)
+ pattern = None
+ for o, a in opts:
+ if o == '-t':
+ pattern = a
+ run(pattern)
|
60d71f0f6dc9de01442c304978ee7966319a5049 | zarya/settings/dev.py | zarya/settings/dev.py | from __future__ import absolute_import, unicode_literals
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'l33th4x0rs'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
try:
from .local import *
except ImportError:
pass
| from __future__ import absolute_import, unicode_literals
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'l33th4x0rs'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'uwcs_zarya',
'USER': 'uwcs_zarya',
'PASSWORD': 'doyouevenlift',
'HOST': '127.0.0.1',
'PORT': '5432',
},
'old_data': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
try:
from .local import *
except ImportError:
pass
| Change database connections for data migration tool | Change database connections for data migration tool
| Python | mit | davidjrichardson/uwcs-zarya,davidjrichardson/uwcs-zarya | ---
+++
@@ -8,9 +8,22 @@
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'l33th4x0rs'
+DATABASES = {
+ 'default': {
+ 'ENGINE': 'django.db.backends.postgresql_psycopg2',
+ 'NAME': 'uwcs_zarya',
+ 'USER': 'uwcs_zarya',
+ 'PASSWORD': 'doyouevenlift',
+ 'HOST': '127.0.0.1',
+ 'PORT': '5432',
+ },
+ 'old_data': {
+ 'ENGINE': 'django.db.backends.sqlite3',
+ 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
+ }
+}
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
-
try:
from .local import * |
61bbd4e8fc0712fe56614481173eb86d409eb8d7 | tests/test_linked_list.py | tests/test_linked_list.py | from unittest import TestCase
from pystructures.linked_lists import LinkedList, Node
class TestNode(TestCase):
def test_value(self):
""" A simple test to check the Node's value """
node = Node(10)
self.assertEqual(10, node.value)
def test_improper_node(self):
""" A test to check if an invalid data type is set as a node's next"""
node = Node(10)
with self.assertRaises(ValueError):
node.next = "Hello"
class TestLinkedList(TestCase):
def test_insert(self):
""" A simple test to check if insertion works as expected in a singly linked list """
l = LinkedList()
results = [l.insert(val) for val in xrange(10, 100, 10)]
self.assertEqual(len(set(results)), 1)
self.assertTrue(results[0], msg="Testing for successful insertion...")
self.assertEqual(len(results), l.size, msg="Testing if # of results equal list size...")
| from builtins import range
from unittest import TestCase
from pystructures.linked_lists import LinkedList, Node
class TestNode(TestCase):
def test_value(self):
""" A simple test to check the Node's value """
node = Node(10)
self.assertEqual(10, node.value)
def test_improper_node(self):
""" A test to check if an invalid data type is set as a node's next"""
node = Node(10)
with self.assertRaises(ValueError):
node.next = "Hello"
class TestLinkedList(TestCase):
def test_insert(self):
""" A simple test to check if insertion works as expected in a singly linked list """
l = LinkedList()
results = [l.insert(val) for val in range(10, 100, 10)]
self.assertEqual(len(set(results)), 1)
self.assertTrue(results[0], msg="Testing for successful insertion...")
self.assertEqual(len(results), l.size, msg="Testing if # of results equal list size...")
| Fix range issue with travis | Fix range issue with travis
| Python | mit | apranav19/pystructures | ---
+++
@@ -1,3 +1,4 @@
+from builtins import range
from unittest import TestCase
from pystructures.linked_lists import LinkedList, Node
@@ -18,7 +19,7 @@
def test_insert(self):
""" A simple test to check if insertion works as expected in a singly linked list """
l = LinkedList()
- results = [l.insert(val) for val in xrange(10, 100, 10)]
+ results = [l.insert(val) for val in range(10, 100, 10)]
self.assertEqual(len(set(results)), 1)
self.assertTrue(results[0], msg="Testing for successful insertion...")
self.assertEqual(len(results), l.size, msg="Testing if # of results equal list size...") |
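A note on the import added above: on Python 2 the builtins module is supplied by the third-party future package and its range is lazy like Python 3's, so it can replace xrange; on Python 3 the import just re-exports the built-in. A tiny sketch, assuming future is installed when run under Python 2:

from builtins import range  # provided by the 'future' package on Python 2

values = [v for v in range(10, 100, 10)]
assert len(values) == 9 and values[0] == 10 and values[-1] == 90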
01c0dd4d34e61df589b3dd9ee3c5f8b96cf5486b | tests/test_transformer.py | tests/test_transformer.py | from __future__ import unicode_literals
import functools
from scrapi.base import XMLHarvester
from scrapi.linter import RawDocument
from .utils import get_leaves
from .utils import TEST_SCHEMA, TEST_NAMESPACES, TEST_XML_DOC
class TestHarvester(XMLHarvester):
def harvest(self, days_back=1):
return [RawDocument({
'doc': str(TEST_XML_DOC),
'source': 'TEST',
'filetype': 'XML',
'docID': "1"
}) for _ in xrange(days_back)]
class TestTransformer(object):
def setup_method(self, method):
self.harvester = TestHarvester("TEST", TEST_SCHEMA, TEST_NAMESPACES)
def test_normalize(self):
results = [
self.harvester.normalize(record) for record in self.harvester.harvest(days_back=10)
]
for result in results:
assert result['properties']['title1'] == 'Test'
assert result['properties']['title2'] == 'test'
assert result['properties']['title3'] == 'Testtest'
for (k, v) in get_leaves(result.attributes):
assert type(v) != functools.partial
| from __future__ import unicode_literals
import functools
from scrapi.base import XMLHarvester
from scrapi.linter import RawDocument
from .utils import get_leaves
from .utils import TEST_SCHEMA, TEST_NAMESPACES, TEST_XML_DOC
class TestHarvester(XMLHarvester):
def harvest(self, days_back=1):
return [RawDocument({
'doc': str(TEST_XML_DOC),
'source': 'TEST',
'filetype': 'XML',
'docID': "1"
}) for _ in xrange(days_back)]
@property
def name(self):
return 'TEST'
@property
def namespaces(self):
return TEST_NAMESPACES
@property
def schema(self):
return TEST_SCHEMA
class TestTransformer(object):
def setup_method(self, method):
self.harvester = TestHarvester()
def test_normalize(self):
results = [
self.harvester.normalize(record) for record in self.harvester.harvest(days_back=10)
]
for result in results:
assert result['properties']['title1'] == 'Test'
assert result['properties']['title2'] == 'test'
assert result['properties']['title3'] == 'Testtest'
for (k, v) in get_leaves(result.attributes):
assert type(v) != functools.partial
| Update tests with required properties | Update tests with required properties
| Python | apache-2.0 | CenterForOpenScience/scrapi,jeffreyliu3230/scrapi,fabianvf/scrapi,felliott/scrapi,erinspace/scrapi,icereval/scrapi,mehanig/scrapi,ostwald/scrapi,erinspace/scrapi,alexgarciac/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,fabianvf/scrapi,felliott/scrapi | ---
+++
@@ -20,10 +20,23 @@
}) for _ in xrange(days_back)]
+ @property
+ def name(self):
+ return 'TEST'
+
+ @property
+ def namespaces(self):
+ return TEST_NAMESPACES
+
+ @property
+ def schema(self):
+ return TEST_SCHEMA
+
+
class TestTransformer(object):
def setup_method(self, method):
- self.harvester = TestHarvester("TEST", TEST_SCHEMA, TEST_NAMESPACES)
+ self.harvester = TestHarvester()
def test_normalize(self):
results = [ |
46d274401080d47f3a9974c6ee80f2f3b9c0c8b0 | metakernel/magics/tests/test_download_magic.py | metakernel/magics/tests/test_download_magic.py |
from metakernel.tests.utils import (get_kernel, get_log_text,
clear_log_text, EvalKernel)
import os
def test_download_magic():
kernel = get_kernel(EvalKernel)
kernel.do_execute("%download --filename TEST.txt https://raw.githubusercontent.com/blink1073/metakernel/master/LICENSE.txt")
text = get_log_text(kernel)
assert "Downloaded 'TEST.txt'" in text, text
assert os.path.isfile("TEST.txt"), "File does not exist: TEST.txt"
def teardown():
try:
os.remove("TEST.txt")
except:
pass
|
from metakernel.tests.utils import (get_kernel, get_log_text,
clear_log_text, EvalKernel)
import os
def test_download_magic():
kernel = get_kernel(EvalKernel)
kernel.do_execute("%download --filename TEST.txt https://raw.githubusercontent.com/blink1073/metakernel/master/LICENSE.txt")
text = get_log_text(kernel)
assert "Downloaded 'TEST.txt'" in text, text
assert os.path.isfile("TEST.txt"), "File does not exist: TEST.txt"
clear_log_text(kernel)
kernel.do_execute("%download https://raw.githubusercontent.com/blink1073/metakernel/master/LICENSE.txt")
text = get_log_text(kernel)
assert "Downloaded 'LICENSE.txt'" in text, text
assert os.path.isfile("LICENSE.txt"), "File does not exist: LICENSE.txt"
def teardown():
for fname in ['TEST.txt', 'LICENSE.txt']:
try:
os.remove(fname)
except:
pass
| Add download test without filename | Add download test without filename
| Python | bsd-3-clause | Calysto/metakernel | ---
+++
@@ -10,8 +10,17 @@
assert "Downloaded 'TEST.txt'" in text, text
assert os.path.isfile("TEST.txt"), "File does not exist: TEST.txt"
+ clear_log_text(kernel)
+
+ kernel.do_execute("%download https://raw.githubusercontent.com/blink1073/metakernel/master/LICENSE.txt")
+ text = get_log_text(kernel)
+ assert "Downloaded 'LICENSE.txt'" in text, text
+ assert os.path.isfile("LICENSE.txt"), "File does not exist: LICENSE.txt"
+
+
def teardown():
- try:
- os.remove("TEST.txt")
- except:
- pass
+ for fname in ['TEST.txt', 'LICENSE.txt']:
+ try:
+ os.remove(fname)
+ except:
+ pass |
6220c36f046b2b504cc2ebbbc04a34c4d826564d | IPython/extensions/tests/test_storemagic.py | IPython/extensions/tests/test_storemagic.py | import tempfile, os
import nose.tools as nt
ip = get_ipython()
ip.magic('load_ext storemagic')
def test_store_restore():
ip.user_ns['foo'] = 78
ip.magic('alias bar echo "hello"')
tmpd = tempfile.mkdtemp()
ip.magic('cd ' + tmpd)
ip.magic('store foo')
ip.magic('store bar')
# Check storing
nt.assert_equal(ip.db['autorestore/foo'], 78)
nt.assert_in('bar', ip.db['stored_aliases'])
# Remove those items
ip.user_ns.pop('foo', None)
ip.alias_manager.undefine_alias('bar')
ip.magic('cd -')
ip.user_ns['_dh'][:] = []
# Check restoring
ip.magic('store -r')
nt.assert_equal(ip.user_ns['foo'], 78)
nt.assert_in('bar', ip.alias_manager.alias_table)
nt.assert_in(os.path.realpath(tmpd), ip.user_ns['_dh'])
os.rmdir(tmpd)
| import tempfile, os
from IPython.config.loader import Config
import nose.tools as nt
ip = get_ipython()
ip.magic('load_ext storemagic')
def test_store_restore():
ip.user_ns['foo'] = 78
ip.magic('alias bar echo "hello"')
tmpd = tempfile.mkdtemp()
ip.magic('cd ' + tmpd)
ip.magic('store foo')
ip.magic('store bar')
# Check storing
nt.assert_equal(ip.db['autorestore/foo'], 78)
nt.assert_in('bar', ip.db['stored_aliases'])
# Remove those items
ip.user_ns.pop('foo', None)
ip.alias_manager.undefine_alias('bar')
ip.magic('cd -')
ip.user_ns['_dh'][:] = []
# Check restoring
ip.magic('store -r')
nt.assert_equal(ip.user_ns['foo'], 78)
nt.assert_in('bar', ip.alias_manager.alias_table)
nt.assert_in(os.path.realpath(tmpd), ip.user_ns['_dh'])
os.rmdir(tmpd)
def test_autorestore():
ip.user_ns['foo'] = 95
ip.magic('store foo')
del ip.user_ns['foo']
c = Config()
c.StoreMagics.autorestore = False
orig_config = ip.config
try:
ip.config = c
ip.extension_manager.reload_extension('storemagic')
nt.assert_not_in('foo', ip.user_ns)
c.StoreMagics.autorestore = True
ip.extension_manager.reload_extension('storemagic')
nt.assert_equal(ip.user_ns['foo'], 95)
finally:
ip.config = orig_config
| Add test for StoreMagics.autorestore option | Add test for StoreMagics.autorestore option
| Python | bsd-3-clause | ipython/ipython,ipython/ipython | ---
+++
@@ -1,5 +1,6 @@
import tempfile, os
+from IPython.config.loader import Config
import nose.tools as nt
ip = get_ipython()
@@ -30,3 +31,20 @@
nt.assert_in(os.path.realpath(tmpd), ip.user_ns['_dh'])
os.rmdir(tmpd)
+
+def test_autorestore():
+ ip.user_ns['foo'] = 95
+ ip.magic('store foo')
+ del ip.user_ns['foo']
+ c = Config()
+ c.StoreMagics.autorestore = False
+ orig_config = ip.config
+ try:
+ ip.config = c
+ ip.extension_manager.reload_extension('storemagic')
+ nt.assert_not_in('foo', ip.user_ns)
+ c.StoreMagics.autorestore = True
+ ip.extension_manager.reload_extension('storemagic')
+ nt.assert_equal(ip.user_ns['foo'], 95)
+ finally:
+ ip.config = orig_config |
975a5010e97b11b9b6f00923c87268dd883b1cfa | 2017-code/opt/test1.py | 2017-code/opt/test1.py | # test1.py
# Ronald L. Rivest and Karim Husayn Karimi
# August 17, 2017
# Routine to experiment with scipy.optimize.minimize
import scipy.optimize
from scipy.stats import norm
# function to minimize:
def g(xy):
(x,y) = xy
print("g({},{})".format(x,y))
return x + y
# constraints
noise_level = 0.0000005
# constraint 1: y <= x/2
def f1(xy):
(x,y) = xy
return x/2 - y + noise_level * norm.rvs(0)
# constraint 2: y >= 1/x
def f2(xy):
(x,y) = xy
return y - 1.0/x + noise_level * norm.rvs(0)
constraints = [
{ "type": "ineq",
"fun": f1
},
{ "type": "ineq",
"fun": f2
}
]
print(scipy.optimize.minimize(g, (11, 5), constraints=constraints))
| # test1.py
# Ronald L. Rivest and Karim Husayn Karimi
# August 17, 2017
# Routine to experiment with scipy.optimize.minimize
import scipy.optimize
from scipy.stats import norm
# function to minimize:
def g(xy):
(x,y) = xy
print("g({},{})".format(x,y))
return x + y
# constraints
noise_level = 0.05
# constraint 1: y <= x/2
def f1(xy):
(x,y) = xy
return x/2 - y + noise_level * norm.rvs(0)
# constraint 2: y >= 1/x
def f2(xy):
(x,y) = xy
return y - 1.0/x + noise_level * norm.rvs(0)
constraints = [
{ "type": "ineq",
"fun": f1
},
{ "type": "ineq",
"fun": f2
}
]
print(scipy.optimize.minimize(g,
(11, 5),
method = "COBYLA",
tol = 0.01,
constraints=constraints))
| Switch to COBYLA optimization method. Works much better. | Switch to COBYLA optimization method. Works much better.
| Python | mit | ron-rivest/2017-bayes-audit,ron-rivest/2017-bayes-audit | ---
+++
@@ -14,7 +14,7 @@
# constraints
-noise_level = 0.0000005
+noise_level = 0.05
# constraint 1: y <= x/2
def f1(xy):
@@ -35,6 +35,10 @@
}
]
-print(scipy.optimize.minimize(g, (11, 5), constraints=constraints))
+print(scipy.optimize.minimize(g,
+ (11, 5),
+ method = "COBYLA",
+ tol = 0.01,
+ constraints=constraints))
|
76cf350b4ca48455e9ae7d6288d992389a8ec0b5 | src/toil/provisioners/azure/__init__.py | src/toil/provisioners/azure/__init__.py | # Copyright (C) 2015-2016 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
def getAzureZone(defaultZone=None):
"""
Find an appropriate azure zone.
Look for an environment variable or return a default as provided.
:param defaultZone: The zone specified in the leader metadata.
:return zone: The zone.
"""
return os.environ.get('TOIL_AZURE_ZONE') or defaultZone
| # Copyright (C) 2015-2016 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| Remove getAzureZone from init, no longer needed. | Remove getAzureZone from init, no longer needed.
| Python | apache-2.0 | BD2KGenomics/slugflow,BD2KGenomics/slugflow | ---
+++
@@ -11,17 +11,3 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-import os
-
-def getAzureZone(defaultZone=None):
- """
- Find an appropriate azure zone.
-
- Look for an environment variable or return a default as provided.
-
- :param defaultZone: The zone specified in the leader metadata.
- :return zone: The zone.
- """
-
- return os.environ.get('TOIL_AZURE_ZONE') or defaultZone |
6941d9048a8c630244bb48100864872b35a1a307 | tests/functional/test_layout_and_styling.py | tests/functional/test_layout_and_styling.py | import os
from .base import FunctionalTest
class LayoutStylingTest(FunctionalTest):
def test_bootstrap_links_loaded_successfully(self):
self.browser.get(self.live_server_url)
self.assertIn(
"//netdna.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css",
self.browser.page_source.strip())
self.assertIn(
"//netdna.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js",
self.browser.page_source.strip())
self.assertIn(
'//code.jquery.com/jquery.min.js',
self.browser.page_source.strip())
| from .base import FunctionalTest
class LayoutStylingTest(FunctionalTest):
def test_bootstrap_links_loaded_successfully(self):
self.browser.get(self.live_server_url)
links = [link.get_attribute("href")
for link in self.browser.find_elements_by_tag_name('link')]
scripts = [script.get_attribute("src")
for script in self.browser.find_elements_by_tag_name('script')]
self.assertTrue(
["//netdna.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css"
in link for link in links])
self.assertTrue(
["//netdna.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"
in link for link in links])
self.assertTrue(
["//code.jquery.com/jquery.min.js"
in link for link in scripts])
| Fix bootstrap and jQuery link checking in homepage | Fix bootstrap and jQuery link checking in homepage
| Python | bsd-3-clause | andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop,kevgathuku/compshop,kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop | ---
+++
@@ -1,5 +1,3 @@
-import os
-
from .base import FunctionalTest
@@ -8,14 +6,19 @@
def test_bootstrap_links_loaded_successfully(self):
self.browser.get(self.live_server_url)
- self.assertIn(
- "//netdna.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css",
- self.browser.page_source.strip())
+ links = [link.get_attribute("href")
+ for link in self.browser.find_elements_by_tag_name('link')]
+ scripts = [script.get_attribute("src")
+ for script in self.browser.find_elements_by_tag_name('script')]
- self.assertIn(
- "//netdna.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js",
- self.browser.page_source.strip())
+ self.assertTrue(
+ ["//netdna.bootstrapcdn.com/bootstrap/3.3.4/css/bootstrap.min.css"
+ in link for link in links])
- self.assertIn(
- '//code.jquery.com/jquery.min.js',
- self.browser.page_source.strip())
+ self.assertTrue(
+ ["//netdna.bootstrapcdn.com/bootstrap/3.3.4/js/bootstrap.min.js"
+ in link for link in links])
+
+ self.assertTrue(
+ ["//code.jquery.com/jquery.min.js"
+ in link for link in scripts]) |
7375c41b1d9bd5ca153df80705ae1887e6f2e70b | api/base/exceptions.py | api/base/exceptions.py |
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that nests detail inside errors.
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
return response |
def jsonapi_exception_handler(exc, context):
"""
Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
if response is not None:
if 'detail' in response.data:
response.data = {'errors': [response.data]}
else:
response.data = {'errors': [{'detail': response.data}]}
return response
| Change docstring for exception handler | Change docstring for exception handler
| Python | apache-2.0 | RomanZWang/osf.io,felliott/osf.io,cosenal/osf.io,caseyrollins/osf.io,aaxelb/osf.io,aaxelb/osf.io,ckc6cz/osf.io,zamattiac/osf.io,emetsger/osf.io,ZobairAlijan/osf.io,monikagrabowska/osf.io,petermalcolm/osf.io,mfraezz/osf.io,crcresearch/osf.io,MerlinZhang/osf.io,mluke93/osf.io,hmoco/osf.io,emetsger/osf.io,felliott/osf.io,ckc6cz/osf.io,caseyrollins/osf.io,aaxelb/osf.io,samanehsan/osf.io,brianjgeiger/osf.io,kwierman/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,mluke93/osf.io,cosenal/osf.io,sbt9uc/osf.io,pattisdr/osf.io,sloria/osf.io,amyshi188/osf.io,leb2dg/osf.io,brandonPurvis/osf.io,crcresearch/osf.io,felliott/osf.io,mluo613/osf.io,caseyrygt/osf.io,doublebits/osf.io,SSJohns/osf.io,alexschiller/osf.io,TomBaxter/osf.io,mattclark/osf.io,RomanZWang/osf.io,doublebits/osf.io,cslzchen/osf.io,icereval/osf.io,erinspace/osf.io,MerlinZhang/osf.io,doublebits/osf.io,Johnetordoff/osf.io,acshi/osf.io,KAsante95/osf.io,jnayak1/osf.io,danielneis/osf.io,petermalcolm/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,haoyuchen1992/osf.io,baylee-d/osf.io,DanielSBrown/osf.io,RomanZWang/osf.io,samchrisinger/osf.io,aaxelb/osf.io,RomanZWang/osf.io,HalcyonChimera/osf.io,acshi/osf.io,sloria/osf.io,pattisdr/osf.io,njantrania/osf.io,amyshi188/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,billyhunt/osf.io,mattclark/osf.io,KAsante95/osf.io,arpitar/osf.io,caneruguz/osf.io,ckc6cz/osf.io,abought/osf.io,jmcarp/osf.io,zachjanicki/osf.io,abought/osf.io,mattclark/osf.io,crcresearch/osf.io,mluo613/osf.io,asanfilippo7/osf.io,wearpants/osf.io,mfraezz/osf.io,samchrisinger/osf.io,adlius/osf.io,chrisseto/osf.io,danielneis/osf.io,samanehsan/osf.io,samanehsan/osf.io,sloria/osf.io,amyshi188/osf.io,brandonPurvis/osf.io,jnayak1/osf.io,GageGaskins/osf.io,alexschiller/osf.io,Ghalko/osf.io,binoculars/osf.io,doublebits/osf.io,Nesiehr/osf.io,chrisseto/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,alexschiller/osf.io,emetsger/osf.io,haoyuchen1992/osf.io,chrisseto/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,mfraezz/osf.io,abought/osf.io,emetsger/osf.io,amyshi188/osf.io,chennan47/osf.io,zachjanicki/osf.io,kch8qx/osf.io,mluke93/osf.io,acshi/osf.io,doublebits/osf.io,billyhunt/osf.io,leb2dg/osf.io,ckc6cz/osf.io,mluo613/osf.io,monikagrabowska/osf.io,sbt9uc/osf.io,TomHeatwole/osf.io,caneruguz/osf.io,Ghalko/osf.io,billyhunt/osf.io,leb2dg/osf.io,asanfilippo7/osf.io,erinspace/osf.io,zamattiac/osf.io,KAsante95/osf.io,cslzchen/osf.io,kch8qx/osf.io,kch8qx/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,cwisecarver/osf.io,SSJohns/osf.io,adlius/osf.io,jmcarp/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,kwierman/osf.io,laurenrevere/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,MerlinZhang/osf.io,kwierman/osf.io,GageGaskins/osf.io,KAsante95/osf.io,cslzchen/osf.io,TomHeatwole/osf.io,MerlinZhang/osf.io,Ghalko/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,rdhyee/osf.io,asanfilippo7/osf.io,njantrania/osf.io,ticklemepierce/osf.io,jnayak1/osf.io,baylee-d/osf.io,jmcarp/osf.io,kch8qx/osf.io,njantrania/osf.io,rdhyee/osf.io,petermalcolm/osf.io,sbt9uc/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,mluo613/osf.io,cosenal/osf.io,felliott/osf.io,Nesiehr/osf.io,hmoco/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,erinspace/osf.io,acshi/osf.io,saradbowman/osf.io,TomBaxter/osf.io,Ghalko/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,chennan47/osf.io,ticklemepierce/osf.io,Nesiehr/osf.io,billyhunt/osf.io,icereval/osf.io,mluo613/osf.i
o,adlius/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,haoyuchen1992/osf.io,icereval/osf.io,arpitar/osf.io,sbt9uc/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,njantrania/osf.io,baylee-d/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,binoculars/osf.io,GageGaskins/osf.io,caseyrygt/osf.io,mluke93/osf.io,jmcarp/osf.io,monikagrabowska/osf.io,GageGaskins/osf.io,wearpants/osf.io,cslzchen/osf.io,mfraezz/osf.io,pattisdr/osf.io,wearpants/osf.io,samchrisinger/osf.io,abought/osf.io,HalcyonChimera/osf.io,caseyrygt/osf.io,binoculars/osf.io,kwierman/osf.io,zachjanicki/osf.io,acshi/osf.io,ZobairAlijan/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,hmoco/osf.io,DanielSBrown/osf.io,alexschiller/osf.io,zachjanicki/osf.io,danielneis/osf.io,rdhyee/osf.io,samanehsan/osf.io,petermalcolm/osf.io,hmoco/osf.io,ticklemepierce/osf.io,ZobairAlijan/osf.io,arpitar/osf.io,billyhunt/osf.io,saradbowman/osf.io,zamattiac/osf.io,KAsante95/osf.io,caneruguz/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,samchrisinger/osf.io,SSJohns/osf.io,cosenal/osf.io,ZobairAlijan/osf.io,wearpants/osf.io,danielneis/osf.io,Johnetordoff/osf.io,adlius/osf.io,leb2dg/osf.io,SSJohns/osf.io,TomBaxter/osf.io,arpitar/osf.io | ---
+++
@@ -1,7 +1,7 @@
def jsonapi_exception_handler(exc, context):
"""
- Custom exception handler that nests detail inside errors.
+ Custom exception handler that returns errors object as an array with a 'detail' member
"""
from rest_framework.views import exception_handler
response = exception_handler(exc, context) |
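For context, a handler like the one above only takes effect once it is named in the Django settings; a typical registration, with the dotted path assumed from this file's location, looks like:

REST_FRAMEWORK = {
    'EXCEPTION_HANDLER': 'api.base.exceptions.jsonapi_exception_handler',
}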
6d624d693a05749879f4184231e727590542db03 | backend/globaleaks/tests/utils/test_zipstream.py | backend/globaleaks/tests/utils/test_zipstream.py | # -*- encoding: utf-8 -*-
import StringIO
from twisted.internet.defer import inlineCallbacks
from zipfile import ZipFile
from globaleaks.tests import helpers
from globaleaks.utils.zipstream import ZipStream
class TestZipStream(helpers.TestGL):
@inlineCallbacks
def setUp(self):
yield helpers.TestGL.setUp(self)
self.files = []
for k in self.internationalized_text:
self.files.append({'name': self.internationalized_text[k].encode('utf8'),
'buf': self.internationalized_text[k].encode('utf-8')})
def test_zipstream(self):
output = StringIO.StringIO()
for data in ZipStream(self.files):
output.write(data)
with ZipFile(output, 'r') as f:
self.assertIsNone(f.testzip())
| # -*- encoding: utf-8 -*-
import os
import StringIO
from twisted.internet.defer import inlineCallbacks
from zipfile import ZipFile
from globaleaks.tests import helpers
from globaleaks.utils.zipstream import ZipStream
class TestZipStream(helpers.TestGL):
@inlineCallbacks
def setUp(self):
yield helpers.TestGL.setUp(self)
self.unicode_seq = ''.join(unichr(x) for x in range(0x400, 0x40A))
self.files = [
{'name': self.unicode_seq, 'buf': self.unicode_seq},
{'name': __file__, 'path': os.path.abspath(__file__)}
]
def test_zipstream(self):
output = StringIO.StringIO()
for data in ZipStream(self.files):
output.write(data)
with ZipFile(output, 'r') as f:
self.assertIsNone(f.testzip())
with ZipFile(output, 'r') as f:
infolist = f.infolist()
self.assertTrue(len(infolist), 2)
for ff in infolist:
if ff.filename == self.unicode_seq:
self.assertTrue(ff.file_size == len(self.unicode_seq))
else:
self.assertTrue(ff.file_size == os.stat(os.path.abspath(__file__)).st_size)
| Improve unit testing of zipstream utilities | Improve unit testing of zipstream utilities
| Python | agpl-3.0 | vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks | ---
+++
@@ -1,4 +1,6 @@
# -*- encoding: utf-8 -*-
+
+import os
import StringIO
from twisted.internet.defer import inlineCallbacks
@@ -7,16 +9,17 @@
from globaleaks.tests import helpers
from globaleaks.utils.zipstream import ZipStream
-
class TestZipStream(helpers.TestGL):
@inlineCallbacks
def setUp(self):
yield helpers.TestGL.setUp(self)
- self.files = []
- for k in self.internationalized_text:
- self.files.append({'name': self.internationalized_text[k].encode('utf8'),
- 'buf': self.internationalized_text[k].encode('utf-8')})
+ self.unicode_seq = ''.join(unichr(x) for x in range(0x400, 0x40A))
+
+ self.files = [
+ {'name': self.unicode_seq, 'buf': self.unicode_seq},
+ {'name': __file__, 'path': os.path.abspath(__file__)}
+ ]
def test_zipstream(self):
output = StringIO.StringIO()
@@ -26,3 +29,12 @@
with ZipFile(output, 'r') as f:
self.assertIsNone(f.testzip())
+
+ with ZipFile(output, 'r') as f:
+ infolist = f.infolist()
+ self.assertTrue(len(infolist), 2)
+ for ff in infolist:
+ if ff.filename == self.unicode_seq:
+ self.assertTrue(ff.file_size == len(self.unicode_seq))
+ else:
+ self.assertTrue(ff.file_size == os.stat(os.path.abspath(__file__)).st_size) |
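The read-back checks performed by the updated test above can be reproduced with the standard library alone. The sketch below builds an in-memory archive and verifies it the same way (member names are illustrative, not GlobaLeaks fixtures; shown with Python 3's io.BytesIO rather than the StringIO used in the record).

import io
import os
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, 'w') as zf:
    zf.writestr('greeting.txt', 'hello')  # member from an in-memory buffer
    zf.write(__file__)                    # member backed by a file on disk

with zipfile.ZipFile(buf, 'r') as zf:
    assert zf.testzip() is None           # no corrupt members
    for info in zf.infolist():
        if info.filename == 'greeting.txt':
            assert info.file_size == len('hello')
        else:
            assert info.file_size == os.stat(__file__).st_size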
92138f23dfc5dbbcb81aeb1f429e68a63a9d5005 | apps/organizations/admin.py | apps/organizations/admin.py | from django.contrib import admin
from apps.organizations.models import (
Organization, OrganizationAddress, OrganizationMember
)
class OrganizationAddressAdmin(admin.StackedInline):
model = OrganizationAddress
extra = 0
class OrganizationAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
inlines = (OrganizationAddressAdmin,)
search_fields = ('name', 'description')
admin.site.register(Organization, OrganizationAdmin)
admin.site.register(OrganizationMember)
| from django.contrib import admin
from apps.organizations.models import (
Organization, OrganizationAddress, OrganizationMember
)
class OrganizationAddressAdmin(admin.StackedInline):
model = OrganizationAddress
extra = 0
class OrganizationAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
inlines = (OrganizationAddressAdmin,)
search_fields = ('name', 'description')
admin.site.register(Organization, OrganizationAdmin)
class OrganizationMemberAdmin(admin.ModelAdmin):
list_display = ('user', 'function', 'organization')
list_filter = ('function',)
search_fields = ('user__first_name', 'user__last_name',
'user__username', 'organization__name')
admin.site.register(OrganizationMember, OrganizationMemberAdmin) | Add a custom Admin page for organization members. | Add a custom Admin page for organization members.
This is a partial fix for BB-66.
| Python | bsd-3-clause | onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site | ---
+++
@@ -17,7 +17,13 @@
search_fields = ('name', 'description')
-
admin.site.register(Organization, OrganizationAdmin)
-admin.site.register(OrganizationMember)
+
+class OrganizationMemberAdmin(admin.ModelAdmin):
+ list_display = ('user', 'function', 'organization')
+ list_filter = ('function',)
+ search_fields = ('user__first_name', 'user__last_name',
+ 'user__username', 'organization__name')
+
+admin.site.register(OrganizationMember, OrganizationMemberAdmin) |
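The double-underscore entries in the search_fields above follow foreign keys; on the admin changelist they become case-insensitive lookups roughly like the sketch below. This is a simplified illustration of Django's behaviour, not code from the project.

from django.db.models import Q

def search_organization_members(queryset, term):
    # Roughly what the admin search box does for the fields declared above.
    return queryset.filter(
        Q(user__first_name__icontains=term) |
        Q(user__last_name__icontains=term) |
        Q(user__username__icontains=term) |
        Q(organization__name__icontains=term)
    )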
60a5104f0138af7bbfc5056fae01898c148b10a0 | benchmarks/serialization.py | benchmarks/serialization.py | """
Benchmark of message serialization.
The goal here is to mostly focus on performance of serialization, in a vaguely
realistic manner. That is, messages are logged in context of a message with a
small number of fields.
"""
from __future__ import unicode_literals
import time
from eliot import Message, start_action, to_file
# Ensure JSON serialization is part of benchmark:
to_file(open("/dev/null"))
N = 10000
def run():
start = time.time()
for i in range(N):
with start_action(action_type="my_action"):
with start_action(action_type="my_action2"):
Message.log(
message_type="my_message",
integer=3, string=b"abcdeft", string2="dgsjdlkgjdsl",
list=[1, 2, 3, 4])
end = time.time()
# Each iteration has 5 messages: start/end of my_action, start/end of
# my_action2, and my_message.
print("%.6f per message" % ((end - start) / (N * 5),))
print("%s messages/sec" % (int(N / (end-start)),))
if __name__ == '__main__':
run()
| """
Benchmark of message serialization.
The goal here is to mostly focus on performance of serialization, in a vaguely
realistic manner. That is, messages are logged in context of a message with a
small number of fields.
"""
from __future__ import unicode_literals
import time
from eliot import Message, start_action, to_file
# Ensure JSON serialization is part of benchmark:
to_file(open("/dev/null", "w"))
N = 10000
def run():
start = time.time()
for i in range(N):
with start_action(action_type="my_action"):
with start_action(action_type="my_action2"):
Message.log(
message_type="my_message",
integer=3, string=b"abcdeft", string2="dgsjdlkgjdsl",
list=[1, 2, 3, 4])
end = time.time()
# Each iteration has 5 messages: start/end of my_action, start/end of
# my_action2, and my_message.
print("%.6f per message" % ((end - start) / (N * 5),))
print("%s messages/sec" % (int(N / (end-start)),))
if __name__ == '__main__':
run()
| Fix the benchmark so it's not throwing exceptions every time a message is written | Fix the benchmark so it's not throwing exceptions every time a message is written | Python | apache-2.0 | ScatterHQ/eliot,ScatterHQ/eliot,ClusterHQ/eliot,ScatterHQ/eliot | ---
+++
@@ -13,7 +13,7 @@
from eliot import Message, start_action, to_file
# Ensure JSON serialization is part of benchmark:
-to_file(open("/dev/null"))
+to_file(open("/dev/null", "w"))
N = 10000
|
38d80f89abc4c39d077505c4d6f27c4db699eeee | examples/calculations/Parse_Angles.py | examples/calculations/Parse_Angles.py | # Copyright (c) 2015-2018 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Parse angles
============
Demonstrate how to convert direction strings to angles.
The code below shows how to parse directional text into angles.
It also demonstrates the function's flexibility
in handling various string formatting.
"""
import metpy.calc as mpcalc
###########################################
# Create a test value of a directional text
dir_str = 'SOUTH SOUTH EAST'
print(dir_str)
###########################################
# Now throw that string into the function to calculate
# the corresponding angle
angle_deg = mpcalc.parse_angle(dir_str)
print(angle_deg)
###########################################
# The function can also handle arrays of strings
# in many different abbreviations and capitalizations
dir_str_list = ['ne', 'NE', 'NORTHEAST', 'NORTH_EAST', 'NORTH east']
angle_deg_list = mpcalc.parse_angle(dir_str_list)
print(angle_deg_list)
| # Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Parse angles
============
Demonstrate how to convert direction strings to angles.
The code below shows how to parse directional text into angles.
It also demonstrates the function's flexibility
in handling various string formatting.
"""
import metpy.calc as mpcalc
###########################################
# Create a test value of a directional text
dir_str = 'SOUTH SOUTH EAST'
print(dir_str)
###########################################
# Now throw that string into the function to calculate
# the corresponding angle
angle_deg = mpcalc.parse_angle(dir_str)
print(angle_deg)
###########################################
# The function can also handle arrays of strings
# in many different abbreviations and capitalizations
dir_str_list = ['ne', 'NE', 'NORTHEAST', 'NORTH_EAST', 'NORTH east']
angle_deg_list = mpcalc.parse_angle(dir_str_list)
print(angle_deg_list)
| Add parse_angle() example to calculations page. | Add parse_angle() example to calculations page.
| Python | bsd-3-clause | Unidata/MetPy,ahaberlie/MetPy,dopplershift/MetPy,dopplershift/MetPy,Unidata/MetPy,ShawnMurd/MetPy,ahaberlie/MetPy | ---
+++
@@ -1,4 +1,4 @@
-# Copyright (c) 2015-2018 MetPy Developers.
+# Copyright (c) 2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
""" |
539608a9ca9a21707184496e744fc40a8cb72cc1 | announce/management/commands/migrate_mailchimp_users.py | announce/management/commands/migrate_mailchimp_users.py | from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from announce.mailchimp import archive_members, list_members, batch_subscribe
from studygroups.models import Profile
import requests
import logging
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Synchronize mailchimp audience with users that opted in for communications'
def handle(self, *args, **options):
# get all mailchimp users
mailchimp_members = list_members()
filter_subscribed = lambda x: x.get('status') not in ['unsubscribed', 'cleaned']
mailchimp_members = filter(filter_subscribed, mailchimp_members)
emails = [member.get('email_address').lower() for member in mailchimp_members]
        # add all members with communication_opt_in == True to mailchimp
subscribed = User.objects.filter(profile__communication_opt_in=True, is_active=True, profile__email_confirmed_at__isnull=False)
to_sub = list(filter(lambda u: u.email.lower() not in emails, subscribed))
print('{} users will be added to the mailchimp list'.format(len(to_sub)))
batch_subscribe(to_sub)
# update profile.communication_opt_in = True for users subscribed to the mailchimp newsletter
unsubscribed_users = User.objects.filter(profile__communication_opt_in=False, is_active=True, profile__email_confirmed_at__isnull=False)
to_update = list(filter(lambda u: u.email.lower() in emails, unsubscribed_users))
for user in to_update:
user.profile.communication_opt_in = True
user.profile.save()
| from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from announce.mailchimp import archive_members, list_members, batch_subscribe
from studygroups.models import Profile
import requests
import logging
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Synchronize mailchimp audience with users that opted in for communications'
def handle(self, *args, **options):
# get all mailchimp users
mailchimp_members = list_members()
filter_subscribed = lambda x: x.get('status') not in ['unsubscribed', 'cleaned']
mailchimp_members = filter(filter_subscribed, mailchimp_members)
emails = [member.get('email_address').lower() for member in mailchimp_members]
        # add all members with communication_opt_in == True to mailchimp
subscribed = User.objects.filter(profile__communication_opt_in=True, is_active=True, profile__email_confirmed_at__isnull=False)
to_sub = list(filter(lambda u: u.email.lower() not in emails, subscribed))
print('{} users will be added to the mailchimp list'.format(len(to_sub)))
batch_subscribe(to_sub)
 | Remove once-off code for mailchimp list migration | Remove once-off code for mailchimp list migration
| Python | mit | p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles | ---
+++
@@ -25,13 +25,3 @@
to_sub = list(filter(lambda u: u.email.lower() not in emails, subscribed))
print('{} users will be added to the mailchimp list'.format(len(to_sub)))
batch_subscribe(to_sub)
-
- # update profile.communication_opt_in = True for users subscribed to the mailchimp newsletter
- unsubscribed_users = User.objects.filter(profile__communication_opt_in=False, is_active=True, profile__email_confirmed_at__isnull=False)
- to_update = list(filter(lambda u: u.email.lower() in emails, unsubscribed_users))
- for user in to_update:
- user.profile.communication_opt_in = True
- user.profile.save()
-
-
- |
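The command above boils down to a set-difference rule: subscribe opted-in users whose address is not already on the audience. A minimal sketch of that rule with illustrative names (not the project's API):

def users_to_subscribe(opted_in_users, audience_emails):
    """Return users that opted in but are missing from the Mailchimp audience."""
    existing = {email.lower() for email in audience_emails}
    return [user for user in opted_in_users if user.email.lower() not in existing]

# Example with stand-in objects:
class _User(object):
    def __init__(self, email):
        self.email = email

print(len(users_to_subscribe([_User('[email protected]'), _User('[email protected]')],
                             ['[email protected]'])))  # -> 1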
d8a5d6d6478ae8267ccd9d1e4db710f8decb7991 | wiki/achievements.py | wiki/achievements.py | import wikipedia
import sys
import random
import re
import nltk.data
def process_file(f):
names = {}
with open(f) as file:
for line in file:
l = line.strip().split('\t')
if len(l) != 2:
continue
(k, v) = l
names[k] = v
return names
REGEX_IN_DATE = r".*in\s*(?:[^ ,]*?)?\s*\d\d\d\d.*"
def process_page(id):
page = wikipedia.page(pageid=id)
in_date_regex = re.compile(REGEX_IN_DATE, re.IGNORECASE)
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
out = set()
for line in tokenizer.tokenize(page.content, realign_boundaries=True):
if '\n' in line:
line = line.split('\n')
else:
line = [line]
for l in line:
if in_date_regex.match(l):
out.add(l)
return out
if __name__ == '__main__':
for file in sys.argv[1:]:
names = process_file(file)
if len(names) > 10:
sample = random.sample(names, 10)
else:
sample = names
for name in sample:
pageid = names[name]
print "Results of processing {} ({})".format(name, pageid)
for achievement in process_page(pageid):
print "\t", achievement.encode('utf-8')
| import wikipedia
import sys
import random
import re
import nltk.data
def process_file(f):
names = {}
with open(f) as file:
for line in file:
l = line.strip().split('\t')
if len(l) != 2:
continue
(k, v) = l
names[k] = v
return names
REGEX_IN_DATE = r".*in\s*(?:[^ ,]*?)?\s*\d\d\d\d.*"
def process_page(id):
page = wikipedia.page(pageid=id)
in_date_regex = re.compile(REGEX_IN_DATE, re.IGNORECASE)
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
out = set()
for line in tokenizer.tokenize(page.content, realign_boundaries=True):
if '\n' in line:
line = line.split('\n')
else:
line = [line]
for l in line:
if in_date_regex.match(l):
out.add(l)
return out
if __name__ == '__main__':
for file in sys.argv[1:]:
names = process_file(file)
if len(names) > 10:
sample = random.sample(names, 10)
else:
sample = names
for name in sample:
pageid = names[name]
print "Results of processing {} ({})".format(name, pageid)
for achievement in process_page(pageid):
print ("\t", achievement.encode('utf-8'))
 | Use print as a function | janitoring: Use print as a function
- Let's be Python 3 compatible.
Signed-off-by: mr.Shu <[email protected]>
| Python | apache-2.0 | Motivatix/wikipedia-achievements-processing | ---
+++
@@ -48,4 +48,4 @@
pageid = names[name]
print "Results of processing {} ({})".format(name, pageid)
for achievement in process_page(pageid):
- print "\t", achievement.encode('utf-8')
+ print ("\t", achievement.encode('utf-8')) |
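A quick check of what REGEX_IN_DATE in the script above is meant to catch — sentences that mention something happening "in <word> <four-digit year>". The sample sentences are made up for illustration.

import re

REGEX_IN_DATE = r".*in\s*(?:[^ ,]*?)?\s*\d\d\d\d.*"
pattern = re.compile(REGEX_IN_DATE, re.IGNORECASE)

samples = [
    "She won the award in Stockholm 1992.",   # matches: "in <place> <year>"
    "He retired in 2005.",                    # matches: "in <year>"
    "No year is mentioned here.",             # no match: no four-digit year
]
for sentence in samples:
    print(bool(pattern.match(sentence)), sentence)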
5be55b944b52c047b6d91d46c23645c5fb79c342 | webapp/calendars/forms.py | webapp/calendars/forms.py | from django import forms
from django.contrib.admin import widgets
from datetimewidget.widgets import DateTimeWidget
from .models import Event
class LoginForm(forms.Form):
username = forms.CharField(label='Nazwa użytkownika')
password = forms.CharField(label='Hasło', widget=forms.PasswordInput())
date_time_options = {
'format': 'dd.mm.yyyy HH:ii',
'language': 'pl'
}
def dt_widget():
return DateTimeWidget(
bootstrap_version=3,
options=date_time_options
)
class EventForm(forms.ModelForm):
class Meta:
model = Event
fields = (
'title', 'place',
'description', 'categories',
'start_time', 'end_time',
'image', 'url',
)
widgets = {
'start_time': dt_widget(),
'end_time': dt_widget(),
}
| from django import forms
from django.contrib.admin import widgets
from datetimewidget.widgets import DateTimeWidget
from .models import Event
class LoginForm(forms.Form):
username = forms.CharField(label='Nazwa użytkownika')
password = forms.CharField(label='Hasło', widget=forms.PasswordInput())
date_time_options = {
'format': 'dd.mm.yyyy hh:ii',
'language': 'pl'
}
def dt_widget():
return DateTimeWidget(
bootstrap_version=3,
options=date_time_options
)
class EventForm(forms.ModelForm):
class Meta:
model = Event
fields = (
'title', 'place',
'description', 'categories',
'start_time', 'end_time',
'image', 'url',
)
widgets = {
'start_time': dt_widget(),
'end_time': dt_widget(),
}
| Use 24h time format in datetime-widget. | Use 24h time format in datetime-widget.
Signed-off-by: Mariusz Fik <[email protected]>
| Python | agpl-3.0 | hackerspace-silesia/calendar-oswiecim,hackerspace-silesia/calendar-oswiecim,firemark/calendar-oswiecim,Fisiu/calendar-oswiecim,Fisiu/calendar-oswiecim,firemark/calendar-oswiecim,firemark/calendar-oswiecim,Fisiu/calendar-oswiecim,hackerspace-silesia/calendar-oswiecim | ---
+++
@@ -10,7 +10,7 @@
date_time_options = {
- 'format': 'dd.mm.yyyy HH:ii',
+ 'format': 'dd.mm.yyyy hh:ii',
'language': 'pl'
}
|
c93cb479446fbe12e019550f193cb45dbdc1e3e0 | pytest-{{cookiecutter.plugin_name}}/pytest_{{cookiecutter.plugin_name}}.py | pytest-{{cookiecutter.plugin_name}}/pytest_{{cookiecutter.plugin_name}}.py | # -*- coding: utf-8 -*-
import pytest
def pytest_addoption(parser):
group = parser.getgroup('{{cookiecutter.plugin_name}}')
group.addoption(
'--foo',
action='store',
dest='foo',
help='alias for --foo'
)
@pytest.fixture
def bar(request):
return request.config.option.foo
| # -*- coding: utf-8 -*-
import pytest
def pytest_addoption(parser):
group = parser.getgroup('{{cookiecutter.plugin_name}}')
group.addoption(
'--foo',
action='store',
dest='foo',
help='Set the value for the fixture "bar".'
)
@pytest.fixture
def bar(request):
return request.config.option.foo
| Optimize the help message for the option arg | Optimize the help message for the option arg
| Python | mit | luzfcb/cookiecutter-pytest-plugin,s0undt3ch/cookiecutter-pytest-plugin,pytest-dev/cookiecutter-pytest-plugin | ---
+++
@@ -9,7 +9,7 @@
'--foo',
action='store',
dest='foo',
- help='alias for --foo'
+ help='Set the value for the fixture "bar".'
)
|
ca977a2a9c8f3c3b54f2bd516323fa9b1dc23b6c | pocs/utils/data.py | pocs/utils/data.py | import argparse
import os
import shutil
from astroplan import download_IERS_A
from astropy.utils import data
def download_all_files(data_folder=None):
download_IERS_A()
if data_folder is None:
data_folder = "{}/astrometry/data".format(os.getenv('PANDIR'))
for i in range(4214, 4219):
fn = 'index-{}.fits'.format(i)
dest = "{}/{}".format(data_folder, fn)
if not os.path.exists(dest):
url = "http://data.astrometry.net/4200/{}".format(fn)
df = data.download_file(url)
try:
shutil.move(df, dest)
except OSError as e:
print("Problem saving. (Maybe permissions?): {}".format(e))
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('--folder', help='Folder to place astrometry data')
args = parser.parse_args()
if args.folder and not os.path.exists(args.folder):
print("{} does not exist.".format(args.folder))
download_all_files(data_folder=args.folder)
| import argparse
import os
import shutil
from astroplan import download_IERS_A
from astropy.utils import data
def download_all_files(data_folder=None):
download_IERS_A()
if data_folder is None:
data_folder = "{}/astrometry/data".format(os.getenv('PANDIR'))
for i in range(4214, 4220):
fn = 'index-{}.fits'.format(i)
dest = "{}/{}".format(data_folder, fn)
if not os.path.exists(dest):
url = "http://data.astrometry.net/4200/{}".format(fn)
df = data.download_file(url)
try:
shutil.move(df, dest)
except OSError as e:
print("Problem saving. (Maybe permissions?): {}".format(e))
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('--folder', help='Folder to place astrometry data')
args = parser.parse_args()
if args.folder and not os.path.exists(args.folder):
print("{} does not exist.".format(args.folder))
download_all_files(data_folder=args.folder)
| Add one more index file for solver | Add one more index file for solver
| Python | mit | panoptes/POCS,joshwalawender/POCS,panoptes/POCS,AstroHuntsman/POCS,panoptes/POCS,panoptes/POCS,joshwalawender/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,AstroHuntsman/POCS,joshwalawender/POCS | ---
+++
@@ -12,7 +12,7 @@
if data_folder is None:
data_folder = "{}/astrometry/data".format(os.getenv('PANDIR'))
- for i in range(4214, 4219):
+ for i in range(4214, 4220):
fn = 'index-{}.fits'.format(i)
dest = "{}/{}".format(data_folder, fn)
|
3d7bbd37485dca4782ad7e7fdb088b22db586b66 | pyscores/config.py | pyscores/config.py | BASE_URL = "http://api.football-data.org/v1"
LEAGUE_IDS = {
"PL": "426",
"ELC": "427",
"EL1": "428",
"FAC": "429",
"BL1": "430",
"BL2": "431",
"DFB": "432",
"DED": "433",
"FL1": "434",
"FL2": "435",
"PD": "436",
"SD": "437",
"SA": "438",
"PPL": "439",
"CL": "440",
"SB": "441",
"ENL": "442",
"EL2": "443"
}
| BASE_URL = "http://api.football-data.org/v1"
LEAGUE_IDS = {
"BSA": "444",
"PL": "445",
"ELC": "446",
"EL1": "447",
"EL2": "448",
"DED": "449",
"FL1": "450",
"FL2": "451",
"BL1": "452",
"BL2": "453",
"PD": "455",
"SA": "456",
"PPL": "457",
"DFB": "458",
"SB": "459",
"CL": "464",
"AAL": "466"
}
| Update league codes for new season | Update league codes for new season
| Python | mit | conormag94/pyscores | ---
+++
@@ -1,22 +1,21 @@
BASE_URL = "http://api.football-data.org/v1"
LEAGUE_IDS = {
- "PL": "426",
- "ELC": "427",
- "EL1": "428",
- "FAC": "429",
- "BL1": "430",
- "BL2": "431",
- "DFB": "432",
- "DED": "433",
- "FL1": "434",
- "FL2": "435",
- "PD": "436",
- "SD": "437",
- "SA": "438",
- "PPL": "439",
- "CL": "440",
- "SB": "441",
- "ENL": "442",
- "EL2": "443"
+ "BSA": "444",
+ "PL": "445",
+ "ELC": "446",
+ "EL1": "447",
+ "EL2": "448",
+ "DED": "449",
+ "FL1": "450",
+ "FL2": "451",
+ "BL1": "452",
+ "BL2": "453",
+ "PD": "455",
+ "SA": "456",
+ "PPL": "457",
+ "DFB": "458",
+ "SB": "459",
+ "CL": "464",
+ "AAL": "466"
} |
1170aa08809b1c5b63e93f03864b4c76766064bc | content/test/gpu/gpu_tests/pixel_expectations.py | content/test/gpu/gpu_tests/pixel_expectations.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
pass
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import test_expectations
# Valid expectation conditions are:
#
# Operating systems:
# win, xp, vista, win7, mac, leopard, snowleopard, lion, mountainlion,
# linux, chromeos, android
#
# GPU vendors:
# amd, arm, broadcom, hisilicon, intel, imagination, nvidia, qualcomm,
# vivante
#
# Specific GPUs can be listed as a tuple with vendor name and device ID.
# Examples: ('nvidia', 0x1234), ('arm', 'Mali-T604')
# Device IDs must be paired with a GPU vendor.
class PixelExpectations(test_expectations.TestExpectations):
def SetExpectations(self):
# Sample Usage:
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
self.Fail('Pixel.CSS3DBlueBox',bug=416719)
pass
| Mark the CSS3DBlueBox pixel test as failing. | Mark the CSS3DBlueBox pixel test as failing.
There seems to be a problem with this test, as it seems to change
often after being rebaselined recently.
BUG=416719
Review URL: https://codereview.chromium.org/587753004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#296124}
| Python | bsd-3-clause | hgl888/chromium-crosswalk-efl,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,jaruba/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,ltilve/chromium,ltilve/chromium,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,axinging/chromium-crosswalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,M4sse/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,ltilve/chromium,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,dushu1203/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,dednal/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,Just-D/
chromium-1,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,markYoungH/chromium.src,ltilve/chromium,fujunwei/chromium-crosswalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,ltilve/chromium,ltilve/chromium,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,dednal/chromium.src,Jonekee/chromium.src,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium | ---
+++
@@ -24,4 +24,6 @@
# self.Fail('Pixel.Canvas2DRedBox',
# ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
+ self.Fail('Pixel.CSS3DBlueBox',bug=416719)
+
pass |
a476c42216af99488c2e02bacd29f7e3a869a3e7 | tests/retrieval_metrics/test_precision_at_k.py | tests/retrieval_metrics/test_precision_at_k.py | import numpy as np
import pytest
import tensorflow as tf
from tensorflow_similarity.retrieval_metrics import PrecisionAtK
testdata = [
(
"micro",
tf.constant(0.583333333),
),
(
"macro",
tf.constant(0.5),
),
]
@pytest.mark.parametrize("avg, expected", testdata, ids=["micro", "macro"])
def test_compute(avg, expected):
query_labels = tf.constant([1, 1, 1, 0])
match_mask = tf.constant(
[
[True, True, False],
[True, True, False],
[True, True, False],
[False, False, True],
],
dtype=bool,
)
rm = PrecisionAtK(k=3, average=avg)
precision = rm.compute(query_labels=query_labels, match_mask=match_mask)
np.testing.assert_allclose(precision, expected)
| import numpy as np
import pytest
import tensorflow as tf
from tensorflow_similarity.retrieval_metrics import PrecisionAtK
testdata = [
(
"micro",
tf.constant(0.583333333),
),
(
"macro",
tf.constant(0.5),
),
]
@pytest.mark.parametrize("avg, expected", testdata, ids=["micro", "macro"])
def test_compute(avg, expected):
query_labels = tf.constant([1, 1, 1, 0])
match_mask = tf.constant(
[
[True, True, False],
[True, True, False],
[True, True, False],
[False, False, True],
],
dtype=bool,
)
rm = PrecisionAtK(k=3, average=avg)
precision = rm.compute(query_labels=query_labels, match_mask=match_mask)
np.testing.assert_allclose(precision, expected, atol=1e-05)
| Update atol on precision at k test. | Update atol on precision at k test.
| Python | apache-2.0 | tensorflow/similarity | ---
+++
@@ -31,4 +31,4 @@
rm = PrecisionAtK(k=3, average=avg)
precision = rm.compute(query_labels=query_labels, match_mask=match_mask)
- np.testing.assert_allclose(precision, expected)
+ np.testing.assert_allclose(precision, expected, atol=1e-05) |
b1a3a17460ae1f68c6c9bf60ecbd6a3b80a95abe | billjobs/tests/tests_model.py | billjobs/tests/tests_model.py | from django.test import TestCase, Client
from django.contrib.auth.models import User
from billjobs.models import Bill, Service
from billjobs.settings import BILLJOBS_BILL_ISSUER
class BillingTestCase(TestCase):
''' Test billing creation and modification '''
fixtures = ['dev_data.json']
def setUp(self):
self.client = Client()
self.client.login(username='bill', password='jobs')
def tearDown(self):
self.client.logout()
def test_create_bill_with_one_line(self):
''' Test when user is created a bill with a single service '''
#response = self.client.get('/admin/billjobs/bill/add/', follow_redirect=True)
#self.assertEqual(response.status_code, 200)
self.assertTrue(True)
def test_create_bill(self):
user = User.objects.get(username='bill')
bill = Bill(user=user)
bill.save()
self.assertEqual(bill.user.username, 'bill')
self.assertEqual(bill.issuer_address, BILLJOBS_BILL_ISSUER)
self.assertEqual(
bill.billing_address, user.userprofile.billing_address)
| from django.test import TestCase, Client
from django.contrib.auth.models import User
from billjobs.models import Bill, Service
from billjobs.settings import BILLJOBS_BILL_ISSUER
class BillingTestCase(TestCase):
''' Test billing creation and modification '''
fixtures = ['dev_data.json']
def setUp(self):
self.user = User.objects.get(username='bill')
def tearDown(self):
pass
def test_create_bill_with_one_line(self):
''' Test when user is created a bill with a single service '''
#response = self.client.get('/admin/billjobs/bill/add/', follow_redirect=True)
#self.assertEqual(response.status_code, 200)
self.assertTrue(True)
def test_create_bill(self):
bill = Bill(user=self.user)
bill.save()
self.assertEqual(bill.user.username, self.user.username)
self.assertEqual(bill.issuer_address, BILLJOBS_BILL_ISSUER)
self.assertEqual(
bill.billing_address, self.user.userprofile.billing_address)
def test_user_change_billing_address(self):
''' Test when user is changing is billing address
Previous bill is with old address
New bill is with new address
'''
pass
| Refactor test to use user | Refactor test to use user
| Python | mit | ioO/billjobs | ---
+++
@@ -8,11 +8,10 @@
fixtures = ['dev_data.json']
def setUp(self):
- self.client = Client()
- self.client.login(username='bill', password='jobs')
+ self.user = User.objects.get(username='bill')
def tearDown(self):
- self.client.logout()
+ pass
def test_create_bill_with_one_line(self):
''' Test when user is created a bill with a single service '''
@@ -21,11 +20,16 @@
self.assertTrue(True)
def test_create_bill(self):
- user = User.objects.get(username='bill')
- bill = Bill(user=user)
+ bill = Bill(user=self.user)
bill.save()
- self.assertEqual(bill.user.username, 'bill')
+ self.assertEqual(bill.user.username, self.user.username)
self.assertEqual(bill.issuer_address, BILLJOBS_BILL_ISSUER)
self.assertEqual(
- bill.billing_address, user.userprofile.billing_address)
+ bill.billing_address, self.user.userprofile.billing_address)
+ def test_user_change_billing_address(self):
+ ''' Test when user is changing is billing address
+ Previous bill is with old address
+ New bill is with new address
+ '''
+ pass |
ed8423041abc80b778bf9ffed61e3dad246d72ff | bucketeer/test/test_commit.py | bucketeer/test/test_commit.py | import unittest
import boto
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
def setUp(self):
# Create a bucket with one file
connection = boto.connect_s3()
bucket = connection.create_bucket('bucket.exists')
return
def tearDown(self):
# Remove all test-created buckets and files
connection = boto.connect_s3()
bucket = connection.delete_bucket('bucket.exists')
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
| import unittest
import boto
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket
existing_bucket = 'bucket.exists'
def setUp(self):
# Create a bucket with one file
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
return
def tearDown(self):
# Remove all test-created buckets and files
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
| Refactor bucket name to global variable | Refactor bucket name to global variable
| Python | mit | mgarbacz/bucketeer | ---
+++
@@ -4,17 +4,20 @@
class BuckeeterTest(unittest.TestCase):
+ global existing_bucket
+ existing_bucket = 'bucket.exists'
+
def setUp(self):
# Create a bucket with one file
connection = boto.connect_s3()
- bucket = connection.create_bucket('bucket.exists')
+ bucket = connection.create_bucket(existing_bucket)
return
def tearDown(self):
# Remove all test-created buckets and files
connection = boto.connect_s3()
- bucket = connection.delete_bucket('bucket.exists')
+ bucket = connection.delete_bucket(existing_bucket)
return
def testMain(self): |
d0367aacfea7c238c476772a2c83f7826b1e9de5 | corehq/apps/export/tasks.py | corehq/apps/export/tasks.py | from celery.task import task
from corehq.apps.export.export import get_export_file, rebuild_export
from couchexport.models import Format
from couchexport.tasks import escape_quotes
from soil.util import expose_cached_download
@task
def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60):
export_file = get_export_file(export_instances, filters)
file_format = Format.from_format(export_file.format)
filename = filename or export_instances[0].name
escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension))
payload = export_file.file.payload
expose_cached_download(
payload,
expiry,
".{}".format(file_format.extension),
mimetype=file_format.mimetype,
content_disposition='attachment; filename="%s"' % escaped_filename,
download_id=download_id,
)
export_file.file.delete()
@task(queue='background_queue', ignore_result=True, last_access_cutoff=None, filter=None)
def rebuild_export_task(export_instance):
rebuild_export(export_instance)
| from celery.task import task
from corehq.apps.export.export import get_export_file, rebuild_export
from couchexport.models import Format
from couchexport.tasks import escape_quotes
from soil.util import expose_cached_download
@task
def populate_export_download_task(export_instances, filters, download_id, filename=None, expiry=10 * 60 * 60):
export_file = get_export_file(export_instances, filters)
file_format = Format.from_format(export_file.format)
filename = filename or export_instances[0].name
escaped_filename = escape_quotes('%s.%s' % (filename, file_format.extension))
payload = export_file.file.payload
expose_cached_download(
payload,
expiry,
".{}".format(file_format.extension),
mimetype=file_format.mimetype,
content_disposition='attachment; filename="%s"' % escaped_filename,
download_id=download_id,
)
export_file.file.delete()
@task(queue='background_queue', ignore_result=True)
def rebuild_export_task(export_instance, last_access_cutoff=None, filter=None):
rebuild_export(export_instance, last_access_cutoff, filter)
| Fix botched keyword args in rebuild_export_task() | Fix botched keyword args in rebuild_export_task()
| Python | bsd-3-clause | dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq | ---
+++
@@ -26,6 +26,6 @@
export_file.file.delete()
-@task(queue='background_queue', ignore_result=True, last_access_cutoff=None, filter=None)
-def rebuild_export_task(export_instance):
- rebuild_export(export_instance)
+@task(queue='background_queue', ignore_result=True)
+def rebuild_export_task(export_instance, last_access_cutoff=None, filter=None):
+ rebuild_export(export_instance, last_access_cutoff, filter) |
641ac2d30b0eb2239444b022688195ff26bd70b4 | timeout_decorator/timeout_decorator.py | timeout_decorator/timeout_decorator.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:copyright: (c) 2012-2013 by PN.
:license: MIT, see LICENSE for more details.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import signal
from functools import wraps
############################################################
# Timeout
############################################################
#http://www.saltycrane.com/blog/2010/04/using-python-timeout-decorator-uploading-s3/
class TimeoutError(Exception):
def __init__(self, value="Timed Out"):
self.value = value
def __str__(self):
return repr(self.value)
def timeout(seconds=None):
def decorate(f):
def handler(signum, frame):
raise TimeoutError()
@wraps(f)
def new_f(*args, **kwargs):
old = signal.signal(signal.SIGALRM, handler)
new_seconds = kwargs['timeout'] if 'timeout' in kwargs else seconds
if new_seconds is None:
raise ValueError("You must provide a timeout value")
signal.alarm(new_seconds)
try:
result = f(*args, **kwargs)
finally:
signal.signal(signal.SIGALRM, old)
signal.alarm(0)
return result
return new_f
return decorate
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:copyright: (c) 2012-2013 by PN.
:license: MIT, see LICENSE for more details.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import signal
from functools import wraps
############################################################
# Timeout
############################################################
#http://www.saltycrane.com/blog/2010/04/using-python-timeout-decorator-uploading-s3/
class TimeoutError(AssertionError):
def __init__(self, value="Timed Out"):
self.value = value
def __str__(self):
return repr(self.value)
def timeout(seconds=None):
def decorate(f):
def handler(signum, frame):
raise TimeoutError()
@wraps(f)
def new_f(*args, **kwargs):
old = signal.signal(signal.SIGALRM, handler)
new_seconds = kwargs['timeout'] if 'timeout' in kwargs else seconds
if new_seconds is None:
raise ValueError("You must provide a timeout value")
signal.alarm(new_seconds)
try:
result = f(*args, **kwargs)
finally:
signal.signal(signal.SIGALRM, old)
signal.alarm(0)
return result
return new_f
return decorate
| Make timeout error an assertion error, not just any old exception | Make timeout error an assertion error, not just any old exception
This means that timeout failures are considered to be test failures, where a specific assertion (i.e. 'this function takes less than N seconds') has failed, rather than being a random error in the test that may indicate a bug. | Python | mit | pnpnpn/timeout-decorator,yetone/timeout-decorator | ---
+++
@@ -20,7 +20,7 @@
#http://www.saltycrane.com/blog/2010/04/using-python-timeout-decorator-uploading-s3/
-class TimeoutError(Exception):
+class TimeoutError(AssertionError):
def __init__(self, value="Timed Out"):
self.value = value
|
bddd7cb9131dd61a8167fe9db5bbc3b02f8e3add | tests/main.py | tests/main.py | #!/usr/bin/env python
"""
Tests for python-bna
>>> import bna
>>> serial = "US120910711868"
>>> secret = b"88aaface48291e09dc1ece9c2aa44d839983a7ff"
>>> bna.get_token(secret, time=1347279358)
(93461643, 2)
>>> bna.get_token(secret, time=1347279359)
(93461643, 1)
>>> bna.get_token(secret, time=1347279360)
(86031001, 30)
>>> bna.get_restore_code(serial, secret)
'4B91NQCYQ3'
>>> bna.normalize_serial(bna.prettify_serial(serial)) == serial
True
"""
if __name__ == "__main__":
import doctest
doctest.testmod()
| #!/usr/bin/env python
"""
Tests for python-bna
>>> import bna
>>> serial = "US120910711868"
>>> secret = b"88aaface48291e09dc1ece9c2aa44d839983a7ff"
>>> bna.get_token(secret, time=1347279358)
(93461643, 2)
>>> bna.get_token(secret, time=1347279359)
(93461643, 1)
>>> bna.get_token(secret, time=1347279360)
(86031001, 30)
>>> bna.get_restore_code(serial, secret)
'4B91NQCYQ3'
>>> bna.normalize_serial(bna.prettify_serial(serial)) == serial
True
>>> bna.get_otpauth_url(serial, secret)
'otpauth://totp/Battle.net:US120910711868:?secret=HA4GCYLGMFRWKNBYGI4TCZJQHFSGGMLFMNSTSYZSMFQTINDEHAZTSOJYGNQTOZTG&issuer=Battle.net&digits=8'
"""
if __name__ == "__main__":
import doctest
doctest.testmod()
| Add a test for bna.get_otpauth_url | Add a test for bna.get_otpauth_url
| Python | mit | jleclanche/python-bna,Adys/python-bna | ---
+++
@@ -15,6 +15,8 @@
'4B91NQCYQ3'
>>> bna.normalize_serial(bna.prettify_serial(serial)) == serial
True
+>>> bna.get_otpauth_url(serial, secret)
+'otpauth://totp/Battle.net:US120910711868:?secret=HA4GCYLGMFRWKNBYGI4TCZJQHFSGGMLFMNSTSYZSMFQTINDEHAZTSOJYGNQTOZTG&issuer=Battle.net&digits=8'
"""
if __name__ == "__main__": |
86a325777742e1fa79bc632fca9460f3b1b8eb16 | to_do/urls.py | to_do/urls.py | from django.conf.urls import patterns, include, url
from task.views import TaskList, TaskView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TaskList.as_view(), name='TaskList'),
url(r'^task/', TaskView.as_view(), name='TaskView'),
url(r'^admin/', include(admin.site.urls)),
)
"""
MyModel = Backbone.Model.extend({
url: function(){ "API/"
return "API/MyModel/" +this.get("id");
}
});
MyCollection = Backbone.Collection.extend({
model: MyModel ,
url: "API/MyModels"
});
"""
| from django.conf.urls import patterns, include, url
from task.views import TaskList, TaskView, get_task_list
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TaskList.as_view(), name='TaskList'),
url(r'^task/', TaskView.as_view(), name='TaskView'),
url(r'^tasks/', get_task_list, name='get_task_list'),
url(r'^admin/', include(admin.site.urls)),
)
"""
MyModel = Backbone.Model.extend({
url: function(){ "API/"
return "API/MyModel/" +this.get("id");
}
});
MyCollection = Backbone.Collection.extend({
model: MyModel ,
url: "API/MyModels"
});
"""
 | Enable url to get the full list of tasks by ajax | Enable url to get the full list of tasks by ajax
| Python | mit | rosadurante/to_do,rosadurante/to_do | ---
+++
@@ -1,5 +1,5 @@
from django.conf.urls import patterns, include, url
-from task.views import TaskList, TaskView
+from task.views import TaskList, TaskView, get_task_list
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
@@ -8,6 +8,7 @@
urlpatterns = patterns('',
url(r'^$', TaskList.as_view(), name='TaskList'),
url(r'^task/', TaskView.as_view(), name='TaskView'),
+ url(r'^tasks/', get_task_list, name='get_task_list'),
url(r'^admin/', include(admin.site.urls)),
)
|
49975504a590a1ae53e2e8cc81aadea277cc5600 | cvrminer/app/__init__.py | cvrminer/app/__init__.py | """cvrminer app."""
from __future__ import absolute_import, division, print_function
from flask import Flask
from flask_bootstrap import Bootstrap
def create_app(smiley=False):
"""Create app.
Factory for app.
Parameters
----------
smiley : bool, optional
Determines whether the smiley functionality should be setup.
"""
app = Flask(__name__)
Bootstrap(app)
if smiley:
from ..smiley import Smiley
app.smiley = Smiley()
else:
app.smiley = None
from .views import main as main_blueprint
app.register_blueprint(main_blueprint)
return app
| """cvrminer app."""
from __future__ import absolute_import, division, print_function
from flask import Flask
from flask_bootstrap import Bootstrap, StaticCDN
def create_app(smiley=False):
"""Create app.
Factory for app.
Parameters
----------
smiley : bool, optional
Determines whether the smiley functionality should be setup.
"""
app = Flask(__name__)
Bootstrap(app)
# Serve assets from wmflabs for privacy reasons
app.extensions['bootstrap']['cdns']['jquery'] = StaticCDN()
app.extensions['bootstrap']['cdns']['bootstrap'] = StaticCDN()
if smiley:
from ..smiley import Smiley
app.smiley = Smiley()
else:
app.smiley = None
from .views import main as main_blueprint
app.register_blueprint(main_blueprint)
return app
| Change to use local Javascript and CSS files | Change to use local Javascript and CSS files
| Python | apache-2.0 | fnielsen/cvrminer,fnielsen/cvrminer,fnielsen/cvrminer | ---
+++
@@ -4,7 +4,7 @@
from __future__ import absolute_import, division, print_function
from flask import Flask
-from flask_bootstrap import Bootstrap
+from flask_bootstrap import Bootstrap, StaticCDN
def create_app(smiley=False):
@@ -21,6 +21,10 @@
app = Flask(__name__)
Bootstrap(app)
+ # Serve assets from wmflabs for privacy reasons
+ app.extensions['bootstrap']['cdns']['jquery'] = StaticCDN()
+ app.extensions['bootstrap']['cdns']['bootstrap'] = StaticCDN()
+
if smiley:
from ..smiley import Smiley
|
3b4322b8de8ffdc691f08fbf7f35e6ec5293f41e | crm_job_position/models/crm_job_position.py | crm_job_position/models/crm_job_position.py | # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmJobPosition(models.Model):
_name = 'crm.job_position'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Job position"
name = fields.Char(required=True)
parent_id = fields.Many2one(comodel_name='crm.job_position')
children = fields.One2many(comodel_name='crm.job_position',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
| # -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields
class CrmJobPosition(models.Model):
_name = 'crm.job_position'
_order = "parent_left"
_parent_order = "name"
_parent_store = True
_description = "Job position"
name = fields.Char(required=True, translate=True)
parent_id = fields.Many2one(comodel_name='crm.job_position')
children = fields.One2many(comodel_name='crm.job_position',
inverse_name='parent_id')
parent_left = fields.Integer('Parent Left', select=True)
parent_right = fields.Integer('Parent Right', select=True)
 | Set some fields as translate | Set some fields as translate
| Python | agpl-3.0 | acsone/partner-contact,Therp/partner-contact,diagramsoftware/partner-contact,Endika/partner-contact,open-synergy/partner-contact | ---
+++
@@ -12,7 +12,7 @@
_parent_store = True
_description = "Job position"
- name = fields.Char(required=True)
+ name = fields.Char(required=True, translate=True)
parent_id = fields.Many2one(comodel_name='crm.job_position')
children = fields.One2many(comodel_name='crm.job_position',
inverse_name='parent_id') |
f7bfcd7fee64ae9220710835974125f41dae1c50 | frappe/core/doctype/role/test_role.py | frappe/core/doctype/role/test_role.py | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import unittest
test_records = frappe.get_test_records('Role')
class TestUser(unittest.TestCase):
def test_disable_role(self):
frappe.get_doc("User", "[email protected]").add_roles("_Test Role 3")
role = frappe.get_doc("Role", "_Test Role 3")
role.disabled = 1
role.save()
self.assertTrue("_Test Role 3" not in frappe.get_roles("[email protected]"))
frappe.get_doc("User", "[email protected]").add_roles("_Test Role 3")
self.assertTrue("_Test Role 3" not in frappe.get_roles("[email protected]"))
role = frappe.get_doc("Role", "_Test Role 3")
role.disabled = 0
role.save()
frappe.get_doc("User", "[email protected]").add_roles("_Test Role 3")
self.assertTrue("_Test Role 3" in frappe.get_roles("[email protected]"))
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import unittest
test_records = frappe.get_test_records('Role')
class TestUser(unittest.TestCase):
def test_disable_role(self):
frappe.get_doc("User", "[email protected]").add_roles("_Test Role 3")
role = frappe.get_doc("Role", "_Test Role 3")
role.disabled = 1
role.save()
self.assertTrue("_Test Role 3" not in frappe.get_roles("[email protected]"))
role = frappe.get_doc("Role", "_Test Role 3")
role.disabled = 0
role.save()
frappe.get_doc("User", "[email protected]").add_roles("_Test Role 3")
self.assertTrue("_Test Role 3" in frappe.get_roles("[email protected]"))
| Test Case for disabled role | fix: Test Case for disabled role
| Python | mit | mhbu50/frappe,mhbu50/frappe,saurabh6790/frappe,frappe/frappe,saurabh6790/frappe,vjFaLk/frappe,StrellaGroup/frappe,vjFaLk/frappe,yashodhank/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,adityahase/frappe,almeidapaulopt/frappe,adityahase/frappe,adityahase/frappe,adityahase/frappe,mhbu50/frappe,yashodhank/frappe,frappe/frappe,vjFaLk/frappe,vjFaLk/frappe,yashodhank/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,yashodhank/frappe,mhbu50/frappe,almeidapaulopt/frappe,frappe/frappe,saurabh6790/frappe,saurabh6790/frappe | ---
+++
@@ -10,20 +10,16 @@
class TestUser(unittest.TestCase):
def test_disable_role(self):
frappe.get_doc("User", "[email protected]").add_roles("_Test Role 3")
-
+
role = frappe.get_doc("Role", "_Test Role 3")
role.disabled = 1
role.save()
-
+
self.assertTrue("_Test Role 3" not in frappe.get_roles("[email protected]"))
-
- frappe.get_doc("User", "[email protected]").add_roles("_Test Role 3")
- self.assertTrue("_Test Role 3" not in frappe.get_roles("[email protected]"))
-
+
role = frappe.get_doc("Role", "_Test Role 3")
role.disabled = 0
role.save()
-
+
frappe.get_doc("User", "[email protected]").add_roles("_Test Role 3")
self.assertTrue("_Test Role 3" in frappe.get_roles("[email protected]"))
- |
82121f05032f83de538c4a16596b24b5b012a3be | chaco/shell/tests/test_tutorial_example.py | chaco/shell/tests/test_tutorial_example.py | """ Test script-oriented example from interactive plotting tutorial
source: docs/source/user_manual/chaco_tutorial.rst
"""
import unittest
from numpy import linspace, pi, sin
from enthought.chaco.shell import plot, show, title, ytitle
class InteractiveTestCase(unittest.TestCase):
def test_script(self):
x = linspace(-2*pi, 2*pi, 100)
y = sin(x)
plot(x, y, "r-")
title("First plot")
ytitle("sin(x)")
if __name__ == "__main__":
unittest.main()
| """ Test script-oriented example from interactive plotting tutorial
source: docs/source/user_manual/chaco_tutorial.rst
"""
import unittest
from numpy import linspace, pi, sin
from chaco.shell import plot, title, ytitle
class InteractiveTestCase(unittest.TestCase):
def test_script(self):
x = linspace(-2*pi, 2*pi, 100)
y = sin(x)
plot(x, y, "r-")
title("First plot")
ytitle("sin(x)")
if __name__ == "__main__":
unittest.main()
| Clean up: pyflakes and remove enthought.chaco import. We should and will update the tutorial later. | Clean up: pyflakes and remove enthought.chaco import. We should and will update the tutorial later.
| Python | bsd-3-clause | tommy-u/chaco,burnpanck/chaco,burnpanck/chaco,tommy-u/chaco,tommy-u/chaco,burnpanck/chaco | ---
+++
@@ -5,7 +5,7 @@
"""
import unittest
from numpy import linspace, pi, sin
-from enthought.chaco.shell import plot, show, title, ytitle
+from chaco.shell import plot, title, ytitle
class InteractiveTestCase(unittest.TestCase): |
e029998f73a77ebd8f4a6e32a8b03edcc93ec0d7 | dataproperty/__init__.py | dataproperty/__init__.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
from ._align import Align
from ._align_getter import align_getter
from ._container import MinMaxContainer
from ._data_property import (
ColumnDataProperty,
DataProperty
)
from ._error import TypeConversionError
from ._function import (
is_integer,
is_hex,
is_float,
is_nan,
is_empty_string,
is_not_empty_string,
is_list_or_tuple,
is_empty_sequence,
is_not_empty_sequence,
is_empty_list_or_tuple,
is_not_empty_list_or_tuple,
is_datetime,
get_integer_digit,
get_number_of_digit,
get_text_len,
strict_strtobool
)
from ._property_extractor import PropertyExtractor
from ._type import (
NoneType,
StringType,
IntegerType,
FloatType,
DateTimeType,
BoolType,
InfinityType,
NanType
)
from ._typecode import Typecode
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
from ._align import Align
from ._align_getter import align_getter
from ._container import MinMaxContainer
from ._data_property import (
ColumnDataProperty,
DataProperty
)
from ._error import TypeConversionError
from ._function import (
is_integer,
is_hex,
is_float,
is_nan,
is_empty_string,
is_not_empty_string,
is_list_or_tuple,
is_empty_sequence,
is_not_empty_sequence,
is_empty_list_or_tuple,
is_not_empty_list_or_tuple,
is_datetime,
get_integer_digit,
get_number_of_digit,
get_text_len
)
from ._property_extractor import PropertyExtractor
from ._type import (
NoneType,
StringType,
IntegerType,
FloatType,
DateTimeType,
BoolType,
InfinityType,
NanType
)
from ._typecode import Typecode
| Delete import that no longer used | Delete import that no longer used
| Python | mit | thombashi/DataProperty | ---
+++
@@ -29,8 +29,7 @@
is_datetime,
get_integer_digit,
get_number_of_digit,
- get_text_len,
- strict_strtobool
+ get_text_len
)
from ._property_extractor import PropertyExtractor
from ._type import ( |
1a0e277d23dfc41fc03799edde2a650b89cbcced | src/utils/utils.py | src/utils/utils.py | import logging
LOGGER = logging.getLogger(__name__)
def tidy_up_url(url):
if url.startswith("//"):
# If no protocol was supplied, add https
url = "https:" + url
if '?' in url:
url = url[:url.rfind('?')]
return url
def limit_file_name(file_name, length=65):
if len(file_name) <= length:
return file_name
else:
extension = file_name[file_name.rfind("."):]
file_name = file_name[:length - len(extension)] + extension
LOGGER.debug("Will have to limit the file name %s as it exceeds %i" % (file_name, length))
return file_name
| import logging
LOGGER = logging.getLogger(__name__)
def tidy_up_url(url):
if url.startswith("//"):
# If no protocol was supplied, add https
url = "https:" + url
if '?' in url:
url = url[:url.rfind('?')]
if url.endswith("/"):
url = url[:-1]
return url
def limit_file_name(file_name, length=65):
if len(file_name) <= length:
return file_name
else:
extension = file_name[file_name.rfind("."):]
file_name = file_name[:length - len(extension)] + extension
LOGGER.debug("Will have to limit the file name %s as it exceeds %i" % (file_name, length))
return file_name
| Make sure that the Imgur ID can be correctly extracted from the URL | Make sure that the Imgur ID can be correctly extracted from the URL
This was made to address the case where the URL might end with '/'
| Python | apache-2.0 | CharlieCorner/pymage_downloader | ---
+++
@@ -10,6 +10,9 @@
if '?' in url:
url = url[:url.rfind('?')]
+
+ if url.endswith("/"):
+ url = url[:-1]
return url
|
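A quick usage sketch of the tidy_up_url() helper changed above; the import path and the example URLs are assumptions for illustration, not part of the project.
# Exercises the trailing-slash handling added in this commit.
from utils import tidy_up_url  # assumed import path for src/utils/utils.py

assert tidy_up_url("//i.imgur.com/abc123.jpg?foo=bar") == "https://i.imgur.com/abc123.jpg"
assert tidy_up_url("https://imgur.com/gallery/abc123/") == "https://imgur.com/gallery/abc123"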
69abcf66d36079e100815f629487d121ae016ee9 | future/tests/test_standard_library_renames.py | future/tests/test_standard_library_renames.py | """
Tests for the future.standard_library_renames module
"""
from __future__ import absolute_import, unicode_literals, print_function
from future import standard_library_renames, six
import unittest
class TestStandardLibraryRenames(unittest.TestCase):
def test_configparser(self):
import configparser
def test_copyreg(self):
import copyreg
def test_pickle(self):
import pickle
def test_profile(self):
import profile
def test_io(self):
from io import StringIO
s = StringIO('test')
for method in ['next', 'read', 'seek', 'close']:
self.assertTrue(hasattr(s, method))
def test_queue(self):
import queue
heap = ['thing', 'another thing']
queue.heapq.heapify(heap)
self.assertEqual(heap, ['another thing', 'thing'])
# 'markupbase': '_markupbase',
def test_reprlib(self):
import reprlib
def test_socketserver(self):
import socketserver
def test_tkinter(self):
import tkinter
# '_winreg': 'winreg',
def test_builtins(self):
import builtins
self.assertTrue(hasattr(builtins, 'tuple'))
if __name__ == '__main__':
unittest.main()
| """
Tests for the future.standard_library_renames module
"""
from __future__ import absolute_import, unicode_literals, print_function
from future import standard_library_renames, six
import unittest
class TestStandardLibraryRenames(unittest.TestCase):
def test_configparser(self):
import configparser
def test_copyreg(self):
import copyreg
def test_pickle(self):
import pickle
def test_profile(self):
import profile
def test_io(self):
from io import StringIO
s = StringIO('test')
for method in ['next', 'read', 'seek', 'close']:
self.assertTrue(hasattr(s, method))
def test_queue(self):
import queue
q = queue.Queue()
q.put('thing')
self.assertFalse(q.empty())
# 'markupbase': '_markupbase',
def test_reprlib(self):
import reprlib
def test_socketserver(self):
import socketserver
def test_tkinter(self):
import tkinter
# '_winreg': 'winreg',
def test_builtins(self):
import builtins
self.assertTrue(hasattr(builtins, 'tuple'))
if __name__ == '__main__':
unittest.main()
| Fix test for queue module | Fix test for queue module
I was testing heapq before ;) ...
| Python | mit | QuLogic/python-future,QuLogic/python-future,krischer/python-future,michaelpacer/python-future,michaelpacer/python-future,krischer/python-future,PythonCharmers/python-future,PythonCharmers/python-future | ---
+++
@@ -29,9 +29,9 @@
def test_queue(self):
import queue
- heap = ['thing', 'another thing']
- queue.heapq.heapify(heap)
- self.assertEqual(heap, ['another thing', 'thing'])
+ q = queue.Queue()
+ q.put('thing')
+ self.assertFalse(q.empty())
# 'markupbase': '_markupbase',
|
f266132c05c37469290027e7aa8000d1f9a19a6c | tst/colors.py | tst/colors.py | YELLOW = '\033[1;33m'
LRED = '\033[1;31m'
LGREEN = '\033[1;32m'
GREEN="\033[9;32m"
WHITE="\033[1;37m"
LCYAN = '\033[1;36m'
LBLUE = '\033[1;34m'
RESET = '\033[0m'
| YELLOW = '\033[1;33m'
LRED = '\033[1;31m'
LGREEN = '\033[1;32m'
GREEN = '\033[9;32m'
WHITE = '\033[0;37m'
LWHITE = '\033[1;37m'
LCYAN = '\033[1;36m'
LBLUE = '\033[1;34m'
RESET = '\033[0m'
CRITICAL = '\033[41;37m'
| Add some new color codes | Add some new color codes
| Python | agpl-3.0 | daltonserey/tst,daltonserey/tst | ---
+++
@@ -1,8 +1,10 @@
YELLOW = '\033[1;33m'
LRED = '\033[1;31m'
LGREEN = '\033[1;32m'
-GREEN="\033[9;32m"
-WHITE="\033[1;37m"
+GREEN = '\033[9;32m'
+WHITE = '\033[0;37m'
+LWHITE = '\033[1;37m'
LCYAN = '\033[1;36m'
LBLUE = '\033[1;34m'
RESET = '\033[0m'
+CRITICAL = '\033[41;37m' |
9fb7d827007a7ed1aea505f88b3831f19b066d23 | webapp/titanembeds/database/disabled_guilds.py | webapp/titanembeds/database/disabled_guilds.py | from titanembeds.database import db
class DisabledGuilds(db.Model):
__tablename__ = "disabled_guilds" # Auto increment id
guild_id = db.Column(db.BigInteger, nullable=False, primary_key=True) # Server id that is disabled
def __init__(self, guild_id):
self.guild_id = guild_id
def list_disabled_guilds():
q = db.session.query(DisabledGuilds).all()
their_ids = []
for guild in q:
their_ids.append(guild.guild_id)
return their_ids | from titanembeds.database import db
class DisabledGuilds(db.Model):
__tablename__ = "disabled_guilds" # Auto increment id
guild_id = db.Column(db.BigInteger, nullable=False, primary_key=True) # Server id that is disabled
def __init__(self, guild_id):
self.guild_id = guild_id
def list_disabled_guilds():
q = db.session.query(DisabledGuilds).all()
their_ids = []
for guild in q:
their_ids.append(str(guild.guild_id))
return their_ids | Set disabled guild ids to string so it works again | Set disabled guild ids to string so it works again
| Python | agpl-3.0 | TitanEmbeds/Titan,TitanEmbeds/Titan,TitanEmbeds/Titan | ---
+++
@@ -11,5 +11,5 @@
q = db.session.query(DisabledGuilds).all()
their_ids = []
for guild in q:
- their_ids.append(guild.guild_id)
+ their_ids.append(str(guild.guild_id))
return their_ids |
038978f87883247a14e9bec08708452c98c91285 | test/test_chimera.py | test/test_chimera.py | import unittest
import utils
import os
import sys
import re
import shutil
import subprocess
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
utils.set_search_paths(TOPDIR)
import cryptosite.chimera
class Tests(unittest.TestCase):
def test_bad(self):
"""Test wrong arguments to chimera"""
for args in ([], ['x'] * 4):
out = utils.check_output(['cryptosite', 'chimera'] + args,
stderr=subprocess.STDOUT, retcode=2)
out = utils.check_output(['python', '-m',
'cryptosite.chimera'] + args,
stderr=subprocess.STDOUT, retcode=2)
def test_make_chimera_file(self):
"""Test make_chimera_file() function"""
cryptosite.chimera.make_chimera_file('url1', 'url2', 'out.chimerax')
os.unlink('out.chimerax')
if __name__ == '__main__':
unittest.main()
| import unittest
import utils
import os
import sys
import re
import shutil
import subprocess
TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
utils.set_search_paths(TOPDIR)
import cryptosite.chimera
class Tests(unittest.TestCase):
def test_bad(self):
"""Test wrong arguments to chimera"""
for args in ([], ['x'] * 4):
out = utils.check_output(['cryptosite', 'chimera'] + args,
stderr=subprocess.STDOUT, retcode=2)
out = utils.check_output(['python', '-m',
'cryptosite.chimera'] + args,
stderr=subprocess.STDOUT, retcode=2)
def test_make_chimera_file(self):
"""Test make_chimera_file() function"""
cryptosite.chimera.make_chimera_file('url1', 'url2', 'out.chimerax')
with open('out.chimerax') as fh:
lines = fh.readlines()
self.assertEqual(lines[-4], 'open_files("url1", "url2")\n')
os.unlink('out.chimerax')
if __name__ == '__main__':
unittest.main()
| Check generated file for sanity. | Check generated file for sanity.
| Python | lgpl-2.1 | salilab/cryptosite,salilab/cryptosite,salilab/cryptosite | ---
+++
@@ -24,6 +24,9 @@
def test_make_chimera_file(self):
"""Test make_chimera_file() function"""
cryptosite.chimera.make_chimera_file('url1', 'url2', 'out.chimerax')
+ with open('out.chimerax') as fh:
+ lines = fh.readlines()
+ self.assertEqual(lines[-4], 'open_files("url1", "url2")\n')
os.unlink('out.chimerax')
if __name__ == '__main__': |
05ddf0fff9469ae0173809eb559486ff216231a0 | test/test_scripts.py | test/test_scripts.py | import pytest
import subprocess
@pytest.mark.parametrize("script", [])
def test_script(script):
try:
subprocess.check_output([script, '-h'], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
print e.output
assert e.returncode == 0
| import pytest
import subprocess
@pytest.mark.parametrize("script", ['bin/cast-example'])
def test_script(script):
try:
subprocess.check_output([script, '-h'], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
print e.output
assert e.returncode == 0
| Add example-cast to sanity test | Add example-cast to sanity test
| Python | mit | maxzheng/clicast | ---
+++
@@ -2,7 +2,7 @@
import subprocess
[email protected]("script", [])
[email protected]("script", ['bin/cast-example'])
def test_script(script):
try: |
33598fd8baf527d63cef965eddfc90548b6c52b3 | go/apps/jsbox/definition.py | go/apps/jsbox/definition.py | import json
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class ViewLogsAction(ConversationAction):
action_name = 'view_logs'
action_display_name = 'View Sandbox Logs'
redirect_to = 'jsbox_logs'
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'jsbox'
conversation_display_name = 'Javascript App'
actions = (ViewLogsAction,)
def configured_endpoints(self, config):
app_config = config.get("jsbox_app_config", {})
raw_js_config = app_config.get("config", {}).get("value", {})
try:
js_config = json.loads(raw_js_config)
except Exception:
return []
endpoints = set()
# vumi-jssandbox-toolkit v2 endpoints
try:
endpoints.update(js_config["endpoints"].keys())
except Exception:
pass
# vumi-jssandbox-toolkit v1 endpoints
try:
pool, tag = js_config["sms_tag"]
endpoints.add("%s:%s" % (pool, tag))
except Exception:
pass
return sorted(endpoints)
| import json
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class ViewLogsAction(ConversationAction):
action_name = 'view_logs'
action_display_name = 'View Sandbox Logs'
redirect_to = 'jsbox_logs'
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'jsbox'
conversation_display_name = 'Javascript App'
actions = (ViewLogsAction,)
def configured_endpoints(self, config):
app_config = config.get("jsbox_app_config", {})
raw_js_config = app_config.get("config", {}).get("value", {})
try:
js_config = json.loads(raw_js_config)
except Exception:
return []
# vumi-jssandbox-toolkit v2 endpoints
try:
v2_endpoints = list(js_config["endpoints"].keys())
except Exception:
v2_endpoints = []
# vumi-jssandbox-toolkit v1 endpoints
try:
pool, tag = js_config["sms_tag"]
v1_endpoints = [u"%s:%s" % (pool, tag)]
except Exception:
v1_endpoints = []
endpoints = v1_endpoints + v2_endpoints
endpoints = [ep for ep in endpoints if isinstance(ep, unicode)]
return sorted(set(endpoints))
| Remove non-unicode endpoints from the endpoint list. | Remove non-unicode endpoints from the endpoint list.
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go | ---
+++
@@ -24,16 +24,18 @@
except Exception:
return []
- endpoints = set()
# vumi-jssandbox-toolkit v2 endpoints
try:
- endpoints.update(js_config["endpoints"].keys())
+ v2_endpoints = list(js_config["endpoints"].keys())
except Exception:
- pass
+ v2_endpoints = []
# vumi-jssandbox-toolkit v1 endpoints
try:
pool, tag = js_config["sms_tag"]
- endpoints.add("%s:%s" % (pool, tag))
+ v1_endpoints = [u"%s:%s" % (pool, tag)]
except Exception:
- pass
- return sorted(endpoints)
+ v1_endpoints = []
+
+ endpoints = v1_endpoints + v2_endpoints
+ endpoints = [ep for ep in endpoints if isinstance(ep, unicode)]
+ return sorted(set(endpoints)) |
fbc46862af7fa254f74f1108149fd0669c46f1ad | rplugin/python3/deoplete/sources/LanguageClientSource.py | rplugin/python3/deoplete/sources/LanguageClientSource.py | import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
snip = re.sub(r'(?<!\\)\$(?P<num>\d+)', '<`\g<num>`>', snip)
return re.sub(r'(?<!\\)\${(?P<num>\d+):(?P<desc>.+?)}',
'<`\g<num>:\g<desc>`>', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::|->)\w*'
self.__keyword_patterns = r'(?:[a-zA-Z@0-9_À-ÿ]|\.|::|->)*$'
def get_complete_position(self, context):
m = re.search('(?:' + context['keyword_patterns'] + ')$',
context['input'])
if m:
return m.start()
m = re.search(self.__keyword_patterns, context['input'])
if m:
return m.end()
return -1
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if not isinstance(result, list):
result = []
return result
| from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if not isinstance(result, list):
result = []
return result
| Remove problematic deoplete source customization. | Remove problematic deoplete source customization.
Close #312.
| Python | mit | autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim | ---
+++
@@ -1,14 +1,7 @@
-import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
-
-
-def simplify_snippet(snip: str) -> str:
- snip = re.sub(r'(?<!\\)\$(?P<num>\d+)', '<`\g<num>`>', snip)
- return re.sub(r'(?<!\\)\${(?P<num>\d+):(?P<desc>.+?)}',
- '<`\g<num>:\g<desc>`>', snip)
class Source(Base):
@@ -20,21 +13,6 @@
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
- self.min_pattern_length = 1
- self.input_pattern = r'(\.|::|->)\w*'
- self.__keyword_patterns = r'(?:[a-zA-Z@0-9_À-ÿ]|\.|::|->)*$'
-
- def get_complete_position(self, context):
- m = re.search('(?:' + context['keyword_patterns'] + ')$',
- context['input'])
- if m:
- return m.start()
-
- m = re.search(self.__keyword_patterns, context['input'])
- if m:
- return m.end()
-
- return -1
def gather_candidates(self, context):
if not context["is_async"]: |
d6a3d43e58796299c55fe3a6c6d597edaf5cfc3c | troposphere/kms.py | troposphere/kms.py | # Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
from .validators import boolean
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'Enabled': (boolean, False),
'EnableKeyRotation': (boolean, False),
'KeyPolicy': (policytypes, True),
'Tags': (list, False)
}
| # Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .validators import boolean
try:
from awacs.aws import Policy
policytypes = (dict, Policy)
except ImportError:
policytypes = dict,
class Alias(AWSObject):
resource_type = "AWS::KMS::Alias"
props = {
'AliasName': (basestring, True),
'TargetKeyId': (basestring, True)
}
class Key(AWSObject):
resource_type = "AWS::KMS::Key"
props = {
'Description': (basestring, False),
'Enabled': (boolean, False),
'EnableKeyRotation': (boolean, False),
'KeyPolicy': (policytypes, True),
'Tags': ((Tags, list), False)
}
| Change KMS::Key to accept a standard Tags | Change KMS::Key to accept a standard Tags
| Python | bsd-2-clause | ikben/troposphere,pas256/troposphere,johnctitus/troposphere,cloudtools/troposphere,ikben/troposphere,cloudtools/troposphere,johnctitus/troposphere,pas256/troposphere | ---
+++
@@ -3,7 +3,7 @@
#
# See LICENSE file for full license.
-from . import AWSObject
+from . import AWSObject, Tags
from .validators import boolean
try:
from awacs.aws import Policy
@@ -29,5 +29,5 @@
'Enabled': (boolean, False),
'EnableKeyRotation': (boolean, False),
'KeyPolicy': (policytypes, True),
- 'Tags': (list, False)
+ 'Tags': ((Tags, list), False)
} |
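A hedged example of what the widened property type above allows: a KMS Key can now take troposphere's Tags helper as well as a plain list. The logical name and key policy below are placeholders, not recommended values.
from troposphere import Tags, Template
from troposphere.kms import Key

template = Template()
template.add_resource(Key(
    "ExampleKey",
    Description="Example CMK",
    Enabled=True,
    KeyPolicy={"Version": "2012-10-17", "Statement": []},  # placeholder policy
    Tags=Tags(Environment="dev"),  # accepted after this change, alongside plain lists
))
print(template.to_json())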
ca777965c26b8dfd43b472adeb032f048e2537ed | acceptancetests/tests/acc_test_login_page.py | acceptancetests/tests/acc_test_login_page.py | # (c) Crown Owned Copyright, 2016. Dstl.
import os
import unittest
from splinter import Browser
class TestLoginPage (unittest.TestCase):
def setUp(self):
self.browser = Browser('phantomjs')
def test_login_page_appears(self):
# This needs to come from an environment variable at some point
# For now, this will only pass if the lighthouse-app-server host is
# running.
url = "http://%s/login" % os.environ['LIGHTHOUSE_HOST']
title = 'Lighthouse'
self.browser.visit(url)
self.assertEqual(self.browser.url, url)
self.assertEqual(self.browser.status_code.code, 200)
self.assertIn(self.browser.title, title)
self.assertIn('Login with ID.', self.browser.html)
| # (c) Crown Owned Copyright, 2016. Dstl.
import os
import unittest
from splinter import Browser
class TestLoginPage (unittest.TestCase):
def setUp(self):
self.browser = Browser('phantomjs')
def test_login_page_appears(self):
# This needs to come from an environment variable at some point
# For now, this will only pass if the lighthouse-app-server host is
# running.
url = "http://%s/login" % os.environ['LIGHTHOUSE_HOST']
title = 'Lighthouse'
self.browser.visit(url)
self.assertEqual(self.browser.url, url)
self.assertEqual(self.browser.status_code.code, 200)
self.assertIn(title, self.browser.title)
self.assertIn('Login with ID.', self.browser.html)
| Check that expected title exists in the actual title, not the other way round | Check that expected title exists in the actual title, not the other way round
| Python | mit | dstl/lighthouse,dstl/lighthouse,dstl/lighthouse,dstl/lighthouse,dstl/lighthouse | ---
+++
@@ -21,6 +21,6 @@
self.assertEqual(self.browser.url, url)
self.assertEqual(self.browser.status_code.code, 200)
- self.assertIn(self.browser.title, title)
+ self.assertIn(title, self.browser.title)
self.assertIn('Login with ID.', self.browser.html) |
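The fix above hinges on unittest's argument order: assertIn(member, container) asserts that the first argument occurs inside the second, so the expected fragment goes first. A standalone illustration with made-up strings:
import unittest

class TitleCheck(unittest.TestCase):
    def test_fragment_in_title(self):
        expected = "Lighthouse"
        actual_title = "Lighthouse | Login"
        self.assertIn(expected, actual_title)  # member first, container second

if __name__ == "__main__":
    unittest.main()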
d5ee91ba36c7e3d2ce0720b5b047934d554041cd | app/__init__.py | app/__init__.py | from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
bootstrap = Bootstrap()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
bootstrap.init_app(app)
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
| from flask import Flask
from flask.ext.bootstrap import Bootstrap
from flask.ext.sqlalchemy import SQLAlchemy
from config import config
bootstrap = Bootstrap()
db = SQLAlchemy()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
bootstrap.init_app(app)
db.init_app(app)
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint)
return app
| Initialize the database in the application package constructor | Initialize the database in the application package constructor
| Python | mit | finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is | ---
+++
@@ -1,15 +1,18 @@
from flask import Flask
from flask.ext.bootstrap import Bootstrap
+from flask.ext.sqlalchemy import SQLAlchemy
from config import config
bootstrap = Bootstrap()
+db = SQLAlchemy()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
bootstrap.init_app(app)
+ db.init_app(app)
from .aflafrettir import aflafrettir as afla_blueprint
app.register_blueprint(afla_blueprint) |
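A rough sketch of using the factory above now that db lives at module level: create the app, push an application context, then work with the shared SQLAlchemy handle. The config key 'default' is a guess at an entry in the project's config dict.
from app import create_app, db  # package name taken from the file path app/__init__.py

application = create_app('default')  # assumed config key
with application.app_context():
    db.create_all()  # creates tables for any models registered against db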
c3d6613b4f611857bee1c1b9c414aebd9abf21d7 | amazon/ion/__init__.py | amazon/ion/__init__.py | # Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at:
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the
# License.
# Python 2/3 compatibility
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__author__ = 'Amazon.com, Inc.'
__version__ = '0.1.0'
__all__ = [
'core',
'exceptions',
'reader',
'reader_binary',
'reader_managed',
'simple_types',
'simpleion',
'symbols',
'util',
'writer',
'writer_binary',
'writer_binary_raw',
'writer_binary_raw_fields',
'writer_buffer',
'writer_text',
]
| # Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at:
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the
# License.
# Python 2/3 compatibility
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__author__ = 'Amazon.com, Inc.'
__version__ = '0.4.1'
__all__ = [
'core',
'exceptions',
'reader',
'reader_binary',
'reader_managed',
'simple_types',
'simpleion',
'symbols',
'util',
'writer',
'writer_binary',
'writer_binary_raw',
'writer_binary_raw_fields',
'writer_buffer',
'writer_text',
]
| Change self-reported version from 0.1.0 to 0.4.1 | Change self-reported version from 0.1.0 to 0.4.1
| Python | apache-2.0 | amznlabs/ion-python,almann/ion-python | ---
+++
@@ -18,7 +18,7 @@
from __future__ import print_function
__author__ = 'Amazon.com, Inc.'
-__version__ = '0.1.0'
+__version__ = '0.4.1'
__all__ = [
'core', |
154b64b2ee56fa4391251268ba4a85d178bedd60 | djangoautoconf/urls.py | djangoautoconf/urls.py | from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
# from mezzanine.core.views import direct_to_template
admin.autodiscover()
# Must be defined before auto discover and urlpatterns var. So when there is root url
# injection, we first insert root url to this, then the last line will insert it to real urlpatterns
default_app_url_patterns = []
from djangoautoconf import auto_conf_urls
auto_conf_urls.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^default_django_15_and_below/', include('default_django_15_and_below.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
# url(r'^', include('demo.urls')),
# url(r'^obj_sys/', include('obj_sys.urls')),
# url("^$", direct_to_template, {"template": "index.html"}, name="home"),
)
urlpatterns = [
# ... the rest of your URLconf goes here ...
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += default_app_url_patterns
| from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
# from mezzanine.core.views import direct_to_template
admin.autodiscover()
# Must be defined before auto discover and urlpatterns var. So when there is root url
# injection, we first insert root url to this, then the last line will insert it to real urlpatterns
default_app_url_patterns = []
from djangoautoconf import auto_conf_urls
auto_conf_urls.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^default_django_15_and_below/', include('default_django_15_and_below.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
# url(r'^', include('demo.urls')),
# url(r'^obj_sys/', include('obj_sys.urls')),
# url("^$", direct_to_template, {"template": "index.html"}, name="home"),
)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += default_app_url_patterns
| Fix the issue of override url by mistake. | Fix the issue of override url by mistake.
| Python | bsd-3-clause | weijia/djangoautoconf,weijia/djangoautoconf | ---
+++
@@ -30,8 +30,6 @@
# url(r'^obj_sys/', include('obj_sys.urls')),
# url("^$", direct_to_template, {"template": "index.html"}, name="home"),
)
-urlpatterns = [
- # ... the rest of your URLconf goes here ...
-] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
+urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += default_app_url_patterns |
890841fb910112853306e2d6b4163cce12262fd5 | xdg/Config.py | xdg/Config.py | """
Functions to configure Basic Settings
"""
language = "C"
windowmanager = None
icon_theme = "highcolor"
icon_size = 48
cache_time = 5
root_mode = False
def setWindowManager(wm):
global windowmanager
windowmanager = wm
def setIconTheme(theme):
global icon_theme
icon_theme = theme
import xdg.IconTheme
xdg.IconTheme.themes = []
def setIconSize(size):
global icon_size
icon_size = size
def setCacheTime(time):
global cache_time
cache_time = time
def setLocale(lang):
import locale
lang = locale.normalize(lang)
locale.setlocale(locale.LC_ALL, lang)
import xdg.Locale
xdg.Locale.update(lang)
def setRootMode(boolean):
global root_mode
root_mode = boolean
| """
Functions to configure Basic Settings
"""
language = "C"
windowmanager = None
icon_theme = "hicolor"
icon_size = 48
cache_time = 5
root_mode = False
def setWindowManager(wm):
global windowmanager
windowmanager = wm
def setIconTheme(theme):
global icon_theme
icon_theme = theme
import xdg.IconTheme
xdg.IconTheme.themes = []
def setIconSize(size):
global icon_size
icon_size = size
def setCacheTime(time):
global cache_time
cache_time = time
def setLocale(lang):
import locale
lang = locale.normalize(lang)
locale.setlocale(locale.LC_ALL, lang)
import xdg.Locale
xdg.Locale.update(lang)
def setRootMode(boolean):
global root_mode
root_mode = boolean
| Correct spelling of default icon theme | Correct spelling of default icon theme
Closes fd.o bug #29294
| Python | lgpl-2.1 | 0312birdzhang/pyxdg | ---
+++
@@ -4,7 +4,7 @@
language = "C"
windowmanager = None
-icon_theme = "highcolor"
+icon_theme = "hicolor"
icon_size = 48
cache_time = 5
root_mode = False |
8703ff401b77333ca23a696026802bacebd879b1 | python-pscheduler/pscheduler/pscheduler/db.py | python-pscheduler/pscheduler/pscheduler/db.py | """
Functions for connecting to the pScheduler database
"""
import psycopg2
def pg_connection(dsn='', autocommit=True):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
# Read the DSN from a file if requested
if dsn.startswith('@'):
with open(dsn[1:], 'r') as dsnfile:
dsn = dsnfile.read().replace('\n', '')
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
| """
Functions for connecting to the pScheduler database
"""
import psycopg2
from filestring import string_from_file
def pg_connection(dsn='', autocommit=True):
"""
Connect to the database, and return a handle to it
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
dsn = string_from_file(dsn)
pg = psycopg2.connect(dsn)
if autocommit:
pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
return pg
def pg_cursor(dsn='', autocommit=True):
"""
Connect to the database, and return a cursor.
Arguments:
dsn - A data source name to use in connecting to the database. If
the string begins with an '@', the remainder will be treated as
the path to a file where the value can be retrieved.
autocommit - Whether or not commits are done automatically when
quesies are issued.
"""
pg = pg_connection(dsn, autocommit)
return pg.cursor()
# TODO: Need a routine that does the select wait currently
# rubberstamped into the services to do timed waits for notifications.
| Use string_from_file() to interpret file-based DSNs | Use string_from_file() to interpret file-based DSNs
| Python | apache-2.0 | perfsonar/pscheduler,mfeit-internet2/pscheduler-dev,perfsonar/pscheduler,perfsonar/pscheduler,perfsonar/pscheduler,mfeit-internet2/pscheduler-dev | ---
+++
@@ -3,6 +3,8 @@
"""
import psycopg2
+
+from filestring import string_from_file
def pg_connection(dsn='', autocommit=True):
"""
@@ -18,10 +20,7 @@
quesies are issued.
"""
- # Read the DSN from a file if requested
- if dsn.startswith('@'):
- with open(dsn[1:], 'r') as dsnfile:
- dsn = dsnfile.read().replace('\n', '')
+ dsn = string_from_file(dsn)
pg = psycopg2.connect(dsn)
if autocommit: |
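string_from_file() is the project's own helper, so its exact behaviour is not shown here; judging from the inline code it replaces, a rough standalone equivalent would be:
def string_from_file_equivalent(value):
    # Approximation of the factored-out logic, not the real filestring module.
    if value.startswith('@'):
        with open(value[1:], 'r') as handle:
            return handle.read().replace('\n', '')
    return value

print(string_from_file_equivalent('host=localhost dbname=pscheduler'))  # non-@ values pass through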
96f9819ab67b48135a61c8a1e15bc808cf82d194 | bokeh/models/widget.py | bokeh/models/widget.py | from __future__ import absolute_import
from ..plot_object import PlotObject
from ..properties import Bool
class Widget(PlotObject):
disabled = Bool(False)
| from __future__ import absolute_import
from ..plot_object import PlotObject
from ..properties import Bool
from ..embed import notebook_div
class Widget(PlotObject):
disabled = Bool(False)
def _repr_html_(self):
return notebook_div(self)
@property
def html(self):
from IPython.core.display import HTML
return HTML(self._repr_html_())
| Implement display protocol for Widget (_repr_html_) | Implement display protocol for Widget (_repr_html_)
This effectively allows us to automatically display plots and widgets.
| Python | bsd-3-clause | evidation-health/bokeh,abele/bokeh,mutirri/bokeh,percyfal/bokeh,htygithub/bokeh,jakirkham/bokeh,rhiever/bokeh,DuCorey/bokeh,srinathv/bokeh,DuCorey/bokeh,awanke/bokeh,clairetang6/bokeh,ericdill/bokeh,ahmadia/bokeh,saifrahmed/bokeh,mutirri/bokeh,bokeh/bokeh,gpfreitas/bokeh,philippjfr/bokeh,xguse/bokeh,srinathv/bokeh,draperjames/bokeh,schoolie/bokeh,laurent-george/bokeh,paultcochrane/bokeh,akloster/bokeh,caseyclements/bokeh,justacec/bokeh,maxalbert/bokeh,philippjfr/bokeh,birdsarah/bokeh,evidation-health/bokeh,rs2/bokeh,phobson/bokeh,PythonCharmers/bokeh,draperjames/bokeh,satishgoda/bokeh,mindriot101/bokeh,PythonCharmers/bokeh,CrazyGuo/bokeh,mindriot101/bokeh,birdsarah/bokeh,jplourenco/bokeh,matbra/bokeh,htygithub/bokeh,deeplook/bokeh,abele/bokeh,bsipocz/bokeh,rhiever/bokeh,laurent-george/bokeh,ericmjl/bokeh,htygithub/bokeh,DuCorey/bokeh,justacec/bokeh,PythonCharmers/bokeh,msarahan/bokeh,mutirri/bokeh,percyfal/bokeh,timsnyder/bokeh,timsnyder/bokeh,muku42/bokeh,deeplook/bokeh,xguse/bokeh,daodaoliang/bokeh,ChristosChristofidis/bokeh,ericmjl/bokeh,timothydmorton/bokeh,percyfal/bokeh,schoolie/bokeh,alan-unravel/bokeh,jplourenco/bokeh,canavandl/bokeh,Karel-van-de-Plassche/bokeh,bokeh/bokeh,evidation-health/bokeh,Karel-van-de-Plassche/bokeh,tacaswell/bokeh,bsipocz/bokeh,mutirri/bokeh,deeplook/bokeh,dennisobrien/bokeh,msarahan/bokeh,quasiben/bokeh,roxyboy/bokeh,josherick/bokeh,mindriot101/bokeh,saifrahmed/bokeh,rothnic/bokeh,CrazyGuo/bokeh,canavandl/bokeh,aiguofer/bokeh,akloster/bokeh,clairetang6/bokeh,almarklein/bokeh,josherick/bokeh,aiguofer/bokeh,timothydmorton/bokeh,ptitjano/bokeh,KasperPRasmussen/bokeh,mindriot101/bokeh,aavanian/bokeh,josherick/bokeh,quasiben/bokeh,xguse/bokeh,saifrahmed/bokeh,KasperPRasmussen/bokeh,akloster/bokeh,awanke/bokeh,ptitjano/bokeh,aavanian/bokeh,azjps/bokeh,tacaswell/bokeh,draperjames/bokeh,alan-unravel/bokeh,ericmjl/bokeh,rs2/bokeh,bokeh/bokeh,stonebig/bokeh,tacaswell/bokeh,ChinaQuants/bokeh,stonebig/bokeh,stuart-knock/bokeh,paultcochrane/bokeh,xguse/bokeh,jakirkham/bokeh,abele/bokeh,alan-unravel/bokeh,KasperPRasmussen/bokeh,birdsarah/bokeh,stuart-knock/bokeh,Karel-van-de-Plassche/bokeh,carlvlewis/bokeh,gpfreitas/bokeh,dennisobrien/bokeh,deeplook/bokeh,alan-unravel/bokeh,lukebarnard1/bokeh,jakirkham/bokeh,ahmadia/bokeh,aavanian/bokeh,phobson/bokeh,clairetang6/bokeh,timsnyder/bokeh,ptitjano/bokeh,ahmadia/bokeh,lukebarnard1/bokeh,rs2/bokeh,tacaswell/bokeh,ericdill/bokeh,matbra/bokeh,satishgoda/bokeh,awanke/bokeh,rothnic/bokeh,evidation-health/bokeh,jplourenco/bokeh,muku42/bokeh,CrazyGuo/bokeh,roxyboy/bokeh,bokeh/bokeh,caseyclements/bokeh,jplourenco/bokeh,matbra/bokeh,gpfreitas/bokeh,ChinaQuants/bokeh,ChinaQuants/bokeh,KasperPRasmussen/bokeh,dennisobrien/bokeh,saifrahmed/bokeh,timothydmorton/bokeh,rhiever/bokeh,timsnyder/bokeh,maxalbert/bokeh,DuCorey/bokeh,azjps/bokeh,birdsarah/bokeh,satishgoda/bokeh,stonebig/bokeh,srinathv/bokeh,rs2/bokeh,aiguofer/bokeh,schoolie/bokeh,rothnic/bokeh,philippjfr/bokeh,laurent-george/bokeh,stonebig/bokeh,matbra/bokeh,justacec/bokeh,maxalbert/bokeh,percyfal/bokeh,jakirkham/bokeh,eteq/bokeh,eteq/bokeh,rs2/bokeh,philippjfr/bokeh,daodaoliang/bokeh,ericdill/bokeh,azjps/bokeh,khkaminska/bokeh,draperjames/bokeh,philippjfr/bokeh,almarklein/bokeh,canavandl/bokeh,ericmjl/bokeh,clairetang6/bokeh,ptitjano/bokeh,srinathv/bokeh,KasperPRasmussen/bokeh,ericmjl/bokeh,htygithub/bokeh,carlvlewis/bokeh,ptitjano/bokeh,aiguofer/bokeh,laurent-george/bokeh,lukebarnard1/bokeh,ChristosChristofidis/bokeh,abele/bokeh,ChristosChristofidis/boke
h,azjps/bokeh,draperjames/bokeh,jakirkham/bokeh,roxyboy/bokeh,Karel-van-de-Plassche/bokeh,roxyboy/bokeh,khkaminska/bokeh,phobson/bokeh,caseyclements/bokeh,paultcochrane/bokeh,percyfal/bokeh,caseyclements/bokeh,muku42/bokeh,eteq/bokeh,msarahan/bokeh,aiguofer/bokeh,almarklein/bokeh,ChinaQuants/bokeh,ericdill/bokeh,PythonCharmers/bokeh,khkaminska/bokeh,carlvlewis/bokeh,canavandl/bokeh,bokeh/bokeh,timsnyder/bokeh,eteq/bokeh,muku42/bokeh,rothnic/bokeh,ahmadia/bokeh,timothydmorton/bokeh,DuCorey/bokeh,stuart-knock/bokeh,bsipocz/bokeh,phobson/bokeh,dennisobrien/bokeh,stuart-knock/bokeh,CrazyGuo/bokeh,aavanian/bokeh,schoolie/bokeh,phobson/bokeh,dennisobrien/bokeh,akloster/bokeh,bsipocz/bokeh,paultcochrane/bokeh,josherick/bokeh,daodaoliang/bokeh,schoolie/bokeh,rhiever/bokeh,maxalbert/bokeh,satishgoda/bokeh,ChristosChristofidis/bokeh,msarahan/bokeh,carlvlewis/bokeh,justacec/bokeh,Karel-van-de-Plassche/bokeh,lukebarnard1/bokeh,daodaoliang/bokeh,azjps/bokeh,awanke/bokeh,khkaminska/bokeh,gpfreitas/bokeh,aavanian/bokeh,quasiben/bokeh | ---
+++
@@ -2,6 +2,15 @@
from ..plot_object import PlotObject
from ..properties import Bool
+from ..embed import notebook_div
class Widget(PlotObject):
disabled = Bool(False)
+
+ def _repr_html_(self):
+ return notebook_div(self)
+
+ @property
+ def html(self):
+ from IPython.core.display import HTML
+ return HTML(self._repr_html_()) |
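The commit above leans on IPython's rich display protocol: when the result of a cell defines _repr_html_(), the notebook front end calls it and renders the returned HTML instead of the plain repr. A minimal, Bokeh-free illustration:
class Fancy(object):
    def _repr_html_(self):
        # Any HTML string works; Widget returns notebook_div(self) instead.
        return "<b>rendered as HTML in a notebook cell</b>"

# As the last expression of a Jupyter/IPython cell, `Fancy()` shows the bold text;
# in a plain interpreter you only get the default repr.
Fancy()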
5a6c809afc6b228d7f5d37154adda162802c0110 | botcommands/vimtips.py | botcommands/vimtips.py | # coding: utf-8
import requests
def vimtips(msg=None):
try:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
except Exception as e:
return None
return u'%s\n%s' % (tip['Content'], tip['Comment'], )
| # coding: utf-8
import requests
from redis_wrap import get_hash
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip()
except Exception as e:
return u'哦,不小心玩坏了……'
return u'%s\n%s' % (tip['Content'], tip['Comment'], )
@job('default')
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
| Use RQ to queue collecting job | Use RQ to queue collecting job
| Python | bsd-2-clause | JokerQyou/bot | ---
+++
@@ -1,9 +1,33 @@
# coding: utf-8
import requests
+from redis_wrap import get_hash
+from rq.decorators import job
def vimtips(msg=None):
try:
- tip = requests.get('http://vim-tips.com/random_tips/json').json()
+ existing_tips = get_hash('vimtips')
+ _len = len(existing_tips)
+ if _len > 0:
+ _index = randint(0, _len - 1)
+ _k = existing_tips.keys()[_index]
+ _v = existing_tips[_k]
+ tip = {
+ 'Content': _k,
+ 'Comment': _v
+ }
+ else:
+ tip = requests.get('http://vim-tips.com/random_tips/json').json()
+ existing_tips.update({
+ tip['Content']: tip['Comment']
+ })
+ collect_tip()
except Exception as e:
- return None
+ return u'哦,不小心玩坏了……'
return u'%s\n%s' % (tip['Content'], tip['Comment'], )
+
+@job('default')
+def collect_tip():
+ tip = requests.get('http://vim-tips.com/random_tips/json').json()
+ get_hash('vimtips').update({
+ tip['Content']: tip['Comment']
+ }) |
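For context on the rq pattern introduced above: decorating a function with @job gives it a delay() method that enqueues the call for a worker instead of running it inline (note the snippet above still calls collect_tip() directly). A hedged sketch assuming a local Redis and a worker consuming the 'default' queue:
from redis import Redis
from rq.decorators import job

@job('default', connection=Redis())  # queue name and connection are illustrative
def add(x, y):
    return x + y

queued = add.delay(2, 3)  # pushed onto the 'default' queue instead of running here
print(queued.id)          # job id; a worker computes the result later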
16d0f3f0ca4ce59f08e598b6f9f25bb6dc8e1713 | benchmark/benchmark.py | benchmark/benchmark.py | import time
import sys
from utils import format_duration
if sys.platform == "win32":
default_timer = time.clock
else:
default_timer = time.time
class Benchmark():
def __init__(self, func, name="", repeat=5):
self.func = func
self.repeat = repeat
self.name = name
self.verbose = False
def run(self, conn):
self.results = []
for x in range(self.repeat):
start = default_timer()
self.func()
end = default_timer()
elapsed = end - start
self.results.append(elapsed)
conn.rollback()
return min(self.results)
def __str__(self):
s = format_duration(min(self.results))
if self.verbose:
s_min = format_duration(min(self.results))
s_avg = format_duration(sum(self.results) / len(self.results))
s_max = format_duration(max(self.results))
s_all = [format_duration(t) for t in self.results]
s += "(min={} avg={} max={} all={})".format(s_min,
s_avg, s_max, s_all)
return " ".join(s)
| import time
import sys
from utils import format_duration
if sys.platform == "win32":
default_timer = time.clock
else:
default_timer = time.time
class Benchmark():
def __init__(self, func, name="", repeat=5):
self.func = func
self.repeat = repeat
self.name = name
self.verbose = False
def run(self, conn):
self.results = []
for x in range(self.repeat):
start = default_timer()
self.func()
end = default_timer()
elapsed = end - start
self.results.append(elapsed)
conn.rollback()
return min(self.results)
def __str__(self):
s = format_duration(min(self.results))
if self.verbose:
s_min = format_duration(min(self.results))
s_avg = format_duration(sum(self.results) / len(self.results))
s_max = format_duration(max(self.results))
s_all = [format_duration(t) for t in self.results]
s += "(min={} avg={} max={} all={})".format(s_min,
s_avg, s_max, s_all)
return s
| Fix bad console output formatting | Fix bad console output formatting
| Python | mit | jameshy/libtree,conceptsandtraining/libtree | ---
+++
@@ -37,4 +37,4 @@
s_all = [format_duration(t) for t in self.results]
s += "(min={} avg={} max={} all={})".format(s_min,
s_avg, s_max, s_all)
- return " ".join(s)
+ return s |
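The bug being fixed above is that str.join iterates over its argument, so joining a single string splits it into characters. A tiny demonstration of why the old return line produced mangled output:
duration = "12.3ms"        # stand-in for whatever format_duration() returned
print(" ".join(duration))  # -> 1 2 . 3 m s   (old behaviour)
print(duration)            # -> 12.3ms        (behaviour after the fix)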
2cf8d03324af2fadf905da811cfab4a29a6bc93a | pony_barn/settings/django_settings.py | pony_barn/settings/django_settings.py | DATABASES = {
'default': {
'ENGINE': '{{ db_engine }}',
'NAME': '{{ db_name }}',
'USER': '{{ db_user}}',
'PASSWORD': '{{ db_pass }}',
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
'TEST_NAME': 'other_db'
}
}
| import os
pid = os.getpid()
DATABASES = {
'default': {
'ENGINE': '{{ db_engine }}',
'NAME': '{{ db_name }}',
'USER': '{{ db_user}}',
'PASSWORD': '{{ db_pass }}',
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
'TEST_NAME': 'other_db_%s' % pid,
}
}
| Append PID to Django database to avoid conflicts. | Append PID to Django database to avoid conflicts. | Python | mit | ericholscher/pony_barn,ericholscher/pony_barn | ---
+++
@@ -1,3 +1,6 @@
+import os
+pid = os.getpid()
+
DATABASES = {
'default': {
'ENGINE': '{{ db_engine }}',
@@ -7,6 +10,6 @@
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
- 'TEST_NAME': 'other_db'
+ 'TEST_NAME': 'other_db_%s' % pid,
}
} |
0d8a28b62c4e54ee84861da75b0f0626bc4e46e7 | get_data_from_twitter.py | get_data_from_twitter.py | # -*- coding: UTF-8 -*-
import numpy
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import json
import config
#Much of this code comes from http://adilmoujahid.com/posts/2014/07/twitter-analytics/
class StdOutListener(StreamListener):
def on_data(self, data_str):
data = json.loads(data_str)
if len(data['entities']['urls']) != 0:
newdata = {'created_at' : data['created_at'], 'text' : data['text'], 'urls' : [url['expanded_url'] for url in data['entities']['urls'] if url['url'] != '' ] }
print json.dumps(newdata)
return True
def on_error(self, status):
print status
l = StdOutListener()
auth = OAuthHandler(config.consumer_key, config.consumer_secret)
auth.set_access_token(config.access_token, config.access_token_secret)
stream = Stream(auth, l)
#stream.filter(track=['#Trump2016', '#Hillary2016'])
stream.filter(track=['#Trump2016'])
| # -*- coding: UTF-8 -*-
import numpy
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import json
import config
#Much of this code comes from http://adilmoujahid.com/posts/2014/07/twitter-analytics/
class StdOutListener(StreamListener):
def on_data(self, data_str):
data = json.loads(data_str)
if len(data['entities']['urls']) != 0:
newdata = {'created_at' : data['created_at'], 'text' : data['text'], 'urls' : [url['expanded_url'] for url in data['entities']['urls'] if url['url'] != '' ] }
print json.dumps(newdata)
return True
def on_error(self, status):
print status
l = StdOutListener()
auth = OAuthHandler(config.consumer_key, config.consumer_secret)
auth.set_access_token(config.access_token, config.access_token_secret)
stream = Stream(auth, l)
stream.filter(track=['#Trump2016', '#Hillary2016'])
| Add back filter for Hillary2016 | Add back filter for Hillary2016
| Python | mpl-2.0 | aDataAlchemist/election-tweets | ---
+++
@@ -24,6 +24,4 @@
auth.set_access_token(config.access_token, config.access_token_secret)
stream = Stream(auth, l)
-#stream.filter(track=['#Trump2016', '#Hillary2016'])
-stream.filter(track=['#Trump2016'])
-
+stream.filter(track=['#Trump2016', '#Hillary2016']) |
8f1fd73d6a88436d24f936adec997f88ad7f1413 | neutron/tests/unit/objects/test_l3agent.py | neutron/tests/unit/objects/test_l3agent.py | # Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import l3agent
from neutron.tests.unit.objects import test_base
from neutron.tests.unit import testlib_api
class RouterL3AgentBindingIfaceObjTestCase(test_base.BaseObjectIfaceTestCase):
_test_class = l3agent.RouterL3AgentBinding
class RouterL3AgentBindingDbObjTestCase(test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = l3agent.RouterL3AgentBinding
def setUp(self):
super(RouterL3AgentBindingDbObjTestCase, self).setUp()
self._create_test_router()
def getter():
self._create_test_agent()
return self._agent['id']
self.update_obj_fields(
{'router_id': self._router.id,
'l3_agent_id': getter})
| # Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.objects import l3agent
from neutron.tests.unit.objects import test_base
from neutron.tests.unit import testlib_api
class RouterL3AgentBindingIfaceObjTestCase(test_base.BaseObjectIfaceTestCase):
_test_class = l3agent.RouterL3AgentBinding
class RouterL3AgentBindingDbObjTestCase(test_base.BaseDbObjectTestCase,
testlib_api.SqlTestCase):
_test_class = l3agent.RouterL3AgentBinding
def setUp(self):
super(RouterL3AgentBindingDbObjTestCase, self).setUp()
self._create_test_router()
def getter():
self._create_test_agent()
return self._agent['id']
index = iter(range(1, len(self.objs) + 1))
self.update_obj_fields(
{'router_id': self._router.id,
'binding_index': lambda: next(index),
'l3_agent_id': getter})
| Use unique binding_index for RouterL3AgentBinding | Use unique binding_index for RouterL3AgentBinding
This is because (router_id, binding_index) tuple is expected to be
unique, as per db model.
Closes-Bug: #1674434
Change-Id: I64fcee88f2ac942e6fa173644fbfb7655ea6041b
| Python | apache-2.0 | openstack/neutron,mahak/neutron,noironetworks/neutron,huntxu/neutron,eayunstack/neutron,openstack/neutron,huntxu/neutron,eayunstack/neutron,mahak/neutron,mahak/neutron,openstack/neutron,noironetworks/neutron | ---
+++
@@ -35,6 +35,8 @@
self._create_test_agent()
return self._agent['id']
+ index = iter(range(1, len(self.objs) + 1))
self.update_obj_fields(
{'router_id': self._router.id,
+ 'binding_index': lambda: next(index),
'l3_agent_id': getter}) |
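The fix above relies on a small iterator trick: each call to the field getter pulls the next value, so every generated binding receives a distinct binding_index. The pattern in isolation:
index = iter(range(1, 4))
next_binding_index = lambda: next(index)

print(next_binding_index())  # 1
print(next_binding_index())  # 2
print(next_binding_index())  # 3 -- values never repeat, satisfying the unique constraint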
867a8081646eb061555eda2471c5174a842dd6fd | tests/test_floodplain.py | tests/test_floodplain.py | from unittest import TestCase
import niche_vlaanderen as nv
import numpy as np
import rasterio
class TestFloodPlain(TestCase):
def test__calculate(self):
fp = nv.FloodPlain()
fp._calculate(depth=np.array([1, 2, 3]), frequency="T25",
period="winter", duration=1)
np.testing.assert_equal(np.array([3, 3, 3]), fp._veg[1])
def test_calculate(self):
fp = nv.FloodPlain()
fp.calculate("testcase/floodplains/ff_bt_t10_h.asc", "T10",
period="winter", duration=1)
with rasterio.open(
"testcase/floodplains/result/F25-T10-P1-winter.asc") as dst:
expected = dst.read(1)
np.testing.assert_equal(expected, fp._veg[25])
def test_plot(self):
fp = nv.FloodPlain()
fp.calculate("testcase/floodplains/ff_bt_t10_h.asc", "T10",
period="winter", duration=1)
fp.plot(7)
| from unittest import TestCase
import niche_vlaanderen as nv
import numpy as np
import rasterio
class TestFloodPlain(TestCase):
def test__calculate(self):
fp = nv.FloodPlain()
fp._calculate(depth=np.array([1, 2, 3]), frequency="T25",
period="winter", duration=1)
np.testing.assert_equal(np.array([3, 3, 3]), fp._veg[1])
def test_calculate(self):
fp = nv.FloodPlain()
fp.calculate("testcase/floodplains/ff_bt_t10_h.asc", "T10",
period="winter", duration=1)
with rasterio.open(
"testcase/floodplains/result/F25-T10-P1-winter.asc") as dst:
expected = dst.read(1)
np.testing.assert_equal(expected, fp._veg[25])
def test_plot(self):
import matplotlib as mpl
mpl.use('agg')
import matplotlib.pyplot as plt
plt.show = lambda: None
fp = nv.FloodPlain()
fp.calculate("testcase/floodplains/ff_bt_t10_h.asc", "T10",
period="winter", duration=1)
fp.plot(7)
| Fix tests for running floodplain model in ci | Fix tests for running floodplain model in ci
| Python | mit | johanvdw/niche_vlaanderen | ---
+++
@@ -20,6 +20,12 @@
np.testing.assert_equal(expected, fp._veg[25])
def test_plot(self):
+ import matplotlib as mpl
+ mpl.use('agg')
+
+ import matplotlib.pyplot as plt
+ plt.show = lambda: None
+
fp = nv.FloodPlain()
fp.calculate("testcase/floodplains/ff_bt_t10_h.asc", "T10",
period="winter", duration=1) |
fc8ac6ba5081e7847847d31588a65db8ea13416c | openprescribing/matrixstore/build/dates.py | openprescribing/matrixstore/build/dates.py | DEFAULT_NUM_MONTHS = 60
def generate_dates(end_str, months=None):
"""
Given an end date as a string in YYYY-MM form (or the underscore separated
equivalent), return a list of N consecutive months as strings in YYYY-MM-01
form, with that month as the final member
"""
if months is None:
months = DEFAULT_NUM_MONTHS
end_date = parse_date(end_str)
assert months > 0
dates = []
for offset in range(1-months, 1):
date = increment_months(end_date, offset)
dates.append('{:04d}-{:02d}-01'.format(date[0], date[1]))
return dates
def parse_date(date_str):
"""
Given a date string in YYYY-MM form (or the underscore separated
equivalent), return a pair of (year, month) integers
"""
year_str, month_str = date_str.replace('_', '-').split('-')[:2]
assert len(year_str) == 4
assert len(month_str) == 2
return int(year_str), int(month_str)
def increment_months((year, month), months):
"""
Given a pair of (year, month) integers return the (year, month) pair N
months in the future
"""
i = (year*12) + (month - 1)
i += months
return int(i/12), (i % 12) + 1
| DEFAULT_NUM_MONTHS = 60
def generate_dates(end_str, months=None):
"""
Given an end date as a string in YYYY-MM form (or the underscore separated
equivalent), return a list of N consecutive months as strings in YYYY-MM-01
form, with that month as the final member
"""
if months is None:
months = DEFAULT_NUM_MONTHS
end_date = parse_date(end_str)
assert months > 0
dates = []
for offset in range(1-months, 1):
date = increment_months(end_date, offset)
dates.append('{:04d}-{:02d}-01'.format(date[0], date[1]))
return dates
def parse_date(date_str):
"""
Given a date string in YYYY-MM form (or the underscore separated
equivalent), return a pair of (year, month) integers
"""
year_str, month_str = date_str.replace('_', '-').split('-')[:2]
assert len(year_str) == 4
assert len(month_str) == 2
return int(year_str), int(month_str)
def increment_months(year_month, months):
"""
Given a pair of (year, month) integers return the (year, month) pair N
months in the future
"""
year, month = year_month
i = (year*12) + (month - 1)
i += months
return int(i/12), (i % 12) + 1
| Fix another py27-ism which Black can't handle | Fix another py27-ism which Black can't handle
Not sure how I missed this one last time.
| Python | mit | ebmdatalab/openprescribing,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc | ---
+++
@@ -29,11 +29,12 @@
return int(year_str), int(month_str)
-def increment_months((year, month), months):
+def increment_months(year_month, months):
"""
Given a pair of (year, month) integers return the (year, month) pair N
months in the future
"""
+ year, month = year_month
i = (year*12) + (month - 1)
i += months
return int(i/12), (i % 12) + 1 |
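The month arithmetic above flattens (year, month) into a single month count and back. A worked check with illustrative values; the import path mirrors the file location and may differ in practice:
from openprescribing.matrixstore.build.dates import generate_dates, increment_months

print(increment_months((2019, 11), 3))      # (2020, 2): 2019*12 + 10 + 3 = 24241 -> year 2020, month 2
print(generate_dates('2018-03', months=3))  # ['2018-01-01', '2018-02-01', '2018-03-01']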
68aeddc44d4c9ace7ab9d2475a92c5fd39b4a665 | ckanext/requestdata/controllers/request_data.py | ckanext/requestdata/controllers/request_data.py | from ckan.lib import base
from ckan.common import c, _
from ckan import logic
from ckanext.requestdata import emailer
from ckan.plugins import toolkit
import ckan.model as model
import ckan.plugins as p
import json
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
ValidationError = logic.ValidationError
abort = base.abort
BaseController = base.BaseController
class RequestDataController(BaseController):
def send_request(self):
'''Send mail to resource owner.
:param data: Contact form data.
:type data: object
'''
print "Entered"
context = {'model': model, 'session': model.Session,
'user': c.user, 'auth_user_obj': c.userobj}
try:
if p.toolkit.request.method == 'POST':
data = dict(toolkit.request.POST)
content = data["message_content"]
to = data['email_address']
user = context['auth_user_obj']
mail_subject = "Request data"
user_email = user.email
get_action('requestdata_request_create')(context, data)
except NotAuthorized:
abort(403, _('Unauthorized to update this dataset.'))
except ValidationError:
error = {
'success': False,
'error': {
'message': 'An error occurred while requesting the data.'
}
}
return json.dumps(error)
response_message = emailer.send_email(content, to, user_email, mail_subject)
return json.dumps(response_message) | from ckan.lib import base
from ckan.common import c, _
from ckan import logic
from ckanext.requestdata import emailer
from ckan.plugins import toolkit
import ckan.model as model
import ckan.plugins as p
import json
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
ValidationError = logic.ValidationError
abort = base.abort
BaseController = base.BaseController
class RequestDataController(BaseController):
def send_request(self):
'''Send mail to resource owner.
:param data: Contact form data.
:type data: object
:rtype: json
'''
print "Entered"
context = {'model': model, 'session': model.Session,
'user': c.user, 'auth_user_obj': c.userobj}
try:
if p.toolkit.request.method == 'POST':
data = dict(toolkit.request.POST)
content = data["message_content"]
to = data['email_address']
mail_subject = "Request data"
get_action('requestdata_request_create')(context, data)
except NotAuthorized:
abort(403, _('Unauthorized to update this dataset.'))
except ValidationError:
error = {
'success': False,
'error': {
'message': 'An error occurred while requesting the data.'
}
}
return json.dumps(error)
response_message = emailer.send_email(content, to, mail_subject)
return json.dumps(response_message) | Remove user_mail from request data | Remove user_mail from request data
| Python | agpl-3.0 | ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata | ---
+++
@@ -23,6 +23,7 @@
:param data: Contact form data.
:type data: object
+ :rtype: json
'''
print "Entered"
context = {'model': model, 'session': model.Session,
@@ -33,9 +34,7 @@
data = dict(toolkit.request.POST)
content = data["message_content"]
to = data['email_address']
- user = context['auth_user_obj']
mail_subject = "Request data"
- user_email = user.email
get_action('requestdata_request_create')(context, data)
except NotAuthorized:
abort(403, _('Unauthorized to update this dataset.'))
@@ -49,6 +48,6 @@
return json.dumps(error)
- response_message = emailer.send_email(content, to, user_email, mail_subject)
+ response_message = emailer.send_email(content, to, mail_subject)
return json.dumps(response_message) |
73b61983de6ff655b4f11205c0acd2b2f92915f4 | eva/util/nutil.py | eva/util/nutil.py | import numpy as np
def to_rgb(pixels):
return np.repeat(pixels, 3 if pixels.shape[2] == 1 else 1, 2)
def binarize(arr, generate=np.random.uniform):
"""
Stochastically binarize values in [0, 1] by treating them as p-values of
a Bernoulli distribution.
"""
return (generate(size=arr.shape) < arr).astype('float32')
| import numpy as np
def to_rgb(pixels):
return np.repeat(pixels, 3 if pixels.shape[2] == 1 else 1, 2)
def binarize(arr, generate=np.random.uniform):
return (generate(size=arr.shape) < arr).astype('i')
| Remove comment; change to int | Remove comment; change to int
| Python | apache-2.0 | israelg99/eva | ---
+++
@@ -5,8 +5,4 @@
return np.repeat(pixels, 3 if pixels.shape[2] == 1 else 1, 2)
def binarize(arr, generate=np.random.uniform):
- """
- Stochastically binarize values in [0, 1] by treating them as p-values of
- a Bernoulli distribution.
- """
- return (generate(size=arr.shape) < arr).astype('float32')
+ return (generate(size=arr.shape) < arr).astype('i') |
a24c657ca84e553a39e23d201d605d84d828c322 | examples/hello.py | examples/hello.py | from cell import Actor, Agent
from cell.actors import Server
from kombu import Connection
from kombu.log import setup_logging
connection = Connection()
class GreetingActor(Server):
default_routing_key = 'GreetingActor'
class state:
def greet(self, who='world'):
return 'Hello %s' % who
greeting = GreetingActor(connection)
class Printer(Actor):
default_routing_key = 'Printer'
class state:
def echo(self, msg = 'test'):
print 'I am a printer:',msg
#self.output_edge.send(msg)
return msg
printerActor = Printer(connection)
class Ihu(Actor):
default_routing_key = 'Printer'
class state:
def temp(self, msg = 'blabla'):
self.output_server.send(msg)
class GreetingAgent(Agent):
actors = [greeting, printerActor]
if __name__ == '__main__':
consumer = GreetingAgent(connection).consume_from_commandline()
for _ in consumer:
print 'Received'
# Run this script from the command line and try this
# in another console:
#
# >>> from hello import greeting
# >>> greeting.call('greet')
# 'Hello world'
| from cell import Actor, Agent
from cell.actors import Server
from kombu import Connection
from kombu.log import setup_logging
connection = Connection()
class GreetingActor(Server):
default_routing_key = 'GreetingActor'
class state:
def greet(self, who='world'):
return 'Hello %s' % who
greeting = GreetingActor(connection)
class GreetingAgent(Agent):
actors = [greeting]
if __name__ == '__main__':
GreetingAgent(connection).consume_from_commandline()
# Run this script from the command line and try this
# in another console:
#
# >>> from hello import greeting
# >>> greeting.call('greet')
# 'Hello world'
| Use the Server class (an Actor derived class) | Use the Server class (an Actor derived class)
| Python | bsd-3-clause | celery/cell,celery/cell | ---
+++
@@ -11,41 +11,17 @@
default_routing_key = 'GreetingActor'
class state:
-
def greet(self, who='world'):
return 'Hello %s' % who
greeting = GreetingActor(connection)
+
-class Printer(Actor):
- default_routing_key = 'Printer'
-
- class state:
- def echo(self, msg = 'test'):
- print 'I am a printer:',msg
- #self.output_edge.send(msg)
- return msg
-
-printerActor = Printer(connection)
-
-
-
-class Ihu(Actor):
- default_routing_key = 'Printer'
-
- class state:
- def temp(self, msg = 'blabla'):
- self.output_server.send(msg)
-
-
class GreetingAgent(Agent):
- actors = [greeting, printerActor]
+ actors = [greeting]
if __name__ == '__main__':
- consumer = GreetingAgent(connection).consume_from_commandline()
- for _ in consumer:
- print 'Received'
-
+ GreetingAgent(connection).consume_from_commandline()
# Run this script from the command line and try this
# in another console:
# |
d70ccd856bb4ddb061ff608716ef15f778380d62 | gnsq/stream/defalte.py | gnsq/stream/defalte.py | from __future__ import absolute_import
import zlib
from .compression import CompressionSocket
class DefalteSocket(CompressionSocket):
def __init__(self, socket, level):
self._decompressor = zlib.decompressobj(level)
self._compressor = zlib.compressobj(level)
super(DefalteSocket, self).__init__(socket)
def compress(self, data):
return self._compressor.compress(data)
def decompress(self, data):
return self._decompressor.decompress(data)
| from __future__ import absolute_import
import zlib
from .compression import CompressionSocket
class DefalteSocket(CompressionSocket):
def __init__(self, socket, level):
wbits = -zlib.MAX_WBITS
self._decompressor = zlib.decompressobj(wbits)
self._compressor = zlib.compressobj(level, zlib.DEFLATED, wbits)
super(DefalteSocket, self).__init__(socket)
def compress(self, data):
return self._compressor.compress(data)
def decompress(self, data):
return self._decompressor.decompress(data)
| Set correct waits for deflate. | Set correct waits for deflate.
| Python | bsd-3-clause | wtolson/gnsq,hiringsolved/gnsq,wtolson/gnsq | ---
+++
@@ -6,8 +6,9 @@
class DefalteSocket(CompressionSocket):
def __init__(self, socket, level):
- self._decompressor = zlib.decompressobj(level)
- self._compressor = zlib.compressobj(level)
+ wbits = -zlib.MAX_WBITS
+ self._decompressor = zlib.decompressobj(wbits)
+ self._compressor = zlib.compressobj(level, zlib.DEFLATED, wbits)
super(DefalteSocket, self).__init__(socket)
def compress(self, data): |
90531ef7caf2ad3f6cd5a50fd2f0acdc1236abd4 | importer/importer/__init__.py | importer/importer/__init__.py | import aiohttp
from aioes import Elasticsearch
from .importer import Importer
from .kudago import KudaGo
ELASTIC_ENDPOINTS = ['localhost:9200']
ELASTIC_INDEX_NAME = 'theatrics'
async def update():
with aiohttp.ClientSession() as http_client:
elastic = Elasticsearch(ELASTIC_ENDPOINTS)
kudago = KudaGo(http_client)
importer = Importer(kudago, elastic, ELASTIC_INDEX_NAME)
return await importer.go()
| import aiohttp
from aioes import Elasticsearch
from .importer import Importer
from .kudago import KudaGo
ELASTIC_ENDPOINTS = ['localhost:9200']
ELASTIC_INDEX_NAME = 'theatrics'
async def update():
async with aiohttp.ClientSession() as http_client:
elastic = Elasticsearch(ELASTIC_ENDPOINTS)
kudago = KudaGo(http_client)
importer = Importer(kudago, elastic, ELASTIC_INDEX_NAME)
return await importer.go()
| Use async with with ClientSession | Use async with with ClientSession
| Python | mit | despawnerer/theatrics,despawnerer/theatrics,despawnerer/theatrics | ---
+++
@@ -10,7 +10,7 @@
async def update():
- with aiohttp.ClientSession() as http_client:
+ async with aiohttp.ClientSession() as http_client:
elastic = Elasticsearch(ELASTIC_ENDPOINTS)
kudago = KudaGo(http_client)
importer = Importer(kudago, elastic, ELASTIC_INDEX_NAME) |
02ca8bc5908b0ff15cd97846e1fd1488eddb4087 | backend/schedule/models.py | backend/schedule/models.py | from django.db import models
class Event(models.Model):
setup_start = models.DateField
setup_end = model.DateField
event_start = model.DateField
event_end = model.DateField
teardown_start = model.DateField
teardown_end = model.DateField
needed_resources = models.ManyToMany(Resource)
status = models.CharField(length=255, blank=False)
visibility = models.CharField(length=255, blank=False)
event_organizer = models.ManyToMany(Organization)
location = models.ForeignKey(Location)
class Location(models.Model):
personel = models.ForeignKey('User')
capacity = models.IntegerField
location_name = models.CharField(length=255, blank=False)
availability = models.CharField(length=255, blank=False)
class Organization(models.Model):
name = models.CharField(length=255, blank=False)
phone_number = models.CharField(length=11, blank=True)
email = models.CharField(length=255)
class Resource(models.Model):
isFixed = models.BooleanField
resourceType = models.CharField(length=255, blank=False)
description = models.CharField(length=255, blank=True)
location = models.ForeignKey(Location, null=True)
| from django.db import models
class Event(models.Model):
setup_start = models.DateField
setup_end = model.DateField
event_start = model.DateField
event_end = model.DateField
teardown_start = model.DateField
teardown_end = model.DateField
needed_resources = models.ManyToMany(Resource)
status = models.CharField(length=255, blank=False)
visibility = models.CharField(length=255, blank=False)
event_organizer = models.ManyToMany(Organization)
location = models.ForeignKey(Location)
class Location(models.Model):
personel = models.ForeignKey('User')
square_footage = models.IntegerField
capacity = models.IntegerField
location_name = models.CharField(length=255, blank=False)
availability = models.CharField(length=255, blank=False)
class Organization(models.Model):
name = models.CharField(length=255, blank=False)
phone_number = models.CharField(length=11, blank=True)
email = models.CharField(length=255)
class Resource(models.Model):
isFixed = models.BooleanField
resourceType = models.CharField(length=255, blank=False)
description = models.CharField(length=255, blank=True)
location = models.ForeignKey(Location, null=True)
| Add square feet to Location data model. | Add square feet to Location data model.
| Python | mit | bable5/schdlr,bable5/schdlr,bable5/schdlr,bable5/schdlr | ---
+++
@@ -15,6 +15,7 @@
class Location(models.Model):
personel = models.ForeignKey('User')
+ square_footage = models.IntegerField
capacity = models.IntegerField
location_name = models.CharField(length=255, blank=False)
availability = models.CharField(length=255, blank=False) |
1fbb58a3f6692a7467f758ccedca6f8baa96a165 | text/__init__.py | text/__init__.py | #! /usr/bin/env python
import os
def get_files(path, ext=None):
"""
Get all files in directory path, optionally with the specified extension
"""
if ext is None:
ext = ''
return [
os.path.abspath(fname)
for fname in os.listdir(path)
if os.path.isfile(fname)
if fname.endswith(ext)
]
def blob_text(filenames):
"""
Create a blob of text by reading in all filenames into a string
"""
return '\n'.join([open(fname).read() for fname in filenames])
| #! /usr/bin/env python
import os
import re
def get_files(path, ext=None):
"""
Get all files in directory path, optionally with the specified extension
"""
if ext is None:
ext = ''
return [
os.path.abspath(fname)
for fname in os.listdir(path)
if os.path.isfile(fname)
if fname.endswith(ext)
]
def blob_text(filenames):
"""
Create a blob of text by reading in all filenames into a string
"""
return '\n'.join([open(fname).read() for fname in filenames])
def get_definition(text, startswith):
"""
Parse text to retrieve the definitions that start with keyword
"""
return [
re.split('[ ()]', line.strip())[1]
for line in [line.strip() for line in text.splitlines()]
if line.startswith(startswith)
]
def get_functions(text, startswith='def '):
"""
Parse text to retrive the functions and methods defined
"""
return get_definition(text, startswith)
def get_classes(text, startswith='class '):
"""
Parse text to retrive the functions and methods defined
"""
return get_definition(text, startswith)
| Add function to get the functions / classes that are defined | Add function to get the functions / classes that are defined
| Python | mit | IanLee1521/utilities | ---
+++
@@ -1,6 +1,7 @@
#! /usr/bin/env python
import os
+import re
def get_files(path, ext=None):
@@ -23,3 +24,28 @@
Create a blob of text by reading in all filenames into a string
"""
return '\n'.join([open(fname).read() for fname in filenames])
+
+
+def get_definition(text, startswith):
+ """
+ Parse text to retrieve the definitions that start with keyword
+ """
+ return [
+ re.split('[ ()]', line.strip())[1]
+ for line in [line.strip() for line in text.splitlines()]
+ if line.startswith(startswith)
+ ]
+
+
+def get_functions(text, startswith='def '):
+ """
+ Parse text to retrive the functions and methods defined
+ """
+ return get_definition(text, startswith)
+
+
+def get_classes(text, startswith='class '):
+ """
+ Parse text to retrive the functions and methods defined
+ """
+ return get_definition(text, startswith) |
8e75605e0511b85dfd500b644613739f29705da6 | cfnf.py | cfnf.py | import sublime, sublime_plugin
import time
class cfnewfile(sublime_plugin.TextCommand):
def run(self, edit):
localtime = time.asctime( time.localtime(time.time()) )
self.view.insert(edit,0,"<!---\r\n Name:\r\n Description:\r\n Written By:\r\n Date Created: "+localtime+"\r\n History:\r\n--->\r\n")
|
import sublime, sublime_plugin
import time
class cfnfCommand(sublime_plugin.WindowCommand):
def run(self):
a = self.window.new_file()
a.run_command("addheader")
class addheaderCommand(sublime_plugin.TextCommand):
def run(self, edit):
localtime = time.asctime( time.localtime(time.time()) )
self.view.insert(edit,0,"<!---\n Name:\n Description:\n Written By:\n Date Created: "+localtime+"\n History:\n--->\n")
| Send text to new file | Send text to new file
| Python | bsd-2-clause | dwkd/SublimeCFNewFile | ---
+++
@@ -1,7 +1,13 @@
+
import sublime, sublime_plugin
import time
-class cfnewfile(sublime_plugin.TextCommand):
- def run(self, edit):
+class cfnfCommand(sublime_plugin.WindowCommand):
+ def run(self):
+ a = self.window.new_file()
+ a.run_command("addheader")
+
+class addheaderCommand(sublime_plugin.TextCommand):
+ def run(self, edit):
localtime = time.asctime( time.localtime(time.time()) )
- self.view.insert(edit,0,"<!---\r\n Name:\r\n Description:\r\n Written By:\r\n Date Created: "+localtime+"\r\n History:\r\n--->\r\n")
+ self.view.insert(edit,0,"<!---\n Name:\n Description:\n Written By:\n Date Created: "+localtime+"\n History:\n--->\n") |
99af762edb7a8fa4b0914bdf157af151a814adb6 | backend/api.py | backend/api.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import json
import boto3
table_name = os.environ.get("TABLE_NAME")
table = boto3.resource("dynamodb").Table(table_name)
def _log_dynamo(response):
print "HTTPStatusCode:{}, RetryAttempts:{}, ScannedCount:{}, Count:{}".format(
response.get("ResponseMetadata").get("HTTPStatusCode"),
response.get("ResponseMetadata").get("RetryAttempts"),
response.get("ScannedCount"),
response.get("Count")
)
def get_items(event, context):
response = table.scan(Limit=10)
_log_dynamo(response)
return {
"statusCode": 200,
"body": json.dumps(response["Items"], indent=1)
}
def get_item(event, context):
response = table.get_item(Key={"id": event.get("pathParameters").get("id")})
_log_dynamo(response)
return {
"statusCode": 200,
"body": json.dumps(response["Item"], indent=1)
}
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import json
import boto3
table_name = os.environ.get("TABLE_NAME")
table = boto3.resource("dynamodb").Table(table_name)
def _log_dynamo(response):
print "HTTPStatusCode:{}, RetryAttempts:{}, ScannedCount:{}, Count:{}".format(
response.get("ResponseMetadata").get("HTTPStatusCode"),
response.get("ResponseMetadata").get("RetryAttempts"),
response.get("ScannedCount"),
response.get("Count")
)
def get_items(event, context):
response = table.scan(Limit=10)
_log_dynamo(response)
return {
"statusCode": 200,
"body": json.dumps(response["Items"], indent=1),
"headers": {"Access-Control-Allow-Origin": "*"}
}
def get_item(event, context):
response = table.get_item(Key={"id": event.get("pathParameters").get("id")})
_log_dynamo(response)
return {
"statusCode": 200,
"body": json.dumps(response["Item"], indent=1)
}
| Add CORS header to function | Add CORS header to function
| Python | mit | Vilsepi/no-servers,Vilsepi/no-servers,Vilsepi/no-servers,Vilsepi/no-servers | ---
+++
@@ -21,7 +21,8 @@
_log_dynamo(response)
return {
"statusCode": 200,
- "body": json.dumps(response["Items"], indent=1)
+ "body": json.dumps(response["Items"], indent=1),
+ "headers": {"Access-Control-Allow-Origin": "*"}
}
def get_item(event, context): |
6589df70baad1b57c604736d75e424465cf8775e | djangoautoconf/auto_conf_admin_tools/reversion_feature.py | djangoautoconf/auto_conf_admin_tools/reversion_feature.py | from djangoautoconf.auto_conf_admin_tools.admin_feature_base import AdminFeatureBase
from django.conf import settings
__author__ = 'weijia'
class ReversionFeature(AdminFeatureBase):
def __init__(self):
super(ReversionFeature, self).__init__()
self.related_search_fields = {}
def process_parent_class_list(self, parent_list, class_inst):
if "reversion" in settings.INSTALLED_APPS:
from reversion import VersionAdmin
parent_list.append(VersionAdmin)
| from djangoautoconf.auto_conf_admin_tools.admin_feature_base import AdminFeatureBase
from django.conf import settings
__author__ = 'weijia'
class ReversionFeature(AdminFeatureBase):
def __init__(self):
super(ReversionFeature, self).__init__()
self.related_search_fields = {}
def process_parent_class_list(self, parent_list, class_inst):
if "reversion" in settings.INSTALLED_APPS:
try:
from reversion import VersionAdmin # for Django 1.5
except:
from reversion.admin import VersionAdmin # for Django 1.8
parent_list.append(VersionAdmin)
| Fix import issue for Django 1.5 above | Fix import issue for Django 1.5 above
| Python | bsd-3-clause | weijia/djangoautoconf,weijia/djangoautoconf | ---
+++
@@ -11,5 +11,8 @@
def process_parent_class_list(self, parent_list, class_inst):
if "reversion" in settings.INSTALLED_APPS:
- from reversion import VersionAdmin
+ try:
+ from reversion import VersionAdmin # for Django 1.5
+ except:
+ from reversion.admin import VersionAdmin # for Django 1.8
parent_list.append(VersionAdmin) |
6df7ee955c7dfaee9a597b331dbc4c448fe3738a | fpr/migrations/0017_ocr_unique_names.py | fpr/migrations/0017_ocr_unique_names.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def data_migration(apps, schema_editor):
"""Migration that causes each OCR text file to include the UUID of its
source file in its filename. This prevents OCR text files from overwriting
one another when there are two identically named source files in a
transfer. See
https://github.com/artefactual/archivematica-fpr-admin/issues/66
"""
IDCommand = apps.get_model('fpr', 'IDCommand')
ocr_command = IDCommand.objects.get(
uuid='5d501dbf-76bb-4569-a9db-9e367800995e')
ocr_command.command = (
'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n'
'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n'
'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"')
ocr_command.output_location = (
'%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt')
ocr_command.save()
class Migration(migrations.Migration):
dependencies = [
('fpr', '0016_update_idtools'),
]
operations = [
migrations.RunPython(data_migration),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def data_migration(apps, schema_editor):
"""Migration that causes each OCR text file to include the UUID of its
source file in its filename. This prevents OCR text files from overwriting
one another when there are two identically named source files in a
transfer. See
https://github.com/artefactual/archivematica-fpr-admin/issues/66
"""
FPCommand = apps.get_model('fpr', 'FPCommand')
ocr_command = FPCommand.objects.get(
uuid='4ea06c2b-ee42-4f80-ad10-4e044ba0676a')
ocr_command.command = (
'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n'
'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n'
'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"')
ocr_command.output_location = (
'%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt')
ocr_command.save()
class Migration(migrations.Migration):
dependencies = [
('fpr', '0016_update_idtools'),
]
operations = [
migrations.RunPython(data_migration),
]
| Fix OCR command UUID typo | Fix OCR command UUID typo
| Python | agpl-3.0 | artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin | ---
+++
@@ -12,9 +12,9 @@
transfer. See
https://github.com/artefactual/archivematica-fpr-admin/issues/66
"""
- IDCommand = apps.get_model('fpr', 'IDCommand')
- ocr_command = IDCommand.objects.get(
- uuid='5d501dbf-76bb-4569-a9db-9e367800995e')
+ FPCommand = apps.get_model('fpr', 'FPCommand')
+ ocr_command = FPCommand.objects.get(
+ uuid='4ea06c2b-ee42-4f80-ad10-4e044ba0676a')
ocr_command.command = (
'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n'
'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n' |
8c49359a79d815cc21acbd58adc36c52d75e20b7 | dash2012/auth/views.py | dash2012/auth/views.py | from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.core.urlresolvers import reverse
from django.contrib.auth import authenticate, login as auth_login, logout as auth_logout
from django.contrib.auth.decorators import login_required
from cloudfish.models import Cloud
def login(r):
if r.POST:
username = r.POST['username']
password = r.POST['password']
user = authenticate(username=username, password=password)
if user is not None:
auth_login(r, user)
if not Cloud.objects.filter(account=user).exists():
return HttpResponseRedirect(reverse('connect-view'))
return HttpResponseRedirect(reverse('myservers-view'))
return render(r, 'auth.html')
def logout(request):
auth_logout(request)
return HttpResponseRedirect(reverse('index-view'))
@login_required
def connect(request):
return render(request, 'connect.html')
| from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.core.urlresolvers import reverse
from django.contrib.auth import authenticate, login as auth_login, logout as auth_logout
from django.contrib.auth.decorators import login_required
from cloudfish.models import Cloud
def login(r):
c = {}
if r.POST:
username = r.POST['username']
password = r.POST['password']
user = authenticate(username=username, password=password)
if user is not None:
auth_login(r, user)
if not Cloud.objects.filter(account=user).exists():
return HttpResponseRedirect(reverse('connect-view'))
return HttpResponseRedirect(reverse('myservers-view'))
c['errors'] = "Login failed, please try again"
return render(r, 'auth.html', c)
def logout(request):
auth_logout(request)
return HttpResponseRedirect(reverse('index-view'))
@login_required
def connect(request):
return render(request, 'connect.html')
| Add login failed flash message | Add login failed flash message
| Python | bsd-3-clause | losmiserables/djangodash2012,losmiserables/djangodash2012 | ---
+++
@@ -7,6 +7,7 @@
def login(r):
+ c = {}
if r.POST:
username = r.POST['username']
password = r.POST['password']
@@ -19,7 +20,8 @@
return HttpResponseRedirect(reverse('myservers-view'))
- return render(r, 'auth.html')
+ c['errors'] = "Login failed, please try again"
+ return render(r, 'auth.html', c)
def logout(request): |
e170a96859232d1436930be7a0cbfc7f2295c8a7 | main.py | main.py | from twisted.internet import reactor
from twisted.web import server, resource
from teiler.server import FileServerResource
from teiler.client import FileRequestResource
import sys
from twisted.python import log
class HelloResource(resource.Resource):
isLeaf = False
numberRequests = 0
def render_GET(self, request):
self.numberRequests += 1
request.setHeader("content-type", "text/plain")
return "I am request #" + str(self.numberRequests) + "\n"
if __name__ == '__main__':
log.startLogging(sys.stdout)
filesServed = []
transferRequests = []
downloadDirectory = "."
root = resource.Resource()
root.putChild('', HelloResource())
root.putChild('files', FileServerResource(filesServed))
root.putChild('requests', FileRequestResource(transferRequests,
downloadDirectory))
reactor.listenTCP(8080, server.Site(root))
reactor.run()
| from twisted.internet import reactor
from twisted.web import server, resource
from teiler.server import FileServerResource
from teiler.client import FileRequestResource
import sys
from twisted.python import log
class HelloResource(resource.Resource):
isLeaf = False
numberRequests = 0
def render_GET(self, request):
request.setHeader("content-type", "text/plain")
return "Welcome to teiler\n"
if __name__ == '__main__':
log.startLogging(sys.stdout)
filesServed = []
transferRequests = []
downloadDirectory = "."
root = resource.Resource()
root.putChild('', HelloResource())
root.putChild('files', FileServerResource(filesServed))
root.putChild('requests', FileRequestResource(transferRequests,
downloadDirectory))
reactor.listenTCP(8080, server.Site(root))
reactor.run()
| Set root resource to welcome | Set root resource to welcome
| Python | mit | derwolfe/teiler,derwolfe/teiler | ---
+++
@@ -12,9 +12,8 @@
numberRequests = 0
def render_GET(self, request):
- self.numberRequests += 1
request.setHeader("content-type", "text/plain")
- return "I am request #" + str(self.numberRequests) + "\n"
+ return "Welcome to teiler\n"
if __name__ == '__main__':
log.startLogging(sys.stdout) |
73b6a84cfc0ccc20d04c3dd80c3e505cd118be4d | nsfw.py | nsfw.py | import random
from discord.ext import commands
from lxml import etree
class NSFW:
def __init__(self, bot):
self.bot = bot
@commands.command(aliases=['gel'])
async def gelbooru(self, ctx, *, tags):
async with ctx.typing():
entries = []
url = 'http://gelbooru.com/index.php'
params = {'page': 'dapi',
's': 'post',
'q': 'index',
'tags': tags}
async with self.bot.session.get(url, params=params) as resp:
root = etree.fromstring((await resp.text()).encode(),
etree.HTMLParser())
search_nodes = root.findall(".//post")
for node in search_nodes:
image = next((item[1] for item in node.items()
if item[0] == 'file_url'), None)
if image is not None:
entries.append(image)
try:
message = f'http:{random.choice(entries)}'
except IndexError:
message = 'No images found.'
await ctx.send(message)
@commands.command(hidden=True)
async def massage(self, ctx, *, tags=''):
await ctx.invoke(self.gelbooru, tags='massage ' + tags)
def setup(bot):
bot.add_cog(NSFW(bot))
| import random
from discord.ext import commands
from lxml import etree
class NSFW:
def __init__(self, bot):
self.bot = bot
@commands.command(aliases=['gel'], hidden=True)
async def gelbooru(self, ctx, *, tags):
async with ctx.typing():
entries = []
url = 'http://gelbooru.com/index.php'
params = {'page': 'dapi',
's': 'post',
'q': 'index',
'tags': tags}
async with self.bot.session.get(url, params=params) as resp:
root = etree.fromstring((await resp.text()).encode(),
etree.HTMLParser())
search_nodes = root.findall(".//post")
for node in search_nodes:
image = next((item[1] for item in node.items()
if item[0] == 'file_url'), None)
if image is not None:
entries.append(image)
try:
message = f'http:{random.choice(entries)}'
except IndexError:
message = 'No images found.'
await ctx.send(message)
@commands.command(hidden=True)
async def massage(self, ctx, *, tags=''):
await ctx.invoke(self.gelbooru, tags='massage ' + tags)
def setup(bot):
bot.add_cog(NSFW(bot))
| Make command invisible by default | Make command invisible by default
| Python | mit | BeatButton/beattie-bot,BeatButton/beattie | ---
+++
@@ -8,7 +8,7 @@
def __init__(self, bot):
self.bot = bot
- @commands.command(aliases=['gel'])
+ @commands.command(aliases=['gel'], hidden=True)
async def gelbooru(self, ctx, *, tags):
async with ctx.typing():
entries = [] |
22e6cce28da8a700bd4cd45aa47913aaff559a9d | functional_tests/management/commands/create_testrecipe.py | functional_tests/management/commands/create_testrecipe.py | from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management import call_command
import random
import string
from recipes.models import Recipe
class Command(BaseCommand):
def handle(self, *args, **options):
r = Recipe(name=''.join(random.choice(string.ascii_letters) for _
in range(10)), description='description')
r.save()
| import datetime
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management import call_command
import random
import string
from recipes.models import Recipe
class Command(BaseCommand):
def handle(self, *args, **options):
r = Recipe(name=''.join(random.choice(string.ascii_letters) for _
in range(10)), description='description')
r.save()
r.add_date = datetime.date.today() - datetime.timedelta(days=2)
r.save()
| Make sure that recipes created by the command show up | Make sure that recipes created by the command show up
| Python | agpl-3.0 | XeryusTC/rotd,XeryusTC/rotd,XeryusTC/rotd | ---
+++
@@ -1,3 +1,4 @@
+import datetime
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.management import call_command
@@ -10,3 +11,5 @@
r = Recipe(name=''.join(random.choice(string.ascii_letters) for _
in range(10)), description='description')
r.save()
+ r.add_date = datetime.date.today() - datetime.timedelta(days=2)
+ r.save() |
f96990118d51b56ad438a8efbf2a7f83ec0f3c63 | conference_scheduler/tests/test_parameters.py | conference_scheduler/tests/test_parameters.py | from conference_scheduler import parameters
def test_variables(shape):
X = parameters.variables(shape)
assert len(X) == 21
def test_schedule_all_events(shape, X):
constraints = [c for c in parameters._schedule_all_events(shape, X)]
assert len(constraints) == 3
def test_max_one_event_per_slot(shape, X):
constraints = [c for c in parameters._max_one_event_per_slot(shape, X)]
assert len(constraints) == 7
def test_constraints(shape, X):
constraints = [c for c in parameters.constraints(shape, X)]
assert len(constraints) == 10
| from conference_scheduler import parameters
import numpy as np
def test_tags(events):
tags = parameters.tags(events)
assert np.array_equal(tags, np.array([[1, 0], [1, 1], [0, 1]]))
def test_variables(shape):
X = parameters.variables(shape)
assert len(X) == 21
def test_schedule_all_events(shape, X):
constraints = [c for c in parameters._schedule_all_events(shape, X)]
assert len(constraints) == 3
def test_max_one_event_per_slot(shape, X):
constraints = [c for c in parameters._max_one_event_per_slot(shape, X)]
assert len(constraints) == 7
def test_constraints(shape, X):
constraints = [c for c in parameters.constraints(shape, X)]
assert len(constraints) == 10
| Add failing test to function to build tags matrix. | Add failing test to function to build tags matrix.
| Python | mit | PyconUK/ConferenceScheduler | ---
+++
@@ -1,5 +1,9 @@
from conference_scheduler import parameters
+import numpy as np
+def test_tags(events):
+ tags = parameters.tags(events)
+ assert np.array_equal(tags, np.array([[1, 0], [1, 1], [0, 1]]))
def test_variables(shape):
X = parameters.variables(shape) |
24e65db624221d559f46ce74d88ad28ec970d754 | profile_collection/startup/00-startup.py | profile_collection/startup/00-startup.py | import logging
session_mgr._logger.setLevel(logging.INFO)
from dataportal import (DataBroker as db,
StepScan as ss, DataBroker,
StepScan, DataMuxer)
from bluesky.standard_config import *
from ophyd.commands import *
gs.RE.md['config'] = {}
gs.RE.md['owner'] = 'xf28id1'
gs.RE.md['group'] = 'XPD'
gs.RE.md['beamline_id'] = 'xpd'
| import logging
session_mgr._logger.setLevel(logging.INFO)
from dataportal import (DataBroker as db,
StepScan as ss, DataBroker,
StepScan, DataMuxer)
from bluesky.standard_config import *
from ophyd.commands import *
gs.RE.md['config'] = {}
gs.RE.md['owner'] = 'xf28id1'
gs.RE.md['group'] = 'XPD'
gs.RE.md['beamline_id'] = 'xpd'
import bluesky.qt_kicker
bluesky.qt_kicker.install_qt_kicker()
| Update bluesky's API to the qt_kicker. | Update bluesky's API to the qt_kicker.
| Python | bsd-2-clause | NSLS-II-XPD/ipython_ophyd,pavoljuhas/ipython_ophyd,pavoljuhas/ipython_ophyd,NSLS-II-XPD/ipython_ophyd | ---
+++
@@ -12,3 +12,8 @@
gs.RE.md['owner'] = 'xf28id1'
gs.RE.md['group'] = 'XPD'
gs.RE.md['beamline_id'] = 'xpd'
+
+
+import bluesky.qt_kicker
+bluesky.qt_kicker.install_qt_kicker()
+ |
54b2a6953a4da2b217052d166ad1f069f683b9ee | scripts/nomenclature/nomenclature_map.py | scripts/nomenclature/nomenclature_map.py | """Create a map from the input binomials to their ITIS accepted synonyms.
"""
import pandas as pd
itis_results = pd.read_csv("search_result.csv", encoding = "ISO-8859-1")
| """Create a map from the input binomials to their ITIS accepted synonyms.
"""
import pandas as pd
from PyFloraBook.in_out.data_coordinator import locate_nomenclature_folder
# Globals
INPUT_FILE_NAME = "search_results.csv"
# Input
nomenclature_folder = locate_nomenclature_folder()
itis_results = pd.read_csv(
str(nomenclature_folder / INPUT_FILE_NAME), encoding="ISO-8859-1")
| Implement locator in nomenclature map | Implement locator in nomenclature map
| Python | mit | jnfrye/local_plants_book | ---
+++
@@ -2,4 +2,13 @@
"""
import pandas as pd
-itis_results = pd.read_csv("search_result.csv", encoding = "ISO-8859-1")
+from PyFloraBook.in_out.data_coordinator import locate_nomenclature_folder
+
+
+# Globals
+INPUT_FILE_NAME = "search_results.csv"
+
+# Input
+nomenclature_folder = locate_nomenclature_folder()
+itis_results = pd.read_csv(
+ str(nomenclature_folder / INPUT_FILE_NAME), encoding="ISO-8859-1") |
b77e39b21a326655a04dbd15fcacfd2cc57a6008 | core/emails.py | core/emails.py | from django.core.mail import EmailMessage
from django.template.loader import render_to_string
def notify_existing_user(user, event):
""" Sends e-mail to existing organizer, that they're added
to the new Event.
"""
content = render_to_string('emails/existing_user.html', {
'user': user,
'event': event
})
subject = 'You have been granted access to new Django Girls event'
send_email(content, subject, user)
def notify_new_user(user, event, password):
""" Sends e-mail to newly created organizer that their account was created
and that they were added to the Event.
"""
content = render_to_string('emails/new_user.html', {
'user': user,
'event': event,
'password': password,
})
subject = 'Access to Django Girls website'
send_email(content, subject, user)
def send_email(user, content, subject):
msg = EmailMessage(subject,
content,
"Django Girls <[email protected]>",
[user.email])
msg.content_subtype = "html"
msg.send()
| from django.core.mail import EmailMessage
from django.template.loader import render_to_string
def notify_existing_user(user, event):
""" Sends e-mail to existing organizer, that they're added
to the new Event.
"""
content = render_to_string('emails/existing_user.html', {
'user': user,
'event': event
})
subject = 'You have been granted access to new Django Girls event'
send_email(content, subject, user)
def notify_new_user(user, event, password):
""" Sends e-mail to newly created organizer that their account was created
and that they were added to the Event.
"""
content = render_to_string('emails/new_user.html', {
'user': user,
'event': event,
'password': password,
})
subject = 'Access to Django Girls website'
send_email(content, subject, user)
def send_email(content, subject, user):
msg = EmailMessage(subject,
content,
"Django Girls <[email protected]>",
[user.email])
msg.content_subtype = "html"
msg.send()
| Fix broken order of arguments in send_email | Fix broken order of arguments in send_email
Ticket #342
| Python | bsd-3-clause | patjouk/djangogirls,patjouk/djangogirls,patjouk/djangogirls,DjangoGirls/djangogirls,patjouk/djangogirls,DjangoGirls/djangogirls,DjangoGirls/djangogirls | ---
+++
@@ -27,7 +27,7 @@
send_email(content, subject, user)
-def send_email(user, content, subject):
+def send_email(content, subject, user):
msg = EmailMessage(subject,
content,
"Django Girls <[email protected]>", |
96ef78f53b7762782491bdd635a2c2dd1d6c75d2 | dimod/package_info.py | dimod/package_info.py | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.10'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = '[email protected]'
__description__ = 'A shared API for binary quadratic model samplers.'
| # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
__version__ = '0.7.11'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = '[email protected]'
__description__ = 'A shared API for binary quadratic model samplers.'
| Update version 0.7.10 -> 0.7.11 | Update version 0.7.10 -> 0.7.11 | Python | apache-2.0 | dwavesystems/dimod,dwavesystems/dimod | ---
+++
@@ -14,7 +14,7 @@
#
# ================================================================================================
-__version__ = '0.7.10'
+__version__ = '0.7.11'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = '[email protected]'
__description__ = 'A shared API for binary quadratic model samplers.' |
5bdbdfa94d06065c0536b684e6694b94ad80047b | authentication/authentication.py | authentication/authentication.py | from flask import Flask, jsonify, request
from requests import codes
app = Flask(__name__)
@app.route('/login', methods=['POST'])
def login():
username = request.form['email']
password = request.form['password']
response_content = {'email': username, 'password': password}
return jsonify(response_content), codes.OK
@app.route('/signup', methods=['POST'])
def signup():
username = request.form['email']
password = request.form['password']
response_content = {'email': username, 'password': password}
return jsonify(response_content), codes.CREATED
if __name__ == '__main__':
# Specifying 0.0.0.0 as the host tells the operating system to listen on
# all public IPs. This makes the server visible externally.
# See http://flask.pocoo.org/docs/0.10/quickstart/#a-minimal-application
app.run(host='0.0.0.0', debug=True)
| from flask import Flask, jsonify, request
from requests import codes
app = Flask(__name__)
@app.route('/login', methods=['POST'])
def login():
email = request.form['email']
password = request.form['password']
response_content = {'email': email, 'password': password}
return jsonify(response_content), codes.OK
@app.route('/signup', methods=['POST'])
def signup():
email = request.form['email']
password = request.form['password']
user = User(email=email, password=password)
response_content = {'email': email, 'password': password}
return jsonify(response_content), codes.CREATED
if __name__ == '__main__':
# Specifying 0.0.0.0 as the host tells the operating system to listen on
# all public IPs. This makes the server visible externally.
# See http://flask.pocoo.org/docs/0.10/quickstart/#a-minimal-application
app.run(host='0.0.0.0', debug=True)
| Use email variable name where appropriate | Use email variable name where appropriate
| Python | mit | jenca-cloud/jenca-authentication | ---
+++
@@ -5,16 +5,17 @@
@app.route('/login', methods=['POST'])
def login():
- username = request.form['email']
+ email = request.form['email']
password = request.form['password']
- response_content = {'email': username, 'password': password}
+ response_content = {'email': email, 'password': password}
return jsonify(response_content), codes.OK
@app.route('/signup', methods=['POST'])
def signup():
- username = request.form['email']
+ email = request.form['email']
password = request.form['password']
- response_content = {'email': username, 'password': password}
+ user = User(email=email, password=password)
+ response_content = {'email': email, 'password': password}
return jsonify(response_content), codes.CREATED
if __name__ == '__main__': |
badddd6aa9533a01e07477174dc7422ee4941014 | wsgi.py | wsgi.py | # Yith Library Server is a password storage server.
# Copyright (C) 2015 Lorenzo Gil Sanchez <[email protected]>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
from newrelic import agent
agent.initialize()
from paste.deploy import loadapp
from raven.middleware import Sentry
application = loadapp('config:production.ini',
relative_to='yithlibraryserver/config-templates')
application = agent.WSGIApplicationWrapper(Sentry(application))
| # Yith Library Server is a password storage server.
# Copyright (C) 2015 Lorenzo Gil Sanchez <[email protected]>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
import os
import os.path
from newrelic import agent
agent.initialize()
from paste.deploy import loadapp
from pyramid.paster import setup_logging
from raven.middleware import Sentry
from waitress import serve
basedir= os.path.dirname(os.path.realpath(__file__))
conf_file = os.path.join(
basedir,
'yithlibraryserver', 'config-templates', 'production.ini'
)
application = loadapp('config:%s' % conf_file)
application = agent.WSGIApplicationWrapper(Sentry(application))
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
scheme = os.environ.get("SCHEME", "https")
setup_logging(conf_file)
serve(application, host='0.0.0.0', port=port, url_scheme=scheme)
| Read the conf file using absolute paths | Read the conf file using absolute paths
| Python | agpl-3.0 | lorenzogil/yith-library-server,lorenzogil/yith-library-server,lorenzogil/yith-library-server | ---
+++
@@ -16,12 +16,28 @@
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
+import os
+import os.path
+
from newrelic import agent
agent.initialize()
from paste.deploy import loadapp
+from pyramid.paster import setup_logging
from raven.middleware import Sentry
+from waitress import serve
-application = loadapp('config:production.ini',
- relative_to='yithlibraryserver/config-templates')
+basedir= os.path.dirname(os.path.realpath(__file__))
+conf_file = os.path.join(
+ basedir,
+ 'yithlibraryserver', 'config-templates', 'production.ini'
+)
+
+application = loadapp('config:%s' % conf_file)
application = agent.WSGIApplicationWrapper(Sentry(application))
+
+if __name__ == "__main__":
+ port = int(os.environ.get("PORT", 5000))
+ scheme = os.environ.get("SCHEME", "https")
+ setup_logging(conf_file)
+ serve(application, host='0.0.0.0', port=port, url_scheme=scheme) |
a6300723150d7d1ff9a58f4f3f1297e0fe2c6f78 | css_updater/git/manager.py | css_updater/git/manager.py | """manages github repos"""
import os
import tempfile
from typing import Dict, Any
import pygit2 as git
from .webhook.handler import Handler
class Manager(object):
"""handles git repos"""
def __init__(self: Manager, handler: Handler) -> None:
self.webhook_handler: Handler = handler
self.temp_dir: tempfile.TemporaryDirectory = tempfile.TemporaryDirectory()
self.repo: git.Repository = git.clone_repository(
self.webhook_handler.git_url, path=self.temp_dir.name)
with open(os.path.join(self.temp_dir.name, "css-updater.json")) as config:
import json
self.config: Dict[str, Any] = json.loads(config.read())
def __del__(self: Manager) -> None:
self.temp_dir.cleanup()
| """manages github repos"""
import os
import tempfile
from typing import Dict, Any
import pygit2 as git
from .webhook.handler import Handler
class Manager(object):
"""handles git repos"""
def __init__(self: Manager, handler: Handler) -> None:
self.webhook_handler: Handler = handler
self.temp_dir: tempfile.TemporaryDirectory = tempfile.TemporaryDirectory()
self.repo: git.Repository = git.clone_repository(
self.webhook_handler.git_url, path=self.temp_dir.name)
with open(os.path.join(self.temp_dir.name, "css-updater.json")) as config:
import json
try:
self.config: Dict[str, Any] = json.loads(config.read())["css_updater"]
except KeyError as invalid_json:
print(invalid_json)
except IOError as io_error:
print(io_error)
def __del__(self: Manager) -> None:
self.temp_dir.cleanup()
| Check for errors in config | Check for errors in config
| Python | mit | neoliberal/css-updater | ---
+++
@@ -19,7 +19,12 @@
with open(os.path.join(self.temp_dir.name, "css-updater.json")) as config:
import json
- self.config: Dict[str, Any] = json.loads(config.read())
+ try:
+ self.config: Dict[str, Any] = json.loads(config.read())["css_updater"]
+ except KeyError as invalid_json:
+ print(invalid_json)
+ except IOError as io_error:
+ print(io_error)
def __del__(self: Manager) -> None:
self.temp_dir.cleanup() |
3d64eb4a7438b6b4f46f1fdf7f47d530cb11b09c | spacy/tests/regression/test_issue2396.py | spacy/tests/regression/test_issue2396.py | # coding: utf-8
from __future__ import unicode_literals
from ..util import get_doc
import pytest
import numpy
@pytest.mark.parametrize('sentence,matrix', [
(
'She created a test for spacy',
numpy.array([
[0, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1],
[1, 1, 2, 3, 3, 3],
[1, 1, 3, 3, 3, 3],
[1, 1, 3, 3, 4, 4],
[1, 1, 3, 3, 4, 5]], dtype=numpy.int32)
)
])
def test_issue2396(EN, sentence, matrix):
doc = EN(sentence)
span = doc[:]
assert (doc.get_lca_matrix() == matrix).all()
assert (span.get_lca_matrix() == matrix).all()
| # coding: utf-8
from __future__ import unicode_literals
from ..util import get_doc
import pytest
import numpy
from numpy.testing import assert_array_equal
@pytest.mark.parametrize('words,heads,matrix', [
(
'She created a test for spacy'.split(),
[1, 0, 1, -2, -1, -1],
numpy.array([
[0, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1],
[1, 1, 2, 3, 3, 3],
[1, 1, 3, 3, 3, 3],
[1, 1, 3, 3, 4, 4],
[1, 1, 3, 3, 4, 5]], dtype=numpy.int32)
)
])
def test_issue2396(en_vocab, words, heads, matrix):
doc = get_doc(en_vocab, words=words, heads=heads)
span = doc[:]
assert_array_equal(doc.get_lca_matrix(), matrix)
assert_array_equal(span.get_lca_matrix(), matrix)
| Update get_lca_matrix test for develop | Update get_lca_matrix test for develop
| Python | mit | explosion/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,honnibal/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy | ---
+++
@@ -5,10 +5,13 @@
import pytest
import numpy
+from numpy.testing import assert_array_equal
[email protected]('sentence,matrix', [
+
[email protected]('words,heads,matrix', [
(
- 'She created a test for spacy',
+ 'She created a test for spacy'.split(),
+ [1, 0, 1, -2, -1, -1],
numpy.array([
[0, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 1],
@@ -18,10 +21,11 @@
[1, 1, 3, 3, 4, 5]], dtype=numpy.int32)
)
])
-def test_issue2396(EN, sentence, matrix):
- doc = EN(sentence)
+def test_issue2396(en_vocab, words, heads, matrix):
+ doc = get_doc(en_vocab, words=words, heads=heads)
+
span = doc[:]
- assert (doc.get_lca_matrix() == matrix).all()
- assert (span.get_lca_matrix() == matrix).all()
+ assert_array_equal(doc.get_lca_matrix(), matrix)
+ assert_array_equal(span.get_lca_matrix(), matrix)
|
61b96e4f925831e64e80d0253428dbd1b2c8a6a4 | app/grandchallenge/annotations/validators.py | app/grandchallenge/annotations/validators.py | from rest_framework import serializers
from django.conf import settings
def validate_grader_is_current_retina_user(grader, context):
"""
This method checks if the passed grader equals the request.user that is passed in the context.
Only applies to users that are in the retina_graders group.
"""
request = context.get("request")
if (
request is not None
and request.user is not None
and request.user.is_authenticated
):
user = request.user
if user.groups.filter(
name=settings.RETINA_GRADERS_GROUP_NAME
).exists():
if grader != user:
raise serializers.ValidationError(
"User is not allowed to create annotation for other grader"
)
| from rest_framework import serializers
from django.conf import settings
def validate_grader_is_current_retina_user(grader, context):
"""
This method checks if the passed grader equals the request.user that is passed in the context.
Only applies to users that are in the retina_graders group.
BEWARE! Validation will pass if user is not logged in or request or request.user is not defined
"""
request = context.get("request")
if (
request is not None
and request.user is not None
and request.user.is_authenticated
):
user = request.user
if user.groups.filter(
name=settings.RETINA_GRADERS_GROUP_NAME
).exists():
if grader != user:
raise serializers.ValidationError(
"User is not allowed to create annotation for other grader"
)
| Add comment to clarify validation method usage | Add comment to clarify validation method usage
| Python | apache-2.0 | comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django | ---
+++
@@ -6,6 +6,7 @@
"""
This method checks if the passed grader equals the request.user that is passed in the context.
Only applies to users that are in the retina_graders group.
+ BEWARE! Validation will pass if user is not logged in or request or request.user is not defined
"""
request = context.get("request")
if ( |