commit (string, 40 chars) | old_file (string, 4-264 chars) | new_file (string, 4-264 chars) | old_contents (string, 0-3.26k chars) | new_contents (string, 1-4.43k chars) | subject (string, 15-624 chars) | message (string, 15-4.7k chars) | lang (3 classes) | license (13 classes) | repos (string, 5-91.5k chars) |
---|---|---|---|---|---|---|---|---|---|
edc5564d4c3677dc8b545e9c9a6a51b481247eab | contentcuration/contentcuration/tests/test_makemessages.py | contentcuration/contentcuration/tests/test_makemessages.py | import os
import subprocess
import pathlib
from django.conf import settings
from django.test import TestCase
class MakeMessagesCommandRunTestCase(TestCase):
"""
Sanity check to make sure makemessages runs to completion.
"""
def test_command_succeeds_without_postgres(self):
"""
Test that we can run makemessages when postgres is not activated.
"""
# this test can make changes to committed files, so only run it
# on the CI server
if 'CI' not in os.environ or not os.environ['CI']:
return
repo_root = pathlib.Path(settings.BASE_DIR).parent
cmd = ["make", "makemessages"]
env = os.environ.copy()
# We fake postgres not being available, by setting the wrong IP address.
# hopefully postgres isn't running at 127.0.0.2!
env.update({"DATA_DB_HOST": "127.0.0.2"})
subprocess.check_output(
cmd,
env=env,
cwd=str(repo_root)
)
| import os
import subprocess
import pathlib
import pytest
from django.conf import settings
from django.test import TestCase
class MakeMessagesCommandRunTestCase(TestCase):
"""
Sanity check to make sure makemessages runs to completion.
"""
# this test can make changes to committed files, so only run it
# on the CI server
@pytest.mark.skipif('CI' not in os.environ or not os.environ['CI'], reason="runs only on CI server")
def test_command_succeeds_without_postgres(self):
"""
Test that we can run makemessages when postgres is not activated.
"""
repo_root = pathlib.Path(settings.BASE_DIR).parent
cmd = ["make", "makemessages"]
env = os.environ.copy()
# We fake postgres not being available, by setting the wrong IP address.
# hopefully postgres isn't running at 127.0.0.2!
env.update({"DATA_DB_HOST": "127.0.0.2"})
subprocess.check_output(
cmd,
env=env,
cwd=str(repo_root)
)
| Use pytest.skip so we can check the test wasn't skipped on the CI. | Use pytest.skip so we can check the test wasn't skipped on the CI.
| Python | mit | DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation |
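The row above replaces an early `return` with a decorator-based skip. For reference, a minimal self-contained sketch of that `pytest.mark.skipif` pattern (the marker and test names here are illustrative, not from the repository); the truthiness check is equivalent to the `'CI' not in os.environ or not os.environ['CI']` condition used above:

```python
import os

import pytest

# Decorator form of the guard used above: the test is still collected, but
# reported as "skipped" (with the reason) instead of silently passing.
requires_ci = pytest.mark.skipif(
    not os.environ.get("CI"),
    reason="runs only on CI server",
)


@requires_ci
def test_runs_only_on_ci():
    assert True
```

This is what makes the commit's stated goal possible: a skipped test shows up as skipped in pytest's report, whereas a bare `return` shows up as a pass, so CI can verify the test was not silently skipped.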
c3f8069435f0f1c09c00ed6dba2e4f3bdb7ab91b | grow/testing/testdata/pod/extensions/preprocessors.py | grow/testing/testdata/pod/extensions/preprocessors.py | from grow import Preprocessor
from protorpc import messages
class CustomPreprocessor(Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
| from grow import Preprocessor
from protorpc import messages
class CustomPreprocessor(Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
| Update extension testdata to take **kwargs. | Update extension testdata to take **kwargs.
| Python | mit | grow/grow,grow/pygrow,denmojo/pygrow,grow/pygrow,grow/grow,denmojo/pygrow,denmojo/pygrow,grow/pygrow,denmojo/pygrow,grow/grow,grow/grow |
e29b1f6243fb7f9d2322b80573617ff9a0582d01 | pinax/blog/parsers/markdown_parser.py | pinax/blog/parsers/markdown_parser.py | from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE
from ..models import Image
class ImageLookupImagePattern(ImagePattern):
def sanitize_url(self, url):
if url.startswith("http"):
return url
else:
try:
image = Image.objects.get(pk=int(url))
return image.image_path.url
except Image.DoesNotExist:
pass
except ValueError:
return url
return ""
def parse(text):
md = Markdown(extensions=["codehilite"])
md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
html = md.convert(text)
return html
| from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE
from ..models import Image
class ImageLookupImagePattern(ImagePattern):
def sanitize_url(self, url):
if url.startswith("http"):
return url
else:
try:
image = Image.objects.get(pk=int(url))
return image.image_path.url
except Image.DoesNotExist:
pass
except ValueError:
return url
return ""
def parse(text):
md = Markdown(extensions=["codehilite", "tables", "smarty", "admonition", "toc"])
md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
html = md.convert(text)
return html
| Add some extensions to the markdown parser | Add some extensions to the markdown parser
Ultimately we should make this a setting or hookset so it could be overridden at the site level. | Python | mit | swilcox/pinax-blog,pinax/pinax-blog,miurahr/pinax-blog,miurahr/pinax-blog,swilcox/pinax-blog,easton402/pinax-blog,pinax/pinax-blog,pinax/pinax-blog,easton402/pinax-blog |
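The message above suggests turning the hard-coded extension list into a setting. One possible shape for that follow-up, sketched against Django's settings machinery; the setting name `PINAX_BLOG_MARKDOWN_EXTENSIONS` is hypothetical, not taken from the project:

```python
from django.conf import settings
from markdown import Markdown

# Defaults mirror the list added in the commit above; a site could
# override them via the (hypothetical) setting below.
DEFAULT_EXTENSIONS = ["codehilite", "tables", "smarty", "admonition", "toc"]


def build_markdown():
    extensions = getattr(settings, "PINAX_BLOG_MARKDOWN_EXTENSIONS",
                         DEFAULT_EXTENSIONS)
    return Markdown(extensions=extensions)
```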
044e55544529aa8eb3a755428d990f0400403687 | xunit-autolabeler-v2/ast_parser/core/test_data/parser/exclude_tags/exclude_tags_main.py | xunit-autolabeler-v2/ast_parser/core/test_data/parser/exclude_tags/exclude_tags_main.py | # Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START main_method]
def included():
return 'included method one'
# [START_EXCLUDE]
def also_included():
return 'also included method'
# [END_EXCLUDE]
# [END main_method]
| # Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START included]
def included():
return 'included method one'
# [START_EXCLUDE]
def also_included():
return 'also included method'
# [END_EXCLUDE]
# [END included]
| Fix stepping on other tests >:( | Fix stepping on other tests >:(
| Python | apache-2.0 | GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground |
606b2b6c84e9f9f67606a4d7e521cf4805855a98 | migrations/versions/0311_populate_returned_letters.py | migrations/versions/0311_populate_returned_letters.py | """
Revision ID: 0311_populate_returned_letters
Revises: 0310_returned_letters_table
Create Date: 2019-12-09 12:13:49.432993
"""
from alembic import op
from app.dao.returned_letters_dao import insert_or_update_returned_letters
revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'
def upgrade():
conn = op.get_bind()
sql = """
select id, service_id, reference
from notification_history
where notification_type = 'letter'
and notification_status = 'returned-letter'"""
results = conn.execute(sql)
returned_letters = results.fetchall()
references = [x.reference for x in returned_letters]
insert_or_update_returned_letters(references)
def downgrade():
pass
| """
Revision ID: 0311_populate_returned_letters
Revises: 0310_returned_letters_table
Create Date: 2019-12-09 12:13:49.432993
"""
from alembic import op
revision = '0311_populate_returned_letters'
down_revision = '0310_returned_letters_table'
def upgrade():
conn = op.get_bind()
sql = """
select id, service_id, reference, updated_at
from notification_history
where notification_type = 'letter'
and notification_status = 'returned-letter'"""
insert_sql = """
insert into returned_letters(id, reported_at, service_id, notification_id, created_at, updated_at)
values(uuid_in(md5(random()::text)::cstring), '{}', '{}', '{}', now(), null)
"""
results = conn.execute(sql)
returned_letters = results.fetchall()
for x in returned_letters:
f = insert_sql.format(x.updated_at.date(), x.service_id, x.id)
conn.execute(f)
def downgrade():
pass
| Change the insert to use updated_at as the reported_at date | Change the insert to use updated_at as the reported_at date
| Python | mit | alphagov/notifications-api,alphagov/notifications-api |
853d2907432a8d7fbedbed12ff28efbe520d4c80 | project_euler/library/number_theory/continued_fractions.py | project_euler/library/number_theory/continued_fractions.py | from fractions import Fraction
from math import sqrt
from itertools import chain, cycle
from typing import Generator, Iterable, List, Tuple
def convergent_sequence(generator: Iterable[int]) -> \
Generator[Fraction, None, None]:
h = (0, 1)
k = (1, 0)
for a in generator:
h = h[1], a * h[1] + h[0]
k = k[1], a * k[1] + k[0]
yield Fraction(h[-1], k[-1])
def continued_fraction_sqrt(n: int) -> Tuple[List[int], List[int]]:
remainders = []
continued_fraction = []
remainder = (Fraction(1), Fraction(0)) # remainder is sqrt(n) + 0.
sqrt_n = sqrt(n)
while remainder not in remainders:
remainders.append(remainder)
a = int(remainder[0] * sqrt_n + remainder[1])
continued_fraction.append(a)
norm = (remainder[1] - a) ** 2 - remainder[0] ** 2 * n
remainder = (-remainder[0] / norm, (remainder[1] - a) / norm)
index = remainders.index(remainder)
return continued_fraction[:index], continued_fraction[index:]
def convergents_sqrt(n: int) -> Generator[Fraction, None, None]:
initial, repeat = continued_fraction_sqrt(n)
convergents = convergent_sequence(chain(initial, cycle(repeat)))
yield from convergents
| from fractions import Fraction
from math import sqrt
from itertools import chain, cycle
from typing import Generator, Iterable, List, Tuple
from .gcd import gcd
from ..sqrt import fsqrt
def convergent_sequence(generator: Iterable[int]) -> \
Generator[Fraction, None, None]:
h = (0, 1)
k = (1, 0)
for a in generator:
h = h[1], a * h[1] + h[0]
k = k[1], a * k[1] + k[0]
yield Fraction(h[-1], k[-1])
def continued_fraction_sqrt(n: int) -> Tuple[List[int], List[int]]:
sqrt_n = sqrt(n)
remainders = []
remainder = (0, 1)
# remainder is an + (sqrt(n) - p) / q and these are initial.
continued_fraction = []
while remainder not in remainders:
remainders.append(remainder)
p, q = remainder
q = (n - (p * p)) // q
a = int((sqrt_n + p) / q)
p = a * q - p
continued_fraction.append(a)
remainder = (p, q)
index = remainders.index(remainder)
return continued_fraction[1:index], continued_fraction[index:]
def convergents_sqrt(n: int) -> Generator[Fraction, None, None]:
initial, repeat = continued_fraction_sqrt(n)
convergents = convergent_sequence(chain(initial, cycle(repeat)))
yield from convergents
| Make continued fractions sqrt much faster | Make continued fractions sqrt much faster
| Python | mit | cryvate/project-euler,cryvate/project-euler |
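The speedup above comes from carrying the remainder as an integer pair `(p, q)` instead of doing exact `Fraction` arithmetic. A standalone sketch of the same recurrence, using `math.isqrt` (Python 3.8+) in place of the float `sqrt` in the row above, with a small usage check:

```python
from math import isqrt


def sqrt_partial_quotients(n, terms=8):
    """First `terms` partial quotients of the continued fraction of
    sqrt(n), via the integer (p, q) recurrence used in the commit."""
    a0 = isqrt(n)
    if a0 * a0 == n:
        return [a0]  # perfect square: the expansion terminates immediately
    quotients = [a0]
    p, q = 0, 1
    for _ in range(terms - 1):
        p = quotients[-1] * q - p
        q = (n - p * p) // q
        quotients.append((a0 + p) // q)
    return quotients


print(sqrt_partial_quotients(7))  # [2, 1, 1, 1, 4, 1, 1, 1], period (1, 1, 1, 4)
```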
36df41cf3f5345ab599b5a748562aec2af414239 | python/crypto-square/crypto_square.py | python/crypto-square/crypto_square.py | import string
import math
import itertools
class CryptoSquare:
@classmethod
def encode(cls, msg):
if len(cls.normalize(msg)) == 0:
return ''
return ' '.join(cls.transpose_square(cls.squarify(cls.normalize(msg))))
@classmethod
def squarify(cls, msg):
return [msg[i:i + cls.square_size(len(msg))]
for i in range(0, len(msg), cls.square_size(len(msg)))]
@classmethod
def transpose_square(cls, square):
matrix = [list(row) for row in square]
transposed_matrix = cls.filter_out_none(cls.transpose_uneven_matrix(matrix))
return [''.join(row) for row in transposed_matrix]
@staticmethod
def normalize(msg):
return ''.join(ch.lower() for ch in msg if ch not in
set(string.punctuation + ' '))
@staticmethod
def square_size(msg_length):
return int(math.ceil(msg_length ** 0.5))
# https://stackoverflow.com/a/4938130/2813210
@staticmethod
def transpose_uneven_matrix(matrix):
return list(itertools.zip_longest(*matrix))
@staticmethod
def filter_out_none(matrix):
return [[val for val in row if val is not None] for row in matrix]
def encode(msg):
return CryptoSquare.encode(msg)
| import string
import math
import itertools
class CryptoSquare:
@classmethod
def encode(cls, msg):
if len(cls.normalize(msg)) == 0:
return ''
return ' '.join(cls.transpose_square(cls.squarify(cls.normalize(msg))))
@classmethod
def squarify(cls, msg):
return [msg[i:i + cls.square_size(len(msg))]
for i in range(0, len(msg), cls.square_size(len(msg)))]
@classmethod
def transpose_square(cls, square):
matrix = [list(row) for row in square]
transposed_matrix = cls.transpose_uneven_matrix(matrix)
return [''.join(row) for row in transposed_matrix]
@staticmethod
def normalize(msg):
return ''.join(ch.lower() for ch in msg if ch not in
set(string.punctuation + ' '))
@staticmethod
def square_size(msg_length):
return int(math.ceil(msg_length ** 0.5))
# https://stackoverflow.com/a/4938130/2813210
@staticmethod
def transpose_uneven_matrix(matrix):
transposed_matrix = list(itertools.zip_longest(*matrix))
return [[val for val in row if val is not None] for row in transposed_matrix] # Remove None's
def encode(msg):
return CryptoSquare.encode(msg)
| Clean up transpose helper method | Clean up transpose helper method
| Python | mit | rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism |
c8301f1e3165a5e5eaac46de9bdf97c4c1109718 | dht.py | dht.py | #!/usr/bin/env python
import time
import thread
import Adafruit_DHT as dht
import config
h = 0.0
t = 0.0
def get_ht_thread():
while True:
ht = dht.read_retry(dht.DHT22, config.DHT22_GPIO_NUM)
h = '{0:0.1f}'.format(ht[0])
t = '{0:0.1f}'.format(ht[1])
time.sleep(2)
def get_ht():
return (h, t)
thread.start_new_thread(get_ht_thread, ())
if __name__ == '__main__':
ht = get_ht()
print('The humidity and temperature:')
print(ht)
| #!/usr/bin/env python
import time
import thread
import Adafruit_DHT as dht
import config
h = 0.0
t = 0.0
def get_ht_thread():
global h
global t
while True:
ht = dht.read_retry(dht.DHT22, config.DHT22_GPIO_NUM)
h = '{0:0.1f}'.format(ht[0])
t = '{0:0.1f}'.format(ht[1])
time.sleep(2)
def get_ht():
return (h, t)
thread.start_new_thread(get_ht_thread, ())
if __name__ == '__main__':
ht = get_ht()
print('The humidity and temperature:')
print(ht)
| Fix a DHT reading error | Fix a DHT reading error
| Python | mit | yunbademo/yunba-smarthome,yunbademo/yunba-smarthome |
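The fix above works because assigning to a name inside a Python function creates a function-local binding unless the name is declared `global`: the reader thread was updating locals named `h` and `t` while the module-level values stayed at `0.0`. A minimal demonstration of that scoping rule (names here are illustrative):

```python
reading = 0.0


def update_broken():
    reading = 42.0  # binds a new local; the module-level name is untouched


def update_fixed():
    global reading
    reading = 42.0  # rebinds the module-level name


update_broken()
print(reading)  # 0.0
update_fixed()
print(reading)  # 42.0
```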
b86d23b0302bb4d0efa2aa203883a78d3dcbf26e | scipy/integrate/_ivp/tests/test_rk.py | scipy/integrate/_ivp/tests/test_rk.py | import pytest
from numpy.testing import assert_allclose
import numpy as np
from scipy.integrate import RK23, RK45, DOP853
from scipy.integrate._ivp import dop853_coefficients
@pytest.mark.parametrize("solver", [RK23, RK45, DOP853])
def test_coefficient_properties(solver):
assert_allclose(np.sum(solver.B), 1, rtol=1e-15)
assert_allclose(np.sum(solver.A, axis=1), solver.C, rtol=1e-14)
def test_coefficient_properties_dop853():
assert_allclose(np.sum(dop853_coefficients.B), 1, rtol=1e-15)
assert_allclose(np.sum(dop853_coefficients.A, axis=1),
dop853_coefficients.C,
rtol=1e-14)
| import pytest
from numpy.testing import assert_allclose, assert_
import numpy as np
from scipy.integrate import RK23, RK45, DOP853
from scipy.integrate._ivp import dop853_coefficients
@pytest.mark.parametrize("solver", [RK23, RK45, DOP853])
def test_coefficient_properties(solver):
assert_allclose(np.sum(solver.B), 1, rtol=1e-15)
assert_allclose(np.sum(solver.A, axis=1), solver.C, rtol=1e-14)
def test_coefficient_properties_dop853():
assert_allclose(np.sum(dop853_coefficients.B), 1, rtol=1e-15)
assert_allclose(np.sum(dop853_coefficients.A, axis=1),
dop853_coefficients.C,
rtol=1e-14)
@pytest.mark.parametrize("solver_class", [RK23, RK45, DOP853])
def test_error_estimation(solver_class):
step = 0.2
solver = solver_class(lambda t, y: y, 0, [1], 1, first_step=step)
solver.step()
error_estimate = solver._estimate_errors(solver.K, step)
error = solver.y - np.exp([step])
assert_(np.abs(error) < np.abs(error_estimate))
| Test of error estimation of Runge-Kutta methods | TST: Test of error estimation of Runge-Kutta methods
| Python | bsd-3-clause | jor-/scipy,zerothi/scipy,mdhaber/scipy,anntzer/scipy,ilayn/scipy,Eric89GXL/scipy,mdhaber/scipy,matthew-brett/scipy,endolith/scipy,jor-/scipy,anntzer/scipy,grlee77/scipy,vigna/scipy,mdhaber/scipy,andyfaff/scipy,aarchiba/scipy,aeklant/scipy,tylerjereddy/scipy,aeklant/scipy,andyfaff/scipy,perimosocordiae/scipy,tylerjereddy/scipy,aeklant/scipy,person142/scipy,jamestwebber/scipy,e-q/scipy,rgommers/scipy,endolith/scipy,perimosocordiae/scipy,jor-/scipy,Stefan-Endres/scipy,pizzathief/scipy,mdhaber/scipy,rgommers/scipy,perimosocordiae/scipy,lhilt/scipy,Eric89GXL/scipy,grlee77/scipy,person142/scipy,matthew-brett/scipy,aarchiba/scipy,gertingold/scipy,mdhaber/scipy,person142/scipy,pizzathief/scipy,matthew-brett/scipy,jamestwebber/scipy,scipy/scipy,perimosocordiae/scipy,aarchiba/scipy,endolith/scipy,Eric89GXL/scipy,Stefan-Endres/scipy,tylerjereddy/scipy,perimosocordiae/scipy,e-q/scipy,nmayorov/scipy,pizzathief/scipy,jor-/scipy,arokem/scipy,vigna/scipy,gertingold/scipy,andyfaff/scipy,scipy/scipy,pizzathief/scipy,matthew-brett/scipy,aeklant/scipy,scipy/scipy,rgommers/scipy,jamestwebber/scipy,Stefan-Endres/scipy,lhilt/scipy,nmayorov/scipy,jor-/scipy,mdhaber/scipy,vigna/scipy,grlee77/scipy,Eric89GXL/scipy,Eric89GXL/scipy,andyfaff/scipy,ilayn/scipy,jamestwebber/scipy,person142/scipy,gertingold/scipy,nmayorov/scipy,nmayorov/scipy,nmayorov/scipy,perimosocordiae/scipy,ilayn/scipy,WarrenWeckesser/scipy,arokem/scipy,lhilt/scipy,WarrenWeckesser/scipy,Stefan-Endres/scipy,person142/scipy,jamestwebber/scipy,scipy/scipy,zerothi/scipy,anntzer/scipy,Stefan-Endres/scipy,scipy/scipy,scipy/scipy,grlee77/scipy,zerothi/scipy,ilayn/scipy,endolith/scipy,zerothi/scipy,rgommers/scipy,zerothi/scipy,endolith/scipy,tylerjereddy/scipy,WarrenWeckesser/scipy,andyfaff/scipy,lhilt/scipy,rgommers/scipy,gertingold/scipy,e-q/scipy,pizzathief/scipy,vigna/scipy,ilayn/scipy,aarchiba/scipy,e-q/scipy,WarrenWeckesser/scipy,matthew-brett/scipy,lhilt/scipy,arokem/scipy,zerothi/scipy,WarrenWeckesser/scipy,aeklant/scipy,ilayn/scipy,grlee77/scipy,tylerjereddy/scipy,Stefan-Endres/scipy,e-q/scipy,Eric89GXL/scipy,andyfaff/scipy,anntzer/scipy,endolith/scipy,vigna/scipy,anntzer/scipy,aarchiba/scipy,arokem/scipy,WarrenWeckesser/scipy,anntzer/scipy,arokem/scipy,gertingold/scipy |
81dfb5cb952fbca90882bd39e76887f0fa6479eb | msmexplorer/tests/test_msm_plot.py | msmexplorer/tests/test_msm_plot.py | import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
| import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
def test_plot_implied_timescales():
lag_times = [1, 10, 50, 100, 200, 250, 500]
msm_objs = []
for lag in lag_times:
# Construct MSM
msm = MarkovStateModel(lag_time=lag, n_timescales=5)
msm.fit(clustered_trajs)
msm_objs.append(msm)
ax = plot_implied_timescales(msm_objs)
assert isinstance(ax, SubplotBase)
| Add test for implied timescales plot | Add test for implied timescales plot
| Python | mit | msmexplorer/msmexplorer,msmexplorer/msmexplorer |
5f39fd311c735593ac41ba17a060f9cadbe80e18 | nlpipe/scripts/amcat_background.py | nlpipe/scripts/amcat_background.py | """
Assign articles from AmCAT sets for background processing in nlpipe
"""
import sys, argparse
from nlpipe import tasks
from nlpipe.pipeline import parse_background
from nlpipe.backend import get_input_ids
from nlpipe.celery import app
modules = {n.split(".")[-1]: t for (n,t) in app.tasks.iteritems() if n.startswith("nlpipe")}
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('module', help='nlpipe module (task) name ({})'.format(", ".join(sorted(modules))),
choices=modules, metavar="module")
parser.add_argument('sets', type=int, nargs='+', help='Article set id(s)')
parser.add_argument('--max', type=int, help='maximum number of articles to assign')
parser.add_argument('--queue', default='background', help='Celery queue to put the articles on')
args = parser.parse_args()
task = modules[args.module]
body = {u'filter': {'terms': {u'sets': args.sets}}}
print("Assigning {max} articles from set(s) {args.sets} for processing by {task.name}"
.format(max=("up to {}".format(args.max) if args.max is not None else "all"), **locals()))
ids = list(get_input_ids(body))
parse_background(ids, task, max=args.max, queue=args.queue)
| """
Assign articles from AmCAT sets for background processing in nlpipe
"""
import sys, argparse
from nlpipe import tasks
from nlpipe.pipeline import parse_background
from nlpipe.backend import get_input_ids
from nlpipe.celery import app
import logging
FORMAT = '[%(asctime)-15s] %(message)s'
logging.basicConfig(format=FORMAT, level=logging.INFO)
modules = {n.split(".")[-1]: t for (n,t) in app.tasks.iteritems() if n.startswith("nlpipe")}
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('module', help='nlpipe module (task) name ({})'.format(", ".join(sorted(modules))),
choices=modules, metavar="module")
parser.add_argument('sets', type=int, nargs='+', help='Article set id(s)')
parser.add_argument('--max', type=int, help='maximum number of articles to assign')
parser.add_argument('--queue', default='background', help='Celery queue to put the articles on')
args = parser.parse_args()
task = modules[args.module]
body = {u'filter': {'terms': {u'sets': args.sets}}}
logging.info("Assigning {max} articles from set(s) {args.sets} for processing by {task.name}"
.format(max=("up to {}".format(args.max) if args.max is not None else "all"), **locals()))
ids = list(get_input_ids(body))
logging.info("... Found {} articles".format(len(ids)))
parse_background(ids, task, max=args.max, queue=args.queue)
| Add logging to background assign | Add logging to background assign
| Python | mit | amcat/nlpipe |
8c11b2db7f09844aa860bfe7f1c3ff23c0d30f94 | sentry/migrations/0062_correct_del_index_sentry_groupedmessage_logger__view__checksum.py | sentry/migrations/0062_correct_del_index_sentry_groupedmessage_logger__view__checksum.py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Removing unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum']
# FIXES 0015
try:
db.delete_unique('sentry_groupedmessage', ['logger', 'view', 'checksum'])
except:
# TODO: we should probaby remove this since it seems to only affect previous installs that
# hadnt applied migrations, and MySQL doesnt like try/excepts (aka South)
pass
def backwards(self, orm):
# Adding unique constraint on 'GroupedMessage', fields ['checksum', 'logger', 'view']
#FIXES 0015
db.create_unique('sentry_groupedmessage', ['checksum', 'logger', 'view'])
complete_apps = ['sentry']
| # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
pass
def backwards(self, orm):
pass
complete_apps = ['sentry']
| Remove bad delete_unique call as it was already applied in migration 0015 | Remove bad delete_unique call as it was already applied in migration 0015
| Python | bsd-3-clause | camilonova/sentry,1tush/sentry,vperron/sentry,drcapulet/sentry,fuziontech/sentry,boneyao/sentry,mvaled/sentry,ifduyue/sentry,pauloschilling/sentry,boneyao/sentry,beni55/sentry,Kryz/sentry,beeftornado/sentry,jean/sentry,gg7/sentry,JamesMura/sentry,rdio/sentry,wong2/sentry,songyi199111/sentry,daevaorn/sentry,looker/sentry,kevinastone/sentry,beni55/sentry,gencer/sentry,jokey2k/sentry,BayanGroup/sentry,fuziontech/sentry,JamesMura/sentry,gencer/sentry,TedaLIEz/sentry,zenefits/sentry,BayanGroup/sentry,jean/sentry,NickPresta/sentry,imankulov/sentry,mvaled/sentry,gencer/sentry,JackDanger/sentry,looker/sentry,nicholasserra/sentry,JTCunning/sentry,camilonova/sentry,mvaled/sentry,Natim/sentry,alexm92/sentry,llonchj/sentry,zenefits/sentry,drcapulet/sentry,hongliang5623/sentry,BuildingLink/sentry,wujuguang/sentry,songyi199111/sentry,pauloschilling/sentry,kevinlondon/sentry,vperron/sentry,ngonzalvez/sentry,SilentCircle/sentry,JTCunning/sentry,jean/sentry,TedaLIEz/sentry,gg7/sentry,Natim/sentry,camilonova/sentry,fotinakis/sentry,argonemyth/sentry,ewdurbin/sentry,jean/sentry,imankulov/sentry,ifduyue/sentry,1tush/sentry,BuildingLink/sentry,fotinakis/sentry,SilentCircle/sentry,jean/sentry,NickPresta/sentry,NickPresta/sentry,gg7/sentry,wujuguang/sentry,BuildingLink/sentry,looker/sentry,fotinakis/sentry,daevaorn/sentry,pauloschilling/sentry,BuildingLink/sentry,rdio/sentry,daevaorn/sentry,1tush/sentry,nicholasserra/sentry,fotinakis/sentry,llonchj/sentry,rdio/sentry,kevinlondon/sentry,zenefits/sentry,looker/sentry,ifduyue/sentry,hongliang5623/sentry,BayanGroup/sentry,SilentCircle/sentry,chayapan/django-sentry,kevinastone/sentry,felixbuenemann/sentry,JamesMura/sentry,boneyao/sentry,Natim/sentry,mitsuhiko/sentry,hongliang5623/sentry,Kryz/sentry,korealerts1/sentry,imankulov/sentry,alexm92/sentry,wujuguang/sentry,vperron/sentry,JTCunning/sentry,beeftornado/sentry,JamesMura/sentry,ifduyue/sentry,ewdurbin/sentry,felixbuenemann/sentry,mitsuhiko/sentry,wong2/sentry,JackDanger/sentry,chayapan/django-sentry,nicholasserra/sentry,SilentCircle/sentry,beeftornado/sentry,drcapulet/sentry,TedaLIEz/sentry,gencer/sentry,ewdurbin/sentry,gencer/sentry,BuildingLink/sentry,korealerts1/sentry,felixbuenemann/sentry,Kryz/sentry,jokey2k/sentry,chayapan/django-sentry,kevinastone/sentry,JackDanger/sentry,songyi199111/sentry,daevaorn/sentry,ngonzalvez/sentry,mvaled/sentry,looker/sentry,ifduyue/sentry,fuziontech/sentry,JamesMura/sentry,argonemyth/sentry,zenefits/sentry,mvaled/sentry,zenefits/sentry,ngonzalvez/sentry,mvaled/sentry,korealerts1/sentry,rdio/sentry,jokey2k/sentry,kevinlondon/sentry,alexm92/sentry,argonemyth/sentry,wong2/sentry,llonchj/sentry,NickPresta/sentry,beni55/sentry |
457f2d1d51b2bf008f837bf3ce8ee3cb47d5ba6b | var/spack/packages/libpng/package.py | var/spack/packages/libpng/package.py | from spack import *
class Libpng(Package):
"""libpng graphics file format"""
homepage = "http://www.libpng.org/pub/png/libpng.html"
url = "http://sourceforge.net/projects/libpng/files/libpng16/1.6.14/libpng-1.6.14.tar.gz/download"
version('1.6.14', '2101b3de1d5f348925990f9aa8405660')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
| from spack import *
class Libpng(Package):
"""libpng graphics file format"""
homepage = "http://www.libpng.org/pub/png/libpng.html"
url = "http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz"
version('1.6.14', '2101b3de1d5f348925990f9aa8405660')
version('1.6.15', '829a256f3de9307731d4f52dc071916d')
version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
| Fix libpng to use a better URL | Fix libpng to use a better URL
Sourceforge URLs like this eventually die when the libpng version is bumped:
http://sourceforge.net/projects/libpng/files/libpng16/1.6.14/libpng-1.6.14.tar.gz/download
But ones like this give you a "permanently moved", which curl -L will follow:
http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz
| Python | lgpl-2.1 | mfherbst/spack,tmerrick1/spack,iulian787/spack,TheTimmy/spack,tmerrick1/spack,krafczyk/spack,EmreAtes/spack,matthiasdiener/spack,TheTimmy/spack,lgarren/spack,EmreAtes/spack,lgarren/spack,krafczyk/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,lgarren/spack,krafczyk/spack,krafczyk/spack,skosukhin/spack,TheTimmy/spack,mfherbst/spack,TheTimmy/spack,skosukhin/spack,tmerrick1/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,tmerrick1/spack,iulian787/spack,lgarren/spack,mfherbst/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,matthiasdiener/spack,LLNL/spack,mfherbst/spack,iulian787/spack,lgarren/spack,matthiasdiener/spack,TheTimmy/spack,EmreAtes/spack,iulian787/spack,iulian787/spack,skosukhin/spack,LLNL/spack,skosukhin/spack,tmerrick1/spack,skosukhin/spack |
f4429e49c8b493fa285d169a41b82cb761716705 | tests/explorers_tests/test_additive_ou.py | tests/explorers_tests/test_additive_ou.py | from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
import numpy as np
from chainerrl.explorers.additive_ou import AdditiveOU
class TestAdditiveOU(unittest.TestCase):
def test(self):
action_size = 3
dt = 0.5
sigma = 0.001
theta = 0.3
def greedy_action_func():
return np.asarray([0] * action_size, dtype=np.float32)
explorer = AdditiveOU(action_size, dt=dt, theta=theta, sigma=sigma)
for t in range(10000):
a = explorer.select_action(t, greedy_action_func)
print(a)
| from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
import numpy as np
from chainerrl.explorers.additive_ou import AdditiveOU
class TestAdditiveOU(unittest.TestCase):
def test(self):
action_size = 3
def greedy_action_func():
return np.asarray([0] * action_size, dtype=np.float32)
explorer = AdditiveOU()
for t in range(100):
a = explorer.select_action(t, greedy_action_func)
print(t, a)
| Fix a test for AdditiveOU | Fix a test for AdditiveOU
| Python | mit | toslunar/chainerrl,toslunar/chainerrl |
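For context on the noise model exercised above: `AdditiveOU` adds Ornstein-Uhlenbeck noise to a greedy action. A hedged, standalone sketch of one common Euler-Maruyama discretization of an OU process; this is not chainerrl's implementation, and only the `theta`, `sigma`, and `dt` defaults are grounded in the values the old test passed explicitly:

```python
import numpy as np


def ou_step(x, theta=0.3, mu=0.0, sigma=0.001, dt=0.5, rng=np.random):
    """One discretized OU update: mean-reverting drift toward mu plus
    scaled Gaussian noise. Defaults mirror the old test's parameters."""
    return x + theta * (mu - x) * dt + sigma * np.sqrt(dt) * rng.standard_normal(x.shape)


noise = np.zeros(3, dtype=np.float32)
for _ in range(5):
    noise = ou_step(noise)
```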
bea258e2affc165f610de83248d9f958eec1ef4e | cmsplugin_markdown/models.py | cmsplugin_markdown/models.py | from django.db import models
from cms.models import CMSPlugin
class MarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(max_length=8000)
| from django.db import models
from cms.models import CMSPlugin
from cms.utils.compat.dj import python_2_unicode_compatible
@python_2_unicode_compatible
class MarkdownPlugin(CMSPlugin):
markdown_text = models.TextField(max_length=8000)
def __str__(self):
text = self.markdown_text
return (text[:50] + '...') if len(text) > 53 else text
| Add __str__ method for better representation in frontend | Add __str__ method for better representation in frontend
| Python | mit | bitmazk/cmsplugin-markdown,bitmazk/cmsplugin-markdown,bitmazk/cmsplugin-markdown |
6776a538f946a25e921f8ecd11a0ce1ddd422d0d | tools/skp/page_sets/skia_ukwsj_nexus10.py | tools/skp/page_sets/skia_ukwsj_nexus10.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path='data/credentials.json')
self.user_agent_type = 'tablet'
self.archive_data_file = 'data/skia_ukwsj_nexus10.json'
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.Wait(5)
class SkiaUkwsjNexus10PageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaUkwsjNexus10PageSet, self).__init__(
user_agent_type='tablet',
archive_data_file='data/skia_ukwsj_nexus10.json')
urls_list = [
# Why: for Clank CY
'http://uk.wsj.com/home-page',
]
for url in urls_list:
self.AddUserStory(SkiaBuildbotDesktopPage(url, self))
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path='data/credentials.json')
self.user_agent_type = 'tablet'
self.archive_data_file = 'data/skia_ukwsj_nexus10.json'
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.Wait(15)
class SkiaUkwsjNexus10PageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaUkwsjNexus10PageSet, self).__init__(
user_agent_type='tablet',
archive_data_file='data/skia_ukwsj_nexus10.json')
urls_list = [
# Why: for Clank CY
'http://uk.wsj.com/home-page',
]
for url in urls_list:
self.AddUserStory(SkiaBuildbotDesktopPage(url, self))
| Increase timeout of ukwsj to get more consistent SKP captures | Increase timeout of ukwsj to get more consistent SKP captures
BUG=skia:3574
TBR=borenet
NOTRY=true
Review URL: https://codereview.chromium.org/1038443002
| Python | bsd-3-clause | TeamTwisted/external_skia,vanish87/skia,shahrzadmn/skia,VRToxin-AOSP/android_external_skia,TeamTwisted/external_skia,shahrzadmn/skia,pcwalton/skia,TeamExodus/external_skia,YUPlayGodDev/platform_external_skia,boulzordev/android_external_skia,qrealka/skia-hc,TeamTwisted/external_skia,HalCanary/skia-hc,rubenvb/skia,pcwalton/skia,nvoron23/skia,google/skia,geekboxzone/mmallow_external_skia,noselhq/skia,MarshedOut/android_external_skia,google/skia,AOSP-YU/platform_external_skia,invisiblek/android_external_skia,invisiblek/android_external_skia,Infinitive-OS/platform_external_skia,AOSP-YU/platform_external_skia,UBERMALLOW/external_skia,aosp-mirror/platform_external_skia,todotodoo/skia,ominux/skia,aosp-mirror/platform_external_skia,TeamExodus/external_skia,rubenvb/skia,HalCanary/skia-hc,nfxosp/platform_external_skia,Hikari-no-Tenshi/android_external_skia,BrokenROM/external_skia,Hikari-no-Tenshi/android_external_skia,PAC-ROM/android_external_skia,timduru/platform-external-skia,invisiblek/android_external_skia,HalCanary/skia-hc,amyvmiwei/skia,TeamTwisted/external_skia,google/skia,Hikari-no-Tenshi/android_external_skia,AOSP-YU/platform_external_skia,invisiblek/android_external_skia,shahrzadmn/skia,TeamExodus/external_skia,UBERMALLOW/external_skia,MinimalOS-AOSP/platform_external_skia,HalCanary/skia-hc,YUPlayGodDev/platform_external_skia,ominux/skia,Hikari-no-Tenshi/android_external_skia,PAC-ROM/android_external_skia,rubenvb/skia,Jichao/skia,PAC-ROM/android_external_skia,MonkeyZZZZ/platform_external_skia,boulzordev/android_external_skia,tmpvar/skia.cc,pcwalton/skia,VRToxin-AOSP/android_external_skia,Hikari-no-Tenshi/android_external_skia,TeamTwisted/external_skia,Jichao/skia,tmpvar/skia.cc,YUPlayGodDev/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,rubenvb/skia,Jichao/skia,google/skia,geekboxzone/mmallow_external_skia,w3nd1go/android_external_skia,ominux/skia,pcwalton/skia,MonkeyZZZZ/platform_external_skia,noselhq/skia,pcwalton/skia,shahrzadmn/skia,shahrzadmn/skia,vanish87/skia,MonkeyZZZZ/platform_external_skia,boulzordev/android_external_skia,nfxosp/platform_external_skia,Jichao/skia,rubenvb/skia,PAC-ROM/android_external_skia,ominux/skia,BrokenROM/external_skia,invisiblek/android_external_skia,vanish87/skia,nvoron23/skia,w3nd1go/android_external_skia,AOSP-YU/platform_external_skia,nvoron23/skia,amyvmiwei/skia,BrokenROM/external_skia,AOSPB/external_skia,HalCanary/skia-hc,todotodoo/skia,todotodoo/skia,tmpvar/skia.cc,pcwalton/skia,timduru/platform-external-skia,qrealka/skia-hc,boulzordev/android_external_skia,google/skia,Infinitive-OS/platform_external_skia,OneRom/external_skia,PAC-ROM/android_external_skia,nvoron23/skia,MinimalOS-AOSP/platform_external_skia,TeamExodus/external_skia,BrokenROM/external_skia,ominux/skia,MarshedOut/android_external_skia,MarshedOut/android_external_skia,Jichao/skia,vanish87/skia,AOSP-YU/platform_external_skia,spezi77/android_external_skia,timduru/platform-external-skia,todotodoo/skia,YUPlayGodDev/platform_external_skia,vanish87/skia,YUPlayGodDev/platform_external_skia,YUPlayGodDev/platform_external_skia,MarshedOut/android_external_skia,TeamExodus/external_skia,Infinitive-OS/platform_external_skia,Jichao/skia,boulzordev/android_external_skia,PAC-ROM/android_external_skia,MonkeyZZZZ/platform_external_skia,boulzordev/android_external_skia,aosp-mirror/platform_external_skia,google/skia,spezi77/android_external_skia,rubenvb/skia,invisiblek/android_external_skia,TeamTwisted/external_skia,todotodoo/skia,UBERMALLOW/external_skia,UBERMA
LLOW/external_skia,MonkeyZZZZ/platform_external_skia,w3nd1go/android_external_skia,ominux/skia,PAC-ROM/android_external_skia,noselhq/skia,Hikari-no-Tenshi/android_external_skia,qrealka/skia-hc,TeamExodus/external_skia,Infinitive-OS/platform_external_skia,Infinitive-OS/platform_external_skia,OneRom/external_skia,nvoron23/skia,w3nd1go/android_external_skia,HalCanary/skia-hc,UBERMALLOW/external_skia,OneRom/external_skia,Infinitive-OS/platform_external_skia,noselhq/skia,MinimalOS-AOSP/platform_external_skia,rubenvb/skia,tmpvar/skia.cc,pcwalton/skia,OneRom/external_skia,MinimalOS-AOSP/platform_external_skia,shahrzadmn/skia,todotodoo/skia,aosp-mirror/platform_external_skia,amyvmiwei/skia,noselhq/skia,noselhq/skia,AOSPB/external_skia,noselhq/skia,rubenvb/skia,vanish87/skia,pcwalton/skia,geekboxzone/mmallow_external_skia,Jichao/skia,PAC-ROM/android_external_skia,w3nd1go/android_external_skia,MonkeyZZZZ/platform_external_skia,amyvmiwei/skia,amyvmiwei/skia,Jichao/skia,VRToxin-AOSP/android_external_skia,TeamExodus/external_skia,geekboxzone/mmallow_external_skia,Infinitive-OS/platform_external_skia,MonkeyZZZZ/platform_external_skia,VRToxin-AOSP/android_external_skia,HalCanary/skia-hc,ominux/skia,boulzordev/android_external_skia,geekboxzone/mmallow_external_skia,google/skia,Infinitive-OS/platform_external_skia,MonkeyZZZZ/platform_external_skia,aosp-mirror/platform_external_skia,TeamTwisted/external_skia,OneRom/external_skia,nfxosp/platform_external_skia,qrealka/skia-hc,TeamTwisted/external_skia,spezi77/android_external_skia,spezi77/android_external_skia,nvoron23/skia,Hikari-no-Tenshi/android_external_skia,TeamExodus/external_skia,AOSP-YU/platform_external_skia,TeamExodus/external_skia,tmpvar/skia.cc,PAC-ROM/android_external_skia,MarshedOut/android_external_skia,BrokenROM/external_skia,geekboxzone/mmallow_external_skia,UBERMALLOW/external_skia,VRToxin-AOSP/android_external_skia,nfxosp/platform_external_skia,rubenvb/skia,aosp-mirror/platform_external_skia,boulzordev/android_external_skia,MonkeyZZZZ/platform_external_skia,timduru/platform-external-skia,geekboxzone/mmallow_external_skia,MarshedOut/android_external_skia,qrealka/skia-hc,MinimalOS-AOSP/platform_external_skia,timduru/platform-external-skia,VRToxin-AOSP/android_external_skia,VRToxin-AOSP/android_external_skia,vanish87/skia,MarshedOut/android_external_skia,nvoron23/skia,todotodoo/skia,UBERMALLOW/external_skia,Infinitive-OS/platform_external_skia,MarshedOut/android_external_skia,AOSP-YU/platform_external_skia,HalCanary/skia-hc,AOSP-YU/platform_external_skia,OneRom/external_skia,nfxosp/platform_external_skia,MinimalOS-AOSP/platform_external_skia,shahrzadmn/skia,nfxosp/platform_external_skia,OneRom/external_skia,aosp-mirror/platform_external_skia,tmpvar/skia.cc,VRToxin-AOSP/android_external_skia,YUPlayGodDev/platform_external_skia,invisiblek/android_external_skia,qrealka/skia-hc,nfxosp/platform_external_skia,UBERMALLOW/external_skia,ominux/skia,qrealka/skia-hc,tmpvar/skia.cc,vanish87/skia,BrokenROM/external_skia,invisiblek/android_external_skia,VRToxin-AOSP/android_external_skia,shahrzadmn/skia,amyvmiwei/skia,spezi77/android_external_skia,AOSPB/external_skia,YUPlayGodDev/platform_external_skia,nfxosp/platform_external_skia,tmpvar/skia.cc,TeamTwisted/external_skia,Jichao/skia,ominux/skia,UBERMALLOW/external_skia,Hikari-no-Tenshi/android_external_skia,pcwalton/skia,nvoron23/skia,AOSPB/external_skia,timduru/platform-external-skia,AOSPB/external_skia,MinimalOS-AOSP/platform_external_skia,amyvmiwei/skia,boulzordev/android_external_skia,nfxosp/platform_ex
ternal_skia,aosp-mirror/platform_external_skia,noselhq/skia,w3nd1go/android_external_skia,w3nd1go/android_external_skia,shahrzadmn/skia,vanish87/skia,YUPlayGodDev/platform_external_skia,google/skia,qrealka/skia-hc,AOSPB/external_skia,BrokenROM/external_skia,tmpvar/skia.cc,geekboxzone/mmallow_external_skia,OneRom/external_skia,HalCanary/skia-hc,AOSPB/external_skia,geekboxzone/mmallow_external_skia,OneRom/external_skia,todotodoo/skia,MarshedOut/android_external_skia,rubenvb/skia,google/skia,MinimalOS-AOSP/platform_external_skia,amyvmiwei/skia,timduru/platform-external-skia,AOSPB/external_skia,nvoron23/skia,BrokenROM/external_skia,aosp-mirror/platform_external_skia,todotodoo/skia,noselhq/skia,AOSP-YU/platform_external_skia,spezi77/android_external_skia,MinimalOS-AOSP/platform_external_skia,w3nd1go/android_external_skia,HalCanary/skia-hc,w3nd1go/android_external_skia,AOSPB/external_skia |
9828e5125cdbc01a773c60b1e211d0e434a2c5aa | tests/test_modules/test_pmac/test_pmacstatuspart.py | tests/test_modules/test_pmac/test_pmacstatuspart.py | from malcolm.core import Process
from malcolm.modules.builtin.controllers import ManagerController
from malcolm.modules.pmac.blocks import pmac_status_block
from malcolm.modules.pmac.parts import PmacStatusPart
from malcolm.testutil import ChildTestCase
class TestPmacStatusPart(ChildTestCase):
def setUp(self):
self.process = Process("Process")
child = self.create_child_block(
pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE"
)
self.set_attributes(child, i10=1705244)
c = ManagerController("PMAC", "/tmp", use_git=False)
self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True)
c.add_part(self.o)
self.process.add_controller(c)
self.process.start()
self.b = c.block_view()
def tearDown(self):
self.process.stop(timeout=1)
def test_servo_freq(self):
freq = self.b.servoFrequency()
assert freq == 4919.300698316487
| from malcolm.core import Process
from malcolm.modules.builtin.controllers import ManagerController
from malcolm.modules.pmac.blocks import pmac_status_block
from malcolm.modules.pmac.parts import PmacStatusPart
from malcolm.testutil import ChildTestCase
class TestPmacStatusPart(ChildTestCase):
def setUp(self):
self.process = Process("Process")
child = self.create_child_block(
pmac_status_block, self.process, mri="my_mri", pv_prefix="PV:PRE"
)
self.set_attributes(child, servoFreq=2500.04)
c = ManagerController("PMAC", "/tmp", use_git=False)
self.o = PmacStatusPart(name="part", mri="my_mri", initial_visibility=True)
c.add_part(self.o)
self.process.add_controller(c)
self.process.start()
self.b = c.block_view()
def tearDown(self):
self.process.stop(timeout=1)
def test_servo_freq(self):
freq = self.b.servoFrequency()
assert freq == 2500.04
| Change TestPmacStatusPart to not use i10 | Change TestPmacStatusPart to not use i10
| Python | apache-2.0 | dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm |
864d8908fce4c92382916f5e3e02992f83fd6e6e | feincms/content/raw/models.py | feincms/content/raw/models.py | from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
class RawContent(models.Model):
text = models.TextField(_('text'), blank=True)
class Meta:
abstract = True
verbose_name = _('raw content')
verbose_name_plural = _('raw contents')
def render(self, **kwargs):
return mark_safe(self.text)
| from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
class RawContent(models.Model):
text = models.TextField(_('content'), blank=True)
class Meta:
abstract = True
verbose_name = _('raw content')
verbose_name_plural = _('raw contents')
def render(self, **kwargs):
return mark_safe(self.text)
| Rename RawContent text field, describes field better | Rename RawContent text field, describes field better
| Python | bsd-3-clause | joshuajonah/feincms,pjdelport/feincms,michaelkuty/feincms,matthiask/django-content-editor,feincms/feincms,joshuajonah/feincms,hgrimelid/feincms,joshuajonah/feincms,matthiask/feincms2-content,mjl/feincms,michaelkuty/feincms,matthiask/django-content-editor,nickburlett/feincms,pjdelport/feincms,mjl/feincms,joshuajonah/feincms,michaelkuty/feincms,nickburlett/feincms,matthiask/feincms2-content,feincms/feincms,feincms/feincms,mjl/feincms,hgrimelid/feincms,hgrimelid/feincms,nickburlett/feincms,michaelkuty/feincms,matthiask/django-content-editor,nickburlett/feincms,matthiask/feincms2-content,matthiask/django-content-editor,pjdelport/feincms |
58dbfa0b449b8e4171c5f9cef1c15db39b52c1f0 | tests/run_tests.py | tests/run_tests.py | #!/usr/bin/env python
import os.path
import sys
import subprocess
import unittest
tests_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.dirname(tests_dir))
import secretstorage
if __name__ == '__main__':
major, minor, patch = sys.version_info[:3]
print('Running with Python %d.%d.%d (SecretStorage from %s)' %
(major, minor, patch, os.path.dirname(secretstorage.__file__)))
mock = None
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],),
stdout=subprocess.PIPE,
universal_newlines=True)
bus_name = mock.stdout.readline().rstrip()
secretstorage.util.BUS_NAME = bus_name
print('Bus name set to %r' % secretstorage.util.BUS_NAME)
loader = unittest.TestLoader()
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(loader.discover(tests_dir))
if mock is not None:
mock.terminate()
sys.exit(not result.wasSuccessful())
| #!/usr/bin/env python
import os.path
import sys
import subprocess
import unittest
tests_dir = os.path.dirname(__file__)
sys.path.insert(0, os.path.dirname(tests_dir))
import secretstorage
if __name__ == '__main__':
major, minor, patch = sys.version_info[:3]
print('Running with Python %d.%d.%d (SecretStorage from %s)' %
(major, minor, patch, os.path.dirname(secretstorage.__file__)))
mock = None
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
mock = subprocess.Popen(('/usr/bin/python3', sys.argv[1],),
stdout=subprocess.PIPE,
universal_newlines=True)
assert mock.stdout is not None # for mypy
bus_name = mock.stdout.readline().rstrip()
secretstorage.util.BUS_NAME = bus_name
print('Bus name set to %r' % secretstorage.util.BUS_NAME)
loader = unittest.TestLoader()
runner = unittest.TextTestRunner(verbosity=2)
result = runner.run(loader.discover(tests_dir))
if mock is not None:
mock.terminate()
sys.exit(not result.wasSuccessful())
| Add an assert to make mypy check pass again | Add an assert to make mypy check pass again
| Python | bsd-3-clause | mitya57/secretstorage |
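The `assert` above is a standard mypy narrowing idiom: `Popen.stdout` is typed as optional (it is `None` unless `stdout=PIPE` was passed, and `universal_newlines=True` makes it a text stream), so the checker rejects `.readline()` until `None` is ruled out. A minimal self-contained illustration:

```python
from typing import IO, Optional


def first_line(stream: Optional[IO[str]]) -> str:
    # Without this assert, mypy reports an error like:
    # Item "None" of "Optional[IO[str]]" has no attribute "readline"
    assert stream is not None
    return stream.readline().rstrip()
```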
99496d97f3e00284840d2127556bba0e21d1a99e | frappe/tests/test_commands.py | frappe/tests/test_commands.py | # Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
from __future__ import unicode_literals
import shlex
import subprocess
import unittest
import frappe
def clean(value):
if isinstance(value, (bytes, str)):
value = value.decode().strip()
return value
class BaseTestCommands:
def execute(self, command):
command = command.format(**{"site": frappe.local.site})
command = shlex.split(command)
self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self.stdout = clean(self._proc.stdout)
self.stderr = clean(self._proc.stderr)
self.returncode = clean(self._proc.returncode)
| # Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
from __future__ import unicode_literals
import shlex
import subprocess
import unittest
import frappe
def clean(value):
if isinstance(value, (bytes, str)):
value = value.decode().strip()
return value
class BaseTestCommands:
def execute(self, command):
command = command.format(**{"site": frappe.local.site})
command = shlex.split(command)
self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self.stdout = clean(self._proc.stdout)
self.stderr = clean(self._proc.stderr)
self.returncode = clean(self._proc.returncode)
class TestCommands(BaseTestCommands, unittest.TestCase):
def test_execute(self):
# execute a command expecting a numeric output
self.execute("bench --site {site} execute frappe.db.get_database_size")
self.assertEquals(self.returncode, 0)
self.assertIsInstance(float(self.stdout), float)
# execute a command expecting an errored output as local won't exist
self.execute("bench --site {site} execute frappe.local.site")
self.assertEquals(self.returncode, 1)
self.assertIsNotNone(self.stderr)
# execute a command with kwargs
self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""")
self.assertEquals(self.returncode, 0)
self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType'))
| Add tests for bench execute | test: Add tests for bench execute
| Python | mit | saurabh6790/frappe,StrellaGroup/frappe,adityahase/frappe,mhbu50/frappe,adityahase/frappe,yashodhank/frappe,mhbu50/frappe,yashodhank/frappe,mhbu50/frappe,mhbu50/frappe,StrellaGroup/frappe,saurabh6790/frappe,yashodhank/frappe,frappe/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,adityahase/frappe,frappe/frappe,saurabh6790/frappe,almeidapaulopt/frappe,yashodhank/frappe,adityahase/frappe,saurabh6790/frappe,almeidapaulopt/frappe,frappe/frappe |
fac280a022c8728f14bbe1194cf74af761b7ec3f | vfp2py/__main__.py | vfp2py/__main__.py | import argparse
import vfp2py
def parse_args(argv=None):
parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python')
parser.add_argument("infile", help="file to convert", type=str)
parser.add_argument("outfile", help="file to output to", type=str)
parser.add_argument("search", help="directories to search for included files", type=str, nargs='*')
return parser.parse_args(argv)
def main(argv=None):
args = parse_args(argv)
global SEARCH_PATH
SEARCH_PATH = args.search
vfp2py.convert_file(args.infile, args.outfile)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
| import argparse
import vfp2py
def parse_args(argv=None):
parser = argparse.ArgumentParser(description='Tool for rewriting Foxpro code in Python')
parser.add_argument("infile", help="file to convert", type=str)
parser.add_argument("outfile", help="file to output to", type=str)
parser.add_argument("search", help="directories to search for included files", type=str, nargs='*')
return parser.parse_args(argv)
def main(argv=None):
args = parse_args(argv)
vfp2py.SEARCH_PATH += args.search
vfp2py.convert_file(args.infile, args.outfile)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
| Fix search paths not being added from arguments. | Fix search paths not being added from arguments.
| Python | mit | mwisslead/vfp2py,mwisslead/vfp2py |
2088b3df274fd31c28baa6193c937046c04b98a6 | scripts/generate_wiki_languages.py | scripts/generate_wiki_languages.py | from urllib2 import urlopen
import csv
import lxml.builder as lb
from lxml import etree
# Returns CSV of all wikipedias, ordered by number of 'good' articles
URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good"
data = csv.reader(urlopen(URL))
# Column 2 is the language code
lang_keys = [row[2] for row in data]
del lang_keys[0] # Get rid of the headers
# Generate the XML
x = lb.E
keys = [x.item(k) for k in lang_keys]
resources = x.resources(
getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)
open("languages_list.xml", "w").write(
etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)
| from urllib2 import urlopen
import csv
import json
import lxml.builder as lb
from lxml import etree
# Returns CSV of all wikipedias, ordered by number of 'good' articles
URL = "https://wikistats.wmflabs.org/api.php?action=dump&table=wikipedias&format=csv&s=good"
data = csv.reader(urlopen(URL))
lang_keys = []
lang_local_names = []
lang_eng_names = []
for row in data:
lang_keys.append(row[2])
lang_local_names.append(row[10])
lang_eng_names.append(row[1])
# Generate the XML, for Android
x = lb.E
keys = [x.item(k) for k in lang_keys]
# Skip the headers!
del keys[0]
resources = x.resources(
getattr(x, 'string-array')(*keys, name="preference_language_keys"),
)
open("languages_list.xml", "w").write(
etree.tostring(resources, pretty_print=True, encoding="utf-8", xml_declaration=True)
)
# Generate the JSON, for iOS
langs_json = []
# Start from 1, to skip the headers
for i in xrange(1, len(lang_keys)):
langs_json.append({
"code": lang_keys[i],
"name": lang_local_names[i],
"canonical_name": lang_eng_names[i]
})
open("languages_list.json", "w").write(json.dumps(langs_json, indent=4))
| Modify language generation script to make JSON for iOS | Modify language generation script to make JSON for iOS
Change-Id: Ib5aec2f6cfcb5bd1187cf8863ecd50f1b1a2d20c
| Python | apache-2.0 | Wikinaut/wikipedia-app,carloshwa/apps-android-wikipedia,dbrant/apps-android-wikipedia,creaITve/apps-android-tbrc-works,reproio/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,reproio/apps-android-wikipedia,wikimedia/apps-android-wikipedia,BrunoMRodrigues/apps-android-tbrc-work,BrunoMRodrigues/apps-android-tbrc-work,carloshwa/apps-android-wikipedia,creaITve/apps-android-tbrc-works,BrunoMRodrigues/apps-android-tbrc-work,Wikinaut/wikipedia-app,Wikinaut/wikipedia-app,BrunoMRodrigues/apps-android-tbrc-work,wikimedia/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,anirudh24seven/apps-android-wikipedia,carloshwa/apps-android-wikipedia,wikimedia/apps-android-wikipedia,Wikinaut/wikipedia-app,parvez3019/apps-android-wikipedia,carloshwa/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,anirudh24seven/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,parvez3019/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,dbrant/apps-android-wikipedia,reproio/apps-android-wikipedia,reproio/apps-android-wikipedia,creaITve/apps-android-tbrc-works,anirudh24seven/apps-android-wikipedia,dbrant/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,parvez3019/apps-android-wikipedia,parvez3019/apps-android-wikipedia,SAGROUP2/apps-android-wikipedia,creaITve/apps-android-tbrc-works,wikimedia/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,SAGROUP2/apps-android-wikipedia,parvez3019/apps-android-wikipedia,carloshwa/apps-android-wikipedia,dbrant/apps-android-wikipedia,reproio/apps-android-wikipedia,Duct-and-rice/KrswtkhrWiki4Android,Duct-and-rice/KrswtkhrWiki4Android,Wikinaut/wikipedia-app |
914fe4f61b5cae2804d293169d318df499ab8183 | examples/benchmarking/client.py | examples/benchmarking/client.py | import smtplib, time
messages_sent = 0.0
start_time = time.time()
msg = file('examples/benchmarking/benchmark.eml').read()
while True:
if (messages_sent % 10) == 0:
current_time = time.time()
print '%s msg-written/sec' % (messages_sent / (current_time - start_time))
server = smtplib.SMTP('localhost', port=25)
server.sendmail('foo@localhost', ['bar@localhost'], msg)
server.quit()
messages_sent += 1.0
| import smtplib, time
messages_sent = 0.0
start_time = time.time()
msg = file('examples/benchmarking/benchmark.eml').read()
while True:
if (messages_sent % 10) == 0:
current_time = time.time()
print '%s msg-written/sec' % (messages_sent / (current_time - start_time))
server = smtplib.SMTP('localhost', port=1025)
server.sendmail('foo@localhost', ['bar@localhost'], msg)
server.quit()
messages_sent += 1.0
 | Switch to non-privileged port to make testing easier | Switch to non-privileged port to make testing easier
| Python | isc | bcoe/secure-smtpd |
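The switch to 1025 avoids needing root, since Unix reserves ports below 1024 for privileged processes. A minimal local sink to pair with the client, using the standard-library smtpd module (present before Python 3.12), could be:

```python
import asyncore
import smtpd

# DebuggingServer prints each received message to stdout instead of relaying it.
smtpd.DebuggingServer(("localhost", 1025), None)
asyncore.loop()
```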
f340c674737431c15875007f92de4dbe558ba377 | molo/yourwords/templatetags/competition_tag.py | molo/yourwords/templatetags/competition_tag.py | from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.live().child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = []
context.update({
'competitions': [
a.get_translation_for(locale_code) or a for a in competitions]
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.live().child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return [a.get_translation_for(locale) or a for a in qs]
else:
return []
| from django import template
from copy import copy
from molo.yourwords.models import (YourWordsCompetition, ThankYou,
YourWordsCompetitionIndexPage)
from molo.core.core_tags import get_pages
register = template.Library()
@register.inclusion_tag(
'yourwords/your_words_competition_tag.html',
takes_context=True
)
def your_words_competition(context):
context = copy(context)
locale_code = context.get('locale_code')
page = YourWordsCompetitionIndexPage.objects.live().all().first()
if page:
competitions = (
YourWordsCompetition.objects.child_of(page).filter(
languages__language__is_main_language=True).specific())
else:
competitions = []
context.update({
'competitions': get_pages(context, competitions, locale_code)
})
return context
@register.assignment_tag(takes_context=True)
def load_thank_you_page_for_competition(context, competition):
page = competition.get_main_language_page()
locale = context.get('locale_code')
qs = ThankYou.objects.child_of(page).filter(
languages__language__is_main_language=True)
if not locale:
return qs
if qs:
return get_pages(context, qs, locale)
else:
return []
| Add support for hiding untranslated content | Add support for hiding untranslated content
 | Python | bsd-2-clause | praekelt/molo.yourwords
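The tag now delegates translation fallback to molo's get_pages helper. A hypothetical sketch of what such a helper might do (the real molo.core.core_tags.get_pages may differ, for example by hiding untranslated pages behind a site setting rather than falling back):

```python
def get_pages(context, queryset, locale_code):
    # live() here stands in for the .live() calls dropped from the tag bodies above.
    pages = queryset.live() if hasattr(queryset, "live") else queryset
    if not locale_code:
        return list(pages)
    # Fall back to the main-language page; a "hide untranslated" setting
    # would filter these out instead of falling back.
    return [page.get_translation_for(locale_code) or page for page in pages]
```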
abdd6d6e75fb7c6f9cff4b42f6b12a2cfb7a342a | fpsd/test/test_sketchy_sites.py | fpsd/test/test_sketchy_sites.py | #!/usr/bin/env python3.5
# This test crawls some sets that have triggered http.client.RemoteDisconnected
# exceptions
import unittest
from crawler import Crawler
class CrawlBadSitesTest(unittest.TestCase):
bad_sites = ["http://jlve2diknf45qwjv.onion/",
"http://money2mxtcfcauot.onion",
"http://22222222aziwzse2.onion"]
def test_crawl_of_bad_sites(self):
with Crawler() as crawler:
crawler.collect_set_of_traces(self.bad_sites, shuffle=False)
if __name__ == "__main__":
unittest.main()
| #!/usr/bin/env python3.5
# This test crawls some sets that have triggered http.client.RemoteDisconnected
# exceptions
import unittest
from crawler import Crawler
class CrawlBadSitesTest(unittest.TestCase):
bad_sites = ["http://jlve2diknf45qwjv.onion/",
"http://money2mxtcfcauot.onion",
"http://22222222aziwzse2.onion"]
def test_crawl_of_bad_sites(self):
with Crawler(restart_on_sketchy_exception=True) as crawler:
crawler.collect_set_of_traces(self.bad_sites)
if __name__ == "__main__":
unittest.main()
| Use known-to-trigger-exceptions sites to test crawler restart method | Use known-to-trigger-exceptions sites to test crawler restart method
 | Python | agpl-3.0 | freedomofpress/fingerprint-securedrop,freedomofpress/FingerprintSecureDrop
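The restart_on_sketchy_exception flag suggests a catch-and-restart loop inside the crawler. A hypothetical sketch of that pattern (collect_trace and restart are assumed names, not the real Crawler API):

```python
import http.client

SKETCHY_EXCEPTIONS = (http.client.RemoteDisconnected,)

def collect_with_restart(crawler, urls):
    for url in urls:
        try:
            crawler.collect_trace(url)
        except SKETCHY_EXCEPTIONS:
            crawler.restart()           # assumed restart hook
            crawler.collect_trace(url)  # retry once after restarting
```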
053147c19acbf467bb0e044f2fb58304b759b72d | frameworks/Python/pyramid/create_database.py | frameworks/Python/pyramid/create_database.py | import codecs
from frameworkbenchmarks.models import DBSession
if __name__ == "__main__":
"""
Initialize database
"""
with codecs.open('../config/create-postgres.sql', 'r', encoding='utf-8') as fp:
sql = fp.read()
DBSession.execute(sql)
DBSession.commit()
| import codecs
from frameworkbenchmarks.models import DBSession
if __name__ == "__main__":
"""
Initialize database
"""
with codecs.open('../../../config/create-postgres.sql',
'r',
encoding='utf-8') as fp:
sql = fp.read()
DBSession.execute(sql)
DBSession.commit()
| Fix the path to create-postgres.sql | Fix the path to create-postgres.sql
 | Python | bsd-3-clause | k-r-g/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sxend/FrameworkBenchmarks,doom369/FrameworkBenchmarks,herloct/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zloster/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,khellang/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,grob/FrameworkBenchmarks,testn/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,methane/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zapov/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks
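Hard-coding '../../../' still ties the script to being run from its own directory. A sketch that resolves the SQL file relative to the script itself (the directory depth is taken from the diff above) would be:

```python
import codecs
import os

HERE = os.path.dirname(os.path.abspath(__file__))
SQL_PATH = os.path.join(HERE, "..", "..", "..", "config", "create-postgres.sql")

with codecs.open(SQL_PATH, "r", encoding="utf-8") as fp:
    sql = fp.read()
```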
310553e1282231c35093ff355c61129e9f073a0a | src/lib/verify_email_google.py | src/lib/verify_email_google.py | import DNS
from validate_email import validate_email
from DNS.Lib import PackError
def is_google_apps_email(email):
hostname = email[email.find('@')+1:]
try:
mx_hosts = DNS.mxlookup(hostname)
except DNS.ServerError as e:
return False
except PackError as e:
return False
for mx in mx_hosts:
if len(mx) == 2:
priority, host_server = mx
else:
host_server = mx
if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower():
return True
return False
| import DNS
import re
from validate_email import validate_email
from DNS.Lib import PackError
EMAIL_RE = re.compile('^[a-zA-Z0-9\.\@]+$')
def is_valid_email(email):
if email.count('@') != 1:
return False
return bool(EMAIL_RE.match(email))
def is_google_apps_email(email):
if not is_valid_email(email):
return False
hostname = email[email.find('@')+1:]
try:
mx_hosts = DNS.mxlookup(hostname)
except DNS.ServerError as e:
return False
except PackError as e:
return False
for mx in mx_hosts:
if len(mx) == 2:
priority, host_server = mx
else:
host_server = mx
if 'google' in str(host_server).lower() and 'aspmx' in str(host_server).lower():
return True
return False
| Add Google Apps email address validation | Add Google Apps email address validation
 | Python | agpl-3.0 | juposocial/jupo
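Note that the character class above admits only letters, digits, dots and '@', so plus-addressed or hyphenated mailboxes are rejected. A usage sketch, assuming the module is importable as verify_email_google:

```python
from verify_email_google import is_valid_email

assert is_valid_email("user@example.com")
assert not is_valid_email("user+tag@example.com")  # '+' is outside the class
assert not is_valid_email("a@b@c")                 # more than one '@'
```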
0dc1412ad6e7cbe47eda1e476ce16603b7f6a030 | raspigibbon_bringup/scripts/raspigibbon_joint_subscriber.py | raspigibbon_bringup/scripts/raspigibbon_joint_subscriber.py | #!/usr/bin/env python
# coding: utf-8
from futaba_serial_servo import RS30X
import rospy
from sensor_msgs.msg import JointState
class Slave:
def __init__(self):
self.rs = RS30X.RS304MD()
self.sub = rospy.Subscriber("/raspigibbon/master_joint_state", JointState, self.joint_callback, queue_size=10)
for i in range(1,6):
self.rs.setTorque(i, True)
rospy.sleep(0.01)
rospy.loginfo("servo initialized")
def joint_callback(self, msg):
for i in range(1, 6):
self.rs.setAngle(i, msg.position[i-1])
rospy.sleep(0.01)
if __name__ == "__main__":
try:
while not rospy.is_shutdown():
rospy.init_node("slave_joint_state")
slave = Slave()
rospy.spin()
except rospy.ROSInterruptException:
pass
| #!/usr/bin/env python
# coding: utf-8
from futaba_serial_servo import RS30X
import rospy
from sensor_msgs.msg import JointState
class Slave:
def __init__(self):
self.rs = RS30X.RS304MD()
self.sub = rospy.Subscriber("/raspigibbon/master_joint_state", JointState, self.joint_callback, queue_size=10)
for i in range(1,6):
self.rs.setTorque(i, True)
rospy.sleep(0.01)
rospy.loginfo("servo initialized")
def joint_callback(self, msg):
for i in range(1, 6):
self.rs.setAngle(i, msg.position[i-1])
rospy.sleep(0.01)
def shutdown(self):
for i in range(1,6):
self.rs.setTorque(i, False)
rospy.sleep(0.01)
rospy.loginfo("set all servo torque_off")
if __name__ == "__main__":
try:
while not rospy.is_shutdown():
rospy.init_node("slave_joint_state")
slave = Slave()
rospy.on_shutdown(slave.shutdown)
rospy.spin()
except rospy.ROSInterruptException:
pass
 | Add shutdown script to turn off servos after subscribing | Add shutdown script to turn off servos after subscribing
| Python | mit | raspberrypigibbon/raspigibbon_ros |
cf58ebf492cd0dfaf640d2fd8d3cf4e5b2706424 | alembic/versions/47dd43c1491_create_category_tabl.py | alembic/versions/47dd43c1491_create_category_tabl.py | """create category table
Revision ID: 47dd43c1491
Revises: 27bf0aefa49d
Create Date: 2013-05-21 10:41:43.548449
"""
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'
from alembic import op
import sqlalchemy as sa
import datetime
def make_timestamp():
now = datetime.datetime.utcnow()
return now.isoformat()
def upgrade():
op.create_table(
'category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Text, nullable=False, unique=True),
sa.Column('short_name', sa.Text, nullable=False, unique=True),
sa.Column('created', sa.Text, default=make_timestamp),
)
def downgrade():
op.drop_table('category')
| """create category table
Revision ID: 47dd43c1491
Revises: 27bf0aefa49d
Create Date: 2013-05-21 10:41:43.548449
"""
# revision identifiers, used by Alembic.
revision = '47dd43c1491'
down_revision = '27bf0aefa49d'
from alembic import op
import sqlalchemy as sa
import datetime
def make_timestamp():
now = datetime.datetime.utcnow()
return now.isoformat()
def upgrade():
op.create_table(
'category',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.Text, nullable=False, unique=True),
sa.Column('short_name', sa.Text, nullable=False, unique=True),
sa.Column('description', sa.Text, nullable=False),
sa.Column('created', sa.Text, default=make_timestamp),
)
# Add two categories
query = 'INSERT INTO category (name, short_name, description) VALUES (\'Thinking\', \'thinking\', \'Applications where you can help using your skills\')'
op.execute(query)
query = 'INSERT INTO category (name, short_name, description) VALUES (\'Sensing\', \'sensing\', \'Applications where you can help gathering data\')'
op.execute(query)
def downgrade():
op.drop_table('category')
| Add description to the table and populate it with two categories | Add description to the table and populate it with two categories
 | Python | agpl-3.0 | geotagx/geotagx-pybossa-archive,OpenNewsLabs/pybossa,PyBossa/pybossa,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,CulturePlex/pybossa,geotagx/pybossa,harihpr/tweetclickers,stefanhahmann/pybossa,inteligencia-coletiva-lsd/pybossa,jean/pybossa
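Raw INSERT strings need manual quote escaping, as above. An equivalent sketch using Alembic's documented bulk_insert helper with an ad-hoc table object:

```python
from alembic import op
from sqlalchemy.sql import column, table
import sqlalchemy as sa

# Lightweight table description, just enough for the insert.
category = table(
    "category",
    column("name", sa.Text),
    column("short_name", sa.Text),
    column("description", sa.Text),
)

op.bulk_insert(category, [
    {"name": "Thinking", "short_name": "thinking",
     "description": "Applications where you can help using your skills"},
    {"name": "Sensing", "short_name": "sensing",
     "description": "Applications where you can help gathering data"},
])
```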
8b7ab303340ba65aa219103c568ce9d88ea39689 | airmozilla/main/context_processors.py | airmozilla/main/context_processors.py | from django.conf import settings
from airmozilla.main.models import Event
def sidebar(request):
featured = Event.objects.approved().filter(public=True, featured=True)
upcoming = Event.objects.upcoming().order_by('start_time')
if not request.user.is_active:
featured = featured.filter(public=True)
upcoming = upcoming.filter(public=True)
upcoming = upcoming[:settings.UPCOMING_SIDEBAR_COUNT]
return {
'upcoming': upcoming,
'featured': featured
}
| from django.conf import settings
from airmozilla.main.models import Event
def sidebar(request):
featured = Event.objects.approved().filter(featured=True)
upcoming = Event.objects.upcoming().order_by('start_time')
if not request.user.is_active:
featured = featured.filter(public=True)
upcoming = upcoming.filter(public=True)
upcoming = upcoming[:settings.UPCOMING_SIDEBAR_COUNT]
return {
'upcoming': upcoming,
'featured': featured
}
| Fix context processor to correctly display internal featured videos. | Fix context processor to correctly display internal featured videos.
 | Python | bsd-3-clause | EricSekyere/airmozilla,lcamacho/airmozilla,kenrick95/airmozilla,tannishk/airmozilla,a-buck/airmozilla,bugzPDX/airmozilla,ehsan/airmozilla,mythmon/airmozilla,Nolski/airmozilla,blossomica/airmozilla,zofuthan/airmozilla,chirilo/airmozilla,anjalymehla/airmozilla,mozilla/airmozilla,peterbe/airmozilla,anu7495/airmozilla
ee55ce9cc95e0e058cac77f45fac0f899398061e | api/preprint_providers/serializers.py | api/preprint_providers/serializers.py | from rest_framework import serializers as ser
from api.base.utils import absolute_reverse
from api.base.serializers import JSONAPISerializer, LinksField
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
logo_path = ser.CharField(read_only=True)
banner_path = ser.CharField(read_only=True)
links = LinksField({
'self': 'get_absolute_url',
'preprints': 'get_preprints_url'
})
class Meta:
type_ = 'preprint_providers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def get_preprints_url(self, obj):
return absolute_reverse('preprint_providers:preprints-list', kwargs={'provider_id': obj._id})
| from rest_framework import serializers as ser
from api.base.utils import absolute_reverse
from api.base.serializers import JSONAPISerializer, LinksField
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'name',
'description',
'id'
])
name = ser.CharField(required=True)
description = ser.CharField(required=False)
id = ser.CharField(max_length=200, source='_id')
logo_path = ser.CharField(read_only=True)
banner_path = ser.CharField(read_only=True)
links = LinksField({
'self': 'get_absolute_url',
'preprints': 'get_preprints_url',
'external_url': 'get_external_url'
})
class Meta:
type_ = 'preprint_providers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def get_preprints_url(self, obj):
return absolute_reverse('preprint_providers:preprints-list', kwargs={'provider_id': obj._id})
def get_external_url(self, obj):
return obj.external_url
| Add external url to preprint provider serializer | Add external url to preprint provider serializer
 | Python | apache-2.0 | chrisseto/osf.io,adlius/osf.io,samchrisinger/osf.io,laurenrevere/osf.io,cslzchen/osf.io,mluo613/osf.io,binoculars/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,TomBaxter/osf.io,mattclark/osf.io,hmoco/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,icereval/osf.io,aaxelb/osf.io,alexschiller/osf.io,erinspace/osf.io,mfraezz/osf.io,emetsger/osf.io,pattisdr/osf.io,caseyrollins/osf.io,sloria/osf.io,crcresearch/osf.io,chennan47/osf.io,rdhyee/osf.io,saradbowman/osf.io,acshi/osf.io,baylee-d/osf.io
ac44332d53736f1ac3e067eecf1064bcef038b3a | core/platform/transactions/django_transaction_services.py | core/platform/transactions/django_transaction_services.py | # coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides a seam for transaction services."""
__author__ = 'Sean Lip'
def run_in_transaction(fn, *args, **kwargs):
"""Run a function in a transaction."""
# TODO(sll): Actually run the function in a transaction.
return fn(*args, **kwargs)
| # coding: utf-8
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides a seam for transaction services."""
__author__ = 'Sean Lip'
from django.db import transaction
def run_in_transaction(fn, *args, **kwargs):
"""Run a function in a transaction."""
with transaction.commit_on_success():
return fn(*args, **kwargs)
| Add transaction support for django models. | Add transaction support for django models.
 | Python | apache-2.0 | oulan/oppia,directorlive/oppia,google-code-export/oppia,michaelWagner/oppia,edallison/oppia,terrameijar/oppia,Dev4X/oppia,zgchizi/oppia-uc,virajprabhu/oppia,won0089/oppia,sunu/oppia,mit0110/oppia,sanyaade-teachings/oppia,kennho/oppia,rackstar17/oppia,kaffeel/oppia,kevinlee12/oppia,cleophasmashiri/oppia,danieljjh/oppia,openhatch/oh-missions-oppia-beta,nagyistoce/oppia,BenHenning/oppia,CMDann/oppia,whygee/oppia,gale320/oppia,bjvoth/oppia,sdulal/oppia,souravbadami/oppia,mindpin/mindpin_oppia,fernandopinhati/oppia,leandrotoledo/oppia,Cgruppo/oppia,dippatel1994/oppia,VictoriaRoux/oppia,Atlas-Sailed-Co/oppia,aldeka/oppia,MaximLich/oppia,jestapinski/oppia,kingctan/oppia,amitdeutsch/oppia,oppia/oppia,toooooper/oppia,AllanYangZhou/oppia,raju249/oppia,wangsai/oppia,miyucy/oppia,anthkris/oppia,infinyte/oppia,MAKOSCAFEE/oppia,brianrodri/oppia,brylie/oppia,felipecocco/oppia,himanshu-dixit/oppia,sarahfo/oppia,shaz13/oppia,amgowano/oppia,prasanna08/oppia,hazmatzo/oppia,DewarM/oppia,asandyz/oppia,sbhowmik89/oppia,sunu/oh-missions-oppia-beta,anggorodewanto/oppia
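transaction.commit_on_success() dates this change to Django 1.7 or earlier; it was removed in Django 1.8. On modern Django the same seam would be written with transaction.atomic():

```python
from django.db import transaction

def run_in_transaction(fn, *args, **kwargs):
    """Run a function inside a database transaction."""
    with transaction.atomic():
        return fn(*args, **kwargs)
```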
e5bd4884fc7ea4389315d0d2b8ff248bbda9a905 | custom/enikshay/integrations/utils.py | custom/enikshay/integrations/utils.py | from corehq.apps.locations.models import SQLLocation
from dimagi.utils.logging import notify_exception
def is_submission_from_test_location(person_case):
try:
phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
except SQLLocation.DoesNotExist:
message = ("Location with id {location_id} not found. This is the owner for person with id: {person_id}"
.format(location_id=person_case.owner_id, person_id=person_case.case_id))
notify_exception(None, message="[ENIKSHAY] {}".format(message))
return True
return phi_location.metadata.get('is_test', "yes") == "yes"
| from corehq.apps.locations.models import SQLLocation
from custom.enikshay.exceptions import NikshayLocationNotFound
def is_submission_from_test_location(person_case):
try:
phi_location = SQLLocation.objects.get(location_id=person_case.owner_id)
except SQLLocation.DoesNotExist:
raise NikshayLocationNotFound(
"Location with id {location_id} not found. This is the owner for person with id: {person_id}"
.format(location_id=person_case.owner_id, person_id=person_case.case_id)
)
return phi_location.metadata.get('is_test', "yes") == "yes"
| Revert "Fallback is test location" | Revert "Fallback is test location"
This reverts commit 2ba9865fa0f05e9ae244b2513e046c961540fca1.
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq |
78136c619ebafb54e4bd65af3cfd85a8ff67766b | osfclient/tests/test_cloning.py | osfclient/tests/test_cloning.py | """Test `osf clone` command."""
import os
from mock import patch, mock_open, call
from osfclient import OSF
from osfclient.cli import clone
from osfclient.tests.mocks import MockProject
from osfclient.tests.mocks import MockArgs
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_clone_project(OSF_project):
# check that `osf clone` opens files with the right names and modes
args = MockArgs(project='1234')
mock_open_func = mock_open()
with patch('osfclient.cli.open', mock_open_func):
with patch('osfclient.cli.os.makedirs'):
clone(args)
OSF_project.assert_called_once_with('1234')
# check that the project and the files have been accessed
for store in OSF_project.return_value.storages:
assert store._name_mock.called
for f in store.files:
assert f._path_mock.called
fname = f._path_mock.return_value
if fname.startswith('/'):
fname = fname[1:]
full_path = os.path.join('1234',
store._name_mock.return_value,
fname)
assert call(full_path, 'wb') in mock_open_func.mock_calls
| """Test `osf clone` command."""
import os
from mock import patch, mock_open, call
from osfclient import OSF
from osfclient.cli import clone
from osfclient.tests.mocks import MockProject
from osfclient.tests.mocks import MockArgs
@patch.object(OSF, 'project', return_value=MockProject('1234'))
def test_clone_project(OSF_project):
# check that `osf clone` opens files with the right names and modes
args = MockArgs(project='1234')
mock_open_func = mock_open()
with patch('osfclient.cli.open', mock_open_func):
with patch('osfclient.cli.os.makedirs'):
with patch('osfclient.cli.os.getenv', side_effect='SECRET'):
clone(args)
OSF_project.assert_called_once_with('1234')
# check that the project and the files have been accessed
for store in OSF_project.return_value.storages:
assert store._name_mock.called
for f in store.files:
assert f._path_mock.called
fname = f._path_mock.return_value
if fname.startswith('/'):
fname = fname[1:]
full_path = os.path.join('1234',
store._name_mock.return_value,
fname)
assert call(full_path, 'wb') in mock_open_func.mock_calls
| Fix osf clone test that was asking for a password | Fix osf clone test that was asking for a password
| Python | bsd-3-clause | betatim/osf-cli,betatim/osf-cli |
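Editor's note: the fix above stops the test from blocking on a password prompt by patching `os.getenv` as seen from `osfclient.cli`. One subtlety worth knowing: `unittest.mock` treats an iterable `side_effect` as successive return values, so a string hands out one character per call, while `return_value` returns the whole string every time. A minimal, osfclient-independent sketch (the `load_secret` helper is hypothetical):

```python
import os
from unittest.mock import patch

def load_secret():
    # Stand-in for code that would prompt interactively if the variable is unset.
    return os.getenv("OSF_PASSWORD", "would-prompt")

# return_value: every call gets the same object back.
with patch("os.getenv", return_value="SECRET"):
    assert load_secret() == "SECRET"

# side_effect with an iterable: each call consumes the next item,
# so a string yields single characters.
with patch("os.getenv", side_effect="SECRET"):
    assert load_secret() == "S"
    assert load_secret() == "E"
```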
f17baf70d08f47dc4ebb8e0142ce0a3566aa1e9a | tests/window/WINDOW_CAPTION.py | tests/window/WINDOW_CAPTION.py | #!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(200, 200)
w2 = window.Window(200, 200)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
'''Test that the window caption can be set.
Expected behaviour:
Two windows will be opened, one with the caption "Window caption 1"
counting up every second; the other with a Unicode string including
some non-ASCII characters.
Press escape or close either window to finished the test.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import time
import unittest
from pyglet import window
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
w1 = window.Window(400, 200, resizable=True)
w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
last_time = time.time()
while not (w1.has_exit or w2.has_exit):
if time.time() - last_time > 1:
count += 1
w1.set_caption('Window caption %d' % count)
last_time = time.time()
w1.dispatch_events()
w2.dispatch_events()
w1.close()
w2.close()
if __name__ == '__main__':
unittest.main()
| Make windows bigger in this test so the captions can be read. | Make windows bigger in this test so the captions can be read.
Index: tests/window/WINDOW_CAPTION.py
===================================================================
--- tests/window/WINDOW_CAPTION.py (revision 777)
+++ tests/window/WINDOW_CAPTION.py (working copy)
@@ -19,8 +19,8 @@
class WINDOW_CAPTION(unittest.TestCase):
def test_caption(self):
- w1 = window.Window(200, 200)
- w2 = window.Window(200, 200)
+ w1 = window.Window(400, 200, resizable=True)
+ w2 = window.Window(400, 200, resizable=True)
count = 1
w1.set_caption('Window caption %d' % count)
w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
git-svn-id: d4fdfcd4de20a449196f78acc655f735742cd30d@781 14d46d22-621c-0410-bb3d-6f67920f7d95
| Python | bsd-3-clause | regular/pyglet-avbin-optimizations,regular/pyglet-avbin-optimizations,regular/pyglet-avbin-optimizations,regular/pyglet-avbin-optimizations |
eca659b789cc80c7d99bc38e551def972af11607 | cs251tk/student/markdownify/check_submit_date.py | cs251tk/student/markdownify/check_submit_date.py | import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
return min(dates).strftime("%x %X")
| import os
from dateutil.parser import parse
from ...common import run, chdir
def check_dates(spec_id, username, spec, basedir):
""" Port of the CheckDates program from C++
Finds the first submission date for an assignment
by comparing first commits for all files in the spec
and returning the earliest """
basedir = os.path.join(basedir, 'students', username, spec_id)
dates = []
with chdir(basedir):
for file in spec['files']:
# Run a git log on each file with earliest commits listed first
status, res, _ = run(['git', 'log', '--reverse', '--pretty=format:%ad', '--date=iso8601',
os.path.join(basedir, file['filename'])])
# If we didn't get an error, add date to array
if status == 'success':
# Parse the first line
dates.append(parse(res.splitlines()[0]))
# Return earliest date as a string with the format mm/dd/yyyy hh:mm:ss
if not dates:
return "ERROR"
return min(dates).strftime("%x %X")
| Add check for unsuccessful date checks | Add check for unsuccessful date checks
| Python | mit | StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit |
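Editor's note: the early `return "ERROR"` above guards the `min()` call, which raises `ValueError` on an empty sequence; `dates` ends up empty whenever every `git log` invocation fails. A small self-contained illustration of the failure mode and the guard:

```python
from datetime import datetime

dates = []  # e.g. no git log call succeeded

try:
    earliest = min(dates)
except ValueError:
    earliest = "ERROR"  # mirrors the sentinel returned above
assert earliest == "ERROR"

# With at least one element, min() picks the earliest submission date.
dates = [datetime(2017, 3, 1), datetime(2017, 1, 15)]
assert min(dates) == datetime(2017, 1, 15)
```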
9c7ff0d98d324e3a52664f9dcd6fe64334778e00 | web/dbconfig/dbconfigbock7k.py | web/dbconfig/dbconfigbock7k.py | #
# Configuration for the will database
#
import dbconfig
class dbConfigBock7k ( dbconfig.dbConfig ):
# cubedim is a dictionary so it can vary
# size of the cube at resolution
cubedim = { 0: [128, 128, 16] }
#information about the image stack
slicerange = [0,61]
tilesz = [ 256,256 ]
#resolution information -- lowest resolution and list of resolution
resolutions = [ 0 ]
imagesz = { 0: [ 7198, 7352 ] }
# Resize factor to eliminate distortion
zscale = { 0: 1.0 }
| #
# Configuration for the will database
#
import dbconfig
class dbConfigBock7k ( dbconfig.dbConfig ):
# cubedim is a dictionary so it can vary
# size of the cube at resolution
cubedim = { 0: [128, 128, 16],
1: [128, 128, 16],
2: [128, 128, 16],
3: [128, 128, 16] }
#information about the image stack
slicerange = [0,61]
tilesz = [ 256,256 ]
#resolution information -- lowest resolution and list of resolution
resolutions = [ 0, 1, 2, 3 ]
imagesz = { 0: [ 7198, 7352 ],
1: [ 3599, 3676 ],
2: [ 1800, 1838 ],
3: [ 900, 919 ] }
# Resize factor to eliminate distortion
zscale = { 0: 10.0,
1: 5.0,
2: 2.5,
3: 1.25 }
| Expand bock7k to be a multi-resolution project. | Expand bock7k to be a multi-resolution project.
| Python | apache-2.0 | neurodata/ndstore,openconnectome/open-connectome,openconnectome/open-connectome,neurodata/ndstore,neurodata/ndstore,openconnectome/open-connectome,openconnectome/open-connectome,neurodata/ndstore,openconnectome/open-connectome,openconnectome/open-connectome |
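Editor's note: the expanded config above follows standard image-pyramid arithmetic: each resolution level halves the XY extent and halves the z rescaling factor. A sketch that reproduces the configured values from the level-0 numbers (the ceiling rounding is an assumption inferred from the 1800 and 900 entries, not stated in the commit):

```python
import math

base_xy = (7198, 7352)  # level-0 image size
base_zscale = 10.0      # level-0 z rescale factor

for level in range(4):
    xy = tuple(math.ceil(d / 2 ** level) for d in base_xy)
    zscale = base_zscale / 2 ** level
    print(level, xy, zscale)
# 0 (7198, 7352) 10.0
# 1 (3599, 3676) 5.0
# 2 (1800, 1838) 2.5
# 3 (900, 919) 1.25
```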
d82111c5415176ea07674723151f14445e4b52ab | fire_rs/firemodel/test_propagation.py | fire_rs/firemodel/test_propagation.py | import unittest
import fire_rs.firemodel.propagation as propagation
class TestPropagation(unittest.TestCase):
def test_propagate(self):
env = propagation.Environment([[475060.0, 477060.0], [6200074.0, 6202074.0]], wind_speed=4.11, wind_dir=0)
prop = propagation.propagate(env, 10, 20)
# prop.plot(blocking=True)
| import unittest
import fire_rs.firemodel.propagation as propagation
class TestPropagation(unittest.TestCase):
def test_propagate(self):
env = propagation.Environment([[480060.0, 490060.0], [6210074.0, 6220074.0]], wind_speed=4.11, wind_dir=0)
prop = propagation.propagate(env, 10, 20, horizon=3*3600)
# prop.plot(blocking=True)
| Set test area to a burnable one. | [fire-models] Set test area to a burnable one.
| Python | bsd-2-clause | fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop,fire-rs-laas/fire-rs-saop |
d919c1e29645a52e795e85686de6de8f1e57196e | glue/plugins/ginga_viewer/__init__.py | glue/plugins/ginga_viewer/__init__.py | try:
from .client import *
from .qt_widget import *
except ImportError:
import warnings
warnings.warn("Could not import ginga plugin, since ginga is required")
# Register qt client
from ...config import qt_client
qt_client.add(GingaWidget)
| try:
from .client import *
from .qt_widget import *
except ImportError:
import warnings
warnings.warn("Could not import ginga plugin, since ginga is required")
else:
# Register qt client
from ...config import qt_client
qt_client.add(GingaWidget)
| Fix if ginga is not installed | Fix if ginga is not installed | Python | bsd-3-clause | JudoWill/glue,stscieisenhamer/glue,saimn/glue,JudoWill/glue,saimn/glue,stscieisenhamer/glue |
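Editor's note: the patch above leans on the `else` clause of `try`, which runs only when the protected block raised nothing; registration is therefore skipped cleanly when the optional import fails, instead of crashing on the undefined `GingaWidget` name. A generic sketch of the pattern, using a standard-library module as the stand-in dependency:

```python
import warnings

try:
    import json  # stand-in for an optional dependency such as ginga
except ImportError:
    warnings.warn("optional dependency missing; plugin disabled")
else:
    # Only reached when the import succeeded, so its names are safe to use.
    print(json.dumps({"plugin": "registered"}))
```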
ee425b43502054895986c447e4cdae2c7e6c9278 | Lib/fontTools/misc/timeTools.py | Lib/fontTools/misc/timeTools.py | """fontTools.misc.timeTools.py -- miscellaneous routines."""
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import time
import calendar
# OpenType timestamp handling
epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))
def timestampToString(value):
try:
value = time.asctime(time.gmtime(max(0, value + epoch_diff)))
except ValueError:
value = time.asctime(time.gmtime(0))
return value
def timestampFromString(value):
return calendar.timegm(time.strptime(value)) - epoch_diff
def timestampNow():
return int(time.time() - epoch_diff)
| """fontTools.misc.timeTools.py -- miscellaneous routines."""
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import time
import calendar
# OpenType timestamp handling
epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))
def timestampToString(value):
# https://github.com/behdad/fonttools/issues/99#issuecomment-66776810
try:
value = time.asctime(time.gmtime(max(0, value + epoch_diff)))
except (OverflowError, ValueError):
value = time.asctime(time.gmtime(0))
return value
def timestampFromString(value):
return calendar.timegm(time.strptime(value)) - epoch_diff
def timestampNow():
return int(time.time() - epoch_diff)
| Adjust for Python 3.3 change in gmtime() exception type | Adjust for Python 3.3 change in gmtime() exception type
https://github.com/behdad/fonttools/issues/99#issuecomment-66776810
Fixes https://github.com/behdad/fonttools/issues/99
| Python | mit | googlefonts/fonttools,fonttools/fonttools |
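Editor's note: two details above reward a closer look. OpenType timestamps count seconds from 1904-01-01 rather than the Unix epoch, and the exception `time.gmtime()` raises for out-of-range input varies by Python version and platform, hence the widened `except` tuple. A standalone check (the `OSError` case is an extra precaution seen on some Python 3 builds, not part of the commit):

```python
import calendar
import time

# Seconds between the Unix epoch (1970) and the OpenType epoch (1904).
epoch_diff = calendar.timegm((1904, 1, 1, 0, 0, 0, 0, 0, 0))
assert epoch_diff == -2082844800

def safe_asctime(value):
    try:
        return time.asctime(time.gmtime(max(0, value + epoch_diff)))
    except (OverflowError, ValueError, OSError):
        return time.asctime(time.gmtime(0))

print(safe_asctime(0))         # the 1904 epoch, clamped to Unix time 0
print(safe_asctime(10 ** 18))  # out of range -> falls back gracefully
```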
80e98c2291689aca97427abb3b85c89dce1f0af5 | lib/fuzzer/scripts/merge_data_flow.py | lib/fuzzer/scripts/merge_data_flow.py | #!/usr/bin/env python3
#===- lib/fuzzer/scripts/merge_data_flow.py ------------------------------===#
#
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
#===------------------------------------------------------------------------===#
# Merge several data flow traces into one.
# Usage:
# merge_data_flow.py trace1 trace2 ... > result
#===------------------------------------------------------------------------===#
import sys
import fileinput
from array import array
def Merge(a, b):
res = array('b')
for i in range(0, len(a)):
res.append(ord('1' if a[i] == '1' or b[i] == '1' else '0'))
return res.tostring()
def main(argv):
D = {}
for line in fileinput.input():
[F,BV] = line.strip().split(' ')
if F in D:
D[F] = Merge(D[F], BV)
else:
D[F] = BV;
for F in D.keys():
print("%s %s" % (F, D[F]))
if __name__ == '__main__':
main(sys.argv)
| #!/usr/bin/env python3
#===- lib/fuzzer/scripts/merge_data_flow.py ------------------------------===#
#
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
#===------------------------------------------------------------------------===#
# Merge several data flow traces into one.
# Usage:
# merge_data_flow.py trace1 trace2 ... > result
#===------------------------------------------------------------------------===#
import sys
import fileinput
from array import array
def Merge(a, b):
res = array('b')
for i in range(0, len(a)):
res.append(ord('1' if a[i] == '1' or b[i] == '1' else '0'))
return res.tostring()
def main(argv):
D = {}
for line in fileinput.input():
[F,BV] = line.strip().split(' ')
if F in D:
D[F] = Merge(D[F], BV)
else:
D[F] = BV;
for F in D.keys():
print("%s %s" % (F, str(D[F])))
if __name__ == '__main__':
main(sys.argv)
| Fix output format in data flow merge script after Py3 change. | [libFuzzer] Fix output format in data flow merge script after Py3 change.
Reviewers: Dor1s
Reviewed By: Dor1s
Subscribers: delcypher, #sanitizers, llvm-commits
Tags: #llvm, #sanitizers
Differential Revision: https://reviews.llvm.org/D60288
git-svn-id: c199f293c43da69278bea8e88f92242bf3aa95f7@357730 91177308-0d34-0410-b5e6-96231b3b80d8
| Python | apache-2.0 | llvm-mirror/compiler-rt,llvm-mirror/compiler-rt,llvm-mirror/compiler-rt,llvm-mirror/compiler-rt,llvm-mirror/compiler-rt |
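Editor's note: the `str()` wrapper above is a Python 3 bytes/str issue: `array.tostring()` returns `bytes`, and interpolating bytes into a text template embeds the `b'...'` repr. A quick demonstration, using `tobytes()` (the modern name; `tostring()` was a deprecated alias, removed in Python 3.9); decoding is the usual way to get clean text out:

```python
from array import array

bits = array("b", (ord(c) for c in "0101"))
raw = bits.tobytes()        # tostring() was a deprecated alias of this
print("%s" % raw)           # b'0101'  (the bytes repr leaks into the text)
print(raw.decode("ascii"))  # 0101     (an explicit decode gives plain text)
```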
58c056894f0a2f5940a8ec9eb5fd30a57aade4aa | scripts/install_new_database.py | scripts/install_new_database.py | #!/usr/bin/env python3
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
def sanity_check():
sdb = chdb.init_scratch_db()
snippet_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM snippets''')[0]
assert snippet_count > 100
article_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM articles''')[0]
assert article_count > 100
if __name__ == '__main__':
sanity_check()
chdb.install_scratch_db()
| #!/usr/bin/env python3
import os
import sys
_upper_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
if _upper_dir not in sys.path:
sys.path.append(_upper_dir)
import chdb
def sanity_check():
sdb = chdb.init_scratch_db()
snippet_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM snippets''')[0][0]
assert snippet_count > 100
article_count = sdb.execute_with_retry_s(
'''SELECT COUNT(*) FROM articles''')[0][0]
assert article_count > 100
if __name__ == '__main__':
sanity_check()
chdb.install_scratch_db()
| Add the sanity checks, but doing it right this time. | Add the sanity checks, but doing it right this time.
| Python | mit | eggpi/citationhunt,eggpi/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt |
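Editor's note: the `[0][0]` change above reflects DB-API result shapes: query helpers hand back a sequence of row tuples, so a scalar `SELECT COUNT(*)` needs one subscript for the first row and a second for the first column. A minimal sqlite3 analogue:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE snippets (id INTEGER)")
conn.executemany("INSERT INTO snippets VALUES (?)",
                 [(i,) for i in range(150)])

rows = conn.execute("SELECT COUNT(*) FROM snippets").fetchall()
assert rows == [(150,)]   # a list containing one row tuple...
assert rows[0][0] == 150  # ...so two subscripts reach the scalar
```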
fbdc69e218a71e984982a39fc36de19b7cf56f90 | Publishers/SamplePachube.py | Publishers/SamplePachube.py | import clr
from System import *
from System.Net import WebClient
from System.Xml import XmlDocument
from System.Diagnostics import Trace
from System.IO import MemoryStream
url = "http://pachube.com/api/"
apiKey = "40ab667a92d6f892fef6099f38ad5eb31e619dffd793ff8842ae3b00eaf7d7cb"
environmentId = 2065
def Publish(topic, data):
ms = MemoryStream()
Trace.WriteLine("Pachube Sample")
client = WebClient()
client.Headers.Add('X-PachubeApiKey', apiKey)
watts, temp = 25, 44
resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp))
client.Dispose();
return 1
def CreateFullUrl():
return url + str(environmentId) + '.csv'
def Shutdown():
return 1
def GetTopics():
return ["PowerMeter/CC128/Mark"]
| import clr
from System import *
from System.Net import WebClient
from System.Xml import XmlDocument
from System.Diagnostics import Trace
from System.IO import MemoryStream
url = "http://pachube.com/api/"
apiKey = "<Your-Pachube-Api-Key-Here>"
environmentId = -1
def Publish(topic, data):
ms = MemoryStream()
Trace.WriteLine("Pachube Sample")
client = WebClient()
client.Headers.Add('X-PachubeApiKey', apiKey)
watts, temp = 25, 44
resp = client.UploadString(CreateFullUrl(), "PUT", str(watts) + "," + str(temp))
client.Dispose();
return 1
def CreateFullUrl():
return url + str(environmentId) + '.csv'
def Shutdown():
return 1
def GetTopics():
return ["PowerMeter/CC128/Mark"]
| Change to sample pachube script | Change to sample pachube script
| Python | mit | markallanson/sspe,markallanson/sspe |
5b66ef91a1f73563cf869ca455052b037ab9551f | backdrop/write/config/development_environment_sample.py | backdrop/write/config/development_environment_sample.py | # Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'govuk_realtime': 'govuk-realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
| # Copy this file to development_environment.py
# and replace OAuth credentials your dev credentials
TOKENS = {
'_foo_bucket': '_foo_bucket-bearer-token',
'bucket': 'bucket-bearer-token',
'foo': 'foo-bearer-token',
'foo_bucket': 'foo_bucket-bearer-token',
'licensing': 'licensing-bearer-token',
'licensing_journey': 'licensing_journey-bearer-token',
'govuk_realtime': 'govuk_realtime-bearer-token',
'licensing_realtime': 'licensing_realtime-bearer-token',
}
PERMISSIONS = {}
OAUTH_CLIENT_ID = \
"1759c91cdc926eebe5d5c9fce53a58170ad17ba30a22b4b451c377a339a98844"
OAUTH_CLIENT_SECRET = \
"8f205218c0a378e33dccae5a557b4cac766f343a7dbfcb50de2286f03db4273a"
OAUTH_BASE_URL = "http://signon.dev.gov.uk"
| Use consistent naming for tokens | Use consistent naming for tokens
| Python | mit | alphagov/backdrop,alphagov/backdrop,alphagov/backdrop |
7f6c151d8d5c18fb78a5603792ee19738d625aab | python_scripts/extractor_python_readability_server.py | python_scripts/extractor_python_readability_server.py | #!/usr/bin/python
import sys
import glob
sys.path.append("python_scripts/gen-py")
sys.path.append("gen-py/thrift_solr/")
from thrift.transport import TSocket
from thrift.server import TServer
#import thrift_solr
import ExtractorService
import sys
import readability
import readability
def extract_with_python_readability( raw_content ):
doc = readability.Document( raw_content )
return [ u'' + doc.short_title(),
u'' + doc.summary() ]
class ExtractorHandler:
def extract_html( self, raw_html ):
print raw_html
#raw_html = raw_html.encode( 'utf-8' )
ret = extract_with_python_readability( raw_html )
print ret[1]
return ret
handler = ExtractorHandler()
processor = ExtractorService.Processor(handler)
listening_socket = TSocket.TServerSocket(port=9090)
server = TServer.TThreadPoolServer(processor, listening_socket)
print ("[Server] Started")
server.serve()
| #!/usr/bin/python
import sys
import os
import glob
#sys.path.append(os.path.join(os.path.dirname(__file__), "gen-py"))
sys.path.append(os.path.join(os.path.dirname(__file__),"gen-py/thrift_solr/"))
sys.path.append(os.path.dirname(__file__) )
from thrift.transport import TSocket
from thrift.server import TServer
#import thrift_solr
import ExtractorService
import sys
import readability
import readability
def extract_with_python_readability( raw_content ):
doc = readability.Document( raw_content )
return [ u'' + doc.short_title(),
u'' + doc.summary() ]
class ExtractorHandler:
def extract_html( self, raw_html ):
#print raw_html
#raw_html = raw_html.encode( 'utf-8' )
ret = extract_with_python_readability( raw_html )
#print ret[1]
return ret
handler = ExtractorHandler()
processor = ExtractorService.Processor(handler)
listening_socket = TSocket.TServerSocket(port=9090)
server = TServer.TThreadPoolServer(processor, listening_socket)
print ("[Server] Started")
server.serve()
| Fix include path and ascii / utf8 errors. | Fix include path and ascii / utf8 errors.
| Python | agpl-3.0 | AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud |
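Editor's note: the path fix above anchors imports to the script's own directory instead of the process working directory (`os.path.dirname(__file__)` stays correct however the server is launched), while commenting out the raw prints avoids encoding errors on non-ASCII content. A generic sketch of the path idiom, meant to be run as a saved script:

```python
import os
import sys

# Resolve the bundled package directory relative to this file,
# not relative to os.getcwd().
HERE = os.path.dirname(os.path.abspath(__file__))
GEN_PY = os.path.join(HERE, "gen-py", "thrift_solr")

if GEN_PY not in sys.path:
    sys.path.append(GEN_PY)

print(sys.path[-1])  # .../gen-py/thrift_solr, independent of launch directory
```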
fa0e95f5447947f3d2c01d7c5760ad9db53bb73d | api/wph/settings/third_party.py | api/wph/settings/third_party.py | SHELL_PLUS = "ipython"
SOCIAL_AUTH_STEAM_EXTRA_DATA = ['player']
SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/'
SOCIAL_AUTH_LOGIN_ERROR_URL = '/login/error/'
SOCIAL_AUTH_INACTIVE_USER_URL = '/login/inactive/'
SOCIAL_AUTH_NEW_USER_REDIRECT_URL = '/'
SOCIAL_AUTH_PASSWORDLESS = True
SOCIAL_AUTH_PIPELINE = (
'social_core.pipeline.social_auth.social_details',
'social_core.pipeline.social_auth.social_uid',
'social_core.pipeline.social_auth.auth_allowed',
'social_core.pipeline.social_auth.social_user',
'wph.social.pipeline.require_email',
'social_core.pipeline.user.get_username',
# 'social_core.pipeline.mail.mail_validation',
'social_core.pipeline.user.create_user',
'social_core.pipeline.social_auth.associate_user',
'social_core.pipeline.social_auth.load_extra_data',
'social_core.pipeline.user.user_details',
)
| SHELL_PLUS = "ipython"
SOCIAL_AUTH_STEAM_EXTRA_DATA = ['player']
SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/'
SOCIAL_AUTH_LOGIN_ERROR_URL = '/login/error/'
SOCIAL_AUTH_INACTIVE_USER_URL = '/login/inactive/'
SOCIAL_AUTH_NEW_USER_REDIRECT_URL = '/'
SOCIAL_AUTH_PASSWORDLESS = True
SOCIAL_AUTH_PIPELINE = (
'social_core.pipeline.social_auth.social_details',
'social_core.pipeline.social_auth.social_uid',
'social_core.pipeline.social_auth.auth_allowed',
'social_core.pipeline.social_auth.social_user',
'wph.social.pipeline.require_email',
'social_core.pipeline.user.get_username',
# 'social_core.pipeline.mail.mail_validation',
'social_core.pipeline.user.create_user',
# 'social_core.pipeline.social_auth.associate_user',
'social_core.pipeline.social_auth.load_extra_data',
'social_core.pipeline.user.user_details',
)
| Remove assosiate user social auth step | Remove assosiate user social auth step
| Python | mit | prattl/wepickheroes,prattl/wepickheroes,prattl/wepickheroes,prattl/wepickheroes |
2a8a564fbd48fba25c4876ff3d4317152a1d647c | tests/basics/builtin_range.py | tests/basics/builtin_range.py | # test builtin range type
# print
print(range(4))
# bool
print(bool(range(0)))
print(bool(range(10)))
# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))
# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])
# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])
# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)
# bad unary op
try:
-range(1)
except TypeError:
print("TypeError")
# bad subscription (can't store)
try:
range(1)[0] = 1
except TypeError:
print("TypeError")
| # test builtin range type
# print
print(range(4))
# bool
print(bool(range(0)))
print(bool(range(10)))
# len
print(len(range(0)))
print(len(range(4)))
print(len(range(1, 4)))
print(len(range(1, 4, 2)))
print(len(range(1, 4, -1)))
print(len(range(4, 1, -1)))
print(len(range(4, 1, -2)))
# subscr
print(range(4)[0])
print(range(4)[1])
print(range(4)[-1])
# slice
print(range(4)[0:])
print(range(4)[1:])
print(range(4)[1:2])
print(range(4)[1:3])
print(range(4)[1::2])
print(range(4)[1:-2:2])
print(range(1,4)[:])
print(range(1,4)[0:])
print(range(1,4)[1:])
print(range(1,4)[:-1])
print(range(7,-2,-4)[:])
# attrs
print(range(1, 2, 3).start)
print(range(1, 2, 3).stop)
print(range(1, 2, 3).step)
# bad unary op
try:
-range(1)
except TypeError:
print("TypeError")
# bad subscription (can't store)
try:
range(1)[0] = 1
except TypeError:
print("TypeError")
| Test slicing a range that does not start at zero. | tests: Test slicing a range that does not start at zero.
| Python | mit | torwag/micropython,TDAbboud/micropython,dinau/micropython,dmazzella/micropython,pramasoul/micropython,adafruit/micropython,danicampora/micropython,misterdanb/micropython,trezor/micropython,misterdanb/micropython,redbear/micropython,noahwilliamsson/micropython,adafruit/circuitpython,alex-robbins/micropython,torwag/micropython,alex-robbins/micropython,mpalomer/micropython,ernesto-g/micropython,drrk/micropython,trezor/micropython,dinau/micropython,adamkh/micropython,pfalcon/micropython,matthewelse/micropython,xhat/micropython,cloudformdesign/micropython,chrisdearman/micropython,puuu/micropython,ernesto-g/micropython,turbinenreiter/micropython,bvernoux/micropython,alex-robbins/micropython,dxxb/micropython,alex-march/micropython,deshipu/micropython,MrSurly/micropython,adafruit/micropython,hiway/micropython,SHA2017-badge/micropython-esp32,ryannathans/micropython,ernesto-g/micropython,matthewelse/micropython,HenrikSolver/micropython,jmarcelino/pycom-micropython,MrSurly/micropython-esp32,pramasoul/micropython,adafruit/micropython,ganshun666/micropython,puuu/micropython,selste/micropython,martinribelotta/micropython,ruffy91/micropython,omtinez/micropython,Peetz0r/micropython-esp32,hosaka/micropython,lowRISC/micropython,kerneltask/micropython,selste/micropython,micropython/micropython-esp32,misterdanb/micropython,cloudformdesign/micropython,deshipu/micropython,supergis/micropython,tuc-osg/micropython,mianos/micropython,alex-robbins/micropython,xuxiaoxin/micropython,mpalomer/micropython,ChuckM/micropython,blmorris/micropython,turbinenreiter/micropython,PappaPeppar/micropython,Timmenem/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,pramasoul/micropython,redbear/micropython,xhat/micropython,dhylands/micropython,ganshun666/micropython,supergis/micropython,infinnovation/micropython,praemdonck/micropython,cloudformdesign/micropython,mhoffma/micropython,AriZuu/micropython,bvernoux/micropython,turbinenreiter/micropython,bvernoux/micropython,mhoffma/micropython,selste/micropython,xuxiaoxin/micropython,infinnovation/micropython,bvernoux/micropython,dmazzella/micropython,Timmenem/micropython,tralamazza/micropython,dxxb/micropython,adafruit/circuitpython,SHA2017-badge/micropython-esp32,kerneltask/micropython,infinnovation/micropython,vitiral/micropython,misterdanb/micropython,lowRISC/micropython,tobbad/micropython,adamkh/micropython,jmarcelino/pycom-micropython,pfalcon/micropython,tuc-osg/micropython,pramasoul/micropython,omtinez/micropython,blmorris/micropython,puuu/micropython,swegener/micropython,Peetz0r/micropython-esp32,emfcamp/micropython,hosaka/micropython,cloudformdesign/micropython,mianos/micropython,tuc-osg/micropython,xuxiaoxin/micropython,xuxiaoxin/micropython,pozetroninc/micropython,MrSurly/micropython-esp32,misterdanb/micropython,mianos/micropython,swegener/micropython,noahwilliamsson/micropython,pozetroninc/micropython,cwyark/micropython,chrisdearman/micropython,hosaka/micropython,deshipu/micropython,toolmacher/micropython,dhylands/micropython,SHA2017-badge/micropython-esp32,henriknelson/micropython,cwyark/micropython,drrk/micropython,MrSurly/micropython,redbear/micropython,henriknelson/micropython,matthewelse/micropython,mhoffma/micropython,dinau/micropython,trezor/micropython,kerneltask/micropython,feilongfl/micropython,galenhz/micropython,ChuckM/micropython,toolmacher/micropython,alex-march/micropython,dhylands/micropython,torwag/micropython,selste/micropython,EcmaXp/micropython,noahwilliamsson/micropython,galenhz/micropython,micropython/micropython-esp32,f
eilongfl/micropython,ruffy91/micropython,micropython/micropython-esp32,SHA2017-badge/micropython-esp32,kerneltask/micropython,ryannathans/micropython,Timmenem/micropython,vitiral/micropython,chrisdearman/micropython,supergis/micropython,adamkh/micropython,lowRISC/micropython,MrSurly/micropython,dhylands/micropython,blazewicz/micropython,adamkh/micropython,toolmacher/micropython,omtinez/micropython,cwyark/micropython,tuc-osg/micropython,swegener/micropython,danicampora/micropython,Peetz0r/micropython-esp32,dxxb/micropython,xhat/micropython,toolmacher/micropython,ruffy91/micropython,henriknelson/micropython,mpalomer/micropython,oopy/micropython,danicampora/micropython,ChuckM/micropython,deshipu/micropython,EcmaXp/micropython,PappaPeppar/micropython,feilongfl/micropython,ganshun666/micropython,alex-march/micropython,hiway/micropython,trezor/micropython,mhoffma/micropython,dhylands/micropython,AriZuu/micropython,chrisdearman/micropython,puuu/micropython,swegener/micropython,drrk/micropython,mhoffma/micropython,HenrikSolver/micropython,emfcamp/micropython,pfalcon/micropython,turbinenreiter/micropython,AriZuu/micropython,blazewicz/micropython,hiway/micropython,hosaka/micropython,neilh10/micropython,adafruit/circuitpython,EcmaXp/micropython,hosaka/micropython,martinribelotta/micropython,hiway/micropython,feilongfl/micropython,xhat/micropython,pozetroninc/micropython,pfalcon/micropython,supergis/micropython,emfcamp/micropython,HenrikSolver/micropython,praemdonck/micropython,hiway/micropython,vitiral/micropython,oopy/micropython,chrisdearman/micropython,pozetroninc/micropython,noahwilliamsson/micropython,PappaPeppar/micropython,pramasoul/micropython,ryannathans/micropython,xuxiaoxin/micropython,blmorris/micropython,adafruit/micropython,blmorris/micropython,tobbad/micropython,ChuckM/micropython,selste/micropython,matthewelse/micropython,Timmenem/micropython,jmarcelino/pycom-micropython,PappaPeppar/micropython,lowRISC/micropython,EcmaXp/micropython,galenhz/micropython,MrSurly/micropython-esp32,deshipu/micropython,TDAbboud/micropython,matthewelse/micropython,HenrikSolver/micropython,AriZuu/micropython,pozetroninc/micropython,feilongfl/micropython,blazewicz/micropython,henriknelson/micropython,vitiral/micropython,SHA2017-badge/micropython-esp32,omtinez/micropython,bvernoux/micropython,tralamazza/micropython,adafruit/circuitpython,galenhz/micropython,jmarcelino/pycom-micropython,turbinenreiter/micropython,AriZuu/micropython,xhat/micropython,tobbad/micropython,TDAbboud/micropython,danicampora/micropython,ChuckM/micropython,infinnovation/micropython,tralamazza/micropython,martinribelotta/micropython,blmorris/micropython,MrSurly/micropython,noahwilliamsson/micropython,henriknelson/micropython,martinribelotta/micropython,redbear/micropython,puuu/micropython,ryannathans/micropython,ernesto-g/micropython,omtinez/micropython,adamkh/micropython,dxxb/micropython,EcmaXp/micropython,dmazzella/micropython,mianos/micropython,torwag/micropython,cwyark/micropython,drrk/micropython,danicampora/micropython,ruffy91/micropython,ernesto-g/micropython,TDAbboud/micropython,MrSurly/micropython-esp32,drrk/micropython,ganshun666/micropython,neilh10/micropython,MrSurly/micropython,alex-march/micropython,neilh10/micropython,adafruit/circuitpython,TDAbboud/micropython,jmarcelino/pycom-micropython,cwyark/micropython,ruffy91/micropython,praemdonck/micropython,swegener/micropython,cloudformdesign/micropython,Peetz0r/micropython-esp32,pfalcon/micropython,infinnovation/micropython,supergis/micropython,Peetz0r/micropython-esp32,blazewicz
/micropython,dxxb/micropython,Timmenem/micropython,matthewelse/micropython,oopy/micropython,PappaPeppar/micropython,tuc-osg/micropython,trezor/micropython,praemdonck/micropython,ganshun666/micropython,lowRISC/micropython,oopy/micropython,tobbad/micropython,dinau/micropython,mianos/micropython,tralamazza/micropython,mpalomer/micropython,tobbad/micropython,dmazzella/micropython,praemdonck/micropython,redbear/micropython,alex-march/micropython,neilh10/micropython,galenhz/micropython,vitiral/micropython,toolmacher/micropython,neilh10/micropython,oopy/micropython,blazewicz/micropython,micropython/micropython-esp32,kerneltask/micropython,adafruit/micropython,mpalomer/micropython,emfcamp/micropython,alex-robbins/micropython,martinribelotta/micropython,emfcamp/micropython,adafruit/circuitpython,ryannathans/micropython,torwag/micropython,HenrikSolver/micropython,dinau/micropython |
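Editor's note: the added cases above pin down the CPython behaviour MicroPython is matching: slicing a `range` yields another `range` with recomputed bounds (not a list), and since Python 3.3 two ranges compare equal when they represent the same sequence. A worked check runnable under CPython:

```python
r = range(7, -2, -4)            # the elements are 7, 3, -1
assert list(r) == [7, 3, -1]
assert isinstance(r[:], range)  # slicing returns a range, not a list
assert r[:] == r                # equal as sequences

s = range(1, 4)[:-1]            # dropping the last element keeps it a range
assert s == range(1, 3)
assert list(s) == [1, 2]
print("all range-slice checks passed")
```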
73cb3c6883940e96e656b9b7dd6033ed2e41cb33 | custom/intrahealth/reports/recap_passage_report_v2.py | custom/intrahealth/reports/recap_passage_report_v2.py | from __future__ import absolute_import
from __future__ import unicode_literals
from memoized import memoized
from custom.intrahealth.filters import RecapPassageLocationFilter2, FRMonthFilter, FRYearFilter
from custom.intrahealth.sqldata import RecapPassageData2, DateSource2
from custom.intrahealth.reports.tableu_de_board_report_v2 import MultiReport
class RecapPassageReport2(MultiReport):
title = "Recap Passage NEW"
name = "Recap Passage NEW"
slug = 'recap_passage2'
report_title = "Recap Passage"
exportable = True
default_rows = 10
fields = [FRMonthFilter, FRYearFilter, RecapPassageLocationFilter2]
def config_update(self, config):
if self.location and self.location.location_type_name.lower() == 'pps':
config['location_id'] = self.location.location_id
@property
@memoized
def data_providers(self):
dates = DateSource2(config=self.report_config).rows
data_providers = []
for date in dates:
config = self.report_config
config.update(dict(startdate=date, enddate=date))
data_providers.append(RecapPassageData2(config=config))
if not data_providers:
data_providers.append(RecapPassageData2(config=self.report_config))
return data_providers
| from __future__ import absolute_import
from __future__ import unicode_literals
from memoized import memoized
from corehq.apps.reports.standard import MonthYearMixin
from custom.intrahealth.filters import RecapPassageLocationFilter2, FRMonthFilter, FRYearFilter
from custom.intrahealth.sqldata import RecapPassageData2, DateSource2
from custom.intrahealth.reports.tableu_de_board_report_v2 import MultiReport
class RecapPassageReport2(MonthYearMixin, MultiReport):
title = "Recap Passage NEW"
name = "Recap Passage NEW"
slug = 'recap_passage2'
report_title = "Recap Passage"
exportable = True
default_rows = 10
fields = [FRMonthFilter, FRYearFilter, RecapPassageLocationFilter2]
def config_update(self, config):
if self.location and self.location.location_type_name.lower() == 'pps':
config['location_id'] = self.location.location_id
@property
@memoized
def data_providers(self):
dates = DateSource2(config=self.report_config).rows
data_providers = []
for date in dates:
config = self.report_config
config.update(dict(startdate=date, enddate=date))
data_providers.append(RecapPassageData2(config=config))
if not data_providers:
data_providers.append(RecapPassageData2(config=self.report_config))
return data_providers
| Fix month filter for recap passage report | Fix month filter for recap passage report
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq |
23a88191e5d827dea84ad533853657110c94c840 | app/public/views.py | app/public/views.py | from flask import Blueprint, render_template, redirect, session, url_for
from app.decorators import login_required
blueprint = Blueprint('public', __name__)
@blueprint.route('/')
def home():
"""Return Home Page"""
return render_template('public/index.html')
@blueprint.route('/login', methods=['GET', 'POST'])
def login():
"""Return Login page"""
session['logged_in'] = True
return render_template('public/login.html')
@blueprint.route('/logout')
def logout():
session.pop('logged_in', None)
return redirect(url_for('home'))
| import os
from flask import Blueprint, redirect, render_template, request, session, url_for
from app.decorators import login_required
ADMIN_USERNAME = os.environ['CUSTOMER_INFO_ADMIN_USERNAME']
ADMIN_PASSWORD_HASH = os.environ['CUSTOMER_INFO_ADMIN_PASSWORD_HASH']
blueprint = Blueprint('public', __name__)
@blueprint.route('/')
def home():
"""Return Home Page"""
return render_template('public/index.html')
def _validate_credentials(username, password):
return (username == ADMIN_USERNAME and
check_password_hash(ADMIN_PASSWORD_HASH, password))
@blueprint.route('/login', methods=['GET', 'POST'])
def login():
"""Return Login page"""
error = None
if request.method == 'POST':
username = request.form['username']
password = request.form['password']
if _validate_credentials(username, password):
session['logged_in'] = True
return redirect(url_for('customer/index.html'))
else:
error = 'Invalid username or password'
return render_template('public/login.html', error=error)
@blueprint.route('/logout')
def logout():
session.pop('logged_in', None)
return redirect(url_for('home'))
| Add logic to verify and login admin | Add logic to verify and login admin
| Python | apache-2.0 | ueg1990/customer-info,ueg1990/customer-info |
9c9fff8617a048a32cbff3fb72b3b3ba23476996 | thinc/neural/_classes/softmax.py | thinc/neural/_classes/softmax.py | from .affine import Affine
from ... import describe
from ...describe import Dimension, Synapses, Biases
@describe.attributes(
W=Synapses("Weights matrix",
lambda obj: (obj.nO, obj.nI),
lambda W, ops: None)
)
class Softmax(Affine):
def predict(self, input__BI):
output__BO = self.ops.affine(self.W, self.b, input__BI)
self.ops.softmax(output__BO, inplace=True)
return output__BO
def begin_update(self, input__BI, drop=0.):
output__BO = self.predict(input__BI)
def finish_update(grad__BO, sgd=None):
self.d_W += self.ops.batch_outer(grad__BO, input__BI)
self.d_b += grad__BO.sum(axis=0)
if sgd is not None:
sgd(self._mem.weights, self._mem.gradient, key=self.id)
return self.ops.batch_dot(grad__BO, self.W.T)
return output__BO, finish_update
| from .affine import Affine
from ... import describe
from ...describe import Dimension, Synapses, Biases
@describe.attributes(
W=Synapses("Weights matrix",
lambda obj: (obj.nO, obj.nI),
lambda W, ops: None)
)
class Softmax(Affine):
name = 'softmax'
def predict(self, input__BI):
output__BO = self.ops.affine(self.W, self.b, input__BI)
self.ops.softmax(output__BO, inplace=True)
return output__BO
def begin_update(self, input__BI, drop=0.):
output__BO = self.predict(input__BI)
def finish_update(grad__BO, sgd=None):
self.d_W += self.ops.batch_outer(grad__BO, input__BI)
self.d_b += grad__BO.sum(axis=0)
if sgd is not None:
sgd(self._mem.weights, self._mem.gradient,
key=id(self._mem))
return self.ops.batch_dot(grad__BO, self.W.T)
return output__BO, finish_update
| Fix passing of params to optimizer in Softmax | Fix passing of params to optimizer in Softmax
| Python | mit | spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc |
0c6dfa4ad297562ec263a8e98bb75d836d2ab054 | src/python/expedient/ui/html/forms.py | src/python/expedient/ui/html/forms.py | '''
Created on Jun 20, 2010
@author: jnaous
'''
from django import forms
from expedient.ui.html.models import SliceFlowSpace
class FlowSpaceForm(forms.ModelForm):
"""
Form to edit flowspace.
"""
class Meta:
model = SliceFlowSpace
exclude = ["slice"]
| '''
Created on Jun 20, 2010
@author: jnaous
'''
from django import forms
from openflow.plugin.models import FlowSpaceRule
class FlowSpaceForm(forms.ModelForm):
"""
Form to edit flowspace.
"""
class Meta:
model = FlowSpaceRule
def __init__(self, sliver_qs, *args, **kwargs):
super(FlowSpaceForm, self).__init__(*args, **kwargs)
self.fields["slivers"].queryset = sliver_qs
| Modify FlowSpaceForm to use actual stored rules | Modify FlowSpaceForm to use actual stored rules
| Python | bsd-3-clause | avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf,avlach/univbris-ocf |
cf1da65820085a84eee51884431b0020d3018f23 | bot/project_info.py | bot/project_info.py | # Shared project info
name = 'telegram-bot-framework'
description = 'Python Telegram bot API framework'
url = 'https://github.com/alvarogzp/telegram-bot-framework'
author_name = 'Alvaro Gutierrez Perez'
author_email = '[email protected]'
authors_credits = (
("@AlvaroGP", "main developer"),
("@KouteiCheke", "i18n & UI/UX support")
)
is_open_source = True
license_name = 'GNU AGPL 3.0+'
license_url = 'https://www.gnu.org/licenses/agpl-3.0.en.html'
donation_addresses = ()
| # Shared project info
name = 'telegram-bot-framework'
description = 'Python Telegram bot API framework'
url = 'https://github.com/alvarogzp/telegram-bot-framework'
author_name = 'Alvaro Gutierrez Perez'
author_email = '[email protected]'
authors_credits = (
("@AlvaroGP", "main developer"),
("@KouteiCheke", "i18n & UI/UX support")
)
is_open_source = True
license_name = 'GNU AGPL 3.0+'
license_url = 'https://www.gnu.org/licenses/agpl-3.0.en.html'
donation_addresses = (
("Bitcoin", "36rwcSgcU1H9fuMvZoebZD3auus6h9wVXk"),
("Bitcoin (bech32 format)", "bc1q4943c5p5dl0hujmmcg2g0568hetynajd3qqtv0")
)
| Add bitcoin address to donation addresses | Add bitcoin address to donation addresses
| Python | agpl-3.0 | alvarogzp/telegram-bot,alvarogzp/telegram-bot |
2adf8e8bbf1d0f623e14b8490d511ac45cbb7430 | djangochurch_data/management/commands/djangochurchimages.py | djangochurch_data/management/commands/djangochurchimages.py | import os.path
from blanc_basic_assets.models import Image
from django.apps import apps
from django.core.files import File
from django.core.management.base import BaseCommand
IMAGE_LIST = [
(1, 'remember.jpg'),
(2, 'sample-image-1.jpg'),
(3, 'sample-image-2.jpg'),
(4, 'sample-image-3.jpg'),
(5, 'sample-image-4.jpg'),
]
class Command(BaseCommand):
help = 'Load Django Church images'
def handle(self, directory=None, *args, **options):
image_dir = os.path.join(apps.get_app_path('djangochurch_data'), 'images')
for image_id, image_name in IMAGE_LIST:
self.stdout.write('Importing: %s' % (image_name,))
image = Image.objects.get(id=image_id)
image_file = os.path.join(image_dir, image_name)
with open(image_file, 'rb') as f:
image.file.save(image_name, File(f))
| import os.path
from blanc_basic_assets.models import Image
from django.apps import apps
from django.core.files import File
from django.core.management.base import BaseCommand
IMAGE_LIST = [
(1, 'remember.jpg'),
(2, 'sample-image-1.jpg'),
(3, 'sample-image-2.jpg'),
(4, 'sample-image-3.jpg'),
(5, 'sample-image-4.jpg'),
]
class Command(BaseCommand):
help = 'Load Django Church images'
def handle(self, directory=None, *args, **options):
data_app = apps.get_app_config('djangochurch_data')
image_dir = os.path.join(data_app.path, 'images')
for image_id, image_name in IMAGE_LIST:
self.stdout.write('Importing: %s' % (image_name,))
image = Image.objects.get(id=image_id)
image_file = os.path.join(image_dir, image_name)
with open(image_file, 'rb') as f:
image.file.save(image_name, File(f))
| Use updated app config for getting the path | Use updated app config for getting the path
Prevent warning with Django 1.8, fixes #3
| Python | bsd-3-clause | djangochurch/djangochurch-data |
43e3df5a07caa1370e71858f593c9c8bd73d1e2f | cloudly/rqworker.py | cloudly/rqworker.py | from rq import Worker, Queue, Connection
from rq.job import Job
from cloudly.cache import redis
from cloudly.memoized import Memoized
def enqueue(function, *args):
return _get_queue().enqueue(function, *args)
def fetch_job(job_id):
return Job.fetch(job_id, redis)
@Memoized
def _get_queue():
return Queue(connection=redis)
def work(setup_fct=None):
if setup_fct:
setup_fct()
listen = ['high', 'default', 'low']
with Connection(redis):
worker = Worker(map(Queue, listen))
worker.work()
if __name__ == '__main__':
work()
| from rq import Worker, Queue, Connection
from rq.job import Job
from cloudly.cache import redis
from cloudly.memoized import Memoized
def enqueue(function, *args, **kwargs):
return _get_queue().enqueue(function, *args, **kwargs)
def fetch_job(job_id):
return Job.fetch(job_id, redis)
@Memoized
def _get_queue():
return Queue(connection=redis)
def work(setup_fct=None):
if setup_fct:
setup_fct()
listen = ['high', 'default', 'low']
with Connection(redis):
worker = Worker(map(Queue, listen))
worker.work()
if __name__ == '__main__':
work()
| Fix missing `kwargs` argument to enqueue. | Fix missing `kwargs` argument to enqueue.
| Python | mit | ooda/cloudly,ooda/cloudly |
0c0e81798b078547bc5931c26dd2b0ab6507db94 | devilry/project/common/devilry_test_runner.py | devilry/project/common/devilry_test_runner.py | import warnings
from django.test.runner import DiscoverRunner
from django.utils.deprecation import RemovedInDjango20Warning, RemovedInDjango110Warning
class DevilryTestRunner(DiscoverRunner):
def setup_test_environment(self, **kwargs):
# warnings.filterwarnings('ignore', category=RemovedInDjango)
super(DevilryTestRunner, self).setup_test_environment(**kwargs)
warnings.filterwarnings('ignore', category=RemovedInDjango20Warning)
warnings.filterwarnings('ignore', category=DeprecationWarning)
warnings.filterwarnings('ignore', category=RemovedInDjango110Warning)
| import warnings
from django.test.runner import DiscoverRunner
from django.utils.deprecation import RemovedInDjango20Warning
class DevilryTestRunner(DiscoverRunner):
def setup_test_environment(self, **kwargs):
# warnings.filterwarnings('ignore', category=RemovedInDjango)
super(DevilryTestRunner, self).setup_test_environment(**kwargs)
warnings.filterwarnings('ignore', category=RemovedInDjango20Warning)
warnings.filterwarnings('ignore', category=DeprecationWarning)
| Update warning ignores for Django 1.10. | project...DevilryTestRunner: Update warning ignores for Django 1.10.
| Python | bsd-3-clause | devilry/devilry-django,devilry/devilry-django,devilry/devilry-django,devilry/devilry-django |
c9402c1685a3351a9a39fe433fa343b58f895960 | Lib/fontTools/encodings/codecs_test.py | Lib/fontTools/encodings/codecs_test.py | from __future__ import print_function, division, absolute_import, unicode_literals
from fontTools.misc.py23 import *
import unittest
import fontTools.encodings.codecs # Not to be confused with "import codecs"
class ExtendedCodecsTest(unittest.TestCase):
def test_decode(self):
self.assertEqual(b'x\xfe\xfdy'.decode(encoding="x-mac-japanese-ttx"),
unichr(0x78)+unichr(0x2122)+unichr(0x00A9)+unichr(0x79))
def test_encode(self):
self.assertEqual(b'x\xfe\xfdy',
(unichr(0x78)+unichr(0x2122)+unichr(0x00A9)+unichr(0x79)).encode(encoding="x-mac-japanese-ttx"))
if __name__ == '__main__':
unittest.main()
| from __future__ import print_function, division, absolute_import, unicode_literals
from fontTools.misc.py23 import *
import unittest
import fontTools.encodings.codecs # Not to be confused with "import codecs"
class ExtendedCodecsTest(unittest.TestCase):
def test_decode(self):
self.assertEqual(b'x\xfe\xfdy'.decode(encoding="x-mac-japanese-ttx"),
unichr(0x78)+unichr(0x2122)+unichr(0x00A9)+unichr(0x79))
def test_encode(self):
self.assertEqual(b'x\xfe\xfdy',
(unichr(0x78)+unichr(0x2122)+unichr(0x00A9)+unichr(0x79)).encode("x-mac-japanese-ttx"))
if __name__ == '__main__':
unittest.main()
| Fix test on Python 2.6 | Fix test on Python 2.6
| Python | mit | fonttools/fonttools,googlefonts/fonttools |
2bfcbebe6535e2ea36cf969287e3ec7f5fe0cf86 | datapackage_pipelines/specs/hashers/hash_calculator.py | datapackage_pipelines/specs/hashers/hash_calculator.py | import hashlib
from datapackage_pipelines.utilities.extended_json import json
from ..errors import SpecError
from .dependency_resolver import resolve_dependencies
class HashCalculator(object):
def __init__(self):
self.all_pipeline_ids = {}
def calculate_hash(self, spec):
cache_hash = None
if spec.pipeline_id in self.all_pipeline_ids:
message = 'Duplicate key {0} in {1}' \
.format(spec.pipeline_id, spec.abspath)
spec.errors.append(SpecError('Duplicate Pipeline Id', message))
else:
cache_hash = resolve_dependencies(spec, self.all_pipeline_ids)
if len(spec.errors) > 0:
return cache_hash
for step in spec.pipeline_details['pipeline']:
m = hashlib.md5()
m.update(cache_hash.encode('ascii'))
with open(step['executor'], 'rb') as f:
m.update(f.read())
m.update(json.dumps(step, ensure_ascii=True, sort_keys=True)
.encode('ascii'))
cache_hash = m.hexdigest()
step['_cache_hash'] = cache_hash
self.all_pipeline_ids[spec.pipeline_id] = spec
spec.cache_hash = cache_hash
| import hashlib
from datapackage_pipelines.utilities.extended_json import json
from ..errors import SpecError
from .dependency_resolver import resolve_dependencies
class HashCalculator(object):
def __init__(self):
self.all_pipeline_ids = {}
def calculate_hash(self, spec):
cache_hash = None
if spec.pipeline_id in self.all_pipeline_ids:
message = 'Duplicate key {0} in {1}' \
.format(spec.pipeline_id, spec.path)
spec.errors.append(SpecError('Duplicate Pipeline Id', message))
else:
cache_hash = resolve_dependencies(spec, self.all_pipeline_ids)
if len(spec.errors) > 0:
return cache_hash
for step in spec.pipeline_details['pipeline']:
m = hashlib.md5()
m.update(cache_hash.encode('ascii'))
with open(step['executor'], 'rb') as f:
m.update(f.read())
m.update(json.dumps(step, ensure_ascii=True, sort_keys=True)
.encode('ascii'))
cache_hash = m.hexdigest()
step['_cache_hash'] = cache_hash
self.all_pipeline_ids[spec.pipeline_id] = spec
spec.cache_hash = cache_hash
| Fix error in error log | Fix error in error log
| Python | mit | frictionlessdata/datapackage-pipelines,frictionlessdata/datapackage-pipelines,frictionlessdata/datapackage-pipelines |
e201f59f25b3f7822531bfbdc6300178e2d2e285 | angr/engines/soot/static_dispatcher.py | angr/engines/soot/static_dispatcher.py |
from archinfo.arch_soot import SootMethodDescriptor
# TODO implement properly
# this will need the expression, the class hierarchy, and the position of the instruction (for invoke-super)
# this will also need the current state to try to figure out the dynamic type
def resolve_method(state, expr):
return SootMethodDescriptor(expr.class_name, expr.method_name, expr.method_params)
|
from archinfo.arch_soot import SootMethodDescriptor
from cle.errors import CLEError
import logging
l = logging.getLogger('angr.engines.soot.static_dispatcher')
# TODO implement properly
# this will need the expression, the class hierarchy, and the position of the instruction (for invoke-super)
# this will also need the current state to try to figure out the dynamic type
def resolve_method(state, expr):
# get binary containing the method
jar = state.regs._ip_binary
try:
# get all methods matching class- and method-name
methods = list(jar.get_method(expr.method_name, expr.class_name))
except CLEError:
# No methods found
# => We are executing code that is not in CLE (typically library code)
# Fallback: use only infos from the invocation, so we can still use SimProcedures
l.warning("Couldn't find method %s.%s" % (expr.method_name, expr.class_name))
return SootMethodDescriptor(expr.class_name, expr.method_name, expr.method_params)
else:
if len(methods) != 1:
# Found several methods matching class- and method-name
# TODO: use information about the function signature to find the right one
l.warning("Function %s is ambiguous in class %s" % (expr.method_name, expr.class_name))
return SootMethodDescriptor.from_soot_method(methods[0])
# return SootMethodDescriptor(expr.class_name, expr.method_name, expr.method_params)
| Add more attributes to resolved method | Add more attributes to resolved method
| Python | bsd-2-clause | iamahuman/angr,angr/angr,iamahuman/angr,schieb/angr,angr/angr,angr/angr,schieb/angr,schieb/angr,iamahuman/angr |
979d84f965b0118f86a8df7aa0311f65f8e36170 | indra/tools/reading/readers/trips/__init__.py | indra/tools/reading/readers/trips/__init__.py | from indra.tools.reading.readers.core import EmptyReader
from indra.sources import trips
class TripsReader(EmptyReader):
"""A stand-in for TRIPS reading.
Currently, we do not run TRIPS (more specifically DRUM) regularly at large
scales, however on occasion we have outputs from TRIPS that were generated
a while ago.
"""
name = 'TRIPS'
def __init__(self, *args, **kwargs):
self.version = self.get_version()
return
def _read(self, *args, **kwargs):
return []
@classmethod
def get_version(cls):
return 'STATIC'
@staticmethod
def get_processor(content):
return trips.process_xml(content)
| import os
import subprocess as sp
from indra.tools.reading.readers.core import Reader
from indra.sources.trips import client, process_xml
from indra_db import formats
class TripsReader(Reader):
"""A stand-in for TRIPS reading.
Currently, we do not run TRIPS (more specifically DRUM) regularly at large
scales, however on occasion we have outputs from TRIPS that were generated
a while ago.
"""
name = 'TRIPS'
result_format = formats.XML
def __init__(self, *args, **kwargs):
self.version = self.get_version()
return
def _read(self, content_iter, verbose=False, log=False, n_per_proc=None):
# Start trips running
if os.environ.get("IN_TRIPS_DOCKER", 'false') != 'true':
return []
p = sp.Popen('/sw/drum/bin/startup.sh', stdout=sp.PIPE,
stderr=sp.STDOUT)
service_endpoint = 'http://localhost:80/cgi/'
service_host = 'drum'
# Process all the content.
for content in content_iter:
html = client.send_query(content.get_text(),
service_endpoint=service_endpoint,
service_host=service_host)
xml = client.get_xml(html)
self.add_result(content.get_id(), xml)
return self.results
@classmethod
def get_version(cls):
return 'STATIC'
@staticmethod
def get_processor(reading_content):
return process_xml(reading_content)
| Implement the basics of the TRIPS reader. | Implement the basics of the TRIPS reader.
| Python | bsd-2-clause | sorgerlab/indra,johnbachman/belpy,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/indra,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,bgyori/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,bgyori/indra,bgyori/indra |
493ce497e5d84d8db9c37816aefea9099df42e90 | pywatson/answer/synonym.py | pywatson/answer/synonym.py | class Synonym(object):
def __init__(self):
pass
| from pywatson.util.map_initializable import MapInitializable
class SynSetSynonym(MapInitializable):
def __init__(self, is_chosen, value, weight):
self.is_chosen = is_chosen
self.value = value
self.weight = weight
@classmethod
def from_mapping(cls, syn_mapping):
return cls(is_chosen=syn_mapping['isChosen'],
value=syn_mapping['value'],
weight=syn_mapping['weight'])
class SynSet(MapInitializable):
def __init__(self, name, synonyms=()):
self.name = name
self.synonyms = tuple(synonyms)
@classmethod
def from_mapping(cls, synset_mapping):
return cls(name=synset_mapping[0]['name'],
synonyms=(SynSetSynonym.from_mapping(s) for s in synset_mapping[0]['synonym']))
class Synonym(MapInitializable):
def __init__(self, part_of_speech, lemma, value, syn_set):
self.part_of_speech = part_of_speech
self.lemma = lemma
self.value = value
self.syn_set = syn_set
@classmethod
def from_mapping(cls, synonym_mapping):
return cls(part_of_speech=synonym_mapping['partOfSpeech'],
lemma=synonym_mapping['lemma'],
value=synonym_mapping['value'],
syn_set=SynSet.from_mapping(synonym_mapping['synSet']))
| Add Synonym and related classes | Add Synonym and related classes
| Python | mit | sherlocke/pywatson |
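Each of the new classes uses the same alternate-constructor pattern: a from_mapping classmethod that translates raw JSON keys such as isChosen or partOfSpeech into constructor arguments. A self-contained sketch of the pattern:
class Example:
    def __init__(self, is_chosen, value):
        self.is_chosen = is_chosen
        self.value = value

    @classmethod
    def from_mapping(cls, mapping):
        # Key-name translation lives here, not in __init__, so plain
        # constructor calls stay convenient in tests.
        return cls(is_chosen=mapping['isChosen'], value=mapping['value'])

e = Example.from_mapping({'isChosen': True, 'value': 'run'})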
10426b049baeceb8dda1390650503e1d75ff8b64 | us_ignite/common/management/commands/common_load_fixtures.py | us_ignite/common/management/commands/common_load_fixtures.py | import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from us_ignite.profiles.models import Interest
INTEREST_LIST = (
('SDN', 'sdn'),
('OpenFlow', 'openflow'),
('Ultra fast', 'ultra-fast'),
('Advanced wireless', 'advanced-wireless'),
('Low-latency', 'low-latency'),
('Local cloud/edge computing', 'local-cloud-edge-computing'),
('Healthcare', 'healthcare'),
('Education & Workforce development', 'education-workforce-development'),
('Energy', 'energy'),
('Transportation', 'transportation'),
('Advanced Manufacturing', 'advanced-manufacturing'),
('Public Safety', 'public-safety'),
)
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Updated site URL."
for name, slug in INTEREST_LIST:
interest, is_new = (Interest.objects
.get_or_create(name=name, slug=slug))
if is_new:
print u'Imported interest: %s' % interest
| import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from us_ignite.profiles.models import Category, Interest
INTEREST_LIST = (
('SDN', 'sdn'),
('OpenFlow', 'openflow'),
('Ultra fast', 'ultra-fast'),
('Advanced wireless', 'advanced-wireless'),
('Low-latency', 'low-latency'),
('Local cloud/edge computing', 'local-cloud-edge-computing'),
('Healthcare', 'healthcare'),
('Education & Workforce development', 'education-workforce-development'),
('Energy', 'energy'),
('Transportation', 'transportation'),
('Advanced Manufacturing', 'advanced-manufacturing'),
('Public Safety', 'public-safety'),
)
CATEGORY_LIST = [
'Developer',
'Community leader',
'Subject matter expert',
'Designer',
'Project manager',
'Network engineer',
'Funder',
'Press/media',
'Interested party',
]
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print u'Updated site URL.'
for name, slug in INTEREST_LIST:
interest, is_new = (Interest.objects
.get_or_create(name=name, slug=slug))
if is_new:
print u'Imported interest: %s.' % interest
for name in CATEGORY_LIST:
category, is_new = Category.objects.get_or_create(name=name)
if is_new:
print u'Imported category: %s.' % category
| Add initial fixtures for the categories. | Add initial fixtures for the categories.
| Python | bsd-3-clause | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite |
fb53f2ed0e6337d6f5766f47cb67c204c89c0568 | src/oauth2client/__init__.py | src/oauth2client/__init__.py | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client library for using OAuth2, especially with Google APIs."""
__version__ = '4.1.3'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code'
GOOGLE_REVOKE_URI = 'https://oauth2.googleapis.com/revoke'
GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token'
GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
| # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client library for using OAuth2, especially with Google APIs."""
__version__ = '4.1.3'
GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth'
GOOGLE_DEVICE_URI = 'https://oauth2.googleapis.com/device/code'
GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
GOOGLE_TOKEN_URI = 'https://oauth2.googleapis.com/token'
GOOGLE_TOKEN_INFO_URI = 'https://oauth2.googleapis.com/tokeninfo'
| Fix oauth2 revoke URI, new URL doesn't seem to work | Fix oauth2 revoke URI, new URL doesn't seem to work
| Python | apache-2.0 | GAM-team/GAM,GAM-team/GAM |
83e820209f9980e6c9103908b14ff07fee23dc41 | getCheckedOut.py | getCheckedOut.py | import requests
from bs4 import BeautifulSoup
import json
from dotenv import load_dotenv
import os
load_dotenv(".env")
s = requests.Session()
r = s.get("https://kcls.bibliocommons.com/user/login", verify=False)
payload = {
"name": os.environ.get("USER"),
"user_pin": os.environ.get("PIN")
}
s.post("https://kcls.bibliocommons.com/user/login", data=payload)
r = s.get("https://kcls.bibliocommons.com/checkedout?display_quantity=50&page=1&view=medium")
soup = BeautifulSoup(r.text, "html.parser")
checkedOutList = soup.find("div", { "id": "bibList" })
checkedOutItems = []
for title in checkedOutList.find_all("div", { "class": "listItem" }):
title_name = title.find("span", { "class": "title" })
due_date = title.find("span", { "class": "item_due_date" })
checkedOutItems.append({ "title": title_name.text.strip(), "due": due_date.text.strip() })
with open("checkedout.json", "w") as f:
print "%d title(s) checked out" % len(checkedOutItems)
f.write(json.dumps(checkedOutItems))
| import requests
from bs4 import BeautifulSoup
import json
from dotenv import load_dotenv
import os
load_dotenv(".env")
s = requests.Session()
r = s.get("https://kcls.bibliocommons.com/user/login", verify=False)
payload = {
"name": os.environ.get("KCLS_USER"),
"user_pin": os.environ.get("PIN")
}
p = s.post("https://kcls.bibliocommons.com/user/login", data=payload)
r = s.get("https://kcls.bibliocommons.com/checkedout?display_quantity=50&page=1&view=medium")
soup = BeautifulSoup(r.text, "html.parser")
checkedOutList = soup.find("div", { "id": "bibList" })
checkedOutItems = []
for title in checkedOutList.find_all("div", { "class": "listItem" }):
title_name = title.find("span", { "class": "title" })
due_date = title.find("span", { "class": "item_due_date" })
checkedOutItems.append({ "title": title_name.text.strip(), "due": due_date.text.strip() })
with open("checkedout.json", "w") as f:
print "%d title(s) checked out" % len(checkedOutItems)
f.write(json.dumps(checkedOutItems))
| Change .env variable to KCLS_USER | Change .env variable to KCLS_USER
| Python | apache-2.0 | mphuie/kcls-myaccount |
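The rename matters because most shells export USER as the login name, so os.environ.get("USER") would silently return that instead of the value from .env; a namespaced key such as KCLS_USER can only come from the application's own configuration. A quick check:
import os

# Usually set by the shell to the login name, masking any .env value:
print(os.environ.get("USER"))
# Only the application's own configuration defines this, so None here
# signals missing configuration instead of a silently wrong credential:
print(os.environ.get("KCLS_USER"))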
f0246b9897d89c1ec6f2361bbb488c4e162e5c5e | reddit_liveupdate/utils.py | reddit_liveupdate/utils.py | import itertools
import pytz
from babel.dates import format_time
from pylons import c
def pairwise(iterable):
a, b = itertools.tee(iterable)
next(b, None)
return itertools.izip(a, b)
def pretty_time(dt):
display_tz = pytz.timezone(c.liveupdate_event.timezone)
return format_time(
time=dt,
tzinfo=display_tz,
format="HH:mm z",
locale=c.locale,
)
| import datetime
import itertools
import pytz
from babel.dates import format_time, format_datetime
from pylons import c
def pairwise(iterable):
a, b = itertools.tee(iterable)
next(b, None)
return itertools.izip(a, b)
def pretty_time(dt):
display_tz = pytz.timezone(c.liveupdate_event.timezone)
today = datetime.datetime.now(display_tz).date()
date = dt.astimezone(display_tz).date()
if date == today:
return format_time(
time=dt,
tzinfo=display_tz,
format="HH:mm z",
locale=c.locale,
)
elif today - date < datetime.timedelta(days=365):
return format_datetime(
datetime=dt,
tzinfo=display_tz,
format="dd MMM HH:mm z",
locale=c.locale,
)
else:
return format_datetime(
datetime=dt,
tzinfo=display_tz,
format="dd MMM YYYY HH:mm z",
locale=c.locale,
)
| Make timestamps more specific as temporal context fades. | Make timestamps more specific as temporal context fades.
Fixes #6.
| Python | bsd-3-clause | madbook/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,sim642/reddit-plugin-liveupdate,madbook/reddit-plugin-liveupdate,florenceyeun/reddit-plugin-liveupdate |
540c5f2969e75a0f461e9d46090cfe8d92c53b00 | Simulator/plot.py | Simulator/plot.py | from Simulator import *
import XMLParser
import textToXML
def getHistoryFileName(xmlFileName):
y = xmlFileName[:-3]
return 'history_' + y + 'txt'
def plotFromXML(fileName,simulationTime,chemicalList):
historyFile = getHistoryFileName(fileName)
sim = XMLParser.getSimulator(fileName)
sim.simulate(int(simulationTime),historyFile)
sim.plot(chemicalList)
def plotFromTxt(fileName,simulationTime,chemicalList):
xmlFile = textToXML.getXMLFromTxt(fileName)
plotFromXML(xmlFile,simulationTime,chemicalList)
| from Simulator import *
import XMLParser
import textToXML
def getHistoryFileName(xmlFileName):
y = xmlFileName[:-3]
y = y + 'txt'
i = len(y) - 1
while i>=0 :
if y[i]=='\\' or y[i]=='/' :
break
i-=1
if i>=0 :
return y[:i+1] + 'history_' + y[i+1:]
else:
return 'history_' + y
def plotFromXML(fileName,simulationTime,chemicalList):
historyFile = getHistoryFileName(fileName)
sim = XMLParser.getSimulator(fileName)
sim.simulate(int(simulationTime),historyFile)
sim.plot(chemicalList)
def plotFromTxt(fileName,simulationTime,chemicalList):
xmlFile = textToXML.getXMLFromTxt(fileName)
plotFromXML(xmlFile,simulationTime,chemicalList)
| Remove history name error for absolute paths | Remove history name error for absolute paths
| Python | mit | aayushkapadia/chemical_reaction_simulator |
3e5f277e72fe60921f2424f0587b99b21155b452 | scrapi/settings/defaults.py | scrapi/settings/defaults.py | BROKER_URL = 'amqp://guest@localhost'
CELERY_RESULT_BACKEND = 'amqp://guest@localhost'
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
STORAGE_METHOD = 'disk'
ARCHIVE_DIRECTORY = 'archive/'
RECORD_DIRECTORY = 'records'
STORE_HTTP_TRANSACTIONS = False
NORMALIZED_PROCESSING = ['storage']
RAW_PROCESSING = ['storage']
SENTRY_DNS = None
FLUENTD_ARGS = None
# OUTPUT SETTINGS
OSF_ENABLED = False
PROTOCOL = 'http'
VERIFY_SSL = True
OSF_PREFIX = 'localhost:5000'
APP_ID = 'some id'
API_KEY_LABEL = 'some label'
API_KEY = 'some api key'
OSF_AUTH = (API_KEY_LABEL, API_KEY)
| DEBUG = False
BROKER_URL = 'amqp://guest@localhost'
CELERY_RESULT_BACKEND = 'amqp://guest@localhost'
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
STORAGE_METHOD = 'disk'
ARCHIVE_DIRECTORY = 'archive/'
RECORD_DIRECTORY = 'records'
STORE_HTTP_TRANSACTIONS = False
NORMALIZED_PROCESSING = ['storage']
RAW_PROCESSING = ['storage']
SENTRY_DNS = None
FLUENTD_ARGS = None
# OUTPUT SETTINGS
OSF_ENABLED = False
PROTOCOL = 'http'
VERIFY_SSL = True
OSF_PREFIX = 'localhost:5000'
APP_ID = 'some id'
API_KEY_LABEL = 'some label'
API_KEY = 'some api key'
OSF_AUTH = (API_KEY_LABEL, API_KEY)
| Add a setting for debugging | Add a setting for debugging
| Python | apache-2.0 | icereval/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,felliott/scrapi,fabianvf/scrapi,alexgarciac/scrapi,erinspace/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi,ostwald/scrapi,jeffreyliu3230/scrapi,fabianvf/scrapi,erinspace/scrapi |
ffab98b03588cef69ab11a10a440d02952661edf | cyder/cydns/soa/forms.py | cyder/cydns/soa/forms.py | from django.forms import ModelForm
from cyder.base.mixins import UsabilityFormMixin
from cyder.base.eav.forms import get_eav_form
from cyder.cydns.soa.models import SOA, SOAAV
class SOAForm(ModelForm, UsabilityFormMixin):
class Meta:
model = SOA
fields = ('root_domain', 'primary', 'contact', 'expire',
'retry', 'refresh', 'minimum', 'ttl', 'description',
'is_signed', 'dns_enabled')
exclude = ('serial', 'dirty',)
SOAAVForm = get_eav_form(SOAAV, SOA)
| from django.forms import ModelForm
from cyder.base.mixins import UsabilityFormMixin
from cyder.base.eav.forms import get_eav_form
from cyder.cydns.soa.models import SOA, SOAAV
class SOAForm(ModelForm, UsabilityFormMixin):
class Meta:
model = SOA
fields = ('root_domain', 'primary', 'contact', 'expire',
'retry', 'refresh', 'minimum', 'ttl', 'description',
'is_signed', 'dns_enabled')
exclude = ('serial', 'dirty',)
def clean(self, *args, **kwargs):
contact = self.cleaned_data['contact']
self.cleaned_data['contact'] = contact.replace('@', '.')
return super(SOAForm, self).clean(*args, **kwargs)
SOAAVForm = get_eav_form(SOAAV, SOA)
| Replace @ with . in soa form clean | Replace @ with . in soa form clean
| Python | bsd-3-clause | OSU-Net/cyder,OSU-Net/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,akeym/cyder,murrown/cyder,drkitty/cyder,akeym/cyder,murrown/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,OSU-Net/cyder |
2ebe4b4c281c6b604330b0ea250da41f0802717f | citrination_client/views/descriptors/alloy_composition_descriptor.py | citrination_client/views/descriptors/alloy_composition_descriptor.py | from citrination_client.views.descriptors.descriptor import MaterialDescriptor
class AlloyCompositionDescriptor(MaterialDescriptor):
def __init__(self, key, balance_element, basis=100, threshold=None):
self.options = dict(balance_element=balance_element, basis=basis, units=threshold)
super(AlloyCompositionDescriptor, self).__init__(key, "Alloy composition")
| from citrination_client.views.descriptors.descriptor import MaterialDescriptor
class AlloyCompositionDescriptor(MaterialDescriptor):
def __init__(self, key, balance_element, basis=100, threshold=None):
self.options = dict(balance_element=balance_element, basis=basis, threshold=threshold)
super(AlloyCompositionDescriptor, self).__init__(key, "Alloy composition")
| Fix for mismamed threshold parameter in allow desc | Fix for mismamed threshold parameter in allow desc
| Python | apache-2.0 | CitrineInformatics/python-citrination-client |
26f984a7732491e87e4eb756caf0056a7ac71484 | contract_invoice_merge_by_partner/models/account_analytic_analysis.py | contract_invoice_merge_by_partner/models/account_analytic_analysis.py | # -*- coding: utf-8 -*-
# © 2016 Carlos Dauden <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, models
class PurchaseOrderLine(models.Model):
_inherit = 'account.analytic.account'
@api.multi
def _recurring_create_invoice(self, automatic=False):
invoice_obj = self.env['account.invoice']
invoices = invoice_obj.browse(
super(PurchaseOrderLine, self)._recurring_create_invoice(automatic))
res = []
unlink_list = []
for partner in invoices.mapped('partner_id'):
inv_to_merge = invoices.filtered(
lambda x: x.partner_id.id == partner)
if partner.contract_invoice_merge:
invoices_merged = inv_to_merge.do_merge()
res.extend(invoices_merged)
unlink_list.extend(inv_to_merge)
else:
res.extend(inv_to_merge)
if unlink_list:
invoice_obj.unlink([x.id for x in unlink_list])
return res
| # -*- coding: utf-8 -*-
# © 2016 Carlos Dauden <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp import api, models
class PurchaseOrderLine(models.Model):
_inherit = 'account.analytic.account'
@api.multi
def _recurring_create_invoice(self, automatic=False):
invoice_obj = self.env['account.invoice']
invoices = invoice_obj.browse(
super(PurchaseOrderLine, self)._recurring_create_invoice(
automatic))
res = []
unlink_list = []
for partner in invoices.mapped('partner_id'):
inv_to_merge = invoices.filtered(
lambda x: x.partner_id.id == partner)
if partner.contract_invoice_merge and len(inv_to_merge) > 1:
invoices_merged = inv_to_merge.do_merge()
res.extend(invoices_merged)
unlink_list.extend(inv_to_merge)
else:
res.extend(inv_to_merge)
if unlink_list:
invoice_obj.browse(unlink_list).unlink()
return res
| Fix unlink, >1 filter and lines too long | Fix unlink, >1 filter and lines too long | Python | agpl-3.0 | bullet92/contract,open-synergy/contract |
cb9b1a2163f960e34721f74bad30622fda71e43b | packages/Python/lldbsuite/test/lang/objc/modules-cache/TestClangModulesCache.py | packages/Python/lldbsuite/test/lang/objc/modules-cache/TestClangModulesCache.py | """Test that the clang modules cache directory can be controlled."""
from __future__ import print_function
import unittest2
import os
import time
import platform
import shutil
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCModulesTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
def test_expr(self):
self.build()
self.main_source_file = lldb.SBFileSpec("main.m")
self.runCmd("settings set target.auto-import-clang-modules true")
mod_cache = self.getBuildArtifact("my-clang-modules-cache")
if os.path.isdir(mod_cache):
shutil.rmtree(mod_cache)
self.assertFalse(os.path.isdir(mod_cache),
"module cache should not exist")
self.runCmd('settings set clang.modules-cache-path "%s"' % mod_cache)
self.runCmd('settings set target.clang-module-search-paths "%s"'
% self.getSourceDir())
(target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(
self, "Set breakpoint here", self.main_source_file)
self.runCmd("expr @import Darwin")
self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
| """Test that the clang modules cache directory can be controlled."""
from __future__ import print_function
import unittest2
import os
import time
import platform
import shutil
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCModulesTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
@skipUnlessDarwin
def test_expr(self):
self.build()
self.main_source_file = lldb.SBFileSpec("main.m")
self.runCmd("settings set target.auto-import-clang-modules true")
mod_cache = self.getBuildArtifact("my-clang-modules-cache")
if os.path.isdir(mod_cache):
shutil.rmtree(mod_cache)
self.assertFalse(os.path.isdir(mod_cache),
"module cache should not exist")
self.runCmd('settings set clang.modules-cache-path "%s"' % mod_cache)
self.runCmd('settings set target.clang-module-search-paths "%s"'
% self.getSourceDir())
(target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(
self, "Set breakpoint here", self.main_source_file)
self.runCmd("expr @import Foo")
self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
| Mark ObjC testcase as skipUnlessDarwin and fix a typo in test function. | Mark ObjC testcase as skipUnlessDarwin and fix a typo in test function.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@326640 91177308-0d34-0410-b5e6-96231b3b80d8
| Python | apache-2.0 | apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb |
7ad47fad53be18a07aede85c02e41176a96c5de2 | learnwithpeople/__init__.py | learnwithpeople/__init__.py | # This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__version__ = "dev"
GIT_REVISION = "dev"
| # This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ('celery_app',)
__version__ = "dev"
GIT_REVISION = "dev"
| Update celery setup according to docs | Update celery setup according to docs
| Python | mit | p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles |
e67c57128f88b61eac08e488e54343d48f1454c7 | ddcz/forms/authentication.py | ddcz/forms/authentication.py | import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=20)
password = forms.CharField(label="Heslo", max_length=50, widget=forms.PasswordInput)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
| import logging
from django import forms
from django.contrib.auth import forms as authforms
from ..models import UserProfile
logger = logging.getLogger(__name__)
class LoginForm(forms.Form):
nick = forms.CharField(label="Nick", max_length=25)
password = forms.CharField(
label="Heslo", max_length=100, widget=forms.PasswordInput
)
class PasswordResetForm(authforms.PasswordResetForm):
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This is overridem from original form to use UserProfile instead of standard
user model since that is normative for email storage.
"""
user_profiles = UserProfile.objects.filter(email_uzivatele__iexact=email)
users = tuple(
list(
up.user
for up in user_profiles
if up.user.has_usable_password() and up.user.is_active
)
)
logger.info(
"Selected users for password reset: %s"
% ", ".join([str(u.pk) for u in users])
)
return users
| Update LoginForm to match reality | Update LoginForm to match reality
| Python | mit | dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard |
14d6955118893c532c1d9f8f6037d1da1b18dbbb | analysis/plot-skeleton.py | analysis/plot-skeleton.py | #!/usr/bin/env python
import climate
import database
import plots
@climate.annotate(
root='plot data rooted at this path',
pattern=('plot data from files matching this pattern', 'option'),
)
def main(root, pattern='*/*block02/*trial00*.csv.gz'):
for trial in database.Experiment(root).trials_matching(pattern):
with plots.space() as ax:
plots.skeleton(ax, trial, 100)
if __name__ == '__main__':
climate.call(main)
| #!/usr/bin/env python
import climate
import pandas as pd
import database
import plots
@climate.annotate(
root='plot data rooted at this path',
pattern=('plot data from files matching this pattern', 'option'),
)
def main(root, pattern='*/*block03/*trial00*.csv.gz'):
for trial in database.Experiment(root).trials_matching(pattern):
with plots.space() as ax:
for i in range(3):
plots.skeleton(ax, trial, 1000 + 300 * i, lw=2, color='#fd3220', alpha=0.3)
#trial.rotate_heading(pd.Series([-6.28 / 10] * len(trial.df)))
trial.make_body_relative()
for i in range(3):
plots.skeleton(ax, trial, 1000 + 300 * i, offset=(0.5 * i, 0.5 * i), lw=2, color='#111111', alpha=0.3)
if __name__ == '__main__':
climate.call(main)
| Add multiple skeletons for the moment. | Add multiple skeletons for the moment.
| Python | mit | lmjohns3/cube-experiment,lmjohns3/cube-experiment,lmjohns3/cube-experiment |
bfd75a927da2b46cb8630fab0cd3828ba71bf4ee | dependencies.py | dependencies.py | #! /usr/bin/env python3
from setuptools.command import easy_install
requires = ["dnslib", "dkimpy>=0.7.1", "pyyaml", "ddt", "authheaders"]
for module in requires:
easy_install.main( ["-U",module] )
| #! /usr/bin/env python3
import subprocess
import sys
requires = ["dnslib", "dkimpy>=0.7.1", "pyyaml", "ddt", "authheaders"]
def install(package):
subprocess.call([sys.executable, "-m", "pip", "install", package])
for module in requires:
install(module)
| Use pip instead of easy_install | Use pip instead of easy_install
| Python | mit | ValiMail/arc_test_suite |
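Running pip as sys.executable -m pip guarantees the packages are installed into the interpreter that is executing the script, which a bare pip or easy_install found on PATH does not. A variant using check_call, so a failed install raises instead of passing silently:
import subprocess
import sys

def install(package):
    # check_call raises CalledProcessError on a non-zero exit code.
    subprocess.check_call([sys.executable, "-m", "pip", "install", package])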
3171e7e355536f41a6c517ca7128a152c2577829 | anndata/tests/test_uns.py | anndata/tests/test_uns.py | import numpy as np
import pandas as pd
from anndata import AnnData
def test_uns_color_subset():
# Tests for https://github.com/theislab/anndata/issues/257
obs = pd.DataFrame(index=[f"cell{i}" for i in range(5)])
obs["cat1"] = pd.Series(list("aabcd"), index=obs.index, dtype="category")
obs["cat2"] = pd.Series(list("aabbb"), index=obs.index, dtype="category")
uns = dict(
cat1_colors=["red", "green", "blue"], cat2_colors=["red", "green", "blue"],
)
adata = AnnData(np.ones((5, 5)), obs=obs, uns=uns)
# If number of categories does not match number of colors,
# they should be reset
v = adata[:, [0, 1]]
assert "cat1_colors" not in v.uns
assert "cat2_colors" not in v.uns
# Otherwise the colors should still match after reseting
adata.uns["cat1_colors"] = ["red", "green", "blue", "yellow"]
v = adata[[0, 1], :]
assert len(v.uns["cat1_colors"]) == 1
assert v.uns["cat1_colors"][0] == "red"
| import numpy as np
import pandas as pd
from anndata import AnnData
def test_uns_color_subset():
# Tests for https://github.com/theislab/anndata/issues/257
obs = pd.DataFrame(index=[f"cell{i}" for i in range(5)])
obs["cat1"] = pd.Series(list("aabcd"), index=obs.index, dtype="category")
obs["cat2"] = pd.Series(list("aabbb"), index=obs.index, dtype="category")
uns = dict(
cat1_colors=["red", "green", "blue"], cat2_colors=["red", "green", "blue"],
)
adata = AnnData(np.ones((5, 5)), obs=obs, uns=uns)
# If number of categories does not match number of colors,
# they should be reset
v = adata[:, [0, 1]]
assert "cat1_colors" not in v.uns
assert "cat2_colors" not in v.uns
# Otherwise the colors should still match after reseting
cat1_colors = ["red", "green", "blue", "yellow"]
adata.uns["cat1_colors"] = cat1_colors.copy()
v = adata[[0, 1], :]
assert len(v.uns["cat1_colors"]) == 1
assert v.uns["cat1_colors"][0] == "red"
# But original object should not change
assert list(adata.uns["cat1_colors"]) == cat1_colors
| Add test for categorical colors staying around after subsetting | Add test for categorical colors staying around after subsetting
| Python | bsd-3-clause | theislab/anndata |
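The heart of the test is a snapshot-act-compare pattern: copy the input before the operation, then assert the source is unchanged afterwards. The same shape stripped of the AnnData specifics:
def test_operation_does_not_mutate_source():
    colors = ["red", "green", "blue", "yellow"]
    snapshot = colors.copy()     # snapshot before acting
    subset = colors[:1]          # the operation under test
    assert subset == ["red"]
    assert colors == snapshot    # the source must be untouched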
2dece45476170e24e14903f19f9bf400c10ebf42 | djangocms_wow/cms_plugins.py | djangocms_wow/cms_plugins.py | # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from . import models
class AnimationPlugin(CMSPluginBase):
model = models.Animation
name = _('Animation')
render_template = 'djangocms_wow/animation.html'
allow_children = True
cache = True
def render(self, context, instance, placeholder):
context.update({'instance': instance})
return context
plugin_pool.register_plugin(AnimationPlugin)
class WOWAnimationPlugin(CMSPluginBase):
model = models.WOWAnimation
name = _("Wow Animation")
render_template = 'djangocms_wow/wow_animation.html'
allow_children = True
cache = True
def render(self, context, instance, placeholder):
context.update({'instance': instance})
return context
plugin_pool.register_plugin(WOWAnimationPlugin)
| # -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from . import models
class AnimationPlugin(CMSPluginBase):
model = models.Animation
name = _('Animation')
render_template = 'djangocms_wow/animation.html'
allow_children = True
text_enabled = True
cache = True
def render(self, context, instance, placeholder):
context.update({'instance': instance})
return context
plugin_pool.register_plugin(AnimationPlugin)
class WOWAnimationPlugin(CMSPluginBase):
model = models.WOWAnimation
name = _("Wow Animation")
render_template = 'djangocms_wow/wow_animation.html'
allow_children = True
text_enabled = True
cache = True
def render(self, context, instance, placeholder):
context.update({'instance': instance})
return context
plugin_pool.register_plugin(WOWAnimationPlugin)
| Allow WOW animations to be used in text plugin. | Allow WOW animations to be used in text plugin.
| Python | bsd-3-clause | narayanaditya95/djangocms-wow,narayanaditya95/djangocms-wow,narayanaditya95/djangocms-wow |
50eba1720cd34684eaf0a931e28474ad987ea699 | asana/resources/events.py | asana/resources/events.py |
from ._events import _Events
from ..error import InvalidTokenError
import time
class Events(_Events):
POLL_INTERVAL = 1000
def get_next(self, params):
params = params.copy()
if 'sync' not in params:
try:
self.get(params)
except InvalidTokenError as e:
params['sync'] = e.value['sync']
while True:
result = self.get(params)
if 'data' in result and len(result['data']) > 0:
return (result['data'], result['sync'])
else:
params['sync'] = result['sync']
time.sleep(self.POLL_INTERVAL / 1000.0)
def get_iterator(self, params):
params = params.copy()
while True:
items, sync = self.get_next(params)
for item in items:
yield item
params['sync'] = sync
|
from ._events import _Events
from ..error import InvalidTokenError
import time
class Events(_Events):
POLL_INTERVAL = 5000
def get_next(self, params):
params = params.copy()
if 'sync' not in params:
try:
self.get(params)
except InvalidTokenError as e:
params['sync'] = e.value['sync']
while True:
result = self.get(params)
if 'data' in result and len(result['data']) > 0:
return (result['data'], result['sync'])
else:
params['sync'] = result['sync']
time.sleep(self.POLL_INTERVAL / 1000.0)
def get_iterator(self, params):
params = params.copy()
while True:
items, sync = self.get_next(params)
for item in items:
yield item
params['sync'] = sync
| Change polling interval to 5 seconds | Change polling interval to 5 seconds
| Python | mit | asana/python-asana,asana/python-asana,Asana/python-asana |
c81b07f93253acc49cbc5028ec83e5334fb47ed9 | flask_admin/model/typefmt.py | flask_admin/model/typefmt.py | from jinja2 import Markup
from flask_admin._compat import text_type
def null_formatter(view, value):
"""
Return `NULL` as the string for `None` value
:param value:
Value to check
"""
return Markup('<i>NULL</i>')
def empty_formatter(view, value):
"""
Return empty string for `None` value
:param value:
Value to check
"""
return ''
def bool_formatter(view, value):
"""
Return check icon if value is `True` or empty string otherwise.
:param value:
Value to check
"""
glyph = 'ok-circle' if value else 'minus-sign'
fa = 'check-circle' if value else 'minus-circle'
return Markup('<span class="fa fa-%s glyphicon glyphicon-%s icon-%s"></span>' % (fa, glyph, glyph))
def list_formatter(view, values):
"""
Return string with comma separated values
:param values:
Value to check
"""
return u', '.join(text_type(v) for v in values)
BASE_FORMATTERS = {
type(None): empty_formatter,
bool: bool_formatter,
list: list_formatter,
}
EXPORT_FORMATTERS = {
type(None): empty_formatter,
list: list_formatter,
}
| from jinja2 import Markup
from flask_admin._compat import text_type
try:
from enum import Enum
except ImportError:
Enum = None
def null_formatter(view, value):
"""
Return `NULL` as the string for `None` value
:param value:
Value to check
"""
return Markup('<i>NULL</i>')
def empty_formatter(view, value):
"""
Return empty string for `None` value
:param value:
Value to check
"""
return ''
def bool_formatter(view, value):
"""
Return check icon if value is `True` or empty string otherwise.
:param value:
Value to check
"""
glyph = 'ok-circle' if value else 'minus-sign'
fa = 'check-circle' if value else 'minus-circle'
return Markup('<span class="fa fa-%s glyphicon glyphicon-%s icon-%s"></span>' % (fa, glyph, glyph))
def list_formatter(view, values):
"""
Return string with comma separated values
:param values:
Value to check
"""
return u', '.join(text_type(v) for v in values)
def enum_formatter(view, value):
"""
Return the name of the enumerated member.
:param value:
Value to check
"""
return value.name
BASE_FORMATTERS = {
type(None): empty_formatter,
bool: bool_formatter,
list: list_formatter,
}
EXPORT_FORMATTERS = {
type(None): empty_formatter,
list: list_formatter,
}
if Enum is not None:
BASE_FORMATTERS[Enum] = enum_formatter
EXPORT_FORMATTERS[Enum] = enum_formatter
| Add default type formatters for Enum | Add default type formatters for Enum
| Python | bsd-3-clause | jschneier/flask-admin,jschneier/flask-admin,jschneier/flask-admin,jmagnusson/flask-admin,likaiguo/flask-admin,quokkaproject/flask-admin,flask-admin/flask-admin,lifei/flask-admin,likaiguo/flask-admin,ArtemSerga/flask-admin,iurisilvio/flask-admin,flask-admin/flask-admin,flask-admin/flask-admin,jschneier/flask-admin,jmagnusson/flask-admin,betterlife/flask-admin,closeio/flask-admin,closeio/flask-admin,lifei/flask-admin,quokkaproject/flask-admin,betterlife/flask-admin,quokkaproject/flask-admin,betterlife/flask-admin,lifei/flask-admin,quokkaproject/flask-admin,lifei/flask-admin,iurisilvio/flask-admin,likaiguo/flask-admin,iurisilvio/flask-admin,ArtemSerga/flask-admin,closeio/flask-admin,ArtemSerga/flask-admin,likaiguo/flask-admin,closeio/flask-admin,rochacbruno/flask-admin,jmagnusson/flask-admin,flask-admin/flask-admin,ArtemSerga/flask-admin,rochacbruno/flask-admin,jmagnusson/flask-admin,rochacbruno/flask-admin,iurisilvio/flask-admin,betterlife/flask-admin,rochacbruno/flask-admin |
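enum_formatter renders a member's symbolic name. A concrete enum is a subclass of Enum rather than Enum itself, so keying the formatter table on the base class assumes the lookup also walks base classes:
from enum import Enum

class Color(Enum):
    RED = 1

member = Color.RED
print(member.name)               # 'RED', what enum_formatter returns
print(type(member) is Enum)      # False: the concrete type is Color
print(isinstance(member, Enum))  # True: base-class lookup still works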
a2fd2436cb1c0285dfdd18fad43e505d7c246535 | modules/module_spotify.py | modules/module_spotify.py |
import re
import urllib
def handle_url(bot, user, channel, url, msg):
"""Handle IMDB urls"""
m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", url)
if not m: return
dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
f = urllib.urlopen(dataurl)
songinfo = f.read()
f.close()
artist, album, song = songinfo.split("/", 2)
bot.say(channel, "[Spotify] %s - %s (%s)" % (artist.strip(), song.strip(), album.strip()))
| import re
import urllib
def do_spotify(bot, user, channel, dataurl):
f = urllib.urlopen(dataurl)
songinfo = f.read()
f.close()
artist, album, song = songinfo.split("/", 2)
bot.say(channel, "[Spotify] %s - %s (%s)" % (artist.strip(), song.strip(), album.strip()))
def handle_privmsg(bot, user, reply, msg):
"""Grab Spotify URLs from the messages and handle them"""
m = re.match("(http:\/\/open.spotify.com\/|spotify:)(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?", msg)
if not m: return
dataurl = "http://spotify.url.fi/%s/%s?txt" % (m.group(2), m.group(4))
do_spotify(bot, user, reply, dataurl)
| Handle spotify: -type urls Cleanup | Handle spotify: -type urls
Cleanup
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@144 dda364a1-ef19-0410-af65-756c83048fb2
| Python | bsd-3-clause | rnyberg/pyfibot,huqa/pyfibot,lepinkainen/pyfibot,EArmour/pyfibot,nigeljonez/newpyfibot,EArmour/pyfibot,huqa/pyfibot,lepinkainen/pyfibot,rnyberg/pyfibot,aapa/pyfibot,aapa/pyfibot |
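The single regex accepts both the URL form and the URI form: group 2 is the entity type and group 4 the identifier. A standalone demonstration, with a purely illustrative track id:
import re

PATTERN = (r"(http:\/\/open.spotify.com\/|spotify:)"
           r"(album|artist|track)([:\/])([a-zA-Z0-9]+)\/?")
for text in ("spotify:track:6rqhFgbbKwnb9MLmUQDhG6",
             "http://open.spotify.com/track/6rqhFgbbKwnb9MLmUQDhG6"):
    m = re.match(PATTERN, text)
    print(m.group(2), m.group(4))  # -> track 6rqhFgbbKwnb9MLmUQDhG6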
99fba41b7392b1e5e4216145f1e8913698b60914 | mopidy_gmusic/commands.py | mopidy_gmusic/commands.py | import gmusicapi
from mopidy import commands
from oauth2client.client import OAuth2WebServerFlow
class GMusicCommand(commands.Command):
def __init__(self):
super().__init__()
self.add_child("login", LoginCommand())
class LoginCommand(commands.Command):
def run(self, args, config):
oauth_info = gmusicapi.Mobileclient._session_class.oauth
flow = OAuth2WebServerFlow(**oauth_info._asdict())
print()
print(
"Go to the following URL to get an initial auth code, then "
+ "provide it below: "
+ flow.step1_get_authorize_url()
)
print()
try:
initial_code = raw_input("code: ")
except NameError:
# Python 3
initial_code = input("code: ")
credentials = flow.step2_exchange(initial_code)
refresh_token = credentials.refresh_token
print("\nPlease update your config to include the following:")
print()
print("[gmusic]")
print("refresh_token =", refresh_token)
print()
| import gmusicapi
from mopidy import commands
from oauth2client.client import OAuth2WebServerFlow
class GMusicCommand(commands.Command):
def __init__(self):
super().__init__()
self.add_child("login", LoginCommand())
class LoginCommand(commands.Command):
def run(self, args, config):
oauth_info = gmusicapi.Mobileclient._session_class.oauth
flow = OAuth2WebServerFlow(**oauth_info._asdict())
print()
print(
"Go to the following URL to get an initial auth code, "
"then provide it below:"
)
print(flow.step1_get_authorize_url())
print()
initial_code = input("code: ")
credentials = flow.step2_exchange(initial_code)
refresh_token = credentials.refresh_token
print("\nPlease update your config to include the following:")
print()
print("[gmusic]")
print("refresh_token =", refresh_token)
print()
| Remove Python 2 compatibility code | py3: Remove Python 2 compatibility code
| Python | apache-2.0 | hechtus/mopidy-gmusic,mopidy/mopidy-gmusic |
8521837cc3f57e11278fc41bfd0e5d106fc140fe | deflect/views.py | deflect/views.py | from __future__ import unicode_literals
import base32_crockford
import logging
from django.db.models import F
from django.http import Http404
from django.http import HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.utils.timezone import now
from .models import ShortURL
from .models import ShortURLAlias
from .utils import add_query_params
logger = logging.getLogger(__name__)
def redirect(request, key):
"""
Given the short URL key, update the statistics and redirect the
user to the destination URL, including available Google Analytics
parameters.
"""
try:
alias = ShortURLAlias.objects.select_related().get(alias=key.lower())
key_id = alias.redirect.id
except ShortURLAlias.DoesNotExist:
try:
key_id = base32_crockford.decode(key)
except ValueError as e:
logger.warning("Error decoding redirect: %s" % e)
raise Http404
redirect = get_object_or_404(ShortURL, pk=key_id)
ShortURL.objects.filter(pk=key_id).update(hits=F('hits') + 1,
last_used=now())
# Inject Google campaign parameters
utm_params = {'utm_source': redirect.key,
'utm_campaign': redirect.campaign,
'utm_content': redirect.content,
'utm_medium': redirect.medium}
url = add_query_params(redirect.long_url, utm_params)
return HttpResponsePermanentRedirect(url)
| from __future__ import unicode_literals
import base32_crockford
import logging
from django.db.models import F
from django.http import Http404
from django.http import HttpResponsePermanentRedirect
from django.shortcuts import get_object_or_404
from django.utils.timezone import now
from .models import ShortURL
from .models import ShortURLAlias
from .utils import add_query_params
logger = logging.getLogger(__name__)
def redirect(request, key):
"""
Given the short URL key, update the statistics and redirect the
user to the destination URL, including available Google Analytics
parameters.
"""
try:
alias = ShortURLAlias.objects.get(alias=key.lower())
key_id = alias.redirect_id
except ShortURLAlias.DoesNotExist:
try:
key_id = base32_crockford.decode(key)
except ValueError as e:
logger.warning("Error decoding redirect: %s" % e)
raise Http404
redirect = get_object_or_404(ShortURL, pk=key_id)
ShortURL.objects.filter(pk=key_id).update(hits=F('hits') + 1,
last_used=now())
# Inject Google campaign parameters
utm_params = {'utm_source': redirect.key,
'utm_campaign': redirect.campaign,
'utm_content': redirect.content,
'utm_medium': redirect.medium}
url = add_query_params(redirect.long_url, utm_params)
return HttpResponsePermanentRedirect(url)
| Simplify database query when looking up an alias | Simplify database query when looking up an alias
| Python | bsd-3-clause | jbittel/django-deflect |
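Reading alias.redirect_id uses the foreign-key value already stored on the alias row, so no join or second query is needed. The add_query_params helper is imported but not shown in this record; a hypothetical Python 3 stand-in (the project's real implementation may differ) would merge the UTM parameters into whatever query string the URL already carries:
from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse

def add_query_params(url, params):
    # Hypothetical stand-in for the helper imported above.
    parts = urlparse(url)
    query = dict(parse_qsl(parts.query))
    # Skip empty values so optional campaign fields add no noise.
    query.update({k: v for k, v in params.items() if v})
    return urlunparse(parts._replace(query=urlencode(query)))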
c322e4f2202f3b004a4f41bd4c2786f88292cf37 | deconstrst/deconstrst.py | deconstrst/deconstrst.py | # -*- coding: utf-8 -*-
import argparse
import sys
from os import path
from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS
def build(argv):
"""
Invoke Sphinx with locked arguments to generate JSON content.
"""
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--submit",
help="Submit results to the content store.",
action="store_true")
args = parser.parse_args(argv[1:])
# I am a terrible person
BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder
# Lock source and destination to the same paths as the Makefile.
srcdir, destdir = '.', '_build/deconst'
doctreedir = path.join(destdir, '.doctrees')
app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
doctreedir=doctreedir, buildername="deconst",
confoverrides={}, status=sys.stdout, warning=sys.stderr,
freshenv=True, warningiserror=False, tags=[], verbosity=0,
parallel=1)
app.build(True, [])
if app.statuscode != 0 or not args.submit:
return app.statuscode
print("submit active")
return 0
| # -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
import sys
import os
from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS
def build(argv):
"""
Invoke Sphinx with locked arguments to generate JSON content.
"""
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--submit",
help="Submit results to the content store.",
action="store_true")
args = parser.parse_args(argv[1:])
content_store_url = os.getenv("CONTENT_STORE")
if args.submit and not content_store_url:
print("Please set CONTENT_STORE if submitting results.",
file=sys.stderr)
sys.exit(1)
# I am a terrible person
BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder
# Lock source and destination to the same paths as the Makefile.
srcdir, destdir = '.', '_build/deconst'
doctreedir = os.path.join(destdir, '.doctrees')
app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
doctreedir=doctreedir, buildername="deconst",
confoverrides={}, status=sys.stdout, warning=sys.stderr,
freshenv=True, warningiserror=False, tags=[], verbosity=0,
parallel=1)
app.build(True, [])
if app.statuscode != 0 or not args.submit:
return app.statuscode
print("submit active")
return 0
| Validate the presence of CONTENT_STORE. | Validate the presence of CONTENT_STORE.
| Python | apache-2.0 | ktbartholomew/preparer-sphinx,ktbartholomew/preparer-sphinx,deconst/preparer-sphinx,deconst/preparer-sphinx |
88de184c1d9daa79e47873b0bd8912ea67b32ec1 | app/__init__.py | app/__init__.py | from flask import Flask
import base64
import json
from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
feature_flags=feature_flags
)
if application.config['VCAP_SERVICES']:
cf_services = json.loads(application.config['VCAP_SERVICES'])
application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uris']
with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))
elasticsearch_client.init_app(
application,
verify_certs=True,
ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
return application
| from flask import Flask
import base64
import json
from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
feature_flags=feature_flags
)
if application.config['VCAP_SERVICES']:
cf_services = json.loads(application.config['VCAP_SERVICES'])
application.config['ELASTICSEARCH_HOST'] = \
cf_services['elasticsearch-compose'][0]['credentials']['uris']
with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
es_certfile.write(
base64.b64decode(cf_services['elasticsearch-compose'][0]['credentials']['ca_certificate_base64'])
)
elasticsearch_client.init_app(
application,
verify_certs=True,
ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
return application
| Change the VCAP_SERVICES key for elasticsearch | Change the VCAP_SERVICES key for elasticsearch
GOV.UK PaaS have recently changed the name of their elasticsearch service in preparation for migration.
This quick fix will work until elasticsearch-compose is withdrawn; a future solution should use a more robust way of determining the elasticsearch URI.
| Python | mit | alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api |
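VCAP_SERVICES is the JSON blob Cloud Foundry injects into an application's environment, so the fix amounts to following the service's new key one level down. A decoding sketch with a made-up payload:
import base64
import json

# Made-up payload, shaped like a Cloud Foundry VCAP_SERVICES value.
vcap = json.loads('{"elasticsearch-compose": [{"credentials":'
                  ' {"uris": "https://example:9200",'
                  ' "ca_certificate_base64": "UEVN"}}]}')
creds = vcap["elasticsearch-compose"][0]["credentials"]
print(creds["uris"])                                     # https://example:9200
print(base64.b64decode(creds["ca_certificate_base64"]))  # b'PEM'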
15f1abef288411539b512f6bdb572c4a54aa5447 | airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py | airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add dag_id/state index on dag_run table
Revision ID: 127d2bf2dfa7
Revises: 1a5a9e6bf2b5
Create Date: 2017-01-25 11:43:51.635667
"""
# revision identifiers, used by Alembic.
revision = '127d2bf2dfa7'
down_revision = '1a5a9e6bf2b5'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_index('dag_id_state', 'dag_run', ['dag_id', 'state'], unique=False)
def downgrade():
op.drop_index('dag_id_state', table_name='dag_run')
| #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add dag_id/state index on dag_run table
Revision ID: 127d2bf2dfa7
Revises: 5e7d17757c7a
Create Date: 2017-01-25 11:43:51.635667
"""
# revision identifiers, used by Alembic.
revision = '127d2bf2dfa7'
down_revision = '5e7d17757c7a'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_index('dag_id_state', 'dag_run', ['dag_id', 'state'], unique=False)
def downgrade():
op.drop_index('dag_id_state', table_name='dag_run')
| Correct down_revision dag_id/state index creation | [AIRFLOW-810] Correct down_revision dag_id/state index creation
Due to a revert, the revisions were no longer correct, and an unclean
build environment would still consider the old revision for alembic migrations.
| Python | apache-2.0 | lyft/incubator-airflow,artwr/airflow,mrkm4ntr/incubator-airflow,stverhae/incubator-airflow,hamedhsn/incubator-airflow,OpringaoDoTurno/airflow,dgies/incubator-airflow,preete-dixit-ck/incubator-airflow,AllisonWang/incubator-airflow,gilt/incubator-airflow,mtagle/airflow,malmiron/incubator-airflow,sekikn/incubator-airflow,owlabs/incubator-airflow,dmitry-r/incubator-airflow,wndhydrnt/airflow,lxneng/incubator-airflow,Fokko/incubator-airflow,Twistbioscience/incubator-airflow,aminghadersohi/airflow,dhuang/incubator-airflow,jesusfcr/airflow,zoyahav/incubator-airflow,asnir/airflow,mrares/incubator-airflow,brandsoulmates/incubator-airflow,sid88in/incubator-airflow,r39132/airflow,N3da/incubator-airflow,Tagar/incubator-airflow,criccomini/airflow,N3da/incubator-airflow,Twistbioscience/incubator-airflow,wileeam/airflow,cfei18/incubator-airflow,skudriashev/incubator-airflow,jgao54/airflow,apache/airflow,andrewmchen/incubator-airflow,N3da/incubator-airflow,edgarRd/incubator-airflow,lyft/incubator-airflow,mrares/incubator-airflow,fenglu-g/incubator-airflow,hgrif/incubator-airflow,holygits/incubator-airflow,DinoCow/airflow,jgao54/airflow,danielvdende/incubator-airflow,danielvdende/incubator-airflow,danielvdende/incubator-airflow,nathanielvarona/airflow,cjqian/incubator-airflow,zoyahav/incubator-airflow,dhuang/incubator-airflow,nathanielvarona/airflow,jgao54/airflow,AllisonWang/incubator-airflow,zodiac/incubator-airflow,janczak10/incubator-airflow,sid88in/incubator-airflow,r39132/airflow,lyft/incubator-airflow,jesusfcr/airflow,aminghadersohi/airflow,jiwang576/incubator-airflow,edgarRd/incubator-airflow,hamedhsn/incubator-airflow,rishibarve/incubator-airflow,wolfier/incubator-airflow,subodhchhabra/airflow,bolkedebruin/airflow,Fokko/incubator-airflow,jfantom/incubator-airflow,sekikn/incubator-airflow,holygits/incubator-airflow,wooga/airflow,nigeljonez/newpyfibot,yk5/incubator-airflow,Acehaidrey/incubator-airflow,janczak10/incubator-airflow,jlowin/airflow,MortalViews/incubator-airflow,MetrodataTeam/incubator-airflow,mattuuh7/incubator-airflow,ProstoMaxim/incubator-airflow,nathanielvarona/airflow,stverhae/incubator-airflow,ProstoMaxim/incubator-airflow,mtagle/airflow,MetrodataTeam/incubator-airflow,wndhydrnt/airflow,preete-dixit-ck/incubator-airflow,andyxhadji/incubator-airflow,dgies/incubator-airflow,MortalViews/incubator-airflow,jgao54/airflow,mrkm4ntr/incubator-airflow,hgrif/incubator-airflow,yati-sagade/incubator-airflow,RealImpactAnalytics/airflow,wndhydrnt/airflow,jhsenjaliya/incubator-airflow,mtagle/airflow,wooga/airflow,MortalViews/incubator-airflow,ronfung/incubator-airflow,saguziel/incubator-airflow,saguziel/incubator-airflow,gritlogic/incubator-airflow,yati-sagade/incubator-airflow,jiwang576/incubator-airflow,mistercrunch/airflow,skudriashev/incubator-airflow,akosel/incubator-airflow,andyxhadji/incubator-airflow,wileeam/airflow,apache/airflow,adamhaney/airflow,vijaysbhat/incubator-airflow,KL-WLCR/incubator-airflow,asnir/airflow,yati-sagade/incubator-airflow,akosel/incubator-airflow,jesusfcr/airflow,holygits/incubator-airflow,gilt/incubator-airflow,gtoonstra/airflow,RealImpactAnalytics/airflow,CloverHealth/airflow,sdiazb/airflow,r39132/airflow,mattuuh7/incubator-airflow,saguziel/incubator-airflow,NielsZeilemaker/incubator-airflow,artwr/airflow,artwr/airflow,danielvdende/incubator-airflow,apache/incubator-airflow,adamhaney/airflow,Acehaidrey/incubator-airflow,vijaysbhat/incubator-airflow,bolkedebruin/airflow,zodiac/incubator-airflow,jesusfcr/airflow,Fokko/incubator-airflow,janczak10/incubator-airflow,asnir/airflow,wolfier/incubator-airflow,easytaxibr/airflow,andrewmchen/incubator-airflow,sergiohgz/incubator-airflow,mrares/incubator-airflow,gritlogic/incubator-airflow,nathanielvarona/airflow,Fokko/incubator-airflow,sdiazb/airflow,KL-WLCR/incubator-airflow,jlowin/airflow,asnir/airflow,wolfier/incubator-airflow,easytaxibr/airflow,rishibarve/incubator-airflow,mistercrunch/airflow,mrares/incubator-airflow,wileeam/airflow,sergiohgz/incubator-airflow,cjqian/incubator-airflow,spektom/incubator-airflow,sid88in/incubator-airflow,gtoonstra/airflow,stverhae/incubator-airflow,sekikn/incubator-airflow,gilt/incubator-airflow,gtoonstra/airflow,apache/airflow,criccomini/airflow,RealImpactAnalytics/airflow,airbnb/airflow,wndhydrnt/airflow,dgies/incubator-airflow,brandsoulmates/incubator-airflow,sdiazb/airflow,bolkedebruin/airflow,CloverHealth/airflow,zoyahav/incubator-airflow,owlabs/incubator-airflow,cfei18/incubator-airflow,yk5/incubator-airflow,adrpar/incubator-airflow,criccomini/airflow,DinoCow/airflow,danielvdende/incubator-airflow,spektom/incubator-airflow,andrewmchen/incubator-airflow,fenglu-g/incubator-airflow,criccomini/airflow,DinoCow/airflow,ProstoMaxim/incubator-airflow,danielvdende/incubator-airflow,zack3241/incubator-airflow,airbnb/airflow,Acehaidrey/incubator-airflow,subodhchhabra/airflow,mattuuh7/incubator-airflow,andyxhadji/incubator-airflow,zoyahav/incubator-airflow,edgarRd/incubator-airflow,alexvanboxel/airflow,jiwang576/incubator-airflow,skudriashev/incubator-airflow,bolkedebruin/airflow,ronfung/incubator-airflow,apache/incubator-airflow,owlabs/incubator-airflow,hgrif/incubator-airflow,Tagar/incubator-airflow,jfantom/incubator-airflow,fenglu-g/incubator-airflow,subodhchhabra/airflow,brandsoulmates/incubator-airflow,cfei18/incubator-airflow,OpringaoDoTurno/airflow,jhsenjaliya/incubator-airflow,RealImpactAnalytics/airflow,cfei18/incubator-airflow,AllisonWang/incubator-airflow,alexvanboxel/airflow,ProstoMaxim/incubator-airflow,KL-WLCR/incubator-airflow,alexvanboxel/airflow,gritlogic/incubator-airflow,akosel/incubator-airflow,hamedhsn/incubator-airflow,skudriashev/incubator-airflow,MortalViews/incubator-airflow,gilt/incubator-airflow,yati-sagade/incubator-airflow,akosel/incubator-airflow,gtoonstra/airflow,sergiohgz/incubator-airflow,OpringaoDoTurno/airflow,r39132/airflow,apache/airflow,MetrodataTeam/incubator-airflow,jhsenjaliya/incubator-airflow,adamhaney/airflow,cfei18/incubator-airflow,wolfier/incubator-airflow,jlowin/airflow,fenglu-g/incubator-airflow,zodiac/incubator-airflow,dgies/incubator-airflow,jhsenjaliya/incubator-airflow,hamedhsn/incubator-airflow,ronfung/incubator-airflow,zodiac/incubator-airflow,Tagar/incubator-airflow,KL-WLCR/incubator-airflow,cfei18/incubator-airflow,N3da/incubator-airflow,Twistbioscience/incubator-airflow,easytaxibr/airflow,mrkm4ntr/incubator-airflow,sid88in/incubator-airflow,holygits/incubator-airflow,rishibarve/incubator-airflow
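Alembic orders migrations by following each revision's down_revision pointer back toward the base, so a pointer at a reverted, now missing revision such as 1a5a9e6bf2b5 breaks the chain. A toy model of that walk:
# Toy model of alembic's ordering: follow down_revision links from the
# head back to the base; a link to a missing revision breaks the walk.
chain = {'127d2bf2dfa7': '5e7d17757c7a', '5e7d17757c7a': None}

def history(head):
    out = []
    while head is not None:
        out.append(head)
        head = chain[head]  # KeyError here == dangling down_revision
    return out

print(history('127d2bf2dfa7'))  # ['127d2bf2dfa7', '5e7d17757c7a']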
c037f405de773a3c9e9a7affedf2ee154a3c1766 | django_q/migrations/0003_auto_20150708_1326.py | django_q/migrations/0003_auto_20150708_1326.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('django_q', '0002_auto_20150630_1624'),
]
operations = [
migrations.AlterModelOptions(
name='failure',
options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task'},
),
migrations.AlterModelOptions(
name='schedule',
options={'verbose_name_plural': 'Scheduled tasks', 'ordering': ['next_run'], 'verbose_name': 'Scheduled task'},
),
migrations.AlterModelOptions(
name='success',
options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task'},
),
migrations.AlterField(
model_name='task',
name='id',
field=models.CharField(max_length=32, primary_key=True, editable=False, serialize=False),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('django_q', '0002_auto_20150630_1624'),
]
operations = [
migrations.AlterModelOptions(
name='failure',
options={'verbose_name_plural': 'Failed tasks', 'verbose_name': 'Failed task'},
),
migrations.AlterModelOptions(
name='schedule',
options={'verbose_name_plural': 'Scheduled tasks', 'ordering': ['next_run'], 'verbose_name': 'Scheduled task'},
),
migrations.AlterModelOptions(
name='success',
options={'verbose_name_plural': 'Successful tasks', 'verbose_name': 'Successful task'},
),
migrations.RemoveField(
model_name='task',
name='id',
),
migrations.AddField(
model_name='task',
name='id',
field=models.CharField(max_length=32, primary_key=True, editable=False, serialize=False),
),
]
| Remove and replace task.id field, instead of Alter | Remove and replace task.id field, instead of Alter | Python | mit | Koed00/django-q |
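Context for the diff above: `AlterField` on a primary key can fail on some database backends, which is presumably why the column is dropped and recreated instead. A minimal sketch of the same pattern, with hypothetical app and model names:

```python
# Sketch only: 'myapp'/'item' are invented; mirrors the Remove+Add pattern above.
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [('myapp', '0001_initial')]

    operations = [
        migrations.RemoveField(model_name='item', name='id'),
        migrations.AddField(
            model_name='item',
            name='id',
            field=models.CharField(max_length=32, primary_key=True,
                                   editable=False, serialize=False),
        ),
    ]
```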
d577545431c1e41a8987497ee116472f20404252 | molly/installer/__init__.py | molly/installer/__init__.py | # Packages which Molly needs, but Pip can't handle
PIP_PACKAGES = [
('PyZ3950', 'git+http://github.com/oucs/PyZ3950.git'), # Custom PyZ3950, contains some bug fixes
('django-compress', 'git+git://github.com/mollyproject/django-compress.git#egg=django-compress'), # Fork of django-compress contains some extra features we need
('PIL', 'PIL'), # Because it doesn't install properly when called using setuptools...
] | # Packages which Molly needs, but Pip can't handle
PIP_PACKAGES = [
('PyZ3950', 'git+git://github.com/oucs/PyZ3950.git'), # Custom PyZ3950, contains some bug fixes
('django-compress', 'git+git://github.com/mollyproject/django-compress.git#egg=django-compress'), # Fork of django-compress contains some extra features we need
('PIL', 'PIL'), # Because it doesn't install properly when called using setuptools...
] | Change PyZ3950 to use git+git | MOLLY-188: Change PyZ3950 to use git+git
| Python | apache-2.0 | mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject |
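pip accepts several transports for VCS requirements (`git+http`, `git+https`, `git+git`, `git+ssh`); the change above only swaps the transport scheme. A sketch, not taken from the repo, of how a `(name, requirement)` list like `PIP_PACKAGES` could be consumed:

```python
import subprocess

PACKAGES = [
    ('PyZ3950', 'git+git://github.com/oucs/PyZ3950.git'),  # VCS requirement
    ('PIL', 'PIL'),                                        # plain PyPI name
]

for _name, requirement in PACKAGES:
    # pip understands git+<scheme>://... URLs for installing from a repository
    subprocess.check_call(['pip', 'install', requirement])
```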
423d9b9e294ef20fafbb1cb67a6c54c38112cddb | bot/multithreading/worker.py | bot/multithreading/worker.py | import queue
import threading
class Worker:
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
self.name = name
self.queue = work_queue
# using an event instead of a boolean flag to avoid race conditions between threads
self.end = threading.Event()
self.error_handler = error_handler
def run(self):
while self._should_run():
work = self.queue.get()
self._work(work)
def _should_run(self):
return not self.end.is_set()
def _work(self, work: Work):
try:
work.do_work()
except Exception as e:
self.error_handler(e, work, self)
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.end.set()
class Work:
def __init__(self, func: callable, name: str):
self.func = func
self.name = name
def do_work(self):
self.func()
| import queue
import threading
class Worker:
def __init__(self, name: str, work_queue: queue.Queue, error_handler: callable):
self.name = name
self.queue = work_queue
# using an event instead of a boolean flag to avoid race conditions between threads
self.end = threading.Event()
self.error_handler = error_handler
def run(self):
while self._should_run():
work = self.queue.get()
self._work(work)
def _should_run(self):
return not self.end.is_set()
def _work(self, work: Work):
try:
work.do_work()
except BaseException as e:
self._error(e, work)
def _error(self, e: BaseException, work: Work):
try:
self.error_handler(e, work, self)
except:
pass
def post(self, work: Work):
self.queue.put(work)
def shutdown(self):
self.end.set()
class Work:
def __init__(self, func: callable, name: str):
self.func = func
self.name = name
def do_work(self):
self.func()
| Improve Worker resistance against external code exceptions | Improve Worker resistance against external code exceptions
| Python | agpl-3.0 | alvarogzp/telegram-bot,alvarogzp/telegram-bot |
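Two details carry the hardening in this diff: catching `BaseException` (so even `SystemExit` or `KeyboardInterrupt` raised inside posted work cannot break the loop) and guarding the error handler itself. A condensed restatement of the pattern, not additional repo code:

```python
def run_safely(func, error_handler):
    try:
        func()
    except BaseException as e:  # deliberately broader than `except Exception`
        try:
            error_handler(e)
        except BaseException:
            pass  # a failing error handler must not take the worker down
```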
46c63fea860217fecf4ca334149970e8df7fd149 | webserver/webTermSuggester.py | webserver/webTermSuggester.py | #!/usr/bin/env python
################################################################################
# Created by Oscar Martinez #
# [email protected] #
################################################################################
from flask import Flask, request, jsonify
from TermSuggester import TermSuggester, SearchMethodAggregation
from elsearch import ELSearch
from wnsearch import WNSearch
app = Flask(__name__)
searchMethodClasses = (ELSearch, WNSearch)
initializeParameters = ((None, False),('/home/oscarr/concept-search-wd/data/wordnet', False))
ts = TermSuggester(searchMethodClasses, initializeParameters)
@app.route("/suggester", methods = ['GET',])
def api_term():
if request.method == 'GET':
if 'term' in request.args:
data = ts.getSuggestions(str(request.args['term']), SearchMethodAggregation.SumMethod)
resp = jsonify(data)
resp.status_code = 200
return resp
else:
            return 'Error: Need to specify a term!'
if __name__ == "__main__":
app.run(debug=True) | #!/usr/bin/env python
################################################################################
# Created by Oscar Martinez #
# [email protected] #
################################################################################
from flask import Flask, request, jsonify
from TermSuggester import TermSuggester, SearchMethodAggregation
from elsearch import ELSearch
from wnsearch import WNSearch
app = Flask(__name__)
searchMethodClasses = (ELSearch, WNSearch)
initializeParameters = ((None, False),([]))
ts = TermSuggester(searchMethodClasses, initializeParameters)
@app.route("/suggester", methods = ['GET',])
def api_term():
if request.method == 'GET':
if 'term' in request.args:
data = ts.getSuggestions(str(request.args['term']), SearchMethodAggregation.SumMethod)
resp = jsonify(data)
resp.status_code = 200
return resp
else:
            return 'Error: Need to specify a term!'
if __name__ == "__main__":
app.run(debug=True) | Change init param of wordnet | Change init param of wordnet | Python | apache-2.0 | nlesc-sherlock/concept-search,nlesc-sherlock/concept-search,nlesc-sherlock/concept-search,nlesc-sherlock/concept-search |
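The new `([])` value simply drops the hard-coded WordNet path that `WNSearch` previously received. A hypothetical client call against the endpoint above (host and port assume Flask's `app.run` defaults; `requests` is not part of the service):

```python
import requests

resp = requests.get('http://127.0.0.1:5000/suggester', params={'term': 'cat'})
print(resp.status_code, resp.json())
```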
66e2e3bee9996a0cb55c7b802a638e42bc72ccbe | zazu/plugins/astyle_styler.py | zazu/plugins/astyle_styler.py | # -*- coding: utf-8 -*-
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"
class AstyleStyler(zazu.styler.Styler):
"""Astyle plugin for code styling"""
def style_file(self, file, verbose, dry_run):
"""Run astyle on a file"""
args = ['astyle', '-v'] + self.options
if dry_run:
args.append('--dry-run')
args.append(file)
output = zazu.util.check_output(args)
fix_needed = output.startswith('Formatted ')
return file, fix_needed
@staticmethod
def default_extensions():
return ['*.c',
'*.cc',
'*.cpp',
'*.h',
'*.hpp',
'*.java']
@staticmethod
def type():
return 'astyle'
| # -*- coding: utf-8 -*-
"""astyle plugin for zazu"""
import zazu.styler
import zazu.util
__author__ = "Nicholas Wiles"
__copyright__ = "Copyright 2017"
class AstyleStyler(zazu.styler.Styler):
"""Astyle plugin for code styling"""
def style_file(self, file, verbose, dry_run):
"""Run astyle on a file"""
args = ['astyle', '--formatted'] + self.options
if dry_run:
args.append('--dry-run')
args.append(file)
output = zazu.util.check_output(args)
return file, bool(output)
@staticmethod
def default_extensions():
return ['*.c',
'*.cc',
'*.cpp',
'*.h',
'*.hpp',
'*.java']
@staticmethod
def type():
return 'astyle'
| Use formatted flag on astyle to simplify code | Use formatted flag on astyle to simplify code
| Python | mit | stopthatcow/zazu,stopthatcow/zazu |
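The simplification leans on astyle's `--formatted` option, which emits output only for files that were changed (or, with `--dry-run`, would be changed), so the truthiness of the captured output doubles as the fix-needed flag. A standalone sketch of that check, assuming `astyle` is on `PATH`:

```python
import subprocess

def needs_formatting(path):
    # non-empty output means astyle would reformat this file
    out = subprocess.check_output(['astyle', '--formatted', '--dry-run', path])
    return bool(out.strip())
```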
887cb1b1a021b6d4a1952fdeb178e602d8cabfdc | clifford/test/__init__.py | clifford/test/__init__.py | from .test_algebra_initialisation import *
from .test_clifford import *
from .test_io import *
from .test_g3c_tools import *
from .test_tools import *
from .test_g3c_CUDA import *
import unittest
def run_all_tests():
unittest.main()
| import os
import pytest
def run_all_tests(*args):
""" Invoke pytest, forwarding options to pytest.main """
pytest.main([os.path.dirname(__file__)] + list(args))
| Fix `clifford.test.run_all_tests` to use pytest | Fix `clifford.test.run_all_tests` to use pytest
Closes gh-91. Tests can be run with
```python
import clifford.test
clifford.test.run_all_tests()
```
| Python | bsd-3-clause | arsenovic/clifford,arsenovic/clifford |
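Because `pytest.main` takes a list of CLI-style arguments, the new `*args` signature lets callers forward pytest options straight through:

```python
import clifford.test

# e.g. a verbose run of only the algebra tests:
clifford.test.run_all_tests('-v', '-k', 'algebra')
```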
c9ef00ff3225aa545cbb1a3da592c9af1bb0791e | django_git/management/commands/git_pull_utils/git_folder_enum.py | django_git/management/commands/git_pull_utils/git_folder_enum.py | from django_git.models import RepoInfo
from tagging.models import Tag, TaggedItem
def enum_git_repo(tag_name="git"):
tag_filter = Tag.objects.filter(name=tag_name)
if tag_filter.exists():
tag = tag_filter[0]
tagged_item_list = TaggedItem.objects.filter(tag__exact=tag.pk)
for tagged_item in tagged_item_list:
obj_tag = tagged_item.tag.name
obj = tagged_item.object
if obj is None:
continue
RepoInfo.objects.get_or_create(full_path=obj.full_path)
for repo in RepoInfo.objects.all().order_by("last_checked"):
yield repo
| from django_git.models import RepoInfo
from tagging.models import Tag, TaggedItem
def enum_git_repo(tag_name="git"):
tag_filter = Tag.objects.filter(name=tag_name)
if tag_filter.exists():
tag = tag_filter[0]
tagged_item_list = TaggedItem.objects.filter(tag__exact=tag.pk)
for tagged_item in tagged_item_list:
obj_tag = tagged_item.tag.name
obj = tagged_item.object
if obj is None:
continue
RepoInfo.objects.get_or_create(full_path=obj.full_path)
for repo in RepoInfo.objects.all().order_by("last_checked"):
yield repo
| Fix issue when GIT is not tagged. | Fix issue when GIT is not tagged.
| Python | bsd-3-clause | weijia/django-git,weijia/django-git |
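`enum_git_repo` is a generator ordered by `last_checked`, so callers iterate lazily over the least recently checked repositories first. A usage sketch (assumes a configured Django environment):

```python
for repo in enum_git_repo(tag_name='git'):
    print(repo.full_path)
```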
7258923a3fc6467c2aac2c81f108c71e790a9e6b | wtl/wtparser/parsers/regex.py | wtl/wtparser/parsers/regex.py | import re
from itertools import repeat
class RegexParserMixin(object):
quoted_re = r'''(?P<q>"|')(?P<x>.+)(?P=q)'''
version_re = r'''(?P<s>[<>=~]*)\s*(?P<n>.*)'''
def _get_value(self, lines, prefix, regex):
filtered = self._lines_startwith(lines, '{0} '.format(prefix))
return self._match(filtered[0], 'x', regex) if len(lines) else None
def _lines_startwith(self, lines, init):
return [l.strip() for l in lines if l.strip().startswith(init)]
def _match(self, line, group, regex):
ms = re.compile(regex).match(line)
if ms is not None:
return ms.groupdict().get(group, None)
def _match_groups(self, line, regex):
ms = re.compile(regex).match(line)
return ms.groups() if ms is not None else repeat(None)
| import re
from itertools import repeat
class RegexParserMixin(object):
quoted_re = r'''(?P<q>"|')(?P<x>.+)(?P=q)'''
version_re = r'''(?P<s>[<>=~]*)\s*(?P<n>.*)'''
def _get_value(self, lines, prefix, regex):
filtered = self._lines_startwith(lines, '{0} '.format(prefix))
return self._match(filtered[0], 'x', regex) if len(filtered) else None
def _lines_startwith(self, lines, init):
return [l.strip() for l in lines if l.strip().startswith(init)]
def _match(self, line, group, regex):
ms = re.compile(regex).match(line)
if ms is not None:
return ms.groupdict().get(group, None)
def _match_groups(self, line, regex):
ms = re.compile(regex).match(line)
return ms.groups() if ms is not None else repeat(None)
| Fix bug in RegEx parser mixin | Fix bug in RegEx parser mixin
| Python | mit | elegion/djangodash2013,elegion/djangodash2013 |
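The one-word fix matters because `filtered[0]` is only safe when the filtered list is non-empty; with the old `len(lines)` guard, any non-empty input without a matching prefix raised `IndexError`. A minimal reproduction with invented input:

```python
lines = ['name "foo"']  # non-empty, but no line starts with 'source '
filtered = [l.strip() for l in lines if l.strip().startswith('source ')]
# old guard `len(lines)` is truthy, so filtered[0] raises IndexError
# new guard `len(filtered)` is falsy, so the method returns None instead
```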
9633f3ee1a3431cb373a4652afbfc2cd8b3b4c23 | test_utils/anki/__init__.py | test_utils/anki/__init__.py | import sys
from unittest.mock import MagicMock
class MockAnkiModules:
"""
I'd like to get rid of the situation when this is required, but for now this helps with the situation that
anki modules are not available during test runtime.
"""
modules_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
def __init__(self):
self.shadowed_modules = {}
for module in self.modules_list:
self.shadowed_modules[module] = sys.modules.get(module)
sys.modules[module] = MagicMock()
def unmock(self):
for module in self.modules_list:
shadowed_module = self.shadowed_modules[module]
if shadowed_module is not None:
sys.modules[module] = shadowed_module
else:
if module in sys.modules:
del sys.modules[module]
| from typing import List
from typing import Optional
import sys
from unittest.mock import MagicMock
class MockAnkiModules:
"""
I'd like to get rid of the situation when this is required, but for now this helps with the situation that
anki modules are not available during test runtime.
"""
module_names_list = ['anki', 'anki.hooks', 'anki.exporting', 'anki.decks', 'anki.utils', 'anki.cards', 'anki.models',
'anki.notes', 'aqt', 'aqt.qt', 'aqt.exporting', 'aqt.utils']
def __init__(self, module_names_list: Optional[List[str]] = None):
if module_names_list is None:
module_names_list = self.module_names_list
self.shadowed_modules = {}
for module_name in module_names_list:
self.shadowed_modules[module_name] = sys.modules.get(module_name)
sys.modules[module_name] = MagicMock()
def unmock(self):
for module_name, module in self.shadowed_modules.items():
if module is not None:
sys.modules[module_name] = module
else:
if module_name in sys.modules:
del sys.modules[module_name]
| Allow specifying modules to be mocked | Allow specifying modules to be mocked
| Python | mit | Stvad/CrowdAnki,Stvad/CrowdAnki,Stvad/CrowdAnki |
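With the new optional argument, a test can mock only the modules it actually touches and restore the originals afterwards:

```python
# Usage sketch:
mocks = MockAnkiModules(['anki', 'aqt'])
try:
    import anki  # resolves to a MagicMock while the mocks are installed
finally:
    mocks.unmock()  # any shadowed real modules are put back
```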
deb87fefcc7fa76de3ae29ae58e816e49184d100 | openfisca_core/model_api.py | openfisca_core/model_api.py | # -*- coding: utf-8 -*-
from datetime import date # noqa analysis:ignore
from numpy import maximum as max_, minimum as min_, logical_not as not_, where, select # noqa analysis:ignore
from .columns import ( # noqa analysis:ignore
AgeCol,
BoolCol,
DateCol,
EnumCol,
FixedStrCol,
FloatCol,
IntCol,
PeriodSizeIndependentIntCol,
StrCol,
)
from .enumerations import Enum # noqa analysis:ignore
from .formulas import ( # noqa analysis:ignore
ADD,
calculate_output_add,
calculate_output_divide,
dated_function,
DIVIDE,
set_input_dispatch_by_period,
set_input_divide_by_period,
missing_value
)
from .base_functions import ( # noqa analysis:ignore
requested_period_added_value,
requested_period_default_value,
requested_period_last_or_next_value,
requested_period_last_value,
)
from .variables import DatedVariable, Variable # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY # noqa analysis:ignore
from .reforms import Reform # noqa analysis:ignore
| # -*- coding: utf-8 -*-
from datetime import date # noqa analysis:ignore
from numpy import ( # noqa analysis:ignore
logical_not as not_,
maximum as max_,
minimum as min_,
round as round_,
select,
where,
)
from .columns import ( # noqa analysis:ignore
AgeCol,
BoolCol,
DateCol,
EnumCol,
FixedStrCol,
FloatCol,
IntCol,
PeriodSizeIndependentIntCol,
StrCol,
)
from .enumerations import Enum # noqa analysis:ignore
from .formulas import ( # noqa analysis:ignore
ADD,
calculate_output_add,
calculate_output_divide,
dated_function,
DIVIDE,
set_input_dispatch_by_period,
set_input_divide_by_period,
missing_value
)
from .base_functions import ( # noqa analysis:ignore
requested_period_added_value,
requested_period_default_value,
requested_period_last_or_next_value,
requested_period_last_value,
)
from .variables import DatedVariable, Variable # noqa analysis:ignore
from .formula_helpers import apply_thresholds, switch # noqa analysis:ignore
from .periods import MONTH, YEAR, ETERNITY # noqa analysis:ignore
from .reforms import Reform # noqa analysis:ignore
| Add numpy.round to model api | Add numpy.round to model api
| Python | agpl-3.0 | openfisca/openfisca-core,openfisca/openfisca-core |
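The change re-exports `numpy.round` as `round_`, matching the existing `max_`/`min_`/`not_` aliases. An illustrative, invented formula using it:

```python
from openfisca_core.model_api import max_, round_

def net_amount_sketch(salary):  # hypothetical, not a formula from the repo
    return round_(max_(salary - 100.0, 0.0), 2)
```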
b6c7338666c89843d734517e7efc8a0336bedd3b | opentreemap/treemap/urls.py | opentreemap/treemap/urls.py | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf.urls import patterns, include, url
from treemap.views import index, settings
urlpatterns = patterns(
'',
url(r'^/$', index),
url(r'^config/settings.js$', settings)
)
| from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf.urls import patterns, include, url
from treemap.views import index, settings
urlpatterns = patterns(
'',
url(r'^$', index),
url(r'^config/settings.js$', settings)
)
| Fix url pattern to stop requiring two trailing slashes. | Fix url pattern to stop requiring two trailing slashes.
In order to match this urlpattern, I had to make a request
to localhost:6060/1// with two trailing slashes required.
| Python | agpl-3.0 | RickMohr/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,recklessromeo/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,recklessromeo/otm-core,maurizi/otm-core,recklessromeo/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,recklessromeo/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core |
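The mechanics behind the fix: a URLconf mounted through `include()` matches against the remainder of the path, so once a parent pattern has consumed `1/`, the old `r'^/$'` demanded one more slash while `r'^$'` matches the empty remainder. This can be checked directly with `re`:

```python
import re

remainder = ''  # what is left of '/1/' after the parent pattern consumes '1/'
print(bool(re.match(r'^/$', remainder)))  # False: old pattern needed '/1//'
print(bool(re.match(r'^$', remainder)))   # True: new pattern matches '/1/'
```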
ccd2afdc687c3d6b7d01bed130e1b0097a4fdc2d | src/damis/run_experiment.py | src/damis/run_experiment.py | import sys
from damis.models import Experiment
exp_pk = sys.argv[1]
exp = Experiment.objects.get(pk=exp_pk)
exp.status = 'FINISHED'
exp.save()
| import sys
from damis.models import Experiment, Connection
from damis.settings import BUILDOUT_DIR
from os.path import splitext
from algorithms.preprocess import transpose
def transpose_data_callable(X, c, *args, **kwargs):
X_absolute = BUILDOUT_DIR + '/var/www' + X
Y = '%s_transposed%s' % splitext(X)
Y_absolute = BUILDOUT_DIR + '/var/www' + Y
transpose(X_absolute, Y_absolute, int(c))
return [('Y', Y)]
def do_nothing(*args, **kwargs):
return []
# Collables which get
SERVICES = {
"UPLOAD FILE": do_nothing,
"EXISTING FILE": do_nothing,
"MIDAS FILE": do_nothing,
"TECHNICAL DETAILS": do_nothing,
"CHART": do_nothing,
# "CLEAN DATA",
# "FILTER DATA",
# "SPLIT DATA",
"TRANSPOSE DATA": transpose_data_callable,
# "TRANSFORM DATA": transform_data_callable,
# "STAT PRIMITIVES",
# "MLP",
# "C45",
# "KMEANS",
# "PCA",
# "SMACOF",
# "DMA",
# "SDS",
# "SAMANN",
# "SOM",
# "SOMMDS",
# "SELECT FEATURES",
}
## Recursively walk through through tasks.
def execute_tasks(task):
# Get INPUT and COMMON parameter values.
kwargs = {}
for pv in task.parameter_values.all():
cons = Connection.objects.filter(target=pv)
if cons:
value = cons[0].source.value
else:
value = pv.value
kwargs[pv.parameter.name] = value
# Call executable
service = SERVICES[task.algorithm.title]
response = service(**kwargs) # Response dict: name -> value
# Set OUTPUT parameter values and save.
for name, value in response:
pv = task.parameter_values.get(parameter__name=name)
pv.value = value
pv.save()
task.status = 'SAVED'
task.save()
## Call its following tasks
for pv in task.parameter_values.all():
for con in Connection.objects.filter(source=pv):
next_task = con.target.task
if next_task.status == 'SAVED':
execute_tasks(next_task)
if __name__ == '__main__':
exp_pk = sys.argv[1]
exp = Experiment.objects.get(pk=exp_pk)
first_task = exp.tasks.filter(algorithm__category='DATA')[0]
execute_tasks(first_task)
exp.status = 'FINISHED'
exp.save()
| Implement experiment workflow execution with transpose method. | Implement experiment workflow execution with transpose method.
| Python | agpl-3.0 | InScience/DAMIS-old,InScience/DAMIS-old |
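To make the path handling in `transpose_data_callable` concrete, the output name derivation can be reproduced on its own (input path invented):

```python
from os.path import splitext

X = '/media/data.tab'
Y = '%s_transposed%s' % splitext(X)  # same derivation as the code above
print(Y)  # -> /media/data_transposed.tab
```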
a7b95dada6098dc2837c4072a7820818c6efc538 | molly/apps/feeds/events/urls.py | molly/apps/feeds/events/urls.py | from django.conf.urls.defaults import *
from .views import IndexView, ItemListView, ItemDetailView
urlpatterns = patterns('',
(r'^$',
IndexView, {},
'index'),
(r'^(?P<slug>[a-z\-]+)/$',
ItemListView, {},
'item_list'),
(r'^(?P<slug>[a-z\-]+)/(?P<id>\d+)/$',
ItemDetailView, {},
'item_detail'),
) | from django.conf.urls.defaults import *
from .views import IndexView, ItemListView, ItemDetailView
urlpatterns = patterns('',
(r'^$',
IndexView, {},
'index'),
(r'^(?P<slug>[a-z\-]+)/$',
ItemListView, {},
'item-list'),
(r'^(?P<slug>[a-z\-]+)/(?P<id>\d+)/$',
ItemDetailView, {},
'item-detail'),
) | Change URLs to format used in templates (consistent with news app) | Change URLs to format used in templates (consistent with news app)
| Python | apache-2.0 | mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject |
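Renaming the patterns from `item_list`/`item_detail` to `item-list`/`item-detail` matters anywhere the names are reversed, e.g. (Django-1.x-era API, as used by this project; arguments invented):

```python
from django.core.urlresolvers import reverse

reverse('item-detail', args=['some-slug', 42])
```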
536716d095b152355dfb00cff713552a96b95857 | calc_weights.py | calc_weights.py | import sys
import megatableau, data_prob
import scipy, scipy.optimize
# Argument parsing
assert len(sys.argv)==2
tableau_file_name = sys.argv[1]
# Read in data
mt = megatableau.MegaTableau(tableau_file_name)
w_0 = -scipy.rand(len(mt.weights))
nonpos_reals = [(-25,0) for wt in mt.weights]
def one_minus_probability(weights, tableau):
return 1.0-data_prob.probability(weights, tableau)
def negative_probability(weights, tableau):
return -data_prob.probability(weights, tableau)
learned_weights = scipy.optimize.fmin_tnc(data_prob.probability, w_0, args = (mt.tableau,), bounds=nonpos_reals, approx_grad=True)
print(learned_weights)
print("Probability given weights found by the original MEGT:")
print(data_prob.probability([-2.19,-0.43], mt.tableau)) | import sys
import megatableau, data_prob
import scipy, scipy.optimize
# Argument parsing
assert len(sys.argv)==2
tableau_file_name = sys.argv[1]
# Read in data
mt = megatableau.MegaTableau(tableau_file_name)
w_0 = -scipy.rand(len(mt.weights))
nonpos_reals = [(-25,0) for wt in mt.weights]
def one_minus_probability(weights, tableau):
return 1.0-data_prob.probability(weights, tableau)
def negative_probability(weights, tableau):
return -data_prob.probability(weights, tableau)
learned_weights = scipy.optimize.fmin_tnc(data_prob.probability, w_0, args = (mt.tableau,), bounds=nonpos_reals, approx_grad=True)
print(learned_weights)
# print("Probability given weights found by the original MEGT:")
# print(data_prob.probability([-2.19,-0.43], mt.tableau)) | Comment out lines accidentally left in the last commit. Oops. | Comment out lines accidentally left in the last commit. Oops.
| Python | bsd-3-clause | rdaland/PhoMEnt |
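One caveat worth flagging: `scipy.optimize.fmin_tnc` minimizes its objective, which is presumably why `negative_probability` is defined; maximizing the data probability would normally pass that wrapper instead (sketch, using the names from the script above):

```python
learned_weights = scipy.optimize.fmin_tnc(
    negative_probability, w_0, args=(mt.tableau,),
    bounds=nonpos_reals, approx_grad=True)
```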
00cea9f8e51f53f338e19adf0165031d2f9cad77 | c2corg_ui/templates/utils/format.py | c2corg_ui/templates/utils/format.py | import bbcode
import markdown
import html
from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
_markdown_parser = None
_bbcode_parser = None
def _get_markdown_parser():
global _markdown_parser
if not _markdown_parser:
extensions = [
C2CWikiLinkExtension(),
]
_markdown_parser = markdown.Markdown(output_format='xhtml5',
extensions=extensions)
return _markdown_parser
def _get_bbcode_parser():
global _bbcode_parser
if not _bbcode_parser:
_bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
return _bbcode_parser
def parse_code(text, md=True, bb=True):
if md:
text = _get_markdown_parser().convert(text)
if bb:
text = _get_bbcode_parser().format(text)
return text
def sanitize(text):
return html.escape(text)
| import bbcode
import markdown
import html
from c2corg_ui.format.wikilinks import C2CWikiLinkExtension
from markdown.extensions.nl2br import Nl2BrExtension
from markdown.extensions.toc import TocExtension
_markdown_parser = None
_bbcode_parser = None
def _get_markdown_parser():
global _markdown_parser
if not _markdown_parser:
extensions = [
C2CWikiLinkExtension(),
Nl2BrExtension(),
TocExtension(marker='[toc]', baselevel=2),
]
_markdown_parser = markdown.Markdown(output_format='xhtml5',
extensions=extensions)
return _markdown_parser
def _get_bbcode_parser():
global _bbcode_parser
if not _bbcode_parser:
_bbcode_parser = bbcode.Parser(escape_html=False, newline='\n')
return _bbcode_parser
def parse_code(text, md=True, bb=True):
if md:
text = _get_markdown_parser().convert(text)
if bb:
text = _get_bbcode_parser().format(text)
return text
def sanitize(text):
return html.escape(text)
| Enable markdown extensions for TOC and linebreaks | Enable markdown extensions for TOC and linebreaks
| Python | agpl-3.0 | Courgetteandratatouille/v6_ui,Courgetteandratatouille/v6_ui,olaurendeau/v6_ui,c2corg/v6_ui,c2corg/v6_ui,c2corg/v6_ui,Courgetteandratatouille/v6_ui,olaurendeau/v6_ui,olaurendeau/v6_ui,c2corg/v6_ui,Courgetteandratatouille/v6_ui,olaurendeau/v6_ui |
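The two added extensions change the rendered output: `TocExtension(marker='[toc]', baselevel=2)` expands `[toc]` markers and shifts headings to start at `<h2>`, while `Nl2BrExtension` turns single newlines into `<br>` tags. A quick illustration using the module above (input text invented):

```python
html = parse_code('[toc]\n# Title\nline one\nline two', bb=False)
# '# Title' renders as an <h2>, and the two lines end up separated by <br>.
```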
fee245628d492f64f3fe02563d3059317d456ed6 | trimesh/interfaces/vhacd.py | trimesh/interfaces/vhacd.py | import os
import platform
from .generic import MeshScript
from ..constants import log
from distutils.spawn import find_executable
_search_path = os.environ['PATH']
if platform.system() == 'Windows':
# split existing path by delimiter
_search_path = [i for i in _search_path.split(';') if len(i) > 0]
_search_path.append('C:\Program Files')
_search_path.append('C:\Program Files (x86)')
_search_path = ';'.join(_search_path)
log.debug('searching for vhacd in: %s', _search_path)
_vhacd_executable = None
for _name in ['vhacd', 'testVHACD']:
_vhacd_executable = find_executable(_name, path=_search_path)
if _vhacd_executable is not None:
break
exists = _vhacd_executable is not None
def convex_decomposition(mesh, **kwargs):
if not exists:
raise ValueError('No vhacd available!')
argstring = ' --input $mesh_0 --output $mesh_post --log $script'
# pass through extra arguments from the input dictionary
for key, value in kwargs.items():
argstring += ' --{} {}'.format(str(key),
str(value))
with MeshScript(meshes=[mesh],
script='',
tmpfile_ext='obj') as vhacd:
result = vhacd.run(_vhacd_executable + argstring)
return result
| import os
import platform
from .generic import MeshScript
from ..constants import log
from distutils.spawn import find_executable
_search_path = os.environ['PATH']
if platform.system() == 'Windows':
# split existing path by delimiter
_search_path = [i for i in _search_path.split(';') if len(i) > 0]
_search_path.append(r'C:\Program Files')
_search_path.append(r'C:\Program Files (x86)')
_search_path = ';'.join(_search_path)
log.debug('searching for vhacd in: %s', _search_path)
_vhacd_executable = None
for _name in ['vhacd', 'testVHACD']:
_vhacd_executable = find_executable(_name, path=_search_path)
if _vhacd_executable is not None:
break
exists = _vhacd_executable is not None
def convex_decomposition(mesh, **kwargs):
if not exists:
raise ValueError('No vhacd available!')
argstring = ' --input $mesh_0 --output $mesh_post --log $script'
# pass through extra arguments from the input dictionary
for key, value in kwargs.items():
argstring += ' --{} {}'.format(str(key),
str(value))
with MeshScript(meshes=[mesh],
script='',
tmpfile_ext='obj') as vhacd:
result = vhacd.run(_vhacd_executable + argstring)
return result
| Use raw string for Windows paths | Use raw string for Windows paths
This avoids:
DeprecationWarning: invalid escape sequence \P
_search_path.append('C:\Program Files') | Python | mit | mikedh/trimesh,mikedh/trimesh,mikedh/trimesh,dajusc/trimesh,dajusc/trimesh,mikedh/trimesh |
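The warning in the message arises because `'\P'` is not a recognized escape sequence: Python keeps the backslash but, from 3.6 on, emits a `DeprecationWarning` at compile time. A raw string yields the identical text without the warning:

```python
print('C:\Program Files')   # same output, but the literal form is deprecated
print(r'C:\Program Files')  # raw string: the backslash is explicit and safe
```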