commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
0d2079b1dcb97708dc55c32d9e2c1a0f12595875 | salt/runners/launchd.py | salt/runners/launchd.py | # -*- coding: utf-8 -*-
'''
Manage launchd plist files
'''
# Import python libs
import os
import sys
def write_launchd_plist(program):
'''
Write a launchd plist for managing salt-master or salt-minion
CLI Example:
.. code-block:: bash
salt-run launchd.write_launchd_plist salt-master
'''
plist_sample_text = """
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>org.saltstack.{program}</string>
<key>ProgramArguments</key>
<array>
<string>{python}</string>
<string>{script}</string>
</array>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
""".strip()
supported_programs = ['salt-master', 'salt-minion']
if program not in supported_programs:
sys.stderr.write("Supported programs: %r\n" % supported_programs)
sys.exit(-1)
sys.stdout.write(
plist_sample_text.format(
program=program,
python=sys.executable,
script=os.path.join(os.path.dirname(sys.executable), program)
)
)
| # -*- coding: utf-8 -*-
'''
Manage launchd plist files
'''
# Import python libs
import os
import sys
def write_launchd_plist(program):
'''
Write a launchd plist for managing salt-master or salt-minion
CLI Example:
.. code-block:: bash
salt-run launchd.write_launchd_plist salt-master
'''
plist_sample_text = '''
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>org.saltstack.{program}</string>
<key>ProgramArguments</key>
<array>
<string>{python}</string>
<string>{script}</string>
</array>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
'''.strip()
supported_programs = ['salt-master', 'salt-minion']
if program not in supported_programs:
sys.stderr.write('Supported programs: {0!r}\n'.format(supported_programs))
sys.exit(-1)
sys.stdout.write(
plist_sample_text.format(
program=program,
python=sys.executable,
script=os.path.join(os.path.dirname(sys.executable), program)
)
)
| Replace string substitution with string formatting | Replace string substitution with string formatting
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt |
69e760e4a571d16e75f30f1e97ea1a917445f333 | recipes/recipe_modules/gitiles/__init__.py | recipes/recipe_modules/gitiles/__init__.py | DEPS = [
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/python',
'recipe_engine/raw_io',
'url',
]
| DEPS = [
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/url',
]
| Switch to recipe engine "url" module. | Switch to recipe engine "url" module.
BUG=None
TEST=expectations
[email protected]
Change-Id: I43a65405c957cb6dddd64f61846b926d81046752
Reviewed-on: https://chromium-review.googlesource.com/505278
Reviewed-by: Robbie Iannucci <[email protected]>
Commit-Queue: Daniel Jacques <[email protected]>
| Python | bsd-3-clause | CoherentLabs/depot_tools,CoherentLabs/depot_tools |
d926c984e895b68ad0cc0383926451c0d7249512 | astropy/tests/tests/test_imports.py | astropy/tests/tests/test_imports.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import pkgutil
def test_imports():
"""
This just imports all modules in astropy, making sure they don't have any
dependencies that sneak through
"""
def onerror(name):
# We should raise any legitimate error that occurred, but not
# any warnings which happen to be caught because of our pytest
# settings (e.g., DeprecationWarning).
try:
raise
except Warning:
pass
for imper, nm, ispkg in pkgutil.walk_packages(['astropy'], 'astropy.',
onerror=onerror):
imper.find_module(nm)
def test_toplevel_namespace():
import astropy
d = dir(astropy)
assert 'os' not in d
assert 'log' in d
assert 'test' in d
assert 'sys' not in d
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
import pkgutil
def test_imports():
"""
This just imports all modules in astropy, making sure they don't have any
dependencies that sneak through
"""
def onerror(name):
# We should raise any legitimate error that occurred, but not
# any warnings which happen to be caught because of our pytest
# settings (e.g., DeprecationWarning).
try:
raise
except Warning:
pass
for imper, nm, ispkg in pkgutil.walk_packages(['astropy'], 'astropy.',
onerror=onerror):
imper.find_spec(nm)
def test_toplevel_namespace():
import astropy
d = dir(astropy)
assert 'os' not in d
assert 'log' in d
assert 'test' in d
assert 'sys' not in d
| Fix use of deprecated find_module | Fix use of deprecated find_module
| Python | bsd-3-clause | saimn/astropy,lpsinger/astropy,mhvk/astropy,lpsinger/astropy,pllim/astropy,astropy/astropy,lpsinger/astropy,larrybradley/astropy,StuartLittlefair/astropy,astropy/astropy,pllim/astropy,saimn/astropy,saimn/astropy,StuartLittlefair/astropy,astropy/astropy,StuartLittlefair/astropy,astropy/astropy,mhvk/astropy,astropy/astropy,pllim/astropy,mhvk/astropy,mhvk/astropy,mhvk/astropy,saimn/astropy,StuartLittlefair/astropy,lpsinger/astropy,larrybradley/astropy,larrybradley/astropy,larrybradley/astropy,pllim/astropy,saimn/astropy,StuartLittlefair/astropy,lpsinger/astropy,larrybradley/astropy,pllim/astropy |
394954fc80230e01112166db4fe133c107febead | gitautodeploy/parsers/common.py | gitautodeploy/parsers/common.py |
class WebhookRequestParser(object):
"""Abstract parent class for git service parsers. Contains helper
methods."""
def __init__(self, config):
self._config = config
def get_matching_repo_configs(self, urls):
"""Iterates over the various repo URLs provided as argument (git://,
ssh:// and https:// for the repo) and compare them to any repo URL
specified in the config"""
configs = []
for url in urls:
for repo_config in self._config['repositories']:
if repo_config in configs:
continue
if repo_config['url'] == url:
configs.append(repo_config)
elif 'url_without_usernme' in repo_config and repo_config['url_without_usernme'] == url:
configs.append(repo_config)
return configs |
class WebhookRequestParser(object):
"""Abstract parent class for git service parsers. Contains helper
methods."""
def __init__(self, config):
self._config = config
def get_matching_repo_configs(self, urls):
"""Iterates over the various repo URLs provided as argument (git://,
ssh:// and https:// for the repo) and compare them to any repo URL
specified in the config"""
configs = []
for url in urls:
for repo_config in self._config['repositories']:
if repo_config in configs:
continue
if repo_config.get('repo', repo_config.get('url')) == url:
configs.append(repo_config)
elif 'url_without_usernme' in repo_config and repo_config['url_without_usernme'] == url:
configs.append(repo_config)
return configs
| Allow more than one GitHub repo from the same user | Allow more than one GitHub repo from the same user
GitHub does not allow the same SSH key to be used for multiple
repositories on the same server belonging to the same user, see:
http://snipe.net/2013/04/multiple-github-deploy-keys-single-server
The fix there doesn't work because the "url" field is used both to
get the repo and to identify it when a push comes in. Using a local
SSH name for the repo works for getting the repo but then the name in
the push doesn't match.
This patch adds a 'repo' option that that can be set to the name of
the repo as given in the push. If 'repo' is not set the behaviour
is unchanged.
Example:
"url": "git@repo-a-shortname/somerepo.git"
"repo": "[email protected]/somerepo.git"
| Python | mit | evoja/docker-Github-Gitlab-Auto-Deploy,evoja/docker-Github-Gitlab-Auto-Deploy |
bb9d1255548b46dc2ba7a85e26606b7dd4c926f3 | examples/greeting.py | examples/greeting.py | # greeting.py
#
# Demonstration of the pyparsing module, on the prototypical "Hello, World!"
# example
#
# Copyright 2003, by Paul McGuire
#
from pyparsing import Word, alphas
# define grammar
greet = Word( alphas ) + "," + Word( alphas ) + "!"
# input string
hello = "Hello, World!"
# parse input string
print(hello, "->", greet.parseString( hello ))
| # greeting.py
#
# Demonstration of the pyparsing module, on the prototypical "Hello, World!"
# example
#
# Copyright 2003, 2019 by Paul McGuire
#
import pyparsing as pp
# define grammar
greet = pp.Word(pp.alphas) + "," + pp.Word(pp.alphas) + pp.oneOf("! ? .")
# input string
hello = "Hello, World!"
# parse input string
print(hello, "->", greet.parseString( hello ))
# parse a bunch of input strings
greet.runTests("""\
Hello, World!
Ahoy, Matey!
Howdy, Pardner!
Morning, Neighbor!
""") | Update original "Hello, World!" parser to latest coding, plus runTests | Update original "Hello, World!" parser to latest coding, plus runTests
| Python | mit | pyparsing/pyparsing,pyparsing/pyparsing |
bc6c3834cd8383f7e1f9e109f0413bb6015a92bf | go/scheduler/views.py | go/scheduler/views.py | import datetime
from django.views.generic import ListView
from go.scheduler.models import Task
class SchedulerListView(ListView):
paginate_by = 12
context_object_name = 'tasks'
template = 'scheduler/task_list.html'
def get_queryset(self):
now = datetime.datetime.utcnow()
return Task.objects.filter(
account_id=self.request.user_api.user_account_key
).order_by('-scheduled_for')
| from django.views.generic import ListView
from go.scheduler.models import Task
class SchedulerListView(ListView):
paginate_by = 12
context_object_name = 'tasks'
template = 'scheduler/task_list.html'
def get_queryset(self):
return Task.objects.filter(
account_id=self.request.user_api.user_account_key
).order_by('-scheduled_for')
| Remove unneeded datetime from view | Remove unneeded datetime from view
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go |
ebfaf30fca157e83ea9e4bf33173221fc9525caf | demo/examples/employees/forms.py | demo/examples/employees/forms.py | from datetime import date
from django import forms
from django.utils import timezone
from .models import Employee, DeptManager, Title, Salary
class ChangeManagerForm(forms.Form):
manager = forms.ModelChoiceField(queryset=Employee.objects.all()[:100])
def __init__(self, *args, **kwargs):
self.department = kwargs.pop('department')
super(ChangeManagerForm, self).__init__(*args, **kwargs)
def save(self):
new_manager = self.cleaned_data['manager']
DeptManager.objects.filter(
department=self.department
).set(
department=self.department,
employee=new_manager
)
class ChangeTitleForm(forms.Form):
position = forms.CharField()
def __init__(self, *args, **kwargs):
self.employee = kwargs.pop('employee')
super(ChangeTitleForm, self).__init__(*args, **kwargs)
def save(self):
new_title = self.cleaned_data['position']
Title.objects.filter(
employee=self.employee,
).set(
employee=self.employee,
title=new_title
)
class ChangeSalaryForm(forms.Form):
salary = forms.IntegerField()
def __init__(self, *args, **kwargs):
self.employee = kwargs.pop('employee')
super(ChangeSalaryForm, self).__init__(*args, **kwargs)
def save(self):
new_salary = self.cleaned_data['salary']
Salary.objects.filter(
employee=self.employee,
).set(
employee=self.employee,
salary=new_salary,
)
| from django import forms
from .models import Employee, DeptManager, Title, Salary
class ChangeManagerForm(forms.Form):
manager = forms.ModelChoiceField(queryset=Employee.objects.all()[:100])
def __init__(self, *args, **kwargs):
self.department = kwargs.pop('department')
super(ChangeManagerForm, self).__init__(*args, **kwargs)
def save(self):
new_manager = self.cleaned_data['manager']
DeptManager.objects.filter(
department=self.department
).set(
department=self.department,
employee=new_manager
)
class ChangeTitleForm(forms.Form):
position = forms.CharField()
def __init__(self, *args, **kwargs):
self.employee = kwargs.pop('employee')
super(ChangeTitleForm, self).__init__(*args, **kwargs)
def save(self):
new_title = self.cleaned_data['position']
Title.objects.filter(
employee=self.employee,
).set(
employee=self.employee,
title=new_title
)
class ChangeSalaryForm(forms.Form):
salary = forms.IntegerField(max_value=1000000)
def __init__(self, *args, **kwargs):
self.employee = kwargs.pop('employee')
super(ChangeSalaryForm, self).__init__(*args, **kwargs)
def save(self):
new_salary = self.cleaned_data['salary']
Salary.objects.filter(
employee=self.employee,
).set(
employee=self.employee,
salary=new_salary,
)
| Fix emplorrs demo salary db error | Fix emplorrs demo salary db error
| Python | bsd-3-clause | viewflow/django-material,viewflow/django-material,viewflow/django-material |
06f78c21e6b7e3327244e89e90365169f4c32ea1 | calaccess_campaign_browser/api.py | calaccess_campaign_browser/api.py | from tastypie.resources import ModelResource, ALL
from .models import Filer, Filing
from .utils.serializer import CIRCustomSerializer
class FilerResource(ModelResource):
class Meta:
queryset = Filer.objects.all()
serializer = CIRCustomSerializer()
filtering = { 'filer_id_raw': ALL }
excludes = [ 'id' ]
class FilingResource(ModelResource):
class Meta:
queryset = Filing.objects.all()
serializer = CIRCustomSerializer()
filtering = { 'filing_id_raw': ALL }
excludes = [ 'id' ]
| from tastypie.resources import ModelResource, ALL
from .models import Filer, Filing
from .utils.serializer import CIRCustomSerializer
class FilerResource(ModelResource):
class Meta:
queryset = Filer.objects.all()
serializer = CIRCustomSerializer()
filtering = {'filer_id_raw': ALL}
excludes = ['id']
class FilingResource(ModelResource):
class Meta:
queryset = Filing.objects.all()
serializer = CIRCustomSerializer()
filtering = {'filing_id_raw': ALL}
excludes = ['id']
| Fix style issues raised by pep8. | Fix style issues raised by pep8.
| Python | mit | myersjustinc/django-calaccess-campaign-browser,dwillis/django-calaccess-campaign-browser,myersjustinc/django-calaccess-campaign-browser,california-civic-data-coalition/django-calaccess-campaign-browser,dwillis/django-calaccess-campaign-browser,california-civic-data-coalition/django-calaccess-campaign-browser |
a473b2cb9af95c1296ecae4d2138142f2be397ee | examples/variants.py | examples/variants.py | #!/usr/bin/env python
# -*- coding: utf8 - *-
from __future__ import print_function, unicode_literals
from cihai.bootstrap import bootstrap_unihan
from cihai.core import Cihai
def variant_list(unihan, field):
for char in unihan.with_fields(field):
print("Character: {}".format(char.char))
for var in char.untagged_vars(field):
print(var)
def script(unihan_options={}):
"""Wrapped so we can test in tests/test_examples.py"""
print("This example prints variant character data.")
c = Cihai()
c.add_dataset('cihai.unihan.Unihan', namespace='unihan')
if not c.sql.is_bootstrapped: # download and install Unihan to db
bootstrap_unihan(c.sql.metadata, options=unihan_options)
c.sql.reflect_db() # automap new table created during bootstrap
print("## ZVariants")
variant_list(c.unihan, "kZVariant")
print("## kSemanticVariant")
variant_list(c.unihan, "kSemanticVariant")
print("## kSpecializedSemanticVariant")
variant_list(c.unihan, "kSpecializedSemanticVariant")
if __name__ == '__main__':
script()
| #!/usr/bin/env python
# -*- coding: utf8 - *-
from __future__ import print_function, unicode_literals
from cihai.bootstrap import bootstrap_unihan
from cihai.core import Cihai
def variant_list(unihan, field):
for char in unihan.with_fields(field):
print("Character: {}".format(char.char))
for var in char.untagged_vars(field):
print(var)
def script(unihan_options={}):
"""Wrapped so we can test in tests/test_examples.py"""
print("This example prints variant character data.")
c = Cihai()
c.add_dataset('cihai.unihan.Unihan', namespace='unihan')
if not c.sql.is_bootstrapped: # download and install Unihan to db
bootstrap_unihan(c.sql.metadata, options=unihan_options)
c.sql.reflect_db() # automap new table created during bootstrap
c.unihan.add_extension('cihai.unihan.UnihanVariants', namespace='variants')
print("## ZVariants")
variant_list(c.unihan, "kZVariant")
print("## kSemanticVariant")
variant_list(c.unihan, "kSemanticVariant")
print("## kSpecializedSemanticVariant")
variant_list(c.unihan, "kSpecializedSemanticVariant")
if __name__ == '__main__':
script()
| Add variant extension in example script | Add variant extension in example script
| Python | mit | cihai/cihai,cihai/cihai-python,cihai/cihai |
0727ad29721a3dad4c36113a299f5c67bda70822 | importlib_resources/__init__.py | importlib_resources/__init__.py | """Read resources contained within a package."""
import sys
__all__ = [
'contents',
'is_resource',
'open_binary',
'open_text',
'path',
'read_binary',
'read_text',
'Package',
'Resource',
'ResourceReader',
]
if sys.version_info >= (3,):
from importlib_resources._py3 import (
Package, Resource, contents, is_resource, open_binary, open_text, path,
read_binary, read_text)
from importlib_resources.abc import ResourceReader
else:
from importlib_resources._py2 import (
contents, is_resource, open_binary, open_text, path, read_binary,
read_text)
del __all__[-3:]
__version__ = read_text('importlib_resources', 'version.txt').strip()
| """Read resources contained within a package."""
import sys
__all__ = [
'Package',
'Resource',
'ResourceReader',
'contents',
'is_resource',
'open_binary',
'open_text',
'path',
'read_binary',
'read_text',
]
if sys.version_info >= (3,):
from importlib_resources._py3 import (
Package,
Resource,
contents,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
from importlib_resources.abc import ResourceReader
else:
from importlib_resources._py2 import (
contents,
is_resource,
open_binary,
open_text,
path,
read_binary,
read_text,
)
del __all__[:3]
__version__ = read_text('importlib_resources', 'version.txt').strip()
| Sort everything alphabetically on separate lines. | Sort everything alphabetically on separate lines.
| Python | apache-2.0 | python/importlib_resources |
7f974b87c278ef009535271461b5e49686057a9a | avatar/management/commands/rebuild_avatars.py | avatar/management/commands/rebuild_avatars.py | from django.core.management.base import NoArgsCommand
from avatar.conf import settings
from avatar.models import Avatar
class Command(NoArgsCommand):
help = ("Regenerates avatar thumbnails for the sizes specified in "
"settings.AVATAR_AUTO_GENERATE_SIZES.")
def handle_noargs(self, **options):
for avatar in Avatar.objects.all():
for size in settings.AVATAR_AUTO_GENERATE_SIZES:
if options['verbosity'] != 0:
print("Rebuilding Avatar id=%s at size %s." % (avatar.id, size))
avatar.create_thumbnail(size)
| from django.core.management.base import BaseCommand
from avatar.conf import settings
from avatar.models import Avatar
class Command(BaseCommand):
help = ("Regenerates avatar thumbnails for the sizes specified in "
"settings.AVATAR_AUTO_GENERATE_SIZES.")
def handle(self, *args, **options):
for avatar in Avatar.objects.all():
for size in settings.AVATAR_AUTO_GENERATE_SIZES:
if options['verbosity'] != 0:
print("Rebuilding Avatar id=%s at size %s." % (avatar.id, size))
avatar.create_thumbnail(size)
| Fix for django >= 1.10 | Fix for django >= 1.10
The class django.core.management.NoArgsCommand is removed. | Python | bsd-3-clause | grantmcconnaughey/django-avatar,jezdez/django-avatar,grantmcconnaughey/django-avatar,ad-m/django-avatar,ad-m/django-avatar,jezdez/django-avatar |
6e2362351d9ccaa46a5a2bc69c4360e4faff166d | iclib/qibla.py | iclib/qibla.py | from . import formula
def direction(lat, lng):
return formula.qibla(lat, lng)
def direction_dms(lat, lng):
return _dms(formula.qibla(lat, lng))
def direction_str(lat, lng, prec=0):
d, m, s = direction_dms(lat, lng)
# negative input might returns wrong result
return '{}° {}\' {:.{}f}"'.format(d, m, s, prec)
def _dms(deg):
seconds = deg * 3600
m, s = divmod(seconds, 60)
d, m = divmod(m, 60)
return (int(d), int(m), s)
| # -*- coding: utf-8 -*-
from . import formula
def direction(lat, lng):
return formula.qibla(lat, lng)
def direction_dms(lat, lng):
return _dms(formula.qibla(lat, lng))
def direction_str(lat, lng, prec=0):
d, m, s = direction_dms(lat, lng)
# negative input might returns wrong result
return '{}° {}\' {:.{}f}"'.format(d, m, s, prec)
def _dms(deg):
seconds = deg * 3600
m, s = divmod(seconds, 60)
d, m = divmod(m, 60)
return (int(d), int(m), s)
| Add encoding spec to comply Python 2 | Add encoding spec to comply Python 2
| Python | apache-2.0 | fikr4n/iclib-python |
eb1fdf3419bdfd1d5920d73a877f707162b783b0 | cfgrib/__init__.py | cfgrib/__init__.py | #
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import (
Dataset,
DatasetBuildError,
open_container,
open_file,
open_fileindex,
open_from_index,
)
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
| #
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
| Drop unused and dangerous entrypoint `open_fileindex` | Drop unused and dangerous entrypoint `open_fileindex`
| Python | apache-2.0 | ecmwf/cfgrib |
e3548d62aa67472f291f6d3c0c8beca9813d6032 | gym/envs/toy_text/discrete.py | gym/envs/toy_text/discrete.py | from gym import Env
from gym import spaces
import numpy as np
def categorical_sample(prob_n):
"""
Sample from categorical distribution
Each row specifies class probabilities
"""
prob_n = np.asarray(prob_n)
csprob_n = np.cumsum(prob_n)
return (csprob_n > np.random.rand()).argmax()
class DiscreteEnv(Env):
"""
Has the following members
- nS: number of states
- nA: number of actions
- P: transitions (*)
- isd: initial state distribution (**)
(*) dictionary dict of dicts of lists, where
P[s][a] == [(probability, nextstate, reward, done), ...]
(**) list or array of length nS
"""
def __init__(self, nS, nA, P, isd):
self.action_space = spaces.Discrete(nA)
self.observation_space = spaces.Discrete(nS)
self.nA = nA
self.P = P
self.isd = isd
self.lastaction=None # for rendering
@property
def nS(self):
return self.observation_space.n
def _reset(self):
self.s = categorical_sample(self.isd)
return self.s
def _step(self, a):
transitions = self.P[self.s][a]
i = categorical_sample([t[0] for t in transitions])
p, s, r, d= transitions[i]
self.s = s
self.lastaction=a
return (s, r, d, {"prob" : p})
| from gym import Env
from gym import spaces
import numpy as np
def categorical_sample(prob_n):
"""
Sample from categorical distribution
Each row specifies class probabilities
"""
prob_n = np.asarray(prob_n)
csprob_n = np.cumsum(prob_n)
return (csprob_n > np.random.rand()).argmax()
class DiscreteEnv(Env):
"""
Has the following members
- nS: number of states
- nA: number of actions
- P: transitions (*)
- isd: initial state distribution (**)
(*) dictionary dict of dicts of lists, where
P[s][a] == [(probability, nextstate, reward, done), ...]
(**) list or array of length nS
"""
def __init__(self, nS, nA, P, isd):
self.action_space = spaces.Discrete(nA)
self.observation_space = spaces.Discrete(nS)
self.nA = nA
self.P = P
self.isd = isd
self.lastaction=None # for rendering
self._reset()
@property
def nS(self):
return self.observation_space.n
def _reset(self):
self.s = categorical_sample(self.isd)
return self.s
def _step(self, a):
transitions = self.P[self.s][a]
i = categorical_sample([t[0] for t in transitions])
p, s, r, d= transitions[i]
self.s = s
self.lastaction=a
return (s, r, d, {"prob" : p})
| Make it possible to step() in a newly created env, rather than throwing AttributeError | Make it possible to step() in a newly created env, rather than throwing AttributeError
| Python | mit | d1hotpep/openai_gym,Farama-Foundation/Gymnasium,dianchen96/gym,machinaut/gym,dianchen96/gym,d1hotpep/openai_gym,machinaut/gym,Farama-Foundation/Gymnasium |
eb57a07277f86fc90b7845dc48fb5cde1778c8d4 | test/unit_test/test_cut_number.py | test/unit_test/test_cut_number.py | from lexos.processors.prepare.cutter import split_keep_whitespace, \
count_words, cut_by_number
class TestCutByNumbers:
def test_split_keep_whitespace(self):
assert split_keep_whitespace("Test string") == ["Test", " ", "string"]
assert split_keep_whitespace("Test") == ["Test"]
assert split_keep_whitespace(" ") == ["", " ", ""] # intended?
assert split_keep_whitespace("") == [""]
def test_count_words(self):
assert count_words(["word", "word", " ", "not", "word"]) == 4
assert count_words(['\n', '\t', ' ', '', '\u3000', "word"]) == 1
assert count_words([""]) == 0
| from lexos.processors.prepare.cutter import split_keep_whitespace, \
count_words, cut_by_number
class TestCutByNumbers:
def test_split_keep_whitespace(self):
assert split_keep_whitespace("Test string") == ["Test", " ", "string"]
assert split_keep_whitespace("Test") == ["Test"]
assert split_keep_whitespace(" ") == ["", " ", ""] # intended?
assert split_keep_whitespace("") == [""]
def test_count_words(self):
assert count_words(["word", "word", " ", "not", "word"]) == 4
assert count_words(['\n', '\t', ' ', '', '\u3000', "word"]) == 1
assert count_words([""]) == 0
def test_cut_by_number_normal(self):
assert cut_by_number("Text", 1) == ["Text"]
assert cut_by_number("This text has five words", 5) == \
["This ", "text ", "has ", "five ", "words"]
assert cut_by_number("Hanging space ", 2) == ["Hanging ", "space "]
| Test cut_by_number with words and normal chunk numbers | Test cut_by_number with words and normal chunk numbers
| Python | mit | WheatonCS/Lexos,WheatonCS/Lexos,WheatonCS/Lexos |
91e916cb67867db9ce835be28b31904e6efda832 | spacy/tests/regression/test_issue1727.py | spacy/tests/regression/test_issue1727.py | from __future__ import unicode_literals
import numpy
from ...pipeline import Tagger
from ...vectors import Vectors
from ...vocab import Vocab
from ..util import make_tempdir
def test_issue1727():
data = numpy.ones((3, 300), dtype='f')
keys = [u'I', u'am', u'Matt']
vectors = Vectors(data=data, keys=keys)
tagger = Tagger(Vocab())
tagger.add_label('PRP')
tagger.begin_training()
assert tagger.cfg.get('pretrained_dims', 0) == 0
tagger.vocab.vectors = vectors
with make_tempdir() as path:
tagger.to_disk(path)
tagger = Tagger(Vocab()).from_disk(path)
assert tagger.cfg.get('pretrained_dims', 0) == 0
| '''Test that models with no pretrained vectors can be deserialized correctly
after vectors are added.'''
from __future__ import unicode_literals
import numpy
from ...pipeline import Tagger
from ...vectors import Vectors
from ...vocab import Vocab
from ..util import make_tempdir
def test_issue1727():
data = numpy.ones((3, 300), dtype='f')
keys = [u'I', u'am', u'Matt']
vectors = Vectors(data=data, keys=keys)
tagger = Tagger(Vocab())
tagger.add_label('PRP')
tagger.begin_training()
assert tagger.cfg.get('pretrained_dims', 0) == 0
tagger.vocab.vectors = vectors
with make_tempdir() as path:
tagger.to_disk(path)
tagger = Tagger(Vocab()).from_disk(path)
assert tagger.cfg.get('pretrained_dims', 0) == 0
| Add comment to new test | Add comment to new test
| Python | mit | aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy |
9df00bbfa829006396c2a6718e4540410b27c4c6 | kolibri/tasks/apps.py | kolibri/tasks/apps.py | from __future__ import absolute_import, print_function, unicode_literals
from django.apps import AppConfig
class KolibriTasksConfig(AppConfig):
name = 'kolibri.tasks'
label = 'kolibritasks'
verbose_name = 'Kolibri Tasks'
def ready(self):
pass
| from __future__ import absolute_import, print_function, unicode_literals
from django.apps import AppConfig
class KolibriTasksConfig(AppConfig):
name = 'kolibri.tasks'
label = 'kolibritasks'
verbose_name = 'Kolibri Tasks'
def ready(self):
from kolibri.tasks.api import client
client.clear(force=True)
| Clear the job queue upon kolibri initialization. | Clear the job queue upon kolibri initialization.
| Python | mit | MingDai/kolibri,mrpau/kolibri,benjaoming/kolibri,DXCanas/kolibri,lyw07/kolibri,learningequality/kolibri,rtibbles/kolibri,mrpau/kolibri,mrpau/kolibri,rtibbles/kolibri,DXCanas/kolibri,indirectlylit/kolibri,MingDai/kolibri,lyw07/kolibri,mrpau/kolibri,christianmemije/kolibri,DXCanas/kolibri,learningequality/kolibri,jonboiser/kolibri,christianmemije/kolibri,christianmemije/kolibri,lyw07/kolibri,lyw07/kolibri,benjaoming/kolibri,jonboiser/kolibri,jonboiser/kolibri,indirectlylit/kolibri,christianmemije/kolibri,benjaoming/kolibri,learningequality/kolibri,rtibbles/kolibri,learningequality/kolibri,benjaoming/kolibri,MingDai/kolibri,indirectlylit/kolibri,MingDai/kolibri,indirectlylit/kolibri,rtibbles/kolibri,jonboiser/kolibri,DXCanas/kolibri |
bb5cbae79ef8efb8d0b7dd3ee95e76955317d3d7 | tests/integration/api/test_sc_test_jobs.py | tests/integration/api/test_sc_test_jobs.py | from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
| from tests.base import BaseTest
from tenable_io.api.models import ScTestJob
class TestScTestJobsApi(BaseTest):
def test_status(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
test_job = client.sc_test_jobs_api.status(jobs[0].job_id)
assert isinstance(test_job, ScTestJob), u'The method returns type.'
def test_by_image(self, client, image):
job = client.sc_test_jobs_api.by_image(image['id'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_by_image_digest(self, client, image):
job = client.sc_test_jobs_api.by_image_digest(image['digest'])
assert isinstance(job, ScTestJob), u'The method returns type.'
def test_list(self, client, image):
jobs = client.sc_test_jobs_api.list()
assert len(jobs) > 0, u'At least one job exists.'
assert isinstance(jobs[0], ScTestJob), u'The method returns job list.'
| Fix for broken container security test | Fix for broken container security test
| Python | mit | tenable/Tenable.io-SDK-for-Python |
f6be438e01a499dc2bde6abfa5a00fb281db7b83 | kamboo/core.py | kamboo/core.py |
import botocore
from kotocore.session import Session
class KambooConnection(object):
"""
Kamboo connection with botocore session initialized
"""
session = botocore.session.get_session()
def __init__(self, service_name="ec2", region_name="us-east-1",
credentials=None):
self.region = region_name
self.credentials = credentials
if self.credentials:
self.session.set_credentials(**self.credentials)
Connection = Session(session=self.session).get_connection(service_name)
self.conn = Connection(region_name=self.region)
|
import botocore
from kotocore.session import Session
class KambooConnection(object):
"""
Kamboo connection with botocore session initialized
"""
session = botocore.session.get_session()
def __init__(self, service_name="ec2", region_name="us-east-1",
account_id=None,
credentials=None):
self.region = region_name
self.account_id = account_id
self.credentials = credentials
if self.credentials:
self.session.set_credentials(**self.credentials)
Connection = Session(session=self.session).get_connection(service_name)
self.conn = Connection(region_name=self.region)
| Add account_id as the element of this class | Add account_id as the element of this class
| Python | apache-2.0 | henrysher/kamboo,henrysher/kamboo |
29d151366d186ed75da947f2861741ed87af902b | website/addons/badges/settings/__init__.py | website/addons/badges/settings/__init__.py | from .defaults import * # noqa
logger = logging.getLogger(__name__)
try:
from .local import * # noqa
except ImportError as error:
logger.warn('No local.py settings file found')
| # -*- coding: utf-8 -*-
import logging
from .defaults import * # noqa
logger = logging.getLogger(__name__)
try:
from .local import * # noqa
except ImportError as error:
logger.warn('No local.py settings file found')
| Add missing import to settings | Add missing import to settings
| Python | apache-2.0 | samchrisinger/osf.io,himanshuo/osf.io,jinluyuan/osf.io,chrisseto/osf.io,zachjanicki/osf.io,njantrania/osf.io,chrisseto/osf.io,reinaH/osf.io,billyhunt/osf.io,RomanZWang/osf.io,aaxelb/osf.io,arpitar/osf.io,mattclark/osf.io,sbt9uc/osf.io,jolene-esposito/osf.io,rdhyee/osf.io,amyshi188/osf.io,kwierman/osf.io,njantrania/osf.io,RomanZWang/osf.io,pattisdr/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,doublebits/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,caseyrygt/osf.io,KAsante95/osf.io,mattclark/osf.io,kwierman/osf.io,erinspace/osf.io,GaryKriebel/osf.io,ckc6cz/osf.io,jmcarp/osf.io,haoyuchen1992/osf.io,chennan47/osf.io,alexschiller/osf.io,jolene-esposito/osf.io,abought/osf.io,cwisecarver/osf.io,zachjanicki/osf.io,baylee-d/osf.io,reinaH/osf.io,lyndsysimon/osf.io,kushG/osf.io,laurenrevere/osf.io,brandonPurvis/osf.io,cldershem/osf.io,lyndsysimon/osf.io,dplorimer/osf,petermalcolm/osf.io,pattisdr/osf.io,jmcarp/osf.io,caseyrygt/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,GaryKriebel/osf.io,HarryRybacki/osf.io,adlius/osf.io,zkraime/osf.io,saradbowman/osf.io,DanielSBrown/osf.io,DanielSBrown/osf.io,abought/osf.io,ckc6cz/osf.io,kushG/osf.io,brandonPurvis/osf.io,emetsger/osf.io,doublebits/osf.io,sloria/osf.io,KAsante95/osf.io,dplorimer/osf,lyndsysimon/osf.io,wearpants/osf.io,brandonPurvis/osf.io,SSJohns/osf.io,aaxelb/osf.io,KAsante95/osf.io,bdyetton/prettychart,jmcarp/osf.io,aaxelb/osf.io,TomHeatwole/osf.io,sbt9uc/osf.io,reinaH/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,pattisdr/osf.io,cosenal/osf.io,mluke93/osf.io,caseyrollins/osf.io,felliott/osf.io,caneruguz/osf.io,danielneis/osf.io,revanthkolli/osf.io,alexschiller/osf.io,mluo613/osf.io,RomanZWang/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,cosenal/osf.io,Ghalko/osf.io,RomanZWang/osf.io,asanfilippo7/osf.io,jnayak1/osf.io,caseyrygt/osf.io,barbour-em/osf.io,icereval/osf.io,fabianvf/osf.io,RomanZWang/osf.io,jnayak1/osf.io,billyhunt/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,cslzchen/osf.io,Ghalko/osf.io,lamdnhan/osf.io,kushG/osf.io,mattclark/osf.io,himanshuo/osf.io,cldershem/osf.io,HalcyonChimera/osf.io,haoyuchen1992/osf.io,fabianvf/osf.io,GageGaskins/osf.io,jeffreyliu3230/osf.io,emetsger/osf.io,sbt9uc/osf.io,samchrisinger/osf.io,kch8qx/osf.io,barbour-em/osf.io,emetsger/osf.io,cslzchen/osf.io,TomHeatwole/osf.io,samchrisinger/osf.io,mluo613/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,sloria/osf.io,rdhyee/osf.io,himanshuo/osf.io,petermalcolm/osf.io,ticklemepierce/osf.io,Nesiehr/osf.io,HarryRybacki/osf.io,jinluyuan/osf.io,binoculars/osf.io,arpitar/osf.io,binoculars/osf.io,binoculars/osf.io,jinluyuan/osf.io,adlius/osf.io,bdyetton/prettychart,amyshi188/osf.io,cwisecarver/osf.io,bdyetton/prettychart,abought/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,caseyrollins/osf.io,cosenal/osf.io,wearpants/osf.io,kch8qx/osf.io,MerlinZhang/osf.io,caneruguz/osf.io,Nesiehr/osf.io,barbour-em/osf.io,ckc6cz/osf.io,alexschiller/osf.io,erinspace/osf.io,amyshi188/osf.io,fabianvf/osf.io,SSJohns/osf.io,cslzchen/osf.io,acshi/osf.io,felliott/osf.io,hmoco/osf.io,jeffreyliu3230/osf.io,ticklemepierce/osf.io,Johnetordoff/osf.io,arpitar/osf.io,revanthkolli/osf.io,samanehsan/osf.io,leb2dg/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,mluke93/osf.io,TomBaxter/osf.io,abought/osf.io,samanehsan/osf.io,barbour-em/osf.io,baylee-d/osf.io,felliott/osf.io,TomHeatwole/osf.io,dplorimer/osf,amyshi188/osf.io,SSJohns/osf.io,mfraezz/osf.io,kushG/osf.io,asanfilippo7/osf.io,himanshuo/osf.io,petermalcolm/osf.io,laurenrevere/osf.io,doublebits/osf.io,monikagrabowska/osf.io,jeffreyliu3230/osf.io,mfraezz/osf.io,zkraime/osf.io,haoyuchen1992/osf.io,cldershem/osf.io,revanthkolli/osf.io,alexschiller/osf.io,fabianvf/osf.io,kwierman/osf.io,ckc6cz/osf.io,mfraezz/osf.io,zachjanicki/osf.io,Nesiehr/osf.io,mluo613/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,emetsger/osf.io,erinspace/osf.io,lyndsysimon/osf.io,cldershem/osf.io,jinluyuan/osf.io,acshi/osf.io,mluke93/osf.io,lamdnhan/osf.io,acshi/osf.io,petermalcolm/osf.io,GageGaskins/osf.io,zamattiac/osf.io,zamattiac/osf.io,HarryRybacki/osf.io,njantrania/osf.io,felliott/osf.io,DanielSBrown/osf.io,jolene-esposito/osf.io,samanehsan/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,KAsante95/osf.io,kch8qx/osf.io,bdyetton/prettychart,cslzchen/osf.io,mfraezz/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,icereval/osf.io,MerlinZhang/osf.io,jmcarp/osf.io,cwisecarver/osf.io,danielneis/osf.io,ZobairAlijan/osf.io,wearpants/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,mluo613/osf.io,jnayak1/osf.io,chennan47/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,lamdnhan/osf.io,icereval/osf.io,Johnetordoff/osf.io,cosenal/osf.io,reinaH/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,kch8qx/osf.io,KAsante95/osf.io,billyhunt/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,lamdnhan/osf.io,kwierman/osf.io,ZobairAlijan/osf.io,zkraime/osf.io,arpitar/osf.io,MerlinZhang/osf.io,danielneis/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,chennan47/osf.io,jolene-esposito/osf.io,wearpants/osf.io,baylee-d/osf.io,doublebits/osf.io,hmoco/osf.io,acshi/osf.io,hmoco/osf.io,caneruguz/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,MerlinZhang/osf.io,hmoco/osf.io,cwisecarver/osf.io,crcresearch/osf.io,HarryRybacki/osf.io,crcresearch/osf.io,mluo613/osf.io,chrisseto/osf.io,sbt9uc/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,mluke93/osf.io,leb2dg/osf.io,caseyrollins/osf.io,adlius/osf.io,danielneis/osf.io,asanfilippo7/osf.io,Nesiehr/osf.io,ZobairAlijan/osf.io,njantrania/osf.io,leb2dg/osf.io,asanfilippo7/osf.io,saradbowman/osf.io,revanthkolli/osf.io,GaryKriebel/osf.io,GaryKriebel/osf.io,doublebits/osf.io,Ghalko/osf.io,dplorimer/osf,brianjgeiger/osf.io,zkraime/osf.io,caseyrygt/osf.io,billyhunt/osf.io,jeffreyliu3230/osf.io,leb2dg/osf.io,chrisseto/osf.io,alexschiller/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,GageGaskins/osf.io |
959897478bbda18f02aa6e38f2ebdd837581f1f0 | tests/test_sct_verify_signature.py | tests/test_sct_verify_signature.py | from os.path import join, dirname
from utlz import flo
from ctutlz.sct.verification import verify_signature
def test_verify_signature():
basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature')
signature_input = \
open(flo('{basedir}/signature_input_valid.bin'), 'rb').read()
signature = open(flo('{basedir}/signature.der'), 'rb').read()
pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read()
got_verified, got_output, got_cmd_res = \
verify_signature(signature_input, signature, pubkey)
assert got_verified is True
assert got_output == 'Verified OK\n'
assert got_cmd_res.exitcode == 0
signature_input = b'some invalid signature input'
got_verified, got_output, got_cmd_res = \
verify_signature(signature_input, signature, pubkey)
assert got_verified is False
assert got_output == 'Verification Failure\n'
assert got_cmd_res.exitcode == 1
| from os.path import join, dirname
from utlz import flo
from ctutlz.sct.verification import verify_signature
def test_verify_signature():
basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature')
signature_input = \
open(flo('{basedir}/signature_input_valid.bin'), 'rb').read()
signature = open(flo('{basedir}/signature.der'), 'rb').read()
pubkey = open(flo('{basedir}/pubkey.pem'), 'rb').read()
assert verify_signature(signature_input, signature, pubkey) is True
signature_input = b'some invalid signature input'
assert verify_signature(signature_input, signature, pubkey) is False
| Fix test for changed SctVerificationResult | Fix test for changed SctVerificationResult
| Python | mit | theno/ctutlz,theno/ctutlz |
1d10582d622ce6867a85d9e4e8c279ab7e4ab5ab | src/etc/tidy.py | src/etc/tidy.py | #!/usr/bin/python
import sys, fileinput, subprocess
err=0
cols=78
config_proc=subprocess.Popen([ "git", "config", "core.autocrlf" ],
stdout=subprocess.PIPE)
result=config_proc.communicate()[0]
autocrlf=result.strip() == b"true" if result is not None else False
def report_err(s):
global err
print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s))
err=1
for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")):
if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1:
report_err("tab character")
if not autocrlf and line.find('\r') != -1:
report_err("CR character")
line_len = len(line)-2 if autocrlf else len(line)-1
if line_len > cols:
report_err("line longer than %d chars" % cols)
sys.exit(err)
| #!/usr/bin/python
import sys, fileinput
err=0
cols=78
def report_err(s):
global err
print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s))
err=1
for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")):
if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1:
report_err("tab character")
if line.find('\r') != -1:
report_err("CR character")
if len(line)-1 > cols:
report_err("line longer than %d chars" % cols)
sys.exit(err)
| Revert "Don't complain about \r when core.autocrlf is on in Git" | Revert "Don't complain about \r when core.autocrlf is on in Git"
This reverts commit 828afaa2fa4cc9e3e53bda0ae3073abfcfa151ca.
| Python | apache-2.0 | ejjeong/rust,omasanori/rust,quornian/rust,mvdnes/rust,barosl/rust,aturon/rust,carols10cents/rust,mdinger/rust,AerialX/rust,krzysz00/rust,krzysz00/rust,sarojaba/rust-doc-korean,SiegeLord/rust,l0kod/rust,philyoon/rust,KokaKiwi/rust,nwin/rust,ktossell/rust,victorvde/rust,dwillmer/rust,0x73/rust,waynenilsen/rand,fabricedesre/rust,vhbit/rust,jroesch/rust,andars/rust,ejjeong/rust,avdi/rust,j16r/rust,michaelballantyne/rust-gpu,michaelballantyne/rust-gpu,l0kod/rust,ebfull/rust,XMPPwocky/rust,mitsuhiko/rust,philyoon/rust,kwantam/rust,Ryman/rust,bombless/rust,gifnksm/rust,fabricedesre/rust,defuz/rust,XMPPwocky/rust,bombless/rust,philyoon/rust,zaeleus/rust,pshc/rust,krzysz00/rust,pelmers/rust,andars/rust,michaelballantyne/rust-gpu,mitsuhiko/rust,bhickey/rand,XMPPwocky/rust,robertg/rust,LeoTestard/rust,rohitjoshi/rust,ebfull/rust,P1start/rust,vhbit/rust,dwillmer/rust,l0kod/rust,aepsil0n/rust,barosl/rust,miniupnp/rust,zachwick/rust,zachwick/rust,aidancully/rust,jashank/rust,mahkoh/rust,aidancully/rust,krzysz00/rust,erickt/rust,pelmers/rust,kmcallister/rust,bombless/rust,victorvde/rust,victorvde/rust,reem/rust,emk/rust,ruud-v-a/rust,GBGamer/rust,kwantam/rust,stepancheg/rust-ide-rust,cllns/rust,vhbit/rust,LeoTestard/rust,l0kod/rust,zachwick/rust,avdi/rust,AerialX/rust-rt-minimal,dwillmer/rust,richo/rust,pshc/rust,j16r/rust,kwantam/rust,emk/rust,kimroen/rust,avdi/rust,aneeshusa/rust,carols10cents/rust,victorvde/rust,aturon/rust,avdi/rust,aidancully/rust,ebfull/rust,hauleth/rust,rohitjoshi/rust,philyoon/rust,untitaker/rust,GBGamer/rust,stepancheg/rust-ide-rust,barosl/rust,TheNeikos/rust,SiegeLord/rust,P1start/rust,ebfull/rust,omasanori/rust,andars/rust,seanrivera/rust,pczarn/rust,huonw/rand,ktossell/rust,rohitjoshi/rust,barosl/rust,jroesch/rust,jbclements/rust,AerialX/rust-rt-minimal,erickt/rust,servo/rust,GBGamer/rust,zaeleus/rust,pythonesque/rust,TheNeikos/rust,richo/rust,aidancully/rust,emk/rust,jashank/rust,ktossell/rust,AerialX/rust-rt-minimal,bombless/rust,l0kod/rust,pythonesque/rust,LeoTestard/rust,kmcallister/rust,zubron/rust,seanrivera/rust,jbclements/rust,sarojaba/rust-doc-korean,TheNeikos/rust,KokaKiwi/rust,mahkoh/rust,cllns/rust,erickt/rust,pelmers/rust,aneeshusa/rust,sarojaba/rust-doc-korean,zubron/rust,erickt/rust,zubron/rust,KokaKiwi/rust,bluss/rand,hauleth/rust,defuz/rust,emk/rust,omasanori/rust,victorvde/rust,mihneadb/rust,barosl/rust,sarojaba/rust-doc-korean,ruud-v-a/rust,sae-bom/rust,zaeleus/rust,zachwick/rust,jashank/rust,krzysz00/rust,cllns/rust,aturon/rust,ruud-v-a/rust,ruud-v-a/rust,shepmaster/rand,TheNeikos/rust,gifnksm/rust,kimroen/rust,Ryman/rust,gifnksm/rust,graydon/rust,erickt/rust,servo/rust,aneeshusa/rust,GBGamer/rust,kimroen/rust,hauleth/rust,pshc/rust,jashank/rust,jbclements/rust,mitsuhiko/rust,jroesch/rust,kimroen/rust,miniupnp/rust,bombless/rust,kimroen/rust,pczarn/rust,LeoTestard/rust,untitaker/rust,sae-bom/rust,vhbit/rust,mahkoh/rust,jashank/rust,fabricedesre/rust,omasanori/rust,jbclements/rust,TheNeikos/rust,sarojaba/rust-doc-korean,XMPPwocky/rust,sae-bom/rust,emk/rust,jbclements/rust,quornian/rust,j16r/rust,victorvde/rust,richo/rust,andars/rust,KokaKiwi/rust,aneeshusa/rust,dinfuehr/rust,GBGamer/rust,jroesch/rust,pythonesque/rust,LeoTestard/rust,defuz/rust,mitsuhiko/rust,miniupnp/rust,mdinger/rust,SiegeLord/rust,mihneadb/rust,dwillmer/rust,robertg/rust,dwillmer/rust,AerialX/rust,erickt/rust,0x73/rust,defuz/rust,nwin/rust,Ryman/rust,jbclements/rust,zaeleus/rust,avdi/rust,nwin/rust,kmcallister/rust,erickt/rust,rohitjoshi/rust,aturon/rust,ruud-v-a/rust,dinfuehr/rust,nham/rust,dinfuehr/rust,mitsuhiko/rust,aepsil0n/rust,j16r/rust,quornian/rust,carols10cents/rust,j16r/rust,dinfuehr/rust,l0kod/rust,richo/rust,krzysz00/rust,gifnksm/rust,untitaker/rust,kimroen/rust,pczarn/rust,kmcallister/rust,fabricedesre/rust,ruud-v-a/rust,mvdnes/rust,pczarn/rust,Ryman/rust,mdinger/rust,l0kod/rust,reem/rust,michaelballantyne/rust-gpu,Ryman/rust,jbclements/rust,robertg/rust,jbclements/rust,zubron/rust,P1start/rust,XMPPwocky/rust,jroesch/rust,miniupnp/rust,rprichard/rust,rohitjoshi/rust,0x73/rust,sarojaba/rust-doc-korean,stepancheg/rust-ide-rust,michaelballantyne/rust-gpu,jashank/rust,ejjeong/rust,aepsil0n/rust,aidancully/rust,pythonesque/rust,philyoon/rust,kwantam/rust,dwillmer/rust,dwillmer/rust,achanda/rand,robertg/rust,j16r/rust,mahkoh/rust,pshc/rust,pythonesque/rust,ebfull/rust,omasanori/rust,quornian/rust,graydon/rust,barosl/rust,zachwick/rust,richo/rust,mvdnes/rust,untitaker/rust,P1start/rust,aneeshusa/rust,stepancheg/rust-ide-rust,pczarn/rust,reem/rust,zubron/rust,gifnksm/rust,servo/rust,servo/rust,nham/rust,GBGamer/rust,robertg/rust,SiegeLord/rust,0x73/rust,jashank/rust,rprichard/rust,rprichard/rust,michaelballantyne/rust-gpu,aepsil0n/rust,zubron/rust,reem/rust,pczarn/rust,fabricedesre/rust,andars/rust,zubron/rust,pshc/rust,kimroen/rust,AerialX/rust,mdinger/rust,kwantam/rust,jashank/rust,mahkoh/rust,reem/rust,seanrivera/rust,KokaKiwi/rust,SiegeLord/rust,richo/rust,P1start/rust,0x73/rust,retep998/rand,rprichard/rust,bombless/rust-docs-chinese,untitaker/rust,Ryman/rust,robertg/rust,pshc/rust,carols10cents/rust,quornian/rust,l0kod/rust,omasanori/rust,jroesch/rust,0x73/rust,mitsuhiko/rust,mvdnes/rust,KokaKiwi/rust,pythonesque/rust,aturon/rust,GrahamDennis/rand,AerialX/rust-rt-minimal,arthurprs/rand,ejjeong/rust,mdinger/rust,andars/rust,michaelballantyne/rust-gpu,graydon/rust,sae-bom/rust,emk/rust,fabricedesre/rust,bombless/rust,aneeshusa/rust,vhbit/rust,hauleth/rust,P1start/rust,philyoon/rust,XMPPwocky/rust,zubron/rust,seanrivera/rust,dwillmer/rust,servo/rust,kmcallister/rust,miniupnp/rust,AerialX/rust-rt-minimal,dinfuehr/rust,GBGamer/rust,nham/rust,nham/rust,rprichard/rust,carols10cents/rust,dinfuehr/rust,aepsil0n/rust,nwin/rust,ejjeong/rust,cllns/rust,reem/rust,pshc/rust,kmcallister/rust,pythonesque/rust,AerialX/rust,mvdnes/rust,carols10cents/rust,vhbit/rust,graydon/rust,hauleth/rust,mitsuhiko/rust,quornian/rust,mihneadb/rust,nwin/rust,graydon/rust,cllns/rust,defuz/rust,zachwick/rust,servo/rust,sae-bom/rust,LeoTestard/rust,mihneadb/rust,pelmers/rust,TheNeikos/rust,graydon/rust,avdi/rust,sae-bom/rust,GBGamer/rust,gifnksm/rust,barosl/rust,aturon/rust,ktossell/rust,rohitjoshi/rust,ejjeong/rust,untitaker/rust,mahkoh/rust,mvdnes/rust,jroesch/rust,kwantam/rust,mihneadb/rust,0x73/rust,defuz/rust,stepancheg/rust-ide-rust,P1start/rust,ebfull/rust,j16r/rust,sarojaba/rust-doc-korean,aturon/rust,AerialX/rust,pshc/rust,AerialX/rust-rt-minimal,rprichard/rust,kmcallister/rust,SiegeLord/rust,vhbit/rust,ktossell/rust,stepancheg/rust-ide-rust,mihneadb/rust,nwin/rust,zaeleus/rust,cllns/rust,zaeleus/rust,nwin/rust,miniupnp/rust,stepancheg/rust-ide-rust,aidancully/rust,nham/rust,LeoTestard/rust,seanrivera/rust,hauleth/rust,servo/rust,vhbit/rust,AerialX/rust,jroesch/rust,jbclements/rust,SiegeLord/rust,mdinger/rust,ktossell/rust,miniupnp/rust,aepsil0n/rust,emk/rust,ebfull/rand,pelmers/rust,pczarn/rust,pelmers/rust,nham/rust,nham/rust,miniupnp/rust,ktossell/rust,Ryman/rust,nwin/rust,fabricedesre/rust,seanrivera/rust,quornian/rust |
a378649f85f0bc55060ad0238e426f587bc2ff1a | core/exceptions.py | core/exceptions.py | """
exceptions - Core exceptions
"""
class InvalidMembership(Exception):
"""
The membership provided is not valid
"""
pass
class SourceNotFound(Exception):
"""
InstanceSource doesn't have an associated source.
"""
pass
class RequestLimitExceeded(Exception):
"""
A limit was exceeded for the specific request
"""
pass
class ProviderLimitExceeded(Exception):
"""
A limit was exceeded for the specific provider
"""
pass
class ProviderNotActive(Exception):
"""
The provider that was requested is not active
"""
def __init__(self, provider, *args, **kwargs):
self.message = "Cannot create driver on an inactive provider:%s" \
% (provider,)
pass
| """
exceptions - Core exceptions
"""
class InvalidMembership(Exception):
"""
The membership provided is not valid
"""
pass
class SourceNotFound(Exception):
"""
InstanceSource doesn't have an associated source.
"""
pass
class RequestLimitExceeded(Exception):
"""
A limit was exceeded for the specific request
"""
pass
class ProviderLimitExceeded(Exception):
"""
A limit was exceeded for the specific provider
"""
pass
class ProviderNotActive(Exception):
"""
The provider that was requested is not active
"""
def __init__(self, provider, *args, **kwargs):
self.message = "Cannot create driver on an inactive provider: %s" \
% (provider.location,)
pass
| Send location only when printing exception (Avoid leaking ID/UUID) | Send location only when printing exception (Avoid leaking ID/UUID)
| Python | apache-2.0 | CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend |
99c3eba0d6384cd42c90ef347823e6d66659d6e3 | viper/interpreter/prelude/operators.py | viper/interpreter/prelude/operators.py | from ..value import ForeignCloVal
def plus(a: int, b: int) -> int:
return a + b
def minus(a: int, b: int) -> int:
return a - b
def times(a: int, b: int) -> int:
return a * b
def divide(a: int, b: int) -> float:
return a / b
env = {
'+': ForeignCloVal(plus, {}),
'-': ForeignCloVal(minus, {}),
'*': ForeignCloVal(times, {}),
'//': ForeignCloVal(divide, {}),
} | from ..value import ForeignCloVal
def plus(a: int, b: int) -> int:
return a + b
def minus(a: int, b: int) -> int:
return a - b
def times(a: int, b: int) -> int:
return a * b
def divide(a: int, b: int) -> float:
return a / b
env = {
'+': ForeignCloVal(plus, {}),
'-': ForeignCloVal(minus, {}),
'*': ForeignCloVal(times, {}),
'/': ForeignCloVal(divide, {}),
} | Fix typo in division operator | Fix typo in division operator
| Python | apache-2.0 | pdarragh/Viper |
5a8199744bf658d491721b16fea7639303e47d3f | july/people/views.py | july/people/views.py | from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.template.context import RequestContext
#from google.appengine.ext import db
from july.people.models import Commit
from gae_django.auth.models import User
from django.http import Http404, HttpResponseRedirect
from django.core.urlresolvers import reverse
def user_profile(request, username):
user = User.all().filter("username", username).get()
if user == None:
raise Http404("User not found")
commits = Commit.all().ancestor(request.user.key())
return render_to_response('people/profile.html',
{"commits":commits},
RequestContext(request))
@login_required
def edit_profile(request, username, template_name='people/edit.html'):
from forms import EditUserForm
user = request.user
#CONSIDER FILES with no POST? Can that happen?
form = EditUserForm(request.POST or None, request.FILES or None)
if form.is_valid():
for key in form.cleaned_data:
setattr(user,key,form.cleaned_data.get(key))
user.put()
return HttpResponseRedirect(
reverse('member-profile', kwargs={'username': request.user.username})
)
if user == None:
raise Http404("User not found")
return render_to_response(template_name,
{'form':form,},
RequestContext(request))
| from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.template.context import RequestContext
#from google.appengine.ext import db
from july.people.models import Commit
from gae_django.auth.models import User
from django.http import Http404, HttpResponseRedirect
from django.core.urlresolvers import reverse
def user_profile(request, username):
user = User.all().filter("username", username).get()
if user == None:
raise Http404("User not found")
commits = Commit.all().ancestor(request.user.key())
return render_to_response('people/profile.html',
{"commits":commits},
RequestContext(request))
@login_required
def edit_profile(request, username, template_name='people/edit.html'):
from forms import EditUserForm
user = request.user
form = EditUserForm(request.POST or None, user=request.user)
if form.is_valid():
for key in form.cleaned_data:
setattr(user, key, form.cleaned_data.get(key))
user.put()
return HttpResponseRedirect(
reverse('member-profile', kwargs={'username':request.user.username})
)
if user == None:
raise Http404("User not found")
return render_to_response(template_name,
{'form':form},
RequestContext(request))
| Edit view pre-populates with data from user object | Edit view pre-populates with data from user object
| Python | mit | ChimeraCoder/GOctober,ChimeraCoder/GOctober,julython/julython.org,ChimeraCoder/GOctober,julython/julython.org,julython/julython.org,julython/julython.org |
a8e43dcdbdd00de9d4336385b3f3def1ae5c2515 | main/modelx.py | main/modelx.py | # -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
def avatar_url(self, size=None):
return '//gravatar.com/avatar/%(hash)s?d=identicon&r=x%(size)s' % {
'hash': hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower(),
'size': '&s=%d' % size if size > 0 else '',
}
| # -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
def avatar_url_size(self, size=None):
return '//gravatar.com/avatar/%(hash)s?d=identicon&r=x%(size)s' % {
'hash': hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower(),
'size': '&s=%d' % size if size > 0 else '',
}
avatar_url = property(avatar_url_size)
| Update UserX, with back compatibility | Update UserX, with back compatibility | Python | mit | vanessa-bell/hd-kiosk-v2,carylF/lab5,gmist/fix-5studio,lipis/the-smallest-creature,NeftaliYagua/gae-init,gmist/my-gae-init-auth,jakedotio/gae-init,carylF/lab5,lipis/gae-init,lipis/gae-init,lovesoft/gae-init,gae-init/gae-init-docs,mdxs/gae-init,tonyin/optionstg,gmist/my-gae-init,gae-init/gae-init-babel,terradigital/gae-init,lipis/life-line,gae-init/gae-init-upload,gmist/nashi-5studio,CLOUGH/info3180-lab5,topless/gae-init,tiberiucorbu/av-website,Kingclove/lab5info3180,gmist/alice-box,gmist/five-studio2,chineyting/lab5-Info3180,gae-init/phonebook,lipis/meet-notes,lipis/hurry-app,tkstman/lab5,lipis/gae-init,tonyin/optionstg,gae-init/gae-init-docs,gae-init/gae-init-babel,vanessa-bell/hd-kiosk-v2,jakedotio/gae-init,gae-init/gae-init-debug,wodore/wodore-gae,gae-init/gae-init-upload,mdxs/gae-init,d4rr3ll/gae-init-docker,vanessa-bell/hd-kiosk-v2,terradigital/gae-init,jaja14/lab5,wilfriedE/gae-init,gmist/nashi-5studio,tiberiucorbu/av-website,mdxs/gae-init,jaja14/lab5,georgekis/salary,gmist/fix-5studio,JoeyCodinja/INFO3180LAB3,gae-init/gae-init-babel,mdxs/gae-init-babel,gae-init/gae-init,gmist/1businka2,topless/gae-init-upload,gmist/1businka2,dhstack/gae-init,gmist/nashi-5studio,gmist/ctm-5studio,lovesoft/gae-init,lipis/gae-init,JoeyCodinja/INFO3180LAB3,wodore/wodore-gae,gae-init/gae-init,topless/gae-init-upload,gmist/fix-5studio,lipis/hurry-app,mdxs/gae-init-babel,wilfriedE/gae-init,lipis/the-smallest-creature,topless/gae-init-upload,lipis/life-line,lipis/the-smallest-creature,gmist/my-gae-init,NeftaliYagua/gae-init,gae-init/gae-init-docs,d4rr3ll/gae-init-docker,gae-init/gae-init-babel,jakedotio/gae-init,d4rr3ll/gae-init-docker,gmist/ctm-5studio,lovesoft/gae-init,CLOUGH/info3180-lab5,lipis/guestbook,lipis/github-stats,gae-init/gae-init,gmist/fix-5studio,JoeyCodinja/INFO3180LAB3,gae-init/gae-init-debug,d4rr3ll/gae-init-docker,lipis/electron-crash-reporter,lipis/github-stats,topless/gae-init,michals/hurry-app,gae-init/gae-init-docs,wodore/wodore-gae,michals/hurry-app,tkstman/lab5,NeftaliYagua/gae-init,gmist/ctm-5studio,jakedotio/gae-init,gmist/1businka2,lipis/electron-crash-reporter,gae-init/gae-init-upload,mdxs/gae-init-babel,georgekis/salary,michals/hurry-app,gae-init/gae-init-debug,lipis/github-stats,gmist/five-studio2,gmist/ctm-5studio,gae-init/gae-init-debug,JoeyCodinja/INFO3180LAB3,lipis/hurry-app,antotodd/lab5,gmist/alice-box,gae-init/gae-init,dhstack/gae-init,gmist/five-studio2,wilfriedE/gae-init,lipis/life-line,antotodd/lab5,topless/gae-init,gmist/my-gae-init,gae-init/gae-init-upload,gmist/five-studio2,Kingclove/lab5info3180,gmist/my-gae-init,lipis/electron-crash-reporter,chineyting/lab5-Info3180,lipis/meet-notes,mdxs/gae-init-docs,lipis/github-stats,dhstack/gae-init,topless/gae-init,terradigital/gae-init,lipis/meet-notes,tiberiucorbu/av-website,mdxs/gae-init,wodore/wodore-gae,vanessa-bell/hd-kiosk-v2,georgekis/salary |
73b9246164994049d291d5b482d4dbf2ca41a124 | tests/app/test_accessibility_statement.py | tests/app/test_accessibility_statement.py | import re
import subprocess
from datetime import datetime
def test_last_review_date():
statement_file_path = "app/templates/views/accessibility_statement.html"
# test local changes against master for a full diff of what will be merged
statement_diff = subprocess.run(
[f"git diff --exit-code origin/master -- {statement_file_path}"], stdout=subprocess.PIPE, shell=True
)
# if statement has changed, test the review date was part of those changes
if statement_diff.returncode == 1:
raw_diff = statement_diff.stdout.decode("utf-8")
today = datetime.now().strftime("%d %B %Y")
with open(statement_file_path, "r") as statement_file:
current_review_date = re.search(
(r'"Last updated": "(\d{1,2} [A-Z]{1}[a-z]+ \d{4})"'), statement_file.read()
).group(1)
# guard against changes that don't need to update the review date
if current_review_date != today:
assert '"Last updated": "' in raw_diff
| import re
import subprocess
from datetime import datetime
def test_last_review_date():
statement_file_path = "app/templates/views/accessibility_statement.html"
# test local changes against main for a full diff of what will be merged
statement_diff = subprocess.run(
[f"git diff --exit-code origin/main -- {statement_file_path}"], stdout=subprocess.PIPE, shell=True
)
# if statement has changed, test the review date was part of those changes
if statement_diff.returncode == 1:
raw_diff = statement_diff.stdout.decode("utf-8")
today = datetime.now().strftime("%d %B %Y")
with open(statement_file_path, "r") as statement_file:
current_review_date = re.search(
(r'"Last updated": "(\d{1,2} [A-Z]{1}[a-z]+ \d{4})"'), statement_file.read()
).group(1)
# guard against changes that don't need to update the review date
if current_review_date != today:
assert '"Last updated": "' in raw_diff
| Rename master branch to main | Rename master branch to main
| Python | mit | alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin |
99e9ef79178d6e2dffd8ec7ed12b3edbd8b7d0f1 | longclaw/longclawbasket/views.py | longclaw/longclawbasket/views.py | from django.shortcuts import render
from django.views.generic import ListView
from longclaw.longclawbasket.models import BasketItem
from longclaw.longclawbasket import utils
class BasketView(ListView):
model = BasketItem
template_name = "longclawbasket/basket.html"
def get_context_data(self, **kwargs):
items, _ = utils.get_basket_items(self.request)
return {"basket": items}
| from django.shortcuts import render
from django.views.generic import ListView
from longclaw.longclawbasket.models import BasketItem
from longclaw.longclawbasket import utils
class BasketView(ListView):
model = BasketItem
template_name = "longclawbasket/basket.html"
def get_context_data(self, **kwargs):
items, _ = utils.get_basket_items(self.request)
total_price = sum(item.total() for item in items)
return {"basket": items, "total_price": total_price}
| Add basket total to context | Add basket total to context
| Python | mit | JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw |
6bec22cd51288c94dff40cf0c973b975538040d5 | tests/integration/minion/test_timeout.py | tests/integration/minion/test_timeout.py | # -*- coding: utf-8 -*-
'''
Tests for various minion timeouts
'''
# Import Python libs
from __future__ import absolute_import
import os
import sys
import salt.utils.platform
# Import Salt Testing libs
from tests.support.case import ShellCase
class MinionTimeoutTestCase(ShellCase):
'''
Test minion timing functions
'''
def test_long_running_job(self):
'''
Test that we will wait longer than the job timeout for a minion to
return.
'''
# Launch the command
sleep_length = 30
if salt.utils.platform.is_windows():
popen_kwargs = {'env': dict(os.environ, PYTHONPATH=';'.join(sys.path))}
else:
popen_kwargs = None
ret = self.run_salt(
'minion test.sleep {0}'.format(sleep_length),
timeout=45,
catch_stderr=True,
popen_kwargs=popen_kwargs,
)
self.assertTrue(isinstance(ret[0], list), 'Return is not a list. Minion'
' may have returned error: {0}'.format(ret))
self.assertEqual(len(ret[0]), 2, 'Standard out wrong length {}'.format(ret))
self.assertTrue('True' in ret[0][1], 'Minion did not return True after '
'{0} seconds. ret={1}'.format(sleep_length, ret))
| # -*- coding: utf-8 -*-
'''
Tests for various minion timeouts
'''
# Import Python libs
from __future__ import absolute_import
import os
import sys
import salt.utils.platform
# Import Salt Testing libs
from tests.support.case import ShellCase
class MinionTimeoutTestCase(ShellCase):
'''
Test minion timing functions
'''
def test_long_running_job(self):
'''
Test that we will wait longer than the job timeout for a minion to
return.
'''
# Launch the command
sleep_length = 30
if salt.utils.platform.is_windows():
popen_kwargs = {'env': dict(os.environ, PYTHONPATH=';'.join(sys.path))}
else:
popen_kwargs = None
ret = self.run_salt(
'minion test.sleep {0}'.format(sleep_length),
timeout=90,
catch_stderr=True,
popen_kwargs=popen_kwargs,
)
self.assertTrue(isinstance(ret[0], list), 'Return is not a list. Minion'
' may have returned error: {0}'.format(ret))
self.assertEqual(len(ret[0]), 2, 'Standard out wrong length {}'.format(ret))
self.assertTrue('True' in ret[0][1], 'Minion did not return True after '
'{0} seconds. ret={1}'.format(sleep_length, ret))
| Increase timeout for test_long_running_job test | Increase timeout for test_long_running_job test
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt |
6cfc94d8a03439c55808090aa5e3a4f35c288887 | menpodetect/tests/opencv_test.py | menpodetect/tests/opencv_test.py | from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
| from numpy.testing import assert_allclose
from menpodetect.opencv import (load_opencv_frontal_face_detector,
load_opencv_eye_detector)
import menpo.io as mio
takeo = mio.import_builtin_asset.takeo_ppm()
def test_frontal_face_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy)
assert len(pcs) == 1
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
def test_frontal_face_detector_min_neighbors():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_frontal_face_detector()
pcs = opencv_detector(takeo_copy, min_neighbours=100)
assert len(pcs) == 0
assert takeo_copy.n_channels == 3
def test_eye_detector():
takeo_copy = takeo.copy()
opencv_detector = load_opencv_eye_detector()
pcs = opencv_detector(takeo_copy, min_size=(5, 5))
assert_allclose(len(pcs), 1)
assert takeo_copy.n_channels == 3
assert takeo_copy.landmarks['opencv_0'][None].n_points == 4
| Use assert_allclose so we can see the appveyor failure | Use assert_allclose so we can see the appveyor failure
| Python | bsd-3-clause | yuxiang-zhou/menpodetect,jabooth/menpodetect,yuxiang-zhou/menpodetect,jabooth/menpodetect |
1f98e497136ce3d9da7e63a6dc7c3f67fedf50b5 | observations/views.py | observations/views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import messages
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import FormView
from braces.views import LoginRequiredMixin
from .forms import ObservationForm, BatchUploadForm
class AddObservationView(FormView):
"""
Add a single observation.
"""
form_class = ObservationForm
template_name = "observations/add_observation.html"
success_url = reverse_lazy('observations:add_observation')
class UploadObservationsView(LoginRequiredMixin, FormView):
"""
Upload a file of observations.
"""
form_class = BatchUploadForm
template_name = "observations/upload_observations.html"
success_url = reverse_lazy('observations:upload_observations')
def form_valid(self, form):
form.process_file()
messages.success(self.request, _("File uploaded successfully!"))
return super(UploadObservationsView, self).form_valid(form)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import messages
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from django.views.generic.edit import FormView
from braces.views import LoginRequiredMixin
from .forms import ObservationForm, BatchUploadForm
class AddObservationView(FormView):
"""
Add a single observation.
"""
form_class = ObservationForm
template_name = "observations/add_observation.html"
success_url = reverse_lazy('observations:add_observation')
def form_valid(self, form):
observation = form.save(commit=False)
observation.observer = self.request.observer
observation.save()
return super(AddObservationView, self).form_valid(form)
class UploadObservationsView(LoginRequiredMixin, FormView):
"""
Upload a file of observations.
"""
form_class = BatchUploadForm
template_name = "observations/upload_observations.html"
success_url = reverse_lazy('observations:upload_observations')
def form_valid(self, form):
form.process_file()
messages.success(self.request, _("File uploaded successfully!"))
return super(UploadObservationsView, self).form_valid(form)
| Save the observation if the form was valid. | Save the observation if the form was valid.
| Python | mit | zsiciarz/variablestars.net,zsiciarz/variablestars.net,zsiciarz/variablestars.net |
091ebd935c6145ac233c03bedeb52c65634939f4 | Lib/xml/__init__.py | Lib/xml/__init__.py | """Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
try:
import _xmlplus
except ImportError:
pass
else:
import sys
sys.modules[__name__] = _xmlplus
| """Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
__version__ = "$Revision$"[1:-1].split()[1]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
| Include the version-detecting code to allow PyXML to override the "standard" xml package. Require at least PyXML 0.6.1. | Include the version-detecting code to allow PyXML to override the "standard"
xml package. Require at least PyXML 0.6.1.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator |
3a27568211c07cf614aa9865a2f08d2a9b9bfb71 | dinosaurs/views.py | dinosaurs/views.py | import os
import json
import httplib as http
import tornado.web
import tornado.ioloop
from dinosaurs import api
from dinosaurs import settings
class SingleStatic(tornado.web.StaticFileHandler):
def initialize(self, path):
self.dirname, self.filename = os.path.split(path)
super(SingleStatic, self).initialize(self.dirname)
def get(self, path=None, include_body=True):
super(SingleStatic, self).get(self.filename, include_body)
class DomainAPIHandler(tornado.web.RequestHandler):
def get(self):
self.write({
'availableDomains': settings.DOMAINS.keys()
})
class EmailAPIHandler(tornado.web.RequestHandler):
def post(self):
try:
req_json = json.loads(self.request.body)
except ValueError:
raise tornado.web.HTTPError(http.BAD_REQUEST)
email = req_json.get('email')
domain = req_json.get('domain')
connection = api.get_connection(domain)
if not email or not domain or not connection:
raise tornado.web.HTTPError(http.BAD_REQUEST)
ret, passwd = api.create_email(connection, email)
self.write({
'password': passwd,
'email': ret['login'],
'domain': ret['domain']
})
self.set_status(http.CREATED)
| import os
import json
import httplib as http
import tornado.web
import tornado.ioloop
from dinosaurs import api
from dinosaurs import settings
class SingleStatic(tornado.web.StaticFileHandler):
def initialize(self, path):
self.dirname, self.filename = os.path.split(path)
super(SingleStatic, self).initialize(self.dirname)
def get(self, path=None, include_body=True):
super(SingleStatic, self).get(self.filename, include_body)
class DomainAPIHandler(tornado.web.RequestHandler):
def get(self):
self.write({
'availableDomains': settings.DOMAINS.keys()
})
class EmailAPIHandler(tornado.web.RequestHandler):
def write_error(self, status_code, **kwargs):
self.finish({
"code": status_code,
"message": self._reason,
})
def post(self):
try:
req_json = json.loads(self.request.body)
except ValueError:
raise tornado.web.HTTPError(http.BAD_REQUEST)
email = req_json.get('email')
domain = req_json.get('domain')
connection = api.get_connection(domain)
if not email or not domain or not connection:
raise tornado.web.HTTPError(http.BAD_REQUEST)
try:
ret, passwd = api.create_email(connection, email)
except api.YandexException as e:
if e.message != 'occupied':
raise
self.write({})
raise tornado.web.HTTPError(http.FORBIDDEN)
self.write({
'password': passwd,
'email': ret['login'],
'domain': ret['domain']
})
self.set_status(http.CREATED)
| Return errors in json only | Return errors in json only
| Python | mit | chrisseto/dinosaurs.sexy,chrisseto/dinosaurs.sexy |
f574e19b14ff861c45f6c66c64a2570bdb0e3a3c | crawl_comments.py | crawl_comments.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__doc__ = '''
Crawl comment from nicovideo.jp
Usage:
main_crawl.py [--sqlite <sqlite>] [--csv <csv>]
Options:
--sqlite <sqlite> (optional) path of comment DB [default: comments.sqlite3]
--csv <csv> (optional) path of csv file contains urls of videos [default: crawled.csv]
'''
from docopt import docopt
from nicocrawler.nicocrawler import NicoCrawler
if __name__ == '__main__':
# コマンドライン引数の取得
args = docopt(__doc__)
sqlite_path = args['--sqlite']
csv_path = args['--csv']
ncrawler = NicoCrawler()
ncrawler.connect_sqlite(sqlite_path)
url = 'http://ch.nicovideo.jp/2016winter_anime'
df = ncrawler.get_all_video_url_of_season(url)
ncrawler.initialize_csv_from_db(csv_path)
# # デイリーランキング1~300位の動画を取得する
# url = 'http://www.nicovideo.jp/ranking/fav/daily/all'
# ncrawler.initialize_csv_from_url(url, csv_path, max_page=3)
# ncrawler.get_all_comments_of_csv(csv_path, max_n_iter=1)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
__doc__ = '''
Crawl comment from nicovideo.jp
Usage:
crawl_comments.py [--sqlite <sqlite>] [--csv <csv>]
Options:
--sqlite <sqlite> (optional) path of comment DB [default: comments.sqlite3]
--csv <csv> (optional) path of csv file contains urls of videos [default: crawled.csv]
'''
from docopt import docopt
from nicocrawler.nicocrawler import NicoCrawler
if __name__ == '__main__':
# コマンドライン引数の取得
args = docopt(__doc__)
sqlite_path = args['--sqlite']
csv_path = args['--csv']
ncrawler = NicoCrawler()
ncrawler.connect_sqlite(sqlite_path)
url = 'http://ch.nicovideo.jp/2016winter_anime'
df = ncrawler.get_all_video_url_of_season(url)
ncrawler.initialize_csv_from_db(csv_path)
# # デイリーランキング1~300位の動画を取得する
# url = 'http://www.nicovideo.jp/ranking/fav/daily/all'
# ncrawler.initialize_csv_from_url(url, csv_path, max_page=3)
# ncrawler.get_all_comments_of_csv(csv_path, max_n_iter=1)
| Apply change of file name | Apply change of file name
| Python | mit | tosh1ki/NicoCrawler |
317926c18ac2e139d2018acd767d10b4f53428f3 | installer/installer_config/views.py | installer/installer_config/views.py | from django.shortcuts import render
from django.shortcuts import render_to_response
from django.views.generic import CreateView, UpdateView, DeleteView
from installer_config.models import EnvironmentProfile, UserChoice, Step
from installer_config.forms import EnvironmentForm
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
class CreateEnvironmentProfile(CreateView):
model = EnvironmentProfile
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
def form_valid(self, form):
form.instance.user = self.request.user
return super(CreateEnvironmentProfile, self).form_valid(form)
def post(self, request, *args, **kwargs):
form_class = self.get_form_class()
form = form_class(request.POST)
if form.is_valid():
config_profile = form.save(commit=False)
config_profile.user = request.user
config_profile.save()
return HttpResponseRedirect(reverse('profile:profile'))
return self.render_to_response({'form': form})
class UpdateEnvironmentProfile(UpdateView):
model = EnvironmentProfile
context_object_name = 'profile'
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
class DeleteEnvironmentProfile(DeleteView):
model = EnvironmentProfile
success_url = '/profile'
def download_profile_view(request, **kwargs):
choices = UserChoice.objects.filter(profiles=kwargs['pk']).all()
# import pdb; pdb.set_trace()
response = render_to_response('installer_template.py', {'choices': choices},
content_type='application')
response['Content-Disposition'] = 'attachment; filename=something.py'
return response
| from django.shortcuts import render
from django.shortcuts import render_to_response
from django.views.generic import CreateView, UpdateView, DeleteView
from installer_config.models import EnvironmentProfile, UserChoice, Step
from installer_config.forms import EnvironmentForm
from django.core.urlresolvers import reverse
class CreateEnvironmentProfile(CreateView):
model = EnvironmentProfile
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
def form_valid(self, form):
form.instance.user = self.request.user
return super(CreateEnvironmentProfile, self).form_valid(form)
class UpdateEnvironmentProfile(UpdateView):
model = EnvironmentProfile
context_object_name = 'profile'
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
class DeleteEnvironmentProfile(DeleteView):
model = EnvironmentProfile
success_url = '/profile'
def download_profile_view(request, **kwargs):
choices = UserChoice.objects.filter(profiles=kwargs['pk']).all()
response = render_to_response('installer_template.py', {'choices': choices},
content_type='application')
response['Content-Disposition'] = 'attachment; filename=something.py'
return response
| Remove unneeded post method from CreateEnvProfile view | Remove unneeded post method from CreateEnvProfile view
| Python | mit | ezPy-co/ezpy,alibulota/Package_Installer,ezPy-co/ezpy,alibulota/Package_Installer |
c24dbc2d4d8b59a62a68f326edb350b3c633ea25 | interleaving/interleaving_method.py | interleaving/interleaving_method.py | class InterleavingMethod(object):
'''
Interleaving
'''
def interleave(self, k, a, b):
'''
k: the maximum length of resultant interleaving
a: a list of document IDs
b: a list of document IDs
Return an instance of Ranking
'''
raise NotImplementedError()
def multileave(self, k, *lists):
'''
k: the maximum length of resultant multileaving
*lists: lists of document IDs
Return an instance of Ranking
'''
raise NotImplementedError()
def evaluate(self, ranking, clicks):
'''
ranking: an instance of Ranking generated by Balanced.interleave
clicks: a list of indices clicked by a user
Return one of the following tuples:
- (1, 0): Ranking 'a' won
- (0, 1): Ranking 'b' won
- (0, 0): Tie
'''
raise NotImplementedError()
| class InterleavingMethod(object):
'''
Interleaving
'''
def interleave(self, k, a, b):
'''
k: the maximum length of resultant interleaving
a: a list of document IDs
b: a list of document IDs
Return an instance of Ranking
'''
raise NotImplementedError()
def multileave(self, k, *lists):
'''
k: the maximum length of resultant multileaving
*lists: lists of document IDs
Return an instance of Ranking
'''
raise NotImplementedError()
def evaluate(self, ranking, clicks):
'''
ranking: an instance of Ranking generated by Balanced.interleave
clicks: a list of indices clicked by a user
Return a list of pairs of ranker indices
in which element (i, j) indicates i won j.
e.g. a result [(1, 0), (2, 1), (2, 0)] indicates
ranker 1 won ranker 0, and ranker 2 won ranker 0 as well as ranker 1.
'''
raise NotImplementedError()
| Change the comment of InterleavingMethod.evaluate | Change the comment of InterleavingMethod.evaluate
| Python | mit | mpkato/interleaving |
85769162560d83a58ccc92f818559ddd3dce2a09 | pages/index.py | pages/index.py | import web
from modules.base import renderer
from modules.login import loginInstance
from modules.courses import Course
#Index page
class IndexPage:
#Simply display the page
def GET(self):
if loginInstance.isLoggedIn():
userInput = web.input();
if "logoff" in userInput:
loginInstance.disconnect();
return renderer.index(False)
else:
courses = Course.GetAllCoursesIds()
return renderer.main(courses)
else:
return renderer.index(False)
#Try to log in
def POST(self):
userInput = web.input();
if "login" in userInput and "password" in userInput and loginInstance.connect(userInput.login,userInput.password):
return renderer.main()
else:
return renderer.index(True) | import web
from modules.base import renderer
from modules.login import loginInstance
from modules.courses import Course
#Index page
class IndexPage:
#Simply display the page
def GET(self):
if loginInstance.isLoggedIn():
userInput = web.input();
if "logoff" in userInput:
loginInstance.disconnect();
return renderer.index(False)
else:
return renderer.main(Course.GetAllCoursesIds())
else:
return renderer.index(False)
#Try to log in
def POST(self):
userInput = web.input();
if "login" in userInput and "password" in userInput and loginInstance.connect(userInput.login,userInput.password):
return renderer.main(Course.GetAllCoursesIds())
else:
return renderer.index(True) | Fix another bug in the authentication | Fix another bug in the authentication
| Python | agpl-3.0 | layus/INGInious,GuillaumeDerval/INGInious,GuillaumeDerval/INGInious,layus/INGInious,layus/INGInious,GuillaumeDerval/INGInious,GuillaumeDerval/INGInious,layus/INGInious |
6d8dbb6621da2ddfffd58303131eb6cda345e37c | pombola/south_africa/urls.py | pombola/south_africa/urls.py | from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView,SAPlaceDetailSub
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
| from django.conf.urls import patterns, include, url
from pombola.core.views import PersonDetailSub
from pombola.south_africa.views import LatLonDetailView,SAPlaceDetailSub
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
url(r'^person/(?P<slug>[-\w]+)/$', PersonDetailSub.as_view(), { 'sub_page': 'experience' }, name='person'),
)
| Make person experience the default tab for ZA | Make person experience the default tab for ZA
| Python | agpl-3.0 | hzj123/56th,mysociety/pombola,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,geoffkilpin/pombola,patricmutwiri/pombola,hzj123/56th,mysociety/pombola,patricmutwiri/pombola,ken-muturi/pombola,geoffkilpin/pombola,ken-muturi/pombola,hzj123/56th,patricmutwiri/pombola,patricmutwiri/pombola,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,geoffkilpin/pombola,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,mysociety/pombola,mysociety/pombola,patricmutwiri/pombola |
b65283984b1be7e8bb88d3281bb3654a3dd12233 | nova/tests/scheduler/__init__.py | nova/tests/scheduler/__init__.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Openstack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(vish): this forces the fixtures from tests/__init.py:setup() to work
from nova.tests import *
| Make sure test setup is run for subdirectories | Make sure test setup is run for subdirectories | Python | apache-2.0 | n0ano/ganttclient |
|
84ee720fd2d8403de5f49c54fc41bfcb67a78f78 | stdnum/tr/__init__.py | stdnum/tr/__init__.py | # __init__.py - collection of Turkish numbers
# coding: utf-8
#
# Copyright (C) 2016 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of Turkish numbers."""
| # __init__.py - collection of Turkish numbers
# coding: utf-8
#
# Copyright (C) 2016 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Collection of Turkish numbers."""
from stdnum.tr import vkn as vat # noqa: F401
| Add missing vat alias for Turkey | Add missing vat alias for Turkey
| Python | lgpl-2.1 | arthurdejong/python-stdnum,arthurdejong/python-stdnum,arthurdejong/python-stdnum |
cf07c34fe3a3d7b8767e50e77e609253dd177cff | moulinette/utils/serialize.py | moulinette/utils/serialize.py | import logging
from json.encoder import JSONEncoder
import datetime
logger = logging.getLogger('moulinette.utils.serialize')
# JSON utilities -------------------------------------------------------
class JSONExtendedEncoder(JSONEncoder):
"""Extended JSON encoder
Extend default JSON encoder to recognize more types and classes. It
will never raise if the object can't be encoded and return its repr
instead.
The following objects and types are supported:
- set: converted into list
"""
def default(self, o):
"""Return a serializable object"""
# Convert compatible containers into list
if isinstance(o, set) or (
hasattr(o, '__iter__') and hasattr(o, 'next')):
return list(o)
# Convert compatible containers into list
if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
return str(o)
# Return the repr for object that json can't encode
logger.warning('cannot properly encode in JSON the object %s, '
'returned repr is: %r', type(o), o)
return repr(o)
| import logging
from json.encoder import JSONEncoder
import datetime
logger = logging.getLogger('moulinette.utils.serialize')
# JSON utilities -------------------------------------------------------
class JSONExtendedEncoder(JSONEncoder):
"""Extended JSON encoder
Extend default JSON encoder to recognize more types and classes. It
will never raise if the object can't be encoded and return its repr
instead.
The following objects and types are supported:
- set: converted into list
"""
def default(self, o):
"""Return a serializable object"""
# Convert compatible containers into list
if isinstance(o, set) or (
hasattr(o, '__iter__') and hasattr(o, 'next')):
return list(o)
# Convert compatible containers into list
if isinstance(o, datetime.datetime) or isinstance(o, datetime.date):
return o.isoformat()
# Return the repr for object that json can't encode
logger.warning('cannot properly encode in JSON the object %s, '
'returned repr is: %r', type(o), o)
return repr(o)
| Use isoformat date RFC 3339 | [enh] Use isoformat date RFC 3339 | Python | agpl-3.0 | YunoHost/moulinette |
25e71a56d48e5bdc4d73522333196d69d735707a | ports/nrf/boards/pca10056/examples/buttons.py | ports/nrf/boards/pca10056/examples/buttons.py | import board
import digitalio
import gamepad
import time
pad = gamepad.GamePad(
digitalio.DigitalInOut(board.PA11),
digitalio.DigitalInOut(board.PA12),
digitalio.DigitalInOut(board.PA24),
digitalio.DigitalInOut(board.PA25),
)
prev_buttons = 0
while True:
buttons = pad.get_pressed()
if buttons != prev_buttons:
for i in range(0, 4):
bit = (1 << i)
if (buttons & bit) != (prev_buttons & bit):
print('Button %d %s' % (i + 1, 'pressed' if buttons & bit else 'released'))
prev_buttons = buttons
time.sleep(0.1)
| import board
import digitalio
import gamepad
import time
pad = gamepad.GamePad(
digitalio.DigitalInOut(board.P0_11),
digitalio.DigitalInOut(board.P0_12),
digitalio.DigitalInOut(board.P0_24),
digitalio.DigitalInOut(board.P0_25),
)
prev_buttons = 0
while True:
buttons = pad.get_pressed()
if buttons != prev_buttons:
for i in range(0, 4):
bit = (1 << i)
if (buttons & bit) != (prev_buttons & bit):
print('Button %d %s' % (i + 1, 'pressed' if buttons & bit else 'released'))
prev_buttons = buttons
time.sleep(0.1)
| Update the PCA10056 example to use new pin naming | nrf: Update the PCA10056 example to use new pin naming
| Python | mit | adafruit/micropython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/micropython,adafruit/micropython,adafruit/micropython,adafruit/circuitpython,adafruit/micropython |
396ab20874a0c3492482a8ae03fd7d61980917a5 | chatterbot/adapters/logic/closest_match.py | chatterbot/adapters/logic/closest_match.py | # -*- coding: utf-8 -*-
from fuzzywuzzy import fuzz
from .base_match import BaseMatchAdapter
class ClosestMatchAdapter(BaseMatchAdapter):
"""
The ClosestMatchAdapter logic adapter creates a response by
using fuzzywuzzy's process class to extract the most similar
response to the input. This adapter selects a response to an
input statement by selecting the closest known matching
statement based on the Levenshtein Distance between the text
of each statement.
"""
def get(self, input_statement):
"""
Takes a statement string and a list of statement strings.
Returns the closest matching statement from the list.
"""
statement_list = self.context.storage.get_response_statements()
if not statement_list:
if self.has_storage_context:
# Use a randomly picked statement
self.logger.info(
u'No statements have known responses. ' +
u'Choosing a random response to return.'
)
return 0, self.context.storage.get_random()
else:
raise self.EmptyDatasetException()
confidence = -1
closest_match = input_statement
# Find the closest matching known statement
for statement in statement_list:
ratio = fuzz.ratio(input_statement.text.lower(), statement.text.lower())
if ratio > confidence:
confidence = ratio
closest_match = statement
# Convert the confidence integer to a percent
confidence /= 100.0
return confidence, closest_match
| # -*- coding: utf-8 -*-
from fuzzywuzzy import fuzz
from .base_match import BaseMatchAdapter
class ClosestMatchAdapter(BaseMatchAdapter):
"""
The ClosestMatchAdapter logic adapter selects a known response
to an input by searching for a known statement that most closely
matches the input based on the Levenshtein Distance between the text
of each statement.
"""
def get(self, input_statement):
"""
Takes a statement string and a list of statement strings.
Returns the closest matching statement from the list.
"""
statement_list = self.context.storage.get_response_statements()
if not statement_list:
if self.has_storage_context:
# Use a randomly picked statement
self.logger.info(
u'No statements have known responses. ' +
u'Choosing a random response to return.'
)
return 0, self.context.storage.get_random()
else:
raise self.EmptyDatasetException()
confidence = -1
closest_match = input_statement
# Find the closest matching known statement
for statement in statement_list:
ratio = fuzz.ratio(input_statement.text.lower(), statement.text.lower())
if ratio > confidence:
confidence = ratio
closest_match = statement
# Convert the confidence integer to a percent
confidence /= 100.0
return confidence, closest_match
| Update closest match adapter docstring. | Update closest match adapter docstring.
| Python | bsd-3-clause | Reinaesaya/OUIRL-ChatBot,vkosuri/ChatterBot,gunthercox/ChatterBot,Gustavo6046/ChatterBot,maclogan/VirtualPenPal,Reinaesaya/OUIRL-ChatBot,davizucon/ChatterBot |
2947fe97d466872de05ada289d9172f41895969c | tests/templates/components/test_radios_with_images.py | tests/templates/components/test_radios_with_images.py | import json
def test_govuk_frontend_jinja_overrides_on_design_system_v3():
with open("package.json") as package_file:
package_json = json.load(package_file)
assert package_json["dependencies"]["govuk-frontend"].startswith("3."), (
"After upgrading the Design System, manually validate that "
"`app/templates/govuk_frontend_jinja_overrides/templates/components/*/template.html`"
"are all structurally-correct and up-to-date macros. If not, update the macros or retire them and update the "
"rendering process."
)
| import json
from importlib import metadata
from packaging.version import Version
def test_govuk_frontend_jinja_overrides_on_design_system_v3():
with open("package.json") as package_file:
package_json = json.load(package_file)
govuk_frontend_version = Version(package_json["dependencies"]["govuk-frontend"])
govuk_frontend_jinja_version = Version(metadata.version("govuk-frontend-jinja"))
# This should be checking govuk_frontend_version == 3.14.x, but we're not there yet. Update this when we are.
# Compatibility between these two libs is defined at https://github.com/LandRegistry/govuk-frontend-jinja/
correct_govuk_frontend_version = Version("3.0.0") <= govuk_frontend_version < Version("4.0.0")
correct_govuk_frontend_jinja_version = Version("1.5.0") <= govuk_frontend_jinja_version < Version("1.6.0")
assert correct_govuk_frontend_version and correct_govuk_frontend_jinja_version, (
"After upgrading either of the Design System packages, you must validate that "
"`app/templates/govuk_frontend_jinja_overrides/templates/components/*/template.html`"
"are all structurally-correct and up-to-date macros. If not, update the macros or retire them and update the "
"rendering process."
)
| Update GOV.UK Frontend/Jinja lib test | Update GOV.UK Frontend/Jinja lib test
Check both the javascript and python packages, and make sure they're
both on our expected versions. If not, prompt the developer to check
macros.
| Python | mit | alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin |
15ae458f7cf1a8257967b2b3b0ceb812547c4766 | IPython/utils/tests/test_pycolorize.py | IPython/utils/tests/test_pycolorize.py | """Test suite for our color utilities.
Authors
-------
* Min RK
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING.txt, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# third party
import nose.tools as nt
# our own
from IPython.utils.PyColorize import Parser
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
def test_unicode_colorize():
p = Parser()
f1 = p.format('1/0', 'str')
f2 = p.format(u'1/0', 'str')
nt.assert_equal(f1, f2)
| # coding: utf-8
"""Test suite for our color utilities.
Authors
-------
* Min RK
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING.txt, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# third party
import nose.tools as nt
# our own
from IPython.utils.PyColorize import Parser
import io
#-----------------------------------------------------------------------------
# Test functions
#-----------------------------------------------------------------------------
sample = u"""
def function(arg, *args, kwarg=True, **kwargs):
'''
this is docs
'''
pass is True
False == None
with io.open(ru'unicode'):
raise ValueError("\n escape \r sequence")
print("wěird ünicoðe")
class Bar(Super):
def __init__(self):
super(Bar, self).__init__(1**2, 3^4, 5 or 6)
"""
def test_loop_colors():
for scheme in ('Linux', 'NoColor','LightBG'):
def test_unicode_colorize():
p = Parser()
f1 = p.format('1/0', 'str', scheme=scheme)
f2 = p.format(u'1/0', 'str', scheme=scheme)
nt.assert_equal(f1, f2)
def test_parse_sample():
"""and test writing to a buffer"""
buf = io.StringIO()
p = Parser()
p.format(sample, buf, scheme=scheme)
buf.seek(0)
f1 = buf.read()
nt.assert_not_in('ERROR', f1)
def test_parse_error():
p = Parser()
f1 = p.format(')', 'str', scheme=scheme)
if scheme != 'NoColor':
nt.assert_in('ERROR', f1)
yield test_unicode_colorize
yield test_parse_sample
yield test_parse_error
| Test more edge cases of the highlighting parser | Test more edge cases of the highlighting parser
| Python | bsd-3-clause | ipython/ipython,ipython/ipython |
6cb0822aade07999d54e5fcd19eb2c7322abc80a | measurement/admin.py | measurement/admin.py | from django.contrib import admin
from .models import Measurement
admin.site.register(Measurement)
| from django.contrib import admin
from .models import Measurement
class MeasurementAdmin(admin.ModelAdmin):
model = Measurement
def get_queryset(self, request):
return super(MeasurementAdmin, self).get_queryset(request).select_related('patient__user')
admin.site.register(Measurement, MeasurementAdmin)
| Improve performance @ Measurement Admin | Improve performance @ Measurement Admin
| Python | mit | sigurdsa/angelika-api |
b9b3837937341e6b1b052bbfdd979e3bb57d87c4 | tests/integration/test_with_ssl.py | tests/integration/test_with_ssl.py | from . import base
class SSLTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
| import os
from pymco.test import ctxt
from . import base
FIXTURES_PATH = os.path.join(ctxt.ROOT, 'fixtures')
class SSLTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
'plugin.ssl_server_private': os.path.join(FIXTURES_PATH,
'server-private.pem'),
'securityprovider': 'ssl',
'plugin.ssl_client_cert_dir': FIXTURES_PATH,
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
| Fix SSL security provider integration tests | Fix SSL security provider integration tests
They were running with none provider instead.
| Python | bsd-3-clause | rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective |
66284e57accec5977d606fc91a0b28177b352eb4 | test/test_producer.py | test/test_producer.py | import pytest
from kafka import KafkaConsumer, KafkaProducer
from test.conftest import version
from test.testutil import random_string
@pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set")
def test_end_to_end(kafka_broker):
connect_str = 'localhost:' + str(kafka_broker.port)
producer = KafkaProducer(bootstrap_servers=connect_str,
max_block_ms=10000,
value_serializer=str.encode)
consumer = KafkaConsumer(bootstrap_servers=connect_str,
group_id=None,
consumer_timeout_ms=10000,
auto_offset_reset='earliest',
value_deserializer=bytes.decode)
topic = random_string(5)
for i in range(1000):
producer.send(topic, 'msg %d' % i)
producer.flush()
producer.close()
consumer.subscribe([topic])
msgs = set()
for i in range(1000):
try:
msgs.add(next(consumer).value)
except StopIteration:
break
assert msgs == set(['msg %d' % i for i in range(1000)])
| import pytest
from kafka import KafkaConsumer, KafkaProducer
from test.conftest import version
from test.testutil import random_string
@pytest.mark.skipif(not version(), reason="No KAFKA_VERSION set")
@pytest.mark.parametrize("compression", [None, 'gzip', 'snappy', 'lz4'])
def test_end_to_end(kafka_broker, compression):
# LZ4 requires 0.8.2
if compression == 'lz4' and version() < (0, 8, 2):
return
connect_str = 'localhost:' + str(kafka_broker.port)
producer = KafkaProducer(bootstrap_servers=connect_str,
max_block_ms=10000,
compression_type=compression,
value_serializer=str.encode)
consumer = KafkaConsumer(bootstrap_servers=connect_str,
group_id=None,
consumer_timeout_ms=10000,
auto_offset_reset='earliest',
value_deserializer=bytes.decode)
topic = random_string(5)
for i in range(1000):
producer.send(topic, 'msg %d' % i)
producer.flush()
producer.close()
consumer.subscribe([topic])
msgs = set()
for i in range(1000):
try:
msgs.add(next(consumer).value)
except StopIteration:
break
assert msgs == set(['msg %d' % i for i in range(1000)])
| Add end-to-end integration testing for all compression types | Add end-to-end integration testing for all compression types
| Python | apache-2.0 | dpkp/kafka-python,ohmu/kafka-python,Yelp/kafka-python,ohmu/kafka-python,zackdever/kafka-python,Aloomaio/kafka-python,DataDog/kafka-python,Aloomaio/kafka-python,wikimedia/operations-debs-python-kafka,mumrah/kafka-python,scrapinghub/kafka-python,mumrah/kafka-python,dpkp/kafka-python,Yelp/kafka-python,scrapinghub/kafka-python,wikimedia/operations-debs-python-kafka,zackdever/kafka-python |
4cbbe7c3ab891a11492f368d780a1416d37358ff | feedzilla/syndication.py | feedzilla/syndication.py | # -*- coding: utf-8 -*-
# Copyright: 2011, Grigoriy Petukhov
# Author: Grigoriy Petukhov (http://lorien.name)
# License: BSD
from django.contrib.syndication.views import Feed
from django.conf import settings
from feedzilla.models import Post
class PostFeed(Feed):
title_template = 'feedzilla/feed/post_title.html'
description_template = 'feedzilla/feed/post_description.html'
title = settings.FEEDZILLA_SITE_TITLE
description = settings.FEEDZILLA_SITE_DESCRIPTION
link = '/'
def items(self, obj):
return Post.active_objects.all()\
.order_by('-created')[:settings.FEEDZILLA_PAGE_SIZE]
#def item_title(self, item):
#return item.name
#def item_description(self, item):
#return item.description
def item_pubdate(self, item):
return item.created
def item_guid(self, item):
return str(item.guid)
| # -*- coding: utf-8 -*-
# Copyright: 2011, Grigoriy Petukhov
# Author: Grigoriy Petukhov (http://lorien.name)
# License: BSD
from django.contrib.syndication.views import Feed
from django.conf import settings
from feedzilla.models import Post
class PostFeed(Feed):
title_template = 'feedzilla/feed/post_title.html'
description_template = 'feedzilla/feed/post_description.html'
title = settings.FEEDZILLA_SITE_TITLE
description = settings.FEEDZILLA_SITE_DESCRIPTION
link = '/'
def items(self, obj):
return Post.active_objects.all()\
.order_by('-created')[:settings.FEEDZILLA_PAGE_SIZE]
#def item_title(self, item):
#return item.name
#def item_description(self, item):
#return item.description
def item_pubdate(self, item):
return item.created
def item_guid(self, item):
return item.link
| Change the method of generating content of GUID element | Change the method of generating content of GUID element
| Python | bsd-3-clause | feedzilla/feedzilla,feedzilla/feedzilla,feedzilla/feedzilla |
f127f0e9bb0b8778feafbdbc1fa68e79a923d639 | whats_fresh/whats_fresh_api/tests/views/entry/test_list_products.py | whats_fresh/whats_fresh_api/tests/views/entry/test_list_products.py | from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListProductTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-products')
self.assertEqual(url, '/entry/products')
def test_list_items(self):
"""
Tests to see if the list of products contains the proper productss and
proper product data
"""
response = self.client.get(reverse('entry-list-products'))
items = response.context['item_list']
for product in Product.objects.all():
self.assertEqual(
items[product.id-1]['description'], product.description)
self.assertEqual(
items[product.id-1]['name'], product.name)
self.assertEqual(
items[product.id-1]['link'],
reverse('edit-product', kwargs={'id': product.id}))
self.assertEqual(
items[product.id-1]['modified'],
product.modified.strftime("%I:%M %P, %d %b %Y"))
self.assertEqual(
sort(items[product.id-1]['preparations']),
sort([prep.name for prep in product.preparations.all()]))
| from django.test import TestCase
from django.core.urlresolvers import reverse
from whats_fresh_api.models import *
from django.contrib.gis.db import models
import json
class ListProductTestCase(TestCase):
fixtures = ['test_fixtures']
def test_url_endpoint(self):
url = reverse('entry-list-products')
self.assertEqual(url, '/entry/products')
def test_list_items(self):
"""
Tests to see if the list of products contains the proper products and
proper product data
"""
response = self.client.get(reverse('entry-list-products'))
items = response.context['item_list']
product_dict = {}
for product in items:
product_id = product['link'].split('/')[-1]
product_dict[str(product_id)] = product
for product in Product.objects.all():
self.assertEqual(
product_dict[str(product.id)]['description'],
product.description)
self.assertEqual(
product_dict[str(product.id)]['name'], product.name)
self.assertEqual(
product_dict[str(product.id)]['link'],
reverse('edit-product', kwargs={'id': product.id}))
self.assertEqual(
product_dict[str(product.id)]['modified'],
product.modified.strftime("%I:%M %P, %d %b %Y"))
self.assertEqual(
sort(product_dict[str(product.id)]['preparations']),
sort([prep.name for prep in product.preparations.all()]))
| Update product listing test to use product ids rather than index | Update product listing test to use product ids rather than index
| Python | apache-2.0 | osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api |
12b34fc09baa5060495e25e57680d1f6170559c5 | addons/bestja_configuration_fpbz/__openerp__.py | addons/bestja_configuration_fpbz/__openerp__.py | # -*- coding: utf-8 -*-
{
'name': "Bestja: FBŻ",
'summary': "Installation configuration for FPBŻ",
'description': "Installation configuration for Federacja Polskich Banków Żywności",
'author': "Laboratorium EE",
'website': "http://www.laboratorium.ee",
'version': '0.1',
'category': 'Specific Industry Applications',
'depends': [
'base',
'bestja_base',
'bestja_volunteer',
'bestja_volunteer_notes',
'bestja_account_deletion',
'bestja_organization',
'bestja_organization_hierarchy',
'bestja_project',
'bestja_project_hierarchy',
'bestja_stores',
'bestja_requests',
'bestja_detailed_reports',
'bestja_offers',
'bestja_offers_by_org',
'bestja_files',
'quizzes',
'bestja_organization_warehouse',
'bestja_age_verification',
'bestja_frontend_fpbz',
'bestja_page_fixtures_fpbz',
],
'data': [
'data.xml',
],
'application': True,
}
| # -*- coding: utf-8 -*-
{
'name': "Bestja: FBŻ",
'summary': "Installation configuration for FPBŻ",
'description': "Installation configuration for Federacja Polskich Banków Żywności",
'author': "Laboratorium EE",
'website': "http://www.laboratorium.ee",
'version': '0.1',
'category': 'Specific Industry Applications',
'depends': [
'base',
'bestja_base',
'bestja_volunteer',
'bestja_volunteer_notes',
'bestja_account_deletion',
'bestja_organization',
'bestja_organization_hierarchy',
'bestja_project',
'bestja_project_hierarchy',
'bestja_stores',
'bestja_requests',
'bestja_detailed_reports',
'bestja_estimation_reports',
'bestja_offers',
'bestja_offers_by_org',
'bestja_files',
'quizzes',
'bestja_organization_warehouse',
'bestja_age_verification',
'bestja_frontend_fpbz',
'bestja_page_fixtures_fpbz',
],
'data': [
'data.xml',
],
'application': True,
}
| Enable estimation reports for FPBŻ | Enable estimation reports for FPBŻ
| Python | agpl-3.0 | KamilWo/bestja,EE/bestja,ludwiktrammer/bestja,KrzysiekJ/bestja,ludwiktrammer/bestja,KrzysiekJ/bestja,KrzysiekJ/bestja,ludwiktrammer/bestja,EE/bestja,KamilWo/bestja,EE/bestja,KamilWo/bestja |
fc75f5843af70c09e0d63284277bf88689cbb06d | invocations/docs.py | invocations/docs.py | import os
from invoke.tasks import task
from invoke.runner import run
docs_dir = 'docs'
build = os.path.join(docs_dir, '_build')
@task
def clean_docs():
run("rm -rf %s" % build)
@task
def browse_docs():
run("open %s" % os.path.join(build, 'index.html'))
@task
def docs(clean=False, browse=False):
if clean:
clean_docs.body()
run("sphinx-build %s %s" % (docs_dir, build), pty=True)
if browse:
browse_docs.body()
| import os
from invoke.tasks import task
from invoke.runner import run
docs_dir = 'docs'
build = os.path.join(docs_dir, '_build')
@task
def clean_docs():
run("rm -rf %s" % build)
@task
def browse_docs():
run("open %s" % os.path.join(build, 'index.html'))
@task
def api_docs(target, output="api", exclude=""):
"""
Runs ``sphinx-apidoc`` to autogenerate your API docs.
Must give target directory/package as ``target``. Results are written out
to ``docs/<output>`` (``docs/api`` by default).
To exclude certain output files from the final build give ``exclude`` as a
comma separated list of file paths.
"""
output = os.path.join('docs', output)
# Have to make these absolute or apidoc is dumb :(
exclude = map(
lambda x: os.path.abspath(os.path.join(os.getcwd(), x)),
exclude.split(',')
)
run("sphinx-apidoc -o %s %s %s" % (output, target, ' '.join(exclude)))
@task
def docs(clean=False, browse=False, api_target=None, api_output=None,
api_exclude=None):
"""
Build Sphinx docs, optionally ``clean``ing and/or ``browse``ing.
Can also build API docs by giving ``api_target`` and optionally
``api_output`` and/or ``api_exclude``.
"""
if api_target:
kwargs = {'target': api_target}
if api_output:
kwargs['output'] = api_output
if api_exclude:
kwargs['exclude'] = api_exclude
api_docs.body(**kwargs)
if clean:
clean_docs.body()
run("sphinx-build %s %s" % (docs_dir, build), pty=True)
if browse:
browse_docs.body()
| Add apidoc to doc building | Add apidoc to doc building
| Python | bsd-2-clause | mrjmad/invocations,pyinvoke/invocations,alex/invocations,singingwolfboy/invocations |
a9ac098ec492739f37005c9bd6278105df0261c5 | parliamentsearch/items.py | parliamentsearch/items.py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class MemberofParliament(scrapy.Item):
"""
Data structure to define Member of Parliament information
"""
mp_id = scrapy.Field()
mp_name = scrapy.Field()
mp_constituency = scrapy.Field()
mp_party = scrapy.Field()
mp_photo = scrapy.Field()
class RajyaSabhaQuestion(scrapy.Item):
"""
Data structure to define a Rajya Sabha question
"""
q_no = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
class LokSabhaQuestion(scrapy.Item):
"""
Data structure to define a Lok Sabha question
"""
q_no = scrapy.Field()
q_session = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
| # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class MemberofParliament(scrapy.Item):
"""
Data structure to define Member of Parliament information
"""
mp_id = scrapy.Field()
mp_name = scrapy.Field()
mp_constituency = scrapy.Field()
mp_party = scrapy.Field()
mp_photo = scrapy.Field()
class RajyaSabhaQuestion(scrapy.Item):
"""
Data structure to define a Rajya Sabha question
"""
q_no = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
class LokSabhaQuestion(scrapy.Item):
"""
Data structure to define a Lok Sabha question
"""
q_no = scrapy.Field()
q_session = scrapy.Field()
q_type = scrapy.Field()
q_date = scrapy.Field()
q_ministry = scrapy.Field()
q_member = scrapy.Field()
q_subject = scrapy.Field()
q_url = scrapy.Field()
q_annex = scrapy.Field()
| Add fields to save question url and annexure links | Add fields to save question url and annexure links
Details of each question is in another link and some questions have annexures
(in English/Hindi), add fields to save all these items
Signed-off-by: Arun Siluvery <[email protected]>
| Python | mit | mthipparthi/parliament-search |
376b8aa5b77066e06c17f41d65fe32a3c2bdef1f | geo.py | geo.py | #! /usr/bin/python3
# -*- coding-utf-8 -*-
"""
This script transform a md into a plain html in the context of a
documentation for Kit&Pack.
"""
import mmap
import yaml
print("---------------------------- geo --")
print("-- by [email protected] --")
print("-----------------------------------")
doc_in = "./001-v2-doc.md"
class geoReader():
def __init__(self, doc_in):
self.doc_in = doc_in
self.header = None
def __enter__(self):
"""Open the file.
"""
self.f = open(self.doc_in, 'r')
return self
def __exit__(self, type, value, traceback):
"""Close the file.
"""
self.f.close()
def parseHeader(self):
"""Parse the header of the file.
"""
s = mmap.mmap(self.f.fileno(), 0, access=mmap.ACCESS_READ)
self.header_limit = s.find(b'---')
if self.header_limit != -1:
self.header = yaml.load(s[0:self.header_limit])
print(self.header['name'])
else:
print("Cannot load the header")
# Read the document
with geoReader(doc_in) as g:
g.parseHeader()
| #! /usr/bin/python3
# -*- coding-utf-8 -*-
"""
This script transform a md into a plain html in the context of a
documentation for Kit&Pack.
"""
import mmap
import yaml
print("---------------------------- geo --")
print("-- by [email protected] --")
print("-----------------------------------")
doc_in = "./001-v2-doc.md"
class geoReader():
def __init__(self, doc_in):
self.doc_in = doc_in
self.header = None
self.header_limit = -1
def __enter__(self):
"""Open the file.
"""
self.f = open(self.doc_in, 'r')
return self
def __exit__(self, type, value, traceback):
"""Close the file.
"""
self.f.close()
def parseHeader(self):
"""Parse the header of the file.
"""
s = mmap.mmap(self.f.fileno(), 0, access=mmap.ACCESS_READ)
self.header_limit = s.find(b'---')
if self.header_limit != -1:
self.header = yaml.load(s[0:self.header_limit])
print(self.header['name'])
else:
print("Cannot load the header")
# Read the document
with geoReader(doc_in) as g:
g.parseHeader()
| Add a default value to the header limit | Add a default value to the header limit
| Python | mit | a2ohm/geo |
fdae17a50223c2f9b8ba4a665fc24726e2c2ce14 | tests/lib/es_tools.py | tests/lib/es_tools.py | """ Commands for interacting with Elastic Search """
# pylint: disable=broad-except
from os.path import join
import requests
from lib.tools import TEST_FOLDER
def es_is_available():
""" Test if Elastic Search is running """
try:
return (
requests.get("http://localhost:9200").json()["tagline"]
== "You Know, for Search"
)
except Exception:
return False
def load_json_file(filename):
""" Load JSON file into Elastic Search """
url = "http://localhost:9200/_bulk"
path = join(TEST_FOLDER, "data", filename)
headers = {"Content-Type": "application/x-ndjson"}
with open(path, "r") as handle:
body = handle.read().encode(encoding="utf-8")
return requests.post(url, headers=headers, data=body)
| """ Commands for interacting with Elastic Search """
# pylint: disable=broad-except
from os.path import join
import requests
from lib.tools import TEST_FOLDER
def es_is_available():
""" Test if Elastic Search is running """
try:
return (
requests.get("http://localhost:9200", auth=("elastic", "changeme")).json()[
"tagline"
]
== "You Know, for Search"
)
except Exception:
return False
def load_json_file(filename):
""" Load JSON file into Elastic Search """
url = "http://localhost:9200/_bulk"
path = join(TEST_FOLDER, "data", filename)
headers = {"Content-Type": "application/x-ndjson"}
with open(path, "r") as handle:
body = handle.read().encode(encoding="utf-8")
return requests.post(
url, headers=headers, data=body, auth=("elastic", "changeme")
)
| Add auth header to the fixture loader | Add auth header to the fixture loader
It seems to work fine with the unauthenticated es instance
| Python | mit | matthewfranglen/postgres-elasticsearch-fdw |
87244598ed08e790835818656ecba0178bb7ca89 | fsplit/__init__.py | fsplit/__init__.py | #!/usr/bin/env python2
##
# fsplit
# https://github.com/leosartaj/fsplit.git
#
# Copyright (c) 2014 Sartaj Singh
# Licensed under the MIT license.
##
from info import __version__ # define __version__ variable
from info import __desc__ # define __desc__ variable for description
| #!/usr/bin/env python2
##
# fsplit
# https://github.com/leosartaj/fsplit.git
#
# Copyright (c) 2014 Sartaj Singh
# Licensed under the MIT license.
##
from .info import __version__ # define __version__ variable
from .info import __desc__ # define __desc__ variable for description
| Upgrade to a better version | Upgrade to a better version
from info import __version__ # define __version__ variable ModuleNotFoundError: No module named 'info
Gave error while building
Coz of missing . | Python | mit | leosartaj/fsplit |
bafdbd28e35d80d28bfb82c23532533cb2915066 | fuel/exceptions.py | fuel/exceptions.py | class AxisLabelsMismatchError(ValueError):
"""Raised when a pair of axis labels tuples do not match."""
class ConfigurationError(Exception):
"""Error raised when a configuration value is requested but not set."""
class MissingInputFiles(Exception):
"""Exception raised by a converter when input files are not found.
Parameters
----------
filenames : list
A list of filenames that were not found.
"""
def __init__(self, message, filenames):
self.filenames = filenames
super(MissingInputFiles, self).__init__(message, filenames)
class NeedURLPrefix(Exception):
"""Raised when a URL is not provided for a file."""
| class AxisLabelsMismatchError(ValueError):
"""Raised when a pair of axis labels tuples do not match."""
class ConfigurationError(Exception):
"""Error raised when a configuration value is requested but not set."""
class MissingInputFiles(Exception):
"""Exception raised by a converter when input files are not found.
Parameters
----------
message : str
The error message to be associated with this exception.
filenames : list
A list of filenames that were not found.
"""
def __init__(self, message, filenames):
self.filenames = filenames
super(MissingInputFiles, self).__init__(message, filenames)
class NeedURLPrefix(Exception):
"""Raised when a URL is not provided for a file."""
| Add docs for MissingInputFiles 'message' arg. | Add docs for MissingInputFiles 'message' arg.
| Python | mit | hantek/fuel,rodrigob/fuel,dmitriy-serdyuk/fuel,codeaudit/fuel,udibr/fuel,mjwillson/fuel,dribnet/fuel,capybaralet/fuel,aalmah/fuel,glewis17/fuel,glewis17/fuel,vdumoulin/fuel,dmitriy-serdyuk/fuel,dwf/fuel,bouthilx/fuel,mila-udem/fuel,chrishokamp/fuel,udibr/fuel,janchorowski/fuel,dwf/fuel,dribnet/fuel,markusnagel/fuel,aalmah/fuel,markusnagel/fuel,orhanf/fuel,capybaralet/fuel,rodrigob/fuel,dhruvparamhans/fuel,dhruvparamhans/fuel,janchorowski/fuel,mila-udem/fuel,bouthilx/fuel,harmdevries89/fuel,hantek/fuel,harmdevries89/fuel,chrishokamp/fuel,codeaudit/fuel,orhanf/fuel,vdumoulin/fuel,mjwillson/fuel |
0fdb93fb73142315fe404b9a161ef19af0d920cd | tests/test_bawlerd.py | tests/test_bawlerd.py | import io
import os
from textwrap import dedent
from pg_bawler import bawlerd
class TestBawlerdConfig:
def test_build_config_location_list(self):
assert not bawlerd.conf.build_config_location_list(locations=())
user_conf = os.path.join(
os.path.expanduser('~'),
bawlerd.conf.DEFAULT_CONFIG_FILENAME)
system_conf = os.path.join(
'/etc/pg_bawler',
bawlerd.conf.DEFAULT_CONFIG_FILENAME)
assert user_conf in bawlerd.conf.build_config_location_list()
assert system_conf in bawlerd.conf.build_config_location_list()
def test__load_file(self):
config = bawlerd.conf._load_file(io.StringIO(dedent("""\
logging:
formatters:
standard:
format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\"
handlers:
default:
level: "INFO"
formatter: standard
class: logging.StreamHandler
loggers:
"":
handlers: ["default"]
level: INFO
propagate: True
""")))
assert 'logging' in config
| import io
import os
from textwrap import dedent
from pg_bawler import bawlerd
class TestBawlerdConfig:
def test_build_config_location_list(self):
assert not bawlerd.conf.build_config_location_list(locations=())
user_conf = os.path.join(
os.path.expanduser('~'),
bawlerd.conf.DEFAULT_CONFIG_FILENAME)
system_conf = os.path.join(
'/etc/pg_bawler',
bawlerd.conf.DEFAULT_CONFIG_FILENAME)
assert user_conf in bawlerd.conf.build_config_location_list()
assert system_conf in bawlerd.conf.build_config_location_list()
def test__load_file(self):
config = bawlerd.conf._load_file(io.StringIO(dedent("""\
logging:
formatters:
standard:
format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\"
handlers:
default:
level: "INFO"
formatter: standard
class: logging.StreamHandler
loggers:
"":
handlers: ["default"]
level: INFO
propagate: True
""")))
assert 'logging' in config
def test_read_config_files(self):
config_base = os.path.join(
os.path.abspath(os.path.dirname(__file__)), 'configs')
locations = [
os.path.join(config_base, 'etc'),
os.path.join(config_base, 'home'),
]
config = bawlerd.conf.read_config_files(
bawlerd.conf.build_config_location_list(locations=locations))
assert config['common']['listen_timeout'] == 40
assert 'logging' in config
| Add simple test for config builder | Add simple test for config builder
Signed-off-by: Michal Kuffa <[email protected]>
| Python | bsd-3-clause | beezz/pg_bawler,beezz/pg_bawler |
ea3ad65d3d0976ec24c15703fafacb805a6b5351 | students/psbriant/final_project/clean_data.py | students/psbriant/final_project/clean_data.py | """
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
from datetime import datetime
def clean(data):
"""
Take in data and return cleaned version.
"""
# Remove Date Values column
data = data.drop(["Date Value"], axis=1)
# Determine what values are missing
# empty = data.apply(lambda col: pandas.isnull(col))
return data
def find_low_water_use(data):
"""
"""
under100 = data[(data["90012"] <= 100) & (data["90013"] <= 100)]
print(under100)
under25 = data[(data["90012"] <= 25) & (data["90013"] <= 25)]
print(under25)
def main():
"""
"""
# Connect to file.
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
cleaned_data = clean(data)
find_low_water_use(cleaned_data)
if __name__ == '__main__':
main()
| """
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Code for Final Project
"""
import pandas
import matplotlib.pyplot as plt
from datetime import datetime
def clean(data):
"""
Take in data and return cleaned version.
"""
# Remove Date Values column
data = data.drop(["Date Value"], axis=1)
# Determine what values are missing
# empty = data.apply(lambda col: pandas.isnull(col))
return data
def find_low_water_use(data):
"""
"""
under100 = data[(data["90012"] <= 100) & (data["90013"] <= 100)]
print(under100)
under25 = data[(data["90012"] <= 25) & (data["90013"] <= 25)]
print(under25)
def plot_zipcode(data, zipcode):
"""
"""
# data["90012"].plot(kind="bar", rot=10)
plt.plot(data[zipcode])
plt.show()
def main():
"""
"""
# Connect to file.
data = pandas.read_csv("data/Residential_Water_Usage_Zip_Code_on_Top.csv")
cleaned_data = clean(data)
# find_low_water_use(cleaned_data)
plot_zipcode(cleaned_data, "90012")
# cleaned_data["90012"].plot(kind="bar", rot=10)
# cleaned_data["90012"].hist()
# plt.plot(cleaned_data["90012"])
# plt.plot([1, 2, 3, 4])
if __name__ == '__main__':
main()
| Add simple plots of downtown LA wateruse. | Add simple plots of downtown LA wateruse.
| Python | unlicense | UWPCE-PythonCert/IntroPython2016,UWPCE-PythonCert/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,weidnem/IntroPython2016,weidnem/IntroPython2016 |
c99e0ac2e463302d41838f11ea28ea8a62990671 | wafer/kv/serializers.py | wafer/kv/serializers.py | from django.core.exceptions import PermissionDenied
from rest_framework import serializers
from wafer.kv.models import KeyValue
class KeyValueSerializer(serializers.ModelSerializer):
class Meta:
model = KeyValue
# There doesn't seem to be a better way of handling the problem
# of filtering the groups.
# See the DRF meta-issue https://github.com/tomchristie/django-rest-framework/issues/1985
# and various related subdisscussions, such as https://github.com/tomchristie/django-rest-framework/issues/2292
def __init__(self, *args, **kwargs):
# Explicitly fail with a hopefully informative error message
# if there is no request. This is for introspection tools which
# call serializers without a request
if 'request' not in kwargs['context']:
raise PermissionDenied("No request information provided."
"The KeyValue API isn't available without "
"an authorized user")
user = kwargs['context']['request'].user
# Limit to groups shown to those we're a member of
groups = self.fields['group']
groups.queryset = user.groups
super(KeyValueSerializer, self).__init__(*args, **kwargs)
| from django.core.exceptions import PermissionDenied
from rest_framework import serializers
from wafer.kv.models import KeyValue
class KeyValueSerializer(serializers.ModelSerializer):
class Meta:
model = KeyValue
fields = ('group', 'key', 'value')
# There doesn't seem to be a better way of handling the problem
# of filtering the groups.
# See the DRF meta-issue https://github.com/tomchristie/django-rest-framework/issues/1985
# and various related subdisscussions, such as https://github.com/tomchristie/django-rest-framework/issues/2292
def __init__(self, *args, **kwargs):
# Explicitly fail with a hopefully informative error message
# if there is no request. This is for introspection tools which
# call serializers without a request
if 'request' not in kwargs['context']:
raise PermissionDenied("No request information provided."
"The KeyValue API isn't available without "
"an authorized user")
user = kwargs['context']['request'].user
# Limit to groups shown to those we're a member of
groups = self.fields['group']
groups.queryset = user.groups
super(KeyValueSerializer, self).__init__(*args, **kwargs)
| Add catchall fields property to KeyValueSerializer | Add catchall fields property to KeyValueSerializer
With the latest django-restframework, not explicitly setting the
fields for a serializer causes errors. This explicitly sets the
fields to those of the model.
| Python | isc | CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer |
f802111f1f241444f874119e5949f3da4abd1c85 | python/raindrops/raindrops.py | python/raindrops/raindrops.py | def raindrops(number):
if is_three_a_factor(number):
return "Pling"
if is_five_a_factor(number):
return "Plang"
return "{}".format(number)
def is_three_a_factor(number):
return number % 3 == 0
def is_five_a_factor(number):
return number % 5 == 0
| def raindrops(number):
if is_three_a_factor(number):
return "Pling"
if is_five_a_factor(number):
return "Plang"
if is_seven_a_factor(number):
return "Plong"
return "{}".format(number)
def is_three_a_factor(number):
return number % 3 == 0
def is_five_a_factor(number):
return number % 5 == 0
def is_seven_a_factor(number):
return number % 7 == 0
| Handle 7 as a factor | Handle 7 as a factor
| Python | mit | rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism,rootulp/exercism |
21844df3e266803cc119d049faadddb5bf5410f0 | run.py | run.py | import serial
import threading
print('Starting server...')
temperature_usb = '/dev/ttyAMA0'
BAUD_RATE = 9600
temperature_ser = ser.Serial(temperature_usb, BAUD_RATE)
def process_line(line):
print('Need to process line: {}'.format(line))
def temperature_loop():
line = ""
while True:
data = temperature_ser.read()
if(data == "\r"):
process_line(line)
line = ""
else:
line = line + data
temperature_thread = threading.Thread(target=temperature_loop)
temperature_thread.start()
| import serial
import threading
print('Starting server...')
temperature_usb = '/dev/ttyAMA0'
BAUD_RATE = 9600
temperature_ser = serial.Serial(temperature_usb, BAUD_RATE)
def process_line(line):
print('Need to process line: {}'.format(line))
def temperature_loop():
line = ""
while True:
data = temperature_ser.read()
if(data == "\r"):
process_line(line)
line = ""
else:
line = line + data
temperature_thread = threading.Thread(target=temperature_loop)
temperature_thread.start()
| Use serial instead of ser, DUH | Use serial instead of ser, DUH
| Python | mit | illumenati/duwamish-sensor,tipsqueal/duwamish-sensor |
bb86433e80c1361b57c58fb32a2e250e915b1b05 | thinglang/__init__.py | thinglang/__init__.py | import os
from thinglang import utils
from thinglang.execution.execution import ExecutionEngine
from thinglang.lexer.lexer import lexer
from thinglang.parser.analyzer import Analyzer
from thinglang.parser.parser import parse
from thinglang.parser.simplifier import Simplifier
BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'include')
def collect_includes():
files = [os.path.join(BASE_DIR, path) for path in os.listdir(BASE_DIR)]
return '\n' + '\n'.join(open(f).read() for f in files)
def run(source):
if not source:
raise ValueError('Source cannot be empty')
source = (source + collect_includes()).strip().replace(' ' * 4, '\t')
utils.print_header('Source', source)
lexical_groups = list(lexer(source))
ast = parse(lexical_groups)
Simplifier(ast).run()
utils.print_header('Parsed AST', ast.tree())
Analyzer(ast).run()
with ExecutionEngine(ast) as engine:
engine.execute()
return engine.results()
| import os
from thinglang import utils
from thinglang.execution.execution import ExecutionEngine
from thinglang.lexer.lexer import lexer
from thinglang.parser.analyzer import Analyzer
from thinglang.parser.parser import parse
from thinglang.parser.simplifier import Simplifier
BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'include')
def collect_includes():
files = [os.path.join(BASE_DIR, path) for path in os.listdir(BASE_DIR)]
return '\n' + '\n'.join(open(f).read() for f in files)
def run(source):
if not source:
raise ValueError('Source cannot be empty')
source = (source + collect_includes()).strip().replace(' ' * 4, '\t')
utils.print_header('Source', source)
lexical_groups = list(lexer(source))
ast = parse(lexical_groups)
Simplifier(ast).run()
utils.print_header('C++ Transpilation', ast.transpile_children())
utils.print_header('Parsed AST', ast.tree())
Analyzer(ast).run()
with ExecutionEngine(ast) as engine:
engine.execute()
return engine.results()
| Print C++ code during parsing | Print C++ code during parsing
| Python | mit | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang |
baacda228682a50acc5a4528d43f5d3a88c7c6ec | salt/client/netapi.py | salt/client/netapi.py | # encoding: utf-8
'''
The main entry point for salt-api
'''
# Import python libs
import logging
import multiprocessing
# Import salt-api libs
import salt.loader
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
def run(self):
'''
Load and start all available api modules
'''
netapi = salt.loader.netapi(self.opts)
for fun in netapi:
if fun.endswith('.start'):
logger.info("Starting '{0}' api module".format(fun))
multiprocessing.Process(target=netapi[fun]).start()
| # encoding: utf-8
'''
The main entry point for salt-api
'''
# Import python libs
import logging
import multiprocessing
import signal
# Import salt-api libs
import salt.loader
logger = logging.getLogger(__name__)
class NetapiClient(object):
'''
Start each netapi module that is configured to run
'''
def __init__(self, opts):
self.opts = opts
self.processes = []
def run(self):
'''
Load and start all available api modules
'''
netapi = salt.loader.netapi(self.opts)
for fun in netapi:
if fun.endswith('.start'):
logger.info("Starting '{0}' api module".format(fun))
p = multiprocessing.Process(target=netapi[fun])
p.start()
self.processes.append(p)
# make sure to kill the subprocesses if the parent is killed
signal.signal(signal.SIGTERM, self.kill_children)
def kill_children(self, *args):
'''
Kill all of the children
'''
for p in self.processes:
p.terminate()
p.join()
| Make sure to not leave hanging children processes if the parent is killed | Make sure to not leave hanging children processes if the parent is killed
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt |
a3b119e14df4aff213231492470587f88457a241 | setuptools/command/upload.py | setuptools/command/upload.py | import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
self.password or self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
password = keyring.get_password(self.repository, self.username)
except Exception:
password = None
finally:
return password
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
password = None
try:
while not password:
password = getpass.getpass()
except (Exception, KeyboardInterrupt):
password = None
finally:
return password
| import getpass
from distutils.command import upload as orig
class upload(orig.upload):
"""
Override default upload behavior to obtain password
in a variety of different ways.
"""
def finalize_options(self):
orig.upload.finalize_options(self)
# Attempt to obtain password. Short circuit evaluation at the first
# sign of success.
self.password = (
self.password or
self._load_password_from_keyring() or
self._prompt_for_password()
)
def _load_password_from_keyring(self):
"""
Attempt to load password from keyring. Suppress Exceptions.
"""
try:
keyring = __import__('keyring')
password = keyring.get_password(self.repository, self.username)
except Exception:
password = None
finally:
return password
def _prompt_for_password(self):
"""
Prompt for a password on the tty. Suppress Exceptions.
"""
password = None
try:
while not password:
password = getpass.getpass()
except (Exception, KeyboardInterrupt):
password = None
finally:
return password
| Add carriage return for symmetry | Add carriage return for symmetry
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools |
1dade842098292f2201502fa55239a98628dfc2b | scheduler/schedule.py | scheduler/schedule.py | import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=604800) # 7 day timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
| import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph, timeout=3600) # 1 hour timeout
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
| Set update job timeout back to a more reasonable value | Set update job timeout back to a more reasonable value
| Python | apache-2.0 | ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod |
0b8cc130f00b51b18e55805f82ba661fdf66fba6 | saml2idp/saml2idp_metadata.py | saml2idp/saml2idp_metadata.py | """
Django Settings that more closely resemble SAML Metadata.
Detailed discussion is in doc/SETTINGS_AND_METADATA.txt.
"""
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
CERTIFICATE_DATA = 'certificate_data'
CERTIFICATE_FILENAME = 'certificate_file'
PRIVATE_KEY_DATA = 'private_key_data'
PRIVATE_KEY_FILENAME = 'private_key_file'
def check_configuration_contains(config, keys):
available_keys = set(keys).intersection(set(config.keys()))
if not available_keys:
raise ImproperlyConfigured(
'one of the followin keys is required but none was '
'specified: {}'.format(keys))
if len(available_keys) > 1:
raise ImproperlyConfigured(
'found conflicting configuration: {}. Only one key can be used at'
'a time.'.format(available_keys))
def validate_configuration(config):
check_configuration_contains(config=config,
keys=[PRIVATE_KEY_DATA, PRIVATE_KEY_FILENAME])
check_configuration_contains(config=config,
keys=[CERTIFICATE_DATA, CERTIFICATE_FILENAME])
try:
SAML2IDP_CONFIG = settings.SAML2IDP_CONFIG
except:
raise ImproperlyConfigured('SAML2IDP_CONFIG setting is missing.')
else:
validate_configuration(SAML2IDP_CONFIG)
try:
SAML2IDP_REMOTES = settings.SAML2IDP_REMOTES
except:
raise ImproperlyConfigured('SAML2IDP_REMOTES setting is missing.')
| """
Django Settings that more closely resemble SAML Metadata.
Detailed discussion is in doc/SETTINGS_AND_METADATA.txt.
"""
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
CERTIFICATE_DATA = 'certificate_data'
CERTIFICATE_FILENAME = 'certificate_file'
PRIVATE_KEY_DATA = 'private_key_data'
PRIVATE_KEY_FILENAME = 'private_key_file'
def check_configuration_contains(config, keys):
available_keys = frozenset(keys).intersection(frozenset(config.keys()))
if not available_keys:
raise ImproperlyConfigured(
'one of the following keys is required but none was '
'specified: {}'.format(keys))
if len(available_keys) > 1:
raise ImproperlyConfigured(
'found conflicting configuration: {}. Only one key can be used at'
'a time.'.format(available_keys))
def validate_configuration(config):
check_configuration_contains(config=config,
keys=(PRIVATE_KEY_DATA, PRIVATE_KEY_FILENAME))
check_configuration_contains(config=config,
keys=(CERTIFICATE_DATA, CERTIFICATE_FILENAME))
try:
SAML2IDP_CONFIG = settings.SAML2IDP_CONFIG
except:
raise ImproperlyConfigured('SAML2IDP_CONFIG setting is missing.')
else:
validate_configuration(SAML2IDP_CONFIG)
try:
SAML2IDP_REMOTES = settings.SAML2IDP_REMOTES
except:
raise ImproperlyConfigured('SAML2IDP_REMOTES setting is missing.')
| Implement suggested changes in PR review | Implement suggested changes in PR review
| Python | mit | mobify/dj-saml-idp,mobify/dj-saml-idp,mobify/dj-saml-idp |
b6c63bedc6fcd2294aae60643f41df4acb2ee681 | pdfminer/pdfcolor.py | pdfminer/pdfcolor.py | import collections
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = collections.OrderedDict()
for (name, n) in [
('CalRGB', 3),
('CalGray', 1),
('Lab', 3),
('DeviceRGB', 3),
('DeviceCMYK', 4),
('DeviceGray', 1),
('Separation', 1),
('Indexed', 1),
('Pattern', 1),
]:
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
| import collections
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = collections.OrderedDict()
for (name, n) in [
('DeviceGray', 1), # default value first
('CalRGB', 3),
('CalGray', 1),
('Lab', 3),
('DeviceRGB', 3),
('DeviceCMYK', 4),
('Separation', 1),
('Indexed', 1),
('Pattern', 1),
]:
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
| Make DeviceGray the default color as it should be | Make DeviceGray the default color as it should be
| Python | mit | goulu/pdfminer,pdfminer/pdfminer.six |
70ba84dc485ed3db4ccf5008db87b2c9f003634b | tests/fixtures/__init__.py | tests/fixtures/__init__.py | """Test data"""
from pathlib import Path
def patharg(path):
"""
Back slashes need to be escaped in ITEM args,
even in Windows paths.
"""
return str(path).replace('\\', '\\\\\\')
FIXTURES_ROOT = Path(__file__).parent
FILE_PATH = FIXTURES_ROOT / 'test.txt'
JSON_FILE_PATH = FIXTURES_ROOT / 'test.json'
BIN_FILE_PATH = FIXTURES_ROOT / 'test.bin'
FILE_PATH_ARG = patharg(FILE_PATH)
BIN_FILE_PATH_ARG = patharg(BIN_FILE_PATH)
JSON_FILE_PATH_ARG = patharg(JSON_FILE_PATH)
# Strip because we don't want new lines in the data so that we can
# easily count occurrences also when embedded in JSON (where the new
# line would be escaped).
FILE_CONTENT = FILE_PATH.read_text().strip()
JSON_FILE_CONTENT = JSON_FILE_PATH.read_text()
BIN_FILE_CONTENT = BIN_FILE_PATH.read_bytes()
UNICODE = FILE_CONTENT
| """Test data"""
from pathlib import Path
def patharg(path):
"""
Back slashes need to be escaped in ITEM args,
even in Windows paths.
"""
return str(path).replace('\\', '\\\\\\')
FIXTURES_ROOT = Path(__file__).parent
FILE_PATH = FIXTURES_ROOT / 'test.txt'
JSON_FILE_PATH = FIXTURES_ROOT / 'test.json'
BIN_FILE_PATH = FIXTURES_ROOT / 'test.bin'
FILE_PATH_ARG = patharg(FILE_PATH)
BIN_FILE_PATH_ARG = patharg(BIN_FILE_PATH)
JSON_FILE_PATH_ARG = patharg(JSON_FILE_PATH)
# Strip because we don't want new lines in the data so that we can
# easily count occurrences also when embedded in JSON (where the new
# line would be escaped).
FILE_CONTENT = FILE_PATH.read_text('utf8').strip()
JSON_FILE_CONTENT = JSON_FILE_PATH.read_text('utf8')
BIN_FILE_CONTENT = BIN_FILE_PATH.read_bytes()
UNICODE = FILE_CONTENT
| Fix fixture encoding on Windows | Fix fixture encoding on Windows
| Python | bsd-3-clause | PKRoma/httpie,jakubroztocil/httpie,jkbrzt/httpie,jakubroztocil/httpie,jkbrzt/httpie,jakubroztocil/httpie,jkbrzt/httpie,PKRoma/httpie |
4d7df38e056d0132af41759062cf8e380c736250 | django_backend_test/noras_menu/urls.py | django_backend_test/noras_menu/urls.py | # -*- encoding: utf-8 -*-
from django.conf.urls import url, include
from django.views.decorators.csrf import csrf_exempt
from .views import CreateMenu,ListMenu,UpdateMenu,CreateSelection,ListSelection,CreateSubscriber
urlpatterns = [
url(r'^menu/new$',CreateMenu.as_view(),name='Create Menu'),
url(r'^menu/edit/(?P<pk>\d+)/$',UpdateMenu.as_view(),name='Update Menu'),
url(r'^menu/list$',ListMenu.as_view(),name='List Menu'),
url(r'^menu/selection$',ListSelection.as_view(),name='List Selection'),
url(r'^menu/(?P<uuid>[0-9a-z-]+)$',CreateSelection.as_view(),name='Create Selection'),
url(r'^subscriber/new$',CreateSubscriber.as_view(),name='Create Subscriber'),
] | Update Urls from nora_menu app | Update Urls from nora_menu app
| Python | mit | semorale/backend-test,semorale/backend-test,semorale/backend-test |
|
4be668a7d8cdb692c20be2eabf65c20e294e16a8 | scopus/utils/get_encoded_text.py | scopus/utils/get_encoded_text.py | # Namespaces for Scopus XML
ns = {'dtd': 'http://www.elsevier.com/xml/svapi/abstract/dtd',
'dn': 'http://www.elsevier.com/xml/svapi/abstract/dtd',
'ait': "http://www.elsevier.com/xml/ani/ait",
'cto': "http://www.elsevier.com/xml/cto/dtd",
'xocs': "http://www.elsevier.com/xml/xocs/dtd",
'ce': 'http://www.elsevier.com/xml/ani/common',
'prism': 'http://prismstandard.org/namespaces/basic/2.0/',
'xsi': "http://www.w3.org/2001/XMLSchema-instance",
'dc': 'http://purl.org/dc/elements/1.1/',
'atom': 'http://www.w3.org/2005/Atom',
'opensearch': 'http://a9.com/-/spec/opensearch/1.1/'}
def get_encoded_text(container, xpath):
"""Return text for element at xpath in the container xml if it is there.
Parameters
----------
container : xml.etree.ElementTree.Element
The element to be searched in.
xpath : str
The path to be looked for.
Returns
-------
result : str
"""
try:
return container.find(xpath, ns).text
except AttributeError:
return None
| # Namespaces for Scopus XML
ns = {'dtd': 'http://www.elsevier.com/xml/svapi/abstract/dtd',
'dn': 'http://www.elsevier.com/xml/svapi/abstract/dtd',
'ait': "http://www.elsevier.com/xml/ani/ait",
'cto': "http://www.elsevier.com/xml/cto/dtd",
'xocs': "http://www.elsevier.com/xml/xocs/dtd",
'ce': 'http://www.elsevier.com/xml/ani/common',
'prism': 'http://prismstandard.org/namespaces/basic/2.0/',
'xsi': "http://www.w3.org/2001/XMLSchema-instance",
'dc': 'http://purl.org/dc/elements/1.1/',
'atom': 'http://www.w3.org/2005/Atom',
'opensearch': 'http://a9.com/-/spec/opensearch/1.1/'}
def get_encoded_text(container, xpath):
"""Return text for element at xpath in the container xml if it is there.
Parameters
----------
container : xml.etree.ElementTree.Element
The element to be searched in.
xpath : str
The path to be looked for.
Returns
-------
result : str
"""
try:
return "".join(container.find(xpath, ns).itertext())
except AttributeError:
return None
| Use itertext() to skip children in elements with text | Use itertext() to skip children in elements with text
| Python | mit | scopus-api/scopus,jkitchin/scopus |
e924f67b37c1a7612e520cca9715152029ddf338 | test/integration/ggrc/services/test_query_snapshots.py | test/integration/ggrc/services/test_query_snapshots.py | # coding: utf-8
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Tests for /query api endpoint."""
from datetime import datetime
from operator import itemgetter
from flask import json
from nose.plugins.skip import SkipTest
from ggrc import db
from ggrc import views
from ggrc.models import CustomAttributeDefinition as CAD
from integration.ggrc.converters import TestCase
from integration.ggrc.models import factories
class BaseQueryAPITestCase(TestCase):
"""Base class for /query api tests with utility methods."""
def setUp(self):
"""Log in before performing queries."""
# we don't call super as TestCase.setUp clears the DB
# super(BaseQueryAPITestCase, self).setUp()
self.client.get("/login")
def _setup_objects(self):
audit = factories.AuditFactory()
factories.MarketFactory()
factories.MarketFactory()
def test_basic_query_in(self):
"""Filter by ~ operator."""
self._setup_objects()
| # coding: utf-8
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Tests for /query api endpoint."""
from ggrc import views
from ggrc import models
from integration.ggrc.converters import TestCase
from integration.ggrc.models import factories
class BaseQueryAPITestCase(TestCase):
"""Base class for /query api tests with utility methods."""
def setUp(self):
"""Log in before performing queries."""
super(BaseQueryAPITestCase, self).setUp()
self.client.get("/login")
def _setup_objects(self):
text_cad = factories.CustomAttributeDefinitionFactory(
definition_type="market",
)
date_cad = factories.CustomAttributeDefinitionFactory(
definition_type="market",
attribute_type="Text",
)
audit = factories.AuditFactory()
for i in range(5):
market = factories.MarketFactory()
factories.CustomAttributeValueFactory(
custom_attribute=date_cad,
attributable=market,
attribute_value="2016-11-0{}".format(i + 1),
)
factories.CustomAttributeValueFactory(
custom_attribute=text_cad,
attributable=market,
attribute_value="2016-11-0{}".format(i + 1),
)
revisions = models.Revision.query.filter(
models.Revision.resource_type == "Market")
self.snapshots = [
factories.SnapshotFactory(
child_id=revision.resource_id,
child_type=revision.resource_type,
revision=revision,
parent=audit,
)
for revision in revisions
]
views.do_reindex()
def test_basic_query_in(self):
"""Filter by ~ operator."""
self._setup_objects()
| Update snapshot query test generation | Update snapshot query test generation
| Python | apache-2.0 | selahssea/ggrc-core,plamut/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core |
aaa3f6b8154f03eab16528c05d889c6160e63f22 | server/siege/views/devices.py | server/siege/views/devices.py | from flask import request
from flask import url_for
from flask import abort
from siege.service import app, db
from siege.models import Device
from view_utils import jsonate
@app.route('/devices')
def devices_index():
response = jsonate([d.to_dict() for d in Device.query.all()])
return response
@app.route('/devices/<device_id>')
def devices_get(device_id):
device = Device.query.get(device_id)
if not device:
abort(404, 'Device not found')
response = jsonate(device.to_dict())
return response
@app.route('/devices', methods=['POST'])
def devices_create():
new_device = Device(comment=request.access_route)
db.session.add(new_device)
db.session.commit()
response = jsonate(new_device.to_dict())
response.status_code = 201
response.headers['Location'] = url_for('devices_get', device_id=new_device.id)
return response | from flask import request
from flask import url_for
from flask import abort
from siege.service import app, db
from siege.models import Device
from view_utils import jsonate
@app.route('/devices')
def devices_index():
response = jsonate([d.to_dict() for d in Device.query.all()])
return response
@app.route('/devices/<device_id>')
def devices_get(device_id):
device = Device.query.get(device_id)
if not device:
abort(404, 'Device not found')
response = jsonate(device.to_dict())
return response
@app.route('/devices', methods=['POST'])
def devices_create():
comment = '%s, %s' % (request.remote_addr, request.user_agent)
new_device = Device(comment=comment)
db.session.add(new_device)
db.session.commit()
response = jsonate(new_device.to_dict())
response.status_code = 201
response.headers['Location'] = url_for('devices_get', device_id=new_device.id)
return response | Put the user agent in the device object | Put the user agent in the device object
| Python | bsd-2-clause | WalterCReel3/siege,WalterCReel3/siege,WalterCReel3/siege,WalterCReel3/siege |
95d0461cf2f06534f81a954b1f95658cbb019ec6 | tests/startsymbol_tests/NonterminalNotInGrammarTest.py | tests/startsymbol_tests/NonterminalNotInGrammarTest.py | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 10.08.2017 23:20
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import NonterminalDoesNotExistsException
class NonterminalNotInGrammarTest(TestCase):
pass
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 10.08.2017 23:20
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
from grammpy.exceptions import NonterminalDoesNotExistsException
class A(Nonterminal):
pass
class B(Nonterminal):
pass
class NonterminalNotInGrammarTest(TestCase):
def test_shouldNotSetStartSymbol(self):
g = Grammar(nonterminals=[A])
self.assertFalse(g.start_isSet())
with self.assertRaises(NonterminalDoesNotExistsException):
g.start_set(B)
self.assertFalse(g.start_isSet())
self.assertFalse(g.start_is(B))
def test_shouldNotSetStartSymbolWhenCreate(self):
with self.assertRaises(NonterminalDoesNotExistsException):
g = Grammar(nonterminals=[B],
start_symbol=A)
def test_oldStartSymbolShouldStaySame(self):
g = Grammar(nonterminals=[A], start_symbol=A)
self.assertTrue(g.start_isSet())
with self.assertRaises(NonterminalDoesNotExistsException):
g.start_set(B)
self.assertTrue(g.start_isSet())
self.assertTrue(g.start_is(A))
self.assertEqual(g.start_get(), A)
if __name__ == '__main__':
main()
| Add tests of setting nonterminal, which is not in grammar, as start symbol | Add tests of setting nonterminal, which is not in grammar, as start symbol
| Python | mit | PatrikValkovic/grammpy |
7e88a5d648d8e9aec82be14cd55667136adb3754 | pytest-{{cookiecutter.plugin_name}}/test_{{cookiecutter.plugin_name}}.py | pytest-{{cookiecutter.plugin_name}}/test_{{cookiecutter.plugin_name}}.py | # -*- coding: utf-8 -*-
def test_bar_fixture(testdir):
testdir.tmpdir.join('test_foo.py').write('''
def test_a(bar):
assert bar == "something"
'''
result = testdir.runpytest('--foo=something')
def test_foo_option():
pass
| # -*- coding: utf-8 -*-
def test_bar_fixture(testdir):
"""Make sure that pytest accepts our fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_sth(bar):
assert bar == "europython2015"
""")
# run pytest with the following cmd args
result = testdir.runpytest(
'--foo=something',
'-v'
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_a PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cat:',
'*--foo=DEST_FOO*Set the value for the fixture "bar".',
])
| Implement test for help and cli args | Implement test for help and cli args
| Python | mit | luzfcb/cookiecutter-pytest-plugin,pytest-dev/cookiecutter-pytest-plugin,s0undt3ch/cookiecutter-pytest-plugin |
9dad4033e4a66208ca00bcb0340f6a2271f1090f | montage_wrapper/mpi.py | montage_wrapper/mpi.py | MPI_COMMAND = 'mpirun -n {n_proc} {executable}'
def set_mpi_command(command):
"""
Set the MPI Command to use.
This should contain {n_proc} to indicate the number of processes, and
{executable} to indicate the name of the executable.
Parameters
----------
command: str
The MPI command for running executables
Examples
--------
Use ``mpirun``:
>>> set_mpi_command('mpirun -n {n_proc} {executable}')
Use ``mpiexec`` with host list:
>>> set_mpi_command('mpiexec -f mpd.hosts -np {n_proc} {executable}')
"""
MPI_COMMAND = command
def _get_mpi_command(executable=None, n_proc=None):
return MPI_COMMAND.format(executable=executable, n_proc=n_proc) | MPI_COMMAND = 'mpirun -n {n_proc} {executable}'
def set_mpi_command(command):
"""
Set the MPI Command to use.
This should contain {n_proc} to indicate the number of processes, and
{executable} to indicate the name of the executable.
Parameters
----------
command: str
The MPI command for running executables
Examples
--------
Use ``mpirun``:
>>> set_mpi_command('mpirun -n {n_proc} {executable}')
Use ``mpiexec`` with host list:
>>> set_mpi_command('mpiexec -f mpd.hosts -np {n_proc} {executable}')
"""
global MPI_COMMAND
MPI_COMMAND = command
def _get_mpi_command(executable=None, n_proc=None):
return MPI_COMMAND.format(executable=executable, n_proc=n_proc) | Fix setting of custom MPI command | Fix setting of custom MPI command | Python | bsd-3-clause | vterron/montage-wrapper,astrofrog/montage-wrapper,astropy/montage-wrapper,astrofrog/montage-wrapper,jat255/montage-wrapper |
bd97e698d3a2a795f4c38d7e54eae63737ed74a6 | multi_schema/management/commands/syncdb.py | multi_schema/management/commands/syncdb.py | import os.path
from django.core.management.commands import syncdb
from django.db import models, connection, transaction
try:
from south.management.commands import syncdb
except ImportError:
pass
from ...models import Schema, template_schema
class Command(syncdb.Command):
def handle_noargs(self, **options):
# Ensure we have the clone_schema() function
clone_schema_file = os.path.join(os.path.abspath(__file__ + '/../../../'), 'sql', 'clone_schema.sql')
clone_schema_function = " ".join([x.strip() for x in open(clone_schema_file).readlines() if not x.startswith('--')])
clone_schema_function = clone_schema_function.replace("'%'", "'%%'")
cursor = connection.cursor()
cursor.execute(clone_schema_function)
# Ensure we have a __template__ schema.
template_schema.create_schema()
# Set the search path, so we find created models correctly
cursor = connection.cursor()
cursor.execute("SET search_path TO public,__template__;")
super(Command, self).handle_noargs(**options)
# Ensure all existing schemata exist (in case we imported them using loaddata or something)
for schema in Schema.objects.all():
schema.create_schema() | import os.path
from django.core.management.commands import syncdb
from django.db import models, connection, transaction
try:
from south.management.commands import syncdb
except ImportError:
pass
from ...models import Schema, template_schema
class Command(syncdb.Command):
def handle_noargs(self, **options):
# Ensure we have the clone_schema() function
clone_schema_file = os.path.join(os.path.abspath(__file__ + '/../../../'), 'sql', 'clone_schema.sql')
clone_schema_function = " ".join([x.strip() for x in open(clone_schema_file).readlines() if not x.strip().startswith('--')])
clone_schema_function = clone_schema_function.replace("'%'", "'%%'")
cursor = connection.cursor()
cursor.execute(clone_schema_function)
# Ensure we have a __template__ schema.
template_schema.create_schema()
# Set the search path, so we find created models correctly
cursor = connection.cursor()
cursor.execute("SET search_path TO public,__template__;")
super(Command, self).handle_noargs(**options)
# Ensure all existing schemata exist (in case we imported them using loaddata or something)
for schema in Schema.objects.all():
schema.create_schema() | Allow for comments in the sql file that do not start the line. | Allow for comments in the sql file that do not start the line.
| Python | bsd-3-clause | schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse |
a09689c570e70c80ad7cadd9702133b3851c63b9 | providers/provider.py | providers/provider.py | import json
import requests
from requests.utils import get_unicode_from_response
from lxml import html as lxml_html
class BaseProvider(object):
# ==== HELPER METHODS ====
def parse_html(self, url, css_selector):
html = self._http_get(url)
document = lxml_html.document_fromstring(html)
results = document.cssselect(css_selector)
data = [result.text_content() for result in results]
return data
def traverse_json(self, data, path):
if not path:
return data
for item in path.split("."):
if item.isdigit():
item = int(item)
try:
data = data[item]
except (IndexError, KeyError):
return {}
return data
def parse_json(self, url, path=None):
data = self._http_get(url)
data = json.loads(data)
data = self.traverse_json(data, path)
return data
# ==== PRIVATE METHODS ====
def _http_get(self, url, timeout=60 * 60):
response = requests.get(url, timeout=10)
return get_unicode_from_response(response)
| import json
import requests
from requests.utils import get_unicode_from_response
from lxml import html as lxml_html
class BaseProvider(object):
# ==== HELPER METHODS ====
def parse_html(self, url, css_selector, timeout=60):
html = self._http_get(url, timeout=timeout)
document = lxml_html.document_fromstring(html)
results = document.cssselect(css_selector)
data = [result.text_content() for result in results]
return data
def traverse_json(self, data, path):
if not path:
return data
for item in path.split("."):
if item.isdigit():
item = int(item)
try:
data = data[item]
except (IndexError, KeyError):
return {}
return data
def parse_json(self, url, path=None, timeout=60):
data = self._http_get(url, timeout=timeout)
data = json.loads(data)
data = self.traverse_json(data, path)
return data
# ==== PRIVATE METHODS ====
def _http_get(self, url, timeout=60):
response = requests.get(url, timeout=timeout)
return get_unicode_from_response(response)
| Increase timeout to 60 sec and make available to external callers. | Increase timeout to 60 sec and make available to external callers.
| Python | mit | EmilStenstrom/nephele |
cdbcc903c72ba7bf8acb45d69248e62fdc10efcd | rtrss/util.py | rtrss/util.py | import csv
import logging
import os
import datetime
from rtrss import config
from rtrss.models import User
from rtrss.database import session_scope
_logger = logging.getLogger(__name__)
def save_debug_file(filename, contents):
ts_prefix = datetime.datetime.now().strftime('%d-%m-%Y_%H_%M_%S')
filename = "{}_{}".format(ts_prefix, filename)
filename = os.path.join(config.DATA_DIR, filename)
with open(filename, 'w') as f:
f.write(contents)
def import_users(filename):
"""Import user account from CSV file, skipping existing users"""
with open(filename) as csvfile:
reader = csv.DictReader(csvfile, skipinitialspace=True)
lines = [line for line in reader]
added = 0
with session_scope() as db:
for fields in lines:
fields['id'] = int(fields['id'])
fields['downloads_limit'] = int(fields['downloads_limit'])
existing_user = db.query(User).get(fields['id'])
if existing_user:
continue
user = User(**fields)
db.add(user)
added += 1
_logger.info("%d users added, %d skipped", added, len(lines) - added)
| import csv
import logging
import os
import datetime
from rtrss import config
from rtrss.models import User
from rtrss.database import session_scope
_logger = logging.getLogger(__name__)
def save_debug_file(filename, contents):
ts_prefix = datetime.datetime.now().strftime('%d-%m-%Y_%H_%M_%S')
filename = "{}_{}".format(ts_prefix, filename)
filename = os.path.join(config.DATA_DIR, filename)
with open(filename, 'w') as f:
f.write(contents)
def import_users(filename):
"""Import user account from CSV file, skipping existing users"""
with open(filename) as csvfile:
reader = csv.DictReader(csvfile, skipinitialspace=True)
lines = [line for line in reader]
_logger.info("Importing {} accounts from {}".format(filename, len(lines)))
added = 0
with session_scope() as db:
for fields in lines:
fields['id'] = int(fields['id'])
fields['downloads_limit'] = int(fields['downloads_limit'])
existing_user = db.query(User).get(fields['id'])
if existing_user:
continue
user = User(**fields)
db.add(user)
added += 1
_logger.info("%d users added, %d skipped", added, len(lines) - added)
| Add log message at start of user import | Add log message at start of user import
| Python | apache-2.0 | notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss |
fe1e6e4af9bf9b85be1046dd9b831e9741aaa677 | src/artgraph/miner.py | src/artgraph/miner.py | import pymw
import pymw.interfaces
import artgraph.plugins.infobox
from artgraph.node import NodeTypes
from artgraph.node import Node
class Miner(object):
nodes = []
relationships = []
master = None
task_queue = []
def __init__(self, debug=False):
mwinterface = pymw.interfaces.GenericInterface()
self.master = pymw.PyMW_Master(mwinterface, delete_files=not debug)
def mine(self, artist):
self.mine_internal(Node(artist, NodeTypes.ARTIST))
(finished_task, new_relationships) = self.master.get_result()
while new_relationships:
for n in new_relationships:
self.relationships.append(n)
if n.get_predicate() not in self.nodes:
self.mine_internal(n.get_predicate())
def mine_internal(self, current_node, level=0, parent=None, relationship=None):
self.nodes.append(current_node)
infoboxplugin = artgraph.plugins.infobox.InfoboxPlugin(current_node)
self.task_queue.append(self.master.submit_task(infoboxplugin.get_nodes, input_data=(infoboxplugin,), modules=("artgraph",), data_files=("my.cnf",)))
| import pymw
import pymw.interfaces
import artgraph.plugins.infobox
from artgraph.node import NodeTypes
from artgraph.node import Node
class Miner(object):
nodes = []
relationships = []
master = None
task_queue = []
def __init__(self, debug=False):
mwinterface = pymw.interfaces.GenericInterface()
self.master = pymw.PyMW_Master(mwinterface, delete_files=not debug)
def mine(self, artist):
self.mine_internal(Node(artist, NodeTypes.ARTIST))
(finished_task, new_relationships) = self.master.get_result()
while new_relationships:
for n in new_relationships:
self.relationships.append(n)
if n.get_predicate() not in self.nodes:
self.mine_internal(n.get_predicate())
(finished_task, new_relationships) = self.master.get_result()
def mine_internal(self, current_node, level=0, parent=None, relationship=None):
self.nodes.append(current_node)
infoboxplugin = artgraph.plugins.infobox.InfoboxPlugin(current_node)
self.task_queue.append(self.master.submit_task(infoboxplugin.get_nodes, input_data=(infoboxplugin,), modules=("artgraph",), data_files=("my.cnf",)))
| Correct implementation of the while loop | Correct implementation of the while loop | Python | mit | dMaggot/ArtistGraph |
df91557edb813fc4b62b040b54e914f2f9b5237e | lpthw/ex30.py | lpthw/ex30.py | people = 30
cars = 40
buses = 55
if cars > people:
print "We should take the cars."
elif cars < people:
print "We should not take the cars."
else:
print "We can't decide."
if buses > cars:
print "That's too many buses."
elif buses < cars:
print "Maybe we could take the buses."
else:
print "We still can't decide."
if people > buses:
print "Alright, let's just take the buses."
else:
print "Fine, let's stay home then."
# Study Drills
if (buses > cars and cars > people):
print "The buses out numbers us... DECEPTICONS!!"
elif (buses > cars and cars < people):
print "This line won't print... just trust me. *peers through console*"
else:
print "If this line actually prints, I'll go to bed. I dare you to try."
| people = 30
cars = 40
buses = 55
if cars > people:
print "We should take the cars."
elif cars < people:
print "We should not take the cars."
else:
print "We can't decide."
if buses > cars:
print "That's too many buses."
elif buses < cars:
print "Maybe we could take the buses."
else:
print "We still can't decide."
if people > buses:
print "Alright, let's just take the buses."
else:
print "Fine, let's stay home then."
# Study Drills
if (buses > cars and cars > people):
print "The buses out number us... DECEPTICONS!!"
elif (buses > cars and cars < people):
print "This line won't print... just trust me. *peers through console*"
else:
print "If this line actually prints, I'll go to bed. I dare you to try."
| Fix on teeny tiny little typo... Also remember to write commit messages in the present tense. | Fix on teeny tiny little typo... Also remember to write commit messages in the present tense.
| Python | mit | jaredmanning/learning,jaredmanning/learning |
a229e1737542a5011e70c3fa63c360638e96e754 | lettuce_webdriver/css_selector_steps.py | lettuce_webdriver/css_selector_steps.py | from lettuce import step
from lettuce import world
from lettuce_webdriver.util import assert_true
from lettuce_webdriver.util import assert_false
import logging
log = logging.getLogger(__name__)
def wait_for_elem(browser, xpath, timeout=15):
start = time.time()
elems = []
while time.time() - start < timeout:
elems = browser.find_elements_by_css_selector(xpath)
if elems:
return elems
time.sleep(0.2)
return elems
@step(r'There should be an element matching \$\("(.*?)"\) within (\d+) seconds?')
def wait_for_element_by_selector(step, selector, seconds):
log.error(selector)
#elems = wait_for_elem(world.browser, selector, seconds)
#assert_true(step, elems)
__all__ = ['wait_for_element_by_selector']
| import time
from lettuce import step
from lettuce import world
from lettuce_webdriver.util import assert_true
from lettuce_webdriver.util import assert_false
import logging
log = logging.getLogger(__name__)
def wait_for_elem(browser, sel, timeout=15):
start = time.time()
elems = []
while time.time() - start < timeout:
elems = browser.find_elements_by_css_selector(sel)
if elems:
return elems
time.sleep(0.2)
return elems
@step(r'There should be an element matching \$\("(.*?)"\) within (\d+) seconds?')
def wait_for_element_by_selector(step, selector, seconds):
log.error(selector)
elems = wait_for_elem(world.browser, selector, seconds)
assert_true(step, elems)
__all__ = ['wait_for_element_by_selector']
| Make the step actually do something. | Make the step actually do something.
| Python | mit | koterpillar/aloe_webdriver,aloetesting/aloe_webdriver,macndesign/lettuce_webdriver,ponsfrilus/lettuce_webdriver,aloetesting/aloe_webdriver,macndesign/lettuce_webdriver,infoxchange/aloe_webdriver,bbangert/lettuce_webdriver,aloetesting/aloe_webdriver,koterpillar/aloe_webdriver,infoxchange/lettuce_webdriver,infoxchange/aloe_webdriver,infoxchange/lettuce_webdriver,ponsfrilus/lettuce_webdriver,bbangert/lettuce_webdriver |
6926ddbb9cdbf05808339412cee5106e581f66cb | tests/import_wordpress_and_build_workflow.py | tests/import_wordpress_and_build_workflow.py | # -*- coding: utf-8 -*-
"""
Script to test the import workflow.
It will remove an existing Nikola installation and then install from the
package directory.
After that it will do create a new site with the import_wordpress
command and use that newly created site to make a build.
"""
from __future__ import unicode_literals, print_function
import os
import shutil
TEST_SITE_DIRECTORY = 'import_test_site'
def main(import_directory=None):
if import_directory is None:
import_directory = TEST_SITE_DIRECTORY
if os.path.exists(import_directory):
print('deleting %s' % import_directory)
shutil.rmtree(import_directory)
test_directory = os.path.dirname(__file__)
package_directory = os.path.abspath(os.path.join(test_directory, '..'))
os.system('echo "y" | pip uninstall Nikola')
os.system('pip install %s' % package_directory)
os.system('nikola')
import_file = os.path.join(test_directory, 'wordpress_export_example.xml')
os.system(
'nikola import_wordpress -f %s -o %s' % (import_file, import_directory))
assert os.path.exists(
import_directory), "The directory %s should be existing."
os.chdir(import_directory)
os.system('nikola build')
if __name__ == '__main__':
main()
| # -*- coding: utf-8 -*-
"""
Script to test the import workflow.
It will remove an existing Nikola installation and then install from the
package directory.
After that it will do create a new site with the import_wordpress
command and use that newly created site to make a build.
"""
from __future__ import unicode_literals, print_function
import os
import shutil
TEST_SITE_DIRECTORY = 'import_test_site'
def main(import_directory=None):
if import_directory is None:
import_directory = TEST_SITE_DIRECTORY
if os.path.exists(import_directory):
print('deleting %s' % import_directory)
shutil.rmtree(import_directory)
test_directory = os.path.dirname(__file__)
package_directory = os.path.abspath(os.path.join(test_directory, '..'))
os.system('echo "y" | pip uninstall Nikola')
os.system('pip install %s' % package_directory)
os.system('nikola')
import_file = os.path.join(test_directory, 'wordpress_export_example.xml')
os.system(
'nikola import_wordpress -o {folder} {file}'.format(file=import_file,
folder=import_directory))
assert os.path.exists(
import_directory), "The directory %s should be existing."
os.chdir(import_directory)
os.system('nikola build')
if __name__ == '__main__':
main()
| Use the more or less new options for importing | Use the more or less new options for importing
| Python | mit | damianavila/nikola,xuhdev/nikola,getnikola/nikola,berezovskyi/nikola,TyberiusPrime/nikola,kotnik/nikola,atiro/nikola,servalproject/nikola,gwax/nikola,schettino72/nikola,kotnik/nikola,lucacerone/nikola,okin/nikola,s2hc-johan/nikola,andredias/nikola,masayuko/nikola,x1101/nikola,s2hc-johan/nikola,Proteus-tech/nikola,techdragon/nikola,jjconti/nikola,berezovskyi/nikola,techdragon/nikola,servalproject/nikola,masayuko/nikola,getnikola/nikola,immanetize/nikola,damianavila/nikola,jjconti/nikola,knowsuchagency/nikola,wcmckee/nikola,JohnTroony/nikola,xuhdev/nikola,getnikola/nikola,masayuko/nikola,andredias/nikola,gwax/nikola,knowsuchagency/nikola,damianavila/nikola,berezovskyi/nikola,TyberiusPrime/nikola,wcmckee/nikola,pluser/nikola,okin/nikola,schettino72/nikola,xuhdev/nikola,okin/nikola,x1101/nikola,TyberiusPrime/nikola,JohnTroony/nikola,wcmckee/nikola,atiro/nikola,lucacerone/nikola,yamila-moreno/nikola,Proteus-tech/nikola,x1101/nikola,lucacerone/nikola,kotnik/nikola,jjconti/nikola,JohnTroony/nikola,xuhdev/nikola,atiro/nikola,knowsuchagency/nikola,immanetize/nikola,Proteus-tech/nikola,gwax/nikola,techdragon/nikola,getnikola/nikola,pluser/nikola,s2hc-johan/nikola,immanetize/nikola,schettino72/nikola,servalproject/nikola,Proteus-tech/nikola,yamila-moreno/nikola,okin/nikola,andredias/nikola,pluser/nikola,yamila-moreno/nikola |
48394c55599968c456f1f58c0fcdf58e1750f293 | amplpy/tests/TestBase.py | amplpy/tests/TestBase.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function, absolute_import, division
from builtins import map, range, object, zip, sorted
from .context import amplpy
import unittest
import tempfile
import shutil
import os
class TestBase(unittest.TestCase):
def setUp(self):
self.ampl = amplpy.AMPL()
self.dirpath = tempfile.mkdtemp()
def str2file(self, filename, content):
fullpath = self.tmpfile(filename)
with open(fullpath, 'w') as f:
print(content, file=f)
return fullpath
def tmpfile(self, filename):
return os.path.join(self.dirpath, filename)
def tearDown(self):
self.ampl.close()
shutil.rmtree(self.dirpath)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function, absolute_import, division
from builtins import map, range, object, zip, sorted
from .context import amplpy
import unittest
import tempfile
import shutil
import os
# For MSYS2, MINGW, etc., run with:
# $ REAL_ROOT=`cygpath -w /` python -m amplpy.tests
REAL_ROOT = os.environ.get('REAL_ROOT', None)
class TestBase(unittest.TestCase):
def setUp(self):
self.ampl = amplpy.AMPL()
self.dirpath = tempfile.mkdtemp()
def _tmpfile(self, filename):
return os.path.join(self.dirpath, filename)
def _real_filename(self, filename):
# Workaround for MSYS2, MINGW paths
if REAL_ROOT is not None and filename.startswith('/'):
filename = filename.replace('/', REAL_ROOT, 1)
return filename
def str2file(self, filename, content):
fullpath = self._tmpfile(filename)
with open(fullpath, 'w') as f:
print(content, file=f)
return self._real_filename(fullpath)
def tmpfile(self, filename):
return self._real_filename(self._tmpfile(filename))
def tearDown(self):
self.ampl.close()
shutil.rmtree(self.dirpath)
if __name__ == '__main__':
unittest.main()
| Add workaround for tests on MSYS2 and MINGW | Add workaround for tests on MSYS2 and MINGW
| Python | bsd-3-clause | ampl/amplpy,ampl/amplpy,ampl/amplpy |
b121b98516beae23b5517aea1810a6eeed7a4fff | fedoracommunity/mokshaapps/packages/controllers/overview.py | fedoracommunity/mokshaapps/packages/controllers/overview.py | from moksha.lib.base import Controller
from moksha.lib.helpers import Category, MokshaApp, Not, not_anonymous, MokshaWidget
from moksha.api.widgets.containers import DashboardContainer
from moksha.api.widgets import ContextAwareWidget
from tg import expose, tmpl_context, require, request
from bugs import BugsController
from builds import BuildsController
from changelog import ChangelogController
from downloads import DownloadsController
from maintainers import MaintainersController
from owners import OwnersController
from updates import UpdatesController
from versions import VersionsController
from watchers import WatchersController
class OverviewDashboard(DashboardContainer, ContextAwareWidget):
template = 'mako:fedoracommunity.mokshaapps.packages.templates.single_col_dashboard'
layout = [Category('content-col-apps',[])]
overview_dashboard = OverviewDashboard
class OverviewController(Controller):
bugs = BugsController()
builds = BuildsController()
changelog = ChangelogController()
downloads = DownloadsController()
maintainers = MaintainersController()
owners = OwnersController()
updates = UpdatesController()
verisons = VersionsController()
watchers = WatchersController()
@expose('mako:moksha.templates.widget')
def index(self, package):
tmpl_context.widget = overview_dashboard
return {'package': package}
@expose('mako:moksha.templates.widget')
def overview(self, package):
return self.index(package)
| from moksha.lib.base import Controller
from moksha.lib.helpers import Category, MokshaApp, Not, not_anonymous, MokshaWidget
from moksha.api.widgets.containers import DashboardContainer
from moksha.api.widgets import ContextAwareWidget
from tg import expose, tmpl_context, require, request
from bugs import BugsController
from builds import BuildsController
from changelog import ChangelogController
from downloads import DownloadsController
from maintainers import MaintainersController
from owners import OwnersController
from updates import UpdatesController
from versions import VersionsController
from watchers import WatchersController
class OverviewDashboard(DashboardContainer, ContextAwareWidget):
template = 'mako:fedoracommunity.mokshaapps.packages.templates.single_col_dashboard'
layout = [Category('content-col-apps',[])]
overview_dashboard = OverviewDashboard
class OverviewController(Controller):
bugs = BugsController()
builds = BuildsController()
changelog = ChangelogController()
downloads = DownloadsController()
maintainers = MaintainersController()
owners = OwnersController()
updates = UpdatesController()
verisons = VersionsController()
watchers = WatchersController()
@expose('mako:moksha.templates.widget')
def index(self, package):
tmpl_context.widget = overview_dashboard
return dict(package=package, options={})
@expose('mako:moksha.templates.widget')
def overview(self, package):
tmpl_context.widget = overview_dashboard
return dict(package=package, options={})
| Fix the use of the moksha.templates.widget template, in one place. This needs to be fixed in many places | Fix the use of the moksha.templates.widget template, in one place. This needs to be fixed in many places
| Python | agpl-3.0 | fedora-infra/fedora-packages,fedora-infra/fedora-packages,fedora-infra/fedora-packages,fedora-infra/fedora-packages,Fale/fedora-packages,Fale/fedora-packages,Fale/fedora-packages |
43afda1fa0ae2d0011d6b87b5c05e3eb1fe13a21 | viewer_examples/viewers/collection_viewer.py | viewer_examples/viewers/collection_viewer.py | """
=====================
CollectionViewer demo
=====================
Demo of CollectionViewer for viewing collections of images. This demo uses
successively darker versions of the same image to fake an image collection.
You can scroll through images with the slider, or you can interact with the
viewer using your keyboard:
left/right arrows
Previous/next image in collection.
number keys, 0--9
0% to 90% of collection. For example, "5" goes to the image in the
middle (i.e. 50%) of the collection.
home/end keys
First/last image in collection.
"""
import numpy as np
from skimage import data
from skimage.viewer import CollectionViewer
img = data.lena()
img_collection = [np.uint8(img * 0.9**i) for i in range(20)]
view = CollectionViewer(img_collection)
view.show()
| """
=====================
CollectionViewer demo
=====================
Demo of CollectionViewer for viewing collections of images. This demo uses
successively darker versions of the same image to fake an image collection.
You can scroll through images with the slider, or you can interact with the
viewer using your keyboard:
left/right arrows
Previous/next image in collection.
number keys, 0--9
0% to 90% of collection. For example, "5" goes to the image in the
middle (i.e. 50%) of the collection.
home/end keys
First/last image in collection.
"""
import numpy as np
from skimage import data
from skimage.viewer import CollectionViewer
from skimage.transform import build_gaussian_pyramid
img = data.lena()
img_collection = tuple(build_gaussian_pyramid(img))
view = CollectionViewer(img_collection)
view.show()
| Use gaussian pyramid function for collection viewer example | Use gaussian pyramid function for collection viewer example
| Python | bsd-3-clause | rjeli/scikit-image,juliusbierk/scikit-image,vighneshbirodkar/scikit-image,Midafi/scikit-image,newville/scikit-image,SamHames/scikit-image,bennlich/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,blink1073/scikit-image,GaZ3ll3/scikit-image,keflavich/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,emon10005/scikit-image,youprofit/scikit-image,ofgulban/scikit-image,newville/scikit-image,bsipocz/scikit-image,Midafi/scikit-image,almarklein/scikit-image,jwiggins/scikit-image,rjeli/scikit-image,chintak/scikit-image,SamHames/scikit-image,michaelaye/scikit-image,chintak/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,dpshelio/scikit-image,paalge/scikit-image,vighneshbirodkar/scikit-image,bennlich/scikit-image,almarklein/scikit-image,oew1v07/scikit-image,Britefury/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,blink1073/scikit-image,rjeli/scikit-image,jwiggins/scikit-image,paalge/scikit-image,GaZ3ll3/scikit-image,warmspringwinds/scikit-image,almarklein/scikit-image,Hiyorimi/scikit-image,juliusbierk/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,Hiyorimi/scikit-image,SamHames/scikit-image,michaelpacer/scikit-image,bsipocz/scikit-image,ajaybhat/scikit-image,oew1v07/scikit-image,warmspringwinds/scikit-image,ClinicalGraphics/scikit-image,Britefury/scikit-image,robintw/scikit-image,SamHames/scikit-image,paalge/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,emon10005/scikit-image,youprofit/scikit-image,michaelaye/scikit-image,ajaybhat/scikit-image,dpshelio/scikit-image,pratapvardhan/scikit-image,robintw/scikit-image,WarrenWeckesser/scikits-image |
710c77b2805058364e326d26c9e0c7cfcfed6453 | repugeng/Compat3k.py | repugeng/Compat3k.py | from repugeng.StaticClass import StaticClass
import sys
class Compat3k(StaticClass):
@classmethod
def str_to_bytes(cls, s):
"""Convert a string of either width to a byte string."""
try:
try:
return bytes(s)
except NameError:
return str(s)
except ValueError:
pass #Not ASCII? Not really a problem...
except TypeError:
pass #I didn't specify an encoding? Oh, boo hoo...
return s.encode("latin1") #Not utf-8, m'kay...
@classmethod
def prompt_user(cls, s="", file=None):
"""Substitute of py2k's raw_input()."""
(file or sys.stderr).write(s)
return sys.stdin.readline().rstrip("\r\n")
| from repugeng.StaticClass import StaticClass
import sys
class Compat3k(StaticClass):
@classmethod
def str_to_bytes(cls, s):
"""Convert a string of either width to a byte string."""
try:
try:
return bytes(s)
except NameError:
return str(s)
except ValueError:
pass #Not ASCII? Not really a problem...
except TypeError:
pass #I didn't specify an encoding? Oh, boo hoo...
return s.encode("latin1") #Not utf-8, m'kay...
@classmethod
def prompt_user(cls, s="", file=None):
"""Substitute of py2k's raw_input()."""
(file or sys.stderr).write(s)
(file or sys.stderr).flush()
return sys.stdin.readline().rstrip("\r\n")
| Fix yet another 3k issue (stderr not flushing automatically). | Fix yet another 3k issue (stderr not flushing automatically).
Signed-off-by: Thomas Hori <[email protected]>
| Python | mpl-2.0 | thomas-hori/Repuge-NG |
c0ff6cbf293bca3f0757a62e05a14c56dbdf12a4 | installscripts/jazz-terraform-unix-noinstances/scripts/health_check.py | installscripts/jazz-terraform-unix-noinstances/scripts/health_check.py | import boto3
import sys
import time
def health_check_tg(client, tg_arn, max_tries):
if max_tries == 1:
return False
else:
max_tries -= 1
try:
response = client.describe_target_health(TargetGroupArn=str(tg_arn))
if response['TargetHealthDescriptions'][0]['TargetHealth']['State'] == 'healthy':
time.sleep(30)
return True
else:
time.sleep(30)
health_check_tg(client, tg_arn, max_tries)
except Exception:
time.sleep(30)
health_check_tg(client, tg_arn, max_tries)
if __name__ == u"__main__":
client = boto3.client('elbv2')
health_check_tg(client, sys.argv[1], 50)
| import boto3
import sys
import time
def health_check_tg(client, tg_arn, max_tries):
if max_tries == 1:
return False
else:
max_tries -= 1
try:
response = client.describe_target_health(TargetGroupArn=str(tg_arn))
if response['TargetHealthDescriptions'][0]['TargetHealth']['State'] == 'healthy':
time.sleep(30)
return True
else:
time.sleep(30)
health_check_tg(client, tg_arn, max_tries)
except Exception:
time.sleep(30)
health_check_tg(client, tg_arn, max_tries)
if __name__ == u"__main__":
client = boto3.client('elbv2')
health_check_tg(client, sys.argv[1], 50)
| Fix travis issue for v1.13.1 release | Fix travis issue for v1.13.1 release
| Python | apache-2.0 | tmobile/jazz-installer,tmobile/jazz-installer,tmobile/jazz-installer,tmobile/jazz-installer |
8ad795f86e16209007537cbf47a3466733653e2d | snippets/__main__.py | snippets/__main__.py | import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.source)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
| import argparse
import sys
from .generator import Generator
from .repository import Repository
def run(args=sys.argv[1:]):
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--repository', default='snippets')
parser.add_argument('-o', '--output', default='output')
parser.add_argument('-t', '--theme')
args = parser.parse_args(args)
repository = Repository()
repository.add_repopath(args.repository)
generator = Generator(repository, args.theme)
generator.generate(args.output)
if __name__ == '__main__':
run()
| Fix cli argument name for repository path | Fix cli argument name for repository path
| Python | isc | trilan/snippets,trilan/snippets |
cd38101f097edc60312f0c083385968ed40fd54a | src/control.py | src/control.py | #!/usr/bin/env python
import rospy
from gazebo_msgs.msg import ModelStates
from geometry_msgs.msg import Twist
from constants import DELTA_T, STEPS
from controller import create_controller
from plotter import Plotter
def get_pose(message):
global current_pose
current_pose = message.pose[2]
def compute_control_actions():
global i
controller.compute_control_actions(current_pose, i)
plotter.add_point(current_pose)
twist = Twist()
twist.linear.x = controller.v_n
twist.angular.z = controller.w_n
twist_publisher.publish(twist)
i += 1
if __name__ == '__main__':
rospy.init_node('control')
current_pose = None
subscriber = rospy.Subscriber('gazebo/model_states', ModelStates, get_pose)
twist_publisher = rospy.Publisher('computed_control_actions', Twist, queue_size=1)
while current_pose is None:
pass
i = 0
plotter = Plotter()
controller = create_controller()
rate = rospy.Rate(int(1 / DELTA_T))
while not rospy.is_shutdown() and i < STEPS:
compute_control_actions()
rate.sleep()
plotter.plot_results()
rospy.spin()
| #!/usr/bin/env python
import rospy
from gazebo_msgs.msg import ModelStates
from geometry_msgs.msg import Twist
from constants import DELTA_T, STEPS
from controller import create_controller
from plotter import Plotter
def get_pose(message):
global current_pose, current_twist
current_pose = message.pose[2]
current_twist = message.twist[2]
def compute_control_actions():
global i
controller.compute_control_actions(current_pose, i)
plotter.add_point(current_pose)
twist = Twist()
twist.linear.x = controller.v_n
twist.angular.z = controller.w_n
twist_publisher.publish(twist)
i += 1
if __name__ == '__main__':
rospy.init_node('control')
current_pose = None
current_twist = None
subscriber = rospy.Subscriber('gazebo/model_states', ModelStates, get_pose)
twist_publisher = rospy.Publisher('computed_control_actions', Twist, queue_size=1)
while current_pose is None or current_twist is None:
pass
i = 0
plotter = Plotter()
controller = create_controller()
rate = rospy.Rate(int(1 / DELTA_T))
while not rospy.is_shutdown() and i < STEPS:
compute_control_actions()
rate.sleep()
plotter.plot_results()
rospy.spin()
| Store current twist in a global variable | Store current twist in a global variable
| Python | mit | bit0001/trajectory_tracking,bit0001/trajectory_tracking |
7d03a6bfa32d2bf20a95769b2937e098972285af | src/scs_mfr/test/opc_test.py | src/scs_mfr/test/opc_test.py | """
Created on 18 May 2017
@author: Bruno Beloff ([email protected])
"""
import sys
from scs_dfe.particulate.opc_n2 import OPCN2
from scs_host.bus.i2c import I2C
from scs_host.sys.host import Host
from scs_mfr.test.test import Test
# --------------------------------------------------------------------------------------------------------------------
class OPCTest(Test):
"""
test script
"""
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, verbose):
Test.__init__(self, verbose)
# ----------------------------------------------------------------------------------------------------------------
def conduct(self):
if self.verbose:
print("OPC...", file=sys.stderr)
opc = None
try:
I2C.open(Host.I2C_SENSORS)
# resources...
opc = OPCN2(Host.OPC_SPI_BUS, Host.OPC_SPI_DEVICE)
opc.power_on()
opc.operations_on()
# test...
self.datum = opc.firmware()
if self.verbose:
print(self.datum, file=sys.stderr)
# test criterion...
return len(self.datum) > 0 and self.datum.startswith('OPC')
finally:
if opc:
opc.operations_off()
opc.power_off()
I2C.close()
| """
Created on 18 May 2017
@author: Bruno Beloff ([email protected])
"""
import sys
from scs_dfe.particulate.opc_n2 import OPCN2
from scs_host.bus.i2c import I2C
from scs_host.sys.host import Host
from scs_mfr.test.test import Test
# --------------------------------------------------------------------------------------------------------------------
class OPCTest(Test):
"""
test script
"""
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, verbose):
Test.__init__(self, verbose)
# ----------------------------------------------------------------------------------------------------------------
def conduct(self):
if self.verbose:
print("OPC...", file=sys.stderr)
opc = None
try:
I2C.open(Host.I2C_SENSORS)
# resources...
opc = OPCN2(Host.opc_spi_bus(), Host.opc_spi_device())
opc.power_on()
opc.operations_on()
# test...
self.datum = opc.firmware()
if self.verbose:
print(self.datum, file=sys.stderr)
# test criterion...
return len(self.datum) > 0 and self.datum.startswith('OPC')
finally:
if opc:
opc.operations_off()
opc.power_off()
I2C.close()
| Put SPI slave configurations on Host. | Put SPI slave configurations on Host.
| Python | mit | south-coast-science/scs_mfr,south-coast-science/scs_mfr |
6b819174557a1dffbcb397dc1d6e2a3f7e01a12b | milestones/migrations/0002_data__seed_relationship_types.py | milestones/migrations/0002_data__seed_relationship_types.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from milestones.data import fetch_milestone_relationship_types
def seed_relationship_types(apps, schema_editor):
"""Seed the relationship types."""
MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType")
db_alias = schema_editor.connection.alias
for name in fetch_milestone_relationship_types().values():
MilestoneRelationshipType.objects.using(db_alias).get_or_create(
name=name,
description='Autogenerated milestone relationship type "{}"'.format(name),
)
def delete_relationship_types(apps, schema_editor):
"""Clean up any relationships we made."""
MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType")
db_alias = schema_editor.connection.alias
for name in fetch_milestone_relationship_types().values():
MilestoneRelationshipType.objects.using(db_alias).filter(name=name).delete()
class Migration(migrations.Migration):
dependencies = [
('milestones', '0001_initial'),
]
operations = [
migrations.RunPython(seed_relationship_types, delete_relationship_types),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from milestones.data import fetch_milestone_relationship_types
def seed_relationship_types(apps, schema_editor):
"""Seed the relationship types."""
MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType")
for name in fetch_milestone_relationship_types().values():
MilestoneRelationshipType.objects.get_or_create(
name=name,
description='Autogenerated milestone relationship type "{}"'.format(name),
)
def delete_relationship_types(apps, schema_editor):
"""Clean up any relationships we made."""
MilestoneRelationshipType = apps.get_model("milestones", "MilestoneRelationshipType")
for name in fetch_milestone_relationship_types().values():
MilestoneRelationshipType.objects.filter(name=name).delete()
class Migration(migrations.Migration):
dependencies = [
('milestones', '0001_initial'),
]
operations = [
migrations.RunPython(seed_relationship_types, delete_relationship_types),
]
| Remove uses of using() from migrations | Remove uses of using() from migrations
This hardcoded the db_alias fetched from schema_editor and forces django
to try and migrate any second database you use, rather than routing to
the default database. In testing a build from scratch, these do not
appear needed.
Using using() prevents us from using multiple databases behind edxapp
| Python | agpl-3.0 | edx/edx-milestones |
6bdb91aefc6acb9b0065c7edae19887778dedb22 | .ci/package-version.py | .ci/package-version.py | #!/usr/bin/env python3
import os.path
import sys
def main():
setup_py = os.path.join(os.path.dirname(os.path.dirname(__file__)),
'setup.py')
with open(setup_py, 'r') as f:
for line in f:
if line.startswith('VERSION ='):
_, _, version = line.partition('=')
print(version.strip(" \n'\""))
return 0
print('could not find package version in setup.py', file=sys.stderr)
return 1
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python3
import os.path
import sys
def main():
version_file = os.path.join(
os.path.dirname(os.path.dirname(__file__)), 'uvloop', '__init__.py')
with open(version_file, 'r') as f:
for line in f:
if line.startswith('__version__ ='):
_, _, version = line.partition('=')
print(version.strip(" \n'\""))
return 0
print('could not find package version in uvloop/__init__.py',
file=sys.stderr)
return 1
if __name__ == '__main__':
sys.exit(main())
| Fix ci / package_version.py script to support __version__ | Fix ci / package_version.py script to support __version__
| Python | apache-2.0 | 1st1/uvloop,MagicStack/uvloop,MagicStack/uvloop |
261cb5aecc52d07b10d826e8b22d17817d1c3529 | web/backend/backend_django/apps/capacity/management/commands/importpath.py | web/backend/backend_django/apps/capacity/management/commands/importpath.py | from __future__ import unicode_literals
from optparse import make_option
import os
from csv import reader
from codecs import BOM_UTF8
import pickle
from django.utils.six import string_types, PY3
from django.core.management.base import BaseCommand, CommandError
from ...models import Path
class Command(BaseCommand):
help = 'Encode txt files in ascii format'
def add_arguments(self, parser):
parser.add_argument('--input', '-i', help='input file as pickle')
def handle(self, *args, **options):
i = options['input']
if not os.path.isfile(i):
raise CommandError
trips = pickle.load(open(i, "rb"))
print(len(trips))
for k, path in trips.items():
trip_id = k[0]
stop_id = k[1]
try:
_, created = Path.objects.get_or_create(
trip_id = int(trip_id),
stop_id = int(stop_id),
path = str(path)
)
pass
except Exception as e:
self.stdout.write("Error with row {} {} : {}".format(k, path, e))
self.stdout.write("Done") | from __future__ import unicode_literals
from optparse import make_option
import os
from csv import reader
from codecs import BOM_UTF8
import pickle
from django.utils.six import string_types, PY3
from django.core.management.base import BaseCommand, CommandError
from ...models import Path
class Command(BaseCommand):
help = 'Encode txt files in ascii format'
def add_arguments(self, parser):
parser.add_argument('--input', '-i', help='input file as pickle')
def handle(self, *args, **options):
i = options['input']
if not os.path.isfile(i):
raise CommandError
trips = pickle.load(open(i, "rb"))
print(len(trips))
i = 0
for k, path in trips.items():
trip_id = k[0]
stop_id = k[1]
if i%1000==0: print(i)
try:
_, created = Path.objects.get_or_create(
trip_id = int(trip_id),
stop_id = int(stop_id),
path = str(path)
)
pass
except Exception as e:
self.stdout.write("Error with row {} {} : {}".format(k, path, e))
i = i+1
self.stdout.write("Done") | Update import path method to reflect behaviour | Update import path method to reflect behaviour
| Python | apache-2.0 | tOverney/ADA-Project,tOverney/ADA-Project,tOverney/ADA-Project |
24d7f9f05e4d597358b62a50d2d0f5fad6a61c63 | package_name/__meta__.py | package_name/__meta__.py | name = "package-name" # See https://www.python.org/dev/peps/pep-0008/
path = name.lower().replace("-", "_").replace(" ", "_")
version = "0.1.dev0" # https://python.org/dev/peps/pep-0440 https://semver.org
author = "Author Name"
author_email = ""
description = "" # One-liner
url = "" # your project homepage
license = "MIT" # See https://choosealicense.com
| # `name` is the name of the package as used for `pip install package`
name = "package-name"
# `path` is the name of the package for `import package`
path = name.lower().replace("-", "_").replace(" ", "_")
# Your version number should follow https://python.org/dev/peps/pep-0440 and
# https://semver.org
version = "0.1.dev0"
author = "Author Name"
author_email = ""
description = "" # One-liner
url = "" # your project homepage
license = "MIT" # See https://choosealicense.com
| Clarify comments on what name and path are | DOC: Clarify comments on what name and path are
| Python | mit | scottclowe/python-continuous-integration,scottclowe/python-ci,scottclowe/python-ci,scottclowe/python-continuous-integration |
1869b79d49419799cecf1f5e19eb0aa3987e215b | tests/test_vector2_scalar_multiplication.py | tests/test_vector2_scalar_multiplication.py | import pytest # type: ignore
from ppb_vector import Vector2
@pytest.mark.parametrize("x, y, expected", [
(Vector2(6, 1), 0, Vector2(0, 0)),
(Vector2(6, 1), 2, Vector2(12, 2)),
(Vector2(0, 0), 3, Vector2(0, 0)),
(Vector2(-1.5, 2.4), -2, Vector2(3.0, -4.8)),
(Vector2(1, 2), 0.1, Vector2(0.1, 0.2))
])
def test_scalar_multiplication(x, y, expected):
assert x * y == expected
| import pytest # type: ignore
from hypothesis import given
from hypothesis.strategies import floats
from utils import vectors
from ppb_vector import Vector2
@pytest.mark.parametrize("x, y, expected", [
(Vector2(6, 1), 0, Vector2(0, 0)),
(Vector2(6, 1), 2, Vector2(12, 2)),
(Vector2(0, 0), 3, Vector2(0, 0)),
(Vector2(-1.5, 2.4), -2, Vector2(3.0, -4.8)),
(Vector2(1, 2), 0.1, Vector2(0.1, 0.2))
])
def test_scalar_multiplication(x, y, expected):
assert x * y == expected
@given(
x=floats(min_value=-1e75, max_value=1e75),
y=floats(min_value=-1e75, max_value=1e75),
v=vectors(max_magnitude=1e150)
)
def test_scalar_associative(x: float, y: float, v: Vector2):
left = (x * y) * v
right = x * (y * v)
assert left.isclose(right)
| Add a test of the associativity of scalar multiplication | Add a test of the associativity of scalar multiplication
| Python | artistic-2.0 | ppb/ppb-vector,ppb/ppb-vector |
6b14c9e5683d41ca9d8b9138c25af7526c83d1e4 | test/integration/ggrc/converters/test_import_delete.py | test/integration/ggrc/converters/test_import_delete.py | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from ggrc.converters import errors
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
response_data_dry = self.import_file(filename, dry_run=True)
response_data = self.import_file(filename)
self.assertEqual(response_data_dry, response_data)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
| # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from integration.ggrc import TestCase
class TestBasicCsvImport(TestCase):
def setUp(self):
TestCase.setUp(self)
self.client.get("/login")
def test_policy_basic_import(self):
filename = "ca_setup_for_deletion.csv"
self.import_file(filename)
filename = "ca_deletion.csv"
response_data = self.import_file(filename)
self.assertEqual(response_data[0]["deleted"], 2)
self.assertEqual(response_data[0]["ignored"], 0)
| Optimize basic delete import tests | Optimize basic delete import tests
The dry-run check is now automatically performed on each import and we
do not need to duplicate the work in the delete test.
| Python | apache-2.0 | selahssea/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core |
e4079d7cdeb59a3cac129813b7bb14a6639ea9db | plugins/Webcam_plugin.py | plugins/Webcam_plugin.py | info = {
'id': 'webcam',
'name': 'Webcam',
'description': 'Generic webcam driver',
'module name': 'Webcam',
'class name': 'Webcam',
'author': 'Philip Chimento',
'copyright year': '2011',
}
| info = {
'id': 'webcam',
'name': 'OpenCV',
'description': 'Video camera interfacing through OpenCV',
'module name': 'Webcam',
'class name': 'Webcam',
'author': 'Philip Chimento',
'copyright year': '2011',
}
| Rename 'webcam' plugin to OpenCV | Rename 'webcam' plugin to OpenCV
| Python | mit | ptomato/Beams |
6fb30db07457fb231827cfa4c8215f8ff107cb74 | tests/testapp/tests/utils.py | tests/testapp/tests/utils.py | # coding: utf-8
import datetime
from django.core.management import CommandError
from django.core.management.color import no_style
from django.core.management.sql import sql_delete, sql_all
from django.db import connections, transaction, DEFAULT_DB_ALIAS
import elephantblog.models
def mock_datetime():
class MockDatetime(datetime.datetime):
@classmethod
def now(cls):
return datetime.datetime(2012, 6, 1)
return MockDatetime
def mock_date():
class MockDate(datetime.date):
@classmethod
def today(cls):
return datetime.date(2012, 6, 1)
return MockDate
def reset_db():
using = DEFAULT_DB_ALIAS
connection = connections[using]
sql_list = sql_delete(elephantblog.models, no_style(), connection)
sql_list += sql_all(elephantblog.models, no_style(), connection)
try:
cursor = connection.cursor()
for sql in sql_list:
cursor.execute(sql)
except Exception, e:
transaction.rollback_unless_managed()
raise CommandError("Error: database couldn't be reset: %s" % e)
else:
transaction.commit_unless_managed()
| # coding: utf-8
import datetime
from django.core.management import CommandError
from django.core.management.color import no_style
from django.core.management.sql import sql_delete, sql_all
from django.db import connections, transaction, DEFAULT_DB_ALIAS
import elephantblog.models
def mock_datetime():
class MockDatetime(datetime.datetime):
@classmethod
def now(cls):
return datetime.datetime(2012, 6, 1)
return MockDatetime
def mock_date():
class MockDate(datetime.date):
@classmethod
def today(cls):
return datetime.date(2012, 6, 1)
return MockDate
def reset_db():
using = DEFAULT_DB_ALIAS
connection = connections[using]
sql_list = sql_delete(elephantblog.models, no_style(), connection)
sql_list += sql_all(elephantblog.models, no_style(), connection)
try:
cursor = connection.cursor()
for sql in sql_list:
cursor.execute(sql)
except Exception as e:
transaction.rollback_unless_managed()
raise CommandError("Error: database couldn't be reset: %s" % e)
else:
transaction.commit_unless_managed()
| Fix an old-style except: statement | Fix an old-style except: statement
| Python | bsd-3-clause | feincms/feincms-elephantblog,sbaechler/feincms-elephantblog,michaelkuty/feincms-elephantblog,joshuajonah/feincms-elephantblog,sbaechler/feincms-elephantblog,joshuajonah/feincms-elephantblog,matthiask/feincms-elephantblog,michaelkuty/feincms-elephantblog,sbaechler/feincms-elephantblog,joshuajonah/feincms-elephantblog,michaelkuty/feincms-elephantblog,matthiask/feincms-elephantblog,matthiask/feincms-elephantblog,feincms/feincms-elephantblog |
df6642256806e0a501e83c06e64b35f187efaf60 | rally/benchmark/scenarios/authenticate/authenticate.py | rally/benchmark/scenarios/authenticate/authenticate.py | # Copyright 2014 Red Hat, Inc. <http://www.redhat.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.benchmark.scenarios import base
from rally import osclients
class Authenticate(base.Scenario):
"""This class should contain authentication mechanism for different
types of clients like Keystone.
"""
def keystone(self, **kwargs):
keystone_endpoint = self.clients("endpoint")
cl = osclients.Clients(keystone_endpoint)
cl.get_keystone_client()
| # Copyright 2014 Red Hat, Inc. <http://www.redhat.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.benchmark.scenarios import base
class Authenticate(base.Scenario):
"""This class should contain authentication mechanism for different
types of clients like Keystone.
"""
def keystone(self, **kwargs):
self.clients("keystone")
| Fix for Authentication scenario to correctly use self.clients | Fix for Authentication scenario to correctly use self.clients
Scenario has recently been refactored, self.clients in Scenario
now takes the name of the CLI client. During the refactoring,
the Authenticate scenario was not correctly updated, which
causes the authentication scenario to fail. This patch fixes
that.
Change-Id: I546c0846e00a5285f0d47bc80b6304a53cc566ff
Closes-Bug: #1291386
| Python | apache-2.0 | pandeyop/rally,go-bears/rally,vefimova/rally,aplanas/rally,group-policy/rally,amit0701/rally,shdowofdeath/rally,ytsarev/rally,go-bears/rally,vefimova/rally,group-policy/rally,shdowofdeath/rally,openstack/rally,gluke77/rally,ytsarev/rally,redhat-openstack/rally,varunarya10/rally,amit0701/rally,gluke77/rally,vganapath/rally,gluke77/rally,aforalee/RRally,amit0701/rally,gluke77/rally,afaheem88/rally,eayunstack/rally,vponomaryov/rally,cernops/rally,vganapath/rally,aforalee/RRally,openstack/rally,aplanas/rally,vponomaryov/rally,paboldin/rally,redhat-openstack/rally,cernops/rally,vganapath/rally,eonpatapon/rally,paboldin/rally,yeming233/rally,pyKun/rally,eayunstack/rally,openstack/rally,eonpatapon/rally,vganapath/rally,paboldin/rally,eayunstack/rally,group-policy/rally,yeming233/rally,varunarya10/rally,openstack/rally,pyKun/rally,pandeyop/rally,afaheem88/rally |
Subsets and Splits