commit: stringlengths (40 to 40)
old_file: stringlengths (4 to 264)
new_file: stringlengths (4 to 264)
old_contents: stringlengths (0 to 3.26k)
new_contents: stringlengths (1 to 4.43k)
subject: stringlengths (15 to 624)
message: stringlengths (15 to 4.7k)
lang: stringclasses (3 values)
license: stringclasses (13 values)
repos: stringlengths (5 to 91.5k)
921421e4d9e2d536596980e14286db5faa83dd5c
egpackager/cli.py
egpackager/cli.py
#!/usr/bin/env python import click import sys from egpackager.datasources import GspreadDataSource @click.group() def cli(): ''' ''' pass @cli.command() def register(): click.echo(click.style('Initialized the database', fg='green')) @cli.command() def list(): click.echo(click.style('Dropped the database', fg='red')) if __name__ == '__main__': sys.exit(cli())
#!/usr/bin/env python import click import sys from egpackager.registry import RegistryManager @click.group() @click.pass_context def cli(ctx): ''' ''' ctx.obj = {} ctx.obj['MANAGER'] = RegistryManager() @cli.command() @click.pass_context @click.option('--type', type=click.Choice(['gspread']), help='type of data source') @click.option('--uri', default='', help='URI to the data source') @click.option('--credentials', default='', help='path to Google Drive API credentials JSON file') @click.argument('raster', nargs=1) def create_metadata(ctx, uri, type, credentials, raster): if type == 'gspread': try: if uri == '': raise click.ClickException('For Google spreadsheets, an URI must be provided') elif credentials == '': raise click.ClickException('For Google spreadsheets, a path to Google Drive API credentials JSON file must be provided') else: ctx.obj['MANAGER'].add_gpsread_datasource(uri, credentials) except click.ClickException as e: e.show() except FileNotFoundError as e: click.echo(click.style('File {0} not found'.format(credentials), fg='red')) @cli.command() def list(): click.echo(click.style('Dropped the database', fg='red')) if __name__ == '__main__': cli()
Add basic options for the CLI
Add basic options for the CLI
Python
mit
VUEG/egpackager
bd2d1869894b30eb83eb11ec6e9814e7ab2d4168
panda/api/activity_log.py
panda/api/activity_log.py
#!/usr/bin/env python from tastypie import fields from tastypie.authorization import DjangoAuthorization from panda.api.utils import PandaApiKeyAuthentication, PandaModelResource, PandaSerializer from panda.models import ActivityLog class ActivityLogResource(PandaModelResource): """ API resource for DataUploads. """ from panda.api.users import UserResource creator = fields.ForeignKey(UserResource, 'user', full=True) class Meta: queryset = ActivityLog.objects.all() resource_name = 'activity_log' allowed_methods = ['get', 'post'] authentication = PandaApiKeyAuthentication() authorization = DjangoAuthorization() serializer = PandaSerializer() def obj_create(self, bundle, request=None, **kwargs): """ Create an activity log for the accessing user. """ bundle = super(ActivityLogResource, self).obj_create(bundle, request=request, user=request.user, **kwargs) return bundle
#!/usr/bin/env python from tastypie import fields from tastypie.authorization import DjangoAuthorization from tastypie.exceptions import ImmediateHttpResponse from tastypie.http import HttpConflict from panda.api.utils import PandaApiKeyAuthentication, PandaModelResource, PandaSerializer from django.db import IntegrityError from panda.models import ActivityLog class ActivityLogResource(PandaModelResource): """ API resource for DataUploads. """ from panda.api.users import UserResource creator = fields.ForeignKey(UserResource, 'user', full=True) class Meta: queryset = ActivityLog.objects.all() resource_name = 'activity_log' allowed_methods = ['get', 'post'] authentication = PandaApiKeyAuthentication() authorization = DjangoAuthorization() serializer = PandaSerializer() def obj_create(self, bundle, request=None, **kwargs): """ Create an activity log for the accessing user. """ try: bundle = super(ActivityLogResource, self).obj_create(bundle, request=request, user=request.user, **kwargs) except IntegrityError: raise ImmediateHttpResponse(response=HttpConflict('Activity has already been recorded.')) return bundle
Return 409 for duplicate activity logging.
Return 409 for duplicate activity logging.
Python
mit
ibrahimcesar/panda,PalmBeachPost/panda,ibrahimcesar/panda,NUKnightLab/panda,pandaproject/panda,datadesk/panda,newsapps/panda,ibrahimcesar/panda,newsapps/panda,pandaproject/panda,PalmBeachPost/panda,PalmBeachPost/panda,NUKnightLab/panda,pandaproject/panda,pandaproject/panda,ibrahimcesar/panda,ibrahimcesar/panda,PalmBeachPost/panda,datadesk/panda,datadesk/panda,NUKnightLab/panda,PalmBeachPost/panda,datadesk/panda,newsapps/panda,NUKnightLab/panda,newsapps/panda,pandaproject/panda,datadesk/panda
fb69cb186b4c82ae5f64551dc65a3ed948650b5e
voteswap/urls.py
voteswap/urls.py
"""voteswap URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include from django.conf.urls import url from django.contrib import admin from django.contrib.auth.views import logout from voteswap.views import index from voteswap.views import landing_page from voteswap.views import signup urlpatterns = [ url(r'^admin/', admin.site.urls), url('', include('social.apps.django_app.urls', namespace='social')), url('^home/$', index, name='index'), url('^$', landing_page, name='landing_page'), url('^logout/$', logout, name='logout'), url('^user/', include('users.urls', namespace='users')), url('^signup/$', signup, name='signup'), ]
"""voteswap URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.9/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import include from django.conf.urls import url from django.contrib import admin from django.contrib.auth.views import logout from voteswap.views import index from voteswap.views import landing_page urlpatterns = [ url(r'^admin/', admin.site.urls), url('', include('social.apps.django_app.urls', namespace='social')), url('^home/$', index, name='index'), url('^$', landing_page, name='landing_page'), url('^logout/$', logout, name='logout'), url('^user/', include('users.urls', namespace='users')), url('^signup/$', landing_page, name='signup'), ]
Make signup just point to landing page
Make signup just point to landing page
Python
mit
sbuss/voteswap,sbuss/voteswap,sbuss/voteswap,sbuss/voteswap
6602471252a7c8e3dd3ab94db54e45fccfc6e62f
yarn_api_client/__init__.py
yarn_api_client/__init__.py
# -*- coding: utf-8 -*- __version__ = '0.3.6' __all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager'] from .application_master import ApplicationMaster from .history_server import HistoryServer from .node_manager import NodeManager from .resource_manager import ResourceManager
# -*- coding: utf-8 -*- __version__ = '0.3.7.dev' __all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager'] from .application_master import ApplicationMaster from .history_server import HistoryServer from .node_manager import NodeManager from .resource_manager import ResourceManager
Prepare for next development iteration
Prepare for next development iteration
Python
bsd-3-clause
toidi/hadoop-yarn-api-python-client
8bf5edab5cebb0e713d67c0ec5f866b2d63d537b
wdom/__init__.py
wdom/__init__.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from wdom.log import configure_logger configure_logger()
#!/usr/bin/env python3 # -*- coding: utf-8 -*-
Revert configure logger at initialization
Revert configure logger at initialization
Python
mit
miyakogi/wdom,miyakogi/wdom,miyakogi/wdom
b07964e8b243b151e64af86cb09a37e980f94eb1
vantage/utils.py
vantage/utils.py
import binascii import base64 import click def to_base64(value): value = base64.urlsafe_b64encode(value.encode("utf-8")).decode("utf-8") return f"base64:{value}" def from_base64(value): if value.startswith("base64:"): try: value = base64.urlsafe_b64decode(value[7:]).decode("utf-8") except binascii.Error: pass return value def loquacious(line): try: env = click.get_current_context().obj if env is not None and env.get("VG_VERBOSE"): click.echo(f"VG-LOG: {line}") except RuntimeError: # This happens when there's no active click context so we can't get the # env. In this case we default to not printing the verbose logs. # This situation happens when you're trying to autocomplete pass
import binascii import base64 import click def to_base64(value): value = base64.urlsafe_b64encode(value.encode("utf-8")).decode("utf-8") return f"base64:{value}" def from_base64(value): if value.startswith("base64:"): try: value = base64.urlsafe_b64decode(value[7:]).decode("utf-8") except binascii.Error: pass return value def loquacious(line, env=None): try: env = env or click.get_current_context().obj if env is not None and env.get("VG_VERBOSE"): click.echo(f"VG-LOG: {line}") except RuntimeError: # This happens when there's no active click context so we can't get the # env. In this case we default to not printing the verbose logs. # This situation happens when you're trying to autocomplete pass
Add optional env kwargs to logging method
Add optional env kwargs to logging method
Python
mit
vantage-org/vantage,vantage-org/vantage
7a35cc6aa667b3fd144fabfa24bbf2397cd5c011
vc_zoom/setup.py
vc_zoom/setup.py
# This file is part of the Indico plugins. # Copyright (C) 2020 - 2021 CERN and ENEA # # The Indico plugins are free software; you can redistribute # them and/or modify them under the terms of the MIT License; # see the LICENSE file for more details. from setuptools import setup setup()
# This file is part of the Indico plugins. # Copyright (C) 2020 - 2021 CERN and ENEA # # The Indico plugins are free software; you can redistribute # them and/or modify them under the terms of the MIT License; # see the LICENSE file for more details. from setuptools import setup setup()
Fix inconsistent linebreaks after file headers
Fix inconsistent linebreaks after file headers
Python
mit
indico/indico-plugins,indico/indico-plugins,indico/indico-plugins,indico/indico-plugins
f11cf81bec8c1590aa8cbeb65209b493f96dd766
general/zipUnzip.py
general/zipUnzip.py
import os import zipfile import zipfile try: import zlib mode= zipfile.ZIP_DEFLATED except: mode= zipfile.ZIP_STORED def unzipDir(inputDir): for root, dirs, files in os.walk(inputDir): for f in files: if f.endswith('.zip'): inFile = os.path.join(root, f) print 'Working on', inFile outDir = os.path.join(root, f[:-4]) if not os.path.isdir(outDir): os.mkdir(outDir) print 'Created',outDir else: continue with zipfile.ZipFile(inFile,'r') as z: z.extractall(outDir) print f,'was successful.' def zipDir(inputDir): zipFileName = os.path.join(inputDir,'zipfile.zip') print zipFileName zip= zipfile.ZipFile(zipFileName, 'w', mode) for root, dirs, files in os.walk(inputDir): for f in files: if f.endswith('.xml'): fileName = os.path.join(root,f) zip.write(fileName, arcname=f) print 'ZIP CREATED' zip.close() inputDir = r'C:\Users\mart3565\Desktop\test' #inputDir = args.input_path #unzipDir(inputDir) zipDir(inputDir)
import os import zipfile import zipfile try: import zlib mode= zipfile.ZIP_DEFLATED except: mode= zipfile.ZIP_STORED def unzipDir(inputDir): for root, dirs, files in os.walk(inputDir): for f in files: if f.endswith('.zip'): inFile = os.path.join(root, f) print 'Working on', inFile outDir = os.path.join(root, f[:-4]) if not os.path.isdir(outDir): os.mkdir(outDir) print 'Created',outDir else: continue with zipfile.ZipFile(inFile,'r') as z: z.extractall(outDir) print f,'was successful.' print 'Done.' def zipDir(inputDir): zipFileName = os.path.join(inputDir,'zipfile.zip') print zipFileName zip= zipfile.ZipFile(zipFileName, 'w', mode) for root, dirs, files in os.walk(inputDir): for f in files: if f.endswith('.xml'): fileName = os.path.join(root,f) zip.write(fileName, arcname=f) print 'ZIP CREATED' zip.close() print 'Done.' inputDir = r'C:\Users\mart3565\Downloads\hennepin11102014' #inputDir = args.input_path unzipDir(inputDir) #zipDir(inputDir)
Create output dir if does not exist.
Create output dir if does not exist.
Python
mit
borchert/metadata-tools
f9a4ad56230de8c057e259b14fb14a309b2de0c0
homedisplay/control_milight/management/commands/run_timed.py
homedisplay/control_milight/management/commands/run_timed.py
from control_milight.models import LightAutomation from control_milight.views import update_lightstate from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.utils.timezone import now from ledcontroller import LedController import datetime import redis class Command(BaseCommand): args = '' help = 'Run timed transitions' def handle(self, *args, **options): redis_instance = redis.StrictRedis() led = LedController(settings.MILIGHT_IP) time = datetime.datetime.now() hour = datetime.time(time.hour, time.minute) for item in LightAutomation.objects.filter(running=True): if not item.is_running(time): continue percent_done = item.percent_done(time) if item.action == "evening": print "Setting evening brightness to", ((1-percent_done)*100) led.set_brightness(int((1-percent_done)*100)) elif item.action == "morning": print "Setting morning brightness to", ((percent_done)*100) led.set_brightness(int((percent_done)*100)) # update_lightstate(transition.group.group_id, transition.to_brightness, transition.to_color)
from control_milight.models import LightAutomation from control_milight.views import update_lightstate from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.utils.timezone import now from ledcontroller import LedController import datetime import redis class Command(BaseCommand): args = '' help = 'Run timed transitions' def handle(self, *args, **options): redis_instance = redis.StrictRedis() led = LedController(settings.MILIGHT_IP) time = datetime.datetime.now() hour = datetime.time(time.hour, time.minute) for item in LightAutomation.objects.filter(running=True): if not item.is_running(time): continue percent_done = item.percent_done(time) if item.action == "evening": print "Setting evening brightness to", ((1-percent_done)*100) led.set_brightness(int((1-percent_done)*100)) elif item.action == "morning": print "Setting morning brightness to", ((percent_done)*100) led.set_brightness(int((percent_done)*100)) led.white() # update_lightstate(transition.group.group_id, transition.to_brightness, transition.to_color)
Change morning transition color to white
Change morning transition color to white Fixes #25
Python
bsd-3-clause
ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display
50784afbb0c95f435c1a25e0840438e406349bbb
plyer/facades/uniqueid.py
plyer/facades/uniqueid.py
'''UniqueID facade. Returns the following depending on the platform: * **Android**: Android ID * **OS X**: Serial number of the device * **Linux**: Serial number using lshw * **Windows**: MachineGUID from regkey Simple Example -------------- To get the unique ID:: >>> from plyer import uniqueid >>> uniqueid.id '1b1a7a4958e2a845' .. versionadded:: 1.2.0 .. versionchanged:: 1.2.4 On Android returns Android ID instead of IMEI. ''' class UniqueID(object): ''' UniqueID facade. ''' @property def id(self): ''' Property that returns the unique id of the platform. ''' return self.get_uid() def get_uid(self): return self._get_uid() # private def _get_uid(self, **kwargs): raise NotImplementedError()
'''UniqueID facade. Returns the following depending on the platform: * **Android**: Android ID * **OS X**: Serial number of the device * **Linux**: Serial number using lshw * **Windows**: MachineGUID from regkey * **iOS**: UUID Simple Example -------------- To get the unique ID:: >>> from plyer import uniqueid >>> uniqueid.id '1b1a7a4958e2a845' .. versionadded:: 1.2.0 .. versionchanged:: 1.2.4 On Android returns Android ID instead of IMEI. ''' class UniqueID(object): ''' UniqueID facade. ''' @property def id(self): ''' Property that returns the unique id of the platform. ''' return self.get_uid() def get_uid(self): return self._get_uid() # private def _get_uid(self, **kwargs): raise NotImplementedError()
Add description for iOS in facade
Add description for iOS in facade
Python
mit
kivy/plyer,kived/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer,kived/plyer,kivy/plyer,KeyWeeUsr/plyer
96fac3babb22386fd94eccc86abb5bd15c917c53
rpyc/core/__init__.py
rpyc/core/__init__.py
from rpyc.core.stream import SocketStream, PipeStream from rpyc.core.channel import Channel from rpyc.core.protocol import Connection from rpyc.core.netref import BaseNetref from rpyc.core.async import AsyncResult, AsyncResultTimeout from rpyc.core.service import Service, VoidService, SlaveService from rpyc.core.vinegar import GenericException, install_rpyc_excepthook # for .NET import platform if platform.system() == "cli": import clr # Add Reference to IronPython zlib (required for channel compression) # grab it from http://bitbucket.org/jdhardy/ironpythonzlib clr.AddReference("IronPython.Zlib") install_rpyc_excepthook()
from rpyc.core.stream import SocketStream, PipeStream from rpyc.core.channel import Channel from rpyc.core.protocol import Connection from rpyc.core.netref import BaseNetref from rpyc.core.async import AsyncResult, AsyncResultTimeout from rpyc.core.service import Service, VoidService, SlaveService from rpyc.core.vinegar import GenericException, install_rpyc_excepthook install_rpyc_excepthook()
Remove Code specific for IronPython - useless with 2.7
Remove Code specific for IronPython - useless with 2.7
Python
mit
glpatcern/rpyc,sponce/rpyc,pyq881120/rpyc,pombredanne/rpyc,geromueller/rpyc,kwlzn/rpyc,siemens/rpyc,eplaut/rpyc,gleon99/rpyc
82cb6d190ce1e805914cc791518c97e063ecdc96
tests/test_individual.py
tests/test_individual.py
import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) print(myPath) sys.path.insert(0, myPath + '/../SATSolver') from unittest import TestCase from individual import Individual from BitVector import BitVector from bitarray import bitarray class TestIndividual(TestCase): """ Testing class for Individual. """ def test_get(self): ind = Individual(9) ind.data = bitarray("011010100") self.assertEqual(ind.get(5), 1) self.assertEqual(ind.get(1), 0) self.assertEqual(ind.get(10), None) def test_set(self): ind = Individual(9) ind.data = bitarray("011010100") ind.set(2, 1) self.assertEqual(ind.get(2), 1) ind.set(7, 0) self.assertEqual(ind.get(7), 0) ind.set(6, 1) self.assertEqual(ind.get(6), 1) def test_flip(self): ind = Individual(9) ind.data = bitarray("011010100") ind.flip(1) self.assertEqual(ind.get(1), 1) ind.flip(8) self.assertEqual(ind.get(8), 1) ind.flip(4) self.assertEqual(ind.get(4), 1)
import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) print(myPath) sys.path.insert(0, myPath + '/../SATSolver') from unittest import TestCase from individual import Individual from bitarray import bitarray class TestIndividual(TestCase): """ Testing class for Individual. """ def test_get(self): ind = Individual(9) ind.data = bitarray("011010100") self.assertEqual(ind.get(5), 1) self.assertEqual(ind.get(1), 0) self.assertEqual(ind.get(10), None) def test_set(self): ind = Individual(9) ind.data = bitarray("011010100") ind.set(2, 1) self.assertEqual(ind.get(2), 1) ind.set(7, 0) self.assertEqual(ind.get(7), 0) ind.set(6, 1) self.assertEqual(ind.get(6), 1) def test_flip(self): ind = Individual(9) ind.data = bitarray("011010100") ind.flip(1) self.assertEqual(ind.get(1), 1) ind.flip(8) self.assertEqual(ind.get(8), 1) ind.flip(4) self.assertEqual(ind.get(4), 1)
Remove BitVector import - Build fails
Remove BitVector import - Build fails
Python
mit
Imperium-Software/resolver,Imperium-Software/resolver,Imperium-Software/resolver,Imperium-Software/resolver
b2764b9ada2ca3bec548ceb82e71697f7515f14f
citrination_client/__init__.py
citrination_client/__init__.py
import os import re from citrination_client.base import * from citrination_client.search import * from citrination_client.data import * from citrination_client.models import * from citrination_client.views.descriptors import * from .client import CitrinationClient from pkg_resources import get_distribution, DistributionNotFound def __get_version(): """ Returns the version of this package, whether running from source or install :return: The version of this package """ try: # Try local first, if missing setup.py, then use pkg info here = os.path.abspath(os.path.dirname(__file__)) print("here:"+here) with open(os.path.join(here, "../setup.py")) as fp: version_file = fp.read() version_match = re.search(r"version=['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) except IOError: pass try: _dist = get_distribution('citrination_client') # Normalize case for Windows systems dist_loc = os.path.normcase(_dist.location) here = os.path.normcase(__file__) if not here.startswith(os.path.join(dist_loc, 'citrination_client')): # not installed, but there is another version that *is* raise DistributionNotFound except DistributionNotFound: raise RuntimeError("Unable to find version string.") else: return _dist.version __version__ = __get_version()
import os import re from citrination_client.base import * from citrination_client.search import * from citrination_client.data import * from citrination_client.models import * from citrination_client.views.descriptors import * from .client import CitrinationClient from pkg_resources import get_distribution, DistributionNotFound def __get_version(): """ Returns the version of this package, whether running from source or install :return: The version of this package """ try: # Try local first, if missing setup.py, then use pkg info here = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(here, "../setup.py")) as fp: version_file = fp.read() version_match = re.search(r"version=['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) except IOError: pass try: _dist = get_distribution('citrination_client') # Normalize case for Windows systems dist_loc = os.path.normcase(_dist.location) here = os.path.normcase(__file__) if not here.startswith(os.path.join(dist_loc, 'citrination_client')): # not installed, but there is another version that *is* raise DistributionNotFound except DistributionNotFound: raise RuntimeError("Unable to find version string.") else: return _dist.version __version__ = __get_version()
Remove debug print on getVersion
Remove debug print on getVersion
Python
apache-2.0
CitrineInformatics/python-citrination-client
2f140327c24a8efab5482a975793dddedd0ebfc4
nucleus/wsgi.py
nucleus/wsgi.py
# flake8: noqa """ WSGI config for nucleus project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ """ # newrelic.agent must be imported and initialized first # https://docs.newrelic.com/docs/agents/python-agent/installation/python-agent-advanced-integration#manual-integration import newrelic.agent newrelic.agent.initialize('newrelic.ini') import os os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'nucleus.settings') # NOQA from django.core.handlers.wsgi import WSGIRequest from django.core.wsgi import get_wsgi_application from decouple import config IS_HTTPS = config('HTTPS', default='off', cast=bool) class WSGIHTTPSRequest(WSGIRequest): def _get_scheme(self): if IS_HTTPS: return 'https' return super(WSGIHTTPSRequest, self)._get_scheme() application = get_wsgi_application() application.request_class = WSGIHTTPSRequest if config('SENTRY_DSN', None): from raven.contrib.django.raven_compat.middleware.wsgi import Sentry application = Sentry(application) newrelic_license_key = config('NEW_RELIC_LICENSE_KEY', default=None) if newrelic_license_key: application = newrelic.agent.WSGIApplicationWrapper(application)
# flake8: noqa # newrelic.agent must be imported and initialized first # https://docs.newrelic.com/docs/agents/python-agent/installation/python-agent-advanced-integration#manual-integration import newrelic.agent newrelic.agent.initialize('newrelic.ini') import os os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'nucleus.settings') # NOQA from django.core.handlers.wsgi import WSGIRequest from django.core.wsgi import get_wsgi_application from decouple import config IS_HTTPS = config('HTTPS', default='off', cast=bool) class WSGIHTTPSRequest(WSGIRequest): def _get_scheme(self): if IS_HTTPS: return 'https' return super(WSGIHTTPSRequest, self)._get_scheme() application = get_wsgi_application() application.request_class = WSGIHTTPSRequest if config('SENTRY_DSN', None): from raven.contrib.django.raven_compat.middleware.wsgi import Sentry application = Sentry(application) newrelic_license_key = config('NEW_RELIC_LICENSE_KEY', default=None) if newrelic_license_key: application = newrelic.agent.WSGIApplicationWrapper(application)
Remove old docstring with link to old django docs
Remove old docstring with link to old django docs
Python
mpl-2.0
mozilla/nucleus,mozilla/nucleus,mozilla/nucleus,mozilla/nucleus
18bf9dd5e1e054d0c260959a8379f331940e167f
online_status/__init__.py
online_status/__init__.py
VERSION = (0, 1, 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) if VERSION[3:] == ('alpha', 0): version = '%s pre-alpha' % version else: if VERSION[3] != 'final': version = "%s %s" % (version, VERSION[3]) if VERSION[4] != 0: version = '%s %s' % (version, VERSION[4]) return version
VERSION = (0, 1, 0) def get_version(): version = '%s.%s' % (VERSION[0], VERSION[1]) if VERSION[2]: version = '%s.%s' % (version, VERSION[2]) return version
Fix 'index out of bound' issue
Fix 'index out of bound' issue
Python
unlicense
hovel/django-online-status,hovel/django-online-status
4b0583f132bac50a0208452d38702f4f819f64c8
hindkit/__init__.py
hindkit/__init__.py
from __future__ import division, print_function, unicode_literals __version__ = '0.1.2' from hindkit.constants import paths, linguistics, styles, templates from hindkit.family import Family, Master, Style from hindkit.builder import Builder def confirm_version(required_version): if __version__ != required_version: message = templates.EXIT_MESSAGE.format(required_version, __version__) raise SystemExit(message)
from __future__ import division, print_function, unicode_literals __version__ = '0.1.3' from hindkit.constants import paths, linguistics, styles, templates from hindkit.family import Family, Master, Style from hindkit.builder import Builder def confirm_version(required_version): if __version__ != required_version: message = templates.EXIT_MESSAGE.format(required_version, __version__) raise SystemExit(message)
Change the version number to 0.1.3
Change the version number to 0.1.3
Python
mit
mooniak/hindkit,itfoundry/hindkit,itfoundry/hindkit
6d8f79d32194f5b956785a021ee6fad6093a03f1
insanity/layers.py
insanity/layers.py
import numpy as np import theano import theano.tensor as T from theano.tensor.nnet import conv from theano.tensor.nnet import softmax from theano.tensor import shared_randomstreams from theano.tensor.signal import downsample class Layer(object): def __init__(self, numInputs, numNeurons, activation): self.numInputs = numInputs self.numNeurons = numNeurons self.activation = activation #Initialize weights. self.weights = theano.shared( np.asarray( np.random.normal( loc=0.0, scale=np.sqrt(1.0/self.numNeurons), size=(self.numInputs, self.numNeurons)), dtype=theano.config.floatX), name='weights', borrow=True) #Initialize biases. self.biases = theano.shared( np.asarray( np.random.normal( loc=0.0, scale=1.0, size=(self.numNeurons,)), dtype=theano.config.floatX), name='biases', borrow=True) @property def input(value): #Configure the layer output. self.output = something class FullyConnectedLayer(Layer):
import numpy as np import theano import theano.tensor as T from theano.tensor.nnet import conv from theano.tensor.nnet import softmax from theano.tensor import shared_randomstreams from theano.tensor.signal import downsample class Layer(object): def __init__(self, numInputs, numNeurons, activation, miniBatchSize): self.numInputs = numInputs self.numNeurons = numNeurons self.activation = activation self.miniBatchSize = miniBatchSize #Initialize weights. self.weights = theano.shared( np.asarray( np.random.normal( loc=0.0, scale=np.sqrt(1.0/self.numNeurons), size=(self.numInputs, self.numNeurons)), dtype=theano.config.floatX), name='weights', borrow=True) #Initialize biases. self.biases = theano.shared( np.asarray( np.random.normal( loc=0.0, scale=1.0, size=(self.numNeurons,)), dtype=theano.config.floatX), name='biases', borrow=True) @input.setter def input(self, value): self.input = value #Configure the layer output. self.output = something class FullyConnectedLayer(Layer): @Layer.input.setter def input(self, value): self.input = value #Configure the layer output. self.output = something
Add proper use of property setter.
Add proper use of property setter.
Python
cc0-1.0
cn04/insanity
b064d8dbc4be13c12c1c87491ebcb484ab71ac52
geopy/__init__.py
geopy/__init__.py
""" geopy is a Python 2 and 3 client for several popular geocoding web services. geopy makes it easy for Python developers to locate the coordinates of addresses, cities, countries, and landmarks across the globe using third-party geocoders and other data sources. geopy is tested against CPython (versions 2.7, 3.4, 3.5, 3.6), PyPy, and PyPy3. geopy does not and will not support CPython 2.6. """ from geopy.point import Point from geopy.location import Location from geopy.geocoders import * # pylint: disable=W0401 from geopy.util import __version__
""" geopy is a Python 2 and 3 client for several popular geocoding web services. geopy makes it easy for Python developers to locate the coordinates of addresses, cities, countries, and landmarks across the globe using third-party geocoders and other data sources. geopy is tested against CPython (versions 2.7, 3.4, 3.5, 3.6), PyPy, and PyPy3. geopy does not and will not support CPython 2.6. """ from geopy.location import Location from geopy.point import Point from geopy.util import __version__ from geopy.geocoders import * # noqa # geopy.geocoders.options must not be importable as `geopy.options`, # because that is ambiguous (which options are that). del options # noqa
Fix geocoder.options being also exported as `geopy.options`
Fix geocoder.options being also exported as `geopy.options`
Python
mit
geopy/geopy,jmb/geopy
1a3fb78b32fbb95e3efc0f06ef62690834e820e3
libraries/vytree/__init__.py
libraries/vytree/__init__.py
# vytree.__init__: package init file. # # Copyright (C) 2014 VyOS Development Group <[email protected]> # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 # USA from vytree.node import ( Node, ChildNotFoundError, ChildAlreadyExistsError, ) from vytree.config_node import ConfigNode from vytree.reference_node import ReferenceNode from vytree.reference_tree_loader import ReferenceTreeLoader
# vytree.__init__: package init file. # # Copyright (C) 2014 VyOS Development Group <[email protected]> # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 # USA from vytree.node import ( Node, ChildNotFoundError, ChildAlreadyExistsError, ) from vytree.config_node import ConfigNode
Remove referencetree-related imports from the top level vytree package.
Remove referencetree-related imports from the top level vytree package.
Python
lgpl-2.1
vyos-legacy/vyconfd,vyos-legacy/vyconfd
75fd4aadedb4bcdcfe41f9ae61bf62282ffdadea
test/__init__.py
test/__init__.py
import glob, os.path, sys version = sys.version.split(" ")[0] majorminor = version[0:3] # Add path to hiredis.so load path path = glob.glob("build/lib*-%s/hiredis/*.so" % majorminor)[0] sys.path.insert(0, os.path.dirname(path)) from unittest import * from . import reader def tests(): suite = TestSuite() suite.addTest(makeSuite(reader.ReaderTest)) return suite
import glob, os.path, sys version = sys.version.split(" ")[0] majorminor = version[0:3] # Add path to hiredis.so load path path = glob.glob("build/lib*-%s/hiredis" % majorminor)[0] sys.path.insert(0, path) from unittest import * from . import reader def tests(): suite = TestSuite() suite.addTest(makeSuite(reader.ReaderTest)) return suite
Fix build path detection on SunOS
Fix build path detection on SunOS Inside the hiredis directory there is another directory that contains the shared object. This is specific to the platform so we shouldn't care where the shared object itself is placed.
Python
bsd-3-clause
redis/hiredis-py,charsyam/hiredis-py,badboy/hiredis-py-win,badboy/hiredis-py-win,badboy/hiredis-py-win,redis/hiredis-py,charsyam/hiredis-py
a6e6e6bf18c48638d4c6c7d97f894edd3fc3c1ad
ipython_config.py
ipython_config.py
c.InteractiveShellApp.exec_lines = [] # ipython-autoimport - Automatically import modules c.InteractiveShellApp.exec_lines.append( "try:\n %load_ext ipython_autoimport\nexcept ImportError: pass") # Automatically reload modules c.InteractiveShellApp.exec_lines.append('%load_ext autoreload') c.InteractiveShellApp.exec_lines.append('%autoreload 2')
c.InteractiveShellApp.exec_lines = [] # ipython-autoimport - Automatically import modules c.InteractiveShellApp.exec_lines.append( "try:\n %load_ext ipython_autoimport\nexcept ImportError: pass") # Automatically reload modules c.InteractiveShellApp.exec_lines.append('%load_ext autoreload') c.InteractiveShellApp.exec_lines.append('%autoreload 2') c.TerminalInteractiveShell.editor = 'gvim'
Set default shell editor for ipython to gvim
Set default shell editor for ipython to gvim
Python
mit
brycepg/dotfiles,brycepg/dotfiles
bf694ffdf1fd61f6e108f3076ed975d538af5224
wlauto/common/android/resources.py
wlauto/common/android/resources.py
# Copyright 2014-2015 ARM Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from wlauto.common.resources import FileResource class ReventFile(FileResource): name = 'revent' def __init__(self, owner, stage): super(ReventFile, self).__init__(owner) self.stage = stage class JarFile(FileResource): name = 'jar' class ApkFile(FileResource): name = 'apk' def __init__(self, owner, platform=None, uiauto=False): super(ApkFile, self).__init__(owner) self.platform = platform self.uiauto = uiauto def __str__(self): apk_type = 'uiautomator ' if self.uiauto else '' return '<{}\'s {} {}APK>'.format(self.owner, self.platform, apk_type) class uiautoApkFile(FileResource): name = 'uiautoapk' def __init__(self, owner, platform=None): super(uiautoApkFile, self).__init__(owner) self.platform = platform def __str__(self): return '<{}\'s {} UiAuto APK>'.format(self.owner, self.platform)
# Copyright 2014-2015 ARM Limited # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from wlauto.common.resources import FileResource class ReventFile(FileResource): name = 'revent' def __init__(self, owner, stage): super(ReventFile, self).__init__(owner) self.stage = stage class JarFile(FileResource): name = 'jar' class ApkFile(FileResource): name = 'apk' def __init__(self, owner, platform=None, uiauto=False): super(ApkFile, self).__init__(owner) self.platform = platform self.uiauto = uiauto def __str__(self): apk_type = 'uiautomator ' if self.uiauto else '' return '<{}\'s {} {}APK>'.format(self.owner, self.platform, apk_type)
Revert "AndroidResource: Add a UiautoApk resource type."
Revert "AndroidResource: Add a UiautoApk resource type." This reverts commit bc6af25366aacf394f96b5a93008109904a89e93.
Python
apache-2.0
bjackman/workload-automation,bjackman/workload-automation,jimboatarm/workload-automation,jimboatarm/workload-automation,jimboatarm/workload-automation,jimboatarm/workload-automation,bjackman/workload-automation,bjackman/workload-automation,jimboatarm/workload-automation,bjackman/workload-automation,jimboatarm/workload-automation,bjackman/workload-automation
78ca9c6b8393b1b4f4bddf41febc87696796d28a
openpassword/openssl_utils.py
openpassword/openssl_utils.py
from Crypto.Hash import MD5 def derive_openssl_key(key, salt, hash=MD5): key = key[0:-16] openssl_key = bytes() prev = bytes() while len(openssl_key) < 32: prev = hash.new(prev + key + salt).digest() openssl_key += prev return openssl_key
from Crypto.Hash import MD5 def derive_openssl_key(key, salt, hashing_function=MD5): key = key[0:-16] openssl_key = bytes() prev = bytes() while len(openssl_key) < 32: prev = hashing_function.new(prev + key + salt).digest() openssl_key += prev return openssl_key
Rename hash variable to prevent collision with native method
Rename hash variable to prevent collision with native method
Python
mit
openpassword/blimey,openpassword/blimey
2c6dd79d419699e61970719dbb369aefe359ea6e
tests/test_db.py
tests/test_db.py
from pypinfo import db CREDS_FILE = '/path/to/creds_file.json' def test_get_credentials(tmp_path): # Arrange db.DB_FILE = str(tmp_path / 'db.json') # Mock # Assert assert db.get_credentials() is None def test_set_credentials(tmp_path): # Arrange db.DB_FILE = str(tmp_path / 'db.json') # Mock # Act db.set_credentials(CREDS_FILE) def test_set_credentials_twice(tmp_path): # Arrange db.DB_FILE = str(tmp_path / 'db.json') # Mock # Act db.set_credentials(CREDS_FILE) db.set_credentials(CREDS_FILE) def test_round_trip(tmp_path): # Arrange db.DB_FILE = str(tmp_path / 'db.json') # Mock # Act db.set_credentials(CREDS_FILE) # Assert assert db.get_credentials() == CREDS_FILE def test_get_credentials_table(tmp_path): db.DB_FILE = str(tmp_path / 'db.json') with db.get_credentials_table() as table: assert not table._storage._storage._handle.closed with db.get_credentials_table(table) as table2: assert table2 is table assert not table._storage._storage._handle.closed assert table._storage._storage._handle.closed
from pypinfo import db CREDS_FILE = '/path/to/creds_file.json' def test_get_credentials(tmp_path): # Arrange db.DB_FILE = str(tmp_path / 'db.json') # Mock # Assert assert db.get_credentials() is None def test_set_credentials(tmp_path): # Arrange db.DB_FILE = str(tmp_path / 'db.json') # Mock # Act db.set_credentials(CREDS_FILE) def test_set_credentials_twice(tmp_path): # Arrange db.DB_FILE = str(tmp_path / 'db.json') # Mock # Act db.set_credentials(CREDS_FILE) db.set_credentials(CREDS_FILE) def test_round_trip(tmp_path): # Arrange db.DB_FILE = str(tmp_path / 'db.json') # Mock # Act db.set_credentials(CREDS_FILE) # Assert assert db.get_credentials() == CREDS_FILE def test_get_credentials_table(tmp_path): db.DB_FILE = str(tmp_path / 'db.json') with db.get_credentials_table() as table: assert not table._storage._handle.closed with db.get_credentials_table(table) as table2: assert table2 is table assert not table._storage._handle.closed assert table._storage._handle.closed
Fix tests for updated TinyDB/Tinyrecord
Fix tests for updated TinyDB/Tinyrecord
Python
mit
ofek/pypinfo
3f909cdfba61719dfa0a860aeba1e418fe740f33
indra/__init__.py
indra/__init__.py
from __future__ import print_function, unicode_literals import logging import os import sys __version__ = '1.10.0' __all__ = ['assemblers', 'belief', 'databases', 'explanation', 'literature', 'mechlinker', 'preassembler', 'sources', 'tools', 'util'] logging.basicConfig(format='%(levelname)s: [%(asctime)s] indra/%(name)s - %(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') # Suppress INFO-level logging from some dependencies logging.getLogger('requests').setLevel(logging.ERROR) logging.getLogger('urllib3').setLevel(logging.ERROR) logging.getLogger('rdflib').setLevel(logging.ERROR) logging.getLogger('boto3').setLevel(logging.CRITICAL) logging.getLogger('botocore').setLevel(logging.CRITICAL) # This is specifically to suppress lib2to3 logging from networkx import lib2to3.pgen2.driver class Lib2to3LoggingModuleShim(object): def getLogger(self): return logging.getLogger('lib2to3') lib2to3.pgen2.driver.logging = Lib2to3LoggingModuleShim() logging.getLogger('lib2to3').setLevel(logging.ERROR) logger = logging.getLogger('indra') from .config import get_config, has_config
from __future__ import print_function, unicode_literals import logging import os import sys __version__ = '1.10.0' __all__ = ['assemblers', 'belief', 'databases', 'explanation', 'literature', 'mechlinker', 'preassembler', 'sources', 'tools', 'util'] logging.basicConfig(format=('%(levelname)s: [%(asctime)s] %(name)s' ' - %(message)s'), level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') # Suppress INFO-level logging from some dependencies logging.getLogger('requests').setLevel(logging.ERROR) logging.getLogger('urllib3').setLevel(logging.ERROR) logging.getLogger('rdflib').setLevel(logging.ERROR) logging.getLogger('boto3').setLevel(logging.CRITICAL) logging.getLogger('botocore').setLevel(logging.CRITICAL) # This is specifically to suppress lib2to3 logging from networkx import lib2to3.pgen2.driver class Lib2to3LoggingModuleShim(object): def getLogger(self): return logging.getLogger('lib2to3') lib2to3.pgen2.driver.logging = Lib2to3LoggingModuleShim() logging.getLogger('lib2to3').setLevel(logging.ERROR) logger = logging.getLogger('indra') from .config import get_config, has_config
Remove indra prefix from logger
Remove indra prefix from logger
Python
bsd-2-clause
bgyori/indra,bgyori/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/indra,bgyori/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/indra,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra
a4dc298a487fcf6f1975346ab421cca705c025dc
storage/test_driver.py
storage/test_driver.py
#!/usr/bin/env python from storage import Storage NEW_REPORT = {'foo': 'bar', 'boo': 'baz'} def main(): db_store = Storage.get_storage() for key, value in db_store.__dict__.iteritems(): print '%s: %s' % (key, value) print '\n' # report_id = db_store.store(NEW_REPORT) report_id = 'AVM0dGOF6iQbRONBw9yB' print db_store.get_report(report_id) print db_store.get_report(3) # db_store.delete(report_id) # print db_store.delete(2) if __name__ == '__main__': main()
#!/usr/bin/env python from storage import Storage NEW_REPORT = {'foo': 'bar', 'boo': 'baz'} REPORTS = [ {'report_id': 1, 'report': {"/tmp/example": {"MD5": "53f43f9591749b8cae536ff13e48d6de", "SHA256": "815d310bdbc8684c1163b62f583dbaffb2df74b9104e2aadabf8f8491bafab66", "libmagic": "ASCII text"}}}, {'report_id': 2, 'report': {"/opt/other_file": {"MD5": "96b47da202ddba8d7a6b91fecbf89a41", "SHA256": "26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f", "libmagic": "a /bin/python script text executable"}}}, ] def populate_es(): db_store = Storage.get_storage() for report in REPORTS: db_store.store(report) def main(): db_store = Storage.get_storage() for key, value in db_store.__dict__.iteritems(): print '%s: %s' % (key, value) print '\n' # report_id = db_store.store(NEW_REPORT) report_id = 'AVM0dGOF6iQbRONBw9yB' print db_store.get_report(report_id) print db_store.get_report(3) # db_store.delete(report_id) # print db_store.delete(2) if __name__ == '__main__': main()
Add populate es function to test driver
Add populate es function to test driver
Python
mpl-2.0
awest1339/multiscanner,mitre/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,MITRECND/multiscanner,jmlong1027/multiscanner,mitre/multiscanner,mitre/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,MITRECND/multiscanner,jmlong1027/multiscanner
e4f28d43eb62791b9679bc775c7bbbeb5944a1d1
Lib/py_compile.py
Lib/py_compile.py
# Routine to "compile" a .py file to a .pyc file. # This has intimate knowledge of how Python/import.c does it. # By Sjoerd Mullender (I forced him to write it :-). import imp MAGIC = imp.get_magic() def wr_long(f, x): f.write(chr( x & 0xff)) f.write(chr((x >> 8) & 0xff)) f.write(chr((x >> 16) & 0xff)) f.write(chr((x >> 24) & 0xff)) def compile(file, cfile = None): import os, marshal, __builtin__ f = open(file) codestring = f.read() f.close() timestamp = long(os.stat(file)[8]) codeobject = __builtin__.compile(codestring, file, 'exec') if not cfile: cfile = file + 'c' fc = open(cfile, 'wb') fc.write(MAGIC) wr_long(fc, timestamp) marshal.dump(codeobject, fc) fc.close() if os.name == 'mac': import macfs macfs.FSSpec(cfile).SetCreatorType('Pyth', 'PYC ') macfs.FSSpec(file).SetCreatorType('Pyth', 'TEXT')
# Routine to "compile" a .py file to a .pyc file. # This has intimate knowledge of how Python/import.c does it. # By Sjoerd Mullender (I forced him to write it :-). import imp MAGIC = imp.get_magic() def wr_long(f, x): f.write(chr( x & 0xff)) f.write(chr((x >> 8) & 0xff)) f.write(chr((x >> 16) & 0xff)) f.write(chr((x >> 24) & 0xff)) def compile(file, cfile = None): import os, marshal, __builtin__ f = open(file) codestring = f.read() f.close() timestamp = long(os.stat(file)[8]) codeobject = __builtin__.compile(codestring, file, 'exec') if not cfile: cfile = file + (__debug__ and 'c' or 'o') fc = open(cfile, 'wb') fc.write(MAGIC) wr_long(fc, timestamp) marshal.dump(codeobject, fc) fc.close() if os.name == 'mac': import macfs macfs.FSSpec(cfile).SetCreatorType('Pyth', 'PYC ') macfs.FSSpec(file).SetCreatorType('Pyth', 'TEXT')
Use the __debug__ flag to determine the proper filename extension to use for the cached module code object.
compile(): Use the __debug__ flag to determine the proper filename extension to use for the cached module code object.
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
4ed027756885c060d0e8b14c2b0c17b2b85dcb59
demonstrare/mailer/__init__.py
demonstrare/mailer/__init__.py
import logging import requests from requests.exceptions import ConnectionError, Timeout log = logging.getLogger(__name__) class MailNotifier(object): url = None api_key = None sender = None @classmethod def send_message(cls, subject, to, message): data = {'from': cls.sender, 'to': to, 'subject': subject, 'html': message} try: requests.post(cls.url, auth=('api', cls.api_key), data=data, timeout=3.5) except (ConnectionError, Timeout): log.error('Failed to deliver message %s', data) def includeme(config): """ Sets the mailgun properties from .ini config file :param config: The pyramid ``Configurator`` object for your app. :type config: ``pyramid.config.Configurator`` """ settings = config.get_settings() MailNotifier.url = settings['mailgun.url'] MailNotifier.api_key = settings['mailgun.key'] MailNotifier.sender = settings['mailgun.sender']
import logging import requests from requests.exceptions import ConnectionError, Timeout log = logging.getLogger(__name__) class MailNotifier(object): url = None api_key = None sender = None @classmethod def send_message(cls, subject, to, message): data = {'from': cls.sender, 'to': to, 'subject': subject, 'html': message} try: requests.post(cls.url, auth=('api', cls.api_key), data=data, timeout=5) except (ConnectionError, Timeout): log.error('Failed to deliver message %s', data) def includeme(config): """ Sets the mailgun properties from .ini config file :param config: The pyramid ``Configurator`` object for your app. :type config: ``pyramid.config.Configurator`` """ settings = config.get_settings() MailNotifier.url = settings['mailgun.url'] MailNotifier.api_key = settings['mailgun.key'] MailNotifier.sender = settings['mailgun.sender']
Update mailer timeout to 5 seconds
Update mailer timeout to 5 seconds
Python
mit
josuemontano/api-starter,josuemontano/API-platform,josuemontano/pyramid-angularjs-starter,josuemontano/api-starter,josuemontano/pyramid-angularjs-starter,josuemontano/API-platform,josuemontano/API-platform,josuemontano/API-platform,josuemontano/api-starter,josuemontano/pyramid-angularjs-starter
4aaab8edebfe416ab814a1f9803b699616bd57da
config-example.py
config-example.py
# Your bucket to delete things from. BUCKET = 'ADDME' # How many processes to fork. PROCESS_COUNT = 10 # Maximum number of objects per bulk request. MAX_JOB_SIZE = 10000 # Your simple API access key from the APIs tab of # <https://code.google.com/apis/console>. DEVKEY = 'ADDME' # On that same page, create a Client ID for installed applications, and add the # generated client ID here. CLIENT_ID = 'ADDME' # Add the associated client secret here. CLIENT_SECRET = 'ADDME'
# Your bucket to delete things from. BUCKET = 'ADDME' # How many processes to fork. PROCESS_COUNT = 4 # Maximum number of objects per bulk request. MAX_JOB_SIZE = 100 # Your simple API access key from the APIs tab of # <https://code.google.com/apis/console>. DEVKEY = 'ADDME' # On that same page, create a Client ID for installed applications, and add the # generated client ID here. CLIENT_ID = 'ADDME' # Add the associated client secret here. CLIENT_SECRET = 'ADDME'
Set more reasonable job sizes
Set more reasonable job sizes I'll follow this up by adding exponential backoff and then increasing the defaults to something reasonable.
Python
apache-2.0
googlearchive/storage-bulk-delete-python,jonparrott/storage-bulk-delete-python
262cee3ac7da741387ee66e78e6a9fbb9ffc7f31
chainer/backends/__init__.py
chainer/backends/__init__.py
from chainer.backends import chainerx # NOQA from chainer.backends import cpu # NOQA from chainer.backends import cuda # NOQA from chainer.backends import intel64 # NOQA
from chainer.backends import chainerx # NOQA from chainer.backends import cpu # NOQA from chainer.backends import cuda # NOQA from chainer.backends import intel64 # NOQA # TODO(niboshi): Refactor registration of backend modules for functions like # chainer.get_device().
Add TODO to refactor backend registration
Add TODO to refactor backend registration
Python
mit
keisuke-umezawa/chainer,ktnyt/chainer,okuta/chainer,okuta/chainer,okuta/chainer,chainer/chainer,hvy/chainer,keisuke-umezawa/chainer,chainer/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,wkentaro/chainer,pfnet/chainer,chainer/chainer,jnishi/chainer,tkerola/chainer,ktnyt/chainer,jnishi/chainer,niboshi/chainer,wkentaro/chainer,niboshi/chainer,jnishi/chainer,wkentaro/chainer,okuta/chainer,hvy/chainer,ktnyt/chainer,jnishi/chainer,hvy/chainer,wkentaro/chainer,chainer/chainer,ktnyt/chainer,niboshi/chainer,niboshi/chainer,hvy/chainer
64c2a9ec42fba89225af07a3d0cf84dd9de98e4b
legislators/urls.py
legislators/urls.py
from . import views from django.conf.urls import url urlpatterns = [ url(r'^find_legislator/', views.find_legislator), url(r'^get_latlon/', views.get_latlon) ]
from . import views from django.conf.urls import url urlpatterns = [ url(r'^find_legislator/', views.find_legislator), url(r'^get_latlon/', views.get_latlon, name="get_latlon"), url(r'^latest_latlon/', views.latest_latlon, name="latest_latlon") ]
Add url for latest latlon url
Add url for latest latlon url
Python
mit
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
26b18cac1164d9e543f02f54fc396b25eb40b5cf
vmware_dvs/agent/__init__.py
vmware_dvs/agent/__init__.py
# Copyright 2016 Mirantis, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from neutron.common import eventlet_utils eventlet_utils.monkey_patch()
Apply monkey patch in agent package for direct import main() from dvs_neutron_agent module
Apply monkey patch in agent package for direct import main() from dvs_neutron_agent module
Python
apache-2.0
ekosareva/vmware-dvs,ekosareva/vmware-dvs,Mirantis/vmware-dvs,VTabolin/vmware-dvs,Mirantis/vmware-dvs
2793f9a4e245c79ece58990622ae059a70514592
actions/aws_decrypt_password_data.py
actions/aws_decrypt_password_data.py
#!/usr/bin/env python import base64 import rsa import six from st2common.runners.base_action import Action class AwsDecryptPassworData(Action): def run(self, keyfile, password_data): # copied from: # https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122 self.logger.debug("Decrypting password data using: %s", keyfile) value = password_data if not value: return '' try: with open(keyfile) as pk_file: pk_contents = pk_file.read() private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents)) value = base64.b64decode(value) value = rsa.decrypt(value, private_key) return value.decode('utf-8') except Exception: msg = ('Unable to decrypt password data using ' 'provided private key file: {}').format(keyfile) self.logger.debug(msg, exc_info=True) raise ValueError(msg)
#!/usr/bin/env python import base64 import rsa import six from st2common.runners.base_action import Action class AwsDecryptPassworData(Action): def run(self, keyfile, password_data): # copied from: # https://github.com/aws/aws-cli/blob/master/awscli/customizations/ec2/decryptpassword.py#L96-L122 self.logger.debug("Decrypting password data using: %s", keyfile) value = password_data if not value: return '' # Hack because somewhere in the Mistral parameter "publish" pipeline, we # strip trailing and leading whitespace from a string which results in # an invalid base64 string if not value.startswith('\r\n'): value = '\r\n' + value if not value.endswith('\r\n'): value = value + '\r\n' value = base64.b64decode(value) try: with open(keyfile) as pk_file: pk_contents = pk_file.read() private_key = rsa.PrivateKey.load_pkcs1(six.b(pk_contents)) value = rsa.decrypt(value, private_key) return value.decode('utf-8') except Exception: msg = ('Unable to decrypt password data using ' 'provided private key file: {}').format(keyfile) self.logger.debug(msg, exc_info=True) raise ValueError(msg)
Add a workaround since somewhere in the Mistral "publish variable" pipeline, trailing and leading whitespace is removed.
Add a workaround since somewhere in the Mistral "publish variable" pipeline, trailing and leading whitespace is removed.
Python
apache-2.0
StackStorm/st2cd,StackStorm/st2cd
70b7090a438f7962f28acc23fa78cdb6f5d084a0
docs/sphinxext/configtraits.py
docs/sphinxext/configtraits.py
"""Directives and roles for documenting traitlets config options. :: .. configtrait:: Application.log_datefmt Description goes here. Cross reference like this: :configtrait:`Application.log_datefmt`. """ from sphinx.locale import l_ from sphinx.util.docfields import Field def setup(app): app.add_object_type('configtrait', 'configtrait', objname='Config option') metadata = {'parallel_read_safe': True, 'parallel_write_safe': True} return metadata
"""Directives and roles for documenting traitlets config options. :: .. configtrait:: Application.log_datefmt Description goes here. Cross reference like this: :configtrait:`Application.log_datefmt`. """ def setup(app): app.add_object_type('configtrait', 'configtrait', objname='Config option') metadata = {'parallel_read_safe': True, 'parallel_write_safe': True} return metadata
Fix compatibility with the latest release of Sphinx
Fix compatibility with the latest release of Sphinx `l_` from sphinx.locale has been deprecated for a long time. `_` is the new name for the same function but it seems that the imports there are useless. https://github.com/sphinx-doc/sphinx/commit/8d653a406dc0dc6c2632176ab4757ca15474b10f
Python
bsd-3-clause
ipython/ipython,ipython/ipython
255a9f65476433be99a9a96cf5681567e4825e65
dthm4kaiako/config/__init__.py
dthm4kaiako/config/__init__.py
"""Configuration for Django system.""" __version__ = "0.11.0" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
"""Configuration for Django system.""" __version__ = "0.12.0" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
Increment version number to 0.12.0
Increment version number to 0.12.0
Python
mit
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
02c2551b2760fc225bb4851c560f1881c7d674a4
txircd/modules/extra/listmodules.py
txircd/modules/extra/listmodules.py
from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class ModulesCommand(ModuleData): implements(IPlugin, IModuleData) name = "ModulesCommand" def actions(self): return [ ("statsruntype-modules", 1, self.listModules) ] def listModules(self): return sorted(self.ircd.loadedModules.keys()) modulesCommand = ModulesCommand()
from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class ModulesCommand(ModuleData): implements(IPlugin, IModuleData) name = "ModulesCommand" def actions(self): return [ ("statsruntype-modules", 1, self.listModules) ] def listModules(self): modules = {} for modName in sorted(self.ircd.loadedModules.keys()): modules[modName] = "*" return modules modulesCommand = ModulesCommand()
Return the correct thing for modules
Return the correct thing for modules
Python
bsd-3-clause
Heufneutje/txircd
ea39c4ebba3d5ab42dfa202f88f7d76386e505fe
plugins/MeshView/MeshView.py
plugins/MeshView/MeshView.py
from Cura.View.View import View class MeshView(View): def __init__(self): super(MeshView, self).__init__() def render(self): scene = self.getController().getScene() renderer = self.getRenderer() self._renderObject(scene.getRoot(), renderer) def _renderObject(self, object, renderer): if object.getMeshData(): renderer.renderMesh(object.getGlobalTransformation(), object.getMeshData()) for child in object.getChildren(): self._renderObject(child, renderer)
from Cura.View.View import View class MeshView(View): def __init__(self): super(MeshView, self).__init__() def render(self): scene = self.getController().getScene() renderer = self.getRenderer() self._renderObject(scene.getRoot(), renderer) def _renderObject(self, object, renderer): if not object.render(): if object.getMeshData(): renderer.renderMesh(object.getGlobalTransformation(), object.getMeshData()) for child in object.getChildren(): self._renderObject(child, renderer)
Allow SceneObjects to render themselves
Allow SceneObjects to render themselves
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
e733b0d5192437a95c4eafd1babc02385fb4fcf7
cms/sitemaps/cms_sitemap.py
cms/sitemaps/cms_sitemap.py
# -*- coding: utf-8 -*- from django.contrib.sitemaps import Sitemap from django.utils import translation from cms.models import Title def from_iterable(iterables): """ Backport of itertools.chain.from_iterable """ for it in iterables: for element in it: yield element class CMSSitemap(Sitemap): changefreq = "monthly" priority = 0.5 def items(self): all_titles = Title.objects.public().filter(page__login_required=False) return all_titles def lastmod(self, title): modification_dates = [title.page.changed_date, title.page.publication_date] plugins_for_placeholder = lambda placeholder: placeholder.get_plugins() plugins = from_iterable(map(plugins_for_placeholder, title.page.placeholders.all())) plugin_modification_dates = map(lambda plugin: plugin.changed_date, plugins) modification_dates.extend(plugin_modification_dates) return max(modification_dates) def location(self, title): translation.activate(title.language) url = title.page.get_absolute_url(title.language) translation.deactivate() return url
# -*- coding: utf-8 -*- from django.contrib.sitemaps import Sitemap from django.db.models import Q from django.utils import translation from cms.models import Title def from_iterable(iterables): """ Backport of itertools.chain.from_iterable """ for it in iterables: for element in it: yield element class CMSSitemap(Sitemap): changefreq = "monthly" priority = 0.5 def items(self): # # It is counter-productive to provide entries for: # > Pages which redirect: # - If the page redirects to another page on this site, the # destination page will already be in the sitemap, and # - If the page redirects externally, then it shouldn't be # part of our sitemap anyway. # > Pages which cannot be accessed by anonymous users (like # search engines are). # all_titles = Title.objects.public().filter( Q(redirect='') | Q(redirect__isnull=True), page__login_required=False ) return all_titles def lastmod(self, title): modification_dates = [title.page.changed_date, title.page.publication_date] plugins_for_placeholder = lambda placeholder: placeholder.get_plugins() plugins = from_iterable(map(plugins_for_placeholder, title.page.placeholders.all())) plugin_modification_dates = map(lambda plugin: plugin.changed_date, plugins) modification_dates.extend(plugin_modification_dates) return max(modification_dates) def location(self, title): translation.activate(title.language) url = title.page.get_absolute_url(title.language) translation.deactivate() return url
Remove redirected pages from the sitemap
Remove redirected pages from the sitemap
Python
bsd-3-clause
ScholzVolkmer/django-cms,wyg3958/django-cms,donce/django-cms,robmagee/django-cms,DylannCordel/django-cms,frnhr/django-cms,jrief/django-cms,wuzhihui1123/django-cms,Livefyre/django-cms,dhorelik/django-cms,netzkolchose/django-cms,intip/django-cms,chkir/django-cms,jproffitt/django-cms,selecsosi/django-cms,czpython/django-cms,liuyisiyisi/django-cms,takeshineshiro/django-cms,saintbird/django-cms,czpython/django-cms,memnonila/django-cms,FinalAngel/django-cms,farhaadila/django-cms,webu/django-cms,divio/django-cms,SmithsonianEnterprises/django-cms,sephii/django-cms,jproffitt/django-cms,czpython/django-cms,astagi/django-cms,bittner/django-cms,AlexProfi/django-cms,dhorelik/django-cms,nostalgiaz/django-cms,netzkolchose/django-cms,iddqd1/django-cms,jeffreylu9/django-cms,astagi/django-cms,SachaMPS/django-cms,chmberl/django-cms,qnub/django-cms,chkir/django-cms,nostalgiaz/django-cms,SachaMPS/django-cms,rscnt/django-cms,benzkji/django-cms,360youlun/django-cms,cyberintruder/django-cms,jproffitt/django-cms,nostalgiaz/django-cms,Vegasvikk/django-cms,FinalAngel/django-cms,wuzhihui1123/django-cms,SachaMPS/django-cms,datakortet/django-cms,farhaadila/django-cms,nimbis/django-cms,owers19856/django-cms,nimbis/django-cms,cyberintruder/django-cms,frnhr/django-cms,intip/django-cms,rsalmaso/django-cms,qnub/django-cms,liuyisiyisi/django-cms,SofiaReis/django-cms,jsma/django-cms,Jaccorot/django-cms,sznekol/django-cms,frnhr/django-cms,nimbis/django-cms,jrief/django-cms,FinalAngel/django-cms,leture/django-cms,philippze/django-cms,nimbis/django-cms,FinalAngel/django-cms,jproffitt/django-cms,frnhr/django-cms,netzkolchose/django-cms,360youlun/django-cms,stefanw/django-cms,owers19856/django-cms,intip/django-cms,takeshineshiro/django-cms,robmagee/django-cms,MagicSolutions/django-cms,benzkji/django-cms,bittner/django-cms,nostalgiaz/django-cms,bittner/django-cms,isotoma/django-cms,vxsx/django-cms,SofiaReis/django-cms,Vegasvikk/django-cms,vxsx/django-cms,philippze/django-cms,vxsx/django-cms,saintbird/django-cms,SmithsonianEnterprises/django-cms,chkir/django-cms,Vegasvikk/django-cms,astagi/django-cms,Jaccorot/django-cms,stefanfoulis/django-cms,divio/django-cms,irudayarajisawa/django-cms,petecummings/django-cms,petecummings/django-cms,vad/django-cms,vstoykov/django-cms,mkoistinen/django-cms,jrclaramunt/django-cms,webu/django-cms,vad/django-cms,sznekol/django-cms,evildmp/django-cms,AlexProfi/django-cms,rsalmaso/django-cms,rryan/django-cms,josjevv/django-cms,yakky/django-cms,rsalmaso/django-cms,takeshineshiro/django-cms,DylannCordel/django-cms,jrief/django-cms,jeffreylu9/django-cms,stefanw/django-cms,intip/django-cms,stefanw/django-cms,mkoistinen/django-cms,chmberl/django-cms,isotoma/django-cms,donce/django-cms,chmberl/django-cms,saintbird/django-cms,leture/django-cms,datakortet/django-cms,dhorelik/django-cms,Livefyre/django-cms,keimlink/django-cms,divio/django-cms,jsma/django-cms,keimlink/django-cms,ScholzVolkmer/django-cms,MagicSolutions/django-cms,selecsosi/django-cms,qnub/django-cms,jsma/django-cms,kk9599/django-cms,andyzsf/django-cms,selecsosi/django-cms,vstoykov/django-cms,wyg3958/django-cms,jeffreylu9/django-cms,bittner/django-cms,jrclaramunt/django-cms,mkoistinen/django-cms,rsalmaso/django-cms,timgraham/django-cms,yakky/django-cms,rscnt/django-cms,vad/django-cms,kk9599/django-cms,benzkji/django-cms,stefanfoulis/django-cms,memnonila/django-cms,donce/django-cms,petecummings/django-cms,isotoma/django-cms,datakortet/django-cms,Livefyre/django-cms,josjevv/django-cms,wuzhihui1123/django-cms,evildmp/django-cms,josjevv/django-cms,stefan
w/django-cms,jeffreylu9/django-cms,ScholzVolkmer/django-cms,robmagee/django-cms,MagicSolutions/django-cms,yakky/django-cms,irudayarajisawa/django-cms,czpython/django-cms,leture/django-cms,timgraham/django-cms,evildmp/django-cms,youprofit/django-cms,mkoistinen/django-cms,webu/django-cms,datakortet/django-cms,sephii/django-cms,vad/django-cms,jsma/django-cms,keimlink/django-cms,evildmp/django-cms,vxsx/django-cms,kk9599/django-cms,iddqd1/django-cms,Livefyre/django-cms,vstoykov/django-cms,liuyisiyisi/django-cms,AlexProfi/django-cms,wyg3958/django-cms,farhaadila/django-cms,netzkolchose/django-cms,360youlun/django-cms,Jaccorot/django-cms,iddqd1/django-cms,andyzsf/django-cms,sephii/django-cms,yakky/django-cms,sznekol/django-cms,stefanfoulis/django-cms,andyzsf/django-cms,andyzsf/django-cms,SmithsonianEnterprises/django-cms,benzkji/django-cms,irudayarajisawa/django-cms,youprofit/django-cms,selecsosi/django-cms,philippze/django-cms,timgraham/django-cms,cyberintruder/django-cms,stefanfoulis/django-cms,jrclaramunt/django-cms,memnonila/django-cms,SofiaReis/django-cms,rscnt/django-cms,rryan/django-cms,rryan/django-cms,youprofit/django-cms,divio/django-cms,wuzhihui1123/django-cms,isotoma/django-cms,jrief/django-cms,sephii/django-cms,rryan/django-cms,DylannCordel/django-cms,owers19856/django-cms
f48eb543c3ae2222a71080592ae8932c227dc605
roche/scripts/xml-load.py
roche/scripts/xml-load.py
# coding=utf-8 import sys sys.path.append('../../') import roche.settings from eulexistdb.db import ExistDB from roche.settings import EXISTDB_SERVER_URL # # Timeout higher? # xmldb = ExistDB(timeout=30) xmldb.createCollection('docker', True) xmldb.createCollection(u'docker/浙江大學圖書館', True) with open('../../../dublin-store/db/test_001.xml') as f: xmldb.load(f, '/docker/001.xml', True)
# coding=utf-8 # # Must be called in roche root dir # import sys sys.path.append('.') import roche.settings from eulexistdb.db import ExistDB from roche.settings import EXISTDB_SERVER_URL # # Timeout higher? # xmldb = ExistDB(timeout=30) xmldb.createCollection('docker', True) xmldb.createCollection(u'docker/浙江大學圖書館', True) with open('../dublin-store/db/test_001.xml') as f: xmldb.load(f, '/docker/001.xml', True)
Fix relative path in relation to app root dir
Fix relative path in relation to app root dir
Python
mit
beijingren/roche-website,beijingren/roche-website,beijingren/roche-website,beijingren/roche-website
eccedb9f938bd74574e4dcdd9ea63f71ac269f20
nydus/db/routers/__init__.py
nydus/db/routers/__init__.py
""" nydus.db.routers ~~~~~~~~~~~~~~~~ :copyright: (c) 2011 DISQUS. :license: Apache License 2.0, see LICENSE for more details. """ from .base import BaseRouter, RoundRobinRouter
""" nydus.db.routers ~~~~~~~~~~~~~~~~ :copyright: (c) 2011 DISQUS. :license: Apache License 2.0, see LICENSE for more details. """ from .base import BaseRouter, RoundRobinRouter, PartitionRouter
Add partition router to base
Add partition router to base
Python
apache-2.0
disqus/nydus
d4dd408e671d14518b3fabb964027cd006366fca
testfixtures/compat.py
testfixtures/compat.py
# compatibility module for different python versions import sys if sys.version_info[:2] > (3, 0): PY2 = False PY3 = True Bytes = bytes Unicode = str basestring = str class_type_name = 'class' ClassType = type exception_module = 'builtins' new_class = type self_name = '__self__' from io import StringIO xrange = range else: PY2 = True PY3 = False Bytes = str Unicode = unicode basestring = basestring class_type_name = 'type' from types import ClassType exception_module = 'exceptions' from new import classobj as new_class self_name = 'im_self' from cStringIO import StringIO xrange = xrange try: from mock import call as mock_call except ImportError: # pragma: no cover class MockCall: pass mock_call = MockCall() try: from unittest.mock import call as unittest_mock_call except ImportError: class UnittestMockCall: pass unittest_mock_call = UnittestMockCall()
# compatibility module for different python versions import sys if sys.version_info[:2] > (3, 0): PY2 = False PY3 = True Bytes = bytes Unicode = str basestring = str BytesLiteral = lambda x: x.encode('latin1') UnicodeLiteral = lambda x: x class_type_name = 'class' ClassType = type exception_module = 'builtins' new_class = type self_name = '__self__' from io import StringIO xrange = range else: PY2 = True PY3 = False Bytes = str Unicode = unicode basestring = basestring BytesLiteral = lambda x: x UnicodeLiteral = lambda x: x.decode('latin1') class_type_name = 'type' from types import ClassType exception_module = 'exceptions' from new import classobj as new_class self_name = 'im_self' from cStringIO import StringIO xrange = xrange try: from mock import call as mock_call except ImportError: # pragma: no cover class MockCall: pass mock_call = MockCall() try: from unittest.mock import call as unittest_mock_call except ImportError: class UnittestMockCall: pass unittest_mock_call = UnittestMockCall()
Add Python version agnostic helpers for creating byte and unicode literals.
Add Python version agnostic helpers for creating byte and unicode literals.
Python
mit
Simplistix/testfixtures,nebulans/testfixtures
03dd583c8d75b2b588079c69b517c2510eba71db
wsme/__init__.py
wsme/__init__.py
from wsme.api import expose, validate from wsme.root import WSRoot from wsme.types import wsattr, wsproperty
from wsme.api import expose, validate from wsme.root import WSRoot from wsme.types import wsattr, wsproperty, Unset
Add 'Unset' to the wsme module
Add 'Unset' to the wsme module
Python
mit
stackforge/wsme
bb3d9ec2d9932da2abb50f5cb6bceffae5112abb
mrbelvedereci/trigger/admin.py
mrbelvedereci/trigger/admin.py
from django.contrib import admin from mrbelvedereci.trigger.models import Trigger class TriggerAdmin(admin.ModelAdmin): list_display = ('repo', 'type', 'flows', 'org', 'regex', 'active', 'public') list_filter = ('active', 'public', 'repo', 'org', 'type') admin.site.register(Trigger, TriggerAdmin)
from django.contrib import admin from mrbelvedereci.trigger.models import Trigger class TriggerAdmin(admin.ModelAdmin): list_display = ('name', 'repo', 'type', 'flows', 'org', 'regex', 'active', 'public') list_filter = ('active', 'public', 'type', 'org', 'repo') admin.site.register(Trigger, TriggerAdmin)
Add name to trigger list view
Add name to trigger list view
Python
bsd-3-clause
SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci,SalesforceFoundation/mrbelvedereci
abbc6757df62d1dfbd4158dd50b99a74d204fc95
sandcats/trivial_tests.py
sandcats/trivial_tests.py
import requests def register_asheesh(): return requests.post( 'http://localhost:3000/register', {'rawHostname': 'asheesh', 'email': '[email protected]', 'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()}, ) def register_asheesh2_bad_key_type(): return requests.post( 'http://localhost:3000/register', {'rawHostname': 'asheesh2', 'email': '[email protected]', 'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pem').read()}, )
import requests def register_asheesh(): return requests.post( 'http://localhost:3000/register', {'rawHostname': 'asheesh', 'email': '[email protected]', 'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()}, ) def register_asheesh2_bad_key_type(): return requests.post( 'http://localhost:3000/register', {'rawHostname': 'asheesh2', 'email': '[email protected]', 'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pem').read()}, ) def register_asheesh3_x_forwarded_for(): # Provide the HTTP_FORWARDED_COUNT=1 environment variable to # Meteor before running this test. # # FIXME: This doesn't pass, but for now, I'm not *that* worried. return requests.post( 'http://localhost:3000/register', data={'rawHostname': 'asheesh3', 'email': '[email protected]', 'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()}, headers={'X-Forwarded-For': '128.151.2.1'}, )
Add non-working test for X-Forwarded-For
Add non-working test for X-Forwarded-For
Python
apache-2.0
sandstorm-io/sandcats,sandstorm-io/sandcats,sandstorm-io/sandcats,sandstorm-io/sandcats
437eb8432fe91865d3cb24109e1b99818de8ce4e
pysc2/bin/battle_net_maps.py
pysc2/bin/battle_net_maps.py
#!/usr/bin/python # Copyright 2019 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Print the list of available maps according to the game.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl import app from pysc2 import run_configs def main(unused_argv): with run_configs.get().start(want_rgb=False) as controller: available_maps = controller.available_maps() print("\n") print("Local map paths:") for m in available_maps.local_map_paths: print(m) print() print("Battle.net maps:") for m in available_maps.battlenet_map_names: print(m) if __name__ == "__main__": app.run(main)
#!/usr/bin/python # Copyright 2019 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Print the list of available maps according to the game.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl import app from pysc2 import run_configs def main(unused_argv): with run_configs.get().start(want_rgb=False) as controller: available_maps = controller.available_maps() print("\n") print("Local map paths:") for m in sorted(available_maps.local_map_paths): print(" ", m) print() print("Battle.net maps:") for m in sorted(available_maps.battlenet_map_names): print(" ", m) if __name__ == "__main__": app.run(main)
Sort and indent the map lists.
Sort and indent the map lists. PiperOrigin-RevId: 249276696
Python
apache-2.0
deepmind/pysc2
73eb3c7c52c2a5c58cad0e1d4dbe09c1e713beeb
conductor/vendor/_stripe.py
conductor/vendor/_stripe.py
from django.conf import settings import stripe stripe.api_key = settings.STRIPE_API_KEY stripe.api_version = "2018-10-31" class StripeGateway: """A gateway to Stripe This insulates the rest of the system from Stripe errors and configures the Stripe module with the API key. """ def create_customer(self, user: settings.AUTH_USER_MODEL, stripe_token: str) -> str: """Add a user to Stripe and join them to the plan.""" # Let this fail on purpose. If it fails, the error monitoring system # will log it and I'll learn how to harden it for the conductor env. customer = stripe.Customer.create(email=user.email, source=stripe_token) stripe.Subscription.create( customer=customer.id, items=[{"plan": settings.STRIPE_PLAN}], trial_from_plan=True, ) return customer.id stripe_gateway = StripeGateway()
from django.conf import settings import stripe stripe.api_key = settings.STRIPE_API_KEY class StripeGateway: """A gateway to Stripe This insulates the rest of the system from Stripe errors and configures the Stripe module with the API key. """ def create_customer(self, user: settings.AUTH_USER_MODEL, stripe_token: str) -> str: """Add a user to Stripe and join them to the plan.""" # Let this fail on purpose. If it fails, the error monitoring system # will log it and I'll learn how to harden it for the conductor env. customer = stripe.Customer.create(email=user.email, source=stripe_token) stripe.Subscription.create( customer=customer.id, items=[{"plan": settings.STRIPE_PLAN}], trial_from_plan=True, ) return customer.id stripe_gateway = StripeGateway()
Remove pinned Stripe API version.
Remove pinned Stripe API version.
Python
bsd-2-clause
mblayman/lcp,mblayman/lcp,mblayman/lcp
2ca3f9a47a639365a8de4db1b80331ac8e06d0af
modules/pipeitembuilder.py
modules/pipeitembuilder.py
# pipeitembuilder.py # import urllib from pipe2py import util def pipe_itembuilder(context, _INPUT, conf, **kwargs): """This source builds an item. Keyword arguments: context -- pipeline context _INPUT -- source generator conf: attrs -- key, value pairs Yields (_OUTPUT): item """ attrs = conf['attrs'] for item in _INPUT: d = {} for attr in attrs: try: key = util.get_value(attr['key'], item, **kwargs) value = util.get_value(attr['value'], item, **kwargs) except KeyError: continue #ignore if the item is referenced but doesn't have our source or target field (todo: issue a warning if debugging?) util.set_value(d, key, value) yield d if item == True: #i.e. this is being fed forever, i.e. not in a loop, so we just yield our item once break
# pipeitembuilder.py # import urllib from pipe2py import util def pipe_itembuilder(context, _INPUT, conf, **kwargs): """This source builds an item. Keyword arguments: context -- pipeline context _INPUT -- source generator conf: attrs -- key, value pairs Yields (_OUTPUT): item """ attrs = conf['attrs'] if not isinstance(attrs, list): attrs = [attrs] for item in _INPUT: d = {} for attr in attrs: try: key = util.get_value(attr['key'], item, **kwargs) value = util.get_value(attr['value'], item, **kwargs) except KeyError: continue #ignore if the item is referenced but doesn't have our source or target field (todo: issue a warning if debugging?) util.set_value(d, key, value) yield d if item == True: #i.e. this is being fed forever, i.e. not in a loop, so we just yield our item once break
Fix to suit latest Yahoo changes
Fix to suit latest Yahoo changes
Python
mit
nerevu/riko,nerevu/riko
e5dea3c91cfc82f58f80131ff79be402582a3412
dthm4kaiako/config/__init__.py
dthm4kaiako/config/__init__.py
"""Configuration for Django system.""" __version__ = "0.16.6" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
"""Configuration for Django system.""" __version__ = "0.17.0" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
Increment version number to 0.17.0
Increment version number to 0.17.0
Python
mit
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
9d7f2626294fbf25934e7dda4892b7ac13bd5555
fireplace/cards/tgt/warlock.py
fireplace/cards/tgt/warlock.py
from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) # Void Crusher class AT_023: inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION) # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
Implement more TGT Warlock cards
Implement more TGT Warlock cards
Python
agpl-3.0
liujimj/fireplace,beheh/fireplace,Ragowit/fireplace,Ragowit/fireplace,amw2104/fireplace,amw2104/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,oftc-ftw/fireplace,liujimj/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,jleclanche/fireplace,Meerkov/fireplace,NightKev/fireplace
27c115ba875136ced13023999fe13fcf0d798f0e
lux_udp_bridge.py
lux_udp_bridge.py
#!/usr/bin/env python import select import serial import socket def run_lux_udp(host, port, dev): with serial.Serial(dev, baudrate=3000000, xonxoff=False) as ser: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind((host, port)) last_addr = None serial_buffer = "" while True: inputs, outputs, errors = select.select([sock.fileno(), ser.fileno()], [], []) if sock.fileno() in inputs: packet, last_addr = sock.recvfrom(1100) #print ">", repr(packet) if len(packet) == 0: # Ping, respond back sock.sendto("", 0, last_addr) else: ser.write(packet) if ser.fileno() in inputs: serial_buffer += ser.read() while "\0" in serial_buffer: packet, null, serial_buffer = serial_buffer.partition("\0") sock.sendto(packet + null, 0, last_addr) #print "<", repr(packet) if __name__ == "__main__": while True: try: run_lux_udp(host="0.0.0.0", port=1365, dev="/dev/ttyACM0") except Exception as e: print e select.select([], [], [], 5)
#!/usr/bin/env python import select import serial import socket def run_lux_udp(host, port, dev): with serial.Serial(dev, baudrate=3000000, xonxoff=False) as ser: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind((host, port)) last_addr = None serial_buffer = "" while True: inputs, outputs, errors = select.select([sock.fileno(), ser.fileno()], [], []) while sock.fileno() in inputs: try: packet, last_addr = sock.recvfrom(1100, socket.MSG_DONTWAIT) except socket.error: break #print ">", repr(packet) if len(packet) == 0: # Ping, respond back sock.sendto("", 0, last_addr) else: ser.write(packet) if ser.fileno() in inputs: serial_buffer += ser.read() while "\0" in serial_buffer: packet, null, serial_buffer = serial_buffer.partition("\0") sock.sendto(packet + null, 0, last_addr) #print "<", repr(packet) if __name__ == "__main__": while True: try: run_lux_udp(host="0.0.0.0", port=1365, dev="/dev/ttyACM0") except Exception as e: print e select.select([], [], [], 5)
Fix buffering issue in udp bridge
Fix buffering issue in udp bridge
Python
mit
zbanks/radiance,zbanks/radiance,zbanks/radiance,zbanks/radiance
4c3e92c8847b35c4afa53a90dd823c89d1d534d1
mamba/__init__.py
mamba/__init__.py
__version__ = '0.8.6'
__version__ = '0.8.6' def description(message): pass def _description(message): pass def it(message): pass def _it(message): pass def context(message): pass def _context(message): pass def before(): pass def after(): pass
Make mamba more friendly to linters and IDEs
Make mamba more friendly to linters and IDEs Some empty functions are added and we are able to optionally import them
Python
mit
nestorsalceda/mamba
21bf18a03c485304aa00dc2af86aa91930e4b1ac
tests/test_grammar.py
tests/test_grammar.py
import pytest from parglare import Grammar from parglare.exceptions import GrammarError def test_terminal_nonterminal_conflict(): # Production A is a terminal ("a") and non-terminal at the same time. g = """ A = "a" | B; B = "b"; """ try: Grammar.from_string(g) assert False except GrammarError as e: assert 'Multiple definition' in str(e) def test_multiple_terminal_definition(): g = """ S = A A; A = "a"; A = "b"; """ try: Grammar.from_string(g) assert False except GrammarError as e: assert 'Multiple definition' in str(e)
import pytest from parglare import Grammar def test_terminal_nonterminal(): # Production A is a terminal ("a") and non-terminal at the same time. # Thus, it must be recognized as non-terminal. g = """ S = A B; A = "a" | B; B = "b"; """ Grammar.from_string(g) # Here A shoud be non-terminal while B will be terminal. g = """ S = A B; A = B; B = "b"; """ Grammar.from_string(g) def test_multiple_terminal_definition(): # A is defined multiple times as terminal thus it must be recognized # as non-terminal with alternative expansions. g = """ S = A A; A = "a"; A = "b"; """ Grammar.from_string(g)
Fix in tests for terminal definitions.
Fix in tests for terminal definitions.
Python
mit
igordejanovic/parglare,igordejanovic/parglare
eacbc67cdaa7016d1098e9f63a50ae7ca6b4924a
app/auth/views.py
app/auth/views.py
# Copyright (C) 2016 University of Zurich. All rights reserved. # # This file is part of MSRegistry Backend. # # MSRegistry Backend is free software: you can redistribute it and/or # modify it under the terms of the version 3 of the GNU Affero General # Public License as published by the Free Software Foundation, or any # other later version. # # MSRegistry Backend is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version # 3 of the GNU Affero General Public License for more details. # # You should have received a copy of the version 3 of the GNU Affero # General Public License along with MSRegistry Backend. If not, see # <http://www.gnu.org/licenses/>. __author__ = "Filippo Panessa <[email protected]>" __copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik," " University of Zurich") from . import auth from ..decorators import requires_auth @auth.route('/test') @requires_auth def authTest(): return "All good. You only get this message if you're authenticated."
# Copyright (C) 2016 University of Zurich. All rights reserved. # # This file is part of MSRegistry Backend. # # MSRegistry Backend is free software: you can redistribute it and/or # modify it under the terms of the version 3 of the GNU Affero General # Public License as published by the Free Software Foundation, or any # other later version. # # MSRegistry Backend is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version # 3 of the GNU Affero General Public License for more details. # # You should have received a copy of the version 3 of the GNU Affero # General Public License along with MSRegistry Backend. If not, see # <http://www.gnu.org/licenses/>. __author__ = "Filippo Panessa <[email protected]>" __copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik," " University of Zurich") from flask import jsonify from . import auth from ..decorators import requires_auth @auth.route('/test') @requires_auth def authTest(): return jsonify({'code': 'authorization_success', 'description': "All good. You only get this message if you're authenticated."})
Use JSON for API GET /auth/test response
Use JSON for API GET /auth/test response
Python
agpl-3.0
uzh/msregistry
64bff944752cbba16878b22025d6ee332e923007
pontoon/administration/management/commands/update_projects.py
pontoon/administration/management/commands/update_projects.py
import datetime from django.core.management.base import BaseCommand, CommandError from pontoon.administration.files import ( update_from_repository, extract_to_database, ) from pontoon.base.models import Project class Command(BaseCommand): args = '<project_id project_id ...>' help = 'Update projects from repositories and store changes to database' def handle(self, *args, **options): projects = Project.objects.all() if args: projects = projects.filter(pk__in=args) else: self.stdout.write(self.help.upper()) for project in projects: try: update_from_repository(project) extract_to_database(project) now = datetime.datetime.now() self.stdout.write( '[%s]: Updated project %s\n' % (now, project)) except Exception as e: now = datetime.datetime.now() raise CommandError( '[%s]: Update error: %s\n' % (now, unicode(e)))
import datetime from django.core.management.base import BaseCommand, CommandError from pontoon.administration.files import ( update_from_repository, extract_to_database, ) from pontoon.base.models import Project class Command(BaseCommand): args = '<project_id project_id ...>' help = 'Update projects from repositories and store changes to database' def handle(self, *args, **options): projects = Project.objects.all() if args: projects = projects.filter(pk__in=args) else: self.stdout.write('%s\n' % self.help.upper()) for project in projects: try: update_from_repository(project) extract_to_database(project) now = datetime.datetime.now() self.stdout.write( '[%s]: Updated project %s\n' % (now, project)) except Exception as e: now = datetime.datetime.now() raise CommandError( '[%s]: Update error: %s\n' % (now, unicode(e)))
Print new line after operation title
Print new line after operation title
Python
bsd-3-clause
jotes/pontoon,mathjazz/pontoon,participedia/pontoon,Jobava/mirror-pontoon,mathjazz/pontoon,m8ttyB/pontoon,vivekanand1101/pontoon,sudheesh001/pontoon,mastizada/pontoon,yfdyh000/pontoon,mastizada/pontoon,jotes/pontoon,mastizada/pontoon,vivekanand1101/pontoon,Osmose/pontoon,participedia/pontoon,yfdyh000/pontoon,vivekanand1101/pontoon,vivekanand1101/pontoon,m8ttyB/pontoon,mathjazz/pontoon,Jobava/mirror-pontoon,jotes/pontoon,mastizada/pontoon,yfdyh000/pontoon,Osmose/pontoon,Osmose/pontoon,m8ttyB/pontoon,Jobava/mirror-pontoon,jotes/pontoon,mathjazz/pontoon,sudheesh001/pontoon,m8ttyB/pontoon,yfdyh000/pontoon,sudheesh001/pontoon,sudheesh001/pontoon,mozilla/pontoon,mozilla/pontoon,mozilla/pontoon,mozilla/pontoon,participedia/pontoon,Jobava/mirror-pontoon,mozilla/pontoon,Osmose/pontoon,mathjazz/pontoon,participedia/pontoon
e8b1d0aff6333d6f8cfb4c81262550c670ea7e86
factory/tools/cat_StartdLog.py
factory/tools/cat_StartdLog.py
#!/bin/env python # # cat_StartdLog.py # # Print out the StartdLog for a glidein output file # # Usage: cat_StartdLog.py logname # import sys STARTUP_DIR=sys.path[0] sys.path.append(os.path.join(STARTUP_DIR,"lib")) import gWftLogParser USAGE="Usage: cat_StartdLog.py <logname>" def main(): try: print gWftLogParser.get_CondorLog(sys.argv[1],"StartdLog") except: sys.stderr.write("%s\n"%USAGE) sys.exit(1) if __name__ == '__main__': main()
#!/bin/env python # # cat_StartdLog.py # # Print out the StartdLog for a glidein output file # # Usage: cat_StartdLog.py logname # import os.path import sys STARTUP_DIR=sys.path[0] sys.path.append(os.path.join(STARTUP_DIR,"lib")) import gWftLogParser USAGE="Usage: cat_StartdLog.py <logname>" def main(): try: print gWftLogParser.get_CondorLog(sys.argv[1],"StartdLog") except: sys.stderr.write("%s\n"%USAGE) sys.exit(1) if __name__ == '__main__': main()
Allow for startup in a different dir
Allow for startup in a different dir
Python
bsd-3-clause
bbockelm/glideinWMS,bbockelm/glideinWMS,holzman/glideinwms-old,holzman/glideinwms-old,bbockelm/glideinWMS,holzman/glideinwms-old,bbockelm/glideinWMS
5835939302048f420bfc91ab027fea8f91e14a02
pybo/globalopt/__init__.py
pybo/globalopt/__init__.py
from .direct import * from .lbfgs import * __all__ = [] __all__ += direct.__all__ __all__ += lbfgsb.__all__
from .direct import * from .lbfgs import * __all__ = [] __all__ += direct.__all__ __all__ += lbfgs.__all__
Fix a minor bug due to renaming the lbfgs module.
Fix a minor bug due to renaming the lbfgs module.
Python
bsd-2-clause
jhartford/pybo,mwhoffman/pybo
7bf4083ef44585116f0eff86753080612a26b374
src/__init__.py
src/__init__.py
from bayeslite.api import barplot from bayeslite.api import cardinality from bayeslite.api import draw_crosscat from bayeslite.api import estimate_log_likelihood from bayeslite.api import heatmap from bayeslite.api import histogram from bayeslite.api import mi_hist from bayeslite.api import nullify from bayeslite.api import pairplot from bayeslite.api import plot_crosscat_chain_diagnostics """Main bdbcontrib API. The bdbcontrib module servers a sandbox for experimental and semi-stable features that are not yet ready for integreation to the bayeslite repository. """ __all__ = [ 'barplot', 'cardinality', 'draw_crosscat', 'estimate_log_likelihood', 'heatmap', 'histogram', 'mi_hist', 'nullify', 'pairplot', 'plot_crosscat_chain_diagnostics' ]
from bdbcontrib.api import barplot from bdbcontrib.api import cardinality from bdbcontrib.api import draw_crosscat from bdbcontrib.api import estimate_log_likelihood from bdbcontrib.api import heatmap from bdbcontrib.api import histogram from bdbcontrib.api import mi_hist from bdbcontrib.api import nullify from bdbcontrib.api import pairplot from bdbcontrib.api import plot_crosscat_chain_diagnostics """Main bdbcontrib API. The bdbcontrib module servers a sandbox for experimental and semi-stable features that are not yet ready for integreation to the bayeslite repository. """ __all__ = [ 'barplot', 'cardinality', 'draw_crosscat', 'estimate_log_likelihood', 'heatmap', 'histogram', 'mi_hist', 'nullify', 'pairplot', 'plot_crosscat_chain_diagnostics' ]
Fix bug from bayeslite to bdbcontrib.
Fix bug from bayeslite to bdbcontrib.
Python
apache-2.0
probcomp/bdbcontrib,probcomp/bdbcontrib
7a7ff71a4bfe6a14a820d8e90174ea168cb21fb2
appstack/settings/__init__.py
appstack/settings/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- debug = True port = 8000 processor = 1 settings = None app_static_path = None app_template_path = None # # import other settings for different environment # try: from develop_settings import * # DevelopSettings except ImportError: pass try: from production_settings import * # ProductionSettings except ImportError: pass try: from local_settings import * # LocalSettings except ImportError: pass
#!/usr/bin/env python # -*- coding: utf-8 -*- # Redis cache_driver="default" # default/socket cache_host="localhost" cache_name="" cache_port=6379 # for Redis # Posgresql database_driver="postgresql" # Posgresql database_host="localhost" database_name="" database_password="" database_port=5432 # for Posgresql database_username="" # Tornado cookie_secret="" debug=True static_path=None template_path=None xsrf_cookies=True # Aka. settings settings=None # Server port=8000 # run backend server on the given port processor=1 # run backend server with the processors # # import other settings for different environment # try: from .develop_settings import * # DevelopSettings except ImportError: pass try: from .production_settings import * # ProductionSettings except ImportError: pass try: from .local_settings import * # LocalSettings except ImportError: pass
Rewrite settings for the application. Very nice~
Rewrite settings for the application. Very nice~
Python
mit
mywaiting/appstack,mywaiting/appstack
27f503ef57a1582f7cc792d61f537bec71a4b02c
dhcp2nest/util.py
dhcp2nest/util.py
""" Utility functions for dhcp2nest """ from queue import Queue from subprocess import Popen, PIPE from threading import Thread def follow_file(fn, max_lines=100): """ Return a Queue that is fed lines (up to max_lines) from the given file (fn) continuously The implementation given here was inspired by http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python """ fq = Queue(maxsize=max_lines) def _follow_file_thread(fn, fq): """ Queue lines from the given file (fn) continuously, even as the file grows or is replaced WARNING: This generator will block forever on the tail subprocess--no timeouts are enforced. """ # Use system tail with name-based following and retry p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE) # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline().decode('utf-8') fq.put(line) # Spawn a thread to read data from tail Thread(target=_follow_file_thread, args=(fn, fq)).start() # Return the queue return fq
""" Utility functions for dhcp2nest """ from queue import Queue from subprocess import Popen, PIPE from threading import Thread def follow_file(fn, max_lines=100): """ Return a Queue that is fed lines (up to max_lines) from the given file (fn) continuously The implementation given here was inspired by http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python """ fq = Queue(maxsize=max_lines) def _follow_file_thread(fn, fq): """ Queue lines from the given file (fn) continuously, even as the file grows or is replaced WARNING: This generator will block forever on the tail subprocess--no timeouts are enforced. """ # Use system tail with name-based following and retry p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE) # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline().decode('utf-8') fq.put(line) # Spawn a thread to read data from tail Thread(target=_follow_file_thread, args=(fn, fq), daemon=True).start() # Return the queue return fq
Use daemon threads for follow_file()
Use daemon threads for follow_file() Signed-off-by: Jason Bernardino Alonso <[email protected]>
Python
mit
jbalonso/dhcp2nest
af0ec29ce0c830f096ab809fd2d69affd887ce16
feincms/module/page/admin.py
feincms/module/page/admin.py
# ------------------------------------------------------------------------ # coding=utf-8 # ------------------------------------------------------------------------ from __future__ import absolute_import from django.contrib import admin from .models import Page from .modeladmins import PageAdmin # ------------------------------------------------------------------------ admin.site.register(Page, PageAdmin) # ------------------------------------------------------------------------ # ------------------------------------------------------------------------
# ------------------------------------------------------------------------ # coding=utf-8 # ------------------------------------------------------------------------ from __future__ import absolute_import from django.contrib import admin from django.core.exceptions import ImproperlyConfigured from django.db.models import FieldDoesNotExist from .models import Page from .modeladmins import PageAdmin # ------------------------------------------------------------------------ try: Page._meta.get_field('template_key') except FieldDoesNotExist: raise ImproperlyConfigured( 'The page module requires a \'Page.register_templates()\' call somewhere' ' (\'Page.register_regions()\' is not sufficient).') admin.site.register(Page, PageAdmin) # ------------------------------------------------------------------------ # ------------------------------------------------------------------------
Abort when the page model does not have a template_key field
Abort when the page model does not have a template_key field
Python
bsd-3-clause
joshuajonah/feincms,mjl/feincms,matthiask/django-content-editor,matthiask/feincms2-content,feincms/feincms,feincms/feincms,pjdelport/feincms,pjdelport/feincms,mjl/feincms,michaelkuty/feincms,matthiask/django-content-editor,joshuajonah/feincms,nickburlett/feincms,matthiask/feincms2-content,matthiask/django-content-editor,michaelkuty/feincms,michaelkuty/feincms,joshuajonah/feincms,nickburlett/feincms,matthiask/feincms2-content,nickburlett/feincms,michaelkuty/feincms,pjdelport/feincms,matthiask/django-content-editor,joshuajonah/feincms,mjl/feincms,feincms/feincms,nickburlett/feincms
2c5650ef41aaf8c116f3922be02e7c5e7a79524b
pychecker/pychecker2/File.py
pychecker/pychecker2/File.py
from pychecker2.util import type_filter from compiler import ast class File: def __init__(self, name): self.name = name self.parseTree = None self.scopes = {} self.root_scope = None self.warnings = [] def __cmp__(self, other): return cmp(self.name, other.name) def warning(self, line, warn, *args): try: line = line.lineno except AttributeError: pass self.warnings.append( (line, warn, args) ) def scope_filter(self, type): return [(n, s) for n, s in self.scopes.iteritems() if isinstance(n, type) ] def function_scopes(self): return self.scope_filter(ast.Function) def class_scopes(self): return self.scope_filter(ast.Class)
from pychecker2.util import parents from compiler import ast class File: def __init__(self, name): self.name = name self.parseTree = None self.scopes = {} self.root_scope = None self.warnings = [] def __cmp__(self, other): return cmp(self.name, other.name) def warning(self, line, warn, *args): lineno = line try: lineno = line.lineno except AttributeError: pass if not lineno: try: for p in parents(line): if p.lineno: lineno = p.lineno break except AttributeError: pass self.warnings.append( (lineno, warn, args) ) def scope_filter(self, type): return [(n, s) for n, s in self.scopes.iteritems() if isinstance(n, type) ] def function_scopes(self): return self.scope_filter(ast.Function) def class_scopes(self): return self.scope_filter(ast.Class)
Add more ways to suck line numbers from nodes
Add more ways to suck line numbers from nodes
Python
bsd-3-clause
smspillaz/pychecker,smspillaz/pychecker,smspillaz/pychecker
2fb27cf8f4399ec6aba36b86d2993e6c3b81d0ee
coalib/bearlib/languages/__init__.py
coalib/bearlib/languages/__init__.py
""" This directory holds means to get generic information for specific languages. """ # Start ignoring PyUnusedCodeBear from .Language import Language from .Language import Languages from .definitions.Unknown import Unknown from .definitions.C import C from .definitions.CPP import CPP from .definitions.CSharp import CSharp from .definitions.CSS import CSS from .definitions.Java import Java from .definitions.JavaScript import JavaScript from .definitions.Python import Python from .definitions.Vala import Vala from .definitions.html import HTML # Stop ignoring PyUnusedCodeBear
""" This directory holds means to get generic information for specific languages. """ # Start ignoring PyUnusedCodeBear from .Language import Language from .Language import Languages from .definitions.Unknown import Unknown from .definitions.C import C from .definitions.CPP import CPP from .definitions.CSharp import CSharp from .definitions.CSS import CSS from .definitions.Fortran import Fortran from .definitions.Golang import Golang from .definitions.html import HTML from .definitions.Java import Java from .definitions.JavaScript import JavaScript from .definitions.JSP import JSP from .definitions.Matlab import Matlab from .definitions.ObjectiveC import ObjectiveC from .definitions.PHP import PHP from .definitions.PLSQL import PLSQL from .definitions.Python import Python from .definitions.Ruby import Ruby from .definitions.Scala import Scala from .definitions.Swift import Swift from .definitions.Vala import Vala # Stop ignoring PyUnusedCodeBear
Add definition into default import
Language: Add definition into default import Fixes https://github.com/coala/coala/issues/4688
Python
agpl-3.0
coala/coala,SanketDG/coala,shreyans800755/coala,karansingh1559/coala,kartikeys98/coala,kartikeys98/coala,jayvdb/coala,CruiseDevice/coala,Nosferatul/coala,shreyans800755/coala,aptrishu/coala,nemaniarjun/coala,aptrishu/coala,karansingh1559/coala,jayvdb/coala,rimacone/testing2,Asalle/coala,CruiseDevice/coala,shreyans800755/coala,coala-analyzer/coala,coala-analyzer/coala,nemaniarjun/coala,karansingh1559/coala,Asalle/coala,coala/coala,SanketDG/coala,coala-analyzer/coala,SanketDG/coala,rimacone/testing2,CruiseDevice/coala,coala/coala,aptrishu/coala,Nosferatul/coala,kartikeys98/coala,jayvdb/coala,Nosferatul/coala,rimacone/testing2,Asalle/coala,nemaniarjun/coala
88e61076e3a130d3a8579996966aef2fd6ec43ef
verification/tests.py
verification/tests.py
""" TESTS is a dict with all you tests. Keys for this will be categories' names. Each test is dict with "input" -- input data for user function "answer" -- your right answer "explanation" -- not necessary key, it's using for additional info in animation. """ TESTS = { "Basics": [ { "input": [2, 3], "answer": 5, "explanation": "2+3=?" }, { "input": [2, 7], "answer": 9, "explanation": "2+7=?" } ], "Additional": [ { "input": [6, 3], "answer": 9, "explanation": "6+3=?" }, { "input": [6, 7], "answer": 13, "explanation": "6+7=?" } ] }
""" TESTS is a dict with all you tests. Keys for this will be categories' names. Each test is dict with "input" -- input data for user function "answer" -- your right answer "explanation" -- not necessary key, it's using for additional info in animation. """ TESTS = { "Basics": [ { "input": [2, 3], "answer": 5, "explanation": "2+3=?" }, { "input": [2, 7], "answer": 9, "explanation": "2+7=?" } ], "Extra": [ { "input": [6, 3], "answer": 9, "explanation": "6+3=?" }, { "input": [6, 7], "answer": 13, "explanation": "6+7=?" } ] }
Test category example rename for ordering
Test category example rename for ordering
Python
mit
cielavenir/checkio-task-painting-wall,cielavenir/checkio-task-painting-wall,cielavenir/checkio-task-painting-wall
ac25dd0b2bf3188e1f4325ccdab78e79e7f0a937
spiceminer/kernel/__init__.py
spiceminer/kernel/__init__.py
#!/usr/bin/env python #-*- coding:utf-8 -*- from .highlevel import Kernel # Legacy support (DEPRECATED) from .legacy_support import * from ..bodies import get def load(path='.', recursive=True, followlinks=False): return Kernel.load(**locals()) def unload(path='.', recursive=True, followlinks=False): return Kernel.unload(**locals())
#!/usr/bin/env python #-*- coding:utf-8 -*- from .highlevel import Kernel def load(path='.', recursive=True, followlinks=False, force_reload=False): return Kernel.load(**locals()) def load_single(cls, path, extension=None, force_reload=False): return Kernel.load_single(**locals()) def unload(path='.', recursive=True, followlinks=False): return Kernel.unload(**locals())
Change the interface of the kernel submodule to prepare for the API change. * Remove legacy support * Add load_single() * Fix missing keyword argument in load()
Change the interface of the kernel submodule to prepare for the API change. * Remove legacy support * Add load_single() * Fix missing keyword argument in load()
Python
mit
DaRasch/spiceminer,DaRasch/spiceminer
4a4ee00ef79003ed4d98a3daca9e26cffc91210e
Seeder/class_based_comments/views.py
Seeder/class_based_comments/views.py
import forms from django.views.generic.edit import CreateView class CommentView(CreateView): """ View for creating and listing comments. """ anonymous_comment_form = forms.AnonymousCommentForm registered_comment_form = forms.RegisteredCommentForm anonymous_threaded_comment_form = forms.AnonymousThreadedCommentForm registered_threaded_comment_form = forms.RegisteredThreadedCommentForm prefix = 'comments' threaded = False # disable comments for anonymous users enforce_login = False def get_form(self, form_class=None): authenticated = self.request.user.is_authenticated() if self.enforce_login and not authenticated: raise NotImplemented('Report a bug to show interest in this ' 'feature... :P') if self.threaded: if authenticated: return self.registered_threaded_comment_form return self.anonymous_threaded_comment_form else: if authenticated: return self.registered_comment_form return self.anonymous_comment_form
import forms from django.views.generic.detail import SingleObjectMixin from django.views.generic.edit import CreateView from models import Comment class CommentView(CreateView, SingleObjectMixin): """ View for creating and listing comments. """ anonymous_comment_form = forms.AnonymousCommentForm registered_comment_form = forms.RegisteredCommentForm anonymous_threaded_comment_form = forms.AnonymousThreadedCommentForm registered_threaded_comment_form = forms.RegisteredThreadedCommentForm prefix = 'comment' threaded = False # disable comments for anonymous users enforce_login = False def get_context_data(self, **kwargs): context = super(CommentView, self).get_context_data(**kwargs) context['comments'] = Comment.objects.for_model(self.get_object()) return context def get_form(self, form_class=None): authenticated = self.request.user.is_authenticated() if self.enforce_login and not authenticated: raise NotImplemented('Report a bug to show interest in this ' 'feature...') if self.threaded: if authenticated: return self.registered_threaded_comment_form return self.anonymous_threaded_comment_form else: if authenticated: return self.registered_comment_form return self.anonymous_comment_form def form_valid(self, form): user = self.request.user comment = form.save(commit=False) if user.is_authenticated(): comment.user = user comment.save()
Comment view with saving and lists
Comment view with saving and lists
Python
mit
WebArchivCZ/Seeder,WebArchivCZ/Seeder,WebArchivCZ/Seeder,WebArchivCZ/Seeder,WebArchivCZ/Seeder
7ea0e2d8387b622f671638613a476dcbff6438e1
rest_framework_swagger/urls.py
rest_framework_swagger/urls.py
from django.conf.urls import patterns from django.conf.urls import url from rest_framework_swagger.views import SwaggerResourcesView, SwaggerApiView, SwaggerUIView urlpatterns = patterns( '', url(r'^$', SwaggerUIView.as_view(), name="django.swagger.base.view"), url(r'^api-docs/$', SwaggerResourcesView.as_view(), name="django.swagger.resources.view"), url(r'^api-docs/(?P<path>.*)/?$', SwaggerApiView.as_view(), name='django.swagger.api.view'), )
from django.conf.urls import url from rest_framework_swagger.views import SwaggerResourcesView, SwaggerApiView, SwaggerUIView urlpatterns = [ url(r'^$', SwaggerUIView.as_view(), name="django.swagger.base.view"), url(r'^api-docs/$', SwaggerResourcesView.as_view(), name="django.swagger.resources.view"), url(r'^api-docs/(?P<path>.*)/?$', SwaggerApiView.as_view(), name='django.swagger.api.view'), ]
Use the new style urlpatterns syntax to fix Django deprecation warnings
Use the new style urlpatterns syntax to fix Django deprecation warnings The `patterns()` syntax is now deprecated: https://docs.djangoproject.com/en/1.8/releases/1.8/#django-conf-urls-patterns And so under Django 1.8 results in warnings: rest_framework_swagger/urls.py:10: RemovedInDjango110Warning: django.conf.urls.patterns() is deprecated and will be removed in Django 1.10. Update your urlpatterns to be a list of django.conf.urls.url() instances instead. Fixes #380.
Python
bsd-2-clause
pombredanne/django-rest-swagger,aioTV/django-rest-swagger,cancan101/django-rest-swagger,visasq/django-rest-swagger,aioTV/django-rest-swagger,marcgibbons/django-rest-swagger,marcgibbons/django-rest-swagger,aioTV/django-rest-swagger,cancan101/django-rest-swagger,pombredanne/django-rest-swagger,arc6373/django-rest-swagger,cancan101/django-rest-swagger,visasq/django-rest-swagger,arc6373/django-rest-swagger,marcgibbons/django-rest-swagger,pombredanne/django-rest-swagger,marcgibbons/django-rest-swagger,visasq/django-rest-swagger,arc6373/django-rest-swagger,pombredanne/django-rest-swagger
3724f2895c704df595b083ecc56c56c351b6e32f
runbot_pylint/__openerp__.py
runbot_pylint/__openerp__.py
{ 'name': 'Runbot Pylint', 'category': 'Website', 'summary': 'Runbot', 'version': '1.0', 'description': "Runbot", 'author': 'OpenERP SA', 'depends': ['runbot'], 'external_dependencies': { }, 'data': [ "view/runbot_pylint_view.xml" ], 'installable': True, }
{ 'name': 'Runbot Pylint', 'category': 'Website', 'summary': 'Runbot', 'version': '1.0', 'description': "Runbot", 'author': 'OpenERP SA', 'depends': ['runbot'], 'external_dependencies': { 'bin': ['pylint'], }, 'data': [ "view/runbot_pylint_view.xml" ], 'installable': True, }
Add external dependencies to pylint bin
Add external dependencies to pylint bin
Python
agpl-3.0
amoya-dx/runbot-addons
d4722298a0fc03fca6ef17e246e3ffd74efc4d60
src/isomorphic_strings.py
src/isomorphic_strings.py
class Solution: # @param {string} s # @param {string} t # @return {boolean} def isIsomorphic(self, s, t): if len(s) != len(t): return False charDict = {} for i, c in enumerate(s): if c not in charDict.keys() and t[i] not in charDict.values(): charDict[c] = t[i] elif t[i] in charDict.values() or charDict[c] != t[i]: return False return True if __name__ == '__main__': test_list = [["ab","aa"],["aa", "bb"] result_list = [False, True] success = True solution = Solution() for i, s in enumerate(test_list): result = solution.isIsomorphic(s[0], s[1]) if result != result_list[i]: success = False print s print 'Expected value', result_list[i] print 'Actual value', result if success: print 'All the tests passed.' else: print 'Please fix the failed test'
class Solution: # @param {string} s # @param {string} t # @return {boolean} def isIsomorphic(self, s, t): if len(s) != len(t): return False charDict = {} for i, c in enumerate(s): if c not in charDict.keys() and t[i] not in charDict.values(): charDict[c] = t[i] elif c in charDict.keys() and charDict[c] != t[i]: return False elif t[i] in charDict.values(): if c not in charDict.keys(): return False elif charDict[c] != t[i]: return False return True if __name__ == '__main__': test_list = [["ab","aa"],["aa", "bb"], ["egg", "add"],["foo","bar"],["paper","title"]] result_list = [False, True, True, False, True] success = True solution = Solution() for i, s in enumerate(test_list): result = solution.isIsomorphic(s[0], s[1]) if result != result_list[i]: success = False print s print 'Expected value', result_list[i] print 'Actual value', result if success: print 'All the tests passed.' else: print 'Please fix the failed test'
Add solution for the isomorphic strings problem
Add solution for the isomorphic strings problem
Python
mit
chancyWu/leetcode
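The committed fix above walks the same mapping idea through several overlapping membership checks. A more direct sketch of the algorithm — one forward map plus a set of already-used target characters, written here as a standalone function rather than the repository's Solution class — behaves the same on those test cases:

def is_isomorphic(s, t):
    # Map each character of s to exactly one character of t, and refuse to
    # reuse a target character for two different source characters.
    if len(s) != len(t):
        return False
    forward = {}
    used = set()
    for a, b in zip(s, t):
        if a in forward:
            if forward[a] != b:
                return False
        elif b in used:
            return False
        else:
            forward[a] = b
            used.add(b)
    return True

assert is_isomorphic('egg', 'add')
assert is_isomorphic('paper', 'title')
assert not is_isomorphic('foo', 'bar')
assert not is_isomorphic('ab', 'aa')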
0ac82c9410476efd7494026c7089b8ba650061ad
paintings/urls.py
paintings/urls.py
from django.conf.urls import patterns, include, url from django.contrib import admin from paintings.views import GalleryDetailView, PaintingDetailView admin.autodiscover() urlpatterns = patterns('', url( r'^(?P<slug>[\w-]+)/$', GalleryDetailView.as_view(), name='gallery' ), url( r'^(?P<gallery>\w+)/(?P<slug>[\w-]+)/$', PaintingDetailView.as_view() ), )
from django.conf.urls import patterns, include, url from django.contrib import admin from paintings.views import GalleryDetailView, PaintingDetailView admin.autodiscover() urlpatterns = patterns('', url( r'^(?P<slug>[\w-]+)/$', GalleryDetailView.as_view(), name='gallery' ), url( r'^(?P<gallery>[\w-]+)/(?P<slug>[\w-]+)/$', PaintingDetailView.as_view() ), )
Fix bug with dash-separated gallery name
Fix bug with dash-separated gallery name
Python
mit
hombit/olgart,hombit/olgart,hombit/olgart,hombit/olgart
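The one-character fix above widens the gallery group from \w+ to [\w-]+. A standalone check with the standard re module (not the project's URLconf) shows why the original pattern could never match a dash-separated gallery name:

import re

old = re.compile(r'^(?P<gallery>\w+)/(?P<slug>[\w-]+)/$')
new = re.compile(r'^(?P<gallery>[\w-]+)/(?P<slug>[\w-]+)/$')

path = 'oil-paintings/blue-horse/'
assert old.match(path) is None                          # \w+ cannot cross the dash
assert new.match(path).group('gallery') == 'oil-paintings'
assert new.match(path).group('slug') == 'blue-horse'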
b19746badd83190b4e908144d6bc830178445dc2
cc/license/tests/test_cc_license.py
cc/license/tests/test_cc_license.py
"""Tests for functionality within the cc.license module. This file is a catch-all for tests with no place else to go.""" import cc.license def test_locales(): locales = cc.license.locales() for l in locales: assert type(l) == unicode for c in ('en', 'de', 'he', 'ja', 'fr'): assert c in locales
"""Tests for functionality within the cc.license module. This file is a catch-all for tests with no place else to go.""" import cc.license def test_locales(): locales = cc.license.locales() for l in locales: assert type(l) == unicode for c in ('en', 'de', 'he', 'ja', 'fr'): assert c in locales def test_cc_license_classes(): cc_dir = dir(cc.license) assert 'Jurisdiction' in cc_dir assert 'License' in cc_dir assert 'Question' in cc_dir assert 'LicenseSelector' in cc_dir
Add test to make sure certain classes are always found in cc.license, no matter where they are internally.
Add test to make sure certain classes are always found in cc.license, no matter where they are internally.
Python
mit
creativecommons/cc.license,creativecommons/cc.license
0ed7e87a6eeaab56d5c59a7e6874b5a5b0bab314
tests/test_pointcloud.py
tests/test_pointcloud.py
from simulocloud import PointCloud import json import numpy as np _TEST_XYZ = """[[10.0, 12.2, 14.4, 16.6, 18.8], [11.1, 13.3, 15.5, 17.7, 19.9], [0.1, 2.1, 4.5, 6.7, 8.9]]""" _EXPECTED_POINTS = np.array([( 10. , 11.1, 0.1), ( 12.2, 13.3, 2.1), ( 14.4, 15.5, 4.5), ( 16.6, 17.7, 6.7), ( 18.8, 19.9, 8.9)], dtype=[('x', '<f8'), ('y', '<f8'), ('z', '<f8')]) def test_PointCloud_from_lists(): """ Can PointCloud initialisable directly from `[[xs], [ys], [zs]]` ?""" assert np.all(PointCloud(json.loads(_TEST_XYZ)).points == _EXPECTED_POINTS)
from simulocloud import PointCloud import json import numpy as np _TEST_XYZ = [[10.0, 12.2, 14.4, 16.6, 18.8], [11.1, 13.3, 15.5, 17.7, 19.9], [0.1, 2.1, 4.5, 6.7, 8.9]] _EXPECTED_POINTS = np.array([( 10. , 11.1, 0.1), ( 12.2, 13.3, 2.1), ( 14.4, 15.5, 4.5), ( 16.6, 17.7, 6.7), ( 18.8, 19.9, 8.9)], dtype=[('x', '<f8'), ('y', '<f8'), ('z', '<f8')]) def test_PointCloud_from_lists(): """ Can PointCloud initialisable directly from `[[xs], [ys], [zs]]` ?""" assert np.all(PointCloud(_TEST_XYZ).points == _EXPECTED_POINTS)
Write test data as list unless otherwise needed
Write test data as list unless otherwise needed
Python
mit
stainbank/simulocloud
46b17b2798140c09690e5ae50a50794fd5210237
xdocker/run_worker.py
xdocker/run_worker.py
from rq import Connection, Queue, Worker from worker.exceptions import WorkerException def worker_exc_handler(job, exc_type, exc_value, traceback): if isinstance(exc_type, WorkerException): job.meta['exc_code'] = exc_type.code job.meta['exc_message'] = exc_type.message return True def main(): with Connection(): q = Queue() worker = Worker([q]) worker.push_exc_handler(worker_exc_handler) worker.work() if __name__ == '__main__': main()
from rq import Connection, Queue, Worker from worker.exceptions import WorkerException def worker_exc_handler(job, exc_type, exc_value, traceback): if issubclass(exc_type, WorkerException): job.meta['exc_code'] = exc_type.code job.meta['exc_message'] = exc_type.message return True def main(): with Connection(): q = Queue() worker = Worker([q]) worker.push_exc_handler(worker_exc_handler) worker.work() if __name__ == '__main__': main()
Fix exception subclass in worker exception handler
Fix exception subclass in worker exception handler
Python
apache-2.0
XDocker/Engine,XDocker/Engine
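The handler's exc_type argument is the exception class (as with sys.exc_info()), not an instance, so isinstance(exc_type, WorkerException) was always false and the job metadata never got set. A minimal illustration of the distinction, using a stand-in exception hierarchy rather than the project's real WorkerException:

class WorkerException(Exception):
    code = 1
    message = 'boom'

class WorkerTimeout(WorkerException):
    pass

exc_type = WorkerTimeout                           # what the handler receives
assert not isinstance(exc_type, WorkerException)   # a class is not an instance
assert issubclass(exc_type, WorkerException)       # the correct check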
c8afee155a27632f85891688c096d2fa2b6988e9
ckanext/ckanext-apply_permissions_for_service/ckanext/apply_permissions_for_service/logic/auth.py
ckanext/ckanext-apply_permissions_for_service/ckanext/apply_permissions_for_service/logic/auth.py
from ckan.plugins import toolkit from ckanext.apply_permissions_for_service import model def service_permission_application_list(context, data_dict): return {'success': True} def service_permission_application_show(context, data_dict): permission_application_id = toolkit.get_or_bust(data_dict, 'id') application = model.ApplyPermission.get(permission_application_id).as_dict() organization = application.get('organization') target_organization = application.get('target_organization') membership_organizations = toolkit.get_action('organization_list_for_user')(context, {'permission': 'read'}) if any(True for x in [org.get('id') for org in membership_organizations] if x in (organization['id'], target_organization['id'])): return {'success': True} return {'success': False, "msg": toolkit._("User not authorized to view permission application.")} def service_permission_settings(context, data_dict): return {'success': toolkit.check_access('package_update', context, {'id': data_dict['subsystem_id']})} def service_permission_application_create(context, data_dict): editor_or_admin_orgs = toolkit.get_action('organization_list_for_user')(context, {'permission': 'create_dataset'}) return {'success': len(editor_or_admin_orgs) > 0}
from ckan.plugins import toolkit from ckanext.apply_permissions_for_service import model def service_permission_application_list(context, data_dict): return {'success': True} def service_permission_application_show(context, data_dict): permission_application_id = toolkit.get_or_bust(data_dict, 'id') application = model.ApplyPermission.get(permission_application_id).as_dict() organization = application.get('organization') target_organization = application.get('target_organization') membership_organizations = toolkit.get_action('organization_list_for_user')(context, {'permission': 'read'}) if any(True for x in (org.get('id') for org in membership_organizations) if x in (organization['id'], target_organization['id'])): return {'success': True} return {'success': False, "msg": toolkit._("User not authorized to view permission application.")} def service_permission_settings(context, data_dict): return {'success': toolkit.check_access('package_update', context, {'id': data_dict['subsystem_id']})} def service_permission_application_create(context, data_dict): editor_or_admin_orgs = toolkit.get_action('organization_list_for_user')(context, {'permission': 'create_dataset'}) return {'success': len(editor_or_admin_orgs) > 0}
Use generator instead of list.
Use generator instead of list. Co-authored-by: Teemu Erkkola <[email protected]>
Python
mit
vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog
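Functionally the two comprehensions are equivalent here; the generator form just avoids materialising the full id list and lets any() stop at the first hit. A self-contained sketch of the difference, with made-up organisation dicts:

orgs = [{'id': 'a'}, {'id': 'b'}, {'id': 'c'}]
wanted = ('b', 'x')

# List comprehension: builds the whole list of ids before any() starts consuming.
eager = any(True for x in [org['id'] for org in orgs] if x in wanted)

# Generator expression: ids are produced lazily, so any() can short-circuit.
lazy = any(True for x in (org['id'] for org in orgs) if x in wanted)

assert eager and lazy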
8ce14cfb0044d90f2503a7bd940a7f6401c15db2
wagtail/admin/rich_text/editors/draftail.py
wagtail/admin/rich_text/editors/draftail.py
from django.forms import widgets from wagtail.admin.edit_handlers import RichTextFieldPanel from wagtail.admin.rich_text.converters.contentstate import ContentstateConverter from wagtail.core.rich_text import features class DraftailRichTextArea(widgets.Textarea): # this class's constructor accepts a 'features' kwarg accepts_features = True def get_panel(self): return RichTextFieldPanel def __init__(self, *args, **kwargs): self.options = kwargs.pop('options', None) self.features = kwargs.pop('features', None) if self.features is None: self.features = features.get_default_features() self.converter = ContentstateConverter(self.features) super().__init__(*args, **kwargs) def render(self, name, value, attrs=None): if value is None: translated_value = None else: translated_value = self.converter.from_database_format(value) return super().render(name, translated_value, attrs) def value_from_datadict(self, data, files, name): original_value = super().value_from_datadict(data, files, name) if original_value is None: return None return self.converter.to_database_format(original_value)
import json from django.forms import Media, widgets from wagtail.admin.edit_handlers import RichTextFieldPanel from wagtail.admin.rich_text.converters.contentstate import ContentstateConverter from wagtail.core.rich_text import features class DraftailRichTextArea(widgets.Textarea): # this class's constructor accepts a 'features' kwarg accepts_features = True def get_panel(self): return RichTextFieldPanel def __init__(self, *args, **kwargs): self.options = kwargs.pop('options', None) self.features = kwargs.pop('features', None) if self.features is None: self.features = features.get_default_features() self.converter = ContentstateConverter(self.features) super().__init__(*args, **kwargs) def render(self, name, value, attrs=None): if value is None: translated_value = None else: translated_value = self.converter.from_database_format(value) return super().render(name, translated_value, attrs) def render_js_init(self, id_, name, value): return "window.draftail.initEditor('{name}', {opts})".format( name=name, opts=json.dumps(self.options)) def value_from_datadict(self, data, files, name): original_value = super().value_from_datadict(data, files, name) if original_value is None: return None return self.converter.to_database_format(original_value) @property def media(self): return Media(js=[ 'wagtailadmin/js/draftail.js', ], css={ 'all': ['wagtailadmin/css/panels/dratail.css'] })
Integrate Draftail-related assets with Django widget
Integrate Draftail-related assets with Django widget
Python
bsd-3-clause
mikedingjan/wagtail,kaedroho/wagtail,timorieber/wagtail,mixxorz/wagtail,torchbox/wagtail,gasman/wagtail,gasman/wagtail,wagtail/wagtail,timorieber/wagtail,mixxorz/wagtail,nealtodd/wagtail,nimasmi/wagtail,kaedroho/wagtail,mikedingjan/wagtail,takeflight/wagtail,thenewguy/wagtail,zerolab/wagtail,timorieber/wagtail,thenewguy/wagtail,mixxorz/wagtail,FlipperPA/wagtail,zerolab/wagtail,takeflight/wagtail,nealtodd/wagtail,nimasmi/wagtail,zerolab/wagtail,takeflight/wagtail,zerolab/wagtail,mikedingjan/wagtail,mixxorz/wagtail,kaedroho/wagtail,torchbox/wagtail,thenewguy/wagtail,wagtail/wagtail,torchbox/wagtail,rsalmaso/wagtail,gasman/wagtail,rsalmaso/wagtail,zerolab/wagtail,nimasmi/wagtail,thenewguy/wagtail,wagtail/wagtail,rsalmaso/wagtail,nealtodd/wagtail,thenewguy/wagtail,timorieber/wagtail,rsalmaso/wagtail,torchbox/wagtail,nimasmi/wagtail,rsalmaso/wagtail,jnns/wagtail,kaedroho/wagtail,FlipperPA/wagtail,kaedroho/wagtail,takeflight/wagtail,gasman/wagtail,nealtodd/wagtail,wagtail/wagtail,FlipperPA/wagtail,wagtail/wagtail,jnns/wagtail,jnns/wagtail,FlipperPA/wagtail,mikedingjan/wagtail,jnns/wagtail,mixxorz/wagtail,gasman/wagtail
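The commit plugs the Draftail assets into Django's form-media machinery via a media property on the widget. The general pattern — shown here with a hypothetical widget and placeholder asset paths, not Wagtail's real files — is either an inner Media class or, when the paths need computing, an equivalent media property:

from django import forms

class RichTextWidget(forms.Textarea):
    # Hypothetical widget; 'editor/editor.js' and 'editor/editor.css' are placeholders.
    class Media:
        js = ['editor/editor.js']
        css = {'all': ['editor/editor.css']}

Any form that uses such a widget aggregates these assets, so rendering {{ form.media }} in a template emits the corresponding script and link tags.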
3db4d306c779ef3a84133dbbfc5614d514d72411
pi_gpio/handlers.py
pi_gpio/handlers.py
from flask.ext.restful import fields from meta import BasicResource from config.pins import PinHttpManager from pi_gpio import app HTTP_MANAGER = PinHttpManager() class Pin(BasicResource): def __init__(self): super(Pin, self).__init__() self.fields = { "num": fields.Integer, "mode": fields.String, "value": fields.Integer } def pin_not_found(self): return {'message': 'Pin not found'}, 404 class PinList(Pin): def get(self): result = HTTP_MANAGER.read_all() return self.response(result, 200) class PinDetail(Pin): def get(self, pin_num): result = HTTP_MANAGER.read_one(pin_num) if not result: return self.pin_not_found() return self.response(result, 200) def patch(self, pin_num): self.parser.add_argument('value', type=int) args = self.parser.parse_args() result = HTTP_MANAGER.update_value(pin_num, args['value']) if not result: return self.pin_not_found() return self.response(HTTP_MANAGER.read_one(pin_num), 200)
from flask.ext.restful import fields from meta import BasicResource from config.pins import PinHttpManager from pi_gpio import app HTTP_MANAGER = PinHttpManager() class Pin(BasicResource): def __init__(self): super(Pin, self).__init__() self.fields = { "num": fields.Integer, "mode": fields.String, "value": fields.Integer, "resistor": fields.String, "initial": fields.String, "event": fields.String, "bounce": fields.Integer } def pin_not_found(self): return {'message': 'Pin not found'}, 404 class PinList(Pin): def get(self): result = HTTP_MANAGER.read_all() return self.response(result, 200) class PinDetail(Pin): def get(self, pin_num): result = HTTP_MANAGER.read_one(pin_num) if not result: return self.pin_not_found() return self.response(result, 200) def patch(self, pin_num): self.parser.add_argument('value', type=int) args = self.parser.parse_args() result = HTTP_MANAGER.update_value(pin_num, args['value']) if not result: return self.pin_not_found() return self.response(HTTP_MANAGER.read_one(pin_num), 200)
Add new fields to response
Add new fields to response
Python
mit
projectweekend/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server
afef9c39ab98ef6614b0fe5103db4e641a9f0d6e
grapheneapi/aio/websocket.py
grapheneapi/aio/websocket.py
# -*- coding: utf-8 -*- import asyncio import websockets import logging import json from jsonrpcclient.clients.websockets_client import WebSocketsClient from .rpc import Rpc log = logging.getLogger(__name__) class Websocket(Rpc): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.ws = None self.client = None async def connect(self): ssl = True if self.url[:3] == "wss" else None self.ws = await websockets.connect(self.url, ssl=ssl, loop=self.loop) self.client = WebSocketsClient(self.ws) async def disconnect(self): if self.ws: await self.ws.close() async def rpcexec(self, *args): """ Execute a RPC call :param args: args are passed as "params" in json-rpc request: {"jsonrpc": "2.0", "method": "call", "params": "[x, y, z]"} """ if not self.ws: await self.connect() log.debug(json.dumps(args)) response = await self.client.request("call", *args) # Return raw response (jsonrpcclient does own parsing) return response.text
# -*- coding: utf-8 -*- import asyncio import websockets import logging import json from jsonrpcclient.clients.websockets_client import WebSocketsClient from jsonrpcclient.exceptions import ReceivedErrorResponseError from .rpc import Rpc log = logging.getLogger(__name__) class Websocket(Rpc): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.ws = None self.client = None async def connect(self): ssl = True if self.url[:3] == "wss" else None self.ws = await websockets.connect(self.url, ssl=ssl, loop=self.loop) self.client = WebSocketsClient(self.ws) async def disconnect(self): if self.ws: await self.ws.close() async def rpcexec(self, *args): """ Execute a RPC call :param args: args are passed as "params" in json-rpc request: {"jsonrpc": "2.0", "method": "call", "params": "[x, y, z]"} """ if not self.ws: await self.connect() log.debug(json.dumps(args)) try: response = await self.client.request("call", *args) except ReceivedErrorResponseError as e: # Extract error data from ErrorResponse object response = {"error": e.response.data} return json.dumps(response) # Return raw response (jsonrpcclient does own parsing) return response.text
Fix aio RPC error extraction
Fix aio RPC error extraction
Python
mit
xeroc/python-graphenelib
23bbb5737602408ba553b77810103d7b32140c89
test.py
test.py
#!/usr/bin/env python import neukrill_net.utils as utils import neukrill_net.image_processing as image_processing import csv import pickle from sklearn.externals import joblib import numpy as np import glob import os def main(): settings = utils.Settings('settings.json') image_fname_dict = settings.image_fnames processing = lambda image: image_processing.resize_image(image, (48,48)) X, names = utils.load_data(image_fname_dict, processing=processing, verbose=True) clf = joblib.load('model.pkl') p = clf.predict_proba(X) with open('submission.csv', 'w') as csv_out: out_writer = csv.writer(csv_out, delimiter=',') out_writer.writerow(['image'] + list(settings.classes)) for index in range(len(names)): out_writer.writerow([names[index]] + list(p[index,])) if __name__ == '__main__': main()
#!/usr/bin/env python import neukrill_net.utils as utils import neukrill_net.image_processing as image_processing import csv import pickle from sklearn.externals import joblib import numpy as np import glob import os def main(): settings = utils.Settings('settings.json') image_fname_dict = settings.image_fnames processing = lambda image: image_processing.resize_image(image, (48,48)) X, names = utils.load_data(image_fname_dict, processing=processing, verbose=True) clf = joblib.load('model.pkl') p = clf.predict_proba(X) utils.write_predictions('submission.csv', p, names, settings) if __name__ == '__main__': main()
Swap to using submission prediction writer function
Swap to using submission prediction writer function
Python
mit
Neuroglycerin/neukrill-net-work,Neuroglycerin/neukrill-net-work,Neuroglycerin/neukrill-net-work
17ae37bbc42c9e0f8633eec8fe9b49defb647191
sjkscan/logging.py
sjkscan/logging.py
import logging def init_logging(): """Initialise logging. Set up basic logging. """ logging.basicConfig( level=logging.DEBUG, datefmt='%Y-%m-%d %H:%M:%S', format='%(asctime)s %(message)s', ) logging.debug('Initialising logging')
import logging def init_logging(): """Initialise logging. Set up basic logging. """ logging.basicConfig( level=logging.DEBUG, datefmt='%Y-%m-%d %H:%M:%S', format='%(asctime)s [%(levelname)s] %(message)s', ) logging.debug('Initialising logging')
Add levelname to log output
Add levelname to log output
Python
bsd-2-clause
sjktje/sjkscan,sjktje/sjkscan
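With %(levelname)s in the format string every record carries its severity. A self-contained example of what the configured output looks like (the timestamps in the comments are illustrative):

import logging

logging.basicConfig(
    level=logging.DEBUG,
    datefmt='%Y-%m-%d %H:%M:%S',
    format='%(asctime)s [%(levelname)s] %(message)s',
)
logging.debug('scan started')
logging.warning('low disk space')
# 2016-03-01 10:15:00 [DEBUG] scan started
# 2016-03-01 10:15:00 [WARNING] low disk space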
9a19da30a933bc2872b9fc5b5966823c43e1982f
website/pages/tests.py
website/pages/tests.py
# -*- coding: utf-8 -*- """ File: tests.py Creator: MazeFX Date: 12-7-2016 Tests written for testing main website pages (home, about, contact, etc) """ from django.core.urlresolvers import resolve from django.test import TestCase from django.http import HttpRequest from django.template.loader import render_to_string from website.pages.views import home_page, send_email class HomePageTest(TestCase): def test_root_url_resolves_to_home_page_view(self): found = resolve('/') self.assertEqual(found.func, home_page) def test_home_page_returns_correct_html(self): request = HttpRequest() response = home_page(request) expected_html = render_to_string('pages/home.html') self.assertEqual(response.content.decode(), expected_html) class SendEmailTest(TestCase): def test_send_email_url_resolves_to_send_email_view(self): found = resolve('/send-email/') self.assertEqual(found.func, send_email) def test_send_email_returns_correct_html(self): request = HttpRequest() response = send_email(request) expected_html = render_to_string('pages/send_email.html') self.assertEqual(response.content.decode(), expected_html)
# -*- coding: utf-8 -*- """ File: tests.py Creator: MazeFX Date: 12-7-2016 Tests written for testing main website pages (home, about, contact, etc) Contact page has the ability to send emails through anymail/mailgun. """ from django.core.urlresolvers import resolve from django.test import TestCase from django.http import HttpRequest from django.template.loader import render_to_string from website.pages.views import home_page, contact class HomePageTest(TestCase): def test_root_url_resolves_to_home_page_view(self): found = resolve('/') self.assertEqual(found.func, home_page) def test_home_page_returns_correct_html(self): request = HttpRequest() response = home_page(request) expected_html = render_to_string('pages/home.html') self.assertEqual(response.content.decode(), expected_html) class ContactTest(TestCase): def test_contact_url_resolves_to_contact_view(self): found = resolve('/contact/') self.assertEqual(found.func, contact) def test_contact_returns_correct_html(self): request = HttpRequest() response = contact(request) expected_html = render_to_string('pages/contact.html') self.assertEqual(response.content.decode(), expected_html)
Change send email to contact namespace
Change send email to contact namespace
Python
mit
MazeFX/cookiecutter_website_project,MazeFX/cookiecutter_website_project,MazeFX/cookiecutter_website_project,MazeFX/cookiecutter_website_project
f185f04f6efdabe161ae29ba72f7208b8adccc41
bulletin/tools/plugins/models.py
bulletin/tools/plugins/models.py
from django.db import models from bulletin.models import Post class Event(Post): start_date = models.DateTimeField() end_date = models.DateTimeField(null=True, blank=True) time = models.CharField(max_length=255, null=True, blank=True) organization = models.CharField(max_length=255, null=True, blank=True) location = models.CharField(max_length=255) class Job(Post): organization = models.CharField(max_length=255) class NewResource(Post): blurb = models.TextField() class Opportunity(Post): blurb = models.TextField() class Meta: verbose_name_plural = 'opportunities' class Story(Post): blurb = models.TextField() date = models.DateTimeField() class Meta: verbose_name_plural = 'stories'
from django.db import models from bulletin.models import Post class Event(Post): start_date = models.DateTimeField() end_date = models.DateTimeField(null=True, blank=True) time = models.CharField(max_length=255, null=True, blank=True) organization = models.CharField(max_length=255, null=True, blank=True) location = models.CharField(max_length=255) class Job(Post): organization = models.CharField(max_length=255) class NewResource(Post): blurb = models.TextField() verbose_name = 'newresource' class Opportunity(Post): blurb = models.TextField() class Meta: verbose_name_plural = 'opportunities' class Story(Post): blurb = models.TextField() date = models.DateTimeField() class Meta: verbose_name_plural = 'stories'
Set verbose name of NewResource.
Set verbose name of NewResource.
Python
mit
AASHE/django-bulletin,AASHE/django-bulletin,AASHE/django-bulletin
4641b9a1b9a79fdeb0aaa3264de7bd1703b1d1fa
alexandria/web.py
alexandria/web.py
from alexandria import app, mongo from decorators import * from flask import render_template, request, jsonify, g, send_from_directory, redirect, url_for, session, flash import os import shutil import requests from pymongo import MongoClient from functools import wraps import bcrypt from bson.objectid import ObjectId @app.route('/', methods=['GET']) @authenticated def index(): return render_template('app.html') @app.route('/portal') def portal(): if not session.get('username'): return render_template('portal.html') else: return render_template('index.html') @app.route('/logout') def logout(): session.pop('username', None) session.pop('role', None) session.pop('realname', None) return redirect(url_for('index')) @app.route('/download/<id>/<format>') @authenticated def download(id, format): book = mongo.Books.find({'id':id})[0] response = send_from_directory(app.config['LIB_DIR'], id+'.'+format) response.headers.add('Content-Disposition', 'attachment; filename="' + book['title'] + '.' + format + '"') return response @app.route('/upload') @authenticated @administrator def upload(): return render_template('upload.html') if __name__ == "__main__": app.run()
from alexandria import app, mongo from decorators import * from flask import render_template, request, jsonify, g, send_from_directory, redirect, url_for, session, flash import os import shutil import requests from pymongo import MongoClient from functools import wraps import bcrypt from bson.objectid import ObjectId @app.route('/', methods=['GET']) @authenticated def index(): return render_template('app.html') @app.route('/portal') def portal(): if not session.get('username'): return render_template('portal.html') else: return redirect(url_for('index')) @app.route('/logout') def logout(): session.pop('username', None) session.pop('role', None) session.pop('realname', None) return redirect(url_for('index')) @app.route('/download/<id>/<format>') @authenticated def download(id, format): book = mongo.Books.find({'id':id})[0] response = send_from_directory(app.config['LIB_DIR'], id+'.'+format) response.headers.add('Content-Disposition', 'attachment; filename="' + book['title'] + '.' + format + '"') return response @app.route('/upload') @authenticated @administrator def upload(): return render_template('upload.html') if __name__ == "__main__": app.run()
Fix return on active user accessing the portal
Fix return on active user accessing the portal
Python
mit
citruspi/Alexandria,citruspi/Alexandria
e3928f489f481c9e44c634d7ee98afc5425b4432
tests/test_yaml_utils.py
tests/test_yaml_utils.py
import pytest from apispec import yaml_utils def test_load_yaml_from_docstring(): def f(): """ Foo bar baz quux --- herp: 1 derp: 2 """ result = yaml_utils.load_yaml_from_docstring(f.__doc__) assert result == {"herp": 1, "derp": 2} @pytest.mark.parametrize("docstring", (None, "", "---")) def test_load_yaml_from_docstring_empty_docstring(docstring): assert yaml_utils.load_yaml_from_docstring(docstring) == {} @pytest.mark.parametrize("docstring", (None, "", "---")) def test_load_operations_from_docstring_empty_docstring(docstring): assert yaml_utils.load_operations_from_docstring(docstring) == {}
import pytest from apispec import yaml_utils def test_load_yaml_from_docstring(): def f(): """ Foo bar baz quux --- herp: 1 derp: 2 """ result = yaml_utils.load_yaml_from_docstring(f.__doc__) assert result == {"herp": 1, "derp": 2} @pytest.mark.parametrize("docstring", (None, "", "---")) def test_load_yaml_from_docstring_empty_docstring(docstring): assert yaml_utils.load_yaml_from_docstring(docstring) == {} @pytest.mark.parametrize("docstring", (None, "", "---")) def test_load_operations_from_docstring_empty_docstring(docstring): assert yaml_utils.load_operations_from_docstring(docstring) == {} def test_dict_to_yaml_unicode(): assert yaml_utils.dict_to_yaml({"가": "나"}) == '"\\uAC00": "\\uB098"\n' assert yaml_utils.dict_to_yaml({"가": "나"}, {"allow_unicode": True}) == "가: 나\n"
Add regression test for generating yaml with unicode
Add regression test for generating yaml with unicode
Python
mit
marshmallow-code/smore,marshmallow-code/apispec
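The new test pins how dict_to_yaml treats non-ASCII text. Assuming the helper is a thin wrapper around PyYAML's dump (as the kwargs suggest), the underlying behaviour can be reproduced directly; the exact quoting shown is PyYAML's, so treat this as a sketch:

import yaml

escaped = yaml.dump({'가': '나'}, default_flow_style=False)
literal = yaml.dump({'가': '나'}, default_flow_style=False, allow_unicode=True)

assert '\\uAC00' in escaped      # without allow_unicode the key is escaped
assert literal == '가: 나\n'      # with allow_unicode the characters survive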
c380c328b43f399f4d614486c53163709170f33b
cob/celery/app.py
cob/celery/app.py
from celery import Celery from celery.loaders.base import BaseLoader class CobLoader(BaseLoader): def on_worker_init(self): from ..app import build_app # this will make the tasks grains to be properly loaded and discovered build_app() celery_app = Celery('cob-celery', backend='rpc://', loader=CobLoader)
from celery import Celery from celery.loaders.base import BaseLoader from logbook.compat import LoggingHandler class CobLoader(BaseLoader): def on_worker_init(self): from ..app import build_app # this will make the tasks grains to be properly loaded and discovered LoggingHandler(level=logbook.DEBUG).push_application() build_app() celery_app = Celery('cob-celery', backend='rpc://', loader=CobLoader)
Use LoggingHandler in Celery worker to emit logs from workers
Use LoggingHandler in Celery worker to emit logs from workers
Python
bsd-3-clause
getweber/weber-cli
baf09f8b308626abb81431ddca4498409fc9d5ce
campaigns/tests/test_views.py
campaigns/tests/test_views.py
from django.test import TestCase from django.http import HttpRequest from campaigns.views import create_campaign from campaigns.models import Campaign from campaigns.forms import CampaignForm class HomePageTest(TestCase): def test_does_root_url_resolves_the_home_page(self): called = self.client.get('/') self.assertTemplateUsed(called, 'home.html') class CampaignsViewsTest(TestCase): def test_does_create_campaign_resolves_the_right_url(self): called = self.client.get('/campaigns/new') self.assertTemplateUsed(called, 'new_campaign.html') # Trying to do self.client.post was using GET request for some # reason so i made it that ugly def test_does_create_camapign_saves_objects_with_POST_requests(self): self.assertEqual(Campaign.objects.count(), 0) request = HttpRequest() request.method = 'POST' request.POST['title'] = 'C1' request.POST['description'] = 'C1Descr' create_campaign(request) campaign = Campaign.objects.first() self.assertEqual(Campaign.objects.count(), 1) self.assertEqual(campaign.title, 'C1') self.assertEqual(campaign.description, 'C1Descr')
from django.test import TestCase from django.http import HttpRequest from campaigns.views import create_campaign from campaigns.models import Campaign from campaigns.forms import CampaignForm def make_POST_request(titleValue, descriptionValue): request = HttpRequest() request.method = 'POST' request.POST['title'] = titleValue request.POST['description'] = descriptionValue return request class HomePageTest(TestCase): def test_does_root_url_resolves_the_home_page(self): called = self.client.get('/') self.assertTemplateUsed(called, 'home.html') class CampaignsViewsTest(TestCase): def test_does_create_campaign_resolves_the_right_url(self): called = self.client.get('/campaigns/new') self.assertTemplateUsed(called, 'new_campaign.html') # Trying to do self.client.post was using GET request for some # reason so i made it that ugly def test_does_create_campaign_saves_objects_with_POST_requests(self): self.assertEqual(Campaign.objects.count(), 0) create_campaign(make_POST_request('C1', 'C1Descr')) campaign = Campaign.objects.first() self.assertEqual(Campaign.objects.count(), 1) self.assertEqual(campaign.title, 'C1') self.assertEqual(campaign.description, 'C1Descr') def test_create_campaign_dont_saves_empty_objects(self): self.assertEqual(Campaign.objects.count(), 0) create_campaign(make_POST_request('', '')) self.assertEqual(Campaign.objects.count(), 0)
Refactor some redundancy in the views tests
Refactor some redundancy in the views tests
Python
apache-2.0
Springsteen/tues_admission,Springsteen/tues_admission,Springsteen/tues_admission,Springsteen/tues_admission
2ca6f765a3bd1eca6bd255f9c679c9fbea78484a
run_maya_tests.py
run_maya_tests.py
"""Use Mayapy for testing Usage: $ mayapy run_maya_tests.py """ import sys import nose import warnings from nose_exclude import NoseExclude warnings.filterwarnings("ignore", category=DeprecationWarning) if __name__ == "__main__": from maya import standalone standalone.initialize() argv = sys.argv[:] argv.extend([ # Sometimes, files from Windows accessed # from Linux cause the executable flag to be # set, and Nose has an aversion to these # per default. "--exe", "--verbose", "--with-doctest", "--with-coverage", "--cover-html", "--cover-tests", "--cover-erase", "--exclude-dir=mindbender/nuke", "--exclude-dir=mindbender/houdini", "--exclude-dir=mindbender/schema", "--exclude-dir=mindbender/plugins", # We can expect any vendors to # be well tested beforehand. "--exclude-dir=mindbender/vendor", ]) nose.main(argv=argv, addplugins=[NoseExclude()])
"""Use Mayapy for testing Usage: $ mayapy run_maya_tests.py """ import sys import nose import logging import warnings from nose_exclude import NoseExclude warnings.filterwarnings("ignore", category=DeprecationWarning) if __name__ == "__main__": from maya import standalone standalone.initialize() log = logging.getLogger() # Discard default Maya logging handler log.handlers[:] = [] argv = sys.argv[:] argv.extend([ # Sometimes, files from Windows accessed # from Linux cause the executable flag to be # set, and Nose has an aversion to these # per default. "--exe", "--verbose", "--with-doctest", "--with-coverage", "--cover-html", "--cover-tests", "--cover-erase", "--exclude-dir=mindbender/nuke", "--exclude-dir=mindbender/houdini", "--exclude-dir=mindbender/schema", "--exclude-dir=mindbender/plugins", # We can expect any vendors to # be well tested beforehand. "--exclude-dir=mindbender/vendor", ]) nose.main(argv=argv, addplugins=[NoseExclude()])
Enhance readability of test output
Enhance readability of test output
Python
mit
MoonShineVFX/core,mindbender-studio/core,MoonShineVFX/core,mindbender-studio/core,getavalon/core,getavalon/core
6578b6d2dfca38940be278d82e4f8d8248ae3c79
convert_codecs.py
convert_codecs.py
#!/usr/env python # -*- coding: utf-8 -*- """Convert file codecs Usage: convert_codecs.py <sourceFile> <targetFile> <sourceEncoding> <targetEncoding> convert_codecs.py (-h | --help) """ import codecs from docopt import docopt __version__ = '0.1' __author__ = 'Honghe' BLOCKSIZE = 1024**2 # size in bytes def convert(sourceFile, targetFile, sourceEncoding, targetEncoding): with codecs.open(sourceFile, 'rb', sourceEncoding) as sfile: with codecs.open(targetFile, 'wb', targetEncoding) as tfile: while True: contents = sfile.read(BLOCKSIZE) if not contents: break tfile.write(contents) if __name__ == '__main__': arguments = docopt(__doc__) sourceFile = arguments['<sourceFile>'] targetFile = arguments['<targetFile>'] sourceEncoding = arguments['<sourceEncoding>'] targetEncoding = arguments['<targetEncoding>'] convert(sourceFile, targetFile, sourceEncoding, targetEncoding)
#!/usr/env python # -*- coding: utf-8 -*- """Convert file codecs Usage: convert_codecs.py <sourceFile> <targetFile> <sourceEncoding> <targetEncoding> convert_codecs.py (-h | --help) """ import codecs from docopt import docopt __version__ = '0.1' __author__ = 'Honghe' BLOCKSIZE = 1024 # number of characters in corresponding encoding, not bytes def convert(sourceFile, targetFile, sourceEncoding, targetEncoding): with codecs.open(sourceFile, 'rb', sourceEncoding) as sfile: with codecs.open(targetFile, 'wb', targetEncoding) as tfile: while True: contents = sfile.read(BLOCKSIZE) if not contents: break tfile.write(contents) if __name__ == '__main__': arguments = docopt(__doc__) sourceFile = arguments['<sourceFile>'] targetFile = arguments['<targetFile>'] sourceEncoding = arguments['<sourceEncoding>'] targetEncoding = arguments['<targetEncoding>'] convert(sourceFile, targetFile, sourceEncoding, targetEncoding)
Correct the comment of BLOCKSIZE
Correct the comment of BLOCKSIZE
Python
apache-2.0
Honghe/convert_codecs
e0b2ac4b71859708f4b35ae8ef7227b630a6e2d9
ctypeslib/test/test_toolchain.py
ctypeslib/test/test_toolchain.py
import unittest import sys from ctypeslib import h2xml, xml2py class ToolchainTest(unittest.TestCase): if sys.platform == "win32": def test(self): h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "windows.h", "-o", "_windows_gen.xml"]) xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"]) import _winapi_gen if __name__ == "__main__": import unittest unittest.main()
import unittest import sys from ctypeslib import h2xml, xml2py class ToolchainTest(unittest.TestCase): if sys.platform == "win32": def test_windows(self): h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "windows.h", "-o", "_windows_gen.xml"]) xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"]) import _winapi_gen def test(self): h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "stdio.h", "-o", "_stdio_gen.xml"]) xml2py.main(["xml2py", "_stdio_gen.xml", "-o", "_stdio_gen.py"]) import _stdio_gen if __name__ == "__main__": import unittest unittest.main()
Add a test for stdio.h.
Add a test for stdio.h.
Python
mit
sugarmanz/ctypeslib
649bea9ce3ebaf4ba44919097b731ba915703852
alembic/versions/30d0a626888_add_username.py
alembic/versions/30d0a626888_add_username.py
"""Add username Revision ID: 30d0a626888 Revises: 51375067b45 Create Date: 2015-10-29 10:32:03.077400 """ # revision identifiers, used by Alembic. revision = '30d0a626888' down_revision = '51375067b45' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): op.add_column('_user', sa.Column('username', sa.Unicode(255), unique=True)) op.create_unique_constraint( '_user_email_key', '_user', ['email']) def downgrade(): op.drop_column('_user', 'username') op.drop_constraint( '_user_email_key', table_name='_user', type_='unique')
"""Add username Revision ID: 30d0a626888 Revises: 51375067b45 Create Date: 2015-10-29 10:32:03.077400 """ # revision identifiers, used by Alembic. revision = '30d0a626888' down_revision = '51375067b45' branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa def upgrade(): """ SQL That equal to the following ALTER TABLE app_name._user ADD COLUMN username varchar(255); ALTER TABLE app_name._user ADD CONSTRAINT '_user_email_key' UNIQUE('email'); UPDATE app_name._version set version_num = '30d0a626888; """ op.add_column('_user', sa.Column('username', sa.Unicode(255), unique=True)) op.create_unique_constraint( '_user_email_key', '_user', ['email']) def downgrade(): op.drop_column('_user', 'username') op.drop_constraint( '_user_email_key', table_name='_user', type_='unique')
Add generated SQL example as comment
Add generated SQL example as comment
Python
apache-2.0
SkygearIO/skygear-server,rickmak/skygear-server,rickmak/skygear-server,SkygearIO/skygear-server,rickmak/skygear-server,SkygearIO/skygear-server,SkygearIO/skygear-server
281a096cea735845bdb74d60abf14f1422f2c624
test_runner/executable.py
test_runner/executable.py
import argh from .environments import Environment from .frameworks import Tempest from .utils import cleanup, Reporter LOG = Reporter(__name__).setup() def main(endpoint, username='admin', password='secrete', test_path='api'): environment = Environment(username, password, endpoint) with cleanup(environment): environment.build() framework = Tempest(environment, repo_dir='/opt/tempest', test_path=test_path) results = framework.run_tests() LOG.info('Results: {0}'.format(results)) if __name__ == '__main__': argh.dispatch_command(main)
import argh from .environments import Environment from .frameworks import Tempest from .utils import cleanup, Reporter LOG = Reporter(__name__).setup() def main(endpoint, username='admin', password='secrete', test_path='api'): environment = Environment(username, password, endpoint) with cleanup(environment): environment.build() framework = Tempest(environment, repo_dir='/opt/tempest', test_path=test_path) results = framework.run_tests() LOG.info('Results: {0}'.format(results)) argh.dispatch_command(main)
Move command dispatch into full module
Move command dispatch into full module
Python
mit
rcbops-qa/test_runner
0dd935a383d4b8d066dc091226b61119d245a7f9
threeOhOne.py
threeOhOne.py
#!/usr/bin/env python # -*- Coding: utf-8 -*- """ " In its present form, it simply takes a comma delimited .csv file and outputs a .txt file containing valid 301 redirect statements for an .htaccess file " " author: Claude Müller " wbsite: http://mediarocket.co.za " """ import csv class ThreeOhOne: def __ini__(self): pass def main(): threeOhOne = ThreeOhOne() if __name__ == "__main__": main()
#!/usr/bin/env python # -*- Coding: utf-8 -*- """ " In its present form, it simply takes a comma delimited .csv file and outputs a .txt file containing valid 301 redirect statements for an .htaccess file " " author: Claude Müller " wbsite: http://mediarocket.co.za " """ import sys import csv class ThreeOhOne: outputDir = 'outputs' def __init__(self, filename): self._process(sys.argv[1]) def _process(self, filename): try: fd = open(filename, 'rt') except FileNotFoundError: print('Error: File not found ;/') def main(): if len(sys.argv) < 2: print("usage: " + sys.argv[0] + " <the_file.csv>") exit(1) else: threeOhOne = ThreeOhOne(sys.argv[1]) if __name__ == "__main__": main()
Add command line argument capability
[py] Add command line argument capability
Python
mit
claudemuller/301csv2htaccess
464a5ffd02347b17ab6d2263d39e7868a01178f9
linter.py
linter.py
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by Jon Surrell # Copyright (c) 2014 Jon Surrell # # License: MIT # """This module exports the Govet plugin class.""" from SublimeLinter.lint import Linter, util class Govet(Linter): """Provides an interface to go vet.""" syntax = ('go', 'gosublime-go') cmd = ('go', 'tool', 'vet') regex = r'^.+:(?P<line>\d+):\d+:\s+(?P<message>.+)' tempfile_suffix = 'go' error_stream = util.STREAM_STDERR
# # linter.py # Linter for SublimeLinter3, a code checking framework for Sublime Text 3 # # Written by Jon Surrell # Copyright (c) 2014 Jon Surrell # # License: MIT # """This module exports the Govet plugin class.""" from SublimeLinter.lint import Linter, util class Govet(Linter): """Provides an interface to go vet.""" syntax = ('go', 'gosublime-go') cmd = ('go', 'tool', 'vet') regex = r'^.+:(?P<line>\d+):(?P<col>\d+):\s+(?P<message>.+)' tempfile_suffix = 'go' error_stream = util.STREAM_STDERR
Use named capture group "col" for column number
Use named capture group "col" for column number Reference: http://www.sublimelinter.com/en/latest/linter_attributes.html#regex
Python
mit
sirreal/SublimeLinter-contrib-govet
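SublimeLinter reads the line, col and message named groups out of each match, so adding (?P<col>\d+) lets it point at the exact column. A standalone check of the adjusted pattern against a go vet style diagnostic (plain re, outside the plugin):

import re

pattern = re.compile(r'^.+:(?P<line>\d+):(?P<col>\d+):\s+(?P<message>.+)')
m = pattern.match('main.go:14:2: unreachable code')

assert m.group('line') == '14'
assert m.group('col') == '2'
assert m.group('message') == 'unreachable code'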
52c7321c78c8a81b6b557d67fe5af44b8b32df4c
src/octoprint/__main__.py
src/octoprint/__main__.py
#!/usr/bin/env python2 # coding=utf-8 from __future__ import absolute_import, division, print_function if __name__ == "__main__": import octoprint octoprint.main()
#!/usr/bin/env python2 # coding=utf-8 from __future__ import absolute_import, division, print_function import sys if sys.version_info[0] >= 3: raise Exception("Octoprint does not support Python 3") if __name__ == "__main__": import octoprint octoprint.main()
Handle unsupported version at runtime.
Handle unsupported version at runtime. If you have an ancient setuptools, 4a36ddb3aa77b8d1b1a64c197607fa652705856c won't successfully prevent installing. These changes will at least give a sane error, rather than just barfing on random syntax errors due to the `unicode` type not being present in py3k. Cherry picked from 2f20f2d
Python
agpl-3.0
Jaesin/OctoPrint,foosel/OctoPrint,Jaesin/OctoPrint,Jaesin/OctoPrint,foosel/OctoPrint,Jaesin/OctoPrint,foosel/OctoPrint,foosel/OctoPrint
ca7fb88d36b386defab610388761609539e0a9cf
conary/build/capsulerecipe.py
conary/build/capsulerecipe.py
# # Copyright (c) 2009 rPath, Inc. # # This program is distributed under the terms of the Common Public License, # version 1.0. A copy of this license should have been distributed with this # source file in a file called LICENSE. If it is not present, the license # is always available at http://www.rpath.com/permanent/licenses/CPL-1.0. # # This program is distributed in the hope that it will be useful, but # without any warranty; without even the implied warranty of merchantability # or fitness for a particular purpose. See the Common Public License for # full details. # from conary.build import defaultrecipes from conary.build.recipe import RECIPE_TYPE_CAPSULE from conary.build.packagerecipe import BaseRequiresRecipe, AbstractPackageRecipe class AbstractCapsuleRecipe(AbstractPackageRecipe): internalAbstractBaseClass = 1 internalPolicyModules = ( 'packagepolicy', 'capsulepolicy' ) _recipeType = RECIPE_TYPE_CAPSULE def __init__(self, *args, **kwargs): klass = self._getParentClass('AbstractPackageRecipe') klass.__init__(self, *args, **kwargs) from conary.build import source self._addSourceAction('source.addCapsule', source.addCapsule) self._addSourceAction('source.addSource', source.addSource) exec defaultrecipes.CapsuleRecipe
# # Copyright (c) 2009 rPath, Inc. # # This program is distributed under the terms of the Common Public License, # version 1.0. A copy of this license should have been distributed with this # source file in a file called LICENSE. If it is not present, the license # is always available at http://www.rpath.com/permanent/licenses/CPL-1.0. # # This program is distributed in the hope that it will be useful, but # without any warranty; without even the implied warranty of merchantability # or fitness for a particular purpose. See the Common Public License for # full details. # import inspect from conary.build import action, defaultrecipes from conary.build.recipe import RECIPE_TYPE_CAPSULE from conary.build.packagerecipe import BaseRequiresRecipe, AbstractPackageRecipe class AbstractCapsuleRecipe(AbstractPackageRecipe): internalAbstractBaseClass = 1 internalPolicyModules = ( 'packagepolicy', 'capsulepolicy' ) _recipeType = RECIPE_TYPE_CAPSULE def __init__(self, *args, **kwargs): klass = self._getParentClass('AbstractPackageRecipe') klass.__init__(self, *args, **kwargs) from conary.build import build for name, item in build.__dict__.items(): if inspect.isclass(item) and issubclass(item, action.Action): self._addBuildAction(name, item) def loadSourceActions(self): self._loadSourceActions(lambda item: item._packageAction is True) exec defaultrecipes.CapsuleRecipe
Enable building hybrid capsule/non-capsule packages (CNY-3271)
Enable building hybrid capsule/non-capsule packages (CNY-3271)
Python
apache-2.0
fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary,fedora-conary/conary
d54e5f25601fe2f57a2c6be5524430f0068e05c4
image_translate/frames_rendering.py
image_translate/frames_rendering.py
# need to install python-opencv, pygame, numpy, scipy, PIL import sys import pygame from pygame.locals import * import opencv #this is important for capturing/displaying images from opencv import highgui def get_image(camera): img = highgui.cvQueryFrame(camera) # Add the line below if you need it (Ubuntu 8.04+) # im = opencv.cvGetMat(im) # convert Ipl image to PIL image return opencv.adaptors.Ipl2PIL(img) def render_flipped_camera(): camera = highgui.cvCreateCameraCapture(0) fps = 30.0 pygame.init() pygame.display.set_mode((640, 480)) pygame.display.set_caption("WebCam Demo") screen = pygame.display.get_surface() while True: events = pygame.event.get() for event in events: if event.type == QUIT or event.type == KEYDOWN: sys.exit(0) im = get_image(camera) pg_img = pygame.image.frombuffer(im.tostring(), im.size, im.mode) screen.blit(pg_img, (0, 0)) pygame.display.flip() pygame.time.delay(int(1000 * 1.0/fps)) if __name__ == "__main__": render_flipped_camera()
# need to install python-opencv, pygame, numpy, scipy, PIL import sys import pygame from pygame.locals import QUIT, KEYDOWN import opencv #this is important for capturing/displaying images from opencv import highgui def get_image(camera): img = highgui.cvQueryFrame(camera) # Add the line below if you need it (Ubuntu 8.04+) # im = opencv.cvGetMat(im) # convert Ipl image to PIL image return opencv.adaptors.Ipl2PIL(img) def render_flipped_camera(): camera = highgui.cvCreateCameraCapture(0) fps = 30.0 pygame.init() pygame.display.set_mode((640, 480)) pygame.display.set_caption("WebCam Demo") screen = pygame.display.get_surface() while True: events = pygame.event.get() for event in events: if event.type == QUIT or event.type == KEYDOWN: sys.exit(0) im = get_image(camera) pg_img = pygame.image.frombuffer(im.tostring(), im.size, im.mode) screen.blit(pg_img, (0, 0)) pygame.display.flip() pygame.time.delay(int(1000 * 1.0/fps)) if __name__ == "__main__": render_flipped_camera()
Remove brute and inconvenient star import
Remove brute and inconvenient star import
Python
mit
duboviy/study_languages
1d53f6dc8346a655a86e670d0d4de56f7dc93d04
gala/sparselol.py
gala/sparselol.py
import numpy as np from scipy import sparse from .sparselol_cy import extents_count def extents(labels): """Compute the extents of every integer value in ``arr``. Parameters ---------- labels : array of ints The array of values to be mapped. Returns ------- locs : sparse.csr_matrix A sparse matrix in which the nonzero elements of row i are the indices of value i in ``arr``. """ labels = labels.ravel() counts = np.bincount(labels) indptr = np.concatenate([[0], np.cumsum(counts)]) indices = np.empty(labels.size, int) extents_count(labels.ravel(), indptr.copy(), out=indices) locs = sparse.csr_matrix((indices, indices, indptr), dtype=int) return locs
import numpy as np from scipy import sparse from .sparselol_cy import extents_count def extents(labels): """Compute the extents of every integer value in ``arr``. Parameters ---------- labels : array of ints The array of values to be mapped. Returns ------- locs : sparse.csr_matrix A sparse matrix in which the nonzero elements of row i are the indices of value i in ``arr``. """ labels = labels.ravel() counts = np.bincount(labels) indptr = np.concatenate([[0], np.cumsum(counts)]) indices = np.empty(labels.size, int) extents_count(labels.ravel(), indptr.copy(), out=indices) one = np.ones((1,), dtype=int) data = np.lib.as_strided(one, shape=indices.shape, strides=(0,)) locs = sparse.csr_matrix((data, indices, indptr), dtype=int) return locs
Use stride tricks to save data memory
Use stride tricks to save data memory
Python
bsd-3-clause
janelia-flyem/gala,jni/gala
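The CSR data array above is backed by a single 1 repeated through a zero stride, so no length-N buffer of ones is ever allocated. The same effect in isolation — note that current NumPy spells the helper np.lib.stride_tricks.as_strided, and np.broadcast_to is an equivalent, safer spelling:

import numpy as np

n = 10 ** 6
one = np.ones((1,), dtype=int)

# Zero stride: every element of the view aliases the same underlying integer.
ones_view = np.lib.stride_tricks.as_strided(one, shape=(n,), strides=(0,))
assert ones_view.shape == (n,) and ones_view[0] == ones_view[-1] == 1

# Read-only equivalent without touching strides by hand.
ones_view2 = np.broadcast_to(1, (n,))
assert ones_view2.strides == (0,)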
7ebc9a4511d52707ce88a1b8bc2d3fa638e1fb91
c2rst.py
c2rst.py
import sphinx.parsers import docutils.parsers.rst as rst class CStrip(sphinx.parsers.Parser): def __init__(self): self.rst_parser = rst.Parser() def parse(self, inputstring, document): stripped = [] for line in inputstring.split("\n"): line = line.strip() if line == "//|": stripped.append("") elif line.startswith("//| "): stripped.append(line[len("//| "):]) stripped = "\r\n".join(stripped) self.rst_parser.parse(stripped, document)
import docutils.parsers import docutils.parsers.rst as rst class CStrip(docutils.parsers.Parser): def __init__(self): self.rst_parser = rst.Parser() def parse(self, inputstring, document): stripped = [] for line in inputstring.split("\n"): line = line.strip() if line == "//|": stripped.append("") elif line.startswith("//| "): stripped.append(line[len("//| "):]) stripped = "\r\n".join(stripped) self.rst_parser.parse(stripped, document)
Switch away from sphinx.parsers which isn't available in sphinx 1.3.5 on Read The Docs.
Switch away from sphinx.parsers which isn't available in sphinx 1.3.5 on Read The Docs.
Python
mit
adafruit/circuitpython,adafruit/circuitpython,adafruit/circuitpython,adafruit/micropython,adafruit/micropython,adafruit/circuitpython,adafruit/micropython,adafruit/circuitpython,adafruit/micropython,adafruit/micropython,adafruit/circuitpython
e37c7cace441e837120b820936c6f4ae8de78996
sts/controller_manager.py
sts/controller_manager.py
from sts.util.console import msg class ControllerManager(object): ''' Encapsulate a list of controllers objects ''' def __init__(self, controllers): self.uuid2controller = { controller.uuid : controller for controller in controllers } @property def controllers(self): return self.uuid2controller.values() @property def live_controllers(self): alive = [controller for controller in self.controllers if controller.alive] return set(alive) @property def down_controllers(self): down = [controller for controller in self.controllers if not controller.alive] return set(down) def get_controller(self, uuid): if uuid not in self.uuid2controller: raise ValueError("unknown uuid %s" % str(uuid)) return self.uuid2controller[uuid] def kill_all(self): for c in self.live_controllers: c.kill() self.uuid2controller = {} @staticmethod def kill_controller(controller): msg.event("Killing controller %s" % str(controller)) controller.kill() @staticmethod def reboot_controller(controller): msg.event("Restarting controller %s" % str(controller)) controller.start() def check_controller_processes_alive(self): controllers_with_problems = [] for c in self.live_controllers: (rc, msg) = c.check_process_status() if not rc: c.alive = False controllers_with_problems.append ( (c, msg) ) return controllers_with_problems
from sts.util.console import msg class ControllerManager(object): ''' Encapsulate a list of controllers objects ''' def __init__(self, controllers): self.uuid2controller = { controller.uuid : controller for controller in controllers } @property def controllers(self): cs = self.uuid2controller.values() cs.sort(key=lambda c: c.uuid) return cs @property def live_controllers(self): alive = [controller for controller in self.controllers if controller.alive] return set(alive) @property def down_controllers(self): down = [controller for controller in self.controllers if not controller.alive] return set(down) def get_controller(self, uuid): if uuid not in self.uuid2controller: raise ValueError("unknown uuid %s" % str(uuid)) return self.uuid2controller[uuid] def kill_all(self): for c in self.live_controllers: c.kill() self.uuid2controller = {} @staticmethod def kill_controller(controller): msg.event("Killing controller %s" % str(controller)) controller.kill() @staticmethod def reboot_controller(controller): msg.event("Restarting controller %s" % str(controller)) controller.start() def check_controller_processes_alive(self): controllers_with_problems = [] live = list(self.live_controllers) live.sort(key=lambda c: c.uuid) for c in live: (rc, msg) = c.check_process_status() if not rc: c.alive = False controllers_with_problems.append ( (c, msg) ) return controllers_with_problems
Make .controllers() deterministic (was using hash.values())
Make .controllers() deterministic (was using hash.values())
Python
apache-2.0
ucb-sts/sts,jmiserez/sts,ucb-sts/sts,jmiserez/sts
34b57742801f888af7597378bd00f9d06c2d3b66
packages/Python/lldbsuite/test/repl/quicklookobject/TestREPLQuickLookObject.py
packages/Python/lldbsuite/test/repl/quicklookobject/TestREPLQuickLookObject.py
# TestREPLQuickLookObject.py # # This source file is part of the Swift.org open source project # # Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors # Licensed under Apache License v2.0 with Runtime Library Exception # # See http://swift.org/LICENSE.txt for license information # See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors # # ------------------------------------------------------------------------------ """Test that QuickLookObject works correctly in the REPL""" import os, time import unittest2 import lldb from lldbsuite.test.lldbrepl import REPLTest, load_tests import lldbsuite.test.lldbtest as lldbtest class REPLQuickLookTestCase (REPLTest): mydir = REPLTest.compute_mydir(__file__) def doTest(self): self.command('true.customPlaygroundQuickLook()', patterns=['Logical = true']) self.command('1.25.customPlaygroundQuickLook()', patterns=['Double = 1.25']) self.command('Float(1.25).customPlaygroundQuickLook()', patterns=['Float = 1.25']) self.command('"Hello".customPlaygroundQuickLook()', patterns=['Text = \"Hello\"'])
# TestREPLQuickLookObject.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that QuickLookObject works correctly in the REPL"""

import os, time
import unittest2
import lldb
from lldbsuite.test.lldbrepl import REPLTest, load_tests
import lldbsuite.test.lldbtest as lldbtest

class REPLQuickLookTestCase (REPLTest):

    mydir = REPLTest.compute_mydir(__file__)

    def doTest(self):
        self.command('PlaygroundQuickLook(reflecting: true)', patterns=['Logical = true'])
        self.command('PlaygroundQuickLook(reflecting: 1.25)', patterns=['Double = 1.25'])
        self.command('PlaygroundQuickLook(reflecting: Float(1.25))', patterns=['Float = 1.25'])
        self.command('PlaygroundQuickLook(reflecting: "Hello")', patterns=['Text = \"Hello\"'])
Use the PlaygroundQuickLook(reflecting:) constructor in this test case
Use the PlaygroundQuickLook(reflecting:) constructor in this test case
Python
apache-2.0
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
58c13375f7ea4acaf21ccf1151460d79e59bfdf1
spotify/__init__.py
spotify/__init__.py
from __future__ import unicode_literals

import os

import cffi

__version__ = '2.0.0a1'

_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'

ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])

from spotify.error import *  # noqa
from __future__ import unicode_literals

import os
import weakref

import cffi

__version__ = '2.0.0a1'

_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'

ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])

# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()

from spotify.error import *  # noqa
Add global_weakrefs dict to keep strings in structs, etc alive
Add global_weakrefs dict to keep strings in structs, etc alive
Python
apache-2.0
kotamat/pyspotify,felix1m/pyspotify,jodal/pyspotify,mopidy/pyspotify,jodal/pyspotify,kotamat/pyspotify,mopidy/pyspotify,jodal/pyspotify,felix1m/pyspotify,felix1m/pyspotify,kotamat/pyspotify
9c8ae361d680f851c4cddb5fddf309190ce9f2b1
telemetry/telemetry/core/backends/chrome/inspector_memory_unittest.py
telemetry/telemetry/core/backends/chrome/inspector_memory_unittest.py
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import benchmark
from telemetry.unittest import tab_test_case


class InspectorMemoryTest(tab_test_case.TabTestCase):

  @benchmark.Enabled('has tabs')
  def testGetDOMStats(self):
    # Due to an issue with CrOS, we create a new tab here rather than
    # using the existing tab to get a consistent starting page on all platforms.
    self._tab = self._browser.tabs.New()

    self.Navigate('dom_counter_sample.html')

    # Document_count > 1 indicates that WebCore::Document loaded in Chrome
    # is leaking! The baseline should exactly match the numbers on:
    # unittest_data/dom_counter_sample.html
    # Please contact kouhei@, hajimehoshi@ when rebaselining.
    counts = self._tab.dom_stats
    self.assertEqual(counts['document_count'], 1,
        'Document leak is detected! '+
        'The previous document is likely retained unexpectedly.')
    self.assertEqual(counts['node_count'], 14, 'Node leak is detected!')
    self.assertEqual(counts['event_listener_count'], 2,
        'EventListener leak is detected!')
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import benchmark
from telemetry.unittest import tab_test_case


class InspectorMemoryTest(tab_test_case.TabTestCase):

  @benchmark.Enabled('has tabs')
  @benchmark.Disabled # http://crbug.com/422244
  def testGetDOMStats(self):
    # Due to an issue with CrOS, we create a new tab here rather than
    # using the existing tab to get a consistent starting page on all platforms.
    self._tab = self._browser.tabs.New()

    self.Navigate('dom_counter_sample.html')

    # Document_count > 1 indicates that WebCore::Document loaded in Chrome
    # is leaking! The baseline should exactly match the numbers on:
    # unittest_data/dom_counter_sample.html
    # Please contact kouhei@, hajimehoshi@ when rebaselining.
    counts = self._tab.dom_stats
    self.assertEqual(counts['document_count'], 1,
        'Document leak is detected! '+
        'The previous document is likely retained unexpectedly.')
    self.assertEqual(counts['node_count'], 14, 'Node leak is detected!')
    self.assertEqual(counts['event_listener_count'], 2,
        'EventListener leak is detected!')
Convert GpuBenchmarkingExtension to a gin::Wrappable class
Convert GpuBenchmarkingExtension to a gin::Wrappable class

v8 extensions are slowing down context creation and we're trying to get rid of them.

BUG=334679
[email protected],[email protected]

Review URL: https://codereview.chromium.org/647433003

Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#300957}
Python
bsd-3-clause
catapult-project/catapult,catapult-project/catapult,sahiljain/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,catapult-project/catapult,sahiljain/catapult,catapult-project/catapult,sahiljain/catapult,benschmaus/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm