column        dtype           range / classes
commit        stringlengths   40 to 40
old_file      stringlengths   4 to 264
new_file      stringlengths   4 to 264
old_contents  stringlengths   0 to 3.26k
new_contents  stringlengths   1 to 4.43k
subject       stringlengths   15 to 624
message       stringlengths   15 to 4.7k
lang          stringclasses   3 values
license       stringclasses   13 values
repos         stringlengths   5 to 91.5k
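Each row pairs a single file's contents before and after one commit, together with the commit subject and message, the language, the license, and the repositories associated with it. Below is a minimal sketch (not part of the source data) of how rows with this schema could be loaded and inspected using the Hugging Face datasets library; the dataset identifier "user/commit-diffs" is a placeholder, not the real dataset path.

from datasets import load_dataset

# Hypothetical dataset identifier; substitute the actual path.
ds = load_dataset("user/commit-diffs", split="train")

for row in ds.select(range(3)):
    # Each row holds one file's contents before and after a single commit.
    print(row["commit"], row["lang"], row["license"])
    print("file:", row["old_file"], "->", row["new_file"])
    print("subject:", row["subject"])
    print("old/new length:", len(row["old_contents"]), len(row["new_contents"]))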
2d698b1df6da2d5a0b3697891744d3c05e99cb95
sympy/core/tests/test_compatibility.py
sympy/core/tests/test_compatibility.py
from sympy.core.compatibility import default_sort_key, as_int, ordered from sympy.core.singleton import S from sympy.utilities.pytest import raises from sympy.abc import x def test_default_sort_key(): func = lambda x: x assert sorted([func, x, func], key=default_sort_key) == [func, func, x] def test_as_int(): raises(ValueError, lambda : as_int(1.1)) raises(ValueError, lambda : as_int([])) def test_ordered(): # Issue 7210 - this had been failing with python2/3 problems assert (list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \ [{1: 3}, {1: 3, 2: 4, 9: 10}]) # warnings should not be raised for identical items l = [1, 1] assert list(ordered(l, warn=True)) == l l = [[1], [2], [1]] assert list(ordered(l, warn=True)) == [[1], [1], [2]] raises(ValueError, lambda: list(ordered(['a', 'ab'], keys=[lambda x: x[0]], default=False, warn=True)))
from sympy.core.compatibility import default_sort_key, as_int, ordered, iterable from sympy.core.singleton import S from sympy.utilities.pytest import raises from sympy.abc import x def test_default_sort_key(): func = lambda x: x assert sorted([func, x, func], key=default_sort_key) == [func, func, x] def test_as_int(): raises(ValueError, lambda : as_int(1.1)) raises(ValueError, lambda : as_int([])) def test_iterable(): assert iterable(0) == False assert iterable(1) == False assert iterable(None) == False def test_ordered(): # Issue 7210 - this had been failing with python2/3 problems assert (list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \ [{1: 3}, {1: 3, 2: 4, 9: 10}]) # warnings should not be raised for identical items l = [1, 1] assert list(ordered(l, warn=True)) == l l = [[1], [2], [1]] assert list(ordered(l, warn=True)) == [[1], [1], [2]] raises(ValueError, lambda: list(ordered(['a', 'ab'], keys=[lambda x: x[0]], default=False, warn=True)))
Test some basic properties of iterable()
Test some basic properties of iterable()
Python
bsd-3-clause
Gadal/sympy,jerli/sympy,souravsingh/sympy,Curious72/sympy,wanglongqi/sympy,chaffra/sympy,atsao72/sympy,sahilshekhawat/sympy,moble/sympy,skidzo/sympy,madan96/sympy,atreyv/sympy,lindsayad/sympy,skidzo/sympy,asm666/sympy,beni55/sympy,asm666/sympy,oliverlee/sympy,saurabhjn76/sympy,grevutiu-gabriel/sympy,drufat/sympy,postvakje/sympy,pandeyadarsh/sympy,wanglongqi/sympy,mafiya69/sympy,kaushik94/sympy,Davidjohnwilson/sympy,grevutiu-gabriel/sympy,hargup/sympy,sunny94/temp,postvakje/sympy,moble/sympy,souravsingh/sympy,bukzor/sympy,Shaswat27/sympy,rahuldan/sympy,jamesblunt/sympy,jaimahajan1997/sympy,maniteja123/sympy,VaibhavAgarwalVA/sympy,kumarkrishna/sympy,yukoba/sympy,ChristinaZografou/sympy,asm666/sympy,sahmed95/sympy,Mitchkoens/sympy,diofant/diofant,abloomston/sympy,abloomston/sympy,pbrady/sympy,AkademieOlympia/sympy,Titan-C/sympy,lindsayad/sympy,ahhda/sympy,debugger22/sympy,garvitr/sympy,abhiii5459/sympy,ga7g08/sympy,emon10005/sympy,sunny94/temp,sampadsaha5/sympy,atreyv/sympy,cswiercz/sympy,Arafatk/sympy,farhaanbukhsh/sympy,kevalds51/sympy,atreyv/sympy,jamesblunt/sympy,kaushik94/sympy,AunShiLord/sympy,yukoba/sympy,meghana1995/sympy,maniteja123/sympy,shipci/sympy,jerli/sympy,bukzor/sympy,rahuldan/sympy,liangjiaxing/sympy,iamutkarshtiwari/sympy,dqnykamp/sympy,cswiercz/sympy,yukoba/sympy,atsao72/sympy,mcdaniel67/sympy,cswiercz/sympy,aktech/sympy,bukzor/sympy,farhaanbukhsh/sympy,iamutkarshtiwari/sympy,kumarkrishna/sympy,ChristinaZografou/sympy,yashsharan/sympy,Curious72/sympy,mcdaniel67/sympy,cccfran/sympy,ahhda/sympy,souravsingh/sympy,Shaswat27/sympy,Designist/sympy,pandeyadarsh/sympy,kaushik94/sympy,toolforger/sympy,ChristinaZografou/sympy,Gadal/sympy,iamutkarshtiwari/sympy,emon10005/sympy,vipulroxx/sympy,Sumith1896/sympy,Mitchkoens/sympy,Davidjohnwilson/sympy,liangjiaxing/sympy,garvitr/sympy,wyom/sympy,pandeyadarsh/sympy,oliverlee/sympy,Titan-C/sympy,Designist/sympy,shikil/sympy,Sumith1896/sympy,Mitchkoens/sympy,lindsayad/sympy,VaibhavAgarwalVA/sympy,yashsharan/sympy,AkademieOlympia/sympy,farhaanbukhsh/sympy,saurabhjn76/sympy,ga7g08/sympy,kevalds51/sympy,Vishluck/sympy,debugger22/sympy,shikil/sympy,rahuldan/sympy,kaichogami/sympy,Titan-C/sympy,hargup/sympy,kumarkrishna/sympy,Curious72/sympy,pbrady/sympy,kaichogami/sympy,yashsharan/sympy,Shaswat27/sympy,sahmed95/sympy,wyom/sympy,postvakje/sympy,kevalds51/sympy,chaffra/sympy,AunShiLord/sympy,shipci/sympy,jbbskinny/sympy,mcdaniel67/sympy,abhiii5459/sympy,madan96/sympy,VaibhavAgarwalVA/sympy,maniteja123/sympy,moble/sympy,kaichogami/sympy,debugger22/sympy,drufat/sympy,Gadal/sympy,wyom/sympy,Arafatk/sympy,shikil/sympy,sampadsaha5/sympy,dqnykamp/sympy,MechCoder/sympy,sampadsaha5/sympy,toolforger/sympy,Sumith1896/sympy,MechCoder/sympy,aktech/sympy,Arafatk/sympy,MechCoder/sympy,Vishluck/sympy,atsao72/sympy,vipulroxx/sympy,jbbskinny/sympy,skirpichev/omg,sahmed95/sympy,hargup/sympy,abloomston/sympy,AkademieOlympia/sympy,pbrady/sympy,saurabhjn76/sympy,oliverlee/sympy,madan96/sympy,shipci/sympy,MridulS/sympy,sahilshekhawat/sympy,ahhda/sympy,liangjiaxing/sympy,toolforger/sympy,chaffra/sympy,sunny94/temp,mafiya69/sympy,Designist/sympy,Vishluck/sympy,meghana1995/sympy,sahilshekhawat/sympy,AunShiLord/sympy,dqnykamp/sympy,mafiya69/sympy,Davidjohnwilson/sympy,jbbskinny/sympy,cccfran/sympy,abhiii5459/sympy,garvitr/sympy,MridulS/sympy,vipulroxx/sympy,wanglongqi/sympy,jaimahajan1997/sympy,beni55/sympy,emon10005/sympy,aktech/sympy,MridulS/sympy,jamesblunt/sympy,meghana1995/sympy,ga7g08/sympy,jerli/sympy,drufat/sympy,skidzo/sympy,beni55/sympy,cccfran/sympy,grevutiu-gabr
iel/sympy,jaimahajan1997/sympy
e632fa3e12d3627abaf26f41a9f0483aaea24adf
imager/ImagerProfile/tests.py
imager/ImagerProfile/tests.py
from django.test import TestCase import factory class UserFactory(factory.django.DjangoModelFactory): class Meta: model = 'imagerprofile.ImagerProfile' django_get_or_create = ('username',) username = 'John'
from django.test import TestCase import factory class UserFactory(factory.django.DjangoModelFactory): class Meta: model = 'imagerprofile.User' django_get_or_create = ('username',) username = factory.Sequence(lambda n: "Agent %03d" % n)
Change test UserFactory model to point to User
Change test UserFactory model to point to User
Python
mit
nbeck90/django-imager,nbeck90/django-imager
a0e8c92a9d12846c8cfe6819ea26d1e08dd4098a
example/models.py
example/models.py
import i18n from i18n.models import TranslatableModel class Document(TranslatableModel): charfield = i18n.LocalizedCharField(max_length=50) textfield = i18n.LocalizedTextField(max_length=512) filefield = i18n.LocalizedFileField(null=True, upload_to='files') imagefield = i18n.LocalizedImageField(null=True, upload_to='images') booleanfield = i18n.LocalizedBooleanField() datefield = i18n.LocalizedDateField() fkfield = i18n.LocalizedForeignKey('self', null=True, blank=True, related_name='+') urlfied = i18n.LocalizedURLField() decimalfield = i18n.LocalizedDecimalField(max_digits=4, decimal_places=2) integerfield = i18n.LocalizedIntegerField() def __str__(self): return '%d, %s' % (self.pk, self.charfield) class Meta: app_label = 'example'
from django.db import models import i18n from i18n.models import TranslatableModel class Document(TranslatableModel): untranslated_charfield = models.CharField(max_length=50, blank=True) charfield = i18n.LocalizedCharField(max_length=50) textfield = i18n.LocalizedTextField(max_length=500, blank=True) filefield = i18n.LocalizedFileField(null=True, upload_to='files', blank=True) imagefield = i18n.LocalizedImageField(null=True, upload_to='images', blank=True) booleanfield = i18n.LocalizedBooleanField() datefield = i18n.LocalizedDateField(blank=True, null=True) fkfield = i18n.LocalizedForeignKey('self', null=True, blank=True, related_name='+') urlfied = i18n.LocalizedURLField(null=True, blank=True) decimalfield = i18n.LocalizedDecimalField(max_digits=4, decimal_places=2, null=True, blank=True) integerfield = i18n.LocalizedIntegerField(null=True, blank=True) def __str__(self): return '%d, %s' % (self.pk, self.charfield) class Meta: app_label = 'example'
Make fields in example app non required
Make fields in example app non required
Python
bsd-3-clause
jonasundderwolf/django-localizedfields,jonasundderwolf/django-localizedfields
d93014618636ba23ebfd99c466072e8b4c265a42
wikiwhere/plot_data_generation/count_generation.py
wikiwhere/plot_data_generation/count_generation.py
''' Created on May 3, 2016 @author: Martin Koerner <[email protected]> ''' class CountGeneration(object): def generate_counts(self,collected_features_array,feature_name): feature_counts = {} for instance in collected_features_array: if feature_name in instance: feature = instance[feature_name] if feature in feature_counts: feature_counts[feature] += 1 else: feature_counts[feature] = 1 return feature_counts def get_as_array(self,feature_counts): feature_count_array = [] for label in feature_counts: dict_for_label = {} dict_for_label["label"] = label dict_for_label["count"] = feature_counts[label] feature_count_array.append(dict_for_label) return feature_count_array
''' Created on May 3, 2016 @author: Martin Koerner <[email protected]> ''' import operator class CountGeneration(object): def generate_counts(self,collected_features_array,feature_name): feature_counts = {} for instance in collected_features_array: if feature_name in instance: feature = instance[feature_name] if feature in feature_counts: feature_counts[feature] += 1 else: feature_counts[feature] = 1 return feature_counts def get_as_array(self,feature_counts): feature_count_array = [] sorted_feature_counts = sorted(feature_counts.items(), key=operator.itemgetter(1),reverse=True) for feature_count_tuple in sorted_feature_counts: dict_for_label = {} dict_for_label["label"] = feature_count_tuple[0] dict_for_label["count"] = feature_count_tuple[1] feature_count_array.append(dict_for_label) return feature_count_array
Add reverse sorting of count_array
Add reverse sorting of count_array
Python
mit
mkrnr/wikiwhere
3ea1c6b718e19d99d123feb734ca5f1a44174bf9
Lib/test/test_fcntl.py
Lib/test/test_fcntl.py
#! /usr/bin/env python """Test program for the fcntl C module. Roger E. Masse """ import struct import fcntl import FCNTL import os from test_support import verbose filename = '/tmp/delete-me' # the example from the library docs f = open(filename,'w') rv = fcntl.fcntl(f.fileno(), FCNTL.O_NDELAY, 1) if verbose: print 'Status from fnctl with O_NDELAY: ', rv lockdata = struct.pack('hhllhh', FCNTL.F_WRLCK, 0, 0, 0, 0, 0) if verbose: print 'struct.pack: ', lockdata rv = fcntl.fcntl(f.fileno(), FCNTL.F_SETLKW, lockdata) if verbose: print 'String from fcntl with F_SETLKW: ', rv f.close() os.unlink(filename)
#! /usr/bin/env python """Test program for the fcntl C module. Roger E. Masse """ import struct import fcntl import FCNTL import os from test_support import verbose filename = '/tmp/delete-me' # the example from the library docs f = open(filename,'w') rv = fcntl.fcntl(f.fileno(), FCNTL.F_SETFL, FCNTL.FNDELAY) if verbose: print 'Status from fnctl with O_NDELAY: ', rv lockdata = struct.pack('hhllhh', FCNTL.F_WRLCK, 0, 0, 0, 0, 0) if verbose: print 'struct.pack: ', `lockdata` rv = fcntl.fcntl(f.fileno(), FCNTL.F_SETLKW, lockdata) if verbose: print 'String from fcntl with F_SETLKW: ', `rv` f.close() os.unlink(filename)
Fix the NDELAY test; avoid outputting binary garbage.
Fix the NDELAY test; avoid outputting binary garbage.
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
ad7d331868706c97caa0bf0abff88d6ab5537d8d
pyramid_skosprovider/__init__.py
pyramid_skosprovider/__init__.py
# -*- coding: utf8 -*- from zope.interface import Interface from skosprovider.registry import Registry class ISkosRegistry(Interface): pass def _build_skos_registry(registry): skos_registry = registry.queryUtility(ISkosRegistry) if skos_registry is not None: return skos_registry skos_registry = Registry() registry.registerUtility(skos_registry, ISkosRegistry) return registry.queryUtility(ISkosRegistry) def get_skos_registry(registry): #Argument might be a config or request regis = getattr(registry, 'registry', None) if regis is None: regis = registry return regis.queryUtility(ISkosRegistry) def includeme(config): _build_skos_registry(config.registry) config.add_directive('get_skos_registry', get_skos_registry) config.add_route('skosprovider.conceptschemes', '/conceptschemes') config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}') config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts') config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}') config.scan()
# -*- coding: utf8 -*- from zope.interface import Interface from skosprovider.registry import Registry class ISkosRegistry(Interface): pass def _build_skos_registry(registry): skos_registry = registry.queryUtility(ISkosRegistry) if skos_registry is not None: return skos_registry skos_registry = Registry() registry.registerUtility(skos_registry, ISkosRegistry) return registry.queryUtility(ISkosRegistry) def get_skos_registry(registry): #Argument might be a config or request regis = getattr(registry, 'registry', None) if regis is None: regis = registry return regis.queryUtility(ISkosRegistry) def includeme(config): _build_skos_registry(config.registry) config.add_directive('get_skos_registry', get_skos_registry) config.add_request_method(get_skos_registry, 'skos_registry', reify=True) config.add_route('skosprovider.conceptschemes', '/conceptschemes') config.add_route('skosprovider.conceptscheme', '/conceptschemes/{scheme_id}') config.add_route('skosprovider.conceptscheme.concepts', '/conceptschemes/{scheme_id}/concepts') config.add_route('skosprovider.concept', '/conceptschemes/{scheme_id}/concepts/{concept_id}') config.scan()
Add skos_registry to the request.
Add skos_registry to the request. Add the skos_registry to the request through the add_request_method directive.
Python
mit
koenedaele/pyramid_skosprovider
6b2ae24a3989728dcf5015fbb7768ba1b4eed723
messaging/message_producer.py
messaging/message_producer.py
"""Message broker that sends to Unix domain sockets.""" import os import socket import time class MessageProducer(object): """Message broker that sends to Unix domain sockets.""" def __init__(self, message_type): self._message_type = message_type socket_address = os.sep.join( ('.', 'messaging', 'sockets', message_type) ) if not os.path.exists(socket_address): raise ValueError('Socket does not exist: {}'.format(socket_address)) self._socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) self._socket.connect(socket_address) def publish(self, message): """Publishes a message.""" self._socket.send(message.encode('utf-8')) def kill(self): """Kills all listening consumers.""" try: self._socket.send(b'QUIT') except ConnectionRefusedError: # pylint: disable=undefined-variable pass
"""Message broker that sends to Unix domain sockets.""" import os import socket import time class MessageProducer(object): """Message broker that sends to Unix domain sockets.""" def __init__(self, message_type): self._message_type = message_type socket_address = os.sep.join( ('.', 'messaging', 'sockets', message_type) ) if not os.path.exists(socket_address): raise ValueError('Socket does not exist: {}'.format(socket_address)) self._socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) self._socket.connect(socket_address) def publish(self, message): """Publishes a message.""" self._socket.sendall(message.encode('utf-8')) def kill(self): """Kills all listening consumers.""" try: self._socket.sendall(b'QUIT') except ConnectionRefusedError: # pylint: disable=undefined-variable pass
Use sendall instead of send for socket messages
Use sendall instead of send for socket messages I kept getting Errno 111 connection refused errors; I hope this fixes it.
Python
mit
bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc
638ea1b12b71f74b357d60b09f1284625db73b2d
migrations/versions/0040_adjust_mmg_provider_rate.py
migrations/versions/0040_adjust_mmg_provider_rate.py
"""mmg rates now set to 1.65 pence per sms Revision ID: 0040_adjust_mmg_provider_rate Revises: 0039_fix_notifications Create Date: 2016-07-06 15:19:23.124212 """ # revision identifiers, used by Alembic. revision = '0040_adjust_mmg_provider_rate' down_revision = '0039_fix_notifications' import uuid from datetime import datetime from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### conn = op.get_bind() conn.execute( sa.sql.text(("INSERT INTO provider_rates (id, valid_from, rate, provider_id) " "VALUES (:id, :valid_from, :rate, (SELECT id FROM provider_details WHERE identifier = 'mmg'))")), id=uuid.uuid4(), valid_from=datetime.utcnow(), rate=1.65 ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### conn = op.get_bind() conn.execute(("DELETE FROM provider_rates " "WHERE provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg') " "AND rate = 1.65")) ### end Alembic commands ###
"""mmg rates now set to 1.65 pence per sms Revision ID: 0040_adjust_mmg_provider_rate Revises: 0039_fix_notifications Create Date: 2016-07-06 15:19:23.124212 """ # revision identifiers, used by Alembic. revision = '0040_adjust_mmg_provider_rate' down_revision = '0039_fix_notifications' import uuid from datetime import datetime from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### conn = op.get_bind() conn.execute( sa.sql.text(("INSERT INTO provider_rates (id, valid_from, rate, provider_id) " "VALUES (:id, :valid_from, :rate, (SELECT id FROM provider_details WHERE identifier = 'mmg'))")), id=uuid.uuid4(), valid_from=datetime(2016, 7, 1), rate=1.65 ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### conn = op.get_bind() conn.execute(("DELETE FROM provider_rates " "WHERE provider_id = (SELECT id FROM provider_details WHERE identifier = 'mmg') " "AND rate = 1.65")) ### end Alembic commands ###
Set the start date for the new rate as July 1
Set the start date for the new rate as July 1
Python
mit
alphagov/notifications-api,alphagov/notifications-api
0ceedd5b22a42634889b572018db1153e1ef2855
tests/integration/services/user_avatar/test_update_avatar_image.py
tests/integration/services/user_avatar/test_update_avatar_image.py
""" :Copyright: 2006-2021 Jochen Kupperschmidt :License: Revised BSD (see `LICENSE` file for details) """ from pathlib import Path import pytest from byceps.services.user_avatar import service as user_avatar_service from byceps.util.image.models import ImageType @pytest.mark.parametrize( 'image_extension, image_type', [ ('jpeg', ImageType.jpeg), ('png', ImageType.png), ], ) def test_path(data_path, site_app, user, image_extension, image_type): with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f: avatar_id = user_avatar_service.update_avatar_image( user.id, f, {image_type} ) avatar = user_avatar_service.get_db_avatar(avatar_id) expected_filename = f'{avatar.id}.{image_extension}' assert avatar.path == data_path / 'global/users/avatars' / expected_filename
""" :Copyright: 2006-2021 Jochen Kupperschmidt :License: Revised BSD (see `LICENSE` file for details) """ from pathlib import Path import pytest from byceps.services.user_avatar import service as user_avatar_service from byceps.util.image.models import ImageType @pytest.mark.parametrize( 'image_extension, image_type', [ ('jpeg', ImageType.jpeg), ('png', ImageType.png), ], ) def test_path(data_path, site_app, user, image_extension, image_type): with Path(f'tests/fixtures/images/image.{image_extension}').open('rb') as f: avatar_id = user_avatar_service.update_avatar_image( user.id, f, {image_type} ) avatar = user_avatar_service.get_db_avatar(avatar_id) expected_filename = f'{avatar.id}.{image_extension}' expected = data_path / 'global' / 'users' / 'avatars' / expected_filename assert avatar.path == expected
Use `/` operator to assemble path
Use `/` operator to assemble path
Python
bsd-3-clause
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
1da520787717117b0413715f9a6df834f2d9e7e1
press_releases/migrations/0009_auto_20170519_1308.py
press_releases/migrations/0009_auto_20170519_1308.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('icekit_press_releases', '0008_auto_20161128_1049'), ] operations = [ migrations.AddField( model_name='pressreleaselisting', name='admin_notes', field=models.TextField(help_text=b"Administrator's notes about this item", blank=True), ), migrations.AddField( model_name='pressreleaselisting', name='brief', field=models.TextField(help_text=b'A document brief describing the purpose of this item', blank=True), ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('icekit_press_releases', '0008_auto_20161128_1049'), ] operations = [ migrations.AddField( model_name='pressreleaselisting', name='admin_notes', field=models.TextField(help_text=b"Administrator's notes about this content", blank=True), ), migrations.AddField( model_name='pressreleaselisting', name='brief', field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True), ), ]
Change help text wording to follow WorkflowStateMixin
Change help text wording to follow WorkflowStateMixin
Python
mit
ic-labs/django-icekit,ic-labs/icekit-press-releases,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/icekit-press-releases
f05cd9d2249ea5ef616accf931418f413bce00ba
appengine/swarming/swarming_bot/bot_code/common.py
appengine/swarming/swarming_bot/bot_code/common.py
# Copyright 2015 The Swarming Authors. All rights reserved. # Use of this source code is governed by the Apache v2.0 license that can be # found in the LICENSE file. """Utilities.""" import logging import os import signal import sys from utils import subprocess42 def exec_python(args): """Executes a python process, replacing the current process if possible. On Windows, it returns the child process code. The caller must exit at the earliest opportunity. """ cmd = [sys.executable] + args if sys.platform not in ('cygwin', 'win32'): os.execv(cmd[0], cmd) return 1 try: # On Windows, we cannot sanely exec() so shell out the child process # instead. But we need to forward any signal received that the bot may care # about. This means processes accumulate, sadly. # TODO(maruel): If stdin closes, it tells the child process that the parent # process died. proc = subprocess42.Popen(cmd, detached=True, stdin=subprocess42.PIPE) def handler(sig, _): logging.info('Got signal %s', sig) # Always send SIGTERM, which is properly translated. proc.send_signal(signal.SIGTERM) with subprocess42.set_signal_handler([signal.SIGBREAK], handler): proc.wait() return proc.returncode except Exception as e: logging.exception('failed to start: %s', e) # Swallow the exception. return 1
# Copyright 2015 The Swarming Authors. All rights reserved. # Use of this source code is governed by the Apache v2.0 license that can be # found in the LICENSE file. """Utilities.""" import logging import os import signal import sys from utils import subprocess42 def exec_python(args): """Executes a python process, replacing the current process if possible. On Windows, it returns the child process code. The caller must exit at the earliest opportunity. """ cmd = [sys.executable] + args if sys.platform not in ('cygwin', 'win32'): os.execv(cmd[0], cmd) return 1 try: # On Windows, we cannot sanely exec() so shell out the child process # instead. But we need to forward any signal received that the bot may care # about. This means processes accumulate, sadly. # TODO(maruel): If stdin closes, it tells the child process that the parent # process died. proc = subprocess42.Popen(cmd, detached=True, stdin=subprocess42.PIPE) def handler(sig, _): logging.info('Got signal %s', sig) # Always send SIGTERM, which is properly translated. proc.send_signal(signal.SIGTERM) sig = signal.SIGBREAK if sys.platform == 'win32' else signal.SIGTERM with subprocess42.set_signal_handler([sig], handler): proc.wait() return proc.returncode except Exception as e: logging.exception('failed to start: %s', e) # Swallow the exception. return 1
Fix regression on 6269f48ba356c4e7f in cygwin.
Fix regression on 6269f48ba356c4e7f in cygwin. signal.SIGBREAK is not defined on cygwin, causing an exception. [email protected] BUG= Review URL: https://codereview.chromium.org/1349183005
Python
apache-2.0
luci/luci-py,luci/luci-py,luci/luci-py,luci/luci-py
3f2be07e5df6bcf8bcfa9fce143291d476e93d9b
lib/reinteract/doc_format.py
lib/reinteract/doc_format.py
import re import pydoc import gtk from data_format import insert_with_tag, is_data_object BOLD_RE = re.compile("(?:(.)\b(.))+") STRIP_BOLD_RE = re.compile("(.)\b(.)") def insert_docs(buf, iter, obj, bold_tag): """Insert documentation about obj into a gtk.TextBuffer buf -- the buffer to insert the documentation into iter -- the location to insert the documentation obj -- the object to get documentation about bold_tag -- the tag to use for bold text, such as headings """ # If the routine is an instance, we get help on the type instead if not is_data_object(obj): obj = type(obj) name = getattr(obj, '__name__', None) document = pydoc.text.document(obj, name) # pydoc.text.document represents boldface with overstrikes, we need to # reverse engineer this and find the spans of bold text pos = 0 while True: m = BOLD_RE.search(document, pos) if m == None: # Strip the trailing newline; this isn't very justifiable in general terms, # but matches what we need in Reinteract if document.endswith("\n"): buf.insert(iter, document[pos:-1]) else: buf.insert(iter, document[pos:]) break buf.insert(iter, document[pos:m.start()]) insert_with_tag(buf, iter, STRIP_BOLD_RE.sub(lambda m: m.group(1), m.group()), bold_tag) pos = m.end()
import re import pydoc import gtk from data_format import insert_with_tag, is_data_object BOLD_RE = re.compile("(?:(.)\b(.))+") STRIP_BOLD_RE = re.compile("(.)\b(.)") def insert_docs(buf, iter, obj, bold_tag): """Insert documentation about obj into a gtk.TextBuffer buf -- the buffer to insert the documentation into iter -- the location to insert the documentation obj -- the object to get documentation about bold_tag -- the tag to use for bold text, such as headings """ # If the routine is an instance, we get help on the type instead if is_data_object(obj): obj = type(obj) name = getattr(obj, '__name__', None) document = pydoc.text.document(obj, name) # pydoc.text.document represents boldface with overstrikes, we need to # reverse engineer this and find the spans of bold text pos = 0 while True: m = BOLD_RE.search(document, pos) if m == None: # Strip the trailing newline; this isn't very justifiable in general terms, # but matches what we need in Reinteract if document.endswith("\n"): buf.insert(iter, document[pos:-1]) else: buf.insert(iter, document[pos:]) break buf.insert(iter, document[pos:m.start()]) insert_with_tag(buf, iter, STRIP_BOLD_RE.sub(lambda m: m.group(1), m.group()), bold_tag) pos = m.end()
Fix typo breaking doc popups
Fix typo breaking doc popups
Python
bsd-2-clause
rschroll/reinteract,johnrizzo1/reinteract,johnrizzo1/reinteract,alexey4petrov/reinteract,johnrizzo1/reinteract,alexey4petrov/reinteract,rschroll/reinteract,rschroll/reinteract,jbaayen/reinteract,jbaayen/reinteract,alexey4petrov/reinteract,jbaayen/reinteract
199f9ace071b95822a9a0fb53c9becfb0ab4abd2
tests/pytests/unit/modules/test_win_servermanager.py
tests/pytests/unit/modules/test_win_servermanager.py
import os import pytest import salt.modules.win_servermanager as win_servermanager from tests.support.mock import MagicMock, patch @pytest.fixture def configure_loader_modules(): return {win_servermanager: {}} def test_install(): mock_out = { "FeatureResult": { } } with patch.object(win_servermanager, "_pshell_json", return_value=""):
import os import pytest import salt.modules.win_servermanager as win_servermanager from tests.support.mock import MagicMock, patch @pytest.fixture def configure_loader_modules(): return { win_servermanager: { "__grains__": {"osversion": "6.2"} } } def test_install(): mock_out = { 'Success': True, 'RestartNeeded': 1, 'FeatureResult': [ { 'Id': 338, 'Name': 'XPS-Viewer', 'DisplayName': 'XPS Viewer', 'Success': True, 'RestartNeeded': False, 'Message': '', 'SkipReason': 0 } ], 'ExitCode': 0 } expected = { "ExitCode": 0, "RestartNeeded": False, "Restarted": False, "Features": { "XPS-Viewer": { "DisplayName": "XPS Viewer", "Message": "", "RestartNeeded": False, "SkipReason": 0, "Success": True } }, "Success": True} mock_reboot = MagicMock(return_value=True) with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \ patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}): result = win_servermanager.install("XPS-Viewer") assert result == expected def test_install_restart(): mock_out = { 'Success': True, 'RestartNeeded': 1, 'FeatureResult': [ { 'Id': 338, 'Name': 'XPS-Viewer', 'DisplayName': 'XPS Viewer', 'Success': True, 'RestartNeeded': True, 'Message': '', 'SkipReason': 0 } ], 'ExitCode': 0 } expected = { "ExitCode": 0, "RestartNeeded": True, "Restarted": True, "Features": { "XPS-Viewer": { "DisplayName": "XPS Viewer", "Message": "", "RestartNeeded": True, "SkipReason": 0, "Success": True } }, "Success": True} mock_reboot = MagicMock(return_value=True) with patch.object(win_servermanager, "_pshell_json", return_value=mock_out), \ patch.dict(win_servermanager.__salt__, {"system.reboot": mock_reboot}): result = win_servermanager.install("XPS-Viewer", restart=True) mock_reboot.assert_called_once() assert result == expected
Add some unit tests for install
Add some unit tests for install
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
6c40079139e714ff145e0a4adff8c3a537172ef5
erpnext/patches/v4_1/fix_delivery_and_billing_status_for_draft_so.py
erpnext/patches/v4_1/fix_delivery_and_billing_status_for_draft_so.py
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe def execute(): frappe.db.sql("""update `tabSales Order` set delivery_status = 'Not Delivered' where delivery_status = 'Delivered' and ifnull(per_delivered, 0) = 0""") frappe.db.sql("""update `tabSales Order` set billing_status = 'Not Billed' where billing_status = 'Billed' and ifnull(per_billed, 0) = 0""")
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe def execute(): frappe.db.sql("""update `tabSales Order` set delivery_status = 'Not Delivered' where delivery_status = 'Delivered' and ifnull(per_delivered, 0) = 0 and docstatus = 0""") frappe.db.sql("""update `tabSales Order` set billing_status = 'Not Billed' where billing_status = 'Billed' and ifnull(per_billed, 0) = 0 and docstatus = 0""")
Update delivery and billing status in SO
Update delivery and billing status in SO
Python
agpl-3.0
gangadharkadam/saloon_erp,njmube/erpnext,Tejal011089/fbd_erpnext,anandpdoshi/erpnext,SPKian/Testing,indictranstech/focal-erpnext,mbauskar/helpdesk-erpnext,4commerce-technologies-AG/erpnext,mbauskar/helpdesk-erpnext,indictranstech/vestasi-erpnext,indictranstech/internal-erpnext,indictranstech/phrerp,indictranstech/buyback-erp,suyashphadtare/test,indictranstech/vestasi-erpnext,ShashaQin/erpnext,indictranstech/erpnext,gangadhar-kadam/verve-erp,BhupeshGupta/erpnext,gangadharkadam/saloon_erp,shitolepriya/test-erp,rohitwaghchaure/erpnext-receipher,BhupeshGupta/erpnext,gangadhar-kadam/smrterp,mbauskar/alec_frappe5_erpnext,mbauskar/Das_Erpnext,gangadhar-kadam/verve-erp,indictranstech/buyback-erp,indictranstech/Das_Erpnext,Tejal011089/digitales_erpnext,anandpdoshi/erpnext,SPKian/Testing,sheafferusa/erpnext,SPKian/Testing,gangadharkadam/office_erp,Tejal011089/paypal_erpnext,ThiagoGarciaAlves/erpnext,netfirms/erpnext,gangadharkadam/letzerp,indictranstech/biggift-erpnext,gangadharkadam/v6_erp,SPKian/Testing2,Drooids/erpnext,saurabh6790/test-erp,hernad/erpnext,pombredanne/erpnext,mahabuber/erpnext,indictranstech/internal-erpnext,mahabuber/erpnext,gangadharkadam/saloon_erp_install,mahabuber/erpnext,susuchina/ERPNEXT,indictranstech/tele-erpnext,gangadharkadam/tailorerp,gangadhar-kadam/helpdesk-erpnext,indictranstech/erpnext,mbauskar/omnitech-erpnext,gangadhar-kadam/verve_test_erp,suyashphadtare/vestasi-update-erp,sheafferusa/erpnext,sagar30051991/ozsmart-erp,hanselke/erpnext-1,tmimori/erpnext,mahabuber/erpnext,Tejal011089/paypal_erpnext,Tejal011089/osmosis_erpnext,netfirms/erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/osmosis_erpnext,njmube/erpnext,mbauskar/omnitech-erpnext,indictranstech/Das_Erpnext,gangadharkadam/v4_erp,fuhongliang/erpnext,mbauskar/sapphire-erpnext,gangadharkadam/v4_erp,gangadharkadam/verveerp,gangadhar-kadam/verve_erp,mbauskar/sapphire-erpnext,suyashphadtare/gd-erp,indictranstech/osmosis-erpnext,mbauskar/omnitech-demo-erpnext,gangadharkadam/verveerp,Drooids/erpnext,rohitwaghchaure/digitales_erpnext,BhupeshGupta/erpnext,indictranstech/biggift-erpnext,rohitwaghchaure/erpnext_smart,gangadharkadam/sher,hanselke/erpnext-1,suyashphadtare/vestasi-erp-jan-end,ShashaQin/erpnext,Tejal011089/huntercamp_erpnext,fuhongliang/erpnext,Tejal011089/digitales_erpnext,anandpdoshi/erpnext,Tejal011089/trufil-erpnext,gangadhar-kadam/smrterp,MartinEnder/erpnext-de,rohitwaghchaure/erpnext-receipher,Tejal011089/huntercamp_erpnext,rohitwaghchaure/digitales_erpnext,Tejal011089/fbd_erpnext,gangadhar-kadam/verve_live_erp,indictranstech/phrerp,gangadharkadam/smrterp,mbauskar/helpdesk-erpnext,suyashphadtare/sajil-erp,indictranstech/tele-erpnext,gangadhar-kadam/verve_test_erp,fuhongliang/erpnext,netfirms/erpnext,rohitwaghchaure/digitales_erpnext,rohitwaghchaure/GenieManager-erpnext,hatwar/Das_erpnext,dieface/erpnext,hatwar/focal-erpnext,gangadhar-kadam/latestchurcherp,treejames/erpnext,susuchina/ERPNEXT,4commerce-technologies-AG/erpnext,hatwar/focal-erpnext,shitolepriya/test-erp,hatwar/Das_erpnext,mbauskar/sapphire-erpnext,mbauskar/phrerp,dieface/erpnext,shitolepriya/test-erp,Tejal011089/fbd_erpnext,indictranstech/Das_Erpnext,pawaranand/phrerp,BhupeshGupta/erpnext,fuhongliang/erpnext,suyashphadtare/vestasi-erp-final,gangadharkadam/v4_erp,suyashphadtare/sajil-final-erp,gangadhar-kadam/latestchurcherp,meisterkleister/erpnext,suyashphadtare/vestasi-erp-1,hernad/erpnext,pawaranand/phrerp,mbauskar/Das_Erpnext,gangadharkadam/saloon_erp_install,gangadharkadam/johnerp,geekroot/erpnext,indictranstech/osmosis-erpnext,ganga
dhar-kadam/verve_erp,gangadharkadam/v6_erp,gangadharkadam/verveerp,gmarke/erpnext,rohitwaghchaure/New_Theme_Erp,rohitwaghchaure/GenieManager-erpnext,indictranstech/reciphergroup-erpnext,indictranstech/biggift-erpnext,mbauskar/phrerp,njmube/erpnext,hatwar/Das_erpnext,suyashphadtare/sajil-final-erp,ShashaQin/erpnext,suyashphadtare/gd-erp,mbauskar/alec_frappe5_erpnext,indictranstech/tele-erpnext,Tejal011089/huntercamp_erpnext,indictranstech/reciphergroup-erpnext,suyashphadtare/vestasi-erp-jan-end,mbauskar/phrerp,treejames/erpnext,rohitwaghchaure/New_Theme_Erp,ThiagoGarciaAlves/erpnext,gangadhar-kadam/laganerp,gangadhar-kadam/verve_live_erp,gangadhar-kadam/verve_live_erp,anandpdoshi/erpnext,indictranstech/focal-erpnext,gangadharkadam/v4_erp,sagar30051991/ozsmart-erp,gangadharkadam/v5_erp,tmimori/erpnext,MartinEnder/erpnext-de,4commerce-technologies-AG/erpnext,gangadhar-kadam/verve_test_erp,suyashphadtare/test,gangadhar-kadam/laganerp,Tejal011089/digitales_erpnext,Tejal011089/huntercamp_erpnext,pombredanne/erpnext,shitolepriya/test-erp,gangadhar-kadam/verve_live_erp,rohitwaghchaure/digitales_erpnext,mbauskar/alec_frappe5_erpnext,SPKian/Testing2,shft117/SteckerApp,suyashphadtare/vestasi-update-erp,suyashphadtare/sajil-erp,Tejal011089/osmosis_erpnext,gangadharkadam/letzerp,gangadhar-kadam/helpdesk-erpnext,gangadharkadam/contributionerp,netfirms/erpnext,indictranstech/phrerp,Tejal011089/trufil-erpnext,indictranstech/tele-erpnext,suyashphadtare/vestasi-erp-jan-end,gangadhar-kadam/latestchurcherp,meisterkleister/erpnext,Tejal011089/digitales_erpnext,aruizramon/alec_erpnext,indictranstech/focal-erpnext,treejames/erpnext,gangadharkadam/v5_erp,mbauskar/omnitech-erpnext,indictranstech/vestasi-erpnext,Tejal011089/trufil-erpnext,hanselke/erpnext-1,rohitwaghchaure/GenieManager-erpnext,gangadharkadam/vlinkerp,gangadharkadam/smrterp,suyashphadtare/gd-erp,Drooids/erpnext,gangadharkadam/saloon_erp,sheafferusa/erpnext,rohitwaghchaure/New_Theme_Erp,indictranstech/vestasi-erpnext,hatwar/focal-erpnext,gangadharkadam/v5_erp,Tejal011089/paypal_erpnext,rohitwaghchaure/erpnext-receipher,indictranstech/reciphergroup-erpnext,hatwar/focal-erpnext,aruizramon/alec_erpnext,saurabh6790/test-erp,gangadharkadam/saloon_erp,indictranstech/osmosis-erpnext,indictranstech/internal-erpnext,hanselke/erpnext-1,sheafferusa/erpnext,gangadharkadam/sher,mbauskar/Das_Erpnext,susuchina/ERPNEXT,gangadhar-kadam/verve_erp,gangadhar-kadam/helpdesk-erpnext,gangadhar-kadam/latestchurcherp,rohitwaghchaure/erpnext_smart,suyashphadtare/vestasi-erp-final,gmarke/erpnext,indictranstech/fbd_erpnext,gangadharkadam/contributionerp,sagar30051991/ozsmart-erp,suyashphadtare/test,mbauskar/omnitech-demo-erpnext,gangadhar-kadam/helpdesk-erpnext,gangadharkadam/v6_erp,gmarke/erpnext,Suninus/erpnext,hatwar/buyback-erpnext,mbauskar/helpdesk-erpnext,gangadhar-kadam/verve-erp,gangadharkadam/vlinkerp,gangadharkadam/vlinkerp,ShashaQin/erpnext,pombredanne/erpnext,geekroot/erpnext,gangadharkadam/letzerp,rohitwaghchaure/GenieManager-erpnext,indictranstech/erpnext,Suninus/erpnext,tmimori/erpnext,suyashphadtare/vestasi-update-erp,indictranstech/trufil-erpnext,indictranstech/erpnext,indictranstech/trufil-erpnext,indictranstech/trufil-erpnext,indictranstech/fbd_erpnext,gangadharkadam/letzerp,MartinEnder/erpnext-de,indictranstech/internal-erpnext,rohitwaghchaure/New_Theme_Erp,aruizramon/alec_erpnext,MartinEnder/erpnext-de,gangadharkadam/office_erp,indictranstech/fbd_erpnext,Tejal011089/trufil-erpnext,Tejal011089/osmosis_erpnext,ThiagoGarciaAlves/erpnext,hatwar/buyback-erpnext,i
ndictranstech/biggift-erpnext,Suninus/erpnext,SPKian/Testing,saurabh6790/test-erp,mbauskar/alec_frappe5_erpnext,shft117/SteckerApp,gangadharkadam/sterp,geekroot/erpnext,treejames/erpnext,gangadharkadam/johnerp,indictranstech/Das_Erpnext,hatwar/buyback-erpnext,gsnbng/erpnext,rohitwaghchaure/erpnext-receipher,indictranstech/fbd_erpnext,Suninus/erpnext,mbauskar/omnitech-erpnext,suyashphadtare/vestasi-erp-1,gangadharkadam/saloon_erp_install,suyashphadtare/vestasi-erp-1,gangadharkadam/sterp,suyashphadtare/vestasi-erp-jan-end,indictranstech/phrerp,ThiagoGarciaAlves/erpnext,meisterkleister/erpnext,gangadharkadam/v5_erp,Tejal011089/paypal_erpnext,hernad/erpnext,hernad/erpnext,gangadharkadam/verveerp,gangadhar-kadam/verve_test_erp,hatwar/Das_erpnext,suyashphadtare/gd-erp,indictranstech/reciphergroup-erpnext,susuchina/ERPNEXT,mbauskar/sapphire-erpnext,mbauskar/omnitech-demo-erpnext,gmarke/erpnext,indictranstech/osmosis-erpnext,Tejal011089/fbd_erpnext,Aptitudetech/ERPNext,gsnbng/erpnext,shft117/SteckerApp,gangadhar-kadam/verve_erp,suyashphadtare/sajil-erp,geekroot/erpnext,gangadharkadam/tailorerp,aruizramon/alec_erpnext,gangadharkadam/v6_erp,saurabh6790/test-erp,meisterkleister/erpnext,SPKian/Testing2,suyashphadtare/sajil-final-erp,mbauskar/phrerp,indictranstech/focal-erpnext,gangadharkadam/contributionerp,gangadharkadam/vlinkerp,gangadhar-kadam/laganerp,indictranstech/buyback-erp,njmube/erpnext,pombredanne/erpnext,dieface/erpnext,gsnbng/erpnext,mbauskar/Das_Erpnext,SPKian/Testing2,hatwar/buyback-erpnext,gangadharkadam/contributionerp,gsnbng/erpnext,tmimori/erpnext,sagar30051991/ozsmart-erp,suyashphadtare/vestasi-erp-final,rohitwaghchaure/erpnext_smart,indictranstech/buyback-erp,pawaranand/phrerp,pawaranand/phrerp,gangadharkadam/saloon_erp_install,gangadharkadam/office_erp,Drooids/erpnext,shft117/SteckerApp,dieface/erpnext,indictranstech/trufil-erpnext
b745e05cd4f2ca2a6683f2e057d52dee454d5b23
lib/authenticator.py
lib/authenticator.py
# # HamperAuthenticator is the class to handle the authentication part of the provisioning portal. # Instantiate with the email and password you want, it'll pass back the cookie jar if successful, # or an error message on failure # from helpers.driver import HamperDriver from helpers.error import HamperError from termcolor import colored class HamperAuthenticator(object): def __init__(self): super(HamperAuthenticator, self).__init__() def sign_in(self, email=None, password=None): # Grab the HamperDriver singleton driver = HamperDriver() print colored("Authenticating user...", "blue") # Open the profile URL. This will forward to the sign in page if session is invalid driver.get("https://developer.apple.com/account/ios/profile/") email_element = driver.find_element_by_name("appleId") email_element.send_keys(email) password_element = driver.find_element_by_name("accountPassword") password_element.send_keys(password) driver.find_element_by_id("submitButton2").click() if len(driver.find_elements_by_class_name("dserror")) > 0: raise Exception(HamperError(HamperError.HECodeLogInError, driver.find_element_by_class_name("dserror").get_attribute("innerHTML")))
# # HamperAuthenticator is the class to handle the authentication part of the provisioning portal. # Instantiate with the email and password you want, it'll pass back the cookie jar if successful, # or an error message on failure # from helpers.driver import HamperDriver from helpers.error import HamperError from termcolor import colored class HamperAuthenticator(object): def __init__(self): super(HamperAuthenticator, self).__init__() def sign_in(self, email=None, password=None): print colored("Authenticating user...", "blue") # If no login credentials were provided if not email or not password: raise Exception(HamperError(HamperError.HECodeLogInError, "Either the email and/or password wasn't provided. Call 'hamper auth login' with the login credentials.")) # Grab the HamperDriver singleton driver = HamperDriver() # Open the profile URL. This will forward to the sign in page if session is invalid driver.get("https://developer.apple.com/account/ios/profile/") email_element = driver.find_element_by_name("appleId") email_element.send_keys(email) password_element = driver.find_element_by_name("accountPassword") password_element.send_keys(password) driver.find_element_by_id("submitButton2").click() if len(driver.find_elements_by_class_name("dserror")) > 0: raise Exception(HamperError(HamperError.HECodeLogInError, driver.find_element_by_class_name("dserror").get_attribute("innerHTML")))
Throw exception if no login credentials are provided
Throw exception if no login credentials are provided
Python
mit
MobileXLabs/hamper
a6f8e42d3e297776a19c8e76dd7f1cfded32a266
pycon/tutorials/tests/test_utils.py
pycon/tutorials/tests/test_utils.py
"""Test for the tutorials.utils package""" import datetime import unittest from mock import patch from django.template import Template from pycon.bulkemail.models import BulkEmail from ..utils import queue_email_message today = datetime.date.today() class TestSendEmailMessage(unittest.TestCase): @patch('django.core.mail.message.EmailMessage.send') @patch('pycon.tutorials.utils.get_template') def test_send_email_message(self, get_template, send_mail): # queue_email_message comes up with the expected template names # and calls send_mail with the expected arguments test_template = Template("test template") get_template.return_value = test_template context = {'a': 1, 'b': 2} queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context) args, kwargs = get_template.call_args_list[0] expected_template_name = "tutorials/email/TESTNAME/subject.txt" self.assertEqual(expected_template_name, args[0]) args, kwargs = get_template.call_args_list[1] expected_template_name = "tutorials/email/TESTNAME/body.txt" self.assertEqual(expected_template_name, args[0]) # Creates a BulkEmail object self.assertEqual(1, BulkEmail.objects.count())
"""Test for the tutorials.utils package""" import datetime from mock import patch from django.template import Template from django.test import TestCase from pycon.bulkemail.models import BulkEmail from ..utils import queue_email_message today = datetime.date.today() class TestSendEmailMessage(TestCase): @patch('django.core.mail.message.EmailMessage.send') @patch('pycon.tutorials.utils.get_template') def test_send_email_message(self, get_template, send_mail): # queue_email_message comes up with the expected template names # and calls send_mail with the expected arguments test_template = Template("test template") get_template.return_value = test_template context = {'a': 1, 'b': 2} queue_email_message("TESTNAME", "from_address", ["1", "2"], [], context) args, kwargs = get_template.call_args_list[0] expected_template_name = "tutorials/email/TESTNAME/subject.txt" self.assertEqual(expected_template_name, args[0]) args, kwargs = get_template.call_args_list[1] expected_template_name = "tutorials/email/TESTNAME/body.txt" self.assertEqual(expected_template_name, args[0]) # Creates a BulkEmail object self.assertEqual(1, BulkEmail.objects.count())
Use django TestCase in tutorial send email test
Use django TestCase in tutorial send email test It was using regular Python unittest.TestCase for some reason, resulting in leaving old BulkEmail objects in the database that other tests weren't expecting.
Python
bsd-3-clause
PyCon/pycon,PyCon/pycon,PyCon/pycon,njl/pycon,PyCon/pycon,njl/pycon,njl/pycon,njl/pycon
d62ec0008b4ca65a784a1017e2c9253f0e0ab749
taiga/projects/migrations/0006_auto_20141029_1040.py
taiga/projects/migrations/0006_auto_20141029_1040.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations def update_total_milestones(apps, schema_editor): Project = apps.get_model("projects", "Project") for project in Project.objects.filter(total_milestones__isnull=True): project.total_milestones = 0 project.save() class Migration(migrations.Migration): dependencies = [ ('projects', '0005_membership_invitation_extra_text'), ] operations = [ migrations.RunPython(update_total_milestones), migrations.AlterField( model_name='project', name='total_milestones', field=models.IntegerField(verbose_name='total of milestones', default=0), ), ]
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations def update_total_milestones(apps, schema_editor): Project = apps.get_model("projects", "Project") qs = Project.objects.filter(total_milestones__isnull=True) qs.update(total_milestones=0) class Migration(migrations.Migration): dependencies = [ ('projects', '0005_membership_invitation_extra_text'), ] operations = [ migrations.RunPython(update_total_milestones), migrations.AlterField( model_name='project', name='total_milestones', field=models.IntegerField(verbose_name='total of milestones', default=0), ), ]
Make 0006 migration of project more efficient.
Make 0006 migration of project more efficient.
Python
agpl-3.0
frt-arch/taiga-back,dycodedev/taiga-back,obimod/taiga-back,19kestier/taiga-back,EvgeneOskin/taiga-back,bdang2012/taiga-back-casting,gauravjns/taiga-back,xdevelsistemas/taiga-back-community,Tigerwhit4/taiga-back,Zaneh-/bearded-tribble-back,bdang2012/taiga-back-casting,CoolCloud/taiga-back,bdang2012/taiga-back-casting,taigaio/taiga-back,19kestier/taiga-back,coopsource/taiga-back,taigaio/taiga-back,taigaio/taiga-back,Rademade/taiga-back,rajiteh/taiga-back,crr0004/taiga-back,crr0004/taiga-back,joshisa/taiga-back,jeffdwyatt/taiga-back,xdevelsistemas/taiga-back-community,gam-phon/taiga-back,obimod/taiga-back,gam-phon/taiga-back,forging2012/taiga-back,coopsource/taiga-back,CMLL/taiga-back,astronaut1712/taiga-back,seanchen/taiga-back,gam-phon/taiga-back,Zaneh-/bearded-tribble-back,Tigerwhit4/taiga-back,forging2012/taiga-back,astronaut1712/taiga-back,crr0004/taiga-back,WALR/taiga-back,jeffdwyatt/taiga-back,astagi/taiga-back,astagi/taiga-back,joshisa/taiga-back,dayatz/taiga-back,EvgeneOskin/taiga-back,CMLL/taiga-back,Zaneh-/bearded-tribble-back,dayatz/taiga-back,EvgeneOskin/taiga-back,rajiteh/taiga-back,seanchen/taiga-back,frt-arch/taiga-back,gauravjns/taiga-back,WALR/taiga-back,jeffdwyatt/taiga-back,obimod/taiga-back,gam-phon/taiga-back,Rademade/taiga-back,forging2012/taiga-back,CoolCloud/taiga-back,Rademade/taiga-back,rajiteh/taiga-back,bdang2012/taiga-back-casting,crr0004/taiga-back,rajiteh/taiga-back,CMLL/taiga-back,xdevelsistemas/taiga-back-community,19kestier/taiga-back,CMLL/taiga-back,jeffdwyatt/taiga-back,WALR/taiga-back,EvgeneOskin/taiga-back,gauravjns/taiga-back,Rademade/taiga-back,astagi/taiga-back,Tigerwhit4/taiga-back,CoolCloud/taiga-back,joshisa/taiga-back,Tigerwhit4/taiga-back,joshisa/taiga-back,gauravjns/taiga-back,coopsource/taiga-back,astagi/taiga-back,obimod/taiga-back,Rademade/taiga-back,coopsource/taiga-back,dycodedev/taiga-back,astronaut1712/taiga-back,frt-arch/taiga-back,dayatz/taiga-back,WALR/taiga-back,seanchen/taiga-back,seanchen/taiga-back,astronaut1712/taiga-back,dycodedev/taiga-back,forging2012/taiga-back,CoolCloud/taiga-back,dycodedev/taiga-back
f869cf9a94749ea210d38178317d196fbdd15fac
resolwe/flow/tests/test_backend.py
resolwe/flow/tests/test_backend.py
# pylint: disable=missing-docstring import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engine import manager from resolwe.flow.models import Data, Tool class ManagerTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', '[email protected]', 'test') t = Tool(slug='test-processor', name='Test Processor', contributor=u, type='data:test', version=1) t.save() d = Data(slug='test-data', name='Test Data', contributor=u, tool=t) d.save() shutil.rmtree(settings.FLOW['BACKEND']['DATA_PATH']) os.makedirs(settings.FLOW['BACKEND']['DATA_PATH']) def test_manager(self): manager.communicate()
# pylint: disable=missing-docstring import os import shutil from django.conf import settings from django.contrib.auth import get_user_model from django.test import TestCase from resolwe.flow.engine import manager from resolwe.flow.models import Data, Tool class ManagerTest(TestCase): def setUp(self): u = get_user_model().objects.create_superuser('test', '[email protected]', 'test') t = Tool(slug='test-processor', name='Test Processor', contributor=u, type='data:test', version=1) t.save() d = Data(slug='test-data', name='Test Data', contributor=u, tool=t) d.save() data_path = settings.FLOW['BACKEND']['DATA_PATH'] if os.path.exists(data_path): shutil.rmtree(data_path) os.makedirs(data_path) def test_manager(self): manager.communicate()
Fix error if no data path
Fix error if no data path
Python
apache-2.0
jberci/resolwe,jberci/resolwe,genialis/resolwe,genialis/resolwe
a2920b9bf5386b3f92a8e2cd5f7c4251439b2c42
newswall/admin.py
newswall/admin.py
from django.contrib import admin from newswall.models import Source, Story admin.site.register(Source, list_display=('name', 'is_active', 'ordering'), list_filter=('is_active',), prepopulated_fields={'slug': ('name',)}, ) admin.site.register(Story, date_hierarchy='timestamp', list_display=('title', 'source', 'is_active', 'timestamp'), list_filter=('source', 'is_active'), search_fields=('object_url', 'title', 'author', 'content'), )
from django.contrib import admin from newswall.models import Source, Story admin.site.register(Source, list_display=('name', 'is_active', 'ordering'), list_editable=('is_active', 'ordering'), list_filter=('is_active',), prepopulated_fields={'slug': ('name',)}, ) admin.site.register(Story, date_hierarchy='timestamp', list_display=('title', 'source', 'is_active', 'timestamp'), list_editable=('is_active',), list_filter=('source', 'is_active'), search_fields=('object_url', 'title', 'author', 'content'), )
Make a few fields editable from the changelist
Make a few fields editable from the changelist
Python
bsd-3-clause
matthiask/django-newswall,registerguard/django-newswall,michaelkuty/django-newswall,HerraLampila/django-newswall,matthiask/django-newswall,HerraLampila/django-newswall,registerguard/django-newswall,michaelkuty/django-newswall
170bfa1aea96c0d1cbe13557ce158effff91466c
pilight.py
pilight.py
#!/usr/bin/python import ctypes import errno import os import select import traceback import cepoll import ctimerfd def on_timer (): pass def main (): spec = ctimerfd.itimerspec () spec.it_interval.tv_sec = 0 spec.it_interval.tv_nsec = long (1e9/60) spec.it_value.tv_sec = 0 spec.it_value.tv_nsec = 1 t = ctimerfd.timerfd_create (ctimerfd.CLOCK_MONOTONIC, ctimerfd.TFD_CLOEXEC|ctimerfd.TFD_NONBLOCK) ctimerfd.timerfd_settime (t, 0, ctypes.pointer (spec), None) poll = select.epoll.fromfd (cepoll.epoll_create (cepoll.EPOLL_CLOEXEC)) poll.register (t, select.EPOLLIN) while True: try: for fd, event in poll.poll (): try: if fd == t: on_timer () except: traceback.print_exc () except IOError, e: if e.errno == errno.EINTR: continue raise if __name__ == '__main__': main ()
#!/usr/bin/python import ctypes import errno import os import select import traceback import cepoll import ctimerfd def on_timer (): pass def eintr_wrap (fn, *args, **kwargs): while True: try: return fn (*args, **kwargs) except IOError, e: if e.errno == errno.EINTR: continue raise def wrap (fn, *args, **kwargs): try: fn (*args, **kwargs) except: traceback.print_exc () def main (): spec = ctimerfd.itimerspec () spec.it_interval.tv_sec = 0 spec.it_interval.tv_nsec = long (1e9/60) spec.it_value.tv_sec = 0 spec.it_value.tv_nsec = 1 t = ctimerfd.timerfd_create (ctimerfd.CLOCK_MONOTONIC, ctimerfd.TFD_CLOEXEC|ctimerfd.TFD_NONBLOCK) ctimerfd.timerfd_settime (t, 0, ctypes.pointer (spec), None) epoll = select.epoll.fromfd (cepoll.epoll_create (cepoll.EPOLL_CLOEXEC)) epoll.register (t, select.EPOLLIN) while True: for fd, event in eintr_wrap (epoll.poll): if fd == t: os.read (t, 8) wrap (on_timer) if __name__ == '__main__': main ()
Add wrapper functions to deal with EINTR and exceptions in dispatched-to-functions
Add wrapper functions to deal with EINTR and exceptions in dispatched-to-functions
Python
mit
yrro/pilight
d68910e98eea4836a372e6230cc11044f2e59214
packet_sniffer/pcapreader.py
packet_sniffer/pcapreader.py
from scapy.all import * import unirest import json def callbackFunction(response): pass # "http://54.68.246.202:3000/rssi" def main(): print "Reading pcap file %s"%sys.argv[1] myreader = PcapReader(sys.argv[1]) packets = [] routerId = sys.argv[2] for pkt in myreader: try: extra = pkt.notdecoded except: extra = None if extra!=None: signal_strength = (256-ord(extra[14:15])) signal_strength = signal_strength - 256 if signal_strength > 127 else signal_strength signal_strength = -signal_strength try: print "[%d] MAC: %s RSSi: %d"%(pkt.time, pkt.addr1, signal_strength) packets.append({'created': pkt.time * 1000, 'mac': pkt.addr1, 'rssi': signal_strength, 'router': routerId, 'processed': False}) if len(packets) > 300: thread = unirest.post("http://127.0.0.1:3000/rssi", headers = {"Content-Type": "application/json"}, params = json.dumps(packets), callback = callbackFunction) packets = [] except: print "Caught exception" if __name__=="__main__": main()
from scapy.all import * import unirest import json def callbackFunction(response): pass # "http://54.68.246.202:3000/rssi" def main(): print "Reading pcap file %s"%sys.argv[1] myreader = PcapReader(sys.argv[1]) packets = [] routerId = sys.argv[2] for pkt in myreader: try: extra = pkt.notdecoded except: extra = None if extra!=None: signal_strength = (256-ord(extra[14:15])) signal_strength = signal_strength - 256 if signal_strength > 127 else signal_strength signal_strength = -signal_strength try: print "[%d] MAC: %s RSSi: %d"%(pkt.time, pkt.addr1, signal_strength) packets.append({'created': pkt.time * 1000, 'mac': pkt.addr1, 'rssi': signal_strength, 'router': routerId, 'processed': False}) if len(packets) > 300: thread = unirest.post("http://54.68.246.202:3000/rssi", headers = {"Content-Type": "application/json"}, params = json.dumps(packets), callback = callbackFunction) packets = [] except: print "Caught exception" if __name__=="__main__": main()
Change script to point to AWS
Change script to point to AWS
Python
mit
cheung31/bigbrother,cheung31/bigbrother,cheung31/bigbrother,cheung31/bigbrother
ce052f8e19d46f6db202e7eee054d5b88af01d9b
nanagogo/__init__.py
nanagogo/__init__.py
#!/usr/bin/env python3 from nanagogo.api import NanagogoRequest, NanagogoError def get(path, params={}): r = NanagogoRequest(path, method="GET", params=params) return r.wrap() def post(path, params={}, data=None): r = NanagogoRequest(path, method="POST", params=params, data=data) return r.wrap() class NanagogoTalk(object): def __init__(self, name): self.name = name @property def info(self): path = ("talks", self.name) return get(path) def feed(self, count=30, targetid=None, direction="PREV"): path = ("talks", self.name, "posts") params = {'limit': count, 'targetId': targetid, 'direction': direction} return get(path, params=params) def iterfeed(self, count=200, targetid=None): while True: feed = self.feed(count=count, targetid=targetid, direction="PREV") if len(feed) == 0: break yield feed targetid = feed[-1]['post']['postId'] - 1 if targetid <= 0: break if __name__ == "__main__": tani = NanagogoTalk('tani-marika') print(tani.info)
#!/usr/bin/env python3 from nanagogo.api import NanagogoRequest, NanagogoError, s def get(path, params={}): r = NanagogoRequest(path, method="GET", params=params) return r.wrap() def post(path, params={}, data=None): r = NanagogoRequest(path, method="POST", params=params, data=data) return r.wrap() class NanagogoTalk(object): def __init__(self, name): self.name = name @property def info(self): path = ("talks", self.name) return get(path) def feed(self, count=30, targetid=None, direction="PREV"): path = ("talks", self.name, "posts") params = {'limit': count, 'targetId': targetid, 'direction': direction.upper()} return get(path, params=params) def iterfeed(self, count=200, targetid=None): while True: feed = self.feed(count=count, targetid=targetid, direction="PREV") if len(feed) == 0: break yield feed targetid = feed[-1]['post']['postId'] - 1 if targetid <= 0: break if __name__ == "__main__": tani = NanagogoTalk('tani-marika') print(tani.info)
Convert direction to upper case
Convert direction to upper case
Python
mit
kastden/nanagogo
04cca2c87cc8e56ecd84e1b3125a7a7b8c67b026
norc_utils/backup.py
norc_utils/backup.py
import os

from norc.settings import (NORC_LOG_DIR, BACKUP_SYSTEM, AWS_ACCESS_KEY_ID,
    AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)

if BACKUP_SYSTEM == 'AmazonS3':
    from norc.norc_utils.aws import set_s3_key

def s3_backup(fp, target):
    NUM_TRIES = 3
    for i in range(NUM_TRIES):
        try:
            set_s3_key(target, fp)
            return True
        except:
            if i == NUM_TRIES - 1:
                raise
    return False

BACKUP_SYSTEMS = {
    'AmazonS3': s3_backup,
}

def backup_log(rel_log_path):
    log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
    log_file = open(log_path, 'rb')
    target = os.path.join('norc_logs/', rel_log_path)
    try:
        return _backup_file(log_file, target)
    finally:
        log_file.close()

def _backup_file(fp, target):
    if BACKUP_SYSTEM:
        return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
    else:
        return False
import os

from norc.settings import NORC_LOG_DIR, BACKUP_SYSTEM

if BACKUP_SYSTEM == 'AmazonS3':
    from norc.norc_utils.aws import set_s3_key
    from norc.settings import (AWS_ACCESS_KEY_ID,
        AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)

def s3_backup(fp, target):
    NUM_TRIES = 3
    for i in range(NUM_TRIES):
        try:
            set_s3_key(target, fp)
            return True
        except:
            if i == NUM_TRIES - 1:
                raise
    return False

BACKUP_SYSTEMS = {
    'AmazonS3': s3_backup,
}

def backup_log(rel_log_path):
    log_path = os.path.join(NORC_LOG_DIR, rel_log_path)
    log_file = open(log_path, 'rb')
    target = os.path.join('norc_logs/', rel_log_path)
    try:
        return _backup_file(log_file, target)
    finally:
        log_file.close()

def _backup_file(fp, target):
    if BACKUP_SYSTEM:
        return BACKUP_SYSTEMS[BACKUP_SYSTEM](fp, target)
    else:
        return False
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
Move AWS_ setting imports under the check for AmazonS3 so Norc doesn't break without them.
Python
bsd-3-clause
darrellsilver/norc,darrellsilver/norc
428fda845c79f70c6e3d64302bbc716da5130625
src/django_richenum/forms/fields.py
src/django_richenum/forms/fields.py
from abc import ABCMeta from abc import abstractmethod from django import forms class _BaseEnumField(forms.TypedChoiceField): __metaclass__ = ABCMeta def __init__(self, enum, *args, **kwargs): self.enum = enum kwargs.setdefault('empty_value', None) if 'choices' in kwargs: raise ValueError('Cannot explicitly supply choices to enum fields.') if 'coerce' in kwargs: raise ValueError('Cannot explicitly supply coercion function to enum fields.') kwargs['choices'] = self.get_choices() kwargs['coerce'] = self.coerce_value super(_BaseEnumField, self).__init__(*args, **kwargs) @abstractmethod def get_choices(self): pass @abstractmethod def coerce_value(self, val): pass class CanonicalEnumField(_BaseEnumField): """ Uses the RichEnum/OrderedRichEnum canonical_name as form field values """ def get_choices(self): return self.enum.choices() def coerce_value(self, name): return self.enum.from_canonical(name) class IndexEnumField(_BaseEnumField): """ Uses the OrderedRichEnum index as form field values """ def get_choices(self): return self.enum.choices(value_field='index') def coerce_value(self, index): return self.enum.from_index(int(index))
from abc import ABCMeta from abc import abstractmethod from django import forms class _BaseEnumField(forms.TypedChoiceField): __metaclass__ = ABCMeta def __init__(self, enum, *args, **kwargs): self.enum = enum kwargs.setdefault('empty_value', None) if 'choices' in kwargs: raise ValueError('Cannot explicitly supply choices to enum fields.') if 'coerce' in kwargs: raise ValueError('Cannot explicitly supply coercion function to enum fields.') kwargs['choices'] = self.get_choices() kwargs['coerce'] = self.coerce_value super(_BaseEnumField, self).__init__(*args, **kwargs) @abstractmethod def get_choices(self): pass @abstractmethod def coerce_value(self, val): pass def run_validators(self, value): # These have to be from a set, so it's hard for me to imagine a useful # custom validator. # The run_validators method in the superclass checks the value against # None, [], {}, etc, which causes warnings in the RichEnum.__eq__ # method... arguably we shouldn't warn in those cases, but for now we # do. pass class CanonicalEnumField(_BaseEnumField): """ Uses the RichEnum/OrderedRichEnum canonical_name as form field values """ def get_choices(self): return self.enum.choices() def coerce_value(self, name): return self.enum.from_canonical(name) class IndexEnumField(_BaseEnumField): """ Uses the OrderedRichEnum index as form field values """ def get_choices(self): return self.enum.choices(value_field='index') def coerce_value(self, index): return self.enum.from_index(int(index))
Make run_validators method a no-op
_BaseEnumField: Make run_validators method a no-op See the comment in this commit-- I can't see value in allowing custom validators on EnumFields and the implementation in the superclass causes warnings in RichEnum.__eq__. Arguably those warnings aren't useful (warning against []/falsy compare). In that case, we can revert this when they're silenced. Alternatively, if we need the warnings and need this functionality, we'd have re-implement the method in the superclass without said check, or live with warnings every time a form containing an EnumField is validated, which sucks.
Python
mit
hearsaycorp/django-richenum,adepue/django-richenum,dhui/django-richenum,asherf/django-richenum,hearsaycorp/django-richenum
0782ab8774f840c7ab2e66ddd168ac3ccfa3fc4f
openprescribing/pipeline/management/commands/clean_up_bq_test_data.py
openprescribing/pipeline/management/commands/clean_up_bq_test_data.py
import os

from django.core.management import BaseCommand, CommandError

from gcutils.bigquery import Client


class Command(BaseCommand):
    help = 'Removes any datasets whose tables have all expired'

    def handle(self, *args, **kwargs):
        if os.environ['DJANGO_SETTINGS_MODULE'] != \
                'openprescribing.settings.test':
            raise CommandError('Command must run with test settings')

        gcbq_client = Client().gcbq_client

        for dataset_list_item in gcbq_client.list_datasets():
            dataset_ref = dataset_list_item.reference
            tables = list(gcbq_client.list_tables(dataset_ref))
            if len(tables) == 0:
                gcbq_client.delete_dataset(dataset_ref)
import os

from django.core.management import BaseCommand, CommandError

from gcutils.bigquery import Client


class Command(BaseCommand):
    help = 'Removes any datasets whose tables have all expired'

    def handle(self, *args, **kwargs):
        if os.environ['DJANGO_SETTINGS_MODULE'] != \
                'openprescribing.settings.test':
            raise CommandError('Command must run with test settings')

        gcbq_client = Client().gcbq_client

        datasets = list(gcbq_client.list_datasets())

        for dataset_list_item in datasets:
            dataset_ref = dataset_list_item.reference
            tables = list(gcbq_client.list_tables(dataset_ref))
            if len(tables) == 0:
                gcbq_client.delete_dataset(dataset_ref)
Clean up BQ test data properly
Clean up BQ test data properly If you delete datasets while iterating over datasets, you eventually get errors. This fixes that by building a list of all datasets before we delete any.
Python
mit
ebmdatalab/openprescribing,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc
90dfa38014ba91de2e8c0c75d63788aab3c95f38
Python/python2_version/klampt/__init__.py
Python/python2_version/klampt/__init__.py
from robotsim import *
import atexit
atexit.register(destroy)

__all__ = ['WorldModel','RobotModel','RobotModelLink','RigidObjectModel','TerrainModel','Mass','ContactParameters',
    'SimRobotController','SimRobotSensor','SimBody','Simulator',
    'Geometry3D','Appearance','DistanceQuerySettings','DistanceQueryResult','TriangleMesh','PointCloud','GeometricPrimitive','VolumeGrid',
    'IKObjective','IKSolver','GeneralizedIKObjective','GeneralizedIKSolver',
    'model','math','io','plan','sim']
from __future__ import print_function,division
from robotsim import *
import atexit
atexit.register(destroy)

__all__ = ['WorldModel','RobotModel','RobotModelLink','RigidObjectModel','TerrainModel','Mass','ContactParameters',
    'SimRobotController','SimRobotSensor','SimBody','Simulator',
    'Geometry3D','Appearance','DistanceQuerySettings','DistanceQueryResult','TriangleMesh','PointCloud','GeometricPrimitive','VolumeGrid',
    'IKObjective','IKSolver','GeneralizedIKObjective','GeneralizedIKSolver',
    'model','math','io','plan','sim']
Allow some compatibility between python2 and updated python 3 files
Allow some compatibility between python2 and updated python 3 files
Python
bsd-3-clause
krishauser/Klampt,krishauser/Klampt,krishauser/Klampt,krishauser/Klampt,krishauser/Klampt,krishauser/Klampt
3a321a93f9779f9e27da8e85e3ffc7460bbbef12
src/python/yalix/test/utils_test.py
src/python/yalix/test/utils_test.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import unittest
import yalix.utils as utils


class UtilsTest(unittest.TestCase):

    def test_log_progress_reports_FAILED(self):
        with utils.capture() as out:
            with self.assertRaises(KeyError):
                with utils.log_progress("Testing log message"):
                    raise KeyError
        self.assertTrue('Testing log message' in out[0])
        self.assertTrue('FAILED' in out[0])

    def test_log_progress_reports_DONE(self):
        with utils.capture() as out:
            with utils.log_progress("Testing log message"):
                pass
        self.assertTrue('Testing log message' in out[0])
        self.assertTrue('DONE' in out[0])

    def test_syntax_highligher(self):
        import hashlib
        sample_code = "(define (identity x) x)"
        output = utils.highlight_syntax(sample_code)
        m = hashlib.sha224(bytes(output))
        self.assertEquals('7ec4fce8a935c23538e701e1da3dfc6ce124ee5555cd90e7b5cd877e', m.hexdigest())


if __name__ == '__main__':
    unittest.main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import unittest
import yalix.utils as utils


class UtilsTest(unittest.TestCase):

    def test_log_progress_reports_FAILED(self):
        with utils.capture() as out:
            with self.assertRaises(KeyError):
                with utils.log_progress("Testing log message"):
                    raise KeyError
        self.assertTrue('Testing log message' in out[0])
        self.assertTrue('FAILED' in out[0])

    def test_log_progress_reports_DONE(self):
        with utils.capture() as out:
            with utils.log_progress("Testing log message"):
                pass
        self.assertTrue('Testing log message' in out[0])
        self.assertTrue('DONE' in out[0])

    # def test_syntax_highligher(self):
    #     import hashlib
    #     sample_code = "(define (identity x) x)"
    #     output = utils.highlight_syntax(sample_code)
    #     if output != sample_code:
    #         # Pygments in action
    #         m = hashlib.sha224(output.encode('utf-8'))
    #         self.assertEquals('7ec4fce8a935c23538e701e1da3dfc6ce124ee5555cd90e7b5cd877e', m.hexdigest())


if __name__ == '__main__':
    unittest.main()
Comment out failing test on Python3 env
Comment out failing test on Python3 env
Python
mit
rm-hull/yalix
94790371e7ec8dc189409e39e193680b9c6b1a08
raven/contrib/django/apps.py
raven/contrib/django/apps.py
# -*- coding: utf-8 -*-
from django.apps import AppConfig


class RavenConfig(AppConfig):
    name = 'raven.contrib.django'
    label = 'raven.contrib.django'
    verbose_name = 'Raven'
# -*- coding: utf-8 -*-
from __future__ import absolute_import

from django.apps import AppConfig


class RavenConfig(AppConfig):
    name = 'raven.contrib.django'
    label = 'raven.contrib.django'
    verbose_name = 'Raven'
Add missing __future__ import to pass coding guidelines.
Add missing __future__ import to pass coding guidelines.
Python
bsd-3-clause
getsentry/raven-python,lepture/raven-python,smarkets/raven-python,Photonomie/raven-python,akalipetis/raven-python,danriti/raven-python,jbarbuto/raven-python,akheron/raven-python,ronaldevers/raven-python,johansteffner/raven-python,smarkets/raven-python,jmagnusson/raven-python,akheron/raven-python,jbarbuto/raven-python,Photonomie/raven-python,smarkets/raven-python,percipient/raven-python,Photonomie/raven-python,arthurlogilab/raven-python,arthurlogilab/raven-python,nikolas/raven-python,johansteffner/raven-python,lepture/raven-python,ewdurbin/raven-python,hzy/raven-python,jmp0xf/raven-python,arthurlogilab/raven-python,ronaldevers/raven-python,jbarbuto/raven-python,dbravender/raven-python,jmagnusson/raven-python,getsentry/raven-python,ronaldevers/raven-python,hzy/raven-python,johansteffner/raven-python,jmp0xf/raven-python,jbarbuto/raven-python,akalipetis/raven-python,recht/raven-python,hzy/raven-python,arthurlogilab/raven-python,jmagnusson/raven-python,percipient/raven-python,smarkets/raven-python,recht/raven-python,ewdurbin/raven-python,percipient/raven-python,dbravender/raven-python,ewdurbin/raven-python,akalipetis/raven-python,getsentry/raven-python,nikolas/raven-python,someonehan/raven-python,akheron/raven-python,danriti/raven-python,lepture/raven-python,danriti/raven-python,nikolas/raven-python,someonehan/raven-python,jmp0xf/raven-python,recht/raven-python,someonehan/raven-python,dbravender/raven-python,nikolas/raven-python
ba3c46dc19afe79647ea07d80c495fbf7ad47514
rocketleaguereplayanalysis/util/transcode.py
rocketleaguereplayanalysis/util/transcode.py
def render_video(render_type, out_frame_rate=30, overlay=None, extra_cmd=None):
    import os
    import subprocess

    from rocketleaguereplayanalysis.render.do_render import get_video_prefix
    from rocketleaguereplayanalysis.parser.frames import get_frames
    from rocketleaguereplayanalysis.util.sync import get_sync_time_type

    video_prefix = get_video_prefix()

    cmd = ['ffmpeg',
           '-loop', '1',
           '-i', os.path.join('assets', overlay + '.png'),
           '-t', str(get_frames()[-1]['time'][get_sync_time_type()])]
    cmd += extra_cmd
    cmd += ['-r', str(out_frame_rate),
            render_type + '.mp4', '-y']

    print('FFmpeg Command:', cmd)

    p = subprocess.Popen(cmd, cwd=video_prefix, stderr=subprocess.STDOUT)
    p.communicate()
def render_video(render_type, out_frame_rate=30, overlay=None, extra_cmd=None):
    import os
    import subprocess

    from rocketleaguereplayanalysis.render.do_render import get_video_prefix
    from rocketleaguereplayanalysis.parser.frames import get_frames
    from rocketleaguereplayanalysis.util.sync import get_sync_time_type

    video_prefix = get_video_prefix()

    cmd = ['ffmpeg',
           '-loop', '1',
           '-i', os.path.join('assets', overlay + '.png'),
           '-t', str(get_frames()[-1]['time'][get_sync_time_type()])]
    cmd += extra_cmd
    cmd += ['-r', str(out_frame_rate),
            '-crf', '18',
            render_type + '.mp4', '-y']

    print('FFmpeg Command:', cmd)

    p = subprocess.Popen(cmd, cwd=video_prefix, stderr=subprocess.STDOUT)
    p.communicate()
Fix render output (missing crf value)
Fix render output (missing crf value)
Python
agpl-3.0
enzanki-ars/rocket-league-minimap-generator
2158763bb6226ba5e5de83527a6ec45a4adcbfa1
shoop/front/apps/simple_order_notification/templates.py
shoop/front/apps/simple_order_notification/templates.py
# -*- coding: utf-8 -*- # This file is part of Shoop. # # Copyright (c) 2012-2015, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. MESSAGE_SUBJECT_TEMPLATE = "{{ order.shop }} - Order {{ order.identifier }} Received" MESSAGE_BODY_TEMPLATE = """ Thank you for your order, {{ order.customer }}! Your order has been received and will be processed as soon as possible. For reference, here's a list of your order's contents. {% for line in order.lines.all() %} {%- if line.taxful_total_price -%} * {{ line.quantity }} x {{ line.text }} - {{ line.taxful_total_price|money }} {%- endif -%} {%- endfor %} Order Total: {{ order.taxful_total_price|money }} {% if not order.is_paid() %} Please note that no record of your order being paid currently exists. {% endif %} Thank you for shopping with us! """.strip()
# -*- coding: utf-8 -*- # This file is part of Shoop. # # Copyright (c) 2012-2015, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. MESSAGE_SUBJECT_TEMPLATE = "{{ order.shop }} - Order {{ order.identifier }} Received" MESSAGE_BODY_TEMPLATE = """ Thank you for your order, {{ order.customer }}! Your order has been received and will be processed as soon as possible. For reference, here's a list of your order's contents. {% for line in order.lines.all() %} {%- if line.taxful_total_price %} * {{ line.quantity }} x {{ line.text }} - {{ line.taxful_total_price|money }} {% endif -%} {%- endfor %} Order Total: {{ order.taxful_total_price|money }} {% if not order.is_paid() %} Please note that no record of your order being paid currently exists. {% endif %} Thank you for shopping with us! """.strip()
Fix default template of order received notification
Fix default template of order received notification Order lines were rendered on a single line. Fix that by adding a line break after each order line.
Python
agpl-3.0
akx/shoop,hrayr-artunyan/shuup,akx/shoop,jorge-marques/shoop,shoopio/shoop,suutari/shoop,suutari-ai/shoop,taedori81/shoop,suutari/shoop,suutari-ai/shoop,shawnadelic/shuup,taedori81/shoop,jorge-marques/shoop,shawnadelic/shuup,shoopio/shoop,hrayr-artunyan/shuup,taedori81/shoop,suutari-ai/shoop,shawnadelic/shuup,hrayr-artunyan/shuup,suutari/shoop,jorge-marques/shoop,shoopio/shoop,akx/shoop
b870028ce8edcb5001f1a4823517d866db0324a8
pyglab/apirequest.py
pyglab/apirequest.py
import enum import json from pyglab.exceptions import RequestError import requests @enum.unique class RequestType(enum.Enum): GET = 1 POST = 2 PUT = 3 DELETE = 4 class ApiRequest: _request_creators = { RequestType.GET: requests.get, RequestType.POST: requests.post, RequestType.PUT: requests.put, RequestType.DELETE: requests.delete, } def __init__(self, request_type, url, token, params={}, sudo=None, page=None, per_page=None): # Build header header = {'PRIVATE-TOKEN': token} if sudo is not None: header['SUDO', sudo] # Build parameters if page is not None: params['page'] = page if per_page is not None: params['per_page'] = per_page r = self._request_creators[request_type](url, params=params, headers=header) content = json.loads(r.text) if RequestError.is_error(r.status_code): raise RequestError.error_class(r.status_code)(content) self._content = content @property def content(self): return self._content
import json from pyglab.exceptions import RequestError import requests class RequestType(object): GET = 1 POST = 2 PUT = 3 DELETE = 4 class ApiRequest: _request_creators = { RequestType.GET: requests.get, RequestType.POST: requests.post, RequestType.PUT: requests.put, RequestType.DELETE: requests.delete, } def __init__(self, request_type, url, token, params={}, sudo=None, page=None, per_page=None): # Build header header = {'PRIVATE-TOKEN': token} if sudo is not None: header['SUDO', sudo] # Build parameters if page is not None: params['page'] = page if per_page is not None: params['per_page'] = per_page r = self._request_creators[request_type](url, params=params, headers=header) content = json.loads(r.text) if RequestError.is_error(r.status_code): raise RequestError.error_class(r.status_code)(content) self._content = content @property def content(self): return self._content
Make RequestType a normal class, not an enum.
Make RequestType a normal class, not an enum. This removes the restriction of needing Python >= 3.4. RequestType is now a normal class with class variables (fixes #19).
Python
mit
sloede/pyglab,sloede/pyglab
deed4cf02bf919a06bffa0ac5b5948390740a97e
tests/test_channel_shim.py
tests/test_channel_shim.py
import gevent
from gevent import queue

from wal_e import channel


def test_channel_shim():
    v = tuple(int(x) for x in gevent.__version__.split('.'))

    if v >= (0, 13, 0) and v < (1, 0, 0):
        assert isinstance(channel.Channel(), queue.Queue)
    elif v >= (1, 0, 0):
        assert isinstance(channel.Channel(), queue.Channel)
    else:
        assert False, 'Unexpected version ' + gevent.__version__
import gevent
from gevent import queue

from wal_e import channel


def test_channel_shim():
    v = tuple(int(x) for x in gevent.__version__.split('.'))
    print 'Version info:', gevent.__version__, v

    if v >= (0, 13) and v < (1, 0):
        assert isinstance(channel.Channel(), queue.Queue)
    elif v >= (1, 0):
        assert isinstance(channel.Channel(), queue.Channel)
    else:
        assert False, 'Unexpected version ' + gevent.__version__
Fix channel shim test for gevent 1.0.0
Fix channel shim test for gevent 1.0.0 Gevent 1.0 specifies this as its version, not 1.0.0, breaking the comparison spuriously if one has version 1.0 installed exactly.
Python
bsd-3-clause
nagual13/wal-e,equa/wal-e,wal-e/wal-e,DataDog/wal-e,ArtemZ/wal-e,intoximeters/wal-e,heroku/wal-e,ajmarks/wal-e,fdr/wal-e,tenstartups/wal-e,RichardKnop/wal-e,x86Labs/wal-e
8ecb32004aca75c0b6cb70bd1a00e38f3a65c8c8
sound/irc/auth/controller.py
sound/irc/auth/controller.py
# encoding: utf-8 from __future__ import unicode_literals from web.auth import authenticate, deauthenticate from web.core import config, url from web.core.http import HTTPFound from brave.api.client import API log = __import__('logging').getLogger(__name__) class AuthenticationMixIn(object): def authorize(self): # Perform the initial API call and direct the user. api = API(config['api.endpoint'], config['api.identity'], config['api.private'], config['api.public']) success = str(url.complete('/authorized')) failure = str(url.complete('/nolove')) result = api.core.authorize(success=success, failure=failure) raise HTTPFound(location=result.location) def ciao(self): deauthenticate(True) raise HTTPFound(location='/') def authorized(self, token): # Capture the returned token and use it to look up the user details. # If we don't have this character, create them. # Store the token against this user account. # Note that our own 'sessions' may not last beyond the UTC date returned as 'expires'. # (Though they can be shorter!) # We request an authenticated session from the server. authenticate(token) raise HTTPFound(location='/') def nolove(self, token): return 'sound.irc.template.whynolove', dict()
# encoding: utf-8 from __future__ import unicode_literals from web.auth import authenticate, deauthenticate from web.core import config, url, session from web.core.http import HTTPFound from brave.api.client import API log = __import__('logging').getLogger(__name__) class AuthenticationMixIn(object): def authorize(self): # Perform the initial API call and direct the user. api = API(config['api.endpoint'], config['api.identity'], config['api.private'], config['api.public']) success = str(url.complete('/authorized')) failure = str(url.complete('/nolove')) result = api.core.authorize(success=success, failure=failure) raise HTTPFound(location=result.location) def ciao(self): deauthenticate(True) raise HTTPFound(location='/') def authorized(self, token): # Capture the returned token and use it to look up the user details. # If we don't have this character, create them. # Store the token against this user account. # Note that our own 'sessions' may not last beyond the UTC date returned as 'expires'. # (Though they can be shorter!) # Prevent users from specifying their session IDs (Some user-agents were sending null ids, leading to users # authenticated with a session id of null session.regenerate_id() # We request an authenticated session from the server. authenticate(token) raise HTTPFound(location='/') def nolove(self, token): return 'sound.irc.template.whynolove', dict()
Fix a bug where user-agents could specify their own session ID.
Fix a bug where user-agents could specify their own session ID.
Python
mit
eve-val/irc,eve-val/irc,eve-val/irc
1a7c4a027628241f415cc5cc3f7aca09ad9a4027
scripts/lib/check-database-compatibility.py
scripts/lib/check-database-compatibility.py
#!/usr/bin/env python3
import logging
import os
import sys

ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)

from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version

assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"

import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader

django.setup()

loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
    missing.discard(key)
    missing.difference_update(migration.replaces)
if not missing:
    sys.exit(0)

current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
    "This is not an upgrade -- the current deployment (version %s) "
    "contains database migrations which %s (version %s) does not.",
    current_version,
    len(missing),
    ZULIP_PATH,
    new_version,
)
sys.exit(1)
#!/usr/bin/env python3
import logging
import os
import sys

ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, ZULIP_PATH)

from scripts.lib.setup_path import setup_path
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, assert_not_running_as_root, parse_version_from
from version import ZULIP_VERSION as new_version

assert_not_running_as_root()
setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"

import django
from django.db import connection
from django.db.migrations.loader import MigrationLoader

django.setup()

loader = MigrationLoader(connection)
missing = set(loader.applied_migrations)
for key, migration in loader.disk_migrations.items():
    missing.discard(key)
    missing.difference_update(migration.replaces)
if not missing:
    sys.exit(0)

current_version = parse_version_from(os.path.join(DEPLOYMENTS_DIR, "current"))
logging.error(
    "This is not an upgrade -- the current deployment (version %s) "
    "contains %s database migrations which %s (version %s) does not.",
    current_version,
    len(missing),
    ZULIP_PATH,
    new_version,
)
sys.exit(1)
Fix typo in logging statement.
scripts: Fix typo in logging statement.
Python
apache-2.0
rht/zulip,zulip/zulip,andersk/zulip,kou/zulip,zulip/zulip,andersk/zulip,andersk/zulip,rht/zulip,zulip/zulip,kou/zulip,rht/zulip,zulip/zulip,andersk/zulip,andersk/zulip,zulip/zulip,kou/zulip,rht/zulip,kou/zulip,kou/zulip,andersk/zulip,kou/zulip,kou/zulip,rht/zulip,zulip/zulip,rht/zulip,zulip/zulip,andersk/zulip,rht/zulip
50aa4ddeaad1d45687b8ab7d99a26602896a276b
indico/modules/events/persons/__init__.py
indico/modules/events/persons/__init__.py
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from flask import session

from indico.core import signals
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem


@signals.menu.items.connect_via('event-management-sidemenu')
def _sidemenu_items(sender, event, **kwargs):
    if event.type == 'lecture' or not event.can_manage(session.user):
        return
    return SideMenuItem('lists', _('Roles'), url_for('persons.person_list', event), section='reports')
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.

from __future__ import unicode_literals

from flask import session

from indico.core import signals
from indico.core.logger import Logger
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem


logger = Logger.get('events.persons')


@signals.menu.items.connect_via('event-management-sidemenu')
def _sidemenu_items(sender, event, **kwargs):
    if event.type == 'lecture' or not event.can_manage(session.user):
        return
    return SideMenuItem('lists', _('Roles'), url_for('persons.person_list', event), section='reports')
Add logger to events.persons module
Add logger to events.persons module
Python
mit
mic4ael/indico,OmeGak/indico,mic4ael/indico,ThiefMaster/indico,pferreir/indico,pferreir/indico,ThiefMaster/indico,OmeGak/indico,ThiefMaster/indico,DirkHoffmann/indico,indico/indico,indico/indico,mvidalgarcia/indico,DirkHoffmann/indico,pferreir/indico,mic4ael/indico,OmeGak/indico,mvidalgarcia/indico,mvidalgarcia/indico,OmeGak/indico,DirkHoffmann/indico,ThiefMaster/indico,indico/indico,mic4ael/indico,DirkHoffmann/indico,indico/indico,pferreir/indico,mvidalgarcia/indico
8befea283830f76dfa41cfd10d7eb916c68f7ef9
intern/views.py
intern/views.py
# -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder


@login_required
def documents(request):
    files = File.objects.all()
    folders = Folder.objects.all()
    #print(files[0])
    return render(request, 'intern/documents.html', {'files': files, 'folders': folders})
# -*- coding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from filer.models import File
from filer.models import Folder


@login_required
def documents(request):
    files = File.objects.all().order_by("-modified_at")
    folders = Folder.objects.all()
    #print(files[0])
    return render(request, 'intern/documents.html', {'files': files, 'folders': folders})
Sort files by last modification
Sort files by last modification
Python
mit
n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb
a3f611220afa9cc0ba1b2eb8fb8a4d4c220e99dd
kokki/cookbooks/busket/recipes/default.py
kokki/cookbooks/busket/recipes/default.py
import os
from kokki import *

Package("erlang")

Script("install-busket",
    not_if = lambda:os.path.exists(env.config.busket.path),
    cwd = "/usr/local/src",
    code = (
        "git clone git://github.com/samuel/busket.git busket\n"
        "cd busket\n"
        "make release\n"
        "mv rel/busket {install_path}\n"
    ).format(install_path=env.config.busket.path)
)
import os
from kokki import *

Package("erlang")

Package("mercurial",
    provider = "kokki.providers.package.easy_install.EasyInstallProvider")

Script("install-busket",
    not_if = lambda:os.path.exists(env.config.busket.path),
    cwd = "/usr/local/src",
    code = (
        "git clone git://github.com/samuel/busket.git busket\n"
        "cd busket\n"
        "make release\n"
        "mv rel/busket {install_path}\n"
    ).format(install_path=env.config.busket.path)
)
Install mercurial to install busket
Install mercurial to install busket
Python
bsd-3-clause
samuel/kokki
71b7885bc1e3740adf8c07c23b41835e1e69f8a2
sqlobject/tests/test_class_hash.py
sqlobject/tests/test_class_hash.py
from sqlobject import *
from sqlobject.tests.dbtest import *

########################################
# Test hashing a column instance
########################################


class ClassHashTest(SQLObject):
    name = StringCol(length=50, alternateID=True, dbName='name_col')


def test_class_hash():
    setupClass(ClassHashTest)
    ClassHashTest(name='bob')

    conn = ClassHashTest._connection
    b = ClassHashTest.byName('bob')
    hashed = hash(b)
    b.expire()
    b = ClassHashTest.byName('bob')
    assert hash(b) == hashed
from sqlobject import *
from sqlobject.tests.dbtest import *

########################################
# Test hashing a column instance
########################################


class ClassHashTest(SQLObject):
    name = StringCol(length=50, alternateID=True, dbName='name_col')


def test_class_hash():
    setupClass(ClassHashTest)
    ClassHashTest(name='bob')

    b = ClassHashTest.byName('bob')
    hashed = hash(b)
    b.expire()
    b = ClassHashTest.byName('bob')
    assert hash(b) == hashed
Fix flake8 warning in test case
Fix flake8 warning in test case
Python
lgpl-2.1
drnlm/sqlobject,sqlobject/sqlobject,drnlm/sqlobject,sqlobject/sqlobject
725605cd20b29e200f6aaa90f29053bc623b0e51
thefuck/rules/unknown_command.py
thefuck/rules/unknown_command.py
import re
from thefuck.utils import replace_command


def match(command):
    return (re.search(r"([^:]*): Unknown command.*", command.stderr) != None
            and re.search(r"Did you mean ([^?]*)?", command.stderr) != None)


def get_new_command(command):
    broken_cmd = re.findall(r"([^:]*): Unknown command.*", command.stderr)[0]
    matched = re.findall(r"Did you mean ([^?]*)?", command.stderr)
    return replace_command(command, broken_cmd, matched)
import re
from thefuck.utils import replace_command


def match(command):
    return (re.search(r"([^:]*): Unknown command.*", command.stderr) is not None
            and re.search(r"Did you mean ([^?]*)?", command.stderr) is not None)


def get_new_command(command):
    broken_cmd = re.findall(r"([^:]*): Unknown command.*", command.stderr)[0]
    matched = re.findall(r"Did you mean ([^?]*)?", command.stderr)
    return replace_command(command, broken_cmd, matched)
Fix flake8 errors: E711 comparison to None should be 'if cond is not None:'
Fix flake8 errors: E711 comparison to None should be 'if cond is not None:'
Python
mit
mlk/thefuck,mlk/thefuck,nvbn/thefuck,Clpsplug/thefuck,SimenB/thefuck,nvbn/thefuck,scorphus/thefuck,Clpsplug/thefuck,SimenB/thefuck,scorphus/thefuck
27065fd302c20937d44b840472d943ce8aa652e7
plugins/candela/girder_plugin_candela/__init__.py
plugins/candela/girder_plugin_candela/__init__.py
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################

from girder.plugin import GirderPlugin


class CandelaPlugin(GirderPlugin):
    NPM_PACKAGE_NAME = '@girder/candela'

    def load(self, info):
        pass
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################

from girder.plugin import GirderPlugin


class CandelaPlugin(GirderPlugin):
    DISPLAY_NAME = 'Candela Visualization'
    NPM_PACKAGE_NAME = '@girder/candela'

    def load(self, info):
        pass
Add a plugin displayName property
Add a plugin displayName property This allows the web client to display an arbitrary plugin title rather than to be restricted to valid python/javascript tokens.
Python
apache-2.0
Kitware/candela,Kitware/candela,Kitware/candela,Kitware/candela,Kitware/candela
65b7d1f1eafd32d3895e3ec15a559dca608b5c23
addons/sale_coupon/models/mail_compose_message.py
addons/sale_coupon/models/mail_compose_message.py
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import models


class MailComposeMessage(models.TransientModel):
    _inherit = 'mail.compose.message'

    def send_mail(self, **kwargs):
        for wizard in self:
            if self._context.get('mark_coupon_as_sent') and wizard.model == 'sale.coupon' and wizard.partner_ids:
                self.env[wizard.model].browse(wizard.res_id).state = 'sent'
        return super().send_mail(**kwargs)
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from odoo import models


class MailComposeMessage(models.TransientModel):
    _inherit = 'mail.compose.message'

    def send_mail(self, **kwargs):
        for wizard in self:
            if self._context.get('mark_coupon_as_sent') and wizard.model == 'sale.coupon' and wizard.partner_ids:
                # Mark coupon as sent in sudo, as helpdesk users don't have the right to write on coupons
                self.env[wizard.model].sudo().browse(wizard.res_id).state = 'sent'
        return super().send_mail(**kwargs)
Allow helpdesk users to send coupon by email
[IMP] sale_coupon: Allow helpdesk users to send coupon by email Purpose ======= Helpdesk users don't have the right to write on a coupon. When sending a coupon by email, the coupon is marked as 'sent'. Allow users to send coupons by executing the state change in sudo. closes odoo/odoo#45091 Taskid: 2179609 Related: odoo/enterprise#8143 Signed-off-by: Yannick Tivisse (yti) <[email protected]>
Python
agpl-3.0
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
96b554c62fb9449760d423f7420ae75d78998269
nodeconductor/quotas/handlers.py
nodeconductor/quotas/handlers.py
def add_quotas_to_scope(sender, instance, created=False, **kwargs):
    if created:
        from nodeconductor.quotas import models
        for quota_name in sender.QUOTAS_NAMES:
            models.Quota.objects.create(name=quota_name, scope=instance)
from django.db.models import signals


def add_quotas_to_scope(sender, instance, created=False, **kwargs):
    if created:
        from nodeconductor.quotas import models
        for quota_name in sender.QUOTAS_NAMES:
            models.Quota.objects.create(name=quota_name, scope=instance)


def quantity_quota_handler_fabric(path_to_quota_scope, quota_name, count=1):
    """
    Return signal handler that increases or decreases quota usage by <count> on object creation or deletion

    :param path_to_quota_scope: path to object with quotas from created object
    :param quota_name: name of changed quota
    :param count: value, that will be added to quota usage

    Example. This code will add 1 to customer "nc-instances" quotas on instance creation and remove 1 on instance deletion:

    .. code-block:: python

        # handlers.py:
        increase_customer_nc_instances_quota = quotas_handlers.quantity_quota_handler_fabric(
            path_to_quota_scope='cloud_project_membership.project.customer',
            quota_name='nc-instances',
            count=1,
        )

        # apps.py
        signals.post_save.connect(
            handlers.increase_customer_nc_instances_quota,
            sender=Instance,
            dispatch_uid='nodeconductor.iaas.handlers.increase_customer_nc_instances_quota',
        )
    """
    def handler(sender, instance, **kwargs):
        signal = kwargs['signal']
        assert signal in (signals.post_save, signals.post_delete), \
            '"quantity_quota_handler" can be used only with post_delete or post_save signals'

        scope = reduce(getattr, path_to_quota_scope.split("."), instance)
        if signal == signals.post_save and kwargs.get('created'):
            scope.add_quota_usage(quota_name, count)
        elif signal == signals.post_delete:
            scope.add_quota_usage(quota_name, -count)

    return handler
Create generic quantity quota handler(saas-217)
Create generic quantity quota handler(saas-217)
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
8be551ad39f3aedff5ea0ceb536378ea0e851864
src/waldur_auth_openid/management/commands/import_openid_accounts.py
src/waldur_auth_openid/management/commands/import_openid_accounts.py
from __future__ import unicode_literals

from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import transaction

from waldur_core.core.utils import DryRunCommand

User = get_user_model()


class Command(DryRunCommand):
    help_text = 'Append civil number with country code for OpenID users.'

    def handle(self, dry_run, *args, **options):
        conf = settings.WALDUR_AUTH_OPENID
        country_code = conf['COUNTRY_CODE']
        registration_method = conf['NAME']

        with transaction.atomic():
            users = User.objects.filter(registration_method=registration_method)\
                .exclude(civil_number__startswith=country_code)\
                .exclude(civil_number='') \
                .exclude(civil_number=None)
            count = users.count()
            if not dry_run:
                for user in users:
                    user.civil_number = '%s%s' % (country_code, user.civil_number)
                    user.save(update_fields=['civil_number'])
            self.stdout.write(self.style.SUCCESS('Civil numbers have been updated for %s users.' % count))
from __future__ import unicode_literals

from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import transaction

from waldur_core.core.utils import DryRunCommand

User = get_user_model()


class Command(DryRunCommand):
    help_text = 'Append civil number with country code for OpenID users.'

    def handle(self, dry_run, *args, **options):
        conf = settings.WALDUR_AUTH_OPENID
        country_code = conf['COUNTRY_CODE']
        registration_method = conf['NAME']

        with transaction.atomic():
            users = User.objects.filter(registration_method=registration_method)\
                .exclude(civil_number__startswith=country_code)\
                .exclude(civil_number='') \
                .exclude(civil_number=None)
            count = users.count()
            for user in users:
                new_civil_number = '%s%s' % (country_code, user.civil_number)
                self.stdout.write('Username: %s, before: %s, after: %s' % (
                    user.username, user.civil_number, new_civil_number))
                if not dry_run:
                    user.civil_number = new_civil_number
                    user.save(update_fields=['civil_number'])
            self.stdout.write(self.style.SUCCESS('Civil numbers have been updated for %s users.' % count))
Print out civil_number before and after
Print out civil_number before and after [WAL-2172]
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind
3f22453c43b6111c22796f9375622eb6d978d669
content/test/gpu/gpu_tests/trace_test_expectations.py
content/test/gpu/gpu_tests/trace_test_expectations.py
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from gpu_test_expectations import GpuTestExpectations

# See the GpuTestExpectations class for documentation.

class TraceTestExpectations(GpuTestExpectations):
  def SetExpectations(self):
    # Sample Usage:
    # self.Fail('TraceTest.Canvas2DRedBox',
    #     ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
    self.Skip('TraceTest.Canvas2DRedBox')
    self.Skip('TraceTest.CSS3DBlueBox')
    pass

class DeviceTraceTestExpectations(GpuTestExpectations):
  def SetExpectations(self):
    # Device traces are not supported on all machines.
    self.Skip('*')
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from gpu_test_expectations import GpuTestExpectations

# See the GpuTestExpectations class for documentation.

class TraceTestExpectations(GpuTestExpectations):
  def SetExpectations(self):
    # Sample Usage:
    # self.Fail('TraceTest.Canvas2DRedBox',
    #     ['mac', 'amd', ('nvidia', 0x1234)], bug=123)
    self.Skip('TraceTest.Canvas2DRedBox')
    self.Skip('TraceTest.CSS3DBlueBox')

    # Flaky, mainly on Windows. Leave this un-suppressed on other
    # platforms for the moment to have at least some test coverage.
    # Once test expectations are refactored (Issue 495870), this could
    # be marked flaky.
    self.Fail('TraceTest.WebGLGreenTriangle', ['win'], bug=517232)
    pass

class DeviceTraceTestExpectations(GpuTestExpectations):
  def SetExpectations(self):
    # Device traces are not supported on all machines.
    self.Skip('*')
Mark TraceTest.WebGLGreenTriangle as expected failure on Windows.
Mark TraceTest.WebGLGreenTriangle as expected failure on Windows. BUG=517232 [email protected] Review URL: https://codereview.chromium.org/1276403003 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#342511}
Python
bsd-3-clause
CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend
1359af2cd9c038d050cb1c4619637143ab020a70
onepercentclub/settings/salesforcesync.py
onepercentclub/settings/salesforcesync.py
try:
    from .secrets import *
except ImportError:
    import sys
    sys.exit('secrets.py settings file not found. Please run `prepare.sh` to create one.')

from .base import *

#
# We need this specific override because having the salesforce app and bluebottle_salesforce
# enabled causes tests to fail in our other apps with this error:
#
#     AttributeError: _original_allowed_hosts
#
# There seems to be some strange database interactions / side-effects when running with SaleforceModels that have
# Meta: managed = False set with the salesforce info configured in DATABASES.
# TODO: Investigate this issue to see if we can put the Saleforce apps back into base.py.
#

#
# Put the salesforce sync environment specific overrides below.
#

DEBUG = False
TEMPLATE_DEBUG = False

INSTALLED_APPS += (
    'salesforce',
    'apps.bluebottle_salesforce',
)

# Send email for real
EMAIL_BACKEND = 'apps.bluebottle_utils.email_backend.DKIMBackend'
try:
    from .secrets import *
except ImportError:
    import sys
    sys.exit('secrets.py settings file not found. Please run `prepare.sh` to create one.')

from .base import *

#
# We need this specific override because having the salesforce app and bluebottle_salesforce
# enabled causes tests to fail in our other apps with this error:
#
#     AttributeError: _original_allowed_hosts
#
# There seems to be some strange database interactions / side-effects when running with SaleforceModels that have
# Meta: managed = False set with the salesforce info configured in DATABASES.
# TODO: Investigate this issue to see if we can put the Saleforce apps back into base.py.
#

#
# Put the salesforce sync environment specific overrides below.
#

DEBUG = False
TEMPLATE_DEBUG = False

INSTALLED_APPS += (
    'salesforce',
    'apps.bluebottle_salesforce',
)

# Send email for real
EMAIL_BACKEND = 'bluebottle.bluebottle_utils.email_backend.DKIMBackend'
Set correct email config for salesforce sync.
Set correct email config for salesforce sync. BB-1530
Python
bsd-3-clause
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
53c4d10ecb7a9592f3cdf311ca2ddc5cb52c413c
gitlabform/gitlabform/test/test_project_settings.py
gitlabform/gitlabform/test/test_project_settings.py
import pytest from gitlabform.gitlabform import GitLabForm from gitlabform.gitlabform.test import create_group, create_project_in_group, get_gitlab, GROUP_NAME PROJECT_NAME = 'project_settings_project' GROUP_AND_PROJECT_NAME = GROUP_NAME + '/' + PROJECT_NAME @pytest.fixture(scope="module") def gitlab(request): create_group(GROUP_NAME) create_project_in_group(GROUP_NAME, PROJECT_NAME) gl = get_gitlab() def fin(): gl.delete_project(GROUP_AND_PROJECT_NAME) request.addfinalizer(fin) return gl # provide fixture value config_builds_for_private_projects = """ gitlab: api_version: 4 project_settings: project_settings: builds_access_level: private visibility: private """ class TestProjectSettings: def test__builds_for_private_projects(self, gitlab): gf = GitLabForm(config_string=config_builds_for_private_projects, project_or_group=GROUP_AND_PROJECT_NAME) gf.main() settings = gitlab.get_project_settings(GROUP_AND_PROJECT_NAME) assert settings['builds_access_level'] is 'private' assert settings['visibility'] is 'private'
import pytest from gitlabform.gitlabform import GitLabForm from gitlabform.gitlabform.test import create_group, create_project_in_group, get_gitlab, GROUP_NAME PROJECT_NAME = 'project_settings_project' GROUP_AND_PROJECT_NAME = GROUP_NAME + '/' + PROJECT_NAME @pytest.fixture(scope="module") def gitlab(request): create_group(GROUP_NAME) create_project_in_group(GROUP_NAME, PROJECT_NAME) gl = get_gitlab() def fin(): gl.delete_project(GROUP_AND_PROJECT_NAME) request.addfinalizer(fin) return gl # provide fixture value config_builds_for_private_projects = """ gitlab: api_version: 4 project_settings: project_settings: builds_access_level: private visibility: private """ class TestProjectSettings: def test__builds_for_private_projects(self, gitlab): gf = GitLabForm(config_string=config_builds_for_private_projects, project_or_group=GROUP_AND_PROJECT_NAME) gf.main() settings = gitlab.get_project_settings(GROUP_AND_PROJECT_NAME) assert settings['visibility'] is 'private' # there is no such field in the "Get single project" API :/ #assert settings['builds_access_level'] is 'private'
Comment out what can't be checked
Comment out what can't be checked
Python
mit
egnyte/gitlabform,egnyte/gitlabform
e5fb2f327b5ec51cd908e5915ef5415ff2b9dcc3
stackviz/views/dstat/api.py
stackviz/views/dstat/api.py
from django.http import HttpResponse
from django.views.generic import View

from stackviz import settings

_cached_csv = None


def _load_csv():
    global _cached_csv
    if _cached_csv:
        return _cached_csv

    with open(settings.DSTAT_CSV, 'r') as f:
        _cached_csv = f.readlines()
        return _cached_csv


class DStatCSVEndpoint(View):
    def get(self, request):
        return HttpResponse(_load_csv(), content_type="text/csv")
import os

from django.http import HttpResponse, Http404
from django.views.generic import View

from stackviz import settings

_cached_csv = None


def _load_csv():
    global _cached_csv
    if _cached_csv:
        return _cached_csv

    try:
        with open(settings.DSTAT_CSV, 'r') as f:
            _cached_csv = f.readlines()
            return _cached_csv
    except IOError:
        return None


class DStatCSVEndpoint(View):
    def get(self, request):
        csv = _load_csv()
        if not csv:
            raise Http404("DStat log not loaded.")

        return HttpResponse(csv, content_type="text/csv")
Return a 404 error when no dstat csv can be loaded
Return a 404 error when no dstat csv can be loaded
Python
apache-2.0
openstack/stackviz,timothyb89/stackviz-ng,dklyle/stackviz-ng,timothyb89/stackviz-ng,timothyb89/stackviz-ng,timothyb89/stackviz,timothyb89/stackviz,timothyb89/stackviz,dklyle/stackviz-ng,openstack/stackviz,openstack/stackviz
ee9c5c8265b4971a9b593d252711a88f59fe6b75
test/suite/out/long_lines.py
test/suite/out/long_lines.py
if True: if True: if True: self.__heap.sort( ) # pylint: builtin sort probably faster than O(n)-time heapify if True: foo = '( ' + array[0] + ' '
if True: if True: if True: self.__heap.sort( ) # pylint: builtin sort probably faster than O(n)-time heapify if True: foo = '( ' + \ array[0] + ' '
Update due to correction to E501 usage
Update due to correction to E501 usage
Python
mit
Vauxoo/autopep8,hhatto/autopep8,SG345/autopep8,hhatto/autopep8,MeteorAdminz/autopep8,vauxoo-dev/autopep8,MeteorAdminz/autopep8,Vauxoo/autopep8,SG345/autopep8,vauxoo-dev/autopep8
fe0d86df9c4be9d33a461578b71c43865f79c715
tests/builtins/test_input.py
tests/builtins/test_input.py
from .. utils import TranspileTestCase, BuiltinFunctionTestCase


class InputTests(TranspileTestCase):
    pass


class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
    functions = ["input"]

    not_implemented = [
        'test_bool',
        'test_bytearray',
        'test_bytes',
        'test_class',
        'test_complex',
        'test_dict',
        'test_float',
        'test_frozenset',
        'test_int',
        'test_list',
        'test_set',
        'test_str',
        'test_tuple',
    ]
from .. utils import TranspileTestCase, BuiltinFunctionTestCase


class InputTests(TranspileTestCase):
    pass


# class BuiltinInputFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
#     functions = ["input"]

#     not_implemented = [
#         'test_bool',
#         'test_bytearray',
#         'test_bytes',
#         'test_class',
#         'test_complex',
#         'test_dict',
#         'test_float',
#         'test_frozenset',
#         'test_int',
#         'test_list',
#         'test_set',
#         'test_str',
#         'test_tuple',
#     ]
Disable builtin tests for input() as it hangs
Disable builtin tests for input() as it hangs
Python
bsd-3-clause
cflee/voc,Felix5721/voc,ASP1234/voc,cflee/voc,glasnt/voc,ASP1234/voc,glasnt/voc,freakboy3742/voc,freakboy3742/voc,gEt-rIgHt-jR/voc,Felix5721/voc,gEt-rIgHt-jR/voc,pombredanne/voc,pombredanne/voc
a6788c5fda5760c6ad81a418e91597b4170e6149
websmash/default_settings.py
websmash/default_settings.py
from os import path

############# Configuration #############
DEBUG = True
SECRET_KEY = "development_key"
RESULTS_PATH = path.join(path.dirname(path.dirname(__file__)), 'results')
RESULTS_URL = '/upload'
NCBI_URL = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi'
NCBI_URL += '?db=nucleotide&email="%s"&tool="antiSMASH"&id=%s&rettype=gbwithparts'
NCBI_URL += '&retmode=text'
NCBI_PROT_URL = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi'
NCBI_PROT_URL += '?db=protein&email="%s"&tool="antiSMASH"&id=%s&rettype=fasta'
NCBI_PROT_URL += '&retmode=text'

# Flask-Mail settings
MAIL_SERVER = "smtpserv.uni-tuebingen.de"
DEFAULT_MAIL_SENDER = "[email protected]"
DEFAULT_RECIPIENTS = ["[email protected]"]

# Flask-SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../jobs.db'

# Flask-Downloader settings
DEFAULT_DOWNLOAD_DIR = RESULTS_PATH
#########################################
from os import path

############# Configuration #############
DEBUG = True
SECRET_KEY = "development_key"
RESULTS_PATH = path.join(path.dirname(path.dirname(__file__)), 'results')
RESULTS_URL = '/upload'
NCBI_URL = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi'
NCBI_URL += '?db=nucleotide&email="%s"&tool="antiSMASH"&id=%s&rettype=gbwithparts'
NCBI_URL += '&retmode=text'
NCBI_PROT_URL = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi'
NCBI_PROT_URL += '?db=protein&email="%s"&tool="antiSMASH"&id=%s&rettype=fasta'
NCBI_PROT_URL += '&retmode=text'

# Flask-Mail settings
MAIL_SERVER = "smtpserv.uni-tuebingen.de"
DEFAULT_MAIL_SENDER = "[email protected]"
DEFAULT_RECIPIENTS = ["[email protected]"]

# Flask-SQLAlchemy settings
SQLALCHEMY_DATABASE_URI = 'sqlite:///../jobs.db'

# Flask-Downloader settings
DEFAULT_DOWNLOAD_DIR = RESULTS_PATH

BAD_CONTENT = ('Error reading from remote server',
               'Bad gateway',
               'Cannot process ID list',
               'server is temporarily unable to service your request')
#########################################
Add some error states for the NCBI download option
settings: Add some error states for the NCBI download option Signed-off-by: Kai Blin <94ddc6985b47aef772521e302594241f46a8f665@biotech.uni-tuebingen.de>
Python
agpl-3.0
antismash/ps-web,antismash/ps-web,antismash/websmash,antismash/ps-web
a72468f6988ba3fc5f815b68a07c990809f80864
main.py
main.py
#ODB2 datalogger import obd connection = obd.OBD() while true: request = connection.query(obd.commands.RPM) if not r.is_null(): print(r.value)
#ODB2 datalogger import obd import signal import sys #What to do when we receive a signal def signal_handler(signal, frame): connection.close() sys.exit(0) #Register our signal handler signal.signal(signal.SIGINT, signal_handler) #Find and connect OBD adapter connection = obd.OBD() while True: request = connection.query(obd.commands.RPM) if not request.is_null(): print(request.value)
Handle ctrl+c with signal Fix more typos
Handle ctrl+c with signal Fix more typos
Python
mit
ProtaconSolutions/iot-hackday-2015-obd2
8d7657ed52a40070136bbbe3da7069dcbe3fc1c3
altair/vegalite/v2/examples/stem_and_leaf.py
altair/vegalite/v2/examples/stem_and_leaf.py
""" Steam and Leaf Plot ------------------- This example shows how to make a steam and leaf plot. """ import altair as alt import pandas as pd import numpy as np np.random.seed(42) # Generating Random Data original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)}) # Splitting Steam and Leaf original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1]) original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1]) # Grouping Leafs for each Stem grouped_data = pd.DataFrame(columns=['stem', 'leaf']) for key, group in original_data.groupby('stem'): grouped_data = grouped_data.append({'stem':key, 'leaf': ''.join(group['leaf'].sort_values())}, ignore_index=True) # Plotting Stems and Leafs chart = alt.Chart(grouped_data).mark_text(align='left', baseline='middle',dx=-40).encode( y = alt.Y('stem', axis=alt.Axis(title='', tickSize=0)), text = 'leaf' ).properties(width=400).configure_axis(labelFontSize=20).configure_text(fontSize=20)
""" Steam and Leaf Plot ------------------- This example shows how to make a steam and leaf plot. """ import altair as alt import pandas as pd import numpy as np np.random.seed(42) # Generating random data original_data = pd.DataFrame({'samples':np.array(np.random.normal(50, 15, 100), dtype=np.int)}) # Splitting steam and leaf original_data['stem'] = original_data['samples'].apply(lambda x: str(x)[:-1]) original_data['leaf'] = original_data['samples'].apply(lambda x: str(x)[-1]) original_data.sort_values(by=['stem', 'leaf'], inplace=True) # Determining position position = np.array([], dtype=np.int64) for key, group in original_data.groupby('stem'): position = np.hstack([position, [*group.reset_index().index.values]]) original_data['position'] = position + 1 # Creating stem and leaf plot chart = alt.Chart(original_data).mark_text(align='left', baseline='middle', dx=-5).encode( y = alt.Y('stem:N', axis=alt.Axis(title='', tickSize=0)), x = alt.X('position:Q', axis=alt.Axis(title='', ticks=False,labels=False,grid=False)), text = 'leaf:N' ).configure_axis(labelFontSize=20).configure_text(fontSize=20)
Modify example to calculate leaf position
Modify example to calculate leaf position
Python
bsd-3-clause
altair-viz/altair,ellisonbg/altair,jakevdp/altair
a26fa991e7a01188f09e755da67442a71cee3deb
planet_alignment/config/bunch_parser.py
planet_alignment/config/bunch_parser.py
""" .. module:: config_parser :platform: linux :synopsis: Module to parse a YAML configuration file using the bunch module. .. moduleauthor:: Paul Fanelli <[email protected]> .. modulecreated:: 6/26/15 """ from bunch import fromYAML import sys from yaml.parser import ParserError from zope.interface import implements from planet_alignment.config.interface import IBunchParser class BunchParser(object): implements(IBunchParser) def __init__(self): self._data = None def parse(self, path): try: with open(path) as f: self._data = fromYAML(f) except IOError as ioe: print("ERROR: No configuration file '{}' found!".format(path)) sys.exit("ERROR: {}".format(ioe)) except ParserError as pe: print("ERROR: Error parsing the configuration file '{}'!".format(path)) sys.exit("ERROR: {}".format(pe)) except Exception, e: print("ERROR: Unknown exception '{}'".format(e)) sys.exit("ERROR: {}".format(e)) return self._data
""" .. module:: config_parser :platform: linux :synopsis: Module to parse a YAML configuration file using the bunch module. .. moduleauthor:: Paul Fanelli <[email protected]> .. modulecreated:: 6/26/15 """ from bunch import fromYAML import sys from yaml.parser import ParserError from zope.interface import implements from planet_alignment.config.interface import IBunchParser class BunchParser(object): implements(IBunchParser) def __init__(self): self._data = None def parse(self, path): try: with open(path) as f: self._data = fromYAML(f) except IOError as ioe: print("ERROR: No configuration file '{}' found!".format(path)) sys.exit("ERROR: {}".format(ioe)) except ParserError as pe: print("ERROR: Error parsing the configuration file '{}'!".format(path)) sys.exit("ERROR: {}".format(pe)) except Exception as e: print("ERROR: Unknown exception '{}'".format(e)) sys.exit("ERROR: {}".format(e)) return self._data
Fix an inconsistency is the bunch parser.
Fix an inconsistency is the bunch parser. Most of the exception handling is using the keyword 'as'. One is using the comma. Change the comma style to 'as'.
Python
mit
paulfanelli/planet_alignment
06df514496612f194a6103167b867debf6657f5e
src/engine/SCons/Platform/darwin.py
src/engine/SCons/Platform/darwin.py
"""engine.SCons.Platform.darwin Platform-specific initialization for Mac OS X systems. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Platform.Platform() selection method. """ # # Copyright (c) 2001, 2002, 2003, 2004 Steven Knight # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # import posix import os def generate(env): posix.generate(env) env['SHLIBSUFFIX'] = '.dylib'
"""engine.SCons.Platform.darwin Platform-specific initialization for Mac OS X systems. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Platform.Platform() selection method. """ # # __COPYRIGHT__ # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import posix import os def generate(env): posix.generate(env) env['SHLIBSUFFIX'] = '.dylib'
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module. git-svn-id: 7892167f69f80ee5d3024affce49f20c74bcb41d@1037 fdb21ef1-2011-0410-befe-b5e4ea1792b1
Python
mit
datalogics/scons,azverkan/scons,datalogics/scons,datalogics-robb/scons,azverkan/scons,datalogics-robb/scons,azverkan/scons,azverkan/scons,datalogics-robb/scons,datalogics/scons,datalogics/scons,datalogics-robb/scons,azverkan/scons
d8f32f7b6d0b1db0f467a61677586daa76bbaa4e
account_fiscal_year/__manifest__.py
account_fiscal_year/__manifest__.py
# Copyright 2016 Camptocamp SA # Copyright 2018 Lorenzo Battistini <https://github.com/eLBati> # License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl). { "name": "Account Fiscal Year", "summary": "Create a menu for Account Fiscal Year", "version": "13.0.1.0.0", "development_status": "Beta", "category": "Accounting", "website": "https://github.com/OCA/account-financial-tools", "author": "Agile Business Group, Camptocamp SA, " "Odoo Community Association (OCA)", "maintainers": ["eLBati"], "license": "LGPL-3", "application": False, "installable": True, "depends": ["account", "date_range"], "data": ["data/date_range_type.xml", "views/account_views.xml"], }
# Copyright 2016 Camptocamp SA # Copyright 2018 Lorenzo Battistini <https://github.com/eLBati> # License AGPL-3.0 or later (https://www.gnu.org/licenses/lgpl). { "name": "Account Fiscal Year", "summary": "Create a menu for Account Fiscal Year", "version": "13.0.1.0.0", "development_status": "Beta", "category": "Accounting", "website": "https://github.com/OCA/account-financial-tools", "author": "Agile Business Group, Camptocamp SA, " "Odoo Community Association (OCA)", "maintainers": ["eLBati"], "license": "AGPL-3", "application": False, "installable": True, "depends": ["account", "date_range"], "data": ["data/date_range_type.xml", "views/account_views.xml"], }
Use AGPL license, as it depends on `date_range` that uses that
[FIX] account_fiscal_year: Use AGPL license, as it depends on `date_range` that uses that
Python
agpl-3.0
Vauxoo/account-financial-tools,Vauxoo/account-financial-tools,Vauxoo/account-financial-tools
7e5973b5490fd938078ce50723527d0c09f8e11e
rest_framework_friendly_errors/handlers.py
rest_framework_friendly_errors/handlers.py
from rest_framework.views import exception_handler from rest_framework_friendly_errors import settings from rest_framework_friendly_errors.utils import is_pretty def friendly_exception_handler(exc, context): response = exception_handler(exc, context) if response is not None: if is_pretty(response): return response error_message = response.data['detail'] error_code = settings.FRIENDLY_EXCEPTION_DICT.get( exc.__class__.__name__) response.data.pop('detail', {}) response.data['code'] = error_code response.data['message'] = error_message response.data['status_code'] = response.status_code # response.data['exception'] = exc.__class__.__name__ return response
from rest_framework.views import exception_handler from rest_framework.exceptions import APIException from rest_framework_friendly_errors import settings from rest_framework_friendly_errors.utils import is_pretty def friendly_exception_handler(exc, context): response = exception_handler(exc, context) if not response and settings.FRIENDLY_CATCH_ALL_EXCEPTIONS: response = exception_handler(APIException(exc), context) if response is not None: if is_pretty(response): return response error_message = response.data['detail'] error_code = settings.FRIENDLY_EXCEPTION_DICT.get( exc.__class__.__name__) response.data.pop('detail', {}) response.data['code'] = error_code response.data['message'] = error_message response.data['status_code'] = response.status_code # response.data['exception'] = exc.__class__.__name__ return response
Build APIException all exceptions must be handled
Build APIException all exceptions must be handled
Python
mit
oasiswork/drf-friendly-errors,FutureMind/drf-friendly-errors
4c3fee1ebce086d93424592f7145a378c40fd794
medical_prescription_disease/models/medical_prescription_order_line.py
medical_prescription_disease/models/medical_prescription_order_line.py
# -*- coding: utf-8 -*- # © 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp import models, fields, api class MedicalPrescriptionOrderLine(models.Model): _inherit = 'medical.prescription.order.line' disease_id = fields.Many2one( string='Disease', comodel_name='medical.patient.disease', required=True, help='Disease diagnosis related to prescription.', ) @api.multi @api.onchange('patient_id') def _onchange_patient_id(self, ): self.ensure_one() return { 'domain': { 'disease_id': [('patient_id', '=', self.patient_id.id)], 'prescription_order_id': [ ('patient_id', '=', self.patient_id.id) ], } } @api.multi @api.onchange('disease_id') def _onchange_disease_id(self, ): for rec_id in self: rec_id.patient_id = rec_id.disease_id.patient_id.id
# -*- coding: utf-8 -*- # © 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp import models, fields, api class MedicalPrescriptionOrderLine(models.Model): _inherit = 'medical.prescription.order.line' disease_id = fields.Many2one( string='Disease', comodel_name='medical.patient.disease', help='Disease diagnosis related to prescription.', ) @api.multi @api.onchange('patient_id') def _onchange_patient_id(self, ): self.ensure_one() return { 'domain': { 'disease_id': [('patient_id', '=', self.patient_id.id)], 'prescription_order_id': [ ('patient_id', '=', self.patient_id.id) ], } } @api.multi @api.onchange('disease_id') def _onchange_disease_id(self, ): for rec_id in self: rec_id.patient_id = rec_id.disease_id.patient_id.id
Remove required from disease_id in medical_prescription_disease
Remove required from disease_id in medical_prescription_disease
Python
agpl-3.0
laslabs/vertical-medical,laslabs/vertical-medical
f9b2f8cd60af9b37ad80db10c42b36059ca5a10f
tests/unit/core/migrations_tests.py
tests/unit/core/migrations_tests.py
# -*- coding: utf-8 -*- import os from django.test import TestCase import oscar.apps class TestMigrations(TestCase): def check_for_auth_model(self, filepath): with open(filepath) as f: s = f.read() return 'auth.User' in s or 'auth.user' in s def test_dont_contain_hardcoded_user_model(self): root_path = os.path.dirname(oscar.apps.__file__) matches = [] for dir, __, migrations in os.walk(root_path): if dir.endswith('migrations'): paths = [os.path.join(dir, migration) for migration in migrations if migration.endswith('.py')] matches += filter(self.check_for_auth_model, paths) if matches: pretty_matches = '\n'.join( [match.replace(root_path, '') for match in matches]) self.fail('References to hardcoded User model found in the ' 'following migration(s):\n' + pretty_matches)
# -*- coding: utf-8 -*- import os import re from django.test import TestCase import oscar.apps class TestMigrations(TestCase): def setUp(self): self.root_path = os.path.dirname(oscar.apps.__file__) self.migration_filenames = [] for path, __, migrations in os.walk(self.root_path): if path.endswith('migrations'): paths = [ os.path.join(path, migration) for migration in migrations if migration.endswith('.py') and migration != '__init__.py'] self.migration_filenames += paths def test_dont_contain_hardcoded_user_model(self): def check_for_auth_model(filepath): with open(filepath) as f: s = f.read() return 'auth.User' in s or 'auth.user' in s matches = filter(check_for_auth_model, self.migration_filenames) if matches: pretty_matches = '\n'.join( [match.replace(self.root_path, '') for match in matches]) self.fail('References to hardcoded User model found in the ' 'following migration(s):\n' + pretty_matches) def test_no_duplicate_migration_numbers(self): # pull app name and migration number regexp = re.compile(r'^.+oscar/apps/([\w/]+)/migrations/(\d{4}).+$') keys = [] for migration in self.migration_filenames: match = regexp.match(migration) keys.append(match.group(1) + match.group(2)) self.assertEqual(len(keys), len(set(keys)))
Add unit test for duplicate migration numbers
Add unit test for duplicate migration numbers Duplicate migration numbers can happen when merging changes from different branches. This test ensures that we address the issue right away.
Python
bsd-3-clause
django-oscar/django-oscar,django-oscar/django-oscar,Bogh/django-oscar,anentropic/django-oscar,pdonadeo/django-oscar,manevant/django-oscar,nickpack/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,faratro/django-oscar,QLGu/django-oscar,eddiep1101/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,jmt4/django-oscar,solarissmoke/django-oscar,dongguangming/django-oscar,amirrpp/django-oscar,vovanbo/django-oscar,ka7eh/django-oscar,john-parton/django-oscar,rocopartners/django-oscar,ahmetdaglarbas/e-commerce,adamend/django-oscar,jmt4/django-oscar,thechampanurag/django-oscar,binarydud/django-oscar,django-oscar/django-oscar,bschuon/django-oscar,machtfit/django-oscar,monikasulik/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,itbabu/django-oscar,sonofatailor/django-oscar,pasqualguerrero/django-oscar,MatthewWilkes/django-oscar,rocopartners/django-oscar,spartonia/django-oscar,spartonia/django-oscar,kapari/django-oscar,anentropic/django-oscar,QLGu/django-oscar,manevant/django-oscar,mexeniz/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar,jinnykoo/wuyisj.com,manevant/django-oscar,spartonia/django-oscar,nickpack/django-oscar,itbabu/django-oscar,pasqualguerrero/django-oscar,eddiep1101/django-oscar,thechampanurag/django-oscar,jinnykoo/wuyisj,rocopartners/django-oscar,django-oscar/django-oscar,jlmadurga/django-oscar,saadatqadri/django-oscar,jinnykoo/christmas,sasha0/django-oscar,jmt4/django-oscar,solarissmoke/django-oscar,ahmetdaglarbas/e-commerce,binarydud/django-oscar,anentropic/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,mexeniz/django-oscar,michaelkuty/django-oscar,rocopartners/django-oscar,dongguangming/django-oscar,kapt/django-oscar,faratro/django-oscar,QLGu/django-oscar,bnprk/django-oscar,eddiep1101/django-oscar,sasha0/django-oscar,faratro/django-oscar,josesanch/django-oscar,MatthewWilkes/django-oscar,bnprk/django-oscar,jinnykoo/wuyisj.com,sasha0/django-oscar,jinnykoo/wuyisj,adamend/django-oscar,saadatqadri/django-oscar,dongguangming/django-oscar,marcoantoniooliveira/labweb,WadeYuChen/django-oscar,taedori81/django-oscar,QLGu/django-oscar,john-parton/django-oscar,pasqualguerrero/django-oscar,Jannes123/django-oscar,john-parton/django-oscar,mexeniz/django-oscar,amirrpp/django-oscar,marcoantoniooliveira/labweb,marcoantoniooliveira/labweb,kapt/django-oscar,josesanch/django-oscar,Jannes123/django-oscar,WillisXChen/django-oscar,binarydud/django-oscar,lijoantony/django-oscar,adamend/django-oscar,bschuon/django-oscar,michaelkuty/django-oscar,machtfit/django-oscar,WillisXChen/django-oscar,jmt4/django-oscar,vovanbo/django-oscar,bnprk/django-oscar,itbabu/django-oscar,john-parton/django-oscar,pdonadeo/django-oscar,okfish/django-oscar,WadeYuChen/django-oscar,kapari/django-oscar,marcoantoniooliveira/labweb,bschuon/django-oscar,jlmadurga/django-oscar,ademuk/django-oscar,machtfit/django-oscar,jinnykoo/wuyisj,ademuk/django-oscar,pdonadeo/django-oscar,dongguangming/django-oscar,spartonia/django-oscar,kapari/django-oscar,adamend/django-oscar,bnprk/django-oscar,amirrpp/django-oscar,ka7eh/django-oscar,ka7eh/django-oscar,jlmadurga/django-oscar,okfish/django-oscar,binarydud/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,ademuk/django-oscar,saadatqadri/django-oscar,nfletton/django-oscar,jinnykoo/wuyisj,nfletton/django-oscar,WillisXChen/django-oscar,jlmadurga/django-oscar,WadeYuChen/django-oscar,Bogh/django-oscar,nickpack/django-oscar,solarissmoke/django-oscar,Bogh/django-oscar,Bogh/django-oscar,okfish/django-oscar,WadeYuChen/django-oscar,kapt/django-oscar
,manevant/django-oscar,sasha0/django-oscar,amirrpp/django-oscar,monikasulik/django-oscar,okfish/django-oscar,jinnykoo/christmas,Jannes123/django-oscar,sonofatailor/django-oscar,ka7eh/django-oscar,jinnykoo/wuyisj.com,WillisXChen/django-oscar,michaelkuty/django-oscar,nfletton/django-oscar,lijoantony/django-oscar,thechampanurag/django-oscar,anentropic/django-oscar,vovanbo/django-oscar,sonofatailor/django-oscar,taedori81/django-oscar,nickpack/django-oscar,josesanch/django-oscar,kapari/django-oscar,lijoantony/django-oscar,MatthewWilkes/django-oscar,thechampanurag/django-oscar,jinnykoo/christmas,taedori81/django-oscar,faratro/django-oscar,taedori81/django-oscar,ahmetdaglarbas/e-commerce,saadatqadri/django-oscar,bschuon/django-oscar,pdonadeo/django-oscar,ahmetdaglarbas/e-commerce,pasqualguerrero/django-oscar,vovanbo/django-oscar,MatthewWilkes/django-oscar,monikasulik/django-oscar,Jannes123/django-oscar,ademuk/django-oscar
d20f147a9baf0c0eee48fe2b6242020d500018cc
packages/Python/lldbsuite/test/repl/error_return/TestREPLThrowReturn.py
packages/Python/lldbsuite/test/repl/error_return/TestREPLThrowReturn.py
# TestREPLThrowReturn.py # # This source file is part of the Swift.org open source project # # Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors # Licensed under Apache License v2.0 with Runtime Library Exception # # See http://swift.org/LICENSE.txt for license information # See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors # # ------------------------------------------------------------------------------ """Test that the REPL correctly handles the case that a called function throws.""" import os, time import unittest2 import lldb from lldbsuite.test.lldbrepl import REPLTest, load_tests import lldbsuite.test.decorators as decorators class REPLThrowReturnTestCase (REPLTest): mydir = REPLTest.compute_mydir(__file__) @decorators.swiftTest @decorators.skipUnlessDarwin @decorators.no_debug_info_test @decorators.expectedFlakeyDarwin def testREPL(self): REPLTest.testREPL(self) def doTest(self): self.sendline('import Foundation; Data()') self.sendline('enum VagueProblem: Error { case SomethingWentWrong }; func foo() throws -> Int { throw VagueProblem.SomethingWentWrong }') self.promptSync() self.command('foo()', patterns=['\\$E0', 'SomethingWentWrong'])
# TestREPLThrowReturn.py # # This source file is part of the Swift.org open source project # # Copyright (c) 2014 - 2015 Apple Inc. and the Swift project authors # Licensed under Apache License v2.0 with Runtime Library Exception # # See http://swift.org/LICENSE.txt for license information # See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors # # ------------------------------------------------------------------------------ """Test that the REPL correctly handles the case that a called function throws.""" import os, time import unittest2 import lldb from lldbsuite.test.lldbrepl import REPLTest, load_tests import lldbsuite.test.decorators as decorators class REPLThrowReturnTestCase (REPLTest): mydir = REPLTest.compute_mydir(__file__) @decorators.swiftTest @decorators.skipUnlessDarwin @decorators.no_debug_info_test @decorators.expectedFailureAll(oslist=["macosx"], bugnumber="rdar://27648290") def testREPL(self): REPLTest.testREPL(self) def doTest(self): self.sendline('import Foundation; Data()') self.sendline('enum VagueProblem: Error { case SomethingWentWrong }; func foo() throws -> Int { throw VagueProblem.SomethingWentWrong }') self.promptSync() self.command('foo()', patterns=['\\$E0', 'SomethingWentWrong'])
Mark this test as xfail
Mark this test as xfail
Python
apache-2.0
apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
36f4144a01ed56baea9036e4e09a5d90b1c13372
crits/core/management/commands/mapreduces.py
crits/core/management/commands/mapreduces.py
from django.core.management.base import BaseCommand import crits.stats.handlers as stats class Command(BaseCommand): """ Script Class. """ help = "Runs mapreduces for CRITs." def handle(self, *args, **options): """ Script Execution. """ stats.generate_yara_hits() stats.generate_sources() stats.generate_filetypes() stats.generate_filetypes() stats.generate_campaign_stats() stats.generate_counts() stats.target_user_stats() stats.campaign_date_stats()
from django.core.management.base import BaseCommand import crits.stats.handlers as stats class Command(BaseCommand): """ Script Class. """ help = "Runs mapreduces for CRITs." def handle(self, *args, **options): """ Script Execution. """ stats.generate_yara_hits() stats.generate_sources() stats.generate_filetypes() stats.generate_campaign_stats() stats.generate_counts() stats.target_user_stats() stats.campaign_date_stats()
Remove duplicate call to generate_filetypes()
Remove duplicate call to generate_filetypes()
Python
mit
Magicked/crits,lakiw/cripts,Magicked/crits,lakiw/cripts,lakiw/cripts,Magicked/crits,Magicked/crits,lakiw/cripts
027f89292c1d8e334e9e69222d1ec8753020e8bd
candidates/management/commands/candidates_check_for_inconsistent_data.py
candidates/management/commands/candidates_check_for_inconsistent_data.py
from __future__ import print_function, unicode_literals import sys from django.core.management.base import BaseCommand from candidates.models import check_paired_models class Command(BaseCommand): def handle(self, *args, **options): errors = check_paired_models() if errors: for error in errors: print(error) sys.exit(1)
from __future__ import print_function, unicode_literals import sys from django.core.management.base import BaseCommand from candidates.models import ( check_paired_models, check_membership_elections_consistent) class Command(BaseCommand): def handle(self, *args, **options): errors = check_paired_models() + check_membership_elections_consistent() if errors: for error in errors: print(error) sys.exit(1)
Add check_membership_elections_consistent to the data checking command
Add check_membership_elections_consistent to the data checking command
Python
agpl-3.0
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
fa3841fd79c4cbc8545b253a2797cfed2b644284
red_green_bar2.py
red_green_bar2.py
#!/usr/bin/env python2 ''' Given: 1. status code: (0 - OK, other value - BAD) 2. terminal window width shows red/green bar to visualize return code of previous command ''' import sys if len(sys.argv) >= 2: code = sys.argv[1] value = int(code) if value: col_char = '1' else: col_char = '2' cols_limit = int(sys.argv[2]) esc = chr(27) print (''.join(( esc, '[4', col_char, 'm', ' ' * (cols_limit - 2), esc, '[0m', ))) else: print (''' Usage: %(prog_name)s status_code number_of_columns 1. status code: 0 - OK (green color), other values - BAD (red color) 2. number of columns: the width of text console ''' % dict( prog_name=sys.argv[0], ))
#!/usr/bin/env python2 ''' Given: 1. status code: (0 - OK, other value - BAD) 2. terminal window width shows red/green bar to visualize return code of previous command ''' import sys if len(sys.argv) >= 2: code = sys.argv[1] if code == 'y': col_char = '3' else: value = int(code) if value: col_char = '1' else: col_char = '2' cols_limit = int(sys.argv[2]) esc = chr(27) print (''.join(( esc, '[4', col_char, 'm', ' ' * (cols_limit - 2), esc, '[0m', ))) else: print (''' Usage: %(prog_name)s status_code number_of_columns 1. status code: 0 - OK (green color), other values - BAD (red color) 2. number of columns: the width of text console ''' % dict( prog_name=sys.argv[0], ))
Allow for yellow color after specifying y
Allow for yellow color after specifying y
Python
mit
kwadrat/rgb_tdd
b0efb7db50080dd1e9e96ad8d818e3b0859bbca3
retry/__init__.py
retry/__init__.py
# -*- coding: utf-8 -*- from functools import wraps import time class RetryExceededError(Exception): pass class retry(object): '''A decorator encapsulated retry logic. Usage: @retry(errors=(TTransportException, AnyExpectedError)) ''' def __init__(self, errors=(Exception, ), tries=3, delay=0): self.errors = errors self.tries = tries self.delay = delay def __call__(self, func): @wraps(func) def _(*args, **kw): retry_left_count = self.tries while retry_left_count: try: return func(*args, **kw) except Exception, e: retry_left_count -= 1 if not isinstance(e, self.errors): raise e if not retry_left_count: raise RetryExceededError if self.delay: time.sleep(self.delay) return _
# -*- coding: utf-8 -*- from functools import wraps import time class RetryExceededError(Exception): pass class retry(object): '''A decorator encapsulated retry logic. Usage: @retry(errors=(TTransportException, AnyExpectedError)) @retry() # detect whatsoever errors and retry 3 times ''' def __init__(self, errors=(Exception, ), tries=3, delay=0): self.errors = errors self.tries = tries self.delay = delay def __call__(self, func): @wraps(func) def _(*args, **kw): retry_left_count = self.tries while retry_left_count: try: return func(*args, **kw) except Exception, e: retry_left_count -= 1 if not isinstance(e, self.errors): raise e if not retry_left_count: raise RetryExceededError if self.delay: time.sleep(self.delay) return _
Add a usage in retry
Add a usage in retry
Python
mit
soasme/retries
54b21220db28dc4ce34a360d7754add872f702c7
systemvm/patches/debian/config/opt/cloud/bin/cs_ip.py
systemvm/patches/debian/config/opt/cloud/bin/cs_ip.py
from pprint import pprint #[{u'accountId': 2, #u'add': True, #u'broadcastUri': u'vlan://untagged', #u'firstIP': False, #u'networkRate': 200, #u'newNic': False, #u'nicDevId': 1, #u'oneToOneNat': False, #u'publicIp': u'10.0.2.102', #u'sourceNat': True, #u'trafficType': u'Public', #u'vifMacAddress': u'06:f6:5e:00:00:03', #u'vlanGateway': u'10.0.2.1', #u'vlanNetmask': u'255.255.255.0'}] def merge(dbag, ip): added = False for mac in dbag: if mac == "id": continue for address in dbag[mac]: if address['publicIp'] == ip['publicIp']: dbag[mac].remove(address) if ip['add']: dbag.setdefault('eth' + str(ip['nicDevId']), []).append( ip ) return dbag
from pprint import pprint #[{u'accountId': 2, #u'add': True, #u'broadcastUri': u'vlan://untagged', #u'firstIP': False, #u'networkRate': 200, #u'newNic': False, #u'nicDevId': 1, #u'oneToOneNat': False, #u'publicIp': u'10.0.2.102', #u'sourceNat': True, #u'trafficType': u'Public', #u'vifMacAddress': u'06:f6:5e:00:00:03', #u'vlanGateway': u'10.0.2.1', #u'vlanNetmask': u'255.255.255.0'}] def merge(dbag, ip): added = False for mac in dbag: if mac == "id": continue for address in dbag[mac]: if address['public_ip'] == ip['public_ip']: dbag[mac].remove(address) if ip['add']: dbag.setdefault('eth' + str(ip['nic_dev_id']), []).append( ip ) return dbag
Use json naming standards instead of camelCase
Use json naming standards instead of camelCase
Python
apache-2.0
jcshen007/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,jcshen007/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,resmo/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,resmo/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,resmo/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,resmo/cloudstack,jcshen007/cloudstack,resmo/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack
4b52f2c237ff3c73af15846e7ae23436af8ab6c7
airesources/Python/BasicBot.py
airesources/Python/BasicBot.py
from hlt import * from networking import * playerTag, gameMap = getInit() sendInit("BasicBot"+str(playerTag)) turtleFactor = random.randint(1, 20) while True: moves = [] gameMap = getFrame() for y in range(0, len(gameMap.contents)): for x in range(0, len(gameMap.contents[y])): site = gameMap.contents[y][x] if site.owner == playerTag: direction = random.randint(0, 5) if site.strength < turtleFactor*site.production: direction = STILL else: for d in CARDINALS: if gameMap.getSite(Location(x, y), d).owner != playerTag: direction = d break moves.append(Move(Location(x, y), direction)) sendFrame(moves)
from hlt import * from networking import * playerTag, gameMap = getInit() sendInit("BasicBot"+str(playerTag)) while True: moves = [] gameMap = getFrame() for y in range(0, len(gameMap.contents)): for x in range(0, len(gameMap.contents[y])): site = gameMap.contents[y][x] if site.owner == playerTag: direction = random.randint(0, 5) if site.strength < 5*site.production: direction = STILL else: for d in CARDINALS: if gameMap.getSite(Location(x, y), d).owner != playerTag: direction = d break moves.append(Move(Location(x, y), direction)) sendFrame(moves)
Revert basic bot random turtle factor
Revert basic bot random turtle factor Former-commit-id: 53ffe42cf718cfedaa3ec329b0688c093513683c Former-commit-id: 6a282c036f4e11a0aa9e954f72050053059ac557 Former-commit-id: c52f52d401c4a3768c7d590fb02f3d08abd38002
Python
mit
HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,yangle/HaliteIO,yangle/HaliteIO,lanyudhy/Halite-II,yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite-II,yangle/HaliteIO,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,yangle/HaliteIO,HaliteChallenge/Halite,yangle/HaliteIO,HaliteChallenge/Halite-II,HaliteChallenge/Halite,yangle/HaliteIO,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite,yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II
0bff34400d912806a9d831f5e0436082d359a531
tomviz/python/tomviz/state/_pipeline.py
tomviz/python/tomviz/state/_pipeline.py
from tomviz._wrapping import PipelineStateManagerBase class PipelineStateManager(PipelineStateManagerBase): _instance = None # Need to define a constructor as the implementation on the C++ side is # static. def __init__(self): pass def __call__(cls): if cls._instance is None: cls._instance = super(PipelineStateManager, cls).__call__() return cls._instances
from tomviz._wrapping import PipelineStateManagerBase class PipelineStateManager(PipelineStateManagerBase): _instance = None def __new__(cls, *args, **kwargs): if cls._instance is None: cls._instance = PipelineStateManagerBase.__new__(cls, *args, **kwargs) return cls._instance
Fix singleton to work with wrapped manager class
Fix singleton to work with wrapped manager class Signed-off-by: Chris Harris <[email protected]>
Python
bsd-3-clause
OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz
5a4401df95d3b8cb72e78edb30669d6fa88e4712
transaction_downloader/transaction_downloader.py
transaction_downloader/transaction_downloader.py
"""Transaction Downloader. Usage: transaction-downloader auth --account=<account-name> transaction-downloader -h | --help transaction-downloader --version Options: -h --help Show this screen. --version Show version. --account=<account-name> Account to work with. """ import json from docopt import docopt from pkg_resources import require def read_credentials(account): credentials = {} with open('plaid-credentials.json') as json_data: credentials = json.load(json_data) with open('cfg/%s.json'%account) as json_data: credentials["account"] = {}; credentials["account"]["name"] = account credentials["account"]["credentials"] = json.load(json_data) return credentials def main(): version = require("transaction-downloader")[0].version args = docopt(__doc__, version=version) print(args) if __name__ == '__main__': main()
"""Transaction Downloader. Usage: transaction-downloader auth --account=<account-name> transaction-downloader -h | --help transaction-downloader --version Options: -h --help Show this screen. --version Show version. --account=<account-name> Account to work with. """ import json from docopt import docopt from pkg_resources import require def read_credentials(account): credentials = {} with open('plaid-credentials.json') as json_data: credentials = json.load(json_data) with open('cfg/%s.json'%account) as json_data: credentials["account"] = {}; credentials["account"]["name"] = account credentials["account"]["credentials"] = json.load(json_data) return credentials def main(): version = require("transaction-downloader")[0].version args = docopt(__doc__, version=version) print(args) credentials = read_credentials(args['--account']) if __name__ == '__main__': main()
Read credentials based on account.
Read credentials based on account.
Python
mit
ebridges/plaid2qif,ebridges/plaid2qif,ebridges/plaid2qif
cbdcdf16285823a8e13a68c8e86d6957aa7aa6d8
kivy/tools/packaging/pyinstaller_hooks/pyi_rth_kivy.py
kivy/tools/packaging/pyinstaller_hooks/pyi_rth_kivy.py
import os import sys root = os.path.join(sys._MEIPASS, 'kivy_install') os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data') os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules') os.environ['GST_PLUGIN_PATH'] = '{};{}'.format( sys._MEIPASS, os.path.join(sys._MEIPASS, 'gst-plugins')) os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin') sys.path += [os.path.join(root, '_libs')] if sys.platform == 'darwin': sitepackages = os.path.join(sys._MEIPASS, 'sitepackages') sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')] os.putenv('GST_REGISTRY_FORK', 'no')
import os import sys root = os.path.join(sys._MEIPASS, 'kivy_install') os.environ['KIVY_DATA_DIR'] = os.path.join(root, 'data') os.environ['KIVY_MODULES_DIR'] = os.path.join(root, 'modules') os.environ['GST_PLUGIN_PATH'] = os.path.join(sys._MEIPASS, 'gst-plugins') os.environ['GST_REGISTRY'] = os.path.join(sys._MEIPASS, 'registry.bin') sys.path += [os.path.join(root, '_libs')] if sys.platform == 'darwin': sitepackages = os.path.join(sys._MEIPASS, 'sitepackages') sys.path += [sitepackages, os.path.join(sitepackages, 'gst-0.10')] os.putenv('GST_REGISTRY_FORK', 'no')
Fix GST_PLUGIN_PATH in runtime hook
Fix GST_PLUGIN_PATH in runtime hook - Only include `gst-plugins` - Also, semicolon was only correct on Windows
Python
mit
inclement/kivy,inclement/kivy,kivy/kivy,kivy/kivy,akshayaurora/kivy,akshayaurora/kivy,kivy/kivy,matham/kivy,rnixx/kivy,matham/kivy,inclement/kivy,matham/kivy,matham/kivy,rnixx/kivy,akshayaurora/kivy,rnixx/kivy
2a9ac93236838b12b58f2f180265a23658e2a95b
programmingtheorems/python/theorem_of_selection.py
programmingtheorems/python/theorem_of_selection.py
#! /usr/bin/env python # Copyright Lajos Katona # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. def selection_brute(mylist, t): for i, l in enumerate(mylist): if t == l: return i return 0 def selection_pythonic(mylist, t): return mylist.index(t) if __name__ == '__main__': mylist = [1, 2, 3, 4, 5] print(selection_brute(mylist, 4)) print(selection_pythonic(mylist, 4))
#! /usr/bin/env python # Copyright Lajos Katona # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. def selection_brute(mlist, t): for i, l in enumerate(mlist): if t == l: return i return -1 def selection_pythonic(mlist, t): return mlist.index(t) if __name__ == '__main__': mylist = [1, 2, 3, 4, 5] print(selection_brute(mylist, 4)) print(selection_pythonic(mylist, 4))
Fix typo in selection theorem
Fix typo in selection theorem Change-Id: Ieff9fe7e5783e0d3b995fb0ddbfc11015ca9197a
Python
apache-2.0
elajkat/hugradexam,elajkat/hugradexam
f22cabf494f13535cdbb489f12e98c7358a29f74
openstack/tests/functional/telemetry/v2/test_sample.py
openstack/tests/functional/telemetry/v2/test_sample.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import unittest from openstack.telemetry.v2 import sample from openstack.tests.functional import base @unittest.skipUnless(base.service_exists(service_type="metering"), "Metering service does not exist") class TestSample(base.BaseFunctionalTest): def test_list(self): for meter in self.conn.telemetry.meters(): sot = next(self.conn.telemetry.samples(meter)) assert isinstance(sot, sample.Sample)
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import unittest from openstack.telemetry.v2 import sample from openstack.tests.functional import base @unittest.skipUnless(base.service_exists(service_type="metering"), "Metering service does not exist") class TestSample(base.BaseFunctionalTest): def test_list(self): for meter in self.conn.telemetry.meters(): for sot in self.conn.telemetry.samples(meter): assert isinstance(sot, sample.Sample)
Fix the telemetry sample test
Fix the telemetry sample test This test works fine on devstack, but on the test gate not all the meters have samples, so only iterate over them if there are samples. Partial-bug: #1665495 Change-Id: I8f327737a53194aeba08925391f1976f1b506aa0
Python
apache-2.0
dtroyer/python-openstacksdk,stackforge/python-openstacksdk,briancurtin/python-openstacksdk,openstack/python-openstacksdk,briancurtin/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,dtroyer/python-openstacksdk
618a1f520f2584ec3cf56b29cf71c9ad6b4240fd
tests/acceptance/assignments/one_second_timeout/correct_solution/sleep.py
tests/acceptance/assignments/one_second_timeout/correct_solution/sleep.py
from time import sleep sleep(1)
from time import sleep # Due to the overhead of Python, sleeping for 1 second will cause testing to # time out if the timeout is 1 second sleep(1)
Add comment to one_second_timeout assignment
Add comment to one_second_timeout assignment
Python
agpl-3.0
git-keeper/git-keeper,git-keeper/git-keeper
1f3e56b79f933a1d450074d1c4485e34c97f2806
pyqt.py
pyqt.py
#! /usr/bin/python3 import sys from PyQt5.QtWidgets import (QWidget, QHBoxLayout, QLabel, QApplication, QPushButton) from PyQt5.QtGui import QPixmap from PyQt5.QtCore import QObject class FreakingQtImageViewer(QWidget): def __init__(self, function): super().__init__() self.function = function self.initUI(function) def refresh(self): self.function() pixmap = QPixmap("tmp.png") pixmap = pixmap.scaledToWidth(800) self.lbl.setPixmap(pixmap) def initUI(self, function): hbox = QHBoxLayout(self) self.lbl = QLabel(self) self.refresh() btn = QPushButton(self) btn.setText('Drück mich') btn.clicked.connect(self.refresh) hbox.addWidget(self.lbl) hbox.addWidget(btn) self.setLayout(hbox) self.move(300, 200) self.setWindowTitle('Freaking Qt Image Viewer') self.show()
#! /usr/bin/python3 import sys import time from PyQt5.QtWidgets import (QWidget, QHBoxLayout, QLabel, QApplication, QPushButton) from PyQt5.QtGui import QPixmap from PyQt5.QtCore import QObject class FreakingQtImageViewer(QWidget): def __init__(self, function): super().__init__() self.function = function self.initUI(function) self.refresh = False def refresh(self): if !self.refresh: self.refresh = True while self.refresh: self.function() pixmap = QPixmap("tmp.png") pixmap = pixmap.scaledToWidth(800) self.lbl.setPixmap(pixmap) time.sleep(0.5) else: self.refresh = False def initUI(self, function): hbox = QHBoxLayout(self) self.lbl = QLabel(self) self.refresh() btn = QPushButton(self) btn.setText('Drück mich') btn.clicked.connect(self.refresh) hbox.addWidget(self.lbl) hbox.addWidget(btn) self.setLayout(hbox) self.move(300, 200) self.setWindowTitle('Freaking Qt Image Viewer') self.show()
Update image every 0.5s till button gets pressed again
Update image every 0.5s till button gets pressed again
Python
mit
philipptrenz/draughtsCV,philipptrenz/Physical-Image-Manipulation-Program,philipptrenz/draughtsCV
008f0a2b0a7823e619410c5af70061d093c6f3de
timeseries.py
timeseries.py
#!/usr/bin/env python #Go through an OpenXC trace file and plot a time series graph using #matplotlib import json import sys import argparse from pylab import * def main(): #Set up the command line argument parser parser = argparse.ArgumentParser() parser.add_argument("input_file", help = "name of the input file") parser.add_argument("-y", help = "the key to use for the function being plotted") args = parser.parse_args() input_file_name = args.input_file y_key = str(args.y) #initialize the x axis and function to be plotted x = [] y = [] with open(input_file_name, 'r') as input_file: for line in input_file: if not line.rstrip() : continue try: obj = json.loads(line) except ValueError: print("Skipping invalid JSON: %s" % line) continue #if the parsed JSON object has the key we're looking for, #add the key's value to the y graph and the timestamp #to the x list if obj['name'] == y_key: y.append(obj['value']) x.append(obj['timestamp']) autoscale(True, 'both') plot(x, y, label = y_key) legend(loc='upper left') show() if __name__ == "__main__": main()
#!/usr/bin/env python #Go through an OpenXC trace file and plot a time series graph using #matplotlib import json import sys import argparse from pylab import * def main(): #Set up the command line argument parser parser = argparse.ArgumentParser() parser.add_argument("input_file", help = "name of the input file") parser.add_argument("-y", help = "the key to use for the function being plotted") parser.add_argument("-x", help = "the key to use for the function being plotted", default=None) args = parser.parse_args() input_file_name = args.input_file y_key = str(args.y) x_key = args.x #initialize the x axis and function to be plotted x = [] y = [] with open(input_file_name, 'r') as input_file: for line in input_file: if not line.rstrip() : continue try: obj = json.loads(line) except ValueError: print("Skipping invalid JSON: %s" % line) continue #if the parsed JSON object has the key we're looking for, #add the key's value to the y graph and the timestamp #to the x list if obj['name'] == y_key: y.append(obj['value']) if x_key is None: x.append(obj['timestamp']) if obj['name'] == x_key: x.append(obj['value']) autoscale(True, 'both') xlabel(x_key or 'timestamp') ylabel(y_key) plot(x, y, 'ro') show() if __name__ == "__main__": main()
Allow plotting two types against one another.
Allow plotting two types against one another.
Python
bsd-3-clause
openxc/openxc-data-tools
7d9ec40e8a48e747880a35279b63439afccc1284
urls.py
urls.py
# -*- coding: utf-8 -*- from django.conf.urls import patterns, url from django.conf import settings import os.path static_files_path = os.path.join(settings.PROJECT_DIR, "static") urlpatterns = patterns('vortaro.views', url(r'^informo$', 'about', name="about"), url(r'^informo/api$', 'about_the_api', name="about_the_api"), url(ur'^serĉo$', 'search_word', name="search_word"), url(r'^vorto/(?P<word>.*)$', 'view_word', name="view_word"), url(u'^$', 'index', name="index"), ) urlpatterns += patterns('api.views', url(u'^api/v1/vorto/(?P<word>.+)$', 'view_word', name="api_view_word"), # We're deliberately using a non-UTF8 URL prefix to hopefully make it easier # to use the API. url(u'^api/v1/trovi/(?P<search_term>.+)$', 'search_word', name="api_search_word"), ) if settings.DEBUG: # Serve static files using Django during development. urlpatterns += patterns('', (r'^resources/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_files_path}))
# -*- coding: utf-8 -*- from django.conf.urls import patterns, url from django.conf import settings from django.views.generic import TemplateView import os.path static_files_path = os.path.join(settings.PROJECT_DIR, "static") urlpatterns = patterns('vortaro.views', url(r'^informo$', 'about', name="about"), url(r'^informo/api$', 'about_the_api', name="about_the_api"), url(ur'^serĉo$', 'search_word', name="search_word"), url(r'^vorto/(?P<word>.*)$', 'view_word', name="view_word"), url(u'^$', 'index', name="index"), ) urlpatterns += patterns('api.views', url(u'^api/v1/vorto/(?P<word>.+)$', 'view_word', name="api_view_word"), # We're deliberately using a non-UTF8 URL prefix to hopefully make it easier # to use the API. url(u'^api/v1/trovi/(?P<search_term>.+)$', 'search_word', name="api_search_word"), ) if settings.DEBUG: # Serve static files using Django during development. urlpatterns += patterns('', (r'^resources/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_files_path}), url(r'^404$', TemplateView.as_view(template_name='404.html')), )
Allow viewing the 404 page during development.
Allow viewing the 404 page during development.
Python
agpl-3.0
Wilfred/simpla-vortaro,Wilfred/simpla-vortaro
7481c6aad4cd844b0c3fab6f05e4d24aa3c17770
src/nodeconductor_assembly_waldur/invoices/log.py
src/nodeconductor_assembly_waldur/invoices/log.py
from nodeconductor.logging.loggers import EventLogger, event_logger class InvoiceLogger(EventLogger): month = int year = int customer = 'structure.Customer' class Meta: event_types = ('invoice_created', 'invoice_paid', 'invoice_canceled') event_logger.register('invoice', InvoiceLogger)
from nodeconductor.logging.loggers import EventLogger, event_logger class InvoiceLogger(EventLogger): month = int year = int customer = 'structure.Customer' class Meta: event_types = ('invoice_created', 'invoice_paid', 'invoice_canceled') event_groups = { 'customers': event_types, 'invoices': event_types, } event_logger.register('invoice', InvoiceLogger)
Define groups for the invoice events.
Define groups for the invoice events. - wal-202
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind
6618b12cef2759174148d1c7f69cbb91b8ea4482
mygpo/podcasts/migrations/0015_auto_20140616_2126.py
mygpo/podcasts/migrations/0015_auto_20140616_2126.py
# encoding: utf8 from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('podcasts', '0014_auto_20140615_1032'), ] operations = [ migrations.AlterField( model_name='slug', name='scope', field=models.CharField(db_index=True, max_length=32, blank=True), ), migrations.AlterField( model_name='url', name='scope', field=models.CharField(db_index=True, max_length=32, blank=True), ), ]
# encoding: utf8 from __future__ import unicode_literals from django.db import models, migrations def set_scope(apps, schema_editor): URL = apps.get_model('podcasts', 'URL') Slug = apps.get_model('podcasts', 'Slug') URL.objects.filter(scope__isnull=True).update(scope='') Slug.objects.filter(scope__isnull=True).update(scope='') class Migration(migrations.Migration): dependencies = [ ('podcasts', '0014_auto_20140615_1032'), ] operations = [ migrations.AlterField( model_name='slug', name='scope', field=models.CharField(db_index=True, max_length=32, blank=True, null=True), ), migrations.AlterField( model_name='url', name='scope', field=models.CharField(db_index=True, max_length=32, blank=True, null=True), ), migrations.RunPython(set_scope), migrations.AlterField( model_name='slug', name='scope', field=models.CharField(db_index=True, max_length=32, blank=True, null=False), ), migrations.AlterField( model_name='url', name='scope', field=models.CharField(db_index=True, max_length=32, blank=True, null=False), ), ]
Fix data migration when making scope non-null
[DB] Fix data migration when making scope non-null
Python
agpl-3.0
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
e5af653b2133b493c7888bb305488e932acb2274
doc/examples/special/plot_hinton.py
doc/examples/special/plot_hinton.py
""" ============== Hinton diagram ============== Hinton diagrams are useful for visualizing the values of a 2D array. Positive and negative values represented by white and black squares, respectively, and the size of each square represents the magnitude of each value. The `special.hinton` function is based off of the Hinton demo in the matplotlib gallery [1]_. This implementation, however, uses a `RegularPolyCollection` to draw squares, which is much more efficient than drawing individual rectangles. Obscure example use: For my Ph.D., I wrote a numerical solver using finite-differences and writing down Jacobian matrices analytically, was incredibly-prone to bugs. To debug my code, I calculated the numerical Jacobian (calculated using `scipy.optimize.slsqp.approx_jacobian`) and plotted the Hinton diagram for for the difference of the numerical and analytical results. You could, of course, use `pcolor` or `imshow` in a similar situation. """ import numpy as np import matplotlib.pyplot as plt from mpltools import special A = np.random.uniform(-1, 1, size=(20, 20)) special.hinton(A) plt.show()
""" ============== Hinton diagram ============== Hinton diagrams are useful for visualizing the values of a 2D array: Positive and negative values are represented by white and black squares, respectively, and the size of each square represents the magnitude of each value. ``special.hinton`` is based off of the `Hinton demo`_ in the matplotlib gallery. This implementation, however, uses a ``RegularPolyCollection`` to draw squares, which is much more efficient than drawing individual rectangles. Obscure example use: For my Ph.D., I wrote a numerical solver using finite-differences. For speed, the Jacobian matrices were calculated analytically, which was incredibly-prone to bugs. To debug my code, I calculated the numerical Jacobian (calculated using ``scipy.optimize.slsqp.approx_jacobian``) and plotted the Hinton diagram for the difference of the numerical and analytical results. That allowed me to narrow down where the bugs were (boundary conditions!) instead of blindly checking every equation. You could, of course, use ``pcolor`` or ``imshow`` in a similar situation. .. _Hinton demo: http://matplotlib.sourceforge.net/examples/api/hinton_demo.html """ import numpy as np import matplotlib.pyplot as plt from mpltools import special A = np.random.uniform(-1, 1, size=(20, 20)) special.hinton(A) plt.show()
Clean up hinton example text.
DOC: Clean up hinton example text.
Python
bsd-3-clause
tonysyu/mpltools,matteoicardi/mpltools
d2a0d0d22a8369c99626ca754a337ea8076f7efa
aybu/core/models/migrations/versions/587c89cfa8ea_added_column_weight_.py
aybu/core/models/migrations/versions/587c89cfa8ea_added_column_weight_.py
"""Added column 'weight' to Banner, Logo and Background. Revision ID: 587c89cfa8ea Revises: 2c0bfc379e01 Create Date: 2012-05-11 14:36:15.518757 """ # downgrade revision identifier, used by Alembic. revision = '587c89cfa8ea' down_revision = '2c0bfc379e01' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('files', sa.Column('weight', sa.Integer(), nullable=False, default=0)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('files', 'weight') ### end Alembic commands ###
"""Added column 'weight' to Banner, Logo and Background. Revision ID: 587c89cfa8ea Revises: 2c0bfc379e01 Create Date: 2012-05-11 14:36:15.518757 """ # downgrade revision identifier, used by Alembic. revision = '587c89cfa8ea' down_revision = '2c0bfc379e01' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('files', sa.Column('weight', sa.Integer(), nullable=True, default=0)) connection = op.get_bind() connection.execute('UPDATE files SET weight=0') op.alter_column('files', 'weight', existing_type=sa.Integer, nullable=False) def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('files', 'weight') ### end Alembic commands ###
Fix bug in migration script
Fix bug in migration script
Python
apache-2.0
asidev/aybu-core
319927dd4548f8d5990bad4be271bfce7f29b10b
subscribe/management/commands/refresh_issuers.py
subscribe/management/commands/refresh_issuers.py
from django.core.management.base import BaseCommand
from django.db.transaction import commit_on_success

from subscribe.models import IdealIssuer
from lib import mollie


# command to update bank list (ideal issuers)
# run as 'python manage.py refresh_issuers'
class Command(BaseCommand):
    @commit_on_success
    def handle(self, *args, **options):
        # Clean old issuers
        IdealIssuer.objects.all().delete()
        for bank in mollie.banklist():
            issuer = IdealIssuer(issuer_id=bank.bank_id, name=bank.bank_name)
            issuer.save()
            print "%d\t%s" % (bank.bank_id, bank.bank_name)
from django.core.management.base import BaseCommand
from django.db import transaction

from subscribe.models import IdealIssuer
from lib import mollie


# command to update bank list (ideal issuers)
# run as 'python manage.py refresh_issuers'
class Command(BaseCommand):
    @transaction.atomic
    def handle(self, *args, **options):
        # Clean old issuers
        IdealIssuer.objects.all().delete()
        for bank in mollie.banklist():
            issuer = IdealIssuer(issuer_id=bank.bank_id, name=bank.bank_name)
            issuer.save()
            print "%d\t%s" % (bank.bank_id, bank.bank_name)
Replace deprecated commit_on_success by atomic
Replace deprecated commit_on_success by atomic
Python
mit
jonge-democraten/dyonisos,jonge-democraten/dyonisos,jonge-democraten/dyonisos
5e368e1fbf30a3e489be6c754d8b888a31bfde47
wger/manager/migrations/0011_remove_set_exercises.py
wger/manager/migrations/0011_remove_set_exercises.py
# Generated by Django 3.1.5 on 2021-02-28 14:10

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('manager', '0010_auto_20210102_1446'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='set',
            name='exercises',
        ),
    ]
# Generated by Django 3.1.5 on 2021-02-28 14:10

from django.db import migrations


def increment_order(apps, schema_editor):
    """
    Increment the oder in settings so ensure the order is preserved

    Otherwise, and depending on the database, when a set has supersets,
    the exercises could be ordered alphabetically.
    """
    WorkoutSet = apps.get_model("manager", "Set")

    for workout_set in WorkoutSet.objects.all():
        counter = 1
        for exercise in workout_set.exercises.all():
            for setting in workout_set.setting_set.filter(exercise=exercise):
                setting.order = counter
                setting.save()
                counter += 1


class Migration(migrations.Migration):

    dependencies = [
        ('manager', '0010_auto_20210102_1446'),
    ]

    operations = [
        migrations.RunPython(increment_order),
        migrations.RemoveField(
            model_name='set',
            name='exercises',
        ),
    ]
Increment the oder in settings so ensure the order is preserved
Increment the oder in settings so ensure the order is preserved

Otherwise, and depending on the database, when a set has supersets,
the exercises could be ordered alphabetically.
Python
agpl-3.0
wger-project/wger,petervanderdoes/wger,wger-project/wger,wger-project/wger,petervanderdoes/wger,wger-project/wger,petervanderdoes/wger,petervanderdoes/wger
29c437e15f7793886c80b71ca6764184caff2597
readthedocs/oauth/management/commands/load_project_remote_repo_relation.py
readthedocs/oauth/management/commands/load_project_remote_repo_relation.py
import json

from django.core.management.base import BaseCommand

from readthedocs.oauth.models import RemoteRepository


class Command(BaseCommand):
    help = "Load Project and RemoteRepository Relationship from JSON file"

    def add_arguments(self, parser):
        # File path of the json file containing relationship data
        parser.add_argument(
            '--file',
            required=True,
            nargs=1,
            type=str,
            help='File path of the json file containing relationship data.',
        )

    def handle(self, *args, **options):
        file = options.get('file')[0]

        try:
            # Load data from the json file
            with open(file, 'r') as f:
                data = json.load(f)
        except Exception as e:
            self.stdout.write(
                self.style.ERROR(
                    f'Exception occurred while trying to load the file "{file}". '
                    f'Exception: {e}.'
                )
            )
            return

        for item in data:
            try:
                RemoteRepository.objects.filter(
                    remote_id=item['remote_id']
                ).update(project_id=item['project_id'])
            except Exception as e:
                self.stdout.write(
                    self.style.ERROR(
                        f"Exception occurred while trying to update {item['slug']}'s "
                        f"relationship with {item['html_url']}, "
                        f"username: {item['username']}, Exception: {e}."
                    )
                )
import json

from django.core.management.base import BaseCommand

from readthedocs.oauth.models import RemoteRepository


class Command(BaseCommand):
    help = "Load Project and RemoteRepository Relationship from JSON file"

    def add_arguments(self, parser):
        # File path of the json file containing relationship data
        parser.add_argument(
            '--file',
            required=True,
            nargs=1,
            type=str,
            help='File path of the json file containing relationship data.',
        )

    def handle(self, *args, **options):
        file = options.get('file')[0]

        try:
            # Load data from the json file
            with open(file, 'r') as f:
                data = json.load(f)
        except Exception as e:
            self.stdout.write(
                self.style.ERROR(
                    f'Exception occurred while trying to load the file "{file}". '
                    f'Exception: {e}.'
                )
            )
            return

        for item in data:
            try:
                update_count = RemoteRepository.objects.filter(
                    remote_id=item['remote_id']
                ).update(project_id=item['project_id'])

                if update_count < 1:
                    self.stdout.write(
                        self.style.ERROR(
                            f"Could not update {item['slug']}'s "
                            f"relationship with {item['html_url']}, "
                            f"remote_id {item['remote_id']}, "
                            f"username: {item['username']}."
                        )
                    )
            except Exception as e:
                self.stdout.write(
                    self.style.ERROR(
                        f"Exception occurred while trying to update {item['slug']}'s "
                        f"relationship with {item['html_url']}, "
                        f"username: {item['username']}, Exception: {e}."
                    )
                )
Check if the remote_repo was updated or not and log error
Check if the remote_repo was updated or not and log error
Python
mit
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
9696cbc35830b69767320166424e21d717e71d12
tests/__init__.py
tests/__init__.py
# -*- coding: utf-8

"""
Python implementation of Non-Stationary Gabor Transform (NSGT)
derived from MATLAB code by NUHAG, University of Vienna, Austria

Thomas Grill, 2011-2015
http://grrrr.org/nsgt

Austrian Research Institute for Artificial Intelligence (OFAI)
AudioMiner project, supported by Vienna Science and Technology Fund (WWTF)

---
Unit test module
"""
# -*- coding: utf-8

"""
Python implementation of Non-Stationary Gabor Transform (NSGT)
derived from MATLAB code by NUHAG, University of Vienna, Austria

Thomas Grill, 2011-2015
http://grrrr.org/nsgt

Austrian Research Institute for Artificial Intelligence (OFAI)
AudioMiner project, supported by Vienna Science and Technology Fund (WWTF)

---
Unit test module
"""

import random
import numpy as np

# seed random generators for unit testing
random.seed(666)
np.random.seed(666)
Initialize random generator seed for unit testing
Initialize random generator seed for unit testing
Python
artistic-2.0
grrrr/nsgt
dd42c1c1b1cd0cbe55c27cafe9d2db5466782bc4
server/users-microservice/src/api/users/userModel.py
server/users-microservice/src/api/users/userModel.py
from index import db


class UserModel(db.Model):
    __tablename__ = 'User'

    id = db.Column(db.Integer, primary_key=True, nullable=False)
    name = db.Column(db.String(80), unique=True, nullable=False)
    fullname = db.Column(db.String(80), unique=True, nullable=False)
    initials = db.Column(db.String(10), unique=True, nullable=False)
    email = db.Column(db.String(255), unique=True, nullable=False)
    password = db.Column(db.String(80), unique=True, nullable=False)
    application = db.Column(db.String(80), unique=True, nullable=False)

    def __init__(self, name, fullname, initials, email, password, application):
        self.name = name
        self.fullname = fullname
        self.initials = initials
        self.email = email
        self.password = password
        self.application = application

    def __repr__(self):
        return self.name
from index import db, brcypt


class UserModel(db.Model):
    __tablename__ = 'User'

    id = db.Column(db.Integer, primary_key=True, nullable=False)
    name = db.Column(db.String(80), unique=True, nullable=False)
    fullname = db.Column(db.String(80), unique=True, nullable=False)
    initials = db.Column(db.String(10), unique=True, nullable=False)
    email = db.Column(db.String(255), unique=True, nullable=False)
    password = db.Column(db.String(80), unique=True, nullable=False)
    application = db.Column(db.String(80), unique=True, nullable=False)

    def __init__(self, name, fullname, initials, email, password, application):
        self.name = name
        self.fullname = fullname
        self.initials = initials
        self.email = email
        self.application = application
        self.set_password(password)

    def __repr__(self):
        return self.name

    def set_password(self, password):
        self.password = bcrypt.generate_password_hash(password)

    def check_password(self, password):
        return bcrypt.check_password_hash(self.password, password)
Encrypt password before saving user
Encrypt password before saving user
Python
mit
Madmous/Trello-Clone,Madmous/madClones,Madmous/madClones,Madmous/madClones,Madmous/madClones,Madmous/Trello-Clone,Madmous/Trello-Clone
17492956ea8b4ed8b5465f6a057b6e026c2d4a75
openquake/engine/tests/export/core_test.py
openquake/engine/tests/export/core_test.py
# Copyright (c) 2010-2014, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.

import os
import unittest

from openquake.commonlib import nrml


def number_of(elem_name, tree):
    """
    Given an element name (including the namespaces prefix, if applicable),
    return the number of occurrences of the element in a given XML document.
    """
    expr = '//%s' % elem_name
    return len(tree.xpath(expr, namespaces=nrml.PARSE_NS_MAP))


class BaseExportTestCase(unittest.TestCase):

    def _test_exported_file(self, filename):
        self.assertTrue(os.path.exists(filename))
        self.assertTrue(os.path.isabs(filename))
        self.assertTrue(os.path.getsize(filename) > 0)
# Copyright (c) 2010-2014, GEM Foundation.
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.

import os
import unittest

from openquake.commonlib import nrml


def number_of(elem_name, tree):
    """
    Given an element name (including the namespaces prefix, if applicable),
    return the number of occurrences of the element in a given XML document.
    """
    expr = '//%s' % elem_name
    return len(tree.xpath(expr, namespaces={'nrml': nrml.NRML05}))


class BaseExportTestCase(unittest.TestCase):

    def _test_exported_file(self, filename):
        self.assertTrue(os.path.exists(filename))
        self.assertTrue(os.path.isabs(filename))
        self.assertTrue(os.path.getsize(filename) > 0)
Fix a broken export test
Fix a broken export test

Former-commit-id: 4b369edfcb5782a2461742547f5b6af3bab4f759 [formerly e37e964bf9d2819c0234303d31ed2839c317be04]
Former-commit-id: 5b8a20fa99eab2f33c8f293a505a2dbadad36eee
Python
agpl-3.0
gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine
17ab8c01a88bda8dba4aaa5e57c857babfeb9444
debtcollector/fixtures/disable.py
debtcollector/fixtures/disable.py
# -*- coding: utf-8 -*-

# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from __future__ import absolute_import

import fixtures

from debtcollector import _utils


class DisableFixture(fixtures.Fixture):
    """Fixture that disables debtcollector triggered warnings.

    This does **not** disable warnings calls emitted by other libraries.

    This can be used like::

        from debtcollector.fixtures import disable

        with disable.DisableFixture():
            <some code that calls into depreciated code>
    """

    def _setUp(self):
        self.addCleanup(setattr, _utils, "_enabled", True)
        _utils._enabled = False
# -*- coding: utf-8 -*-

# Copyright (C) 2015 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import fixtures

from debtcollector import _utils


class DisableFixture(fixtures.Fixture):
    """Fixture that disables debtcollector triggered warnings.

    This does **not** disable warnings calls emitted by other libraries.

    This can be used like::

        from debtcollector.fixtures import disable

        with disable.DisableFixture():
            <some code that calls into depreciated code>
    """

    def _setUp(self):
        self.addCleanup(setattr, _utils, "_enabled", True)
        _utils._enabled = False
Stop to use the __future__ module.
Stop to use the __future__ module.

The __future__ module [1] was used in this context to ensure compatibility
between python 2 and python 3. We previously dropped the support of python 2.7
[2] and now we only support python 3 so we don't need to continue to use this
module and the imports listed below.

Imports commonly used and their related PEPs:
- `division` is related to PEP 238 [3]
- `print_function` is related to PEP 3105 [4]
- `unicode_literals` is related to PEP 3112 [5]
- `with_statement` is related to PEP 343 [6]
- `absolute_import` is related to PEP 328 [7]

[1] https://docs.python.org/3/library/__future__.html
[2] https://governance.openstack.org/tc/goals/selected/ussuri/drop-py27.html
[3] https://www.python.org/dev/peps/pep-0238
[4] https://www.python.org/dev/peps/pep-3105
[5] https://www.python.org/dev/peps/pep-3112
[6] https://www.python.org/dev/peps/pep-0343
[7] https://www.python.org/dev/peps/pep-0328

Change-Id: I2b2f006e0ec145730bec843add4147345797b920
Python
apache-2.0
openstack/debtcollector
00479e3d59e7472c77ea2357f25d5579ad5d5b25
director/director/config/local.py
director/director/config/local.py
from configurations import values

from .common import Common


class Local(Common):
    JWT_SECRET = values.Value('not-a-secret')

    DEBUG = values.BooleanValue(True)
from configurations import values

from .common import Common


class Local(Common):
    JWT_SECRET = values.Value('not-a-secret')

    DEBUG = values.BooleanValue(True)

    EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
Use console EMAIL_BACKEND in development
Use console EMAIL_BACKEND in development
Python
apache-2.0
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
e75e35bd7ffb44b8f5c5a5d674a15c6c366f84ac
django_medusa/management/commands/staticsitegen.py
django_medusa/management/commands/staticsitegen.py
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.urlresolvers import set_script_prefix
from django_medusa.renderers import StaticSiteRenderer
from django_medusa.utils import get_static_renderers


class Command(BaseCommand):
    can_import_settings = True

    help = 'Looks for \'renderers.py\' in each INSTALLED_APP, which defines '\
           'a class for processing one or more URL paths into static files.'

    def handle(self, *args, **options):
        StaticSiteRenderer.initialize_output()

        renderers = [Renderer() for Renderer in get_static_renderers()]
        for renderer in renderers:
            # memoize this first to avoid script prefix pollution
            renderer.paths

        # Set script prefix here
        url_prefix = getattr(settings, 'MEDUSA_URL_PREFIX')
        if url_prefix is not None:
            set_script_prefix(url_prefix)

        # And now generate stuff
        for renderer in renderers:
            renderer.generate()

        StaticSiteRenderer.finalize_output()
from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.urlresolvers import set_script_prefix
from django_medusa.renderers import StaticSiteRenderer
from django_medusa.utils import get_static_renderers


class Command(BaseCommand):
    can_import_settings = True

    help = 'Looks for \'renderers.py\' in each INSTALLED_APP, which defines '\
           'a class for processing one or more URL paths into static files.'

    def handle(self, *args, **options):
        StaticSiteRenderer.initialize_output()

        renderers = [Renderer() for Renderer in get_static_renderers()]
        for renderer in renderers:
            # memoize this first to avoid script prefix pollution
            renderer.paths

        # Set script prefix here
        url_prefix = getattr(settings, 'MEDUSA_URL_PREFIX', None)
        if url_prefix is not None:
            set_script_prefix(url_prefix)

        # And now generate stuff
        for renderer in renderers:
            renderer.generate()

        StaticSiteRenderer.finalize_output()
Handle cases when MEDUSA_URL_PREFIX isn't set
Handle cases when MEDUSA_URL_PREFIX isn't set
Python
mit
hyperair/django-medusa
6160507169c3cdc837b3472bdeb4c604b5c0d5fd
driver27/templatetags/driver27.py
driver27/templatetags/driver27.py
# -*- coding: utf-8 -*-
from django import template

from ..models import Season, Race
from ..common import ordered_position

register = template.Library()


@register.filter
def champion_filter(season_id):
    if season_id:
        season = Season.objects.get(pk=season_id)
        return '<span class="champion_tag">&#9818;</span>' if season.has_champion() else ''
    else:
        return ''


@register.filter(is_safe=False)
def get_attribute(obj, attr):
    if attr is None:
        return None
    return getattr(obj, attr)


@register.filter(is_safe=False)
def order_results(results, pos_key):
    return sorted(results, key=lambda result: (ordered_position(result, pos_key)))


@register.filter
def print_pos(pos):
    str_pos = u''
    if pos:
        str_pos = u'{pos}º'.format(pos=pos)
        if pos == 1:
            str_pos = '<strong>{0}</strong>'.format(str_pos)
    return str_pos


@register.filter
def race_url(race_id):
    race = Race.objects.get(pk=race_id)
    return race.get_absolute_url()
# -*- coding: utf-8 -*-
from django import template

from ..models import Season, Race
from ..common import ordered_position

register = template.Library()


@register.filter
def champion_filter(season_id):
    if season_id:
        season = Season.objects.get(pk=season_id)
        return '<span class="champion_tag">&#9818;</span>' if season.has_champion() else ''
    else:
        return ''


@register.filter(is_safe=False)
def get_attribute(obj, attr):
    if attr is None:
        return None
    return getattr(obj, attr)


@register.filter(is_safe=False)
def order_results(results, pos_key):
    return sorted(results, key=lambda result: (ordered_position(result, pos_key)))


@register.filter
def print_pos(pos):
    str_pos = u''
    if pos:
        str_pos = u'{pos}º'.format(pos=pos)
        if pos == 1:
            str_pos = u'<strong>{0}</strong>'.format(str_pos)
    return str_pos


@register.filter
def race_url(race_id):
    race = Race.objects.get(pk=race_id)
    return race.get_absolute_url()
Fix 'print_pos' templatetag for 2.7
Fix 'print_pos' templatetag for 2.7
Python
mit
SRJ9/django-driver27,SRJ9/django-driver27,SRJ9/django-driver27
5aba92fff0303546be0850f786a25659453674a6
masters/master.chromium.webkit/master_source_cfg.py
masters/master.chromium.webkit/master_source_cfg.py
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from buildbot.changes import svnpoller
from buildbot.scheduler import AnyBranchScheduler

from common import chromium_utils

from master import build_utils
from master import gitiles_poller


def WebkitFileSplitter(path):
    """split_file for webkit.org repository."""
    projects = ['trunk']
    return build_utils.SplitPath(projects, path)


def Update(config, _active_master, c):
    # Polls config.Master.trunk_url for changes
    cr_poller = gitiles_poller.GitilesPoller(
        'https://chromium.googlesource.com/chromium/src',
        pollInterval=30, project='chromium')
    c['change_source'].append(cr_poller)

    webkit_url = 'http://src.chromium.org/viewvc/blink?view=rev&revision=%s'
    webkit_poller = svnpoller.SVNPoller(
        svnurl=config.Master.webkit_root_url,
        svnbin=chromium_utils.SVN_BIN,
        split_file=WebkitFileSplitter,
        pollinterval=30,
        revlinktmpl=webkit_url,
        cachepath='webkit.svnrev',
        project='webkit')
    c['change_source'].append(webkit_poller)

    c['schedulers'].append(AnyBranchScheduler(
        name='global_scheduler', branches=['trunk', 'master'],
        treeStableTimer=60, builderNames=[]))

    c['schedulers'].append(AnyBranchScheduler(
        name='global_deps_scheduler', branches=['master'],
        treeStableTimer=60, builderNames=[]))
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from buildbot.scheduler import AnyBranchScheduler

from master import gitiles_poller


def Update(config, _active_master, c):
    # Polls config.Master.trunk_url for changes
    cr_poller = gitiles_poller.GitilesPoller(
        'https://chromium.googlesource.com/chromium/src',
        pollInterval=30, project='chromium')
    c['change_source'].append(cr_poller)

    c['schedulers'].append(AnyBranchScheduler(
        name='global_scheduler', branches=['trunk', 'master'],
        treeStableTimer=60, builderNames=[]))
Remove blink scheduler from chromium.webkit
Remove blink scheduler from chromium.webkit

For context, please see:
https://groups.google.com/a/chromium.org/d/msg/blink-dev/S-P3N0kdkMM/ohfRyTNyAwAJ
https://groups.google.com/a/chromium.org/d/msg/blink-dev/3APcgCM52JQ/OyqNugnFAAAJ

BUG=431478

Review URL: https://codereview.chromium.org/1351623005

git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@296754 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
4c99dc23e1406b5e73c541993c4ffa4f92bc9a8a
src/_version.py
src/_version.py
__all__ = ('version', '__version__')
version = (0, 14, 0)
__version__ = '.'.join(str(x) for x in version)
__all__ = ('version', '__version__')
version = (0, 14, 999, 1)
__version__ = '.'.join(str(x) for x in version)
Bump version to 0.14.999.1 (next release on this branch will be 0.15.0)
Bump version to 0.14.999.1 (next release on this branch will be 0.15.0)

20080110143356-53eee-be816768f9cc7e023de858d0d314cbbec894ffa1.gz
Python
lgpl-2.1
PabloCastellano/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,PabloCastellano/telepathy-python,max-posedon/telepathy-python,epage/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,detrout/telepathy-python,detrout/telepathy-python,max-posedon/telepathy-python,epage/telepathy-python
659614a6b845a95ce7188e86adae4bdc2c5416e7
examples/benchmark/__init__.py
examples/benchmark/__init__.py
#import benchmark_fibonacci
import benchmark_twisted_names

__all__ = ['benchmark_fibonacci', 'benchmark_twisted_names']
import benchmark_fibonacci
import benchmark_twisted_names

__all__ = ['benchmark_fibonacci', 'benchmark_twisted_names']
Add back commented out Fibonacci benchmark.
Add back commented out Fibonacci benchmark.
Python
mit
AlekSi/benchmarking-py
aaaaa3a143c370f387edf42ebd6b22c924845afa
falcom/luhn/check_digit_number.py
falcom/luhn/check_digit_number.py
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.

class CheckDigitNumber:

    def __init__ (self, number = None):
        self.__set_number(number)

    def get_check_digit (self):
        if self:
            return self.generate_from_int(self.number)
        else:
            return None

    def has_valid_check_digit (self):
        if self:
            digit = self.number % 10
            static = self.number // 10
            return digit == self.generate_from_int(static)
        else:
            return False

    def __bool__ (self):
        return self.number is not None

    def __repr__ (self):
        return "<{} {}>".format(self.__class__.__name__, repr(self.number))

    def __set_number (self, number):
        if isinstance(number, int):
            self.number = number
        elif isinstance(number, str):
            self.__try_to_extract_number_from_str(number)
        else:
            self.number = None

    def __try_to_extract_number_from_str (self, number):
        try:
            self.number = int(number)
        except ValueError:
            self.number = None
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.

class CheckDigitNumber:

    def __init__ (self, number = None):
        self.__set_number(number)

    def generate_from_int (self, n):
        raise NotImplementedError

    def get_check_digit (self):
        if self:
            return self.generate_from_int(self.number)
        else:
            return None

    def has_valid_check_digit (self):
        if self:
            digit = self.number % 10
            static = self.number // 10
            return digit == self.generate_from_int(static)
        else:
            return False

    def __bool__ (self):
        return self.number is not None

    def __repr__ (self):
        return "<{} {}>".format(self.__class__.__name__, repr(self.number))

    def __set_number (self, number):
        if isinstance(number, int):
            self.number = number
        elif isinstance(number, str):
            self.__try_to_extract_number_from_str(number)
        else:
            self.number = None

    def __try_to_extract_number_from_str (self, number):
        try:
            self.number = int(number)
        except ValueError:
            self.number = None
Make it clear that the user must implement generate_from_int
Make it clear that the user must implement generate_from_int
Python
bsd-3-clause
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
0dcecfbd1e6ce9e35febc9f4ee9bcbfac1fb8f6a
hytra/util/skimage_tifffile_hack.py
hytra/util/skimage_tifffile_hack.py
from __future__ import print_function, absolute_import, nested_scopes, generators, division, with_statement, unicode_literals
from skimage.external import tifffile

def hack(input_tif):
    """
    This method allows to bypass the strange faulty behaviour of
    skimage.external.tifffile.imread() when it gets a list of paths or
    a glob pattern. This function extracts the image names and the path.
    Then, one can os.chdir(path) and call tifffile.imread(name),
    what will now behave well.
    """
    name = []; path = str()
    for i in input_tif:
        name.append(i.split('/')[-1])
    path_split = list(input_tif)[0].split('/')[0:-1]
    for i in path_split:
        path += i+'/'
    return path, name
from __future__ import print_function, absolute_import, nested_scopes, generators, division, with_statement, unicode_literals
from skimage.external import tifffile
import os.path

def hack(input_tif):
    """
    This method allows to bypass the strange faulty behaviour of
    skimage.external.tifffile.imread() when it gets a list of paths or
    a glob pattern. This function extracts the image names and the path.
    Then, one can os.chdir(path) and call tifffile.imread(names),
    what will now behave well.
    """
    assert len(input_tif) > 0
    names = []
    path = str()
    for i in input_tif:
        names.append(os.path.basename(i))
    path = os.path.dirname(input_tif[0])
    return path, names
Fix tiffile hack to use os.path
Fix tiffile hack to use os.path
Python
mit
chaubold/hytra,chaubold/hytra,chaubold/hytra
f48063cfb9674c1e5f1f94e62ff43b239f687abd
examples/plot_tot_histogram.py
examples/plot_tot_histogram.py
""" ================== ToT histogram. ================== Create a simple histogram of the PMT signals (ToTs) in all events. """ # Author: Tamas Gal <[email protected]> # License: BSD-3 import pandas as pd import matplotlib.pyplot as plt import km3pipe.style km3pipe.style.use("km3pipe") filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5" hits = pd.read_hdf(filename, 'hits', mode='r') hits.hist("tot", bins=254, log=True, edgecolor='none') plt.title("ToT distribution") plt.xlabel("ToT [ns]")
""" ================== ToT histogram. ================== Create a simple histogram of the PMT signals (ToTs) in all events. """ # Author: Tamas Gal <[email protected]> # License: BSD-3 import tables as tb import matplotlib.pyplot as plt import km3pipe.style km3pipe.style.use("km3pipe") filename = "data/km3net_jul13_90m_muatm50T655.km3_v5r1.JTE_r2356.root.0-499.h5" with tb.File(filename) as f: tots = f.get_node("/hits/tot")[:] plt.hist(tots, bins=254, log=True, edgecolor='none') plt.title("ToT distribution") plt.xlabel("ToT [ns]")
Fix for new km3hdf5 version 4
Fix for new km3hdf5 version 4
Python
mit
tamasgal/km3pipe,tamasgal/km3pipe
34125781c38af9aacc33d20117b6c3c6dbb89211
migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py
migrations/versions/070_fix_folder_easfoldersyncstatus_unique_constraints.py
"""Fix Folder, EASFolderSyncStatus unique constraints Revision ID: 2525c5245cc2 Revises: 479b3b84a73e Create Date: 2014-07-28 18:57:24.476123 """ # revision identifiers, used by Alembic. revision = '2525c5245cc2' down_revision = '479b3b84a73e' from alembic import op import sqlalchemy as sa from inbox.ignition import main_engine engine = main_engine() Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) def upgrade(): op.drop_constraint('folder_ibfk_1', 'folder', type_='foreignkey') op.drop_constraint('account_id', 'folder', type_='unique') op.create_foreign_key('folder_ibfk_1', 'folder', 'account', ['account_id'], ['id']) op.create_unique_constraint('account_id', 'folder', ['account_id', 'name', 'canonical_name']) if 'easfoldersyncstatus' in Base.metadata.tables: op.create_unique_constraint('account_id_2', 'easfoldersyncstatus', ['account_id', 'eas_folder_id']) def downgrade(): raise Exception('Unsupported, going back will break things.')
"""Fix Folder, EASFolderSyncStatus unique constraints Revision ID: 2525c5245cc2 Revises: 479b3b84a73e Create Date: 2014-07-28 18:57:24.476123 """ # revision identifiers, used by Alembic. revision = '2525c5245cc2' down_revision = '479b3b84a73e' from alembic import op import sqlalchemy as sa from inbox.ignition import main_engine engine = main_engine() Base = sa.ext.declarative.declarative_base() Base.metadata.reflect(engine) def upgrade(): op.drop_constraint('folder_fk1', 'folder', type_='foreignkey') op.drop_constraint('account_id', 'folder', type_='unique') op.create_foreign_key('folder_fk1', 'folder', 'account', ['account_id'], ['id']) op.create_unique_constraint('account_id', 'folder', ['account_id', 'name', 'canonical_name']) if 'easfoldersyncstatus' in Base.metadata.tables: op.create_unique_constraint('account_id_2', 'easfoldersyncstatus', ['account_id', 'eas_folder_id']) def downgrade(): raise Exception('Unsupported, going back will break things.')
Rename FK in migration 70 - For some reason, Gunks' db has it named differently than ours.
Rename FK in migration 70 - For some reason, Gunks' db has it named differently than ours.
Python
agpl-3.0
gale320/sync-engine,EthanBlackburn/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,closeio/nylas,wakermahmud/sync-engine,wakermahmud/sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,nylas/sync-engine,gale320/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,nylas/sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,ErinCall/sync-engine,jobscore/sync-engine,Eagles2F/sync-engine,closeio/nylas,EthanBlackburn/sync-engine,jobscore/sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,gale320/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,wakermahmud/sync-engine,gale320/sync-engine,Eagles2F/sync-engine,gale320/sync-engine,ErinCall/sync-engine,jobscore/sync-engine,closeio/nylas,PriviPK/privipk-sync-engine,closeio/nylas,Eagles2F/sync-engine
8521ff7dcac5b81067e9e601b0901a182c24d050
processors/fix_changeline_budget_titles.py
processors/fix_changeline_budget_titles.py
import json
import logging

if __name__ == "__main__":
    input = sys.argv[1]
    output = sys.argv[2]
    processor = fix_changeline_budget_titles().process(input,output,[])

class fix_changeline_budget_titles(object):

    def process(self,inputs,output):
        out = []
        budgets = {}
        changes_jsons, budget_jsons = inputs
        for line in file(budget_jsons):
            line = json.loads(line.strip())
            budgets["%(year)s/%(code)s" % line] = line['title']
        outfile = file(output,"w")
        changed_num = 0
        for line in file(changes_jsons):
            line = json.loads(line.strip())
            key = "%(year)s/%(budget_code)s" % line
            title = budgets.get(key)
            if title != None and title != line['budget_title']:
                line['budget_title'] = title
                changed_num += 1
            else:
                logging.error("Failed to find title for change with key %s" % key)
            outfile.write(json.dumps(line,sort_keys=True)+"\n")
        print "updated %d entries" % changed_num
import json
import logging

if __name__ == "__main__":
    input = sys.argv[1]
    output = sys.argv[2]
    processor = fix_changeline_budget_titles().process(input,output,[])

class fix_changeline_budget_titles(object):

    def process(self,inputs,output):
        out = []
        budgets = {}
        changes_jsons, budget_jsons = inputs
        for line in file(budget_jsons):
            line = json.loads(line.strip())
            budgets["%(year)s/%(code)s" % line] = line['title']
        outfile = file(output,"w")
        changed_num = 0
        for line in file(changes_jsons):
            line = json.loads(line.strip())
            key = "%(year)s/%(budget_code)s" % line
            title = budgets.get(key)
            if title != None:
                if title != line['budget_title']:
                    line['budget_title'] = title
                    changed_num += 1
            else:
                logging.error("Failed to find title for change with key %s" % key)
                raise Exception()
            outfile.write(json.dumps(line,sort_keys=True)+"\n")
        print "updated %d entries" % changed_num
Fix bug in changeling title fix - it used to remove some lines on the way...
Fix bug in changeling title fix - it used to remove some lines on the way...
Python
mit
omerbartal/open-budget-data,omerbartal/open-budget-data,OpenBudget/open-budget-data,OpenBudget/open-budget-data
f0b188f398d82b000fdaa40e0aa776520a962a65
integration_tests/testpyagglom.py
integration_tests/testpyagglom.py
import sys
import platform
import h5py
import numpy

segh5 = sys.argv[1]
predh5 = sys.argv[2]
classifier = sys.argv[3]
threshold = float(sys.argv[4])

from neuroproof import Agglomeration

# open as uint32 and float respectively
seg = numpy.array(h5py.File(segh5)['stack'], numpy.uint32)
pred = numpy.array(h5py.File(predh5)['volume/predictions'], numpy.float32)
pred = pred.transpose((2,1,0,3))
pred = pred.copy()

res = Agglomeration.agglomerate(seg, pred, classifier, threshold)

# The 'golden' results depend on std::unordered, and therefore
# the expected answer is different on Mac and Linux.
if platform.system() == "Darwin":
    expected_unique = 239
else:
    expected_unique = 233

result_unique = len(numpy.unique(res))
assert result_unique == expected_unique, \
    "Expected {} unique labels (including 0) in the resulting segmentation, but got {}"\
    .format(expected_unique, len(numpy.unique(res)))

print("SUCCESS")
import sys
import platform
import h5py
import numpy

segh5 = sys.argv[1]
predh5 = sys.argv[2]
classifier = sys.argv[3]
threshold = float(sys.argv[4])

from neuroproof import Agglomeration

# open as uint32 and float respectively
seg = numpy.array(h5py.File(segh5)['stack'], numpy.uint32)
pred = numpy.array(h5py.File(predh5)['volume/predictions'], numpy.float32)
pred = pred.transpose((2,1,0,3))
pred = pred.copy()

res = Agglomeration.agglomerate(seg, pred, classifier, threshold)

# The 'golden' results depend on std::unordered, and therefore
# the expected answer is different on Mac and Linux.
if platform.system() == "Darwin":
    expected_unique = [239]
else:
    # Depending on which linux stdlib we use, we might get different results
    expected_unique = [232, 233]

result_unique = len(numpy.unique(res))
assert result_unique in expected_unique, \
    "Wrong number of unique labels in the segmentation. Expected one of {}, but got {}"\
    .format(expected_unique, len(numpy.unique(res)))

print("SUCCESS")
Allow multiple 'golden' results for agglomeration test on Linux
tests: Allow multiple 'golden' results for agglomeration test on Linux
Python
bsd-3-clause
janelia-flyem/NeuroProof,janelia-flyem/NeuroProof,janelia-flyem/NeuroProof,janelia-flyem/NeuroProof
bb22c2f673e97ff1f11546d63e990bede4bb2526
linkfiles/.config/ipython/profile_grace/startup/30-grace.py
linkfiles/.config/ipython/profile_grace/startup/30-grace.py
# (c) Stefan Countryman 2017

# set up an interactive environment with gracedb rest api access.

import ligo.gracedb.rest

client = ligo.gracedb.rest.GraceDb()
# (c) Stefan Countryman 2017

# set up an interactive environment with gracedb rest api access.

import ligo.gracedb.rest

client = ligo.gracedb.rest.GraceDb()


def gcn_notice_filenames(graceids):
    """Take a list of GraceIDs and check whether they have LVC GCN-notices.
    If so, print those notice filenames for GraceDB."""
    for gid in graceids:
        print("GraceID: {}".format(gid))
        f = client.files(gid).json()
        print filter(lambda k: 'Initial' in k, f.keys())
Add gcn_notice_filename function to igrace
Add gcn_notice_filename function to igrace
Python
mit
stefco/dotfiles,stefco/dotfiles,stefco/dotfiles
c2a1ce0ad4e2f2e9ff5ec72b89eb98967e445ea5
labsys/utils/custom_fields.py
labsys/utils/custom_fields.py
from wtforms.fields import RadioField


class NullBooleanField(RadioField):
    DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)

    def iter_choices(self):
        for value, label in self.choices:
            yield (value, label, value == self.data)

    def process_data(self, value):
        if isinstance(value, bool) is False and value is not None:
            self.data = None
        else:
            self.data = value

    def process_formdata(self, valuelist):
        if valuelist:
            try:
                self.data = valuelist[0]
            except ValueError:
                raise ValueError(self.gettext('Invalid Choice: could not coerce'))

    def pre_validate(self, form):
        for value, _ in self.choices:
            if self.data == value:
                break
        else:
            raise ValueError(self.gettext('Not a valid choice'))
from wtforms.fields import RadioField


class NullBooleanField(RadioField):
    DEFAULT_CHOICES = ((True, 'Sim'), (False, 'Não'), (None, 'Ignorado'))
    TRUE_VALUES = ('True', 'true')
    FALSE_VALUES = ('False', 'false')
    NONE_VALUES = ('None', 'none', 'null', '')

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.choices = kwargs.pop('choices', self.DEFAULT_CHOICES)

    def iter_choices(self):
        for value, label in self.choices:
            yield (value, label, value == self.data)

    def process_data(self, value):
        if value not in (True, False):
            self.data = None
        else:
            self.data = value

    def _parse_str_to_null_bool(self, input_str):
        if input_str in self.TRUE_VALUES:
            return True
        if input_str in self.FALSE_VALUES:
            return False
        if input_str in self.NONE_VALUES:
            return None
        raise ValueError

    def process_formdata(self, valuelist):
        if valuelist:
            try:
                self.data = self._parse_str_to_null_bool(valuelist[0])
            except ValueError:
                raise ValueError(self.gettext(
                    'Invalid Choice: could not coerce'))

    def pre_validate(self, form):
        for value, _ in self.choices:
            if self.data == value:
                break
        else:
            raise ValueError(self.gettext('Not a valid choice'))
Improve NullBooleanField with Truthy/Falsy values
:art: Improve NullBooleanField with Truthy/Falsy values
Python
mit
gems-uff/labsys,gems-uff/labsys,gems-uff/labsys
ebcb9a4449bd22aa39a5b05fff91bd46e06086b4
python/csgo-c4-hue-server.py
python/csgo-c4-hue-server.py
import json
import requests
from flask import Flask, session, request, current_app

app = Flask(__name__)


@app.route("/", methods=["POST"])
def main():
    f = open('bomb_status', 'w')
    json_data = json.loads(request.data)
    round_data = json_data.get('round', {})
    bomb_status = str(round_data.get('bomb', ''))
    f.write(bomb_status)
    f.close()
    print bomb_status
    return 'plant the c4!'


if __name__ == "__main__":
    app.run(port=8080, debug=True)
import json
import requests
from flask import Flask, session, request, current_app

app = Flask(__name__)


@app.route("/", methods=["POST"])
def main():
    with open('bomb_status', 'w') as f:
        json_data = json.loads(request.data)
        round_data = json_data.get('round', {})
        bomb_status = str(round_data.get('bomb', ''))
        f.write(bomb_status)
    print bomb_status
    return 'plant the c4!'


if __name__ == "__main__":
    app.run(port=8080, debug=True)
Use `with` for writing to file
Use `with` for writing to file
Python
mit
doobix/csgo-c4-hue