commit (stringlengths 40-40) | old_file (stringlengths 4-264) | new_file (stringlengths 4-264) | old_contents (stringlengths 0-3.26k) | new_contents (stringlengths 1-4.43k) | subject (stringlengths 15-624) | message (stringlengths 15-4.7k) | lang (stringclasses 3 values) | license (stringclasses 13 values) | repos (stringlengths 5-91.5k) |
---|---|---|---|---|---|---|---|---|---|
9f005120c6d408e8cf3097dd74d5dada24305c88 | src/jsonlogger.py | src/jsonlogger.py | import logging
import json
import re
from datetime import datetime
class JsonFormatter(logging.Formatter):
"""A custom formatter to format logging records as json objects"""
def parse(self):
standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt)
def format(self, record):
"""Formats a log record and serializes to json"""
mappings = {
'asctime': create_timestamp,
'message': lambda r: r.msg,
}
formatters = self.parse()
log_record = {}
for formatter in formatters:
try:
log_record[formatter] = mappings[formatter](record)
except KeyError:
log_record[formatter] = record.__dict__[formatter]
return json.dumps(log_record)
def create_timestamp(record):
"""Creates a human readable timestamp for a log records created date"""
timestamp = datetime.fromtimestamp(record.created)
return timestamp.strftime("%y-%m-%d %H:%M:%S,%f"),
| import logging
import json
import re
class JsonFormatter(logging.Formatter):
"""A custom formatter to format logging records as json objects"""
def parse(self):
standard_formatters = re.compile(r'\((.*?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt)
def format(self, record):
"""Formats a log record and serializes to json"""
formatters = self.parse()
record.message = record.getMessage()
# only format time if needed
if "asctime" in formatters:
record.asctime = self.formatTime(record, self.datefmt)
log_record = {}
for formatter in formatters:
log_record[formatter] = record.__dict__[formatter]
return json.dumps(log_record)
 | Use the same logic to format message and asctime as the standard library. | Use the same logic to format message and asctime as the standard library.
This way we produce better message text in some circumstances when not logging
a string, and use the date formatter from the base class that uses the date format
configured from a file or a dict.
| Python | bsd-2-clause | madzak/python-json-logger,bbc/python-json-logger |
937fd7c07dfe98a086a9af07f0f7b316a6f2f6d8 | invoke/main.py | invoke/main.py | """
Invoke's own 'binary' entrypoint.
Dogfoods the `program` module.
"""
from ._version import __version__
from .program import Program
program = Program(name="Invoke", binary='inv[oke]', version=__version__)
| """
Invoke's own 'binary' entrypoint.
Dogfoods the `program` module.
"""
from . import __version__, Program
program = Program(
name="Invoke",
binary='inv[oke]',
version=__version__,
)
| Clean up binstub a bit | Clean up binstub a bit
| Python | bsd-2-clause | frol/invoke,frol/invoke,pyinvoke/invoke,mkusz/invoke,mattrobenolt/invoke,pfmoore/invoke,pyinvoke/invoke,mkusz/invoke,mattrobenolt/invoke,pfmoore/invoke |
0b1587a484bd63632dbddfe5f0a4fe3c898e4fb0 | awacs/dynamodb.py | awacs/dynamodb.py | # Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from aws import Action
service_name = 'Amazon DynamoDB'
prefix = 'dynamodb'
BatchGetItem = Action(prefix, 'BatchGetItem')
CreateTable = Action(prefix, 'CreateTable')
DeleteItem = Action(prefix, 'DeleteItem')
DeleteTable = Action(prefix, 'DeleteTable')
DescribeTable = Action(prefix, 'DescribeTable')
GetItem = Action(prefix, 'GetItem')
ListTables = Action(prefix, 'ListTables')
PutItem = Action(prefix, 'PutItem')
Query = Action(prefix, 'Query')
Scan = Action(prefix, 'Scan')
UpdateItem = Action(prefix, 'UpdateItem')
UpdateTable = Action(prefix, 'UpdateTable')
| # Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from aws import Action
from aws import ARN as BASE_ARN
service_name = 'Amazon DynamoDB'
prefix = 'dynamodb'
class ARN(BASE_ARN):
def __init__(self, region, account, table=None, index=None):
sup = super(ARN, self)
resource = '*'
if table:
resource = 'table/' + table
if index:
resource += '/index/' + index
sup.__init__(prefix, region=region, account=account, resource=resource)
BatchGetItem = Action(prefix, 'BatchGetItem')
CreateTable = Action(prefix, 'CreateTable')
DeleteItem = Action(prefix, 'DeleteItem')
DeleteTable = Action(prefix, 'DeleteTable')
DescribeTable = Action(prefix, 'DescribeTable')
GetItem = Action(prefix, 'GetItem')
ListTables = Action(prefix, 'ListTables')
PutItem = Action(prefix, 'PutItem')
Query = Action(prefix, 'Query')
Scan = Action(prefix, 'Scan')
UpdateItem = Action(prefix, 'UpdateItem')
UpdateTable = Action(prefix, 'UpdateTable')
| Add logic for DynamoDB ARNs | Add logic for DynamoDB ARNs
See:
http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/UsingIAMWithDDB.html
I also decided not to name the ARN object 'DynamoDB_ARN' or anything
like that, and instead went with just 'ARN' since the class is already
stored in the dynamodb module. Kind of waffling on whether or not that
was the right thing to do, since it's not how this is handled elsewhere,
but it seems like it makes sense. If you're going to deal with multiple
ARNs, say in SDB & Dynamo, then it seems like you should be doing:
from awacs.sdb import ARN as SDB_ARN
from awacs.dynamodb import ARN as DynamoDB_ARN
Let me know what you guys think about that.
| Python | bsd-2-clause | craigbruce/awacs,cloudtools/awacs |
f996755665c9e55af5139a473b859aa0eb507515 | back2back/wsgi.py | back2back/wsgi.py | import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling, MediaCling
application = Cling(MediaCling(get_wsgi_application()))
| import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "back2back.settings")
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
application = Cling(get_wsgi_application())
| Remove MediaCling as there isn't any. | Remove MediaCling as there isn't any.
| Python | bsd-2-clause | mjtamlyn/back2back,mjtamlyn/back2back,mjtamlyn/back2back,mjtamlyn/back2back |
05c38777cb4a65199a605fbe75278a2170f84256 | debreach/compat.py | debreach/compat.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
try:
# Django >= 1.4.5
from django.utils.encoding import force_bytes, force_text, smart_text # NOQA
from django.utils.six import string_types, text_type, binary_type # NOQA
except ImportError: # pragma: no cover
# Django < 1.4.5
from django.utils.encoding import ( # NOQA
smart_unicode as smart_text, smart_str as force_bytes) # NOQA
force_text = smart_text
string_types = basestring
text_type = unicode
binary_type = str
try:
# Django >= 1.4
from django.utils.crypto import get_random_string # NOQA
except ImportError: # pragma: no cover
# Django < 1.4
from random import choice
get_random_string = lambda n: ''.join(
[choice('abcdefghijklmnopqrstuvwxyz0123456789') for i in range(n)])
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
try:
# Django >= 1.4.5
from django.utils.encoding import force_bytes, force_text, smart_text # NOQA
from django.utils.six import string_types, text_type, binary_type # NOQA
except ImportError: # pragma: no cover
# Django < 1.4.5
from django.utils.encoding import ( # NOQA
smart_unicode as smart_text, smart_str as force_bytes) # NOQA
force_text = smart_text
string_types = (basestring,)
text_type = unicode
binary_type = str
try:
# Django >= 1.4
from django.utils.crypto import get_random_string # NOQA
except ImportError: # pragma: no cover
# Django < 1.4
from random import choice
get_random_string = lambda n: ''.join(
[choice('abcdefghijklmnopqrstuvwxyz0123456789') for i in range(n)])
| Fix failure on Django < 1.4.5 | Fix failure on Django < 1.4.5
| Python | bsd-2-clause | lpomfrey/django-debreach,lpomfrey/django-debreach |
3e98ed8801d380b6ab40156b1f20a1f9fe23a755 | books/views.py | books/views.py | from rest_framework import viewsets
from books.models import BookPage
from books.serializers import BookPageSerializer
class BookPageViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows BookPages to be viewed or edited.
"""
queryset = BookPage.objects.all()
serializer_class = BookPageSerializer
| from rest_framework import viewsets
from books.models import BookPage
from books.serializers import BookPageSerializer
class BookPageViewSet(viewsets.ModelViewSet):
"""
API endpoint that allows BookPages to be viewed or edited.
"""
queryset = BookPage.objects.order_by('page_number')
serializer_class = BookPageSerializer
| Order book pages by page number. | Order book pages by page number.
| Python | mit | Pepedou/Famas |
fe7ab3060c43d509f995cc64998139a623b21a4a | bot/cogs/owner.py | bot/cogs/owner.py | import discord
from discord.ext import commands
class Owner:
"""Admin-only commands that make the bot dynamic."""
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.is_owner()
async def close(self, ctx: commands.Context):
"""Closes the bot safely. Can only be used by the owner."""
await self.bot.logout()
@commands.command()
@commands.is_owner()
async def status(self, ctx: commands.Context, *, status: str):
"""Changes the bot's status. Can only be used by the owner."""
await self.bot.change_presence(activity=discord.Game(name=status))
@commands.command(name="reload")
@commands.is_owner()
async def _reload(self, ctx, *, ext: str = None):
"""Reloads a module. Can only be used by the owner."""
if ext:
self.bot.unload_extension(ext)
self.bot.load_extension(ext)
else:
for m in self.bot.initial_extensions:
self.bot.unload_extension(m)
self.bot.load_extension(m)
def setup(bot):
bot.add_cog(Owner(bot))
| import discord
from discord.ext import commands
class Owner:
"""Admin-only commands that make the bot dynamic."""
def __init__(self, bot):
self.bot = bot
@commands.command()
@commands.is_owner()
async def close(self, ctx: commands.Context):
"""Closes the bot safely. Can only be used by the owner."""
await self.bot.logout()
@commands.command()
@commands.is_owner()
async def status(self, ctx: commands.Context, *, status: str):
"""Changes the bot's status. Can only be used by the owner."""
await self.bot.change_presence(activity=discord.Game(name=status))
@commands.command(name="reload")
@commands.is_owner()
async def _reload(self, ctx, *, ext: str = None):
"""Reloads a module. Can only be used by the owner."""
if ext:
self.bot.unload_extension(ext)
self.bot.load_extension(ext)
else:
for m in self.bot.initial_extensions:
self.bot.unload_extension(m)
self.bot.load_extension(m)
await ctx.message.add_reaction(self.bot.emoji_rustok)
def setup(bot):
bot.add_cog(Owner(bot))
| Add OK reaction to reload command | Add OK reaction to reload command
| Python | mit | ivandardi/RustbotPython,ivandardi/RustbotPython |
a703bed82bb2cfcf8b18b5e651bd2e992a590696 | numpy/_array_api/_types.py | numpy/_array_api/_types.py | """
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
Array = ndarray
Device = TypeVar('device')
Dtype = Literal[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]
SupportsDLPack = TypeVar('SupportsDLPack')
SupportsBufferProtocol = TypeVar('SupportsBufferProtocol')
PyCapsule = TypeVar('PyCapsule')
| """
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = ['Array', 'Device', 'Dtype', 'SupportsDLPack',
'SupportsBufferProtocol', 'PyCapsule']
from typing import Any, Sequence, Type, Union
from . import (Array, int8, int16, int32, int64, uint8, uint16, uint32,
uint64, float32, float64)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = Any
Dtype = Type[Union[[int8, int16, int32, int64, uint8, uint16,
uint32, uint64, float32, float64]]]
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
| Use better type definitions for the array API custom types | Use better type definitions for the array API custom types
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy |
f26a59aae33fd1afef919427e0c36e744cb904fc | test/test_normalizedString.py | test/test_normalizedString.py | from rdflib import *
import unittest
class test_normalisedString(unittest.TestCase):
def test1(self):
lit2 = Literal("\two\nw", datatype=XSD.normalizedString)
lit = Literal("\two\nw", datatype=XSD.string)
self.assertEqual(lit == lit2, False)
def test2(self):
lit = Literal("\tBeing a Doctor Is\n\ta Full-Time Job\r", datatype=XSD.normalizedString)
st = Literal(" Being a Doctor Is a Full-Time Job ", datatype=XSD.string)
self.assertFalse(Literal.eq(st,lit))
def test3(self):
lit=Literal("hey\nthere", datatype=XSD.normalizedString).n3()
print(lit)
self.assertTrue(lit=="\"hey there\"^^<http://www.w3.org/2001/XMLSchema#normalizedString>")
if __name__ == "__main__":
unittest.main() | from rdflib import Literal
from rdflib.namespace import XSD
import unittest
class test_normalisedString(unittest.TestCase):
def test1(self):
lit2 = Literal("\two\nw", datatype=XSD.normalizedString)
lit = Literal("\two\nw", datatype=XSD.string)
self.assertEqual(lit == lit2, False)
def test2(self):
lit = Literal("\tBeing a Doctor Is\n\ta Full-Time Job\r", datatype=XSD.normalizedString)
st = Literal(" Being a Doctor Is a Full-Time Job ", datatype=XSD.string)
self.assertFalse(Literal.eq(st,lit))
def test3(self):
lit = Literal("hey\nthere", datatype=XSD.normalizedString).n3()
self.assertTrue(lit=="\"hey there\"^^<http://www.w3.org/2001/XMLSchema#normalizedString>")
def test4(self):
lit = Literal("hey\nthere\ta tab\rcarriage return", datatype=XSD.normalizedString)
expected = Literal("""hey there a tab carriage return""", datatype=XSD.string)
self.assertEqual(str(lit), str(expected))
if __name__ == "__main__":
unittest.main()
| Add a new test to test all chars that are getting replaced | Add a new test to test all chars that are getting replaced
| Python | bsd-3-clause | RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib |
543fc894120db6e8d854e746d631c87cc53f622b | website/noveltorpedo/tests.py | website/noveltorpedo/tests.py | from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
import unittest
from django.utils import timezone
client = Client()
class SearchTests(TestCase):
def test_that_the_front_page_loads_properly(self):
response = client.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'NovelTorpedo Search')
def test_insertion_and_querying_of_data(self):
author = Author()
author.name = "Jack Frost"
author.save()
story = Story()
story.title = "The Big One"
story.save()
story.authors.add(author)
segment = StorySegment()
segment.published = timezone.now()
segment.story = story
segment.title = "Chapter One"
segment.contents = "This is how it all went down..."
segment.save() | from django.test import TestCase
from django.test import Client
from noveltorpedo.models import *
from django.utils import timezone
from django.core.management import call_command
client = Client()
class SearchTests(TestCase):
def test_that_the_front_page_loads_properly(self):
response = client.get('/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'NovelTorpedo Search')
def test_insertion_and_querying_of_data(self):
# Create a new story in the database.
author = Author()
author.name = 'Jack Frost'
author.save()
story = Story()
story.title = 'The Big One'
story.save()
story.authors.add(author)
segment = StorySegment()
segment.published = timezone.now()
segment.story = story
segment.title = 'Chapter Three'
segment.contents = 'This is how it all went down...'
segment.save()
# Index the new story.
call_command('update_index')
# Query via author name.
response = client.get('/', {'q': 'Jack Frost'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# Query via story name.
response = client.get('/', {'q': 'The Big One'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
# Query via segment contents.
response = client.get('/', {'q': 'Chapter Three'})
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Jack Frost')
self.assertContains(response, 'The Big One')
self.assertContains(response, 'Chapter Three')
self.assertContains(response, 'This is how it all went down...')
| Rebuild index and test variety of queries | Rebuild index and test variety of queries
| Python | mit | NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo,NovelTorpedo/noveltorpedo |
80524970b9e802787918af9ce6d25110be825df4 | moderngl/__init__.py | moderngl/__init__.py | '''
ModernGL: PyOpenGL alternative
'''
from .error import *
from .buffer import *
from .compute_shader import *
from .conditional_render import *
from .context import *
from .framebuffer import *
from .program import *
from .program_members import *
from .query import *
from .renderbuffer import *
from .scope import *
from .texture import *
from .texture_3d import *
from .texture_array import *
from .texture_cube import *
from .vertex_array import *
from .sampler import *
__version__ = '5.6.1'
| '''
ModernGL: High performance rendering for Python 3
'''
from .error import *
from .buffer import *
from .compute_shader import *
from .conditional_render import *
from .context import *
from .framebuffer import *
from .program import *
from .program_members import *
from .query import *
from .renderbuffer import *
from .scope import *
from .texture import *
from .texture_3d import *
from .texture_array import *
from .texture_cube import *
from .vertex_array import *
from .sampler import *
__version__ = '5.6.1'
| Update module level description of moderngl | Update module level description of moderngl
| Python | mit | cprogrammer1994/ModernGL,cprogrammer1994/ModernGL,cprogrammer1994/ModernGL |
cb557823258fe61c2e86db30a7bfe8d0de120f15 | tests/conftest.py | tests/conftest.py | import betamax
import os
with betamax.Betamax.configure() as config:
config.cassette_library_dir = 'tests/cassettes'
record_mode = 'once'
if os.environ.get('TRAVIS_GH3'):
record_mode = 'never'
config.default_cassette_options['record_mode'] = record_mode
config.define_cassette_placeholder(
'<AUTH_TOKEN>',
os.environ.get('GH_AUTH', 'xxxxxxxxxxx')
)
| import betamax
import os
with betamax.Betamax.configure() as config:
config.cassette_library_dir = 'tests/cassettes'
record_mode = 'once'
if os.environ.get('TRAVIS_GH3'):
record_mode = 'never'
config.default_cassette_options['record_mode'] = record_mode
config.define_cassette_placeholder(
'<AUTH_TOKEN>',
os.environ.get('GH_AUTH', 'x' * 20)
)
| Update the default value for the placeholder | Update the default value for the placeholder
If I decide to start matching on headers this will be necessary
| Python | bsd-3-clause | krxsky/github3.py,ueg1990/github3.py,agamdua/github3.py,sigmavirus24/github3.py,wbrefvem/github3.py,h4ck3rm1k3/github3.py,balloob/github3.py,degustaf/github3.py,christophelec/github3.py,jim-minter/github3.py,itsmemattchung/github3.py,icio/github3.py |
e4d5fa8c70dd283d4511f155da5be5835b1836f7 | tests/unit/test_validate.py | tests/unit/test_validate.py | import pytest
import mock
import synapseclient
from genie import validate
center = "SAGE"
syn = mock.create_autospec(synapseclient.Synapse)
@pytest.fixture(params=[
# tuple with (input, expectedOutput)
(["data_CNA_SAGE.txt"], "cna"),
(["data_clinical_supp_SAGE.txt"], "clinical"),
(["data_clinical_supp_sample_SAGE.txt",
"data_clinical_supp_patient_SAGE.txt"], "clinical")])
def filename_fileformat_map(request):
return request.param
def test_perfect_get_filetype(filename_fileformat_map):
(filepath_list, fileformat) = filename_fileformat_map
assert validate.determine_filetype(
syn, filepath_list, center) == fileformat
# def test_wrongfilename_get_filetype():
# assert input_to_database.get_filetype(syn, ['wrong.txt'], center) is None
| import pytest
import mock
import synapseclient
import pytest
from genie import validate
center = "SAGE"
syn = mock.create_autospec(synapseclient.Synapse)
@pytest.fixture(params=[
# tuple with (input, expectedOutput)
(["data_CNA_SAGE.txt"], "cna"),
(["data_clinical_supp_SAGE.txt"], "clinical"),
(["data_clinical_supp_sample_SAGE.txt",
"data_clinical_supp_patient_SAGE.txt"], "clinical")])
def filename_fileformat_map(request):
return request.param
def test_perfect_get_filetype(filename_fileformat_map):
(filepath_list, fileformat) = filename_fileformat_map
assert validate.determine_filetype(
syn, filepath_list, center) == fileformat
def test_wrongfilename_get_filetype():
with pytest.raises(
ValueError,
match="Your filename is incorrect! "
"Please change your filename before you run "
"the validator or specify --filetype if you are "
"running the validator locally"):
validate.determine_filetype(syn, ['wrong.txt'], center)
| Add in unit tests for validate.py | Add in unit tests for validate.py
| Python | mit | thomasyu888/Genie,thomasyu888/Genie,thomasyu888/Genie,thomasyu888/Genie |
1bb00e56897d17d9779f535dc50f602321c26eca | genes/gnu_coreutils/commands.py | genes/gnu_coreutils/commands.py | #!/usr/bin/env python
from genes.posix.traits import only_posix
from genes.process.commands import run
@only_posix()
def chgrp(path, group):
run(['chgrp', group, path])
@only_posix()
def chown(path, user):
run(['chown', user, path])
@only_posix()
def groupadd(*args):
run(['groupadd'] + list(*args))
@only_posix()
def ln(*args):
run(['ln'] + list(*args))
@only_posix()
def mkdir(path, mode=None):
if mode:
run(['mkdir', '-m', mode, path])
else:
run(['mkdir', path])
@only_posix()
def useradd(*args):
# FIXME: this is a bad way to do things
# FIXME: sigh. this is going to be a pain to make it idempotent
run(['useradd'] + list(*args))
@only_posix()
def usermod(*args):
# FIXME: this is a bad way to do things
run(['usermod'] + list(*args))
| #!/usr/bin/env python
from genes.posix.traits import only_posix
from genes.process.commands import run
@only_posix()
def chgrp(path, group):
run(['chgrp', group, path])
@only_posix()
def chown(path, user):
run(['chown', user, path])
@only_posix()
def groupadd(*args):
run(['groupadd'] + list(args))
@only_posix()
def ln(*args):
run(['ln'] + list(args))
@only_posix()
def mkdir(path, mode=None):
if mode:
run(['mkdir', '-m', mode, path])
else:
run(['mkdir', path])
@only_posix()
def useradd(*args):
# FIXME: this is a bad way to do things
# FIXME: sigh. this is going to be a pain to make it idempotent
run(['useradd'] + list(args))
@only_posix()
def usermod(*args):
# FIXME: this is a bad way to do things
run(['usermod'] + list(args))
| Fix args to list. Args is a tuple, list takes a tuple | Fix args to list. Args is a tuple, list takes a tuple | Python | mit | hatchery/Genepool2,hatchery/genepool |
96365d3467e1b0a9520eaff8086224d2d181b03b | mopidy/mixers/osa.py | mopidy/mixers/osa.py | from subprocess import Popen, PIPE
from mopidy.mixers import BaseMixer
class OsaMixer(BaseMixer):
def _get_volume(self):
try:
return int(Popen(
['osascript', '-e', 'output volume of (get volume settings)'],
stdout=PIPE).communicate()[0])
except ValueError:
return None
def _set_volume(self, volume):
Popen(['osascript', '-e', 'set volume output volume %d' % volume])
| from subprocess import Popen, PIPE
import time
from mopidy.mixers import BaseMixer
CACHE_TTL = 30
class OsaMixer(BaseMixer):
_cache = None
_last_update = None
def _valid_cache(self):
return (self._cache is not None
and self._last_update is not None
and (int(time.time() - self._last_update) < CACHE_TTL))
def _get_volume(self):
if not self._valid_cache():
try:
self._cache = int(Popen(
['osascript', '-e',
'output volume of (get volume settings)'],
stdout=PIPE).communicate()[0])
except ValueError:
self._cache = None
self._last_update = int(time.time())
return self._cache
def _set_volume(self, volume):
Popen(['osascript', '-e', 'set volume output volume %d' % volume])
self._cache = volume
self._last_update = int(time.time())
| Add caching of OsaMixer volume | Add caching of OsaMixer volume
If volume is just managed through Mopidy it is always correct. If another
application changes the volume, Mopidy will be correct within 30 seconds.
| Python | apache-2.0 | tkem/mopidy,quartz55/mopidy,tkem/mopidy,pacificIT/mopidy,bacontext/mopidy,mokieyue/mopidy,vrs01/mopidy,ZenithDK/mopidy,ZenithDK/mopidy,priestd09/mopidy,bencevans/mopidy,swak/mopidy,jmarsik/mopidy,jodal/mopidy,ali/mopidy,jodal/mopidy,adamcik/mopidy,rawdlite/mopidy,diandiankan/mopidy,adamcik/mopidy,SuperStarPL/mopidy,abarisain/mopidy,ZenithDK/mopidy,ali/mopidy,liamw9534/mopidy,dbrgn/mopidy,adamcik/mopidy,SuperStarPL/mopidy,bencevans/mopidy,quartz55/mopidy,mopidy/mopidy,quartz55/mopidy,ali/mopidy,bencevans/mopidy,quartz55/mopidy,swak/mopidy,SuperStarPL/mopidy,hkariti/mopidy,pacificIT/mopidy,mokieyue/mopidy,jmarsik/mopidy,diandiankan/mopidy,jodal/mopidy,kingosticks/mopidy,jcass77/mopidy,glogiotatidis/mopidy,bacontext/mopidy,priestd09/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,mokieyue/mopidy,diandiankan/mopidy,dbrgn/mopidy,jmarsik/mopidy,kingosticks/mopidy,glogiotatidis/mopidy,mopidy/mopidy,vrs01/mopidy,tkem/mopidy,hkariti/mopidy,woutervanwijk/mopidy,swak/mopidy,ZenithDK/mopidy,jmarsik/mopidy,vrs01/mopidy,rawdlite/mopidy,liamw9534/mopidy,ali/mopidy,pacificIT/mopidy,mopidy/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,swak/mopidy,priestd09/mopidy,hkariti/mopidy,rawdlite/mopidy,mokieyue/mopidy,kingosticks/mopidy,vrs01/mopidy,bencevans/mopidy,bacontext/mopidy,diandiankan/mopidy,bacontext/mopidy,pacificIT/mopidy,tkem/mopidy,dbrgn/mopidy,abarisain/mopidy,jcass77/mopidy,hkariti/mopidy,rawdlite/mopidy,woutervanwijk/mopidy,jcass77/mopidy |
0180aead701820d2de140791c3e271b4b8a7d231 | tests/__init__.py | tests/__init__.py | import os
def fixture_response(path):
return open(os.path.join(
os.path.dirname(__file__),
'fixtures',
path)).read()
| import os
def fixture_response(path):
with open(os.path.join(os.path.dirname(__file__),
'fixtures',
path)) as fixture:
return fixture.read()
| Fix file handlers being left open for fixtures | Fix file handlers being left open for fixtures
| Python | mit | accepton/accepton-python |
d07bf029b7ba9b5ef1f494d119a2eca004c1818a | tests/basics/list_slice_3arg.py | tests/basics/list_slice_3arg.py | x = list(range(10))
print(x[::-1])
print(x[::2])
print(x[::-2])
| x = list(range(10))
print(x[::-1])
print(x[::2])
print(x[::-2])
x = list(range(9))
print(x[::-1])
print(x[::2])
print(x[::-2])
| Add small testcase for 3-arg slices. | tests: Add small testcase for 3-arg slices.
| Python | mit | neilh10/micropython,danicampora/micropython,tuc-osg/micropython,noahchense/micropython,ahotam/micropython,alex-march/micropython,SungEun-Steve-Kim/test-mp,suda/micropython,SungEun-Steve-Kim/test-mp,noahwilliamsson/micropython,neilh10/micropython,aethaniel/micropython,noahwilliamsson/micropython,chrisdearman/micropython,redbear/micropython,AriZuu/micropython,praemdonck/micropython,ceramos/micropython,firstval/micropython,rubencabrera/micropython,selste/micropython,pozetroninc/micropython,galenhz/micropython,omtinez/micropython,dmazzella/micropython,turbinenreiter/micropython,vriera/micropython,toolmacher/micropython,kostyll/micropython,hiway/micropython,SungEun-Steve-Kim/test-mp,ernesto-g/micropython,xyb/micropython,ernesto-g/micropython,dxxb/micropython,kostyll/micropython,vitiral/micropython,PappaPeppar/micropython,dmazzella/micropython,TDAbboud/micropython,matthewelse/micropython,lbattraw/micropython,xyb/micropython,stonegithubs/micropython,orionrobots/micropython,kerneltask/micropython,ChuckM/micropython,selste/micropython,omtinez/micropython,rubencabrera/micropython,xuxiaoxin/micropython,alex-march/micropython,xhat/micropython,jlillest/micropython,kostyll/micropython,cloudformdesign/micropython,infinnovation/micropython,blazewicz/micropython,deshipu/micropython,hosaka/micropython,feilongfl/micropython,henriknelson/micropython,adafruit/micropython,Peetz0r/micropython-esp32,mgyenik/micropython,hiway/micropython,Vogtinator/micropython,alex-robbins/micropython,mianos/micropython,martinribelotta/micropython,jmarcelino/pycom-micropython,pfalcon/micropython,pramasoul/micropython,HenrikSolver/micropython,skybird6672/micropython,suda/micropython,kostyll/micropython,pfalcon/micropython,puuu/micropython,tralamazza/micropython,blazewicz/micropython,ruffy91/micropython,Timmenem/micropython,heisewangluo/micropython,Timmenem/micropython,xuxiaoxin/micropython,jmarcelino/pycom-micropython,pfalcon/micropython,oopy/micropython,puuu/micropython,adafruit/circuitpython,tdautc19841202/micropython,torwag/micropython,paul-xxx/micropython,KISSMonX/micropython,suda/micropython,PappaPeppar/micropython,skybird6672/micropython,orionrobots/micropython,dxxb/micropython,skybird6672/micropython,lbattraw/micropython,alex-robbins/micropython,xuxiaoxin/micropython,drrk/micropython,cloudformdesign/micropython,slzatz/micropython,ruffy91/micropython,danicampora/micropython,heisewangluo/micropython,SungEun-Steve-Kim/test-mp,emfcamp/micropython,ericsnowcurrently/micropython,hosaka/micropython,ahotam/micropython,MrSurly/micropython-esp32,misterdanb/micropython,xuxiaoxin/micropython,lowRISC/micropython,xyb/micropython,deshipu/micropython,cwyark/micropython,jimkmc/micropython,trezor/micropython,supergis/micropython,kostyll/micropython,deshipu/micropython,jmarcelino/pycom-micropython,mgyenik/micropython,AriZuu/micropython,praemdonck/micropython,ganshun666/micropython,rubencabrera/micropython,trezor/micropython,vitiral/micropython,danicampora/micropython,EcmaXp/micropython,ceramos/micropython,TDAbboud/micropython,micropython/micropython-esp32,orionrobots/micropython,lbattraw/micropython,supergis/micropython,galenhz/micropython,redbear/micropython,toolmacher/micropython,ceramos/micropython,cnoviello/micropython,paul-xxx/micropython,dhylands/micropython,EcmaXp/micropython,tralamazza/micropython,Vogtinator/micropython,rubencabrera/micropython,noahwilliamsson/micropython,bvernoux/micropython,hosaka/micropython,mhoffma/micropython,selste/micropython,heisewangluo/micropython,xhat/micropython,warner83/micropython,methoxid/
micropystat,vitiral/micropython,supergis/micropython,praemdonck/micropython,utopiaprince/micropython,noahchense/micropython,tdautc19841202/micropython,oopy/micropython,pozetroninc/micropython,torwag/micropython,deshipu/micropython,HenrikSolver/micropython,feilongfl/micropython,ganshun666/micropython,swegener/micropython,torwag/micropython,aethaniel/micropython,EcmaXp/micropython,oopy/micropython,swegener/micropython,xhat/micropython,tdautc19841202/micropython,heisewangluo/micropython,tdautc19841202/micropython,deshipu/micropython,ryannathans/micropython,paul-xxx/micropython,danicampora/micropython,toolmacher/micropython,ryannathans/micropython,blazewicz/micropython,galenhz/micropython,xhat/micropython,hosaka/micropython,noahwilliamsson/micropython,mgyenik/micropython,toolmacher/micropython,mpalomer/micropython,xyb/micropython,ChuckM/micropython,Timmenem/micropython,supergis/micropython,ernesto-g/micropython,misterdanb/micropython,MrSurly/micropython-esp32,ryannathans/micropython,swegener/micropython,KISSMonX/micropython,vriera/micropython,alex-robbins/micropython,matthewelse/micropython,danicampora/micropython,mgyenik/micropython,KISSMonX/micropython,suda/micropython,tuc-osg/micropython,warner83/micropython,blazewicz/micropython,slzatz/micropython,mhoffma/micropython,AriZuu/micropython,dxxb/micropython,Vogtinator/micropython,drrk/micropython,tuc-osg/micropython,tuc-osg/micropython,cnoviello/micropython,tobbad/micropython,jimkmc/micropython,blmorris/micropython,alex-march/micropython,adamkh/micropython,heisewangluo/micropython,adamkh/micropython,cloudformdesign/micropython,pramasoul/micropython,firstval/micropython,stonegithubs/micropython,torwag/micropython,ChuckM/micropython,Peetz0r/micropython-esp32,ganshun666/micropython,MrSurly/micropython,AriZuu/micropython,methoxid/micropystat,swegener/micropython,adafruit/circuitpython,skybird6672/micropython,blazewicz/micropython,ceramos/micropython,Timmenem/micropython,neilh10/micropython,mhoffma/micropython,paul-xxx/micropython,emfcamp/micropython,EcmaXp/micropython,neilh10/micropython,lbattraw/micropython,Peetz0r/micropython-esp32,infinnovation/micropython,galenhz/micropython,kerneltask/micropython,cnoviello/micropython,feilongfl/micropython,toolmacher/micropython,emfcamp/micropython,EcmaXp/micropython,praemdonck/micropython,alex-robbins/micropython,matthewelse/micropython,utopiaprince/micropython,vriera/micropython,adafruit/micropython,micropython/micropython-esp32,blmorris/micropython,stonegithubs/micropython,ericsnowcurrently/micropython,lowRISC/micropython,emfcamp/micropython,tdautc19841202/micropython,dhylands/micropython,bvernoux/micropython,dinau/micropython,oopy/micropython,PappaPeppar/micropython,MrSurly/micropython,alex-march/micropython,warner83/micropython,aethaniel/micropython,TDAbboud/micropython,Timmenem/micropython,aethaniel/micropython,SungEun-Steve-Kim/test-mp,dxxb/micropython,mianos/micropython,ernesto-g/micropython,jlillest/micropython,trezor/micropython,tobbad/micropython,redbear/micropython,cnoviello/micropython,xuxiaoxin/micropython,HenrikSolver/micropython,redbear/micropython,omtinez/micropython,hiway/micropython,SHA2017-badge/micropython-esp32,ganshun666/micropython,blmorris/micropython,dinau/micropython,emfcamp/micropython,cnoviello/micropython,ryannathans/micropython,kerneltask/micropython,redbear/micropython,infinnovation/micropython,adafruit/micropython,henriknelson/micropython,ericsnowcurrently/micropython,paul-xxx/micropython,kerneltask/micropython,misterdanb/micropython,jlillest/micropython,pramasoul/micropython,vr
iera/micropython,noahwilliamsson/micropython,ceramos/micropython,dinau/micropython,dmazzella/micropython,swegener/micropython,ernesto-g/micropython,mgyenik/micropython,mpalomer/micropython,ahotam/micropython,skybird6672/micropython,noahchense/micropython,ahotam/micropython,pfalcon/micropython,mhoffma/micropython,blmorris/micropython,xyb/micropython,micropython/micropython-esp32,dinau/micropython,noahchense/micropython,lbattraw/micropython,puuu/micropython,jmarcelino/pycom-micropython,misterdanb/micropython,turbinenreiter/micropython,matthewelse/micropython,martinribelotta/micropython,tobbad/micropython,warner83/micropython,adamkh/micropython,tobbad/micropython,alex-march/micropython,bvernoux/micropython,slzatz/micropython,ruffy91/micropython,adafruit/micropython,chrisdearman/micropython,SHA2017-badge/micropython-esp32,praemdonck/micropython,mianos/micropython,mpalomer/micropython,MrSurly/micropython-esp32,HenrikSolver/micropython,feilongfl/micropython,turbinenreiter/micropython,torwag/micropython,jlillest/micropython,drrk/micropython,henriknelson/micropython,alex-robbins/micropython,firstval/micropython,AriZuu/micropython,SHA2017-badge/micropython-esp32,cwyark/micropython,puuu/micropython,orionrobots/micropython,pramasoul/micropython,martinribelotta/micropython,feilongfl/micropython,adafruit/circuitpython,TDAbboud/micropython,mhoffma/micropython,hosaka/micropython,MrSurly/micropython-esp32,TDAbboud/micropython,puuu/micropython,firstval/micropython,misterdanb/micropython,Peetz0r/micropython-esp32,utopiaprince/micropython,pozetroninc/micropython,lowRISC/micropython,infinnovation/micropython,mianos/micropython,trezor/micropython,drrk/micropython,dinau/micropython,neilh10/micropython,adafruit/circuitpython,PappaPeppar/micropython,micropython/micropython-esp32,HenrikSolver/micropython,adafruit/circuitpython,mianos/micropython,methoxid/micropystat,adafruit/micropython,jimkmc/micropython,chrisdearman/micropython,Vogtinator/micropython,cwyark/micropython,ericsnowcurrently/micropython,utopiaprince/micropython,chrisdearman/micropython,MrSurly/micropython,slzatz/micropython,henriknelson/micropython,aethaniel/micropython,blmorris/micropython,MrSurly/micropython,matthewelse/micropython,cwyark/micropython,dhylands/micropython,kerneltask/micropython,vitiral/micropython,selste/micropython,ahotam/micropython,vitiral/micropython,suda/micropython,orionrobots/micropython,ChuckM/micropython,dxxb/micropython,cloudformdesign/micropython,mpalomer/micropython,adamkh/micropython,adafruit/circuitpython,chrisdearman/micropython,supergis/micropython,jlillest/micropython,stonegithubs/micropython,selste/micropython,trezor/micropython,ruffy91/micropython,jimkmc/micropython,xhat/micropython,mpalomer/micropython,pfalcon/micropython,cwyark/micropython,tobbad/micropython,micropython/micropython-esp32,MrSurly/micropython,omtinez/micropython,pozetroninc/micropython,ruffy91/micropython,infinnovation/micropython,SHA2017-badge/micropython-esp32,omtinez/micropython,dhylands/micropython,oopy/micropython,adamkh/micropython,martinribelotta/micropython,ChuckM/micropython,bvernoux/micropython,henriknelson/micropython,Peetz0r/micropython-esp32,turbinenreiter/micropython,matthewelse/micropython,KISSMonX/micropython,methoxid/micropystat,dhylands/micropython,dmazzella/micropython,PappaPeppar/micropython,jmarcelino/pycom-micropython,firstval/micropython,hiway/micropython,ryannathans/micropython,lowRISC/micropython,tralamazza/micropython,lowRISC/micropython,vriera/micropython,turbinenreiter/micropython,warner83/micropython,utopiaprince/microp
ython,pozetroninc/micropython,drrk/micropython,bvernoux/micropython,martinribelotta/micropython,ganshun666/micropython,rubencabrera/micropython,MrSurly/micropython-esp32,galenhz/micropython,noahchense/micropython,cloudformdesign/micropython,tuc-osg/micropython,SHA2017-badge/micropython-esp32,KISSMonX/micropython,methoxid/micropystat,pramasoul/micropython,Vogtinator/micropython,hiway/micropython,ericsnowcurrently/micropython,slzatz/micropython,stonegithubs/micropython,tralamazza/micropython,jimkmc/micropython |
2a43183f5d2c14bacb92fe563d3c2ddf61b116da | tests/testMain.py | tests/testMain.py | import os
import unittest
import numpy
import arcpy
from utils import *
# import our constants;
# configure test data
# XXX: use .ini files for these instead? used in other 'important' unit tests
from config import *
# import our local directory so we can use the internal modules
import_paths = ['../Install/toolbox', '../Install']
addLocalPaths(import_paths)
class TestBpiScript(unittest.TestCase):
from scripts import bpi
def testBpiImport(self, method=bpi):
self.assertRaises(ValueError, method.main(), None)
def testBpiRun(self):
pass
class TestStandardizeBpiGridsScript(unittest.TestCase):
from scripts import standardize_bpi_grids
def testStdImport(self, method=standardize_bpi_grids):
pass
def testStdRun(self):
pass
class TestBtmDocument(unittest.TestCase):
# XXX this won't automatically get the right thing... how can we fix it?
import utils
def testXMLDocumentExists(self):
self.assertTrue(os.path.exists(xml_doc))
if __name__ == '__main__':
unittest.main()
| import os
import unittest
import numpy
import arcpy
from utils import *
# import our constants;
# configure test data
# XXX: use .ini files for these instead? used in other 'important' unit tests
from config import *
# import our local directory so we can use the internal modules
import_paths = ['../Install/toolbox', '../Install']
addLocalPaths(import_paths)
class TestBpiScript(unittest.TestCase):
from scripts import bpi
def testBpiImport(self, method=bpi):
self.assertRaises(ValueError, method.main(), None)
def testBpiRun(self):
pass
class TestStandardizeBpiGridsScript(unittest.TestCase):
from scripts import standardize_bpi_grids
def testStdImport(self, method=standardize_bpi_grids):
pass
def testStdRun(self):
pass
class TestBtmDocument(unittest.TestCase):
# XXX this won't automatically get the right thing... how can we fix it?
import utils
def testXmlDocumentExists(self):
self.assertTrue(os.path.exists(xml_doc))
def testCsvDocumentExists(self):
self.assertTrue(os.path.exists(csv_doc))
if __name__ == '__main__':
unittest.main()
 | Make naming consistent with our standard (camelcase always, even with acronym) | Make naming consistent with our standard (camelcase always, even with acronym)
| Python | mpl-2.0 | EsriOceans/btm |
457d8002a3758cc8f28ba195a21afc4e0d33965a | tests/vec_test.py | tests/vec_test.py | """Tests for vectors."""
from sympy import sympify
from drudge import Vec
def test_vecs_has_basic_properties():
"""Tests the basic properties of vector instances."""
base = Vec('v')
v_ab = Vec('v', indices=['a', 'b'])
v_ab_1 = base['a', 'b']
v_ab_2 = (base['a'])['b']
indices_ref = (sympify('a'), sympify('b'))
hash_ref = hash(v_ab)
str_ref = 'v[a, b]'
repr_ref = "Vec('v', (a, b))"
for i in [v_ab, v_ab_1, v_ab_2]:
assert i.base == base.base
assert i.indices == indices_ref
assert hash(i) == hash_ref
assert i == v_ab
assert str(i) == str_ref
assert repr(i) == repr_ref
| """Tests for vectors."""
from sympy import sympify
from drudge import Vec
def test_vecs_has_basic_properties():
"""Tests the basic properties of vector instances."""
base = Vec('v')
v_ab = Vec('v', indices=['a', 'b'])
v_ab_1 = base['a', 'b']
v_ab_2 = (base['a'])['b']
indices_ref = (sympify('a'), sympify('b'))
hash_ref = hash(v_ab)
str_ref = 'v[a, b]'
repr_ref = "Vec('v', (a, b))"
for i in [v_ab, v_ab_1, v_ab_2]:
assert i.label == base.label
assert i.base == base
assert i.indices == indices_ref
assert hash(i) == hash_ref
assert i == v_ab
assert str(i) == str_ref
assert repr(i) == repr_ref
| Update tests for vectors for the new protocol | Update tests for vectors for the new protocol
Now the tests for vectors are updated for the new non-backward-compatible
change to the concepts of label and base.
| Python | mit | tschijnmo/drudge,tschijnmo/drudge,tschijnmo/drudge |
df00a5319028e53826c1a4fd29ed39bb671b4911 | tutorials/urls.py | tutorials/urls.py | from django.conf.urls import include, url
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view()),
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view()),
] | from django.conf.urls import include, url
from markdownx import urls as markdownx
from tutorials import views
urlpatterns = [
url(r'^$', views.ListTutorials.as_view()),
url(r'^(?P<tutorial_id>[\w\-]+)/$', views.TutorialDetail.as_view()),
# this not working correctly - some error in gatherTutorials
url(r'/add/', views.NewTutorial.as_view(), name='add_tutorial'),
url(r'^markdownx/', include(markdownx)),
] | Add markdownx url, Add add-tutorial url | Add markdownx url, Add add-tutorial url
| Python | agpl-3.0 | openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform |
3bce013c51c454721de3a868ea6d8e8c6d335112 | cycli/neo4j.py | cycli/neo4j.py | import requests
from py2neo import Graph, authenticate
class Neo4j:
def __init__(self, host, port, username=None, password=None):
self.host = host
self.port = port
self.username = username
self.password = password
self.host_port = "{host}:{port}".format(host=host, port=port)
self.url = "http://{host_port}/db/data/".format(host_port=self.host_port)
def connection(self):
if self.username and self.password:
authenticate(self.host_port, self.username, self.password)
graph = Graph(self.url)
return graph
def cypher(self, query):
tx = self.connection().cypher.begin()
try:
tx.append(query)
results = tx.process()
tx.commit()
except Exception as e:
results = e
except KeyboardInterrupt:
tx.rollback()
results = ""
return results
def labels(self):
return sorted(list(self.connection().node_labels))
def relationship_types(self):
return sorted(list(self.connection().relationship_types))
def properties(self):
url = self.url + "propertykeys"
r = requests.get(url, auth=(self.username, self.password))
props = r.json()
return sorted(props)
| import requests
from py2neo import Graph, authenticate
class Neo4j:
def __init__(self, host, port, username=None, password=None):
self.username = username
self.password = password
self.host_port = "{host}:{port}".format(host=host, port=port)
self.url = "http://{host_port}/db/data/".format(host_port=self.host_port)
def connection(self):
if self.username and self.password:
authenticate(self.host_port, self.username, self.password)
graph = Graph(self.url)
return graph
def cypher(self, query):
tx = self.connection().cypher.begin()
try:
tx.append(query)
results = tx.process()
tx.commit()
except Exception as e:
results = e
except KeyboardInterrupt:
tx.rollback()
results = ""
return results
def labels(self):
return sorted(list(self.connection().node_labels))
def relationship_types(self):
return sorted(list(self.connection().relationship_types))
def properties(self):
url = self.url + "propertykeys"
r = requests.get(url, auth=(self.username, self.password))
props = r.json()
return sorted(props)
| Remove host and port attributes from Neo4j | Remove host and port attributes from Neo4j
| Python | mit | nicolewhite/cycli,nicolewhite/cycli |
70c9deb44cbbce13fbe094640786398cb4683b08 | ldap_sync/tasks.py | ldap_sync/tasks.py | from django.core.management import call_command
from celery import task
@task
def syncldap():
"""
Call the appropriate management command to synchronize the LDAP users
with the local database.
"""
call_command('syncldap')
| from django.core.management import call_command
from celery import shared_task
@shared_task
def syncldap():
"""
Call the appropriate management command to synchronize the LDAP users
with the local database.
"""
call_command('syncldap')
| Change Celery task to shared task | Change Celery task to shared task
| Python | bsd-3-clause | alexsilva/django-ldap-sync,jbittel/django-ldap-sync,PGower/django-ldap3-sync,alexsilva/django-ldap-sync |
026fade3f064f0185fa3a6f2075d43353e041970 | whois-scraper.py | whois-scraper.py | from lxml import html
from PIL import Image
import requests
def enlarge_image(image_file):
image = Image.open(image_file)
enlarged_size = map(lambda x: x*2, image.size)
enlarged_image = image.resize(enlarged_size)
return enlarged_image
def extract_text(image_file):
image = enlarge_image(image_file)
# Use Tesseract to extract text from the enlarged image. Then Return it.
domain = 'speedtest.net'
page = requests.get('http://www.whois.com/whois/{}'.format(domain))
tree = html.fromstring(page.content)
| from lxml import html
from PIL import Image
import requests
import urllib.request
def enlarge_image(image_file):
image = Image.open(image_file)
enlarged_size = map(lambda x: x*2, image.size)
enlarged_image = image.resize(enlarged_size)
return enlarged_image
def extract_text(image_file):
image = enlarge_image(image_file)
# Use Tesseract to extract text from the enlarged image. Then Return it.
def fix_emails(whois_data, image_urls):
count = 0
for index, item in enumerate(whois_data):
if item.startswith('@'):
with urllib.request.urlopen(image_urls[count]) as response:
email_username = extract_text(image_urls[count])
whois_data[index-1:index+1] = [whois_data[index-1] + email_username + whois_data[index]]
count += 1
return whois_data
def scrape_whois(domain):
domain = 'speedtest.net'
page = requests.get('http://www.whois.com/whois/{}'.format(domain))
tree = html.fromstring(page.content)
registrar_data = tree.xpath('//*[@id="registrarData"]/text()')
registrar_images = list(map(lambda x: 'http://www.whois.com' + x, tree.xpath('//*[@id="registrarData"]/img/@src')))
registry_data = tree.xpath('//*[@id="registryData"]/text()')
registry_images = list(map(lambda x: 'http://www.whois.com' + x, tree.xpath('//*[@id="registryData"]/img/@src'))) | Add functions to scrape whois data and fix the e-mails in it | Add functions to scrape whois data and fix the e-mails in it
- Add function scrape_whois which scrapes the raw whois information for a given domain from http://www.whois.com/whois.
- Add function fix_emails. http://www.whois.com hides the username-part of the contact e-mails from the whois info by displaying it as an image. This function fixes that using the extract_text function.
| Python | mit | SkullTech/whois-scraper |
b89f6981d4f55790aa919f36e02a6312bd5f1583 | tests/__init__.py | tests/__init__.py | import unittest
import sys
from six import PY3
if PY3:
from urllib.parse import urlsplit, parse_qsl
else:
from urlparse import urlsplit, parse_qsl
import werkzeug as wz
from flask import Flask, url_for, render_template_string
from flask.ext.images import Images, ImageSize, resized_img_src
import flask
flask_version = tuple(map(int, flask.__version__.split('.')))
class TestCase(unittest.TestCase):
def setUp(self):
self.app = self.create_app()
self.app_ctx = self.app.app_context()
self.app_ctx.push()
self.req_ctx = self.app.test_request_context('http://localhost:8000/')
self.req_ctx.push()
self.client = self.app.test_client()
def create_app(self):
app = Flask(__name__)
app.config['TESTING'] = True
app.config['SERVER_NAME'] = 'localhost'
app.config['SECRET_KEY'] = 'secret secret'
app.config['IMAGES_PATH'] = ['assets']
self.images = Images(app)
return app
def assert200(self, res):
self.assertEqual(res.status_code, 200)
| import unittest
import sys
from six import PY3
if PY3:
from urllib.parse import urlsplit, parse_qsl
else:
from urlparse import urlsplit, parse_qsl
import werkzeug as wz
from flask import Flask, url_for, render_template_string
import flask
from flask_images import Images, ImageSize, resized_img_src
flask_version = tuple(map(int, flask.__version__.split('.')))
class TestCase(unittest.TestCase):
def setUp(self):
self.app = self.create_app()
self.app_ctx = self.app.app_context()
self.app_ctx.push()
self.req_ctx = self.app.test_request_context('http://localhost:8000/')
self.req_ctx.push()
self.client = self.app.test_client()
def create_app(self):
app = Flask(__name__)
app.config['TESTING'] = True
app.config['SERVER_NAME'] = 'localhost'
app.config['SECRET_KEY'] = 'secret secret'
app.config['IMAGES_PATH'] = ['assets']
self.images = Images(app)
return app
def assert200(self, res):
self.assertEqual(res.status_code, 200)
| Stop using `flask.ext.*` in tests. | Stop using `flask.ext.*` in tests.
| Python | bsd-3-clause | mikeboers/Flask-Images |
211972701d8dbd39e42ec5a8d10b9c56be858d3e | tests/conftest.py | tests/conftest.py | import string
import pytest
@pytest.fixture
def identity_fixures():
l = []
for i, c in enumerate(string.ascii_uppercase):
l.append(dict(
name='identity_{0}'.format(i),
access_key_id='someaccesskey_{0}'.format(c),
secret_access_key='notasecret_{0}_{1}'.format(i, c),
))
return l
@pytest.fixture
def identity_store(tmpdir):
from awsident.storage import IdentityStore
identity_store = IdentityStore(config_path=str(tmpdir))
def fin():
identity_store.identities.clear()
identity_store.save_to_config()
return identity_store
@pytest.fixture
def identity_store_with_data(tmpdir):
from awsident.storage import IdentityStore
identity_store = IdentityStore(config_path=str(tmpdir))
for data in identity_fixures():
identity_store.add_identity(data)
def fin():
identity_store.identities.clear()
identity_store.save_to_config()
return identity_store
| import string
import pytest
@pytest.fixture
def identity_fixures():
l = []
for i, c in enumerate(string.ascii_uppercase):
l.append(dict(
name='identity_{0}'.format(i),
access_key_id='someaccesskey_{0}'.format(c),
secret_access_key='notasecret_{0}_{1}'.format(i, c),
))
return l
@pytest.fixture
def identity_store(tmpdir):
from awsident.storage import IdentityStore
identity_store = IdentityStore(config_path=str(tmpdir))
return identity_store
@pytest.fixture
def identity_store_with_data(tmpdir):
from awsident.storage import IdentityStore
identity_store = IdentityStore(config_path=str(tmpdir))
for data in identity_fixures():
identity_store.add_identity(data)
return identity_store
| Remove fixture teardown since nothing should be saved (tmpdir) | Remove fixture teardown since nothing should be saved (tmpdir)
| Python | mit | nocarryr/AWS-Identity-Manager |
1da421dcb4356a4f572bc1a815ed7e87dc619f64 | vcfexplorer/frontend/views.py | vcfexplorer/frontend/views.py | """
vcfexplorer.frontend.views
VCF Explorer frontend views
"""
from flask import send_file
from . import bp
@bp.route('/', defaults={'path': ''})
@bp.route('/<path:path>')
def index(path):
return send_file('frontend/templates/index.html')
| """
vcfexplorer.frontend.views
VCF Explorer frontend views
"""
from flask import send_file
from . import bp
@bp.route('/', defaults={'path': ''})
@bp.route('<path:path>')
def index(path):
return send_file('frontend/templates/index.html')
| Add flexible routing, maybe change to stricter hardcoded ones? | Add flexible routing, maybe change to stricter hardcoded ones?
| Python | mit | CuppenResearch/vcf-explorer,CuppenResearch/vcf-explorer,CuppenResearch/vcf-explorer,CuppenResearch/vcf-explorer |
debdc71a1c22412c46d8bf74315a5467c1e228ee | magnum/tests/unit/common/test_exception.py | magnum/tests/unit/common/test_exception.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
from magnum.common import exception
from magnum.i18n import _
from magnum.tests import base
class TestMagnumException(exception.MagnumException):
message = _("templated %(name)s")
class TestException(base.BaseTestCase):
def raise_(self, ex):
raise ex
def test_message_is_templated(self):
ex = TestMagnumException(name="NAME")
self.assertEqual("templated NAME", ex.message)
def test_custom_message_is_templated(self):
ex = TestMagnumException(_("custom templated %(name)s"), name="NAME")
self.assertEqual("custom templated NAME", ex.message)
def test_all_exceptions(self):
for name, obj in inspect.getmembers(exception):
if inspect.isclass(obj) and issubclass(obj, Exception):
self.assertRaises(obj, self.raise_, obj())
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
from magnum.common import exception
from magnum.i18n import _
from magnum.tests import base
class TestMagnumException(exception.MagnumException):
message = _("templated %(name)s")
class TestException(base.BaseTestCase):
def raise_(self, ex):
raise ex
def test_message_is_templated(self):
ex = TestMagnumException(name="NAME")
self.assertEqual("templated NAME", str(ex))
def test_custom_message_is_templated(self):
ex = TestMagnumException(_("custom templated %(name)s"), name="NAME")
self.assertEqual("custom templated NAME", str(ex))
def test_all_exceptions(self):
for name, obj in inspect.getmembers(exception):
if inspect.isclass(obj) and issubclass(obj, Exception):
self.assertRaises(obj, self.raise_, obj())
| Stop using deprecated 'message' attribute in Exception | Stop using deprecated 'message' attribute in Exception
The 'message' attribute has been deprecated and removed
from Python3.
For more details, please check:
https://www.python.org/dev/peps/pep-0352/
Change-Id: Id952e4f59a911df7ccc1d64e7a8a2d5e9ee353dd
| Python | apache-2.0 | ArchiFleKs/magnum,ArchiFleKs/magnum,openstack/magnum,openstack/magnum |
021cf436c23c5c705d0e3c5b6383e25811ade669 | webmaster_verification/views.py | webmaster_verification/views.py | import logging
logger = logging.getLogger(__name__)
from django.http import Http404
from django.views.generic import TemplateView
import settings
class VerificationView(TemplateView):
"""
This simply adds the verification key to the view context and makes sure
we return a 404 if the key wasn't set for the provider
"""
def get_context_data(self, **kwargs):
context = super(VerificationView, self).get_context_data(**kwargs)
try:
context['%s_verification' % self.provider] = settings.WEBMASTER_VERIFICATION[self.provider]
except KeyError:
raise Http404
except AttributeError:
logger.info("WEBMASTER_VERIFICATION not defined in settings")
raise Http404
return context
class GoogleVerificationView(VerificationView):
template_name = 'google_verify_template.html'
provider = 'google'
class BingVerificationView(VerificationView):
template_name = 'bing_verify_template.xml'
provider = 'bing'
| import logging
logger = logging.getLogger(__name__)
from django.http import Http404
from django.views.generic import TemplateView
import settings
class VerificationView(TemplateView):
"""
This simply adds the verification key to the view context and makes sure
we return a 404 if the key wasn't set for the provider
"""
def get_context_data(self, **kwargs):
context = super(VerificationView, self).get_context_data(**kwargs)
try:
context['%s_verification' % self.provider] = settings.WEBMASTER_VERIFICATION[self.provider]
except KeyError:
raise Http404
except AttributeError:
logger.info("WEBMASTER_VERIFICATION not defined in settings")
raise Http404
return context
class GoogleVerificationView(VerificationView):
template_name = 'webmaster_verification/google_verify_template.html'
provider = 'google'
class BingVerificationView(VerificationView):
template_name = 'webmaster_verification/bing_verify_template.xml'
provider = 'bing'
| Use the new template path | Use the new template path
| Python | bsd-3-clause | nkuttler/django-webmaster-verification,nkuttler/django-webmaster-verification |
8312ba2ca414e5da8ad165ec08ff0205cd99a2c9 | oneflow/settings/snippets/00_production.py | oneflow/settings/snippets/00_production.py |
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# Do we show all 1flow AdminModels in admin ?
# Not by default, cf. https://trello.com/c/dJoV4xZy
FULL_ADMIN = False
|
import warnings
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
import django.conf.urls.defaults # NOQA
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# Do we show all 1flow AdminModels in admin ?
# Not by default, cf. https://trello.com/c/dJoV4xZy
FULL_ADMIN = False
| Fix the annoying "/home/1flow/.virtualenvs/1flow/local/lib/python2.7/site-packages/django/conf/urls/defaults.py:3: DeprecationWarning: django.conf.urls.defaults is deprecated; use django.conf.urls instead DeprecationWarning)". We do not use it in our code at all, and external packages (rosetta, grappelli, etc) didn't issue any new release recently to fix this warning. | Fix the annoying "/home/1flow/.virtualenvs/1flow/local/lib/python2.7/site-packages/django/conf/urls/defaults.py:3: DeprecationWarning: django.conf.urls.defaults is deprecated; use django.conf.urls instead DeprecationWarning)". We do not use it in our code at all, and external packages (rosetta, grappelli, etc) didn't issue any new release recently to fix this warning. | Python | agpl-3.0 | WillianPaiva/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow |
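The new settings snippet above imports the deprecated module inside warnings.catch_warnings() so the DeprecationWarning never reaches the log. The same pattern can be sketched without Django at all (noisy_import is a made-up stand-in for the real import):

import warnings

def noisy_import():
    # Stand-in for importing a module that emits a DeprecationWarning on import.
    warnings.warn("this module is deprecated", DeprecationWarning)

with warnings.catch_warnings():
    warnings.filterwarnings("ignore", category=DeprecationWarning)
    noisy_import()              # silenced inside the block

warnings.simplefilter("error", DeprecationWarning)
try:
    noisy_import()              # outside the block it surfaces again
except DeprecationWarning as exc:
    print("outside the block:", exc)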
4d1b96792f73777adaa0a79341901ca82f57839b | use/functional.py | use/functional.py | def pipe(*functions):
def closure(x):
for fn in functions:
if not out:
out = fn(x)
else:
out = fn(out)
return out
return closure
| import collections
import functools
def pipe(*functions):
def closure(x):
for fn in functions:
if not out:
out = fn(x)
else:
out = fn(out)
return out
return closure
class memoize(object):
'''Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
'''
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
if not isinstance(args, collections.Hashable):
# uncacheable. a list, for instance.
# better to not cache than blow up.
return self.func(*args)
if args in self.cache:
return self.cache[args]
else:
value = self.func(*args)
self.cache[args] = value
return value
def __repr__(self):
'''Return the function's docstring.'''
return self.func.__doc__
def __get__(self, obj, objtype):
'''Support instance methods.'''
return functools.partial(self.__call__, obj)
| Add a simple memoize function | Add a simple memoize function
| Python | mit | log0ymxm/corgi |
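Two hedged notes on the row above: the new memoize class caches results keyed on the positional arguments, and pipe's closure (unchanged by the commit) reads out before ever assigning it, which looks like it would raise UnboundLocalError on first call. A standalone sketch, not the corgi code itself, with that assumption fixed and a typical memoization use:

import functools

def pipe(*functions):
    """Compose functions left to right; out starts as None here (assumed fix)."""
    def closure(x):
        out = None
        for fn in functions:
            out = fn(x) if out is None else fn(out)
        return out
    return closure

def memoize(func):
    """Simplified dict-backed cache keyed on positional arguments."""
    cache = {}
    @functools.wraps(func)
    def wrapper(*args):
        if args not in cache:
            cache[args] = func(*args)
        return cache[args]
    return wrapper

@memoize
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

double_then_inc = pipe(lambda x: x * 2, lambda x: x + 1)
print(double_then_inc(10))   # 21
print(fib(80))               # fast because intermediate results are cached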
63c72e9606ba9c9030ba63f25331c351873ab8b1 | node/multiply.py | node/multiply.py | #!/usr/bin/env python
from nodes import Node
class Multiply(Node):
char = "*"
args = 2
results = 1
@Node.test_func([4,5], [20])
def func(self, a,b):
"""a*b"""
return a*b | #!/usr/bin/env python
from nodes import Node
class Multiply(Node):
char = "*"
args = 2
results = 1
@Node.test_func([4,5], [20])
def func(self, a,b):
"""a*b"""
return[a*b] | Multiply now handles lists correctly | Multiply now handles lists correctly
| Python | mit | muddyfish/PYKE,muddyfish/PYKE |
e0671b84c3d11b738d6fe6981d8b2fac87828558 | cupy/logic/ops.py | cupy/logic/ops.py | from cupy import core
logical_and = core.create_comparison(
'logical_and', '&&',
'''Computes the logical AND of two arrays.
.. seealso:: :data:`numpy.logical_and`
''',
require_sortable_dtype=False)
logical_or = core.create_comparison(
'logical_or', '||',
'''Computes the logical OR of two arrays.
.. seealso:: :data:`numpy.logical_or`
''',
require_sortable_dtype=False)
logical_not = core.create_ufunc(
'cupy_logical_not',
('?->?', 'b->?', 'B->?', 'h->?', 'H->?', 'i->?', 'I->?', 'l->?', 'L->?',
'q->?', 'Q->?', 'e->?', 'f->?', 'd->?'),
'out0 = !in0',
doc='''Computes the logical NOT of an array.
.. seealso:: :data:`numpy.logical_not`
''')
logical_xor = core.create_ufunc(
'cupy_logical_xor',
('??->?', 'bb->?', 'BB->?', 'hh->?', 'HH->?', 'ii->?', 'II->?', 'll->?',
'LL->?', 'qq->?', 'QQ->?', 'ee->?', 'ff->?', 'dd->?'),
'out0 = !in0 != !in1',
doc='''Computes the logical XOR of two arrays.
.. seealso:: :data:`numpy.logical_xor`
''')
| from cupy import core
logical_and = core.create_comparison(
'logical_and', '&&',
'''Computes the logical AND of two arrays.
.. seealso:: :data:`numpy.logical_and`
''',
require_sortable_dtype=True)
logical_or = core.create_comparison(
'logical_or', '||',
'''Computes the logical OR of two arrays.
.. seealso:: :data:`numpy.logical_or`
''',
require_sortable_dtype=True)
logical_not = core.create_ufunc(
'cupy_logical_not',
('?->?', 'b->?', 'B->?', 'h->?', 'H->?', 'i->?', 'I->?', 'l->?', 'L->?',
'q->?', 'Q->?', 'e->?', 'f->?', 'd->?'),
'out0 = !in0',
doc='''Computes the logical NOT of an array.
.. seealso:: :data:`numpy.logical_not`
''')
logical_xor = core.create_ufunc(
'cupy_logical_xor',
('??->?', 'bb->?', 'BB->?', 'hh->?', 'HH->?', 'ii->?', 'II->?', 'll->?',
'LL->?', 'qq->?', 'QQ->?', 'ee->?', 'ff->?', 'dd->?'),
'out0 = !in0 != !in1',
doc='''Computes the logical XOR of two arrays.
.. seealso:: :data:`numpy.logical_xor`
''')
| Set required_sortable to True for logical operations | Set required_sortable to True for logical operations
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy |
208a48a8af8228bffc85db601f4eb2423981a6ec | manager/commands_puzzleboard.py | manager/commands_puzzleboard.py | ''' commands for the manager cli '''
from uuid import uuid4
import requests
def command_puzzleboard_consume(**kwargs):
url = kwargs['--consume-url']
name = kwargs['--name']
size = kwargs['--size']
data = f'{{"puzzleboard": "{name}", "size": {size}, correlation-id": "{uuid4()}"}}'
print(data)
r = requests.post(url, data)
print(f'status_code={r.status_code}')
print(f'reason={r.reason}')
print(f'text={r.text}')
| ''' commands for the manager cli '''
from uuid import uuid4
import requests
def command_puzzleboard_consume(**kwargs):
url = kwargs['--consume-url']
name = kwargs['--name']
size = kwargs['--size']
data = f'{{"puzzleboard": "{name}", "size": {size}, "correlation-id": "{uuid4()}"}}'
print(data)
r = requests.post(url, data)
print(f'status_code={r.status_code}')
print(f'reason={r.reason}')
print(f'text={r.text}')
| Make sure API payload is well-formed JSON | Make sure API payload is well-formed JSON
| Python | mit | klmcwhirter/huntwords,klmcwhirter/huntwords,klmcwhirter/huntwords,klmcwhirter/huntwords |
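The fix above restores the quote that was missing before "correlation-id" in the hand-built f-string. A sketch of the sturdier route (assumed here, not necessarily what the project chose) is to build a dict and let json.dumps handle the quoting:

import json
from uuid import uuid4

name, size = "animals", 15

# Hand-rolled JSON strings are easy to get subtly wrong; a dict avoids that class of bug.
payload = json.dumps({
    "puzzleboard": name,
    "size": size,
    "correlation-id": str(uuid4()),
})

json.loads(payload)   # round-trips, so the payload really is well-formed JSON
print(payload)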
3f2b4236bdb5199d4830a893c7b511f7875dc501 | plata/utils.py | plata/utils.py | from decimal import Decimal
import simplejson
from django.core.serializers.json import DjangoJSONEncoder
try:
simplejson.dumps([42], use_decimal=True)
except TypeError:
raise Exception('simplejson>=2.1 with support for use_decimal required.')
class JSONFieldDescriptor(object):
def __init__(self, field):
self.field = field
def __get__(self, obj, objtype):
cache_field = '_cached_jsonfield_%s' % self.field
if not hasattr(obj, cache_field):
try:
setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field),
use_decimal=True))
except (TypeError, ValueError):
setattr(obj, cache_field, {})
return getattr(obj, cache_field)
def __set__(self, obj, value):
setattr(obj, '_cached_jsonfield_%s' % self.field, value)
setattr(obj, self.field, simplejson.dumps(value, use_decimal=True,
cls=DjangoJSONEncoder))
| from decimal import Decimal
import simplejson
from django.core.serializers.json import DjangoJSONEncoder
try:
simplejson.dumps([42], use_decimal=True)
except TypeError:
raise Exception('simplejson>=2.1 with support for use_decimal required.')
class CallbackOnUpdateDict(dict):
"""Dict which executes a callback on every update"""
def __init__(self, *args, **kwargs):
self.callback = kwargs.pop('callback')
super(CallbackOnUpdateDict, self).__init__(*args, **kwargs)
def __setitem__(self, key, value):
ret = super(CallbackOnUpdateDict, self).__setitem__(key, value)
self.callback(self)
return ret
def update(self, d):
ret = super(CallbackOnUpdateDict, self).update(d)
self.callback(self)
return ret
class JSONFieldDescriptor(object):
def __init__(self, field):
self.field = field
def __get__(self, obj, objtype):
cache_field = '_cached_jsonfield_%s' % self.field
if not hasattr(obj, cache_field):
try:
value = simplejson.loads(getattr(obj, self.field), use_decimal=True)
except (TypeError, ValueError):
value = {}
self.__set__(obj, value)
return getattr(obj, cache_field)
def __set__(self, obj, value):
if not isinstance(value, CallbackOnUpdateDict):
value = CallbackOnUpdateDict(value,
# Update cached and serialized value on every write to the data dict
callback=lambda d: self.__set__(obj, d))
setattr(obj, '_cached_jsonfield_%s' % self.field, value)
setattr(obj, self.field, simplejson.dumps(value, use_decimal=True,
cls=DjangoJSONEncoder))
 | Make working with JSONFieldDescriptor easier | Make working with JSONFieldDescriptor easier
| Python | bsd-3-clause | allink/plata,armicron/plata,armicron/plata,armicron/plata,stefanklug/plata |
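A short usage sketch of the callback-dict idea introduced above (Record and the field names are hypothetical): every write to the dict immediately refreshes a serialized copy on the owning object, so callers never have to trigger serialization themselves.

import json

class CallbackOnUpdateDict(dict):
    """Minimal illustration: run a callback after every mutation."""
    def __init__(self, *args, **kwargs):
        self.callback = kwargs.pop('callback')
        super().__init__(*args, **kwargs)

    def __setitem__(self, key, value):
        super().__setitem__(key, value)
        self.callback(self)

    def update(self, d):
        super().update(d)
        self.callback(self)

class Record:
    def __init__(self):
        self.serialized = "{}"
        self.data = CallbackOnUpdateDict(
            callback=lambda d: setattr(self, "serialized", json.dumps(d)))

r = Record()
r.data["price"] = 10                    # plain assignment refreshes the copy
r.data.update({"currency": "CHF"})
print(r.serialized)                     # {"price": 10, "currency": "CHF"}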
dae26a54aff5ada572b047869e83b098511bffbf | src/artgraph/plugins/plugin.py | src/artgraph/plugins/plugin.py | import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
cursor = db.cursor()
cursor.execute("SELECT old_text FROM text INNER JOIN revision ON text.old_id = revision.rev_text_id INNER JOIN page ON revision.rev_page = page.page_id AND page.page_title = %s", (title))
return mwparserfromhell.parse(cursor.fetchone()[0])
| import MySQLdb
import mwparserfromhell
class Plugin():
def get_wikicode(self, title):
# TODO Make this a conf
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="BigData")
cursor = db.cursor()
cursor.execute("""
SELECT old_text
FROM text
INNER JOIN revision ON text.old_id = revision.rev_text_id
INNER JOIN page ON revision.rev_page = page.page_id AND page.page_namespace = 0 AND page.page_title = %s""", (title))
return mwparserfromhell.parse(cursor.fetchone()[0])
| Fix SQL query to use indexes in the page table | Fix SQL query to use indexes in the page table | Python | mit | dMaggot/ArtistGraph |
131f266e73139f1148ee3e9fcce8db40842afb88 | sale_channel/models/account.py | sale_channel/models/account.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Sales Channels
# Copyright (C) 2016 June
# 1200 Web Development
# http://1200wd.com/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _, exceptions
class AccountTax(models.Model):
_inherit = 'account.tax'
@api.model
def _get_sales_channel_domain(self):
ids = self.env.ref('res_partner_category.sales_channel').ids
return [('category_id', 'in', ids)]
sales_channel_id = fields.Many2one('res.partner', string="Sales channel",
ondelete='set null', domain=_get_sales_channel_domain)
| # -*- coding: utf-8 -*-
##############################################################################
#
# Sales Channels
# Copyright (C) 2016 June
# 1200 Web Development
# http://1200wd.com/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _, exceptions
class AccountTax(models.Model):
_inherit = 'account.tax'
@api.model
def _get_sales_channel_domain(self):
ids = self.env.ref('res_partner_category.sales_channel').ids
return [('category_id', 'in', ids)]
sales_channel_id = fields.Many2one('res.partner', string="Sales channel",
ondelete='set null', domain=_get_sales_channel_domain)
_sql_constraints = [
('name_company_uniq', 'unique(name, company_id, sales_channel_id)', 'Tax Name must be unique per company and sales channel!'),
]
| Add constraint, tax name must be unique for each company and sales channel | [IMP] Add constraint, tax name must be unique for each company and sales channel
| Python | agpl-3.0 | 1200wd/1200wd_addons,1200wd/1200wd_addons |
999d243fbc9908255ae292186bf8b17eb67e42e8 | planner/forms.py | planner/forms.py | from django import forms
class LoginForm(forms.Form):
email = forms.EmailField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
| from django.contrib.auth.forms import AuthenticationForm
from django import forms
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
| Fix LoginForm to be conformant to builtin AuthenticationForm | Fix LoginForm to be conformant to builtin AuthenticationForm
| Python | mit | livingsilver94/getaride,livingsilver94/getaride,livingsilver94/getaride |
e1240aa33b286ba52507128458fc6d6b3b68dfb3 | statsmodels/stats/multicomp.py | statsmodels/stats/multicomp.py | # -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 18:27:25 2012
Author: Josef Perktold
"""
from statsmodels.sandbox.stats.multicomp import MultiComparison
def pairwise_tukeyhsd(endog, groups, alpha=0.05):
'''calculate all pairwise comparisons with TukeyHSD confidence intervals
this is just a wrapper around tukeyhsd method of MultiComparison
Parameters
----------
endog : ndarray, float, 1d
response variable
groups : ndarray, 1d
array with groups, can be string or integers
alpha : float
significance level for the test
Returns
-------
results : TukeyHSDResults instance
A results class containing relevant data and some post-hoc
calculations
See Also
--------
MultiComparison
tukeyhsd
statsmodels.sandbox.stats.multicomp.TukeyHSDResults
'''
return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
| # -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 18:27:25 2012
Author: Josef Perktold
"""
from statsmodels.sandbox.stats.multicomp import tukeyhsd, MultiComparison
def pairwise_tukeyhsd(endog, groups, alpha=0.05):
'''calculate all pairwise comparisons with TukeyHSD confidence intervals
this is just a wrapper around tukeyhsd method of MultiComparison
Parameters
----------
endog : ndarray, float, 1d
response variable
groups : ndarray, 1d
array with groups, can be string or integers
alpha : float
significance level for the test
Returns
-------
results : TukeyHSDResults instance
A results class containing relevant data and some post-hoc
calculations
See Also
--------
MultiComparison
tukeyhsd
statsmodels.sandbox.stats.multicomp.TukeyHSDResults
'''
return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
| Put back an import that my IDE incorrectly flagged as unused | Put back an import that my IDE incorrectly flagged as unused
| Python | bsd-3-clause | gef756/statsmodels,detrout/debian-statsmodels,detrout/debian-statsmodels,bzero/statsmodels,YihaoLu/statsmodels,wzbozon/statsmodels,edhuckle/statsmodels,cbmoore/statsmodels,musically-ut/statsmodels,josef-pkt/statsmodels,cbmoore/statsmodels,rgommers/statsmodels,hlin117/statsmodels,ChadFulton/statsmodels,edhuckle/statsmodels,hainm/statsmodels,musically-ut/statsmodels,gef756/statsmodels,edhuckle/statsmodels,saketkc/statsmodels,jseabold/statsmodels,jstoxrocky/statsmodels,adammenges/statsmodels,waynenilsen/statsmodels,bzero/statsmodels,nvoron23/statsmodels,wdurhamh/statsmodels,huongttlan/statsmodels,alekz112/statsmodels,adammenges/statsmodels,nguyentu1602/statsmodels,waynenilsen/statsmodels,yl565/statsmodels,phobson/statsmodels,alekz112/statsmodels,wzbozon/statsmodels,huongttlan/statsmodels,saketkc/statsmodels,hainm/statsmodels,hlin117/statsmodels,kiyoto/statsmodels,YihaoLu/statsmodels,waynenilsen/statsmodels,bashtage/statsmodels,wzbozon/statsmodels,jseabold/statsmodels,gef756/statsmodels,phobson/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,rgommers/statsmodels,astocko/statsmodels,bsipocz/statsmodels,edhuckle/statsmodels,bzero/statsmodels,jseabold/statsmodels,bert9bert/statsmodels,hlin117/statsmodels,saketkc/statsmodels,statsmodels/statsmodels,wdurhamh/statsmodels,edhuckle/statsmodels,wkfwkf/statsmodels,nvoron23/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,wdurhamh/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,wwf5067/statsmodels,bsipocz/statsmodels,nguyentu1602/statsmodels,kiyoto/statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,wkfwkf/statsmodels,ChadFulton/statsmodels,detrout/debian-statsmodels,hainm/statsmodels,wwf5067/statsmodels,phobson/statsmodels,wkfwkf/statsmodels,bert9bert/statsmodels,alekz112/statsmodels,nguyentu1602/statsmodels,hainm/statsmodels,bert9bert/statsmodels,yl565/statsmodels,nguyentu1602/statsmodels,Averroes/statsmodels,bzero/statsmodels,kiyoto/statsmodels,wwf5067/statsmodels,nvoron23/statsmodels,statsmodels/statsmodels,YihaoLu/statsmodels,bashtage/statsmodels,rgommers/statsmodels,bert9bert/statsmodels,bashtage/statsmodels,bert9bert/statsmodels,saketkc/statsmodels,DonBeo/statsmodels,musically-ut/statsmodels,Averroes/statsmodels,cbmoore/statsmodels,kiyoto/statsmodels,nvoron23/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,Averroes/statsmodels,astocko/statsmodels,huongttlan/statsmodels,wkfwkf/statsmodels,wdurhamh/statsmodels,rgommers/statsmodels,josef-pkt/statsmodels,wdurhamh/statsmodels,josef-pkt/statsmodels,nvoron23/statsmodels,bzero/statsmodels,wzbozon/statsmodels,jstoxrocky/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,hlin117/statsmodels,alekz112/statsmodels,astocko/statsmodels,musically-ut/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,yl565/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,rgommers/statsmodels,astocko/statsmodels,bashtage/statsmodels,phobson/statsmodels,Averroes/statsmodels,huongttlan/statsmodels,yl565/statsmodels,jstoxrocky/statsmodels,wkfwkf/statsmodels,bsipocz/statsmodels,cbmoore/statsmodels,gef756/statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,DonBeo/statsmodels,DonBeo/statsmodels,bsipocz/statsmodels,kiyoto/statsmodels,wzbozon/statsmodels,detrout/debian-statsmodels,yl565/statsmodels,cbmoore/statsmodels,saketkc/statsmodels,waynenilsen/statsmodels,statsmodels/statsmodels |
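Related to the commit message above about an IDE flagging a needed import: a common convention (assumed here, not necessarily what statsmodels uses) is to mark imports that exist only for re-export, so linters stop reporting them as unused.

# Illustrative stdlib example: `path` is imported purely so other modules can do
# `from this_module import path`; the noqa comment keeps flake8-style tools quiet.
from os import path  # noqa: F401

__all__ = ["path"]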
eb785ce7485c438cfcaf6bb48d8cf8a840970bd4 | src/tenyksddate/main.py | src/tenyksddate/main.py | import datetime
from tenyksservice import TenyksService, run_service
from ddate.base import DDate
class DiscordianDate(TenyksService):
direct_only = True
irc_message_filters = {
'date': [r'^(?i)(ddate|discordian) (?P<month>(.*)) (?P<day>(.*)) (?P<year>(.*))'],
'today': [r'^(?i)(ddate|discordian)']
}
def __init__(self, *args, **kwargs):
super(DiscordianDate, self).__init__(*args, **kwargs)
def handle_today(self, data, match):
self.send(str(DDate()), data)
def handle_date(self, data, match):
year = int(match.groupdict()['year'])
month = int(match.groupdict()['month'])
day = int(match.groupdict()['day'])
self.send(str(DDate(datetime.date(year=year, month=month, day=day))), data)
def main():
run_service(DiscordianDate)
if __name__ == '__main__':
main()
| import datetime
from tenyksservice import TenyksService, run_service
from ddate.base import DDate
class DiscordianDate(TenyksService):
direct_only = True
irc_message_filters = {
'date': [r'^(?i)(ddate|discordian) (?P<month>(.*)) (?P<day>(.*)) (?P<year>(.*))'],
'today': [r'^(?i)(ddate|discordian)']
}
def __init__(self, *args, **kwargs):
super(DiscordianDate, self).__init__(*args, **kwargs)
def handle_date(self, data, match):
year = int(match.groupdict()['year'])
month = int(match.groupdict()['month'])
day = int(match.groupdict()['day'])
self.send(str(DDate(datetime.date(year=year, month=month, day=day))), data)
def handle_today(self, data, match):
self.send(str(DDate()), data)
def main():
run_service(DiscordianDate)
if __name__ == '__main__':
main()
| Change method order to match filters | Change method order to match filters
| Python | mit | kyleterry/tenyks-contrib,cblgh/tenyks-contrib,colby/tenyks-contrib |
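The reorder above is cosmetic (method order inside a class does not change behaviour), but it mirrors a real property of the filters: the looser 'today' pattern also matches a full date command, so whatever consults these patterns has to try the more specific one first. A standalone illustration with simplified patterns (not the service's real dispatch code):

import re

filters = {
    # "ddate 3 14 2015" matches BOTH patterns, so iteration order decides the handler.
    "date": re.compile(r"^ddate (?P<month>\d+) (?P<day>\d+) (?P<year>\d+)$", re.IGNORECASE),
    "today": re.compile(r"^ddate", re.IGNORECASE),
}

def dispatch(text):
    for name, pattern in filters.items():   # dicts keep insertion order on Python 3.7+
        if pattern.match(text):
            return name
    return None

assert dispatch("ddate 3 14 2015") == "date"
assert dispatch("ddate") == "today"
print("dispatch order ok")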
524076bd1e629e2c73413609c012223a80b7e8a3 | signbank/registration/admin.py | signbank/registration/admin.py | from signbank.registration.models import *
from django.contrib import admin
from django.core.urlresolvers import reverse
class UserProfileAdmin(admin.ModelAdmin):
list_display = ['user', 'permissions', 'best_describes_you', 'australian', 'auslan_user', 'deaf', 'researcher_credentials']
readonly_fields = ['user', 'australian', 'auslan_user', 'deaf', 'yob', 'postcode', 'best_describes_you', 'researcher_credentials', 'learned', 'schooltype', 'school', 'teachercomm']
list_filter = ['australian', 'auslan_user', 'deaf']
def permissions(self, obj):
url = reverse('admin:auth_user_change', args=(obj.pk,))
return '<a href="%s">View user</a>' % (url)
permissions.allow_tags = True
admin.site.register(UserProfile, UserProfileAdmin)
| from signbank.registration.models import *
from django.contrib import admin
from django.core.urlresolvers import reverse
class UserProfileAdmin(admin.ModelAdmin):
list_display = ['user', 'permissions', 'best_describes_you', 'australian', 'auslan_user', 'deaf', 'researcher_credentials']
readonly_fields = ['user', 'australian', 'auslan_user', 'deaf', 'yob', 'postcode', 'best_describes_you', 'researcher_credentials', 'learned', 'schooltype', 'school', 'teachercomm']
list_filter = ['australian', 'auslan_user', 'deaf']
def permissions(self, obj):
url = reverse('admin:auth_user_change', args=(obj.user.id,))
return '<a href="%s">View user</a>' % (url)
permissions.allow_tags = True
admin.site.register(UserProfile, UserProfileAdmin)
 | Use the correct user ID to show the user from the profiles view. Fixes #61. | Use the correct user ID to show the user from the profiles view. Fixes #61.
| Python | bsd-3-clause | Signbank/Auslan-signbank,Signbank/BSL-signbank,Signbank/BSL-signbank,Signbank/BSL-signbank,Signbank/BSL-signbank,Signbank/Auslan-signbank,Signbank/Auslan-signbank,Signbank/Auslan-signbank |
aa48fd54c8b4b0139f562de199a5f0c5dd4c5db8 | gitdir/__init__.py | gitdir/__init__.py | import pathlib
GITDIR = pathlib.Path('/opt/git') #TODO check permissions
| import pathlib
GITDIR = pathlib.Path(os.environ.get('GITDIR', '/opt/git')) #TODO check permissions
| Add support for GITDIR envar | Add support for GITDIR envar
| Python | mit | fenhl/gitdir |
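The new line above reads os.environ, but as recorded the snippet never imports os, so a standalone version of the pattern needs that import as well. A sketch of the environment-variable override with the same /opt/git fallback:

import os
import pathlib

# GITDIR environment variable overrides the default location.
GITDIR = pathlib.Path(os.environ.get('GITDIR', '/opt/git'))
print(GITDIR)                                               # /opt/git unless GITDIR is set

os.environ['GITDIR'] = '/tmp/git-mirrors'                   # hypothetical override
print(pathlib.Path(os.environ.get('GITDIR', '/opt/git')))   # /tmp/git-mirrors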
68046b638b5d2a9d9a0c9c588a6c2b833442e01b | plinth/modules/ikiwiki/forms.py | plinth/modules/ikiwiki/forms.py | #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Forms for configuring ikiwiki
"""
from django import forms
from django.utils.translation import ugettext_lazy as _
class IkiwikiCreateForm(forms.Form):
"""Form to create a wiki or blog."""
site_type = forms.ChoiceField(
label=_('Type'),
choices=[('wiki', 'Wiki'), ('blog', 'Blog')])
name = forms.CharField(label=_('Name'))
admin_name = forms.CharField(label=_('Admin Account Name'))
admin_password = forms.CharField(
label=_('Admin Account Password'),
widget=forms.PasswordInput())
| #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Forms for configuring ikiwiki
"""
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.core.validators import RegexValidator
class IkiwikiCreateForm(forms.Form):
"""Form to create a wiki or blog."""
site_type = forms.ChoiceField(
label=_('Type'),
choices=[('wiki', 'Wiki'), ('blog', 'Blog')])
name = forms.CharField(label=_('Name'),
validators=[RegexValidator(regex='^[a-zA-Z0-9]+$')])
admin_name = forms.CharField(label=_('Admin Account Name'))
admin_password = forms.CharField(
label=_('Admin Account Password'),
widget=forms.PasswordInput())
| Allow only alphanumerics in wiki/blog name | ikiwiki: Allow only alphanumerics in wiki/blog name
| Python | agpl-3.0 | harry-7/Plinth,kkampardi/Plinth,freedomboxtwh/Plinth,vignanl/Plinth,kkampardi/Plinth,harry-7/Plinth,kkampardi/Plinth,vignanl/Plinth,vignanl/Plinth,vignanl/Plinth,kkampardi/Plinth,vignanl/Plinth,freedomboxtwh/Plinth,freedomboxtwh/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,harry-7/Plinth,kkampardi/Plinth |
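The validator added above limits wiki/blog names to ASCII letters and digits. Outside of Django the same rule can be made concrete with plain re (a hypothetical helper, shown only to illustrate the regex's intent):

import re

NAME_RE = re.compile(r'^[a-zA-Z0-9]+$')

def is_valid_site_name(name):
    """True only for non-empty, purely alphanumeric names."""
    return bool(NAME_RE.match(name))

assert is_valid_site_name("mywiki42")
assert not is_valid_site_name("my wiki")    # spaces rejected
assert not is_valid_site_name("../etc")     # path-like input rejected
assert not is_valid_site_name("")           # empty rejected
print("name validation ok")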
2aed2eb4a1db5fba9d161a679c147f2260fb0780 | msg/serializers.py | msg/serializers.py | from django.contrib.auth.models import User, Group
from rest_framework import serializers
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'groups')
class GroupSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Group
fields = ('url', 'name')
from msg.models import Msg
class MsgSerializer( serializers.ModelSerializer ):
class Meta:
model = Msg
fields = ('frame_id','timestamp','source','channel','signature','body')
| from django.contrib.auth.models import User, Group
from rest_framework import serializers
class UnixEpochDateField(serializers.DateTimeField):
def to_native(self, value):
""" Return epoch time for a datetime object or ``None``"""
import time
try:
return int(time.mktime(value.timetuple()))
except (AttributeError, TypeError):
return None
def from_native(self, value):
import datetime
return datetime.datetime.fromtimestamp(int(value))
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'groups')
class GroupSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Group
fields = ('url', 'name')
from msg.models import Msg
class MsgSerializer( serializers.ModelSerializer ):
    epoch = UnixEpochDateField(source='timestamp')
class Meta:
model = Msg
fields = ('frame_id','timestamp','source','channel','signature','body','epoch')
| Add epoch conversion to timestamp | Add epoch conversion to timestamp | Python | mit | orisi/fastcast |
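The UnixEpochDateField added above converts between datetimes and integer Unix timestamps in both directions. The underlying round trip can be sketched with the standard library alone (illustrative helpers, not the serializer field itself):

import datetime
import time

def to_epoch(value):
    """Seconds since the Unix epoch for a naive local datetime, or None."""
    try:
        return int(time.mktime(value.timetuple()))
    except (AttributeError, TypeError):
        return None

def from_epoch(value):
    return datetime.datetime.fromtimestamp(int(value))

stamp = datetime.datetime(2014, 7, 1, 12, 30, 0)
epoch = to_epoch(stamp)
assert from_epoch(epoch) == stamp    # round-trips to the second
assert to_epoch(None) is None        # mirrors the field's "or None" behaviour
print(epoch)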
65fcfbfae9ef1a68d324aea932f983f7edd00cdf | mopidy/__init__.py | mopidy/__init__.py | import logging
from mopidy import settings as raw_settings
logger = logging.getLogger('mopidy')
def get_version():
return u'0.1.dev'
def get_mpd_protocol_version():
return u'0.16.0'
def get_class(name):
module_name = name[:name.rindex('.')]
class_name = name[name.rindex('.') + 1:]
logger.info('Loading: %s from %s', class_name, module_name)
module = __import__(module_name, globals(), locals(), [class_name], -1)
class_object = getattr(module, class_name)
return class_object
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
| import logging
from multiprocessing.reduction import reduce_connection
import pickle
from mopidy import settings as raw_settings
logger = logging.getLogger('mopidy')
def get_version():
return u'0.1.dev'
def get_mpd_protocol_version():
return u'0.16.0'
def get_class(name):
module_name = name[:name.rindex('.')]
class_name = name[name.rindex('.') + 1:]
logger.info('Loading: %s from %s', class_name, module_name)
module = __import__(module_name, globals(), locals(), [class_name], -1)
class_object = getattr(module, class_name)
return class_object
def pickle_connection(connection):
return pickle.dumps(reduce_connection(connection))
def unpickle_connection(pickled_connection):
# From http://stackoverflow.com/questions/1446004
unpickled = pickle.loads(pickled_connection)
func = unpickled[0]
args = unpickled[1]
return func(*args)
class SettingsError(Exception):
pass
class Settings(object):
def __getattr__(self, attr):
if not hasattr(raw_settings, attr):
raise SettingsError(u'Setting "%s" is not set.' % attr)
value = getattr(raw_settings, attr)
if type(value) != bool and not value:
raise SettingsError(u'Setting "%s" is empty.' % attr)
return value
settings = Settings()
| Add util functions for pickling and unpickling multiprocessing.Connection | Add util functions for pickling and unpickling multiprocessing.Connection
| Python | apache-2.0 | SuperStarPL/mopidy,pacificIT/mopidy,swak/mopidy,hkariti/mopidy,dbrgn/mopidy,jmarsik/mopidy,diandiankan/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,quartz55/mopidy,ali/mopidy,pacificIT/mopidy,adamcik/mopidy,rawdlite/mopidy,swak/mopidy,dbrgn/mopidy,jodal/mopidy,hkariti/mopidy,priestd09/mopidy,dbrgn/mopidy,jmarsik/mopidy,quartz55/mopidy,liamw9534/mopidy,mokieyue/mopidy,mokieyue/mopidy,tkem/mopidy,jcass77/mopidy,woutervanwijk/mopidy,dbrgn/mopidy,ali/mopidy,SuperStarPL/mopidy,woutervanwijk/mopidy,ali/mopidy,swak/mopidy,diandiankan/mopidy,ZenithDK/mopidy,abarisain/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,quartz55/mopidy,bacontext/mopidy,hkariti/mopidy,vrs01/mopidy,ZenithDK/mopidy,vrs01/mopidy,glogiotatidis/mopidy,abarisain/mopidy,jodal/mopidy,adamcik/mopidy,rawdlite/mopidy,vrs01/mopidy,adamcik/mopidy,jcass77/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,rawdlite/mopidy,tkem/mopidy,jcass77/mopidy,jmarsik/mopidy,mopidy/mopidy,priestd09/mopidy,ZenithDK/mopidy,ali/mopidy,kingosticks/mopidy,tkem/mopidy,bencevans/mopidy,priestd09/mopidy,swak/mopidy,mopidy/mopidy,bacontext/mopidy,bencevans/mopidy,diandiankan/mopidy,mopidy/mopidy,pacificIT/mopidy,liamw9534/mopidy,SuperStarPL/mopidy,tkem/mopidy,vrs01/mopidy,bacontext/mopidy,bencevans/mopidy,quartz55/mopidy,bencevans/mopidy,diandiankan/mopidy,bacontext/mopidy,mokieyue/mopidy,hkariti/mopidy,glogiotatidis/mopidy,rawdlite/mopidy,ZenithDK/mopidy,jodal/mopidy,kingosticks/mopidy |
a8bd6e86583b72211f028ecb51df2ee27550b258 | submit.py | submit.py | import json
import requests
import argparse
parser = argparse.ArgumentParser(
description="Upload submission from submit.cancergenetrust.org")
parser.add_argument('file', nargs='?', default="submission.json",
help="Path to json file to submit")
args = parser.parse_args()
with open(args.file) as f:
submission = json.loads(f.read())
submission["clinical"]["CGT Public ID"] = submission["patientId"]
r = requests.post("http://localhost:5000/v0/submissions?publish=true",
files=[("files[]",
("foundationone.json",
json.dumps(submission["genomic"], sort_keys=True)))],
data=submission["clinical"])
print(r.text)
assert(r.status_code == requests.codes.ok)
| import json
import requests
import argparse
parser = argparse.ArgumentParser(
description="Upload submission from submit.cancergenetrust.org")
parser.add_argument('file', nargs='?', default="submission.json",
help="Path to json file to submit")
args = parser.parse_args()
with open(args.file) as f:
submission = json.loads(f.read())
submission["clinical"]["CGT Public ID"] = submission["patientId"]
if submission["genomic"]:
print("Submitting clinical and genomic data")
r = requests.post("http://localhost:5000/v0/submissions?publish=true",
files=[("files[]",
("foundationone.json",
json.dumps(submission["genomic"], sort_keys=True)))],
data=submission["clinical"])
else:
print("No genomic data, submitting only clinical")
r = requests.post("http://localhost:5000/v0/submissions?publish=true",
data=submission["clinical"])
print(r.text)
assert(r.status_code == requests.codes.ok)
| Handle only clinical, no genomic, submission | Handle only clinical, no genomic, submission
| Python | apache-2.0 | ga4gh/CGT,ga4gh/CGT,ga4gh/CGT |
abf141415fb0c1fbb62c92db3afdc430218e2520 | mzgtfs/validate.py | mzgtfs/validate.py | """Validate a GTFS file."""
import argparse
import json
import feed
import validation
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='GTFS Info and JSON export')
parser.add_argument('filename', help='GTFS File')
parser.add_argument('--debug',
help='Show helpful debugging information',
action='store_true')
args = parser.parse_args()
validator = validation.ValidationReport()
feed = feed.Feed(args.filename, debug=args.debug)
feed.validate(validator=validator)
validator.report() | """Validate a GTFS file."""
import argparse
import json
import feed
import validation
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Validate a GTFS feed.')
parser.add_argument('filename', help='GTFS File')
parser.add_argument('--debug',
help='Show helpful debugging information',
action='store_true')
args = parser.parse_args()
validator = validation.ValidationReport()
feed = feed.Feed(args.filename, debug=args.debug)
feed.validate(validator=validator)
validator.report() | Fix copy and paste error | Fix copy and paste error
| Python | mit | transitland/mapzen-gtfs,brechtvdv/mapzen-gtfs,brennan-v-/mapzen-gtfs |
81904effd492e2b2cea64dc98b29033261ae8b62 | tests/generator_test.py | tests/generator_test.py | from fixture import GeneratorTest
from google.appengine.ext import testbed, ndb
class GeneratorTest(GeneratorTest):
def testLotsaModelsGenerated(self):
for klass in self.klasses:
k = klass._get_kind()
assert ndb.Model._lookup_model(k) == klass, klass
| from fixture import GeneratorTest
from google.appengine.ext import testbed, ndb
class GeneratorTest(GeneratorTest):
def testLotsaModelsGenerated(self):
for klass in self.klasses:
k = klass._get_kind()
assert ndb.Model._lookup_model(k) == klass, klass
assert len(self.klasses) > 100
| Check that we are creating Test Classes | Check that we are creating Test Classes
| Python | mit | talkiq/gaend,samedhi/gaend,talkiq/gaend,samedhi/gaend |
becd5721e9d6dcd1eb762b9b9e7089e7a90fcdf9 | python3.7-alpine3.7/app/main.py | python3.7-alpine3.7/app/main.py | def application(env, start_response):
start_response('200 OK', [('Content-Type', 'text/html')])
return [b"Hello World from a default Nginx uWSGI Python 3.6 app in a\
Docker container (default)"]
| def application(env, start_response):
start_response('200 OK', [('Content-Type', 'text/html')])
return [b"Hello World from a default Nginx uWSGI Python 3.7 app in a\
Docker container (default)"]
| Update default Python demo app to reflect actual Python version | Update default Python demo app to reflect actual Python version
| Python | apache-2.0 | tiangolo/uwsgi-nginx-docker,tiangolo/uwsgi-nginx-docker |
bc36a19d3bb1c07cbe2a44de88f227ef71c50b8c | notebooks/utils.py | notebooks/utils.py | def print_generated_sequence(g, num, *, sep=", "):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `g`.
"""
elems = [str(next(g)) for _ in range(num)]
sep_initial = "\n" if sep == "\n" else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
| def print_generated_sequence(g, num, *, sep=", ", seed=None):
"""
Helper function which prints a sequence of `num` items
produced by the random generator `g`.
"""
if seed:
g.reset(seed)
elems = [str(next(g)) for _ in range(num)]
sep_initial = "\n" if sep == "\n" else " "
print("Generated sequence:{}{}".format(sep_initial, sep.join(elems)))
| Allow passing seed directly to helper function | Allow passing seed directly to helper function
| Python | mit | maxalbert/tohu |
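The change above lets callers pass a seed so the generator is reset before printing, which makes the printed sequence reproducible. A self-contained sketch with a dummy generator (the real generators are assumed to expose reset() and iteration):

import random

class DummyGenerator:
    """Stand-in generator: yields random integers and can be reset to a seed."""
    def __init__(self):
        self._rnd = random.Random()

    def reset(self, seed):
        self._rnd.seed(seed)

    def __next__(self):
        return self._rnd.randint(0, 99)

def print_generated_sequence(g, num, *, sep=", ", seed=None):
    if seed:
        g.reset(seed)
    elems = [str(next(g)) for _ in range(num)]
    print("Generated sequence: " + sep.join(elems))

g = DummyGenerator()
print_generated_sequence(g, 5, seed=12345)
print_generated_sequence(g, 5, seed=12345)   # same seed, same five numbers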
44223235e5b8b0c49df564ae190927905de1f9a4 | plenario/worker.py | plenario/worker.py | from datetime import datetime
from flask import Flask
import plenario.tasks as tasks
def create_worker():
app = Flask(__name__)
app.config.from_object('plenario.settings')
app.url_map.strict_slashes = False
@app.route('/update/weather', methods=['POST'])
def weather():
return tasks.update_weather.delay().id
@app.route('/update/often', methods=['POST'])
def metar():
return tasks.update_metar.delay().id
@app.route('/update/<frequency>', methods=['POST'])
def update(frequency):
return tasks.frequency_update.delay(frequency).id
@app.route('/archive', methods=['POST'])
def archive():
return tasks.archive.delay(datetime.now()).id
@app.route('/resolve', methods=['POST'])
def resolve():
return tasks.resolve.delay().id
@app.route('/health', methods=['GET', 'POST'])
def check_health():
return tasks.health.delay().id
return app
| import os
from datetime import datetime
from flask import Flask
import plenario.tasks as tasks
def create_worker():
app = Flask(__name__)
app.config.from_object('plenario.settings')
app.url_map.strict_slashes = False
@app.route('/update/weather', methods=['POST'])
def weather():
return tasks.update_weather.delay().id
@app.route('/update/often', methods=['POST'])
def metar():
return tasks.update_metar.delay().id
@app.route('/update/<frequency>', methods=['POST'])
def update(frequency):
return tasks.frequency_update.delay(frequency).id
@app.route('/archive', methods=['POST'])
def archive():
return tasks.archive.delay(datetime.now()).id
@app.route('/resolve', methods=['POST'])
def resolve():
if not os.environ.get('PRIVATE'):
return 'hullo'
return tasks.resolve.delay().id
@app.route('/health', methods=['GET', 'POST'])
def check_health():
return tasks.health.delay().id
return app
| Add temporary check to block production resolve | Add temporary check to block production resolve
| Python | mit | UrbanCCD-UChicago/plenario,UrbanCCD-UChicago/plenario,UrbanCCD-UChicago/plenario |
2ec93f385e9eea63d42e17a2a777b459edf93816 | tools/debug_adapter.py | tools/debug_adapter.py | #!/usr/bin/python
import sys
if 'darwin' in sys.platform:
sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
sys.path.append('.')
import adapter
adapter.main.run_tcp_server(multiple=False)
| #!/usr/bin/python
import sys
if 'darwin' in sys.platform:
sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
sys.path.append('.')
import adapter
adapter.main.run_tcp_server()
| Update code for changed function. | Update code for changed function.
| Python | mit | vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb |
b20d59211701cbcb4d7600bce2d64e7f0f614ec0 | tvrenamr/tests/base.py | tvrenamr/tests/base.py | from os import makedirs
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import urlopenmock
class BaseTest(object):
files = 'tests/files'
def setup(self):
# if `file` isn't there, make it
if not exists(self.files):
makedirs(self.files)
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
| from os import mkdir
from os.path import abspath, dirname, exists, join
from shutil import rmtree
from tvrenamr.config import Config
from tvrenamr.main import TvRenamr
from tvrenamr.tests import urlopenmock
class BaseTest(object):
files = 'tests/files'
def setup(self):
# if `file` isn't there, make it
if not exists(self.files):
mkdir(self.files)
# absolute path to the file is pretty useful
self.path = abspath(dirname(__file__))
# build the file list
with open(join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
with open(abspath(join(self.files, fn.strip())), 'w') as f:
f.write('')
# instantiate tvr
self.config = Config(join(self.path, 'config.yml'))
self.tv = TvRenamr(self.files, self.config)
def teardown(self):
rmtree(self.files)
| Use mkdir instead of makedirs because we don't need parent directories made | Use mkdir instead of makedirs because we don't need parent directories made
| Python | mit | ghickman/tvrenamr,wintersandroid/tvrenamr |
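The swap above is safe because the test setup only ever creates one directory level; os.mkdir fails if any parent is missing, while os.makedirs creates the whole chain. A quick standalone comparison inside a temporary directory:

import os
import tempfile

with tempfile.TemporaryDirectory() as root:
    nested = os.path.join(root, "parent", "child")

    try:
        os.mkdir(nested)                      # fails: "parent" does not exist yet
    except FileNotFoundError as exc:
        print("mkdir needs the parent first:", exc)

    os.makedirs(nested)                       # creates parent/ and child/ in one go
    os.mkdir(os.path.join(root, "single"))    # fine when only one level is missing
    print(sorted(os.listdir(root)))           # ['parent', 'single']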
143b74a2c6f99d2d92ac85310351327ffb630c1e | uscampgrounds/admin.py | uscampgrounds/admin.py | from django.contrib.gis import admin
from uscampgrounds.models import *
class CampgroundAdmin(admin.OSMGeoAdmin):
list_display = ('name', 'campground_code', 'campground_type', 'phone', 'sites', 'elevation', 'hookups', 'amenities')
list_filter = ('campground_type',)
admin.site.register(Campground, CampgroundAdmin)
| from django.contrib.gis import admin
from uscampgrounds.models import *
class CampgroundAdmin(admin.OSMGeoAdmin):
list_display = ('name', 'campground_code', 'campground_type', 'phone', 'sites', 'elevation', 'hookups', 'amenities')
list_filter = ('campground_type',)
search_fields = ('name',)
admin.site.register(Campground, CampgroundAdmin)
| Allow searching campgrounds by name for convenience. | Allow searching campgrounds by name for convenience.
| Python | bsd-3-clause | adamfast/geodjango-uscampgrounds,adamfast/geodjango-uscampgrounds |
c827afe434d1d106ad7747e0c094188b8d5cc9a9 | plumeria/plugins/bing_images.py | plumeria/plugins/bing_images.py | from aiohttp import BasicAuth
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", ""i", category="Search")
@rate_limit()
async def image(message):
"""
Search Bing for an image.
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '10'),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(data['results'][0]['MediaUrl'])
else:
raise CommandError("no results found")
| from aiohttp import BasicAuth
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.message import Response
from plumeria.util import http
from plumeria.util.ratelimit import rate_limit
SEARCH_URL = "https://api.datamarket.azure.com/Bing/Search/v1/Image"
api_key = config.create("bing", "key",
fallback="unset",
comment="An API key from Bing")
@commands.register("image", "images", "i", category="Search")
@rate_limit()
async def image(message):
"""
Search Bing for an image.
"""
q = message.content.strip()
if not q:
raise CommandError("Search term required!")
r = await http.get(SEARCH_URL, params=[
('$format', 'json'),
('$top', '10'),
('Query', "'{}'".format(q)),
], auth=BasicAuth("", password=api_key()))
data = r.json()['d']
if len(data['results']):
return Response(data['results'][0]['MediaUrl'])
else:
raise CommandError("no results found")
| Fix typo in Bing images. | Fix typo in Bing images.
| Python | mit | sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria |
d9024e4db0489b141fec9b96913c94a5d583f086 | backend/scripts/mktemplate.py | backend/scripts/mktemplate.py | #!/usr/bin/env python
import json
import rethinkdb as r
import sys
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
parser.add_option("-f", "--file", dest="filename",
help="json file", type="string")
(options, args) = parser.parse_args()
if options.filename is None:
print "You must specify json file"
sys.exit(1)
conn = r.connect('localhost', int(options.port), db='materialscommons')
json_data = open(options.filename)
data = json.load(json_data)
existing = r.table('templates').get(data['id']).run(conn)
if existing:
r.table('templates').get(data['id']).delete().run(conn)
r.table('templates').insert(data).run(conn)
print 'template deleted and re-inserted into the database'
else:
r.table('templates').insert(data).run(conn)
print 'template inserted into the database'
| #!/usr/bin/env python
import json
import rethinkdb as r
import sys
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
parser.add_option("-f", "--file", dest="filename",
help="json file", type="string")
(options, args) = parser.parse_args()
if options.filename is None:
print "You must specify json file"
sys.exit(1)
conn = r.connect('localhost', int(options.port), db='materialscommons')
json_data = open(options.filename)
print "Loading template file: %s" % (options.filename)
data = json.load(json_data)
existing = r.table('templates').get(data['id']).run(conn)
if existing:
r.table('templates').get(data['id']).delete().run(conn)
r.table('templates').insert(data).run(conn)
print 'template deleted and re-inserted into the database'
else:
r.table('templates').insert(data).run(conn)
print 'template inserted into the database'
| Update script to show which file it is loading. | Update script to show which file it is loading.
| Python | mit | materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org |
8e8d80e744c99ab1c5552057899bf5470d751a29 | linked_list.py | linked_list.py | #!/usr/bin/env python
from __future__ import print_function
class Node(object):
def __init__(self, value):
self._val = value
self._next = None
@property
def next(self):
return self._next
@next.setter
def next(self, value):
self._next = value
@property
def val(self):
return self._val
class LinkedList(object):
def __init__(self):
self._head = None
self.second = None
def insert(self, val):
self.second, self._head = self._head, Node(val)
self._head.next = self.second
def pop(self):
self._head = self._head.next
@property
def head(self):
return self._head
l = LinkedList()
l.insert('Nick')
l.insert('Constantine')
l.insert('Mark')
print(l.head.val)
print(l.head.next.val)
l.pop()
print(l.head.val)
print(l.head.next.val)
| #!/usr/bin/env python
from __future__ import print_function
class Node(object):
def __init__(self, value):
self._val = value
self._next = None
@property
def next(self):
return self._next
@next.setter
def next(self, value):
self._next = value
@property
def val(self):
return self._val
class LinkedList(object):
def __init__(self):
self._head = None
self.second = None
def insert(self, val):
self.second, self._head = self._head, Node(val)
self._head.next = self.second
def pop(self):
self._head = self._head.next
def size(self):
if not self._head:
return 0
else:
i = 0
z = 1
try:
a = self._head.next
except AttributeError:
return i
while z != 0:
try:
a = a.next
except AttributeError:
z = 0
i += 1
return i
@property
def head(self):
return self._head
l = LinkedList()
l.insert('Nick')
print(l.size())
| Add semi-working size() function v1 | Nick: Add semi-working size() function v1
| Python | mit | constanthatz/data-structures |
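The new size() above counts nodes by walking until an AttributeError, which the author calls semi-working. A plainer walk (sketched against a stripped-down node class, not the classes in the row) just follows next until it is None:

class Node:
    def __init__(self, value, nxt=None):
        self.val = value
        self.next = nxt

def size(head):
    """Count nodes by following .next until the chain ends."""
    count = 0
    node = head
    while node is not None:
        count += 1
        node = node.next
    return count

head = Node('Mark', Node('Constantine', Node('Nick')))
assert size(head) == 3
assert size(None) == 0
print(size(head))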
64ae41be94374b0dae33d37ea1e2f20b233dd809 | moocng/peerreview/managers.py | moocng/peerreview/managers.py | # Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(
kq__unit__course=course).order_by('kq__unit__order')
| # Copyright 2013 Rooter Analysis S.L.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
class PeerReviewAssignmentManager(models.Manager):
def from_course(self, course):
return self.get_query_set().filter(
kq__unit__course=course).order_by(
'kq__unit__order', 'kq__order')
| Sort by kq too when returning peer review assignments | Sort by kq too when returning peer review assignments
| Python | apache-2.0 | OpenMOOC/moocng,OpenMOOC/moocng,GeographicaGS/moocng,GeographicaGS/moocng,GeographicaGS/moocng,GeographicaGS/moocng |
6252706583be20abb3eb9f541d99a212489daf00 | addons/dataverse/settings/defaults.py | addons/dataverse/settings/defaults.py | DEFAULT_HOSTS = [
'dataverse.harvard.edu', # Harvard PRODUCTION server
'dataverse.lib.virginia.edu' # University of Virginia server
]
REQUEST_TIMEOUT = 15
| DEFAULT_HOSTS = [
'dataverse.harvard.edu', # Harvard PRODUCTION server
'dataverse.lib.virginia.edu' # University of Virginia server
]
REQUEST_TIMEOUT = 60
| Increase request timeout for dataverse | Increase request timeout for dataverse
Dataverse responses are slow at the moment, so we need to
give them more time
| Python | apache-2.0 | binoculars/osf.io,leb2dg/osf.io,baylee-d/osf.io,mfraezz/osf.io,pattisdr/osf.io,mattclark/osf.io,Johnetordoff/osf.io,adlius/osf.io,mfraezz/osf.io,sloria/osf.io,brianjgeiger/osf.io,binoculars/osf.io,mattclark/osf.io,Johnetordoff/osf.io,chennan47/osf.io,saradbowman/osf.io,adlius/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,adlius/osf.io,caseyrollins/osf.io,erinspace/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,laurenrevere/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,felliott/osf.io,mattclark/osf.io,leb2dg/osf.io,chennan47/osf.io,Johnetordoff/osf.io,icereval/osf.io,laurenrevere/osf.io,saradbowman/osf.io,adlius/osf.io,chennan47/osf.io,aaxelb/osf.io,felliott/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,caseyrollins/osf.io,felliott/osf.io,aaxelb/osf.io,aaxelb/osf.io,baylee-d/osf.io,cslzchen/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,felliott/osf.io,icereval/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,leb2dg/osf.io,icereval/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,brianjgeiger/osf.io,cslzchen/osf.io |
3dd5cd27963a0cfeb446a36fcd50c05e7c715eb3 | cyder/api/v1/endpoints/api.py | cyder/api/v1/endpoints/api.py | from django.utils.decorators import classonlymethod
from django.views.decorators.csrf import csrf_exempt
from rest_framework import serializers, viewsets
NestedAVFields = ['id', 'attribute', 'value']
class CommonAPISerializer(serializers.ModelSerializer):
pass
class CommonAPINestedAVSerializer(serializers.ModelSerializer):
attribute = serializers.SlugRelatedField(slug_field='name')
class CommonAPIMeta:
pass
class CommonAPIViewSet(viewsets.ModelViewSet):
def __init__(self, *args, **kwargs):
self.queryset = self.model.objects.all()
super(CommonAPIViewSet, self).__init__(*args, **kwargs)
#@classonlymethod
#@csrf_exempt
#def as_view(cls, *args, **kwargs):
# super(CommonAPIViewSet, cls).as_view(*args, **kwargs) | from rest_framework import serializers, viewsets
NestedAVFields = ['id', 'attribute', 'value']
class CommonAPISerializer(serializers.ModelSerializer):
pass
class CommonAPINestedAVSerializer(serializers.ModelSerializer):
attribute = serializers.SlugRelatedField(slug_field='name')
class CommonAPIMeta:
pass
class CommonAPIViewSet(viewsets.ModelViewSet):
def __init__(self, *args, **kwargs):
self.queryset = self.model.objects.all()
super(CommonAPIViewSet, self).__init__(*args, **kwargs)
| Fix earlier folly (commented and useless code) | Fix earlier folly (commented and useless code)
| Python | bsd-3-clause | akeym/cyder,drkitty/cyder,OSU-Net/cyder,akeym/cyder,akeym/cyder,murrown/cyder,murrown/cyder,murrown/cyder,OSU-Net/cyder,OSU-Net/cyder,murrown/cyder,drkitty/cyder,OSU-Net/cyder,zeeman/cyder,zeeman/cyder,drkitty/cyder,zeeman/cyder,drkitty/cyder,akeym/cyder,zeeman/cyder |
fd5cad381e8b821bfabbefc9deb4b8a4531844f6 | rnacentral_pipeline/rnacentral/notify/slack.py | rnacentral_pipeline/rnacentral/notify/slack.py | """
Send a notification to slack.
NB: The webhook should be configured in the nextflow profile
"""
import os
import requests
def send_notification(title, message, plain=False):
"""
Send a notification to the configured slack webhook.
"""
SLACK_WEBHOOK = os.getenv('SLACK_WEBHOOK')
if SLACK_WEBHOOK is None:
raise SystemExit("SLACK_WEBHOOK environment variable not defined")
if plain:
slack_json = {
"text" : title + ': ' + message
}
else:
slack_json = {
"text" : title,
"blocks" : [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": message
},
},
]
}
try:
response = requests.post(SLACK_WEBHOOK,
json=slack_json,
headers={'Content-Type':'application/json'}
)
response.raise_for_status()
except Exception as request_exception:
raise SystemExit from request_exception
| """
Send a notification to slack.
NB: The webhook should be configured in the nextflow profile
"""
import os
import requests
def send_notification(title, message, plain=False):
"""
Send a notification to the configured slack webhook.
"""
SLACK_WEBHOOK = os.getenv('SLACK_WEBHOOK')
if SLACK_WEBHOOK is None:
try:
from rnacentral_pipeline.secrets import SLACK_WEBHOOK
except:
raise SystemExit("SLACK_WEBHOOK environment variable not defined, and couldn't find a secrets file")
if plain:
slack_json = {
"text" : title + ': ' + message
}
else:
slack_json = {
"text" : title,
"blocks" : [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": message
},
},
]
}
try:
response = requests.post(SLACK_WEBHOOK,
json=slack_json,
headers={'Content-Type':'application/json'}
)
response.raise_for_status()
except Exception as request_exception:
raise SystemExit from request_exception
| Add a secrets file in rnac notify | Add a secrets file in rnac notify
Nextflow doesn't propagate environment variables from the profile into
the event handler closures. This is the simplest workaround for that.
secrets.py should be on the cluster and symlinked into
rnacentral_pipeline
| Python | apache-2.0 | RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline |
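The record above only shows the import fallback; the contents of secrets.py are not part of the commit. A minimal sketch, assuming the module only needs to expose a SLACK_WEBHOOK string (the URL below is a placeholder):
# rnacentral_pipeline/secrets.py -- hypothetical sketch, not taken from the commit above.
# The only name send_notification() falls back to is SLACK_WEBHOOK.
SLACK_WEBHOOK = "https://hooks.slack.com/services/T0000000/B0000000/XXXXXXXX"  # placeholder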
5df350254e966007f80f7a14fde29a8c93316bb3 | tests/rules/test_git_push.py | tests/rules/test_git_push.py | import pytest
from thefuck.rules.git_push import match, get_new_command
from tests.utils import Command
@pytest.fixture
def stderr():
return '''fatal: The current branch master has no upstream branch.
To push the current branch and set the remote as upstream, use
git push --set-upstream origin master
'''
def test_match(stderr):
assert match(Command('git push', stderr=stderr))
assert match(Command('git push master', stderr=stderr))
assert not match(Command('git push master'))
assert not match(Command('ls', stderr=stderr))
def test_get_new_command(stderr):
assert get_new_command(Command('git push', stderr=stderr))\
== "git push --set-upstream origin master"
| import pytest
from thefuck.rules.git_push import match, get_new_command
from tests.utils import Command
@pytest.fixture
def stderr():
return '''fatal: The current branch master has no upstream branch.
To push the current branch and set the remote as upstream, use
git push --set-upstream origin master
'''
def test_match(stderr):
assert match(Command('git push', stderr=stderr))
assert match(Command('git push master', stderr=stderr))
assert not match(Command('git push master'))
assert not match(Command('ls', stderr=stderr))
def test_get_new_command(stderr):
assert get_new_command(Command('git push', stderr=stderr))\
== "git push --set-upstream origin master"
assert get_new_command(Command('git push --quiet', stderr=stderr))\
== "git push --set-upstream origin master --quiet"
| Check arguments are preserved in git_push | Check arguments are preserved in git_push
| Python | mit | scorphus/thefuck,mlk/thefuck,Clpsplug/thefuck,SimenB/thefuck,nvbn/thefuck,Clpsplug/thefuck,SimenB/thefuck,mlk/thefuck,nvbn/thefuck,scorphus/thefuck |
3dad25bd909d4396129c7fe4aa848770119f0db7 | app/util/danger.py | app/util/danger.py | from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from flask import request as flask_request
from flask import abort
import logging
import os
def gen_auth_token(id,expiration=10000):
"""Generate auth token"""
s = Serializer(os.environ['API_KEY'],expires_in=expiration)
return s.dumps({'id':id})
def verify_auth_token(token):
"""Verify auth token"""
try:
s = Serializer(os.environ['API_KEY'])
except KeyError:
abort(500)
# check the token and throw respective exception
try:
user = s.loads(token)
except Exception as e:
logging.info(e)
abort(401)
return user
def enable_auth(func):
"""Decorator to enable auth"""
def wrapper(*args,**kwargs):
re = flask_request
# deny if not authorized
if not re.headers.has_key("Authorization"):
abort(401)
auth = re.headers.get("Authorization").split(" ")
# proces token
validate = verify_auth_token(auth[1])
logging.debug("Valid auth! Yay")
return func(*args,**kwargs)
return wrapper | from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from flask import request as flask_request
from flask import abort
import logging
import os
def gen_auth_token(id,expiration=10000):
"""Generate auth token"""
try:
s = Serializer(os.environ['API_KEY'],expires_in=expiration)
except KeyError:
logging.fatal("No API_KEY env")
abort(500)
return s.dumps({'id':id})
def verify_auth_token(token):
"""Verify auth token"""
try:
s = Serializer(os.environ['API_KEY'])
except KeyError:
logging.fatal("No API_KEY env")
abort(500)
# check the token and throw respective exception
try:
user = s.loads(token)
except Exception as e:
logging.info(e)
abort(401)
return user
def enable_auth(func):
"""Decorator to enable auth"""
def wrapper(*args,**kwargs):
re = flask_request
# deny if not authorized
if not re.headers.has_key("Authorization"):
logging.info("No token found")
abort(401)
auth = re.headers.get("Authorization").split(" ")
# proces token
validate = verify_auth_token(auth[1])
logging.debug("Valid auth! Yay")
return func(*args,**kwargs)
return wrapper | Add exception catch in gen_auth_token and add better logging messages | Add exception catch in gen_auth_token and add better logging messages
| Python | mit | tforrest/soda-automation,tforrest/soda-automation |
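A usage sketch for the helpers above; the Flask app object, import path and route names are assumptions, not part of the record. gen_auth_token() needs the API_KEY environment variable at call time, and the client is expected to send an "Authorization: Bearer <token>" style header.
# Illustrative wiring only -- app, routes and import path are assumptions.
from flask import Flask, jsonify
from app.util.danger import gen_auth_token, enable_auth

app = Flask(__name__)

@app.route("/token/<int:user_id>")
def issue_token(user_id):
    # Returns an itsdangerous-signed payload containing {'id': user_id}.
    return jsonify(token=gen_auth_token(user_id).decode("utf-8"))

@app.route("/protected")
@enable_auth
def protected():
    # Only reached when verify_auth_token() accepts the second part of the header.
    return jsonify(ok=True)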
c09a8ce5bb47db4ea4381925ec07199415ae5c39 | spacy/tests/integration/test_load_languages.py | spacy/tests/integration/test_load_languages.py | # encoding: utf8
from __future__ import unicode_literals
from ...fr import French
def test_load_french():
nlp = French()
doc = nlp(u'Parlez-vous français?')
| # encoding: utf8
from __future__ import unicode_literals
from ...fr import French
def test_load_french():
nlp = French()
doc = nlp(u'Parlez-vous français?')
assert doc[0].text == u'Parlez'
assert doc[1].text == u'-'
    assert doc[2].text == u'vous'
assert doc[3].text == u'français'
assert doc[4].text == u'?'
| Add test for french tokenizer | Add test for french tokenizer
| Python | mit | raphael0202/spaCy,recognai/spaCy,raphael0202/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,raphael0202/spaCy,banglakit/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,banglakit/spaCy,recognai/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,recognai/spaCy,banglakit/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,explosion/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,honnibal/spaCy,aikramer2/spaCy,explosion/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,banglakit/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,banglakit/spaCy,recognai/spaCy,explosion/spaCy,honnibal/spaCy,explosion/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy |
bd89dc8f6812ff824417875c9375499f331bf5e4 | scripts/maf_limit_to_species.py | scripts/maf_limit_to_species.py | #!/usr/bin/env python2.3
"""
Read a maf file from stdin and write out a new maf with only blocks having all
of the required in species, after dropping any other species and removing
columns containing only gaps.
usage: %prog species,species2,... < maf
"""
import psyco_full
import bx.align.maf
import copy
import sys
from itertools import *
def main():
species = sys.argv[1].split( ',' )
maf_reader = bx.align.maf.Reader( sys.stdin )
maf_writer = bx.align.maf.Writer( sys.stdout )
for m in maf_reader:
new_components = []
for comp in m.components:
if comp.src.split( '.' )[0] in species:
new_components.append( comp )
m.components = new_components
if len( m.components ) > 1:
maf_writer.write( m )
maf_reader.close()
maf_writer.close()
if __name__ == "__main__":
main()
| #!/usr/bin/env python2.3
"""
Read a maf file from stdin and write out a new maf with only blocks having all
of the required in species, after dropping any other species and removing
columns containing only gaps.
usage: %prog species,species2,... < maf
"""
import psyco_full
import bx.align.maf
import copy
import sys
from itertools import *
def main():
species = sys.argv[1].split( ',' )
maf_reader = bx.align.maf.Reader( sys.stdin )
maf_writer = bx.align.maf.Writer( sys.stdout )
for m in maf_reader:
new_components = []
for comp in m.components:
if comp.src.split( '.' )[0] in species:
new_components.append( comp )
m.components = new_components
m.remove_all_gap_columns()
if len( m.components ) > 1:
maf_writer.write( m )
maf_reader.close()
maf_writer.close()
if __name__ == "__main__":
main()
| Remove all-gap columns after removing rows of the alignment | Remove all-gap columns after removing rows of the alignment
| Python | mit | uhjish/bx-python,uhjish/bx-python,uhjish/bx-python |
b718c1d817e767c336654001f3aaea5d7327625a | wsgi_intercept/requests_intercept.py | wsgi_intercept/requests_intercept.py | """Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
| """Intercept HTTP connections that use `requests <http://docs.python-requests.org/en/latest/>`_.
"""
import sys
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
def __init__(self, *args, **kwargs):
if 'strict' in kwargs and sys.version_info > (3, 0):
kwargs.pop('strict')
WSGI_HTTPConnection.__init__(self, *args, **kwargs)
HTTPConnection.__init__(self, *args, **kwargs)
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
def __init__(self, *args, **kwargs):
if 'strict' in kwargs and sys.version_info > (3, 0):
kwargs.pop('strict')
WSGI_HTTPSConnection.__init__(self, *args, **kwargs)
HTTPSConnection.__init__(self, *args, **kwargs)
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
| Deal with request's urllib3 being annoying about 'strict' | Deal with request's urllib3 being annoying about 'strict'
These changes are required to get tests to pass in python3.4 (and
presumably others).
This is entirely code from @sashahart, who had done the work earlier
to deal with with some Debian related issues uncovered by @thomasgoirand.
These changes will probably mean the debian packages will need to be
updated when the next version is released.
| Python | mit | sileht/python3-wsgi-intercept,cdent/wsgi-intercept |
4f9ef0c4690a3d99e045c0ad347023dba3733bd0 | doc/filter-sectionnumbers.py | doc/filter-sectionnumbers.py | # remove section numbers for subheadings
# Based on Wagner Macedo's filter.py posted at
# https://groups.google.com/forum/#!msg/pandoc-discuss/RUC-tuu_qf0/h-H3RRVt1coJ
import pandocfilters as pf
sec = 0
def do_filter(k, v, f, m):
global sec
if sec > 3 or (k == "Header" and v[0] < 3):
return []
if k == "Header" and v[0] > 2:
sec += 1
v[1][1].append('unnumbered')
return pf.Header(v[0], v[1], v[2])
if __name__ == "__main__":
pf.toJSONFilter(do_filter)
| # remove section numbers for subheadings
# Based on Wagner Macedo's filter.py posted at
# https://groups.google.com/forum/#!msg/pandoc-discuss/RUC-tuu_qf0/h-H3RRVt1coJ
import pandocfilters as pf
sec = 0
def do_filter(k, v, f, m):
global sec
if sec > 2 or (k == "Header" and v[0] < 3):
return []
if k == "Header" and v[0] > 2:
sec += 1
v[1][1].append('unnumbered')
return pf.Header(v[0], v[1], v[2])
if __name__ == "__main__":
pf.toJSONFilter(do_filter)
| Reduce changelog excerpt for webpage and pdf | Reduce changelog excerpt for webpage and pdf
| Python | apache-2.0 | wilsonCernWq/ospray,ospray/OSPRay,ospray/OSPRay,ospray/OSPRay,wilsonCernWq/ospray,wilsonCernWq/ospray,ospray/OSPRay |
2843052a222541e3b7ce45fa633f5df61b10a809 | test/oracle.py | test/oracle.py | import qnd
import tensorflow as tf
def model_fn(x, y):
return (y,
0.0,
tf.contrib.framework.get_or_create_global_step().assign_add())
def input_fn(q):
shape = (100,)
return tf.zeros(shape, tf.float32), tf.ones(shape, tf.int32)
train_and_evaluate = qnd.def_train_and_evaluate()
def main():
train_and_evaluate(model_fn, input_fn)
if __name__ == "__main__":
main()
| import qnd
import tensorflow as tf
def model_fn(x, y):
return (y,
0.0,
tf.contrib.framework.get_or_create_global_step().assign_add())
def input_fn(q):
shape = (100,)
return tf.zeros(shape, tf.float32), tf.ones(shape, tf.int32)
train_and_evaluate = qnd.def_train_and_evaluate(distributed=True)
def main():
train_and_evaluate(model_fn, input_fn)
if __name__ == "__main__":
main()
| Use distributed flag for xfail test | Use distributed flag for xfail test
| Python | unlicense | raviqqe/tensorflow-qnd,raviqqe/tensorflow-qnd |
bf7b8df92fb1cc16fccefe201eefc0ed853eac5d | server/api/serializers/rides.py | server/api/serializers/rides.py | import requests
from django.conf import settings
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from server.api.serializers.chapters import ChapterSerializer
from .riders import RiderSerializer
from server.core.models.rides import Ride, RideRiders
class RideSerializer(serializers.ModelSerializer):
chapter = ChapterSerializer()
riders = RiderSerializer(source='registered_riders', many=True, read_only=True)
class Meta:
model = Ride
fields = ('id', 'name', 'slug', 'strapline', 'description_html', 'start_location', 'end_location', 'start_date', 'end_date',
'chapter', 'rider_capacity', 'riders', 'spaces_left', 'price', 'full_cost', 'currency', 'is_over',
'fundraising_total', 'fundraising_target')
class RideRiderSerializer(serializers.ModelSerializer):
user = serializers.PrimaryKeyRelatedField(read_only=True, default=serializers.CurrentUserDefault())
class Meta:
model = RideRiders
fields = ('id', 'ride', 'user', 'signup_date', 'signup_expires', 'status', 'paid', 'expired', 'payload')
validators = [
UniqueTogetherValidator(
queryset=RideRiders.objects.all(),
fields=('user', 'ride'),
message='You have already registered for this ride.'
)
] | import requests
from django.conf import settings
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from server.api.serializers.chapters import ChapterSerializer
from .riders import RiderSerializer
from server.core.models.rides import Ride, RideRiders
class RideSerializer(serializers.ModelSerializer):
chapter = ChapterSerializer()
riders = RiderSerializer(source='registered_riders', many=True, read_only=True)
class Meta:
model = Ride
fields = ('id', 'name', 'slug', 'strapline', 'description_html', 'start_location', 'end_location', 'start_date', 'end_date',
'chapter', 'rider_capacity', 'riders', 'spaces_left', 'price', 'full_cost', 'currency', 'is_over',
'fundraising_total', 'fundraising_target')
class RideRiderSerializer(serializers.ModelSerializer):
user = serializers.PrimaryKeyRelatedField(read_only=True, default=serializers.CurrentUserDefault())
signup_date = serializers.DateTimeField(required=False)
class Meta:
model = RideRiders
fields = ('id', 'ride', 'user', 'signup_date', 'signup_expires', 'status', 'paid', 'expired', 'payload')
validators = [
UniqueTogetherValidator(
queryset=RideRiders.objects.all(),
fields=('user', 'ride'),
message='You have already registered for this ride.'
)
] | Make sure that the registration serialiser doesn't require the signup date. | Make sure that the registration serialiser doesn't require the signup date.
Signed-off-by: Michael Willmott <[email protected]>
| Python | mit | Techbikers/techbikers,Techbikers/techbikers,mwillmott/techbikers,mwillmott/techbikers,mwillmott/techbikers,Techbikers/techbikers,mwillmott/techbikers,Techbikers/techbikers |
76b85cd4fc848bf1b9db9d5e3a90e376400c66cb | src/pretix/urls.py | src/pretix/urls.py | from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
import pretix.control.urls
import pretix.presale.urls
urlpatterns = [
url(r'^control/', include(pretix.control.urls, namespace='control')),
url(r'^admin/', include(admin.site.urls)),
# The pretixpresale namespace is configured at the bottom of this file, because it
# contains a wildcard-style URL which has to be configured _after_ debug settings.
]
if settings.DEBUG:
import debug_toolbar
urlpatterns.append(
url(r'^__debug__/', include(debug_toolbar.urls)),
)
urlpatterns.append(
url(r'', include(pretix.presale.urls, namespace='presale'))
)
| import importlib
from django.apps import apps
from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
import pretix.control.urls
import pretix.presale.urls
urlpatterns = [
url(r'^control/', include(pretix.control.urls, namespace='control')),
url(r'^admin/', include(admin.site.urls)),
# The pretixpresale namespace is configured at the bottom of this file, because it
# contains a wildcard-style URL which has to be configured _after_ debug settings.
]
if settings.DEBUG:
import debug_toolbar
urlpatterns.append(
url(r'^__debug__/', include(debug_toolbar.urls)),
)
for app in apps.get_app_configs():
if hasattr(app, 'PretixPluginMeta'):
try:
urlmod = importlib.import_module(app.name + '.urls')
urlpatterns.append(
url(r'', include(urlmod, namespace='plugins'))
)
except ImportError:
pass
urlpatterns.append(
url(r'', include(pretix.presale.urls, namespace='presale'))
)
| Allow plugins to register URLs | Allow plugins to register URLs
| Python | apache-2.0 | lab2112/pretix,Flamacue/pretix,Unicorn-rzl/pretix,awg24/pretix,Flamacue/pretix,awg24/pretix,Unicorn-rzl/pretix,lab2112/pretix,Unicorn-rzl/pretix,akuks/pretix,akuks/pretix,lab2112/pretix,lab2112/pretix,awg24/pretix,Flamacue/pretix,akuks/pretix,awg24/pretix,akuks/pretix,Flamacue/pretix,Unicorn-rzl/pretix |
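What the discovery loop above looks for, sketched as a hypothetical plugin (module, class and view names are made up): an installed app whose AppConfig carries a PretixPluginMeta attribute, plus a sibling urls module that gets mounted at the site root.
# myplugin/apps.py -- hypothetical plugin; the mere presence of PretixPluginMeta
# makes the loop above import myplugin.urls.
from django.apps import AppConfig

class MyPluginApp(AppConfig):
    name = 'myplugin'

    class PretixPluginMeta:
        name = 'My plugin'
        version = '1.0.0'

# myplugin/urls.py would then be included with url(r'', ...), so its patterns
# should carry their own prefix, e.g.:
#
# from django.conf.urls import url
# from . import views
#
# urlpatterns = [
#     url(r'^myplugin/hello/$', views.hello, name='hello'),
# ]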
0cb85aade1cfd7f264263bbe7113cb013b39cb44 | src/rolca/core/api/urls.py | src/rolca/core/api/urls.py | """.. Ignore pydocstyle D400.
=============
Core API URLs
=============
The ``routeList`` is meant to be included in ``urlpatterns`` with the
following code:
.. code-block:: python
from rest_framework import routers
from rolca.core.api import urls as core_api_urls
route_lists = [
core_api_urls.routeList,
...
]
router = routers.DefaultRouter()
for route_list in route_lists:
for prefix, viewset in route_list:
router.register(prefix, viewset)
For advanced configuration code can be accordingly changed to meet the
needs.
"""
from rolca.core.api.views import ContestViewSet, SubmissionViewSet
routeList = (
(r'photo', SubmissionViewSet),
(r'contest', ContestViewSet),
)
| """.. Ignore pydocstyle D400.
=============
Core API URLs
=============
The ``routeList`` is meant to be included in ``urlpatterns`` with the
following code:
.. code-block:: python
from rest_framework import routers
from rolca.core.api import urls as core_api_urls
route_lists = [
core_api_urls.routeList,
...
]
router = routers.DefaultRouter()
for route_list in route_lists:
for prefix, viewset in route_list:
router.register(prefix, viewset)
For advanced configuration code can be accordingly changed to meet the
needs.
"""
from rolca.core.api.views import ContestViewSet, SubmissionViewSet
routeList = (
(r'submission', SubmissionViewSet),
(r'contest', ContestViewSet),
)
| Rename photo endpoint to submission | Rename photo endpoint to submission
| Python | apache-2.0 | dblenkus/rolca,dblenkus/rolca,dblenkus/rolca |
788dd6f62899fb16aa983c17bc1a5e6eea5317b0 | FunctionHandler.py | FunctionHandler.py | import os, sys
from glob import glob
import GlobalVars
def LoadFunction(path, loadAs=''):
loadType = 'l'
name = path
src = __import__('Functions.' + name, globals(), locals(), [])
if loadAs != '':
name = loadAs
if name in GlobalVars.functions:
loadType = 'rel'
del sys.modules['Functions.'+name]
for f in glob ('Functions/%s.pyc' % name):
os.remove(f)
reload(src)
components = name.split('.')
for comp in components[:1]:
src = getattr(src, comp)
ModuleName = str(src).split("from")[0].strip("(").rstrip(" ")
if loadType != 'rel':
print ModuleName + " loaded."
func = src.Instantiate()
GlobalVars.functions.update({name:func})
return loadType
def UnloadFunction(name):
success = True
if name in GlobalVars.functions.keys():
del GlobalVars.functions[name]
else:
success = False
return success
def AutoLoadFunctions():
root = os.path.join('.', 'Functions')
for item in os.listdir(root):
if not os.path.isfile(os.path.join(root, item)):
continue
if not item.endswith('.py'):
continue
try:
if item[:-3] not in GlobalVars.nonDefaultModules:
LoadFunction(item[:-3])
except Exception, x:
print x.args
| import os, sys
from glob import glob
import GlobalVars
def LoadFunction(path, loadAs=''):
loadType = 'l'
name = path
src = __import__('Functions.' + name, globals(), locals(), [])
if loadAs != '':
name = loadAs
if name in GlobalVars.functions:
loadType = 'rel'
del sys.modules['Functions.'+name]
for f in glob ('Functions/%s.pyc' % name):
os.remove(f)
reload(src)
components = name.split('.')
for comp in components[:1]:
src = getattr(src, comp)
ModuleName = str(src).split("from")[0].strip("(").rstrip(" ")
if loadType != 'rel':
print ModuleName + " loaded."
else:
print ModuleName + " reloaded."
func = src.Instantiate()
GlobalVars.functions.update({name:func})
return loadType
def UnloadFunction(name):
success = True
if name in GlobalVars.functions.keys():
del GlobalVars.functions[name]
else:
success = False
return success
def AutoLoadFunctions():
root = os.path.join('.', 'Functions')
for item in os.listdir(root):
if not os.path.isfile(os.path.join(root, item)):
continue
if not item.endswith('.py'):
continue
try:
if item[:-3] not in GlobalVars.nonDefaultModules:
LoadFunction(item[:-3])
except Exception, x:
print x.args
| Clean up debug printing further | Clean up debug printing further
| Python | mit | HubbeKing/Hubbot_Twisted |
0bb777c0c77e5b7cac8d48f79f78d3a7cf944943 | backend/uclapi/uclapi/utils.py | backend/uclapi/uclapi/utils.py | def strtobool(x):
return x.lower() in ("true", "yes", "1", "y") | def strtobool(x):
try:
b = x.lower() in ("true", "yes", "1", "y")
return b
except AttributeError:
return False
    except NameError:
return False | Add some failsafes to strtobool | Add some failsafes to strtobool
| Python | mit | uclapi/uclapi,uclapi/uclapi,uclapi/uclapi,uclapi/uclapi |
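Expected behaviour of the guarded strtobool above: recognised truthy strings return True, other strings return False, and non-string input falls back to False via the AttributeError branch instead of raising. The import path is assumed from the file location.
# Behaviour sketch for the guarded strtobool above.
from uclapi.utils import strtobool   # import path assumed from the file location

assert strtobool("Yes") is True
assert strtobool("0") is False
assert strtobool(3) is False      # ints have no .lower(); the AttributeError branch returns False
assert strtobool(None) is False   # same fallback instead of an exception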
1f914a04adb4ad7d39ca7104e2ea36acc76b18bd | pvextractor/tests/test_gui.py | pvextractor/tests/test_gui.py | import numpy as np
from numpy.testing import assert_allclose
import pytest
from astropy.io import fits
from ..pvextractor import extract_pv_slice
from ..geometry.path import Path
from ..gui import PVSlicer
from .test_slicer import make_test_hdu
try:
import PyQt5
PYQT5OK = True
except ImportError:
PYQT5OK = False
import matplotlib as mpl
if mpl.__version__[0] == '2':
MPLOK = False
else:
MPLOK = True
@pytest.mark.skipif('not PYQT5OK or not MPLOK')
def test_gui():
hdu = make_test_hdu()
pv = PVSlicer(hdu, clim=(-0.02, 2))
pv.show(block=False)
x = [100,200,220,330,340]
y = [100,200,300,420,430]
for i in range(len(x)):
pv.fig.canvas.motion_notify_event(x[i],y[i])
pv.fig.canvas.button_press_event(x[i],y[i],1)
pv.fig.canvas.key_press_event('enter')
pv.fig.canvas.motion_notify_event(310,420)
pv.fig.canvas.button_press_event(410,420,1)
pv.fig.canvas.draw()
assert pv.pv_slice.data.shape == (5,2)
| import pytest
from distutils.version import LooseVersion
import matplotlib as mpl
from ..gui import PVSlicer
from .test_slicer import make_test_hdu
try:
import PyQt5
PYQT5OK = True
except ImportError:
PYQT5OK = False
if LooseVersion(mpl.__version__) < LooseVersion('2'):
MPLOK = True
else:
MPLOK = False
@pytest.mark.skipif('not PYQT5OK or not MPLOK')
def test_gui():
hdu = make_test_hdu()
pv = PVSlicer(hdu, clim=(-0.02, 2))
pv.show(block=False)
x = [100, 200, 220, 330, 340]
y = [100, 200, 300, 420, 430]
for i in range(len(x)):
pv.fig.canvas.motion_notify_event(x[i], y[i])
pv.fig.canvas.button_press_event(x[i], y[i], 1)
pv.fig.canvas.key_press_event('enter')
pv.fig.canvas.motion_notify_event(310, 420)
pv.fig.canvas.button_press_event(410, 420, 1)
pv.fig.canvas.draw()
assert pv.pv_slice.data.shape == (5, 2)
| Use LooseVersion to compare version numbers | Use LooseVersion to compare version numbers
| Python | bsd-3-clause | radio-astro-tools/pvextractor,keflavich/pvextractor |
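The point of the change above: checking mpl.__version__[0] treats the version as a string, which misorders anything with a two-digit component, while LooseVersion compares the numeric parts. A quick sketch:
# Why LooseVersion beats looking at the first character of a version string.
from distutils.version import LooseVersion

assert LooseVersion('1.5.3') < LooseVersion('2')        # matplotlib 1.x keeps MPLOK True
assert not (LooseVersion('2.0.2') < LooseVersion('2'))  # 2.x correctly fails the check
assert LooseVersion('10.0') > LooseVersion('2')         # plain string comparison would say '10.0' < '2'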
0cc4e839d5d7725aba289047cefe77cd89d24593 | auth_mac/models.py | auth_mac/models.py | from django.db import models
from django.contrib.auth.models import User
import datetime
def default_expiry_time():
return datetime.datetime.now() + datetime.timedelta(days=1)
def random_string():
return User.objects.make_random_password(16)
class Credentials(models.Model):
"Keeps track of issued MAC credentials"
user = models.ForeignKey(User)
expiry = models.DateTimeField("Expires On", default=default_expiry_time)
identifier = models.CharField("MAC Key Identifier", max_length=16, default=random_string)
key = models.CharField("MAC Key", max_length=16, default=random_string)
def __unicode__(self):
return u"{0}:{1}".format(self.identifier, self.key)
class Nonce(models.Model):
"""Keeps track of any NONCE combinations that we have used"""
nonce = models.CharField("NONCE", max_length=16, null=True, blank=True)
timestamp = models.DateTimeField("Timestamp", auto_now_add=True)
credentials = models.ForeignKey(Credentials) | from django.db import models
from django.contrib.auth.models import User
import datetime
def default_expiry_time():
return datetime.datetime.now() + datetime.timedelta(days=1)
def random_string():
return User.objects.make_random_password(16)
class Credentials(models.Model):
"Keeps track of issued MAC credentials"
user = models.ForeignKey(User)
expiry = models.DateTimeField("Expires On", default=default_expiry_time)
identifier = models.CharField("MAC Key Identifier", max_length=16, default=random_string)
key = models.CharField("MAC Key", max_length=16, default=random_string)
def __unicode__(self):
return u"{0}:{1}".format(self.identifier, self.key)
@property
def expired(self):
"""Returns whether or not the credentials have expired"""
if self.expiry < datetime.datetime.now():
return True
return False
class Nonce(models.Model):
"""Keeps track of any NONCE combinations that we have used"""
nonce = models.CharField("NONCE", max_length=16, null=True, blank=True)
timestamp = models.DateTimeField("Timestamp", auto_now_add=True)
credentials = models.ForeignKey(Credentials) | Add a model property to tell if credentials have expired | Add a model property to tell if credentials have expired
| Python | mit | ndevenish/auth_mac |
87c861f6ed0e73e21983edc3add35954b9f0def5 | apps/configuration/fields.py | apps/configuration/fields.py | import unicodedata
from django.forms import fields
class XMLCompatCharField(fields.CharField):
"""
Strip 'control characters', as XML 1.0 does not allow them and the API may
return data in XML.
"""
def to_python(self, value):
value = super().to_python(value=value)
return self.remove_control_characters(value)
@staticmethod
def remove_control_characters(str):
return "".join(ch for ch in str if unicodedata.category(ch)[0] != "C")
| import unicodedata
from django.forms import fields
class XMLCompatCharField(fields.CharField):
"""
Strip 'control characters', as XML 1.0 does not allow them and the API may
return data in XML.
"""
def to_python(self, value):
value = super().to_python(value=value)
return self.remove_control_characters(value)
@staticmethod
def remove_control_characters(input):
valid_chars = ['\n', '\r']
return "".join(ch for ch in input if
unicodedata.category(ch)[0] != "C" or ch in valid_chars)
| Allow linebreaks textareas (should be valid in XML) | Allow linebreaks textareas (should be valid in XML)
| Python | apache-2.0 | CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat |
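A standalone restatement of the revised filter above, just to show which characters it keeps: Unicode category "C" (control/format) characters are dropped unless they are the whitelisted linebreaks.
# Standalone restatement of the revised filtering logic, for illustration only.
import unicodedata

def strip_control_chars(text, valid_chars=('\n', '\r')):
    return "".join(ch for ch in text
                   if unicodedata.category(ch)[0] != "C" or ch in valid_chars)

assert strip_control_chars("line1\nline2\r\n") == "line1\nline2\r\n"  # linebreaks survive
assert strip_control_chars("bad\x00value\x0bhere") == "badvaluehere"  # NUL and vertical tab are removed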
b61679efce39841120fcdb921acefbc729f4c4fd | tests/test_kmeans.py | tests/test_kmeans.py | import numpy as np
import milk.unsupervised
def test_kmeans():
features = np.r_[np.random.rand(20,3)-.5,.5+np.random.rand(20,3)]
centroids, _ = milk.unsupervised.kmeans(features,2)
positions = [0]*20 + [1]*20
correct = (centroids == positions).sum()
assert correct >= 38 or correct <= 2
| import numpy as np
import milk.unsupervised
def test_kmeans():
np.random.seed(132)
features = np.r_[np.random.rand(20,3)-.5,.5+np.random.rand(20,3)]
centroids, _ = milk.unsupervised.kmeans(features,2)
positions = [0]*20 + [1]*20
correct = (centroids == positions).sum()
assert correct >= 38 or correct <= 2
def test_kmeans_centroids():
np.random.seed(132)
features = np.random.rand(201,30)
for k in [2,3,5,10]:
indices,centroids = milk.unsupervised.kmeans(features, k)
for i in xrange(k):
assert np.allclose(centroids[i], features[indices == i].mean(0))
| Make sure results make sense | Make sure results make sense
| Python | mit | luispedro/milk,pombredanne/milk,luispedro/milk,pombredanne/milk,luispedro/milk,pombredanne/milk |
e676877492057d7b370431f6896154702c8459f1 | webshack/auto_inject.py | webshack/auto_inject.py | from urllib.parse import urljoin
from urllib.request import urlopen
from urllib.error import URLError
import sys
GITHUB_USERS = [('Polymer', '0.5.2')]
def resolve_missing_user(user, branch, package):
assets = ["{}.html".format(package),
"{}.css".format(package),
"{}.js".format(package)]
base_url = "https://raw.githubusercontent.com/{user}/{package}/{branch}/".format(**locals())
matched_assets = []
for asset in assets:
asset_url = urljoin(base_url, asset)
try:
with urlopen(asset_url):
pass
matched_assets.append(asset)
except URLError:
pass
if matched_assets:
print(" Matched.")
data = {'base': base_url, 'assets': {a: a for a in matched_assets}}
print('---')
print('{}:'.format(package))
print(' base: {}'.format(base_url))
print(' assets:')
for asset in matched_assets:
print(' {0}: {0}'.format(asset))
print('---')
return True
return False
def resolve_missing(package):
print('Trying to resolve missing package from GitHub repositories...')
for user, branch in GITHUB_USERS:
print(' {}...'.format(user))
if resolve_missing_user(user, branch, package):
return
| from urllib.parse import urljoin
from urllib.request import urlopen
from urllib.error import URLError
import sys
ENORMOUS_INJECTION_HACK = False
GITHUB_USERS = [('Polymer', '0.5.2')]
def resolve_missing_user(user, branch, package):
assets = ["{}.html".format(package),
"{}.css".format(package),
"{}.js".format(package)]
base_url = "https://raw.githubusercontent.com/{user}/{package}/{branch}/".format(**locals())
matched_assets = []
for asset in assets:
asset_url = urljoin(base_url, asset)
try:
with urlopen(asset_url):
pass
matched_assets.append(asset)
except URLError:
pass
if matched_assets:
print(" Matched.")
data = {'base': base_url, 'assets': {a: a for a in matched_assets}}
if ENORMOUS_INJECTION_HACK:
target = open('webshack/standard_packages.yaml', 'a')
else:
target = sys.stdout
print('---')
print('{}:'.format(package), file=target)
print(' base: {}'.format(base_url), file=target)
print(' assets:', file=target)
for asset in matched_assets:
print(' {0}: {0}'.format(asset), file=target)
if not ENORMOUS_INJECTION_HACK:
print('---')
return True
return False
def resolve_missing(package):
print('Trying to resolve missing package from GitHub repositories...')
for user, branch in GITHUB_USERS:
print(' {}...'.format(user))
if resolve_missing_user(user, branch, package):
return
| Add a hack to auto-inject new deps | Add a hack to auto-inject new deps
| Python | mit | prophile/webshack |
0e53ae11cb1cc53979edb1f17162e8b1d89ad809 | user/models.py | user/models.py | from django.db import models
# Create your models here.
| from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
# Extends User model. Defines sn and notifications for a User.
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
sn = models.CharField(max_length=60)
notifications = models.ForeignKey(Notifications, on_delete=models.CASCADE)
# Defines user's email notification settings.
class Notifications(models.Model):
update = models.BooleanField(default=True)
response = models.BooleanField(default=True)
#
# The following functions define signals so that the Profile model
# will be automatically created/updated whenever the Django User object
# is created/updated. This makes it so you never have to call the Profile
# object's save method, all saving is done with the User model.
#
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
instance.profile.save()
| Define initial schema for user and email notifications | Define initial schema for user and email notifications
| Python | apache-2.0 | ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints,ritstudentgovernment/PawPrints |
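Worth flagging in the record above: Profile's ForeignKey names Notifications before that class is defined, which raises NameError at import time. A minimal sketch of the usual Django workaround, a lazy string reference (not part of the original commit):
# Sketch only -- shows the lazy string reference; fields otherwise as in the record above.
from django.db import models
from django.contrib.auth.models import User

class Profile(models.Model):
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    sn = models.CharField(max_length=60)
    notifications = models.ForeignKey('Notifications', on_delete=models.CASCADE)

class Notifications(models.Model):
    update = models.BooleanField(default=True)
    response = models.BooleanField(default=True)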
172feb5997a826181a0ec381c171a0a2cc854e4c | yolapy/configuration.py | yolapy/configuration.py | """Configuration.
Yolapy.configuration provides a key-value store used by the Yola client.
Data is stored here in the module, benefits include:
* Configuration is decoupled from application logic.
* When instantiating multiple service models, each contains its own client.
This module allows for configuration to happen once then consumed multiple
times by each client.
See README for example of use.
"""
config = {}
_default = object()
def configure(**kwargs):
"""Save all keyword arguments as (key=value) configuration."""
config.update(kwargs)
def get_config(key, default=_default):
"""Lookup the value of a configuration key using an optional default."""
value = config.get(key, default)
if value == _default:
raise KeyError('%s is not configured' % key)
return value
| """Configuration.
Yolapy.configuration provides a key-value store used by the Yola client.
Data is stored here in the module, benefits include:
* Configuration is decoupled from application logic.
* When instantiating multiple service models, each contains its own client.
This module allows for configuration to happen once then consumed multiple
times by each client.
See README for example of use.
"""
config = {}
_missing = object()
def configure(**kwargs):
"""Save all keyword arguments as (key=value) configuration."""
config.update(kwargs)
def get_config(key, default=_missing):
"""Lookup the value of a configuration key using an optional default."""
value = config.get(key, default)
if value == _missing:
raise KeyError('%s is not configured' % key)
return value
| Improve varname for missing config | Improve varname for missing config
| Python | mit | yola/yolapy |
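A short usage sketch for the configure/get_config pair above (the key names are invented for illustration):
# Usage sketch; key names are invented.
from yolapy.configuration import configure, get_config

configure(api_key='abc123', host='https://example.invalid')

assert get_config('api_key') == 'abc123'
assert get_config('missing', default=None) is None   # explicit default suppresses the KeyError
try:
    get_config('missing')   # no default -> KeyError("missing is not configured")
except KeyError:
    pass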
b96cb194c8edd54fda9868d69fda515ac8beb29f | vumi/dispatchers/__init__.py | vumi/dispatchers/__init__.py | """The vumi.dispatchers API."""
__all__ = ["BaseDispatchWorker", "BaseDispatchRouter", "SimpleDispatchRouter",
"TransportToTransportRouter", "ToAddrRouter",
"FromAddrMultiplexRouter", "UserGroupingRouter"]
from vumi.dispatchers.base import (BaseDispatchWorker, BaseDispatchRouter,
SimpleDispatchRouter,
TransportToTransportRouter, ToAddrRouter,
FromAddrMultiplexRouter,
UserGroupingRouter)
| """The vumi.dispatchers API."""
__all__ = ["BaseDispatchWorker", "BaseDispatchRouter", "SimpleDispatchRouter",
"TransportToTransportRouter", "ToAddrRouter",
"FromAddrMultiplexRouter", "UserGroupingRouter",
"ContentKeywordRouter"]
from vumi.dispatchers.base import (BaseDispatchWorker, BaseDispatchRouter,
SimpleDispatchRouter,
TransportToTransportRouter, ToAddrRouter,
FromAddrMultiplexRouter,
UserGroupingRouter, ContentKeywordRouter)
| Add ContentKeywordRouter to vumi.dispatchers API. | Add ContentKeywordRouter to vumi.dispatchers API.
| Python | bsd-3-clause | harrissoerja/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,TouK/vumi,harrissoerja/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,vishwaprakashmishra/xmatrix |
041e1545c99681c8cf9e43d364877d1ff43342d0 | augur/datasources/augur_db/test_augur_db.py | augur/datasources/augur_db/test_augur_db.py | import os
import pytest
@pytest.fixture(scope="module")
def augur_db():
import augur
augur_app = augur.Application()
return augur_app['augur_db']()
# def test_repoid(augur_db):
# assert ghtorrent.repoid('rails', 'rails') >= 1000
# def test_userid(augur_db):
# assert ghtorrent.userid('howderek') >= 1000
"""
Pandas testing format
assert ghtorrent.<function>('owner', 'repo').isin(['<data that should be in dataframe>']).any
The tests check if a value is anywhere in the dataframe
"""
| import os
import pytest
@pytest.fixture(scope="module")
def augur_db():
import augur
augur_app = augur.Application()
return augur_app['augur_db']()
# def test_repoid(augur_db):
# assert ghtorrent.repoid('rails', 'rails') >= 1000
# def test_userid(augur_db):
# assert ghtorrent.userid('howderek') >= 1000
"""
Pandas testing format
assert ghtorrent.<function>('owner', 'repo').isin(['<data that should be in dataframe>']).any
The tests check if a value is anywhere in the dataframe
"""
def test_issues_first_time_opened(augur_db):
# repo_id
assert augur_db.issues_first_time_opened(
1, repo_id=25001, period='day').isin(["2019-05-23 00:00:00+00:00"]).any
assert augur_db.issues_first_time_opened(
1, repo_id=25001, period='week').isin(["2019-05-20 00:00:00+00:00"]).any
# repo_gorup_id
assert augur_db.issues_first_time_opened(1, period='day').isin([
"2019-05-23 00:00:00+00:00"]).any
| Add Unit test for new contributors of issues | Add Unit test for new contributors of issues
Signed-off-by: Bingwen Ma <[email protected]>
| Python | mit | OSSHealth/ghdata,OSSHealth/ghdata,OSSHealth/ghdata |
cd1c3645d733ab16355fe516bb2e505f87d49ace | backdrop/contrib/evl_upload.py | backdrop/contrib/evl_upload.py | from datetime import datetime
import itertools
from tests.support.test_helpers import d_tz
def ceg_volumes(rows):
def ceg_keys(rows):
return [
"_timestamp", "timeSpan", "relicensing_web", "relicensing_ivr",
"relicensing_agent", "sorn_web", "sorn_ivr", "sorn_agent",
"agent_automated_dupes", "calls_answered_by_advisor"
]
def ceg_rows(rows):
for column in itertools.count(3):
date = ceg_date(rows, column)
if not isinstance(date, datetime):
return
if date >= d_tz(2012, 4, 1):
yield [
date, "month", rows[5][column], rows[6][column],
rows[9][column], rows[11][column], rows[12][column],
rows[13][column], rows[15][column], rows[17][column]
]
def ceg_date(rows, column):
try:
return rows[3][column]
except IndexError:
return None
yield ceg_keys(rows)
for row in ceg_rows(rows):
yield row
| from datetime import datetime
import itertools
from tests.support.test_helpers import d_tz
def ceg_volumes(rows):
def ceg_keys(rows):
return [
"_timestamp", "timeSpan", "relicensing_web", "relicensing_ivr",
"relicensing_agent", "sorn_web", "sorn_ivr", "sorn_agent",
"agent_automated_dupes", "calls_answered_by_advisor"
]
def ceg_rows(rows):
rows = list(rows)
for column in itertools.count(3):
date = ceg_date(rows, column)
if not isinstance(date, datetime):
return
if date >= d_tz(2012, 4, 1):
yield [
date, "month", rows[5][column], rows[6][column],
rows[9][column], rows[11][column], rows[12][column],
rows[13][column], rows[15][column], rows[17][column]
]
def ceg_date(rows, column):
try:
return rows[3][column]
except IndexError:
return None
yield ceg_keys(rows)
for row in ceg_rows(rows):
yield row
| Convert rows to list in EVL CEG parser | Convert rows to list in EVL CEG parser
It needs to access cells directly
| Python | mit | alphagov/backdrop,alphagov/backdrop,alphagov/backdrop |
7a04bb7692b4838e0abe9ba586fc4748ed9cd5d4 | tests/integration/blueprints/site/test_homepage.py | tests/integration/blueprints/site/test_homepage.py | """
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from tests.helpers import http_client
def test_homepage(site_app, site):
with http_client(site_app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
| """
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import pytest
from tests.helpers import http_client
def test_homepage(site_app, site):
with http_client(site_app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
assert response.location is None
def test_homepage_with_root_redirect(make_site_app, site):
site_app = make_site_app(ROOT_REDIRECT_TARGET='welcome')
with http_client(site_app) as client:
response = client.get('/')
assert response.status_code == 307
assert response.location == 'http://www.acmecon.test/welcome'
| Test custom root path redirect | Test custom root path redirect
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps |
cdfb5c0c074e9143eeb84d914225dbcfb63151ba | common/djangoapps/dark_lang/models.py | common/djangoapps/dark_lang/models.py | """
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
return [lang.strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
| """
Models for the dark-launching languages
"""
from django.db import models
from config_models.models import ConfigurationModel
class DarkLangConfig(ConfigurationModel):
"""
Configuration for the dark_lang django app
"""
released_languages = models.TextField(
blank=True,
help_text="A comma-separated list of language codes to release to the public."
)
@property
def released_languages_list(self):
"""
``released_languages`` as a list of language codes.
Example: ['it', 'de-at', 'es', 'pt-br']
"""
if not self.released_languages.strip(): # pylint: disable=no-member
return []
languages = [lang.strip() for lang in self.released_languages.split(',')] # pylint: disable=no-member
# Put in alphabetical order
languages.sort()
return languages
| Put language modal in alphabetical order LMS-2302 | Put language modal in alphabetical order LMS-2302
| Python | agpl-3.0 | Softmotions/edx-platform,rismalrv/edx-platform,ovnicraft/edx-platform,jbzdak/edx-platform,nttks/jenkins-test,philanthropy-u/edx-platform,dkarakats/edx-platform,AkA84/edx-platform,kursitet/edx-platform,eestay/edx-platform,atsolakid/edx-platform,kxliugang/edx-platform,zadgroup/edx-platform,B-MOOC/edx-platform,romain-li/edx-platform,martynovp/edx-platform,deepsrijit1105/edx-platform,rhndg/openedx,doganov/edx-platform,cognitiveclass/edx-platform,ahmadiga/min_edx,chand3040/cloud_that,LICEF/edx-platform,utecuy/edx-platform,Unow/edx-platform,jelugbo/tundex,zerobatu/edx-platform,IndonesiaX/edx-platform,dcosentino/edx-platform,ampax/edx-platform-backup,cselis86/edx-platform,DNFcode/edx-platform,stvstnfrd/edx-platform,cpennington/edx-platform,fly19890211/edx-platform,sameetb-cuelogic/edx-platform-test,ESOedX/edx-platform,xingyepei/edx-platform,zhenzhai/edx-platform,Kalyzee/edx-platform,cselis86/edx-platform,tiagochiavericosta/edx-platform,prarthitm/edxplatform,devs1991/test_edx_docmode,eemirtekin/edx-platform,motion2015/a3,DNFcode/edx-platform,kmoocdev/edx-platform,shubhdev/edx-platform,kmoocdev/edx-platform,amir-qayyum-khan/edx-platform,eduNEXT/edx-platform,kursitet/edx-platform,Ayub-Khan/edx-platform,fintech-circle/edx-platform,shubhdev/edxOnBaadal,auferack08/edx-platform,OmarIthawi/edx-platform,UXE/local-edx,jamiefolsom/edx-platform,dcosentino/edx-platform,openfun/edx-platform,Lektorium-LLC/edx-platform,kxliugang/edx-platform,hkawasaki/kawasaki-aio8-0,arbrandes/edx-platform,ampax/edx-platform,hkawasaki/kawasaki-aio8-0,jazkarta/edx-platform,eemirtekin/edx-platform,ahmedaljazzar/edx-platform,jruiperezv/ANALYSE,CredoReference/edx-platform,bdero/edx-platform,wwj718/ANALYSE,Stanford-Online/edx-platform,MSOpenTech/edx-platform,JCBarahona/edX,alu042/edx-platform,Shrhawk/edx-platform,kmoocdev2/edx-platform,jelugbo/tundex,Endika/edx-platform,don-github/edx-platform,Kalyzee/edx-platform,angelapper/edx-platform,marcore/edx-platform,waheedahmed/edx-platform,UXE/local-edx,jazztpt/edx-platform,zhenzhai/edx-platform,IndonesiaX/edx-platform,chauhanhardik/populo_2,antonve/s4-project-mooc,procangroup/edx-platform,antonve/s4-project-mooc,mcgachey/edx-platform,zerobatu/edx-platform,shubhdev/openedx,appsembler/edx-platform,SivilTaram/edx-platform,DefyVentures/edx-platform,ferabra/edx-platform,kmoocdev2/edx-platform,edx-solutions/edx-platform,devs1991/test_edx_docmode,Semi-global/edx-platform,AkA84/edx-platform,Softmotions/edx-platform,Endika/edx-platform,inares/edx-platform,yokose-ks/edx-platform,shurihell/testasia,halvertoluke/edx-platform,waheedahmed/edx-platform,xuxiao19910803/edx,olexiim/edx-platform,eestay/edx-platform,ubc/edx-platform,edry/edx-platform,knehez/edx-platform,deepsrijit1105/edx-platform,Shrhawk/edx-platform,4eek/edx-platform,carsongee/edx-platform,tanmaykm/edx-platform,carsongee/edx-platform,nttks/edx-platform,Endika/edx-platform,don-github/edx-platform,chauhanhardik/populo,waheedahmed/edx-platform,zadgroup/edx-platform,eduNEXT/edunext-platform,adoosii/edx-platform,jazkarta/edx-platform-for-isc,nikolas/edx-platform,nanolearningllc/edx-platform-cypress,caesar2164/edx-platform,amir-qayyum-khan/edx-platform,jelugbo/tundex,ampax/edx-platform-backup,longmen21/edx-platform,beni55/edx-platform,vasyarv/edx-platform,ovnicraft/edx-platform,jonathan-beard/edx-platform,solashirai/edx-platform,bdero/edx-platform,motion2015/edx-platform,appsembler/edx-platform,appliedx/edx-platform,chand3040/cloud_that,torchingloom/edx-platform,nanolearningllc/edx-platform-cypress-2,bigdatauniversity/edx-pla
tform,B-MOOC/edx-platform,procangroup/edx-platform,Shrhawk/edx-platform,dcosentino/edx-platform,atsolakid/edx-platform,y12uc231/edx-platform,rue89-tech/edx-platform,devs1991/test_edx_docmode,gsehub/edx-platform,fintech-circle/edx-platform,gsehub/edx-platform,TeachAtTUM/edx-platform,CourseTalk/edx-platform,Edraak/circleci-edx-platform,EDUlib/edx-platform,eestay/edx-platform,Lektorium-LLC/edx-platform,shubhdev/openedx,jbzdak/edx-platform,benpatterson/edx-platform,jswope00/griffinx,jazkarta/edx-platform-for-isc,vismartltd/edx-platform,hkawasaki/kawasaki-aio8-2,simbs/edx-platform,xuxiao19910803/edx-platform,antonve/s4-project-mooc,arbrandes/edx-platform,IONISx/edx-platform,ampax/edx-platform-backup,OmarIthawi/edx-platform,knehez/edx-platform,itsjeyd/edx-platform,ahmadio/edx-platform,longmen21/edx-platform,Semi-global/edx-platform,nttks/edx-platform,wwj718/edx-platform,solashirai/edx-platform,miptliot/edx-platform,zhenzhai/edx-platform,ahmadiga/min_edx,bitifirefly/edx-platform,DefyVentures/edx-platform,teltek/edx-platform,auferack08/edx-platform,sudheerchintala/LearnEraPlatForm,doismellburning/edx-platform,zerobatu/edx-platform,solashirai/edx-platform,chauhanhardik/populo_2,mbareta/edx-platform-ft,jjmiranda/edx-platform,jonathan-beard/edx-platform,pepeportela/edx-platform,shurihell/testasia,etzhou/edx-platform,rue89-tech/edx-platform,vasyarv/edx-platform,prarthitm/edxplatform,jswope00/GAI,simbs/edx-platform,eduNEXT/edunext-platform,olexiim/edx-platform,motion2015/a3,arbrandes/edx-platform,louyihua/edx-platform,mitocw/edx-platform,ahmadiga/min_edx,hkawasaki/kawasaki-aio8-1,cecep-edu/edx-platform,chand3040/cloud_that,xinjiguaike/edx-platform,msegado/edx-platform,beni55/edx-platform,beni55/edx-platform,vikas1885/test1,utecuy/edx-platform,SivilTaram/edx-platform,nanolearning/edx-platform,procangroup/edx-platform,RPI-OPENEDX/edx-platform,SivilTaram/edx-platform,playm2mboy/edx-platform,jswope00/griffinx,pabloborrego93/edx-platform,jazkarta/edx-platform,JioEducation/edx-platform,hastexo/edx-platform,jzoldak/edx-platform,dkarakats/edx-platform,analyseuc3m/ANALYSE-v1,B-MOOC/edx-platform,Edraak/edraak-platform,longmen21/edx-platform,motion2015/edx-platform,angelapper/edx-platform,vikas1885/test1,DefyVentures/edx-platform,gsehub/edx-platform,jolyonb/edx-platform,knehez/edx-platform,ovnicraft/edx-platform,LICEF/edx-platform,ferabra/edx-platform,JCBarahona/edX,bdero/edx-platform,ubc/edx-platform,edx-solutions/edx-platform,nikolas/edx-platform,MSOpenTech/edx-platform,AkA84/edx-platform,ESOedX/edx-platform,adoosii/edx-platform,halvertoluke/edx-platform,mahendra-r/edx-platform,arifsetiawan/edx-platform,shabab12/edx-platform,Semi-global/edx-platform,lduarte1991/edx-platform,CredoReference/edx-platform,eemirtekin/edx-platform,MSOpenTech/edx-platform,iivic/BoiseStateX,knehez/edx-platform,jzoldak/edx-platform,openfun/edx-platform,Ayub-Khan/edx-platform,MakeHer/edx-platform,ak2703/edx-platform,nttks/edx-platform,doganov/edx-platform,simbs/edx-platform,defance/edx-platform,Edraak/circleci-edx-platform,jamiefolsom/edx-platform,10clouds/edx-platform,shubhdev/edx-platform,atsolakid/edx-platform,ZLLab-Mooc/edx-platform,romain-li/edx-platform,mcgachey/edx-platform,antoviaque/edx-platform,stvstnfrd/edx-platform,edx/edx-platform,chauhanhardik/populo_2,hkawasaki/kawasaki-aio8-2,wwj718/ANALYSE,MakeHer/edx-platform,synergeticsedx/deployment-wipro,kmoocdev/edx-platform,valtech-mooc/edx-platform,lduarte1991/edx-platform,hkawasaki/kawasaki-aio8-0,romain-li/edx-platform,devs1991/test_edx_docmode,nagyistoce/edx-platform,shashank971
/edx-platform,mushtaqak/edx-platform,cselis86/edx-platform,vasyarv/edx-platform,zubair-arbi/edx-platform,mjirayu/sit_academy,mtlchun/edx,vikas1885/test1,ovnicraft/edx-platform,angelapper/edx-platform,antoviaque/edx-platform,cpennington/edx-platform,prarthitm/edxplatform,deepsrijit1105/edx-platform,y12uc231/edx-platform,nagyistoce/edx-platform,zofuthan/edx-platform,unicri/edx-platform,nanolearningllc/edx-platform-cypress,cecep-edu/edx-platform,gsehub/edx-platform,carsongee/edx-platform,xingyepei/edx-platform,msegado/edx-platform,hamzehd/edx-platform,doganov/edx-platform,sudheerchintala/LearnEraPlatForm,wwj718/edx-platform,jazkarta/edx-platform-for-isc,don-github/edx-platform,leansoft/edx-platform,ampax/edx-platform-backup,zerobatu/edx-platform,shabab12/edx-platform,IndonesiaX/edx-platform,SivilTaram/edx-platform,franosincic/edx-platform,olexiim/edx-platform,shubhdev/edxOnBaadal,nttks/edx-platform,mcgachey/edx-platform,peterm-itr/edx-platform,zhenzhai/edx-platform,kmoocdev2/edx-platform,mcgachey/edx-platform,kamalx/edx-platform,amir-qayyum-khan/edx-platform,eemirtekin/edx-platform,arifsetiawan/edx-platform,mitocw/edx-platform,chudaol/edx-platform,alexthered/kienhoc-platform,adoosii/edx-platform,J861449197/edx-platform,a-parhom/edx-platform,jswope00/GAI,benpatterson/edx-platform,ubc/edx-platform,rue89-tech/edx-platform,y12uc231/edx-platform,torchingloom/edx-platform,TeachAtTUM/edx-platform,sudheerchintala/LearnEraPlatForm,nanolearningllc/edx-platform-cypress-2,nikolas/edx-platform,mjirayu/sit_academy,jjmiranda/edx-platform,wwj718/edx-platform,beni55/edx-platform,abdoosh00/edraak,analyseuc3m/ANALYSE-v1,teltek/edx-platform,UXE/local-edx,playm2mboy/edx-platform,jazztpt/edx-platform,zubair-arbi/edx-platform,LearnEra/LearnEraPlaftform,hmcmooc/muddx-platform,fly19890211/edx-platform,mahendra-r/edx-platform,OmarIthawi/edx-platform,naresh21/synergetics-edx-platform,rismalrv/edx-platform,mitocw/edx-platform,chauhanhardik/populo_2,synergeticsedx/deployment-wipro,Edraak/edraak-platform,openfun/edx-platform,rismalrv/edx-platform,lduarte1991/edx-platform,arifsetiawan/edx-platform,zubair-arbi/edx-platform,miptliot/edx-platform,Softmotions/edx-platform,arifsetiawan/edx-platform,cyanna/edx-platform,fly19890211/edx-platform,jazkarta/edx-platform-for-isc,torchingloom/edx-platform,yokose-ks/edx-platform,fintech-circle/edx-platform,ahmadio/edx-platform,OmarIthawi/edx-platform,nanolearningllc/edx-platform-cypress-2,CourseTalk/edx-platform,bitifirefly/edx-platform,bitifirefly/edx-platform,mushtaqak/edx-platform,hamzehd/edx-platform,Edraak/circleci-edx-platform,edry/edx-platform,eestay/edx-platform,Semi-global/edx-platform,pabloborrego93/edx-platform,IONISx/edx-platform,alu042/edx-platform,jjmiranda/edx-platform,vikas1885/test1,fly19890211/edx-platform,shashank971/edx-platform,jbzdak/edx-platform,shabab12/edx-platform,martynovp/edx-platform,mahendra-r/edx-platform,a-parhom/edx-platform,unicri/edx-platform,jamesblunt/edx-platform,ESOedX/edx-platform,stvstnfrd/edx-platform,tanmaykm/edx-platform,bitifirefly/edx-platform,4eek/edx-platform,ahmadiga/min_edx,hkawasaki/kawasaki-aio8-1,marcore/edx-platform,DNFcode/edx-platform,xuxiao19910803/edx-platform,jzoldak/edx-platform,kmoocdev2/edx-platform,jruiperezv/ANALYSE,Lektorium-LLC/edx-platform,IONISx/edx-platform,Livit/Livit.Learn.EdX,jamiefolsom/edx-platform,Edraak/edx-platform,zubair-arbi/edx-platform,knehez/edx-platform,nttks/jenkins-test,DNFcode/edx-platform,jbassen/edx-platform,jazztpt/edx-platform,don-github/edx-platform,alexthered/kienhoc-platform,jolyonb/edx-platform,
jamesblunt/edx-platform,stvstnfrd/edx-platform,chrisndodge/edx-platform,xinjiguaike/edx-platform,10clouds/edx-platform,teltek/edx-platform,nanolearningllc/edx-platform-cypress,etzhou/edx-platform,Edraak/circleci-edx-platform,Endika/edx-platform,kmoocdev2/edx-platform,bdero/edx-platform,MSOpenTech/edx-platform,mtlchun/edx,torchingloom/edx-platform,jbzdak/edx-platform,jamesblunt/edx-platform,yokose-ks/edx-platform,polimediaupv/edx-platform,mjirayu/sit_academy,kamalx/edx-platform,eestay/edx-platform,DNFcode/edx-platform,mjirayu/sit_academy,pabloborrego93/edx-platform,romain-li/edx-platform,dcosentino/edx-platform,edx/edx-platform,vasyarv/edx-platform,cognitiveclass/edx-platform,alu042/edx-platform,zadgroup/edx-platform,playm2mboy/edx-platform,unicri/edx-platform,cyanna/edx-platform,raccoongang/edx-platform,tiagochiavericosta/edx-platform,Edraak/edraak-platform,xuxiao19910803/edx-platform,mbareta/edx-platform-ft,adoosii/edx-platform,beacloudgenius/edx-platform,xuxiao19910803/edx-platform,nagyistoce/edx-platform,SravanthiSinha/edx-platform,kursitet/edx-platform,rhndg/openedx,mushtaqak/edx-platform,xuxiao19910803/edx-platform,dkarakats/edx-platform,nagyistoce/edx-platform,sudheerchintala/LearnEraPlatForm,sameetb-cuelogic/edx-platform-test,philanthropy-u/edx-platform,olexiim/edx-platform,morenopc/edx-platform,SravanthiSinha/edx-platform,ZLLab-Mooc/edx-platform,LearnEra/LearnEraPlaftform,mtlchun/edx,WatanabeYasumasa/edx-platform,jelugbo/tundex,mjirayu/sit_academy,abdoosh00/edraak,xinjiguaike/edx-platform,analyseuc3m/ANALYSE-v1,jswope00/GAI,utecuy/edx-platform,doismellburning/edx-platform,Unow/edx-platform,tiagochiavericosta/edx-platform,beacloudgenius/edx-platform,edry/edx-platform,nagyistoce/edx-platform,proversity-org/edx-platform,JioEducation/edx-platform,raccoongang/edx-platform,martynovp/edx-platform,polimediaupv/edx-platform,synergeticsedx/deployment-wipro,peterm-itr/edx-platform,doismellburning/edx-platform,RPI-OPENEDX/edx-platform,IONISx/edx-platform,Ayub-Khan/edx-platform,vismartltd/edx-platform,hkawasaki/kawasaki-aio8-1,4eek/edx-platform,ak2703/edx-platform,proversity-org/edx-platform,kamalx/edx-platform,antoviaque/edx-platform,motion2015/a3,mushtaqak/edx-platform,gymnasium/edx-platform,don-github/edx-platform,ESOedX/edx-platform,louyihua/edx-platform,openfun/edx-platform,mahendra-r/edx-platform,jazztpt/edx-platform,JCBarahona/edX,LICEF/edx-platform,leansoft/edx-platform,waheedahmed/edx-platform,ferabra/edx-platform,shubhdev/edx-platform,xinjiguaike/edx-platform,Softmotions/edx-platform,hastexo/edx-platform,wwj718/ANALYSE,etzhou/edx-platform,polimediaupv/edx-platform,Kalyzee/edx-platform,ZLLab-Mooc/edx-platform,sameetb-cuelogic/edx-platform-test,nanolearning/edx-platform,ferabra/edx-platform,nanolearningllc/edx-platform-cypress-2,halvertoluke/edx-platform,JCBarahona/edX,jbassen/edx-platform,BehavioralInsightsTeam/edx-platform,RPI-OPENEDX/edx-platform,inares/edx-platform,ahmadio/edx-platform,edx-solutions/edx-platform,IndonesiaX/edx-platform,hmcmooc/muddx-platform,dsajkl/123,hmcmooc/muddx-platform,raccoongang/edx-platform,naresh21/synergetics-edx-platform,utecuy/edx-platform,miptliot/edx-platform,eemirtekin/edx-platform,rue89-tech/edx-platform,Edraak/edx-platform,eduNEXT/edx-platform,chauhanhardik/populo_2,tanmaykm/edx-platform,olexiim/edx-platform,UOMx/edx-platform,xuxiao19910803/edx,jjmiranda/edx-platform,vasyarv/edx-platform,appliedx/edx-platform,Kalyzee/edx-platform,hkawasaki/kawasaki-aio8-2,nikolas/edx-platform,chudaol/edx-platform,vismartltd/edx-platform,jruiperezv/ANALYSE,synergetic
sedx/deployment-wipro,TeachAtTUM/edx-platform,prarthitm/edxplatform,pomegranited/edx-platform,Stanford-Online/edx-platform,zadgroup/edx-platform,Semi-global/edx-platform,kxliugang/edx-platform,proversity-org/edx-platform,chauhanhardik/populo,wwj718/ANALYSE,marcore/edx-platform,Softmotions/edx-platform,rue89-tech/edx-platform,cyanna/edx-platform,ak2703/edx-platform,xinjiguaike/edx-platform,shashank971/edx-platform,jazkarta/edx-platform,andyzsf/edx,andyzsf/edx,appliedx/edx-platform,zofuthan/edx-platform,chauhanhardik/populo,kamalx/edx-platform,nanolearningllc/edx-platform-cypress,iivic/BoiseStateX,dsajkl/reqiop,rhndg/openedx,zofuthan/edx-platform,pepeportela/edx-platform,Unow/edx-platform,nanolearningllc/edx-platform-cypress-2,ZLLab-Mooc/edx-platform,vismartltd/edx-platform,doismellburning/edx-platform,edry/edx-platform,jazztpt/edx-platform,doganov/edx-platform,hamzehd/edx-platform,tiagochiavericosta/edx-platform,valtech-mooc/edx-platform,shubhdev/edxOnBaadal,jruiperezv/ANALYSE,simbs/edx-platform,motion2015/edx-platform,jonathan-beard/edx-platform,etzhou/edx-platform,arbrandes/edx-platform,eduNEXT/edunext-platform,Livit/Livit.Learn.EdX,caesar2164/edx-platform,SivilTaram/edx-platform,BehavioralInsightsTeam/edx-platform,cpennington/edx-platform,longmen21/edx-platform,Unow/edx-platform,bigdatauniversity/edx-platform,Stanford-Online/edx-platform,zofuthan/edx-platform,eduNEXT/edx-platform,franosincic/edx-platform,WatanabeYasumasa/edx-platform,ampax/edx-platform-backup,tanmaykm/edx-platform,mbareta/edx-platform-ft,kmoocdev/edx-platform,valtech-mooc/edx-platform,10clouds/edx-platform,louyihua/edx-platform,louyihua/edx-platform,chand3040/cloud_that,mahendra-r/edx-platform,Kalyzee/edx-platform,10clouds/edx-platform,Livit/Livit.Learn.EdX,dsajkl/reqiop,auferack08/edx-platform,waheedahmed/edx-platform,SravanthiSinha/edx-platform,andyzsf/edx,shubhdev/openedx,leansoft/edx-platform,iivic/BoiseStateX,peterm-itr/edx-platform,jolyonb/edx-platform,defance/edx-platform,chrisndodge/edx-platform,xingyepei/edx-platform,chudaol/edx-platform,vismartltd/edx-platform,gymnasium/edx-platform,deepsrijit1105/edx-platform,jswope00/griffinx,msegado/edx-platform,nttks/jenkins-test,IONISx/edx-platform,procangroup/edx-platform,msegado/edx-platform,J861449197/edx-platform,chudaol/edx-platform,polimediaupv/edx-platform,pabloborrego93/edx-platform,shubhdev/edxOnBaadal,shashank971/edx-platform,gymnasium/edx-platform,shubhdev/edx-platform,morenopc/edx-platform,ZLLab-Mooc/edx-platform,LICEF/edx-platform,morenopc/edx-platform,ahmadio/edx-platform,zhenzhai/edx-platform,valtech-mooc/edx-platform,auferack08/edx-platform,cognitiveclass/edx-platform,AkA84/edx-platform,shubhdev/edxOnBaadal,shubhdev/edx-platform,Edraak/edx-platform,pomegranited/edx-platform,alexthered/kienhoc-platform,JioEducation/edx-platform,TeachAtTUM/edx-platform,Edraak/edraak-platform,RPI-OPENEDX/edx-platform,LearnEra/LearnEraPlaftform,nttks/jenkins-test,bitifirefly/edx-platform,wwj718/ANALYSE,raccoongang/edx-platform,halvertoluke/edx-platform,CredoReference/edx-platform,cecep-edu/edx-platform,wwj718/edx-platform,devs1991/test_edx_docmode,antonve/s4-project-mooc,motion2015/edx-platform,ovnicraft/edx-platform,yokose-ks/edx-platform,jonathan-beard/edx-platform,doismellburning/edx-platform,Livit/Livit.Learn.EdX,unicri/edx-platform,naresh21/synergetics-edx-platform,nttks/jenkins-test,4eek/edx-platform,dsajkl/123,chauhanhardik/populo,appsembler/edx-platform,cognitiveclass/edx-platform,ferabra/edx-platform,edx/edx-platform,pomegranited/edx-platform,jamiefolsom/edx-platform,mt
lchun/edx,dsajkl/123,cselis86/edx-platform,shabab12/edx-platform,RPI-OPENEDX/edx-platform,jelugbo/tundex,lduarte1991/edx-platform,edx-solutions/edx-platform,itsjeyd/edx-platform,UOMx/edx-platform,mushtaqak/edx-platform,franosincic/edx-platform,proversity-org/edx-platform,longmen21/edx-platform,philanthropy-u/edx-platform,playm2mboy/edx-platform,utecuy/edx-platform,MakeHer/edx-platform,openfun/edx-platform,BehavioralInsightsTeam/edx-platform,nanolearning/edx-platform,JioEducation/edx-platform,Shrhawk/edx-platform,a-parhom/edx-platform,amir-qayyum-khan/edx-platform,halvertoluke/edx-platform,morenopc/edx-platform,zofuthan/edx-platform,caesar2164/edx-platform,caesar2164/edx-platform,AkA84/edx-platform,jazkarta/edx-platform-for-isc,alexthered/kienhoc-platform,CredoReference/edx-platform,simbs/edx-platform,zerobatu/edx-platform,4eek/edx-platform,kxliugang/edx-platform,jbassen/edx-platform,analyseuc3m/ANALYSE-v1,cyanna/edx-platform,hastexo/edx-platform,Edraak/edx-platform,philanthropy-u/edx-platform,iivic/BoiseStateX,EDUlib/edx-platform,DefyVentures/edx-platform,ahmadiga/min_edx,devs1991/test_edx_docmode,y12uc231/edx-platform,jbassen/edx-platform,kxliugang/edx-platform,hkawasaki/kawasaki-aio8-2,appliedx/edx-platform,kmoocdev/edx-platform,ubc/edx-platform,inares/edx-platform,Lektorium-LLC/edx-platform,tiagochiavericosta/edx-platform,zubair-arbi/edx-platform,Stanford-Online/edx-platform,xuxiao19910803/edx,jbzdak/edx-platform,unicri/edx-platform,franosincic/edx-platform,cyanna/edx-platform,shubhdev/openedx,benpatterson/edx-platform,solashirai/edx-platform,cecep-edu/edx-platform,peterm-itr/edx-platform,ahmedaljazzar/edx-platform,vikas1885/test1,JCBarahona/edX,nikolas/edx-platform,ak2703/edx-platform,DefyVentures/edx-platform,yokose-ks/edx-platform,CourseTalk/edx-platform,eduNEXT/edx-platform,pepeportela/edx-platform,Edraak/edx-platform,xingyepei/edx-platform,BehavioralInsightsTeam/edx-platform,UOMx/edx-platform,bigdatauniversity/edx-platform,MakeHer/edx-platform,inares/edx-platform,jazkarta/edx-platform,hastexo/edx-platform,mtlchun/edx,beacloudgenius/edx-platform,zadgroup/edx-platform,itsjeyd/edx-platform,IndonesiaX/edx-platform,jamiefolsom/edx-platform,teltek/edx-platform,ampax/edx-platform,benpatterson/edx-platform,LICEF/edx-platform,leansoft/edx-platform,rhndg/openedx,xuxiao19910803/edx,EDUlib/edx-platform,iivic/BoiseStateX,chudaol/edx-platform,bigdatauniversity/edx-platform,benpatterson/edx-platform,J861449197/edx-platform,LearnEra/LearnEraPlaftform,sameetb-cuelogic/edx-platform-test,devs1991/test_edx_docmode,abdoosh00/edraak,SravanthiSinha/edx-platform,kamalx/edx-platform,shurihell/testasia,cselis86/edx-platform,martynovp/edx-platform,marcore/edx-platform,xingyepei/edx-platform,hamzehd/edx-platform,antoviaque/edx-platform,morenopc/edx-platform,mitocw/edx-platform,pepeportela/edx-platform,carsongee/edx-platform,cecep-edu/edx-platform,dsajkl/reqiop,hamzehd/edx-platform,ahmadio/edx-platform,beni55/edx-platform,naresh21/synergetics-edx-platform,Shrhawk/edx-platform,y12uc231/edx-platform,jswope00/griffinx,atsolakid/edx-platform,rismalrv/edx-platform,J861449197/edx-platform,SravanthiSinha/edx-platform,angelapper/edx-platform,CourseTalk/edx-platform,bigdatauniversity/edx-platform,B-MOOC/edx-platform,cognitiveclass/edx-platform,edry/edx-platform,dcosentino/edx-platform,nanolearning/edx-platform,kursitet/edx-platform,etzhou/edx-platform,martynovp/edx-platform,alu042/edx-platform,WatanabeYasumasa/edx-platform,torchingloom/edx-platform,arifsetiawan/edx-platform,adoosii/edx-platform,dkarakats/edx-platform,sh
urihell/testasia,J861449197/edx-platform,miptliot/edx-platform,chrisndodge/edx-platform,Ayub-Khan/edx-platform,franosincic/edx-platform,EDUlib/edx-platform,defance/edx-platform,solashirai/edx-platform,edx/edx-platform,motion2015/a3,hkawasaki/kawasaki-aio8-1,dsajkl/123,jbassen/edx-platform,atsolakid/edx-platform,defance/edx-platform,jolyonb/edx-platform,mcgachey/edx-platform,itsjeyd/edx-platform,polimediaupv/edx-platform,msegado/edx-platform,appsembler/edx-platform,jruiperezv/ANALYSE,rismalrv/edx-platform,motion2015/a3,beacloudgenius/edx-platform,romain-li/edx-platform,sameetb-cuelogic/edx-platform-test,abdoosh00/edraak,doganov/edx-platform,a-parhom/edx-platform,ahmedaljazzar/edx-platform,jamesblunt/edx-platform,jswope00/griffinx,Edraak/circleci-edx-platform,MSOpenTech/edx-platform,hkawasaki/kawasaki-aio8-0,UXE/local-edx,jamesblunt/edx-platform,shurihell/testasia,leansoft/edx-platform,inares/edx-platform,motion2015/edx-platform,MakeHer/edx-platform,pomegranited/edx-platform,ampax/edx-platform,dsajkl/123,chrisndodge/edx-platform,chand3040/cloud_that,wwj718/edx-platform,fly19890211/edx-platform,rhndg/openedx,beacloudgenius/edx-platform,andyzsf/edx,shubhdev/openedx,dsajkl/reqiop,UOMx/edx-platform,B-MOOC/edx-platform,eduNEXT/edunext-platform,nttks/edx-platform,valtech-mooc/edx-platform,playm2mboy/edx-platform,fintech-circle/edx-platform,devs1991/test_edx_docmode,chauhanhardik/populo,Ayub-Khan/edx-platform,xuxiao19910803/edx,jzoldak/edx-platform,ahmedaljazzar/edx-platform,WatanabeYasumasa/edx-platform,jazkarta/edx-platform,shashank971/edx-platform,hmcmooc/muddx-platform,kursitet/edx-platform,cpennington/edx-platform,mbareta/edx-platform-ft,jswope00/GAI,ak2703/edx-platform,ubc/edx-platform,ampax/edx-platform,nanolearningllc/edx-platform-cypress,pomegranited/edx-platform,jonathan-beard/edx-platform,gymnasium/edx-platform,dkarakats/edx-platform,appliedx/edx-platform,antonve/s4-project-mooc,alexthered/kienhoc-platform,nanolearning/edx-platform |
14b9ef43fd244d4709d14478ec0714325ca37cdb | tests/builtins/test_sum.py | tests/builtins/test_sum.py | from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class SumTests(TranspileTestCase):
def test_sum_list(self):
self.assertCodeExecution("""
print(sum([1, 2, 3, 4, 5, 6, 7]))
""")
def test_sum_tuple(self):
self.assertCodeExecution("""
print(sum((1, 2, 3, 4, 5, 6, 7)))
""")
def test_sum_iterator(self):
self.assertCodeExecution("""
i = iter([1, 2])
print(sum(i))
print(sum(i))
""")
def test_sum_mix_floats_and_ints(self):
self.assertCodeExecution("""
print(sum([1, 1.414, 2, 3.14159]))
""")
class BuiltinSumFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["sum"]
not_implemented = [
'test_bytearray',
'test_frozenzet',
]
| from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class SumTests(TranspileTestCase):
def test_sum_list(self):
self.assertCodeExecution("""
print(sum([1, 2, 3, 4, 5, 6, 7]))
""")
def test_sum_tuple(self):
self.assertCodeExecution("""
print(sum((1, 2, 3, 4, 5, 6, 7)))
""")
def test_sum_iterator(self):
self.assertCodeExecution("""
i = iter([1, 2])
print(sum(i))
print(sum(i))
""")
def test_sum_mix_floats_and_ints(self):
self.assertCodeExecution("""
print(sum([1, 1.414, 2, 3.14159]))
""")
class BuiltinSumFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["sum"]
not_implemented = [
'test_frozenzet',
]
| Fix unexpected success on sum(bytearray()) | Fix unexpected success on sum(bytearray())
| Python | bsd-3-clause | cflee/voc,cflee/voc,freakboy3742/voc,freakboy3742/voc |
9ff92d0a437e5af08fbf996ed0e3362cbd9cf2c9 | tests/instrumentdb_test.py | tests/instrumentdb_test.py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
'Test the functions in the instrumentdb module.'
import os.path
import unittest as ut
import stripeline.instrumentdb as idb
class TestInstrumentDb(ut.TestCase):
def test_paths(self):
self.assertTrue(os.path.exists(idb.instrument_db_path()))
self.assertTrue(os.path.exists(idb.focal_plane_db_file_name()))
self.assertTrue(os.path.exists(idb.detector_db_file_name()))
self.assertTrue(os.path.exists(idb.scanning_strategy_db_file_name()))
| #!/usr/bin/env python
# -*- encoding: utf-8 -*-
'Test the functions in the instrumentdb module.'
import os.path
import unittest as ut
import stripeline.instrumentdb as idb
class TestInstrumentDb(ut.TestCase):
def test_paths(self):
self.assertTrue(os.path.exists(idb.instrument_db_path()),
'Path "{0}" not found'.format(idb.instrument_db_path()))
for file_name in (idb.focal_plane_db_file_name(),
idb.detector_db_file_name(),
idb.scanning_strategy_db_file_name()):
self.assertTrue(os.path.exists(file_name),
'File "{0}" not found'.format(file_name))
| Print more helpful messages when tests fail | Print more helpful messages when tests fail
| Python | mit | ziotom78/stripeline,ziotom78/stripeline |
7966f771c4b5450625d5247c6bf5369901457d9a | capstone/player/monte_carlo.py | capstone/player/monte_carlo.py | import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility
class MonteCarlo(Player):
name = 'MonteCarlo'
def __init__(self, n_sims=1000):
self.n_sims = n_sims
def __repr__(self):
return type(self).name
def __str__(self):
return type(self).name
def move(self, game):
counter = defaultdict(int)
for i in range(self.n_sims):
for move in game.legal_moves():
new_game = game.copy()
new_game.make_move(move)
while not new_game.is_over():
rand_move = random.choice(new_game.legal_moves())
new_game.make_move(rand_move)
counter[move] += utility(new_game, game.cur_player())
m = Counter(counter).most_common(1)
return m[0][0]
##########
# Player #
##########
def choose_move(self, game):
return self.move(game)
| import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility
class MonteCarlo(Player):
name = 'MonteCarlo'
def __init__(self, n_sims=1000):
self.n_sims = n_sims
def __repr__(self):
return type(self).name
def __str__(self):
return type(self).name
##########
# Player #
##########
def choose_move(self, game):
counter = defaultdict(int)
for i in range(self.n_sims):
for move in game.legal_moves():
new_game = game.copy()
new_game.make_move(move)
while not new_game.is_over():
rand_move = random.choice(new_game.legal_moves())
new_game.make_move(rand_move)
counter[move] += utility(new_game, game.cur_player())
best_move, count = Counter(counter).most_common(1)[0]
return best_move
| Move MonteCarlo move to choose_move | Move MonteCarlo move to choose_move
| Python | mit | davidrobles/mlnd-capstone-code |
8b19a4e275313cf2226f535d3ec10f414e0c6885 | django/__init__.py | django/__init__.py | VERSION = (1, 6, 6, 'alpha', 0)
def get_version(*args, **kwargs):
# Don't litter django/__init__.py with all the get_version stuff.
# Only import if it's actually called.
from django.utils.version import get_version
return get_version(*args, **kwargs)
| VERSION = (1, 6, 6, 'final', 0)
def get_version(*args, **kwargs):
# Don't litter django/__init__.py with all the get_version stuff.
# Only import if it's actually called.
from django.utils.version import get_version
return get_version(*args, **kwargs)
| Update version number for security release. | [1.6.x] Update version number for security release.
| Python | bsd-3-clause | felixjimenez/django,django-nonrel/django,felixjimenez/django,redhat-openstack/django,felixjimenez/django,redhat-openstack/django,redhat-openstack/django,django-nonrel/django,django-nonrel/django,redhat-openstack/django,django-nonrel/django,felixjimenez/django |
d0db4010ca9c6d2a6cbc27ae0029dd1ccfc6de42 | evexml/forms.py | evexml/forms.py | from django import forms
from django.forms.fields import IntegerField, CharField
import evelink.account
class AddAPIForm(forms.Form):
key_id = IntegerField()
v_code = CharField(max_length=64, min_length=1)
def clean(self):
self._clean()
return super(AddAPIForm, self).clean()
def _clean(self):
"""Check the access mask and characters of the supplied keypair.
"""
key_id = self.cleaned_data.get('key_id')
v_code = self.cleaned_data.get('v_code')
if not (key_id and v_code):
return
api = evelink.api.API(api_key=(key_id, v_code))
account = evelink.account.Account(api)
try:
key_info = account.key_info().result
except evelink.api.APIError as error:
self.add_error(None, error.message)
return
if key_info['type'] != 'account':
self.add_error(None, 'The API key should select Character: All')
if key_info['access_mask'] != 4294967295:
self.add_error(None, 'The API key should have full access')
if key_info['expire_ts']:
self.add_error(None, 'The API key should have no expiry checked')
| from django import forms
from django.forms.fields import IntegerField, CharField
import evelink.account
class AddAPIForm(forms.Form):
key_id = IntegerField()
v_code = CharField(max_length=64, min_length=1)
def clean(self):
super(AddAPIForm, self).clean()
self._clean()
return self.cleaned_data
def _clean(self):
"""Check the access mask and characters of the supplied keypair.
"""
key_id = self.cleaned_data.get('key_id')
v_code = self.cleaned_data.get('v_code')
if not (key_id and v_code):
return
api = evelink.api.API(api_key=(key_id, v_code))
account = evelink.account.Account(api)
try:
key_info = account.key_info().result
except evelink.api.APIError as error:
self.add_error(None, error.message)
return
if key_info['type'] != 'account':
self.add_error(None, 'The API key should select Character: All')
if key_info['access_mask'] != 4294967295:
self.add_error(None, 'The API key should have full access')
if key_info['expire_ts']:
self.add_error(None, 'The API key should have no expiry checked')
| Swap order of clean calls | Swap order of clean calls
| Python | mit | randomic/aniauth-tdd,randomic/aniauth-tdd |
ba0ea7491fab383992013a8379592657eedfe1ce | scripts/contrib/model_info.py | scripts/contrib/model_info.py | #!/usr/bin/env python3
import sys
import argparse
import numpy as np
import yaml
DESC = "Prints version and model type from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
def main():
args = parse_args()
model = np.load(args.model)
if S2S_SPECIAL_NODE not in model:
print("No special Marian YAML node found in the model")
exit(1)
yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
if not args.key:
print(yaml_text)
exit(0)
# fix the invalid trailing unicode character '#x0000' added to the YAML
# string by the C++ cnpy library
try:
yaml_node = yaml.load(yaml_text)
except yaml.reader.ReaderError:
yaml_node = yaml.load(yaml_text[:-1])
print(yaml_node[args.key])
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-m", "--model", help="model file", required=True)
parser.add_argument("-k", "--key", help="print value for specific key")
return parser.parse_args()
if __name__ == "__main__":
main()
| #!/usr/bin/env python3
import sys
import argparse
import numpy as np
import yaml
DESC = "Prints keys and values from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
def main():
args = parse_args()
model = np.load(args.model)
if args.special:
if S2S_SPECIAL_NODE not in model:
print("No special Marian YAML node found in the model")
exit(1)
yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
if not args.key:
print(yaml_text)
exit(0)
# fix the invalid trailing unicode character '#x0000' added to the YAML
# string by the C++ cnpy library
try:
yaml_node = yaml.load(yaml_text)
except yaml.reader.ReaderError:
yaml_node = yaml.load(yaml_text[:-1])
print(yaml_node[args.key])
else:
if args.key:
if args.key not in model:
print("Key not found")
exit(1)
print(model[args.key])
else:
for key in model:
print(key)
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-m", "--model", help="model file", required=True)
parser.add_argument("-k", "--key", help="print value for specific key")
parser.add_argument("-s", "--special", action="store_true",
help="print values from special:model.yml node")
return parser.parse_args()
if __name__ == "__main__":
main()
| Add printing value for any key from model.npz | Add printing value for any key from model.npz
| Python | mit | emjotde/amunmt,emjotde/amunmt,marian-nmt/marian-train,emjotde/amunmt,amunmt/marian,emjotde/amunn,amunmt/marian,emjotde/amunn,emjotde/amunmt,marian-nmt/marian-train,emjotde/amunn,marian-nmt/marian-train,emjotde/amunn,marian-nmt/marian-train,emjotde/Marian,marian-nmt/marian-train,emjotde/Marian,amunmt/marian |
48e405f0f2027c82403c96b58023f1308c3f7c14 | model/orderbook.py | model/orderbook.py | # -*- encoding:utf8 -*-
import os
from model.oandapy import oandapy
class OrderBook(object):
def get_latest_orderbook(self, instrument, period, history):
oanda_token = os.environ.get('OANDA_TOKEN')
oanda = oandapy.API(environment="practice", access_token=oanda_token)
orders = oanda.get_orderbook(instrument=instrument)
try:
timeset = orders.keys()
timeset.sort()
timeset.reverse()
target_time = timeset[history]
except:
return None
order = orders[target_time]
order['time'] = target_time
return order
| # -*- encoding:utf8 -*-
import os
from model.oandapy import oandapy
class OrderBook(object):
def get_latest_orderbook(self, instrument, period, history):
oanda_token = os.environ.get('OANDA_TOKEN')
oanda_environment = os.environ.get('OANDA_ENVIRONMENT', 'practice')
oanda = oandapy.API(environment=oanda_environment, access_token=oanda_token)
orders = oanda.get_orderbook(instrument=instrument)
try:
timeset = orders.keys()
timeset.sort()
timeset.reverse()
target_time = timeset[history]
except:
return None
order = orders[target_time]
order['time'] = target_time
return order
| Add oanda environment selector from runtime environments. | Add oanda environment selector from runtime environments.
| Python | mit | supistar/OandaOrderbook,supistar/OandaOrderbook,supistar/OandaOrderbook |
082f366402ca2084542a6306624f1f467297ebae | bin/task_usage_index.py | bin/task_usage_index.py | #!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import numpy as np
import task_usage
def main(data_path, index_path, report_each=10000):
print('Looking for data in "{}"...'.format(data_path))
paths = sorted(glob.glob('{}/**/*.sqlite3'.format(data_path)))
print('Processing {} databases...'.format(len(paths)))
index = []
count = 0
for path in paths:
data = task_usage.count_job_task_samples(path)
for i in range(data.shape[0]):
index.append({
'path': path,
'job': int(data[i, 0]),
'task': int(data[i, 1]),
'length': int(data[i, 2]),
})
count += 1
if count % report_each == 0:
print('Processed: {}'.format(count))
print('Saving into "{}"...'.format(index_path))
with open(index_path, 'w') as file:
json.dump({'index': index}, file, indent=4)
if __name__ == '__main__':
assert(len(sys.argv) == 3)
main(sys.argv[1], sys.argv[2])
| #!/usr/bin/env python3
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import glob, json
import numpy as np
import task_usage
def main(data_path, index_path, report_each=10000):
print('Looking for data in "{}"...'.format(data_path))
paths = sorted(glob.glob('{}/**/*.sqlite3'.format(data_path)))
total = len(paths)
print('Processing {} databases...'.format(total))
index = []
count = 0
for path in paths:
data = task_usage.count_job_task_samples(path)
for i in range(data.shape[0]):
index.append({
'path': path,
'job': int(data[i, 0]),
'task': int(data[i, 1]),
'length': int(data[i, 2]),
})
count += 1
if count % report_each == 0 or count == total:
print('Processed: {} ({:.2f}%)'.format(count, 100 * count / total))
print('Saving into "{}"...'.format(index_path))
with open(index_path, 'w') as file:
json.dump({'index': index}, file, indent=4)
if __name__ == '__main__':
assert(len(sys.argv) == 3)
main(sys.argv[1], sys.argv[2])
| Print the percentage from the task-usage-index script | Print the percentage from the task-usage-index script
| Python | mit | learning-on-chip/google-cluster-prediction |
2e6e7d8ec05f2a760f12f2547730c4707a07ebfa | utils/swift_build_support/tests/test_xcrun.py | utils/swift_build_support/tests/test_xcrun.py | # test_xcrun.py - Unit tests for swift_build_support.xcrun -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
import platform
import unittest
from swift_build_support import xcrun
class FindTestCase(unittest.TestCase):
def setUp(self):
if platform.system() != 'Darwin':
self.skipTest('XCRun tests should only be run on OS X')
def test_when_tool_not_found_returns_none(self):
self.assertIsNone(xcrun.find(
toolchain='default', tool='a-tool-that-isnt-on-osx'))
def test_when_tool_found_returns_path(self):
self.assertTrue(xcrun.find(
toolchain='default', tool='clang').endswith('/clang'))
if __name__ == '__main__':
unittest.main()
| # test_xcrun.py - Unit tests for swift_build_support.xcrun -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
import platform
import unittest
from swift_build_support import xcrun
class FindTestCase(unittest.TestCase):
def setUp(self):
if platform.system() != 'Darwin':
self.skipTest('xcrun tests should only be run on OS X')
def test_when_tool_not_found_returns_none(self):
self.assertIsNone(xcrun.find(
toolchain='default', tool='a-tool-that-isnt-on-osx'))
def test_when_tool_found_returns_path(self):
self.assertTrue(xcrun.find(
toolchain='default', tool='clang').endswith('/clang'))
if __name__ == '__main__':
unittest.main()
| Fix typo XCRun -> xcrun | [gardening][build-script] Fix typo XCRun -> xcrun | Python | apache-2.0 | nathawes/swift,xedin/swift,apple/swift,aschwaighofer/swift,practicalswift/swift,glessard/swift,uasys/swift,rudkx/swift,huonw/swift,roambotics/swift,Jnosh/swift,bitjammer/swift,shahmishal/swift,lorentey/swift,modocache/swift,manavgabhawala/swift,KrishMunot/swift,devincoughlin/swift,nathawes/swift,atrick/swift,zisko/swift,swiftix/swift,frootloops/swift,austinzheng/swift,allevato/swift,jopamer/swift,deyton/swift,arvedviehweger/swift,djwbrown/swift,dreamsxin/swift,modocache/swift,CodaFi/swift,tjw/swift,ahoppen/swift,johnno1962d/swift,hughbe/swift,kstaring/swift,JGiola/swift,deyton/swift,jopamer/swift,gottesmm/swift,codestergit/swift,practicalswift/swift,apple/swift,calebd/swift,tkremenek/swift,russbishop/swift,ben-ng/swift,aschwaighofer/swift,jopamer/swift,gmilos/swift,devincoughlin/swift,gribozavr/swift,Jnosh/swift,stephentyrone/swift,johnno1962d/swift,parkera/swift,manavgabhawala/swift,swiftix/swift,Jnosh/swift,milseman/swift,OscarSwanros/swift,JGiola/swift,manavgabhawala/swift,devincoughlin/swift,codestergit/swift,russbishop/swift,harlanhaskins/swift,ben-ng/swift,sschiau/swift,codestergit/swift,OscarSwanros/swift,jtbandes/swift,hooman/swift,shajrawi/swift,tkremenek/swift,brentdax/swift,milseman/swift,bitjammer/swift,xedin/swift,tjw/swift,KrishMunot/swift,felix91gr/swift,tinysun212/swift-windows,atrick/swift,amraboelela/swift,tinysun212/swift-windows,shajrawi/swift,frootloops/swift,tkremenek/swift,airspeedswift/swift,zisko/swift,shahmishal/swift,tjw/swift,uasys/swift,hooman/swift,harlanhaskins/swift,tinysun212/swift-windows,gmilos/swift,parkera/swift,therealbnut/swift,xedin/swift,sschiau/swift,milseman/swift,KrishMunot/swift,lorentey/swift,johnno1962d/swift,return/swift,kstaring/swift,JaSpa/swift,SwiftAndroid/swift,kperryua/swift,uasys/swift,zisko/swift,milseman/swift,OscarSwanros/swift,aschwaighofer/swift,harlanhaskins/swift,dduan/swift,stephentyrone/swift,gottesmm/swift,jmgc/swift,jopamer/swift,xwu/swift,allevato/swift,codestergit/swift,swiftix/swift,zisko/swift,danielmartin/swift,return/swift,hooman/swift,calebd/swift,harlanhaskins/swift,xedin/swift,calebd/swift,natecook1000/swift,xwu/swift,gmilos/swift,JGiola/swift,bitjammer/swift,dduan/swift,ben-ng/swift,manavgabhawala/swift,danielmartin/swift,lorentey/swift,roambotics/swift,allevato/swift,lorentey/swift,zisko/swift,shahmishal/swift,tjw/swift,benlangmuir/swift,tardieu/swift,gribozavr/swift,gregomni/swift,roambotics/swift,milseman/swift,shahmishal/swift,gmilos/swift,roambotics/swift,tjw/swift,lorentey/swift,jckarter/swift,amraboelela/swift,JGiola/swift,benlangmuir/swift,practicalswift/swift,jtbandes/swift,SwiftAndroid/swift,aschwaighofer/swift,SwiftAndroid/swift,return/swift,milseman/swift,alblue/swift,airspeedswift/swift,OscarSwanros/swift,milseman/swift,benlangmuir/swift,zisko/swift,devincoughlin/swift,brentdax/swift,karwa/swift,jopamer/swift,gregomni/swift,rudkx/swift,benlangmuir/swift,return/swift,ken0nek/swift,frootloops/swift,djwbrown/swift,jmgc/swift,SwiftAndroid/swift,IngmarStein/swift,kstaring/swift,practicalswift/swift,tardieu/swift,rudkx/swift,felix91gr/swift,apple/swift,atrick/swift,dduan/swift,bitjammer/swift,johnno1962d/swift,tinysun212/swift-windows,shajrawi/swift,ken0nek/swift,alblue/swift,KrishMunot/swift,aschwaighofer/swift,gribozavr/swift,gregomni/swift,dduan/swift,zisko/swift,hughbe/swift,atrick/swift,russbishop/swift,IngmarStein/swift,huonw/swift,jopamer/swift,kperryua/swift,tjw/swift,ben-ng/swift,kperryua/swift,djwbr
own/swift,nathawes/swift,karwa/swift,danielmartin/swift,calebd/swift,felix91gr/swift,IngmarStein/swift,jtbandes/swift,modocache/swift,kstaring/swift,johnno1962d/swift,bitjammer/swift,uasys/swift,airspeedswift/swift,arvedviehweger/swift,huonw/swift,apple/swift,apple/swift,manavgabhawala/swift,austinzheng/swift,airspeedswift/swift,atrick/swift,stephentyrone/swift,glessard/swift,CodaFi/swift,alblue/swift,danielmartin/swift,jtbandes/swift,tinysun212/swift-windows,calebd/swift,tardieu/swift,benlangmuir/swift,austinzheng/swift,nathawes/swift,felix91gr/swift,uasys/swift,tinysun212/swift-windows,hughbe/swift,modocache/swift,natecook1000/swift,austinzheng/swift,arvedviehweger/swift,stephentyrone/swift,danielmartin/swift,shahmishal/swift,amraboelela/swift,swiftix/swift,tardieu/swift,bitjammer/swift,gottesmm/swift,ken0nek/swift,CodaFi/swift,devincoughlin/swift,amraboelela/swift,alblue/swift,jckarter/swift,gottesmm/swift,jckarter/swift,apple/swift,gregomni/swift,Jnosh/swift,return/swift,KrishMunot/swift,gmilos/swift,amraboelela/swift,lorentey/swift,hooman/swift,therealbnut/swift,jtbandes/swift,djwbrown/swift,ben-ng/swift,ahoppen/swift,frootloops/swift,parkera/swift,swiftix/swift,gribozavr/swift,hooman/swift,ken0nek/swift,ahoppen/swift,uasys/swift,amraboelela/swift,kperryua/swift,xedin/swift,devincoughlin/swift,austinzheng/swift,russbishop/swift,alblue/swift,karwa/swift,kstaring/swift,ahoppen/swift,gregomni/swift,stephentyrone/swift,tardieu/swift,OscarSwanros/swift,dduan/swift,deyton/swift,shajrawi/swift,Jnosh/swift,calebd/swift,karwa/swift,rudkx/swift,benlangmuir/swift,gribozavr/swift,JaSpa/swift,sschiau/swift,lorentey/swift,huonw/swift,manavgabhawala/swift,CodaFi/swift,tardieu/swift,JaSpa/swift,deyton/swift,shajrawi/swift,shahmishal/swift,nathawes/swift,russbishop/swift,brentdax/swift,devincoughlin/swift,karwa/swift,deyton/swift,jmgc/swift,rudkx/swift,codestergit/swift,xwu/swift,allevato/swift,OscarSwanros/swift,nathawes/swift,danielmartin/swift,glessard/swift,russbishop/swift,KrishMunot/swift,gmilos/swift,atrick/swift,Jnosh/swift,jckarter/swift,CodaFi/swift,kperryua/swift,gribozavr/swift,hughbe/swift,russbishop/swift,jmgc/swift,gribozavr/swift,codestergit/swift,airspeedswift/swift,jtbandes/swift,shajrawi/swift,gmilos/swift,brentdax/swift,JaSpa/swift,rudkx/swift,stephentyrone/swift,practicalswift/swift,CodaFi/swift,IngmarStein/swift,gregomni/swift,natecook1000/swift,IngmarStein/swift,return/swift,shajrawi/swift,danielmartin/swift,devincoughlin/swift,modocache/swift,brentdax/swift,felix91gr/swift,codestergit/swift,SwiftAndroid/swift,tkremenek/swift,jmgc/swift,gottesmm/swift,brentdax/swift,shahmishal/swift,lorentey/swift,tinysun212/swift-windows,frootloops/swift,sschiau/swift,IngmarStein/swift,karwa/swift,SwiftAndroid/swift,SwiftAndroid/swift,xedin/swift,arvedviehweger/swift,modocache/swift,airspeedswift/swift,huonw/swift,gottesmm/swift,ben-ng/swift,calebd/swift,practicalswift/swift,hughbe/swift,gribozavr/swift,practicalswift/swift,hughbe/swift,therealbnut/swift,karwa/swift,austinzheng/swift,CodaFi/swift,uasys/swift,johnno1962d/swift,djwbrown/swift,allevato/swift,JaSpa/swift,amraboelela/swift,manavgabhawala/swift,jckarter/swift,parkera/swift,tkremenek/swift,xwu/swift,tardieu/swift,jckarter/swift,natecook1000/swift,nathawes/swift,therealbnut/swift,parkera/swift,ahoppen/swift,kstaring/swift,sschiau/swift,felix91gr/swift,arvedviehweger/swift,aschwaighofer/swift,dduan/swift,JaSpa/swift,hooman/swift,allevato/swift,swiftix/swift,JaSpa/swift,ken0nek/swift,jtbandes/swift,sschiau/swift,therealbnut/swift,natecook
1000/swift,parkera/swift,IngmarStein/swift,tjw/swift,jckarter/swift,tkremenek/swift,kperryua/swift,xwu/swift,KrishMunot/swift,shajrawi/swift,xedin/swift,airspeedswift/swift,xwu/swift,jopamer/swift,JGiola/swift,parkera/swift,gottesmm/swift,ken0nek/swift,JGiola/swift,harlanhaskins/swift,huonw/swift,harlanhaskins/swift,aschwaighofer/swift,johnno1962d/swift,therealbnut/swift,karwa/swift,bitjammer/swift,dduan/swift,sschiau/swift,huonw/swift,deyton/swift,xedin/swift,natecook1000/swift,glessard/swift,kperryua/swift,alblue/swift,allevato/swift,Jnosh/swift,frootloops/swift,glessard/swift,tkremenek/swift,ken0nek/swift,swiftix/swift,djwbrown/swift,natecook1000/swift,kstaring/swift,alblue/swift,return/swift,austinzheng/swift,hughbe/swift,glessard/swift,felix91gr/swift,ahoppen/swift,frootloops/swift,brentdax/swift,ben-ng/swift,shahmishal/swift,djwbrown/swift,arvedviehweger/swift,parkera/swift,deyton/swift,stephentyrone/swift,jmgc/swift,arvedviehweger/swift,roambotics/swift,modocache/swift,xwu/swift,hooman/swift,roambotics/swift,therealbnut/swift,OscarSwanros/swift,sschiau/swift,harlanhaskins/swift,dreamsxin/swift,practicalswift/swift,jmgc/swift |
0fe990cf476dcd0cdea56c39de1dad6003d81851 | statbot/mention.py | statbot/mention.py | #
# mention.py
#
# statbot - Store Discord records for later analysis
# Copyright (c) 2017 Ammon Smith
#
# statbot is available free of charge under the terms of the MIT
# License. You are free to redistribute and/or modify it under those
# terms. It is distributed in the hopes that it will be useful, but
# WITHOUT ANY WARRANTY. See the LICENSE file for more details.
#
from enum import auto, Enum
__all__ = [
'MentionType',
]
class MentionType(Enum):
USER = auto()
ROLE = auto()
CHANNEL = auto()
| #
# mention.py
#
# statbot - Store Discord records for later analysis
# Copyright (c) 2017 Ammon Smith
#
# statbot is available free of charge under the terms of the MIT
# License. You are free to redistribute and/or modify it under those
# terms. It is distributed in the hopes that it will be useful, but
# WITHOUT ANY WARRANTY. See the LICENSE file for more details.
#
from enum import Enum
__all__ = [
'MentionType',
]
class MentionType(Enum):
USER = 0
ROLE = 1
CHANNEL = 2
| Change MentionType to use fixed enum values. | Change MentionType to use fixed enum values.
| Python | mit | strinking/statbot,strinking/statbot |
06c2fe1bd836f4adfcff4eb35cc29203e10a729d | blinkytape/animation.py | blinkytape/animation.py | # TBD: Some animations mutate a pattern: shift it, fade it, etc.
# Not all animations need a pattern
# I need a rainbow pattern for fun
# TBD: How do you do random pixels? is it a pattern that is permuted by the
# animation? YES; patterns are static, animations do things with patterns,
# rotate them, scramble them, scale them, sort them, etcetera
class Animation(object):
def __init__(self, frame_period_sec):
if frame_period_sec < 0: raise ValueError
self._frame_period_sec = frame_period_sec
@property
def frame_period_sec(self):
return self._frame_period_sec
def begin(self):
pass
def next_frame(self):
pass
def end(self):
pass
| # TBD: Some animations mutate a pattern: shift it, fade it, etc.
# Not all animations need a pattern
# I need a rainbow pattern for fun
# TBD: How do you do random pixels? is it a pattern that is permuted by the
# animation? YES; patterns are static, animations do things with patterns,
# rotate them, scramble them, scale them, sort them, etcetera
class Animation(object):
def __init__(self, frame_period_sec):
if frame_period_sec < 0: raise ValueError
self._frame_period_sec = frame_period_sec
@property
def frame_period_sec(self):
return self._frame_period_sec
@property
def finished(self):
raise NotImplementedError('Animation must implement finished property')
def begin(self):
pass
def next_frame(self):
raise NotImplementedError('Animation must implement next_frame method')
def end(self):
pass
| Add abstract method exceptions to make Animation inheritance easier | Add abstract method exceptions to make Animation inheritance easier
| Python | mit | jonspeicher/blinkyfun |
c81a94568f12fca42c1cce1237c128c3123f6f73 | gunicorn_config.py | gunicorn_config.py | bind = ':5000'
workers = 2
errorlog = '/var/log/hgprofiler_gunicorn_error.log'
loglevel = 'info'
accesslog = '/var/log/hgprofiler_gunicorn_access.log'
| bind = ':5000'
workers = 4
errorlog = '/var/log/hgprofiler_gunicorn_error.log'
loglevel = 'info'
accesslog = '/var/log/hgprofiler_gunicorn_access.log'
| Set gunicorn workers to 4 | Set gunicorn workers to 4
| Python | apache-2.0 | TeamHG-Memex/hgprofiler,TeamHG-Memex/hgprofiler,TeamHG-Memex/hgprofiler,TeamHG-Memex/hgprofiler |
f2d34fa3153448ab6a893fba45ae48b52d7759db | chipy_org/apps/profiles/urls.py | chipy_org/apps/profiles/urls.py | from django.conf.urls.defaults import *
from django.contrib.auth.decorators import login_required
from profiles.views import (ProfilesList,
ProfileEdit,
)
urlpatterns = patterns("",
url(r'^list/$', ProfilesList.as_view(), name='list'),
url(r'^edit/$', ProfileEdit.as_view(), name='edit'),
)
| from django.conf.urls.defaults import *
from django.contrib.auth.decorators import login_required
from .views import ProfilesList, ProfileEdit
urlpatterns = patterns("",
url(r'^list/$', ProfilesList.as_view(), name='list'),
url(r'^edit/$', login_required(ProfileEdit).as_view(), name='edit'),
)
| Add login required for profile edit | Add login required for profile edit
| Python | mit | agfor/chipy.org,brianray/chipy.org,chicagopython/chipy.org,bharathelangovan/chipy.org,bharathelangovan/chipy.org,chicagopython/chipy.org,bharathelangovan/chipy.org,chicagopython/chipy.org,tanyaschlusser/chipy.org,agfor/chipy.org,tanyaschlusser/chipy.org,tanyaschlusser/chipy.org,brianray/chipy.org,chicagopython/chipy.org,brianray/chipy.org,agfor/chipy.org |
3f236d74615dced53c57628ae1b5f2c74f9e1de5 | examples/rate_limiting_test.py | examples/rate_limiting_test.py | from seleniumbase import BaseCase
from seleniumbase.common import decorators
class MyTestClass(BaseCase):
@decorators.rate_limited(3.5) # The arg is max calls per second
def print_item(self, item):
print(item)
def test_rate_limited_printing(self):
print("\nRunning rate-limited print test:")
for item in xrange(1, 11):
self.print_item(item)
| """
This test demonstrates the use of the "rate_limited" decorator.
You can use this decorator on any method to rate-limit it.
"""
import unittest
from seleniumbase.common import decorators
class MyTestClass(unittest.TestCase):
@decorators.rate_limited(3.5) # The arg is max calls per second
def print_item(self, item):
print(item)
def test_rate_limited_printing(self):
print("\nRunning rate-limited print test:")
for item in xrange(1, 11):
self.print_item(item)
| Update the rate_limited decorator test | Update the rate_limited decorator test
| Python | mit | seleniumbase/SeleniumBase,possoumous/Watchers,possoumous/Watchers,mdmintz/SeleniumBase,possoumous/Watchers,ktp420/SeleniumBase,seleniumbase/SeleniumBase,ktp420/SeleniumBase,mdmintz/SeleniumBase,ktp420/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/seleniumspot,ktp420/SeleniumBase,mdmintz/seleniumspot,seleniumbase/SeleniumBase,possoumous/Watchers |
2a23e72f7ad01976bcd80aa91f89882e2a37cbf6 | test/test_model.py | test/test_model.py | # coding: utf-8
import os, sys
sys.path.append(os.path.join(sys.path[0], '..'))
from carlo import model, entity, generate
def test_minimal_model():
m = model(entity('const', {'int': lambda: 42})).build()
assert [('const', {'int': 42})] == m.create()
m = model(entity('const2', {'str': lambda: 'hello'})).build()
assert [('const2', {'str': 'hello'})] == m.create()
def test_model_with_multiple_entities():
m = model(
entity('first', {'name': lambda: 'elves'}),
entity('second', {'name': lambda: 'humans'})).build()
assert [('first', {'name': 'elves'}),
('second', {'name': 'humans'})] == m.create()
def test_model_with_multiple_params():
m = model(entity('human', {
'head': lambda: 1,
'hands': lambda: 2,
'name': lambda: 'Hurin',
})).build()
assert [('human', {'head': 1, 'hands': 2, 'name': 'Hurin'})] == m.create()
| # coding: utf-8
import os, sys
sys.path.append(os.path.join(sys.path[0], '..'))
from carlo import model, entity, generate
def test_minimal_model():
m = model(entity('const', {'int': lambda: 42})).build()
assert [('const', {'int': 42})] == m.create()
m = model(entity('const2', {'str': lambda: 'hello'})).build()
assert [('const2', {'str': 'hello'})] == m.create()
def test_model_with_multiple_entities():
m = model(
entity('first', {'name': lambda: 'elves'}),
entity('second', {'name': lambda: 'humans'})).build()
assert [('first', {'name': 'elves'}),
('second', {'name': 'humans'})] == m.create()
def test_model_with_multiple_params():
m = model(entity('human', {
'head': lambda: 1,
'hands': lambda: 2,
'name': lambda: 'Hurin',
})).build()
assert [('human', {'head': 1, 'hands': 2, 'name': 'Hurin'})] == m.create()
# error handling
def test_same_enitities_should_throw_error():
pass
def test_same_params_should_throw_error():
pass
| Test blueprints for corner cases | Test blueprints for corner cases
| Python | mit | ahitrin/carlo |
b6fc4a8db76b3aad100c6e40ab1b0fb9977dfd0d | changes/api/project_index.py | changes/api/project_index.py | from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectIndexAPIView(APIView):
def get(self):
queryset = Project.query.order_by(Project.name.asc())
# queryset = Build.query.options(
# joinedload(Build.project),
# joinedload(Build.author),
# ).order_by(Build.date_created.desc(), Build.date_started.desc())
# if change:
# queryset = queryset.filter_by(change=change)
project_list = list(queryset)
context = {
'projects': project_list,
}
return self.respond(context)
def get_stream_channels(self):
return []
| from __future__ import absolute_import, division, unicode_literals
from sqlalchemy.orm import joinedload
from changes.api.base import APIView
from changes.models import Project, Build
class ProjectIndexAPIView(APIView):
def get(self):
queryset = Project.query.order_by(Project.name.asc())
project_list = list(queryset)
context = {
'projects': [],
}
for project in project_list:
data = self.serialize(project)
data['recentBuilds'] = list(Build.query.options(
joinedload(Build.project),
joinedload(Build.author),
).filter_by(
project=project,
).order_by(
Build.date_created.desc(),
)[:3])
context['projects'].append(data)
return self.respond(context)
def get_stream_channels(self):
return ['builds:*']
| Add recentBuilds and stream to project index | Add recentBuilds and stream to project index
| Python | apache-2.0 | wfxiang08/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,dropbox/changes |