commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
b4e3461277669bf42225d278d491b7c714968491 | vm_server/test/execute_macro/code/execute.py | vm_server/test/execute_macro/code/execute.py | #!/usr/bin/python
"""Program to execute a VBA macro in MS Excel
"""
import os
import shutil
import win32com.client
import pythoncom
import repackage
repackage.up()
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
path_to_file = current_path + "\\action\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
shutil.move(path_to_file, current_path +
"\\action\\output\\excelsheet.xlsm")
shutil.move(current_path + "\\action\\data\\output.txt", current_path +
"\\action\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
| #!/usr/bin/python
"""Program to execute a VBA macro in MS Excel
"""
import os
import shutil
import win32com.client
import pythoncom
def execute_macro():
"""Execute VBA macro in MS Excel
"""
pythoncom.CoInitialize()
current_path = os.path.dirname(os.getcwd())
path_to_file = ".\\data\\excelsheet.xlsm"
if os.path.exists(path_to_file):
xl_file = win32com.client.Dispatch("Excel.Application")
xl_run = xl_file.Workbooks.Open(os.path.abspath(path_to_file),
ReadOnly=1)
xl_run.Application.Run("excelsheet.xlsm!Module1.add_numbers_in_column") #execute macro
xl_run.Save()
xl_run.Close()
xl_file.Quit()
del xl_file
shutil.move(path_to_file, ".\\output\\excelsheet.xlsm")
shutil.move(".\\data\\output.txt", ".\\output\\output.txt")
print("Action successfully executed")
if __name__ == "__main__":
execute_macro()
| Modify excel screenshot test so that it works with the new directory structure | Modify excel screenshot test so that it works with the new directory structure
| Python | apache-2.0 | googleinterns/automated-windows-vms,googleinterns/automated-windows-vms |
c242ad95221c9c5b2f76795abd7dcbad5145cb2a | datagrid_gtk3/tests/utils/test_transformations.py | datagrid_gtk3/tests/utils/test_transformations.py | """Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
| """Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
degree_decimal_str_transform('1234567'),
'1.234567',
)
self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
self.assertEqual(
degree_decimal_str_transform('12345'),
'0.012345',
)
| Add more test cases to verify transformer behavior | Add more test cases to verify transformer behavior
| Python | mit | nowsecure/datagrid-gtk3,jcollado/datagrid-gtk3 |
5d6a96acd8018bc0c4ecbb684d6ebc17752c2796 | website_parameterized_snippet/__openerp__.py | website_parameterized_snippet/__openerp__.py | # -*- coding: utf-8 -*-
# © 2016 Therp BV <http://therp.nl>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Parameterize snippets",
"version": "8.0.1.0.0",
"author": "Therp BV,"
"Acsone SA/NV,"
"Odoo Community Association (OCA)",
"license": "AGPL-3",
"category": "Website",
"installable": True,
"application": False,
}
| # -*- coding: utf-8 -*-
# © 2016 Therp BV <http://therp.nl>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Parameterize snippets",
"version": "8.0.1.0.0",
"author": "Therp BV,"
"Acsone SA/NV,"
"Odoo Community Association (OCA).",
"license": "AGPL-3",
"depends": ['website'],
"category": "Website",
"installable": True,
"application": False,
}
| Add dependency to website (not necessary before inheriting website.qweb.field.html, but makes sense anyway. | Add dependency to website (not necessary before inheriting website.qweb.field.html, but makes sense anyway.
| Python | agpl-3.0 | brain-tec/website,open-synergy/website,gfcapalbo/website,LasLabs/website,acsone/website,LasLabs/website,gfcapalbo/website,acsone/website,brain-tec/website,gfcapalbo/website,acsone/website,acsone/website,open-synergy/website,open-synergy/website,brain-tec/website,LasLabs/website,open-synergy/website,brain-tec/website,LasLabs/website,gfcapalbo/website |
0d6d645f500f78f290d20f54cd94ca8614b1803a | server/dummy/dummy_server.py | server/dummy/dummy_server.py | #!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def do_POST(self):
print '\n---> dummy server: got post!'
print 'command:', self.command
print 'path:', self.path
print 'headers:\n\n', self.headers
content_length = int(self.headers['Content-Length'])
content = self.rfile.read(content_length)
print 'content:\n\n', content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = BaseHTTPServer.HTTPServer(server_address, JsonPostResponder)
httpd.serve_forever()
| #!/usr/bin/env python
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def do_POST(self):
print '\n---> dummy server: got post!'
print 'command:', self.command
print 'path:', self.path
print 'headers:\n\n', self.headers
content_length = int(self.headers['Content-Length'])
content = self.rfile.read(content_length)
print 'content:\n\n', content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
| Use ServerClass definition for server creation | Use ServerClass definition for server creation
| Python | mit | jonspeicher/Puddle,jonspeicher/Puddle,jonspeicher/Puddle |
e70780358dd5cf64ee51b590be1b69dc25a214fb | cla_backend/apps/cla_eventlog/management/commands/find_and_delete_old_cases.py | cla_backend/apps/cla_eventlog/management/commands/find_and_delete_old_cases.py | from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
self._setup()
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) == 0:
print(cases.count())
elif args[0] == "test_find":
return cases
elif args[0] == "delete":
instance.run()
| from django.core.management.base import BaseCommand
from dateutil.relativedelta import relativedelta
from legalaid.models import Case
from cla_butler.tasks import DeleteOldData
class FindAndDeleteCasesUsingCreationTime(DeleteOldData):
def get_eligible_cases(self):
self._setup()
two_years = self.now - relativedelta(years=2)
return Case.objects.filter(created__lte=two_years).exclude(log__created__gte=two_years)
class Command(BaseCommand):
help = (
"Find or delete cases that are 2 years old or over that were not deleted prior to the task command being fixed"
)
def handle(self, *args, **kwargs):
instance = FindAndDeleteCasesUsingCreationTime()
cases = instance.get_eligible_cases()
if len(args) == 0:
print("Number of cases to be deleted: " + str(cases.count()))
elif args[0] == "test_find":
return cases
elif args[0] == "delete":
instance.run()
| Make delete command message more meaningful | Make delete command message more meaningful | Python | mit | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend |
8683e4317875d99281b79904efc81af8b15614e6 | pycon/sponsorship/managers.py | pycon/sponsorship/managers.py | from django.db import models
class SponsorManager(models.Manager):
def active(self):
return self.get_query_set().filter(active=True).order_by("level")
def with_weblogo(self):
queryset = self.raw("""
SELECT DISTINCT
"sponsorship_sponsor"."id",
"sponsorship_sponsor"."applicant_id",
"sponsorship_sponsor"."name",
"sponsorship_sponsor"."external_url",
"sponsorship_sponsor"."annotation",
"sponsorship_sponsor"."contact_name",
"sponsorship_sponsor"."contact_emails",
"sponsorship_sponsor"."level_id",
"sponsorship_sponsor"."added",
"sponsorship_sponsor"."active",
"sponsorship_sponsorlevel"."order"
FROM
"sponsorship_sponsor"
INNER JOIN
"sponsorship_sponsorbenefit" ON ("sponsorship_sponsor"."id" = "sponsorship_sponsorbenefit"."sponsor_id")
INNER JOIN
"sponsorship_benefit" ON ("sponsorship_sponsorbenefit"."benefit_id" = "sponsorship_benefit"."id")
LEFT OUTER JOIN
"sponsorship_sponsorlevel" ON ("sponsorship_sponsor"."level_id" = "sponsorship_sponsorlevel"."id")
WHERE (
"sponsorship_sponsor"."active" = 't' AND
"sponsorship_benefit"."type" = 'weblogo' AND
"sponsorship_sponsorbenefit"."upload" != ''
)
ORDER BY "sponsorship_sponsorlevel"."order" ASC, "sponsorship_sponsor"."added" ASC
""")
return queryset
| from django.db import models
class SponsorManager(models.Manager):
def active(self):
return self.get_queryset().filter(active=True).order_by("level")
def with_weblogo(self):
queryset = self.raw("""
SELECT DISTINCT
"sponsorship_sponsor"."id",
"sponsorship_sponsor"."applicant_id",
"sponsorship_sponsor"."name",
"sponsorship_sponsor"."external_url",
"sponsorship_sponsor"."annotation",
"sponsorship_sponsor"."contact_name",
"sponsorship_sponsor"."contact_emails",
"sponsorship_sponsor"."level_id",
"sponsorship_sponsor"."added",
"sponsorship_sponsor"."active",
"sponsorship_sponsorlevel"."order"
FROM
"sponsorship_sponsor"
INNER JOIN
"sponsorship_sponsorbenefit" ON ("sponsorship_sponsor"."id" = "sponsorship_sponsorbenefit"."sponsor_id")
INNER JOIN
"sponsorship_benefit" ON ("sponsorship_sponsorbenefit"."benefit_id" = "sponsorship_benefit"."id")
LEFT OUTER JOIN
"sponsorship_sponsorlevel" ON ("sponsorship_sponsor"."level_id" = "sponsorship_sponsorlevel"."id")
WHERE (
"sponsorship_sponsor"."active" = 't' AND
"sponsorship_benefit"."type" = 'weblogo' AND
"sponsorship_sponsorbenefit"."upload" != ''
)
ORDER BY "sponsorship_sponsorlevel"."order" ASC, "sponsorship_sponsor"."added" ASC
""")
return queryset
| Fix one more get_query_set to get_queryset | Fix one more get_query_set to get_queryset
| Python | bsd-3-clause | PyCon/pycon,njl/pycon,njl/pycon,PyCon/pycon,njl/pycon,Diwahars/pycon,Diwahars/pycon,Diwahars/pycon,njl/pycon,PyCon/pycon,Diwahars/pycon,PyCon/pycon |
e8d56e5c47b370d1e4fcc3ccf575580d35a22dc8 | tx_salaries/management/commands/import_salary_data.py | tx_salaries/management/commands/import_salary_data.py | from os.path import basename
import sys
from django.core.management.base import BaseCommand
from ...utils import to_db, transformer
# TODO: Display help if unable to transform a file
# TODO: Switch to logging rather than direct output
class Command(BaseCommand):
def handle(self, *args, **kwargs):
verbosity = kwargs.get('verbosity', 1)
if len(args) is 0:
sys.stderr.write('Must provide at least one file to process')
return
if verbosity >= 2:
print "Number of file(s) to process: {num_of_files}".format(
num_of_files=len(args))
for filename in args:
records = transformer.transform(filename)
if verbosity >= 2:
print "Processing %d records from %s" % (len(records),
basename(filename))
for record in records:
to_db.save(record)
if verbosity >= 2:
sys.stdout.write('.')
sys.stdout.flush()
| from os.path import basename
import sys
from django.core.management.base import BaseCommand
from ...utils import to_db, transformer
# TODO: Display help if unable to transform a file
# TODO: Switch to logging rather than direct output
class Command(BaseCommand):
def handle(self, *args, **kwargs):
verbosity = kwargs.get('verbosity', 1)
if len(args) is 0:
sys.stderr.write('Must provide at least one file to process')
return
if verbosity >= 2:
print "Number of file(s) to process: {num_of_files}".format(
num_of_files=len(args))
for filename in args:
records = transformer.transform(filename)
if verbosity >= 2:
print "Processing %d records from %s" % (len(records),
basename(filename))
for record in records:
to_db.save(record)
if verbosity >= 2:
sys.stdout.write('.')
sys.stdout.flush()
if verbosity >= 2:
sys.stdout.write('\n')
sys.stdout.flush()
| Add a newline between files | Add a newline between files
| Python | apache-2.0 | texastribune/tx_salaries,texastribune/tx_salaries |
f4d66a5820582c995f1d31fe6a2442fc42d71077 | saulify/scrapers/newspaper.py | saulify/scrapers/newspaper.py | from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
article = Article(url_to_clean)
article.download()
article.parse()
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
| from __future__ import absolute_import
from flask import Markup
from newspaper import Article
from xml.etree import ElementTree
import markdown2
import html2text
def clean_content(url_to_clean):
""" Parse an article at a given url using newspaper.
Args:
url (str): Url where the article is found.
Returns:
Dictionary providing cleaned article and extracted content
(see `construct_result`).
"""
article = Article(url_to_clean)
article.download()
article.parse()
return construct_result(article)
def clean_source(url, source):
""" Parse a pre-downloaded article using newspaper.
Args:
url (str): The url where the article was sourced (necessary for the
newspaper API).
source (str): Html source of the article page.
Returns:
Dictionary providing cleaned article and extracted content
(see `construct_result`).
"""
article = Article(url)
article.set_html(source)
article.parse()
return construct_result(article)
def construct_result(article):
""" Construct article extraction result dictionary in standard format.
Args:
article (Article): A parsed `newspaper` `Article` object.
Returns:
Dictionary providing cleaned article and extracted content;
author, title, markdown, plaintext, html.
"""
html_string = ElementTree.tostring(article.clean_top_node)
markdown = html2text.HTML2Text().handle(html_string)
article_html = Markup(markdown2.markdown(markdown))
return {
'html': article_html,
'authors': str(', '.join(article.authors)),
'title': article.title,
'plaintext': markdown.replace('\n', ' '),
'markdown': markdown
}
| Split `clean_content` into component functions | Split `clean_content` into component functions
Provides ability to use newspaper to parse articles whose source has
already been downloaded.
| Python | agpl-3.0 | asm-products/saulify-web,asm-products/saulify-web,asm-products/saulify-web |
b0de066ebaf81745878c1c4d3adf803445a0cfc5 | scrapi/processing/postgres.py | scrapi/processing/postgres.py | from __future__ import absolute_import
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.api.settings")
# import django
import logging
from scrapi import events
from scrapi.processing.base import BaseProcessor
from api.webview.models import Document
# django.setup()
logger = logging.getLogger(__name__)
class PostgresProcessor(BaseProcessor):
NAME = 'postgres'
@events.logged(events.PROCESSING, 'raw.postgres')
def process_raw(self, raw_doc):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.raw = raw_doc.attributes
document.save()
@events.logged(events.PROCESSING, 'normalized.postgres')
def process_normalized(self, raw_doc, normalized):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.normalized = normalized.attributes
document.providerUpdatedDateTime = normalized['providerUpdatedDateTime']
document.save()
def _get_by_source_id(self, model, source, docID):
return Document.objects.filter(source=source, docID=docID)[0]
| from __future__ import absolute_import
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.api.settings")
import logging
from scrapi import events
from scrapi.processing.base import BaseProcessor
from api.webview.models import Document
logger = logging.getLogger(__name__)
class PostgresProcessor(BaseProcessor):
NAME = 'postgres'
@events.logged(events.PROCESSING, 'raw.postgres')
def process_raw(self, raw_doc):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.raw = raw_doc.attributes
document.save()
@events.logged(events.PROCESSING, 'normalized.postgres')
def process_normalized(self, raw_doc, normalized):
source, docID = raw_doc['source'], raw_doc['docID']
document = self._get_by_source_id(Document, source, docID) or Document(source=source, docID=docID)
document.normalized = normalized.attributes
document.providerUpdatedDateTime = normalized['providerUpdatedDateTime']
document.save()
def _get_by_source_id(self, model, source, docID):
try:
return Document.objects.filter(source=source, docID=docID)[0]
except IndexError:
return None
| Fix document query for existing documents | Fix document query for existing documents
| Python | apache-2.0 | fabianvf/scrapi,CenterForOpenScience/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,felliott/scrapi,felliott/scrapi,erinspace/scrapi,erinspace/scrapi,mehanig/scrapi |
412dc6e29e47148758382646dd65e0a9c5ff4505 | pymanopt/tools/autodiff/__init__.py | pymanopt/tools/autodiff/__init__.py | class Function(object):
def __init__(self, function, arg, backend):
self._function = function
self._arg = arg
self._backend = backend
self._verify_backend()
self._compile()
def _verify_backend(self):
if not self._backend.is_available():
raise ValueError("Backend `{:s}' is not available".format(
str(self._backend)))
if not self._backend.is_compatible(self._function, self._arg):
raise ValueError("Backend `{:s}' is not compatible with cost "
"function of type `{:s}'".format(
str(self._backend),
self._function.__class__.__name__))
def _compile(self):
assert self._backend is not None
self._compiled_function = self._backend.compile_function(
self._function, self._arg)
def _perform_differentiation(self, attr):
assert self._backend is not None
method = getattr(self._backend, attr)
return method(self._function, self._arg)
def compute_gradient(self):
return self._perform_differentiation("compute_gradient")
def compute_hessian(self):
return self._perform_differentiation("compute_hessian")
def __call__(self, *args, **kwargs):
assert self._compiled_function is not None
return self._compiled_function(*args, **kwargs)
| from ._callable import CallableBackend
from ._autograd import AutogradBackend
from ._pytorch import PyTorchBackend
from ._theano import TheanoBackend
from ._tensorflow import TensorflowBackend
class Function(object):
def __init__(self, function, arg, backend):
self._function = function
self._arg = arg
self._backend = backend
self._verify_backend()
self._compile()
def _verify_backend(self):
if not self._backend.is_available():
raise ValueError("Backend `{:s}' is not available".format(
str(self._backend)))
if not self._backend.is_compatible(self._function, self._arg):
raise ValueError("Backend `{:s}' is not compatible with cost "
"function of type `{:s}'".format(
str(self._backend),
self._function.__class__.__name__))
def _compile(self):
assert self._backend is not None
self._compiled_function = self._backend.compile_function(
self._function, self._arg)
def _perform_differentiation(self, attr):
assert self._backend is not None
method = getattr(self._backend, attr)
return method(self._function, self._arg)
def compute_gradient(self):
return self._perform_differentiation("compute_gradient")
def compute_hessian(self):
return self._perform_differentiation("compute_hessian")
def __call__(self, *args, **kwargs):
assert self._compiled_function is not None
return self._compiled_function(*args, **kwargs)
| Revert "autodiff: remove unused imports" | Revert "autodiff: remove unused imports"
This reverts commit d0ad4944671d94673d0051bd8faf4f3cf5d93ca9.
| Python | bsd-3-clause | pymanopt/pymanopt,pymanopt/pymanopt,nkoep/pymanopt,nkoep/pymanopt,nkoep/pymanopt |
b16474b4523e8e804f28188ba74c992896748efe | broctl/Napatech.py | broctl/Napatech.py | import BroControl.plugin
import BroControl.config
class Napatech(BroControl.plugin.Plugin):
def __init__(self):
super(Napatech, self).__init__(apiversion=1)
def name(self):
return 'napatech'
def pluginVersion(self):
return 1
def init(self):
# Use this plugin only if there is a Napatech interface in use
for nn in self.nodes():
if nn.type == 'worker' and nn.interface.startswith('napatech::'):
return True
return False
def nodeKeys(self):
return ['dedupe_lru_size', 'host_buffer_allowance']
def options(self):
return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'),
('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')]
def broctl_config(self):
script += '# Settings for configuring Napatech interractions'
script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size'))
script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance'))
return script
| import BroControl.plugin
import BroControl.config
class Napatech(BroControl.plugin.Plugin):
def __init__(self):
super(Napatech, self).__init__(apiversion=1)
def name(self):
return 'napatech'
def pluginVersion(self):
return 1
def init(self):
# Use this plugin only if there is a Napatech interface in use
for nn in self.nodes():
if nn.type == 'worker' and nn.interface.startswith('napatech::'):
return True
return False
def nodeKeys(self):
return ['dedupe_lru_size', 'host_buffer_allowance']
def options(self):
return [('dedupe_lru_size', 'int', 1024, 'Size of deduplication lru.'),
('host_buffer_allowance', 'int', 100, 'Host buffer allowance.')]
def broctl_config(self):
script = ''
script += '# Settings for configuring Napatech interractions'
script += '\nredef Napatech::dedupe_lru_size = {0};'.format(self.getOption('dedupe_lru_size'))
script += '\nredef Napatech::host_buffer_allowance = {0};'.format(self.getOption('host_buffer_allowance'))
return script
| Fix minor bug in broctl plugin. | Fix minor bug in broctl plugin.
| Python | bsd-3-clause | hosom/bro-napatech,hosom/bro-napatech |
7c894c716cb712bbcb137df3a5df5548bdca9d93 | wafer/sponsors/migrations/0005_sponsorshippackage_symbol.py | wafer/sponsors/migrations/0005_sponsorshippackage_symbol.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sponsors', '0004_auto_20160813_1328'),
]
operations = [
migrations.AddField(
model_name='sponsorshippackage',
name='symbol',
field=models.CharField(help_text='Optional symbol to display next to sponsors backing at this level sponsors list', max_length=1, blank=True),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sponsors', '0004_auto_20160813_1328'),
]
operations = [
migrations.AddField(
model_name='sponsorshippackage',
name='symbol',
field=models.CharField(blank=True, help_text='Optional symbol to display in the sponsors list next to sponsors who have sponsored at this list, (for example *).', max_length=1),
),
]
| Update the migration to changed text | Update the migration to changed text
| Python | isc | CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer |
3b366b32afedfd4bdae45ed5811d37679b3def0b | skyfield/tests/test_trigonometry.py | skyfield/tests/test_trigonometry.py | from skyfield.trigonometry import position_angle_of
from skyfield.units import Angle
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
| from skyfield.api import Angle, Topos, load, load_file
from skyfield.trigonometry import position_angle_of
def test_position_angle():
a = Angle(degrees=0), Angle(degrees=0)
b = Angle(degrees=1), Angle(degrees=1)
assert str(position_angle_of(a, b)) == '315deg 00\' 15.7"'
def test_position_angle_against_nasa_horizons():
ts = load.timescale(builtin=True)
t = ts.utc(2053, 10, 9)
eph = load_file('./skyfield/tests/data/jup310-2053-10-08.bsp')
boston = eph['earth'] + Topos(longitude_degrees=(-71, 3, 24.8),
latitude_degrees=(42, 21, 24.1))
b = boston.at(t)
j = b.observe(eph['jupiter'])#.apparent()
i = b.observe(eph['io'])#.apparent()
a = position_angle_of(j.radec(epoch='date')[1::-1],
i.radec(epoch='date')[1::-1])
# TODO: eliminate the need for this reversal step
from skyfield.api import tau
a2 = Angle(radians=(-a.radians) % tau)
print(abs(a2.degrees - 293.671), 0.002)
assert abs(a2.degrees - 293.671) < 0.002
| Add test of Position Angle from NASA HORIZONS | Add test of Position Angle from NASA HORIZONS
| Python | mit | skyfielders/python-skyfield,skyfielders/python-skyfield |
216216df9e3b42766a755f63519c84fda2fcebe0 | amy/workshops/migrations/0221_workshoprequest_rq_jobs.py | amy/workshops/migrations/0221_workshoprequest_rq_jobs.py | # Generated by Django 2.2.13 on 2020-10-25 18:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0220_event_public_status'),
]
operations = [
migrations.AddField(
model_name='workshoprequest',
name='rq_jobs',
field=models.ManyToManyField(blank=True, help_text='This should be filled out by AMY itself.', to='autoemails.RQJob', verbose_name='Related Redis Queue jobs'),
),
]
| # Generated by Django 2.2.13 on 2020-10-25 18:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('workshops', '0221_auto_20201025_1113'),
]
operations = [
migrations.AddField(
model_name='workshoprequest',
name='rq_jobs',
field=models.ManyToManyField(blank=True, help_text='This should be filled out by AMY itself.', to='autoemails.RQJob', verbose_name='Related Redis Queue jobs'),
),
]
| Fix migrations conflict after rebase | Fix migrations conflict after rebase
| Python | mit | swcarpentry/amy,pbanaszkiewicz/amy,pbanaszkiewicz/amy,pbanaszkiewicz/amy,swcarpentry/amy,swcarpentry/amy |
fda634ca2457716c33842cd0d285c20a0478601a | bugle_project/configs/development/settings.py | bugle_project/configs/development/settings.py | from bugle_project.configs.settings import *
FAYE_URL = None
DATABASE_ENGINE = 'postgresql_psycopg2'
DATABASE_NAME = 'bugle'
DATABASE_USER = 'bugle'
| from bugle_project.configs.settings import *
FAYE_ENABLED = False
FAYE_URL = None
DATABASE_ENGINE = 'postgresql_psycopg2'
DATABASE_NAME = 'bugle'
DATABASE_USER = 'bugle'
| Disable Faye on development, for now. | Disable Faye on development, for now.
| Python | bsd-2-clause | devfort/bugle,devfort/bugle,devfort/bugle |
536283e3dbbe6b549778d286401e08c6abeadff5 | dashboard/views.py | dashboard/views.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.mixins import (LoginRequiredMixin,
PermissionRequiredMixin)
from django.urls import reverse
from django.views.generic.base import TemplateView, RedirectView
from django.views.generic.detail import DetailView
from core.models import Child
class DashboardRedirect(LoginRequiredMixin, RedirectView):
# Show the overall dashboard or a child dashboard if one Child instance.
def get(self, request, *args, **kwargs):
if Child.objects.count() == 1:
child_instance = Child.objects.first()
self.url = reverse('dashboard-child', args={child_instance.slug})
else:
self.url = reverse('dashboard')
return super(DashboardRedirect, self).get(request, *args, **kwargs)
class Dashboard(LoginRequiredMixin, TemplateView):
template_name = 'dashboard/dashboard.html'
class ChildDashboard(PermissionRequiredMixin, DetailView):
model = Child
permission_required = ('core.view_child',)
template_name = 'dashboard/child.html'
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.mixins import (LoginRequiredMixin,
PermissionRequiredMixin)
from django.urls import reverse
from django.views.generic.base import TemplateView, RedirectView
from django.views.generic.detail import DetailView
from core.models import Child
class DashboardRedirect(LoginRequiredMixin, RedirectView):
# Show the overall dashboard or a child dashboard if one Child instance.
def get(self, request, *args, **kwargs):
children = Child.objects.count()
if children == 0:
# TODO: Create some sort of welcome page.
self.url = reverse('child-add')
elif children == 1:
child_instance = Child.objects.first()
self.url = reverse('dashboard-child', args={child_instance.slug})
else:
self.url = reverse('dashboard')
return super(DashboardRedirect, self).get(request, *args, **kwargs)
class Dashboard(LoginRequiredMixin, TemplateView):
template_name = 'dashboard/dashboard.html'
class ChildDashboard(PermissionRequiredMixin, DetailView):
model = Child
permission_required = ('core.view_child',)
template_name = 'dashboard/child.html'
| Handle dashboard redirect when there are no Child objects. | Handle dashboard redirect when there are no Child objects.
| Python | bsd-2-clause | cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy |
226143945b3de994c68ef0b705eadfca330d9141 | setup.py | setup.py | from setuptools import setup, find_packages
import cplcom
setup(
name='CPLCom',
version=cplcom.__version__,
packages=find_packages(),
package_data={'cplcom': ['../media/*', '*.kv']},
install_requires=['moa', 'kivy'],
author='Matthew Einhorn',
author_email='[email protected]',
license='MIT',
description=(
'Project for common widgets used with Moa.')
)
| from setuptools import setup, find_packages
import cplcom
setup(
name='CPLCom',
version=cplcom.__version__,
packages=find_packages(),
package_data={'cplcom': ['../media/*', '/*.kv']},
install_requires=['moa', 'kivy'],
author='Matthew Einhorn',
author_email='[email protected]',
license='MIT',
description=(
'Project for common widgets used with Moa.')
)
| Include kv files in package. | Include kv files in package.
| Python | mit | matham/cplcom |
38a0328aaf6599412e56f4dec97d8ee9cef6e16c | setup.py | setup.py | # -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-basis",
version='0.1.9',
url='http://github.com/frecar/django-basis',
author='Fredrik Nygård Carlsen',
author_email='[email protected]',
description='Simple reusable django app for basic model functionality',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
| # -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
setup(
name="django-basis",
version='0.2.0',
url='http://github.com/frecar/django-basis',
author='Fredrik Nygård Carlsen',
author_email='[email protected]',
description='Simple reusable django app for basic model functionality',
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
| Set upgrade to version 0.2.0 | Set upgrade to version 0.2.0
| Python | mit | frecar/django-basis |
10cd8149fe3eb911791c2188ffcafae5b66bfd21 | setup.py | setup.py | from setuptools import find_packages, setup
NAME = 'kaggle_tools'
VERSION = '0.0.1'
AUTHOR = 'Yassine Alouini'
DESCRIPTION = """This is a suite of tools to help you participate in various
Kaggle competitions"""
setup(
name=NAME,
version=VERSION,
packages=find_packages(),
# Some metadata
author=AUTHOR,
description=DESCRIPTION,
license="MIT",
keywords="kaggle machine-learning",
)
| from setuptools import find_packages, setup
NAME = 'kaggle_tools'
VERSION = '0.0.1'
AUTHOR = 'Yassine Alouini'
DESCRIPTION = """This is a suite of tools to help you participate in various
Kaggle competitions"""
EMAIL = "[email protected]"
URL = ""
setup(
name=NAME,
version=VERSION,
packages=find_packages(),
# Some metadata
author=AUTHOR,
author_email=EMAIL,
description=DESCRIPTION,
url=URL,
license="MIT",
keywords="kaggle machine-learning",
)
| Add missing required metadata for registering | Add missing required metadata for registering
| Python | mit | yassineAlouini/kaggle-tools,yassineAlouini/kaggle-tools |
957b5045e5c1d2c3d2f3b27074341cb8c5fb6128 | setup.py | setup.py | import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'name': 'clouseau',
'description': 'A silly git repo inspector',
'long_description': None ,
# Needs to be restructed text
# os.path.join(os.path.dirname(__file__), 'README.md').read()
'author': 'bill shelton',
'url': 'https://github.com/cfpb/clouseau',
'download_url': 'http://tbd.com',
'author_email': '[email protected]',
'version': '0.2.0',
'install_requires': ['jinja2','nose','nose-progressive'],
'packages': ['clouseau','tests'],
'py_modules': [],
'scripts': ['bin/clouseau', 'bin/clouseau_thin'],
'keywords': ['git', 'pii', 'security', 'search',
'sensitive information'],
'classifiers': [
'Development Status :: -0 - Pre-Natal',
'Environment :: Console',
'Intended Audience :: Developers, Security Engineers',
'Programming Language: Python 2.7',
'Operating System :: OSX',
'Operating System :: Linux',
]
}
setup(**config)
| import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'name': 'clouseau',
'description': 'A silly git repo inspector',
'long_description': None ,
# Needs to be restructed text
# os.path.join(os.path.dirname(__file__), 'README.md').read()
'author': 'bill shelton',
'url': 'https://github.com/cfpb/clouseau',
'download_url': 'http://tbd.com',
'author_email': '[email protected]',
'version': '0.2.0',
'install_requires': ['jinja2','nose','nose-progressive'],
'packages': ['clouseau','tests'],
'package_data': {'clouseau': ['clients/*.py', 'patterns/*.txt', 'templates/*.html']},
'py_modules': [],
'scripts': ['bin/clouseau', 'bin/clouseau_thin'],
'keywords': ['git', 'pii', 'security', 'search',
'sensitive information'],
'classifiers': [
'Development Status :: -0 - Pre-Natal',
'Environment :: Console',
'Intended Audience :: Developers, Security Engineers',
'Programming Language: Python 2.7',
'Operating System :: OSX',
'Operating System :: Linux',
]
}
setup(**config)
| Include subdirectories in pip install | Include subdirectories in pip install
| Python | cc0-1.0 | marcesher/clouseau,contolini/clouseau,contolini/clouseau,willbarton/clouseau,marcesher/clouseau,marcesher/clouseau,contolini/clouseau,willbarton/clouseau,willbarton/clouseau |
d7f07474326610bd6a01ac63157125b0ac43d450 | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='[email protected]',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from setuptools import setup
from github_backup import __version__
def open_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname))
setup(
name='github-backup',
version=__version__,
author='Jose Diaz-Gonzalez',
author_email='[email protected]',
packages=['github_backup'],
scripts=['bin/github-backup'],
url='http://github.com/josegonzalez/python-github-backup',
license=open('LICENSE.txt').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description='backup a github user or organization',
long_description=open_file('README.rst').read(),
install_requires=open_file('requirements.txt').readlines(),
zip_safe=True,
)
| Remove support for python 2.7 in package classifiers | Remove support for python 2.7 in package classifiers
| Python | mit | josegonzalez/python-github-backup,josegonzalez/python-github-backup |
5b3a21390f5edf501aa47db921422d3198719099 | setup.py | setup.py | #!/usr/bin/env python
import os
from setuptools import setup, find_packages
def long_description():
"""
Build the long description from a README file located in the same directory
as this module.
"""
base_path = os.path.dirname(os.path.realpath(__file__))
readme = open(os.path.join(base_path, 'README.rst'))
try:
return readme.read()
finally:
readme.close()
setup(
name='django-countries',
version='2.0',
description='Provides a country field for Django models.',
long_description=long_description(),
author='Chris Beaven',
author_email='[email protected]',
url='https://github.com/SmileyChris/django-countries/',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
)
| #!/usr/bin/env python
import os
from setuptools import setup, find_packages
def long_description():
"""
Build the long description from a README file located in the same directory
as this module.
"""
base_path = os.path.dirname(os.path.realpath(__file__))
readme = open(os.path.join(base_path, 'README.rst'))
try:
return readme.read()
finally:
readme.close()
setup(
name='django-countries',
version='2.0b',
description='Provides a country field for Django models.',
long_description=long_description(),
author='Chris Beaven',
author_email='[email protected]',
url='https://github.com/SmileyChris/django-countries/',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
)
| Mark version as 2.0 beta for now | Mark version as 2.0 beta for now
| Python | mit | SmileyChris/django-countries,basichash/django-countries,rahimnathwani/django-countries,maximzxc/django-countries,jrfernandes/django-countries,pimlie/django-countries,velfimov/django-countries,schinckel/django-countries,fladi/django-countries |
88bf4171ff58bcf35eab39efe78d7007e380f133 | setup.py | setup.py | try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(name='parcels',
version='0.0.1',
description="""Framework for Lagrangian tracking of virtual
ocean particles in the petascale age.""",
author="Imperial College London",
use_scm_version=True,
setup_requires=['setuptools_scm'],
packages=find_packages(exclude=['docs', 'examples', 'scripts', 'tests']) + ['include'],
include_package_data=True,
)
| """Install Parcels and dependencies."""
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
setup(name='parcels',
version='0.0.1',
description="""Framework for Lagrangian tracking of virtual
ocean particles in the petascale age.""",
author="Imperial College London",
use_scm_version=True,
setup_requires=['setuptools_scm'],
packages=find_packages(exclude=['docs', 'tests']) + ['include'],
include_package_data=True,
)
| Include examples and scripts and comply with PEP8 | Include examples and scripts and comply with PEP8
| Python | mit | OceanPARCELS/parcels,OceanPARCELS/parcels |
315933e24a19e01875fa7fcf182b5db905d5eff1 | setup.py | setup.py | from setuptools import setup
setup(name='exponent_server_sdk',
version='0.0.1',
description='Exponent Server SDK for Python',
url='https://github.com/exponentjs/exponent-server-sdk-python',
author='Exponent Team',
author_email='[email protected]',
license='MIT',
install_requires=[
'requests',
],
packages=['exponent_server_sdk'],
zip_safe=False)
| from setuptools import setup
setup(name='exponent_server_sdk',
version='0.0.1',
description='Exponent Server SDK for Python',
url='https://github.com/exponent/exponent-server-sdk-python',
author='Exponent Team',
author_email='[email protected]',
license='MIT',
install_requires=[
'requests',
],
packages=['exponent_server_sdk'],
zip_safe=False)
| Rename exponentjs references in our libraries | Rename exponentjs references in our libraries
fbshipit-source-id: 21072ac
| Python | mit | exponentjs/exponent-server-sdk-python |
ca2195fd99dc02e3e3d64b85003fcf27ddf4e897 | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
install_requires = (
)
description = "An archive for Connexions documents."
setup(
name='cnx-archive',
version='0.1',
author='Connexions team',
author_email='[email protected]',
url="https://github.com/connexions/cnx-archive",
license='LGPL, See aslo LICENSE.txt',
description=description,
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
entry_points="""\
""",
test_suite='cnxarchive.tests'
)
| # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
install_requires = (
'PasteDeploy',
'psycopg2',
)
description = "An archive for Connexions documents."
setup(
name='cnx-archive',
version='0.1',
author='Connexions team',
author_email='[email protected]',
url="https://github.com/connexions/cnx-archive",
license='LGPL, See aslo LICENSE.txt',
description=description,
packages=find_packages(),
install_requires=install_requires,
include_package_data=True,
entry_points="""\
""",
test_suite='cnxarchive.tests'
)
| Add dependencies, psycopg2 for Postgres integration and PasteDeploy for configuration file support. | Add dependencies, psycopg2 for Postgres integration and PasteDeploy for configuration file support.
| Python | agpl-3.0 | Connexions/cnx-archive,Connexions/cnx-archive |
355d71bb600df850b3914772d0dca9e0a68e64c8 | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name="django-sanitizer",
version="0.3",
description="Django template filter application for sanitizing user submitted HTML",
author="Calvin Spealman",
url="http://github.com/caktus/django-sanitizer",
packages=['sanitizer', 'sanitizer.templatetags'],
)
| #!/usr/bin/env python
from distutils.core import setup
setup(name="django-sanitizer",
version="0.4",
description="Django template filter application for sanitizing user submitted HTML",
author="Caktus Consulting Group",
maintainer="Calvin Spealman",
maintainer_email="[email protected]",
url="http://github.com/caktus/django-sanitizer",
packages=['sanitizer', 'sanitizer.templatetags'],
)
| Make caktus the owner, listing myself as a maintainer. | Make caktus the owner, listing myself as a maintainer.
| Python | bsd-3-clause | caktus/django-sanitizer |
51f36e512b30ef19f0ee213995be9865d5123f6e | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name = "django-report-builder",
version = "2.0.2",
author = "David Burke",
author_email = "[email protected]",
description = ("Query and Report builder for Django ORM"),
license = "BSD",
keywords = "django report",
url = "https://github.com/burke-software/django-report-builder",
packages=find_packages(),
include_package_data=True,
test_suite='setuptest.setuptest.SetupTestSuite',
tests_require=(
'django-setuptest',
'south',
'argparse',
),
classifiers=[
"Development Status :: 5 - Production/Stable",
'Environment :: Web Environment',
'Framework :: Django',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
"License :: OSI Approved :: BSD License",
],
install_requires=[
'django>=1.4',
'openpyxl',
'python-dateutil',
'django-report-utils>=0.2.3',
]
)
| from setuptools import setup, find_packages
setup(
name = "django-report-builder",
version = "2.0.2",
author = "David Burke",
author_email = "[email protected]",
description = ("Query and Report builder for Django ORM"),
license = "BSD",
keywords = "django report",
url = "https://github.com/burke-software/django-report-builder",
packages=find_packages(),
include_package_data=True,
test_suite='setuptest.setuptest.SetupTestSuite',
tests_require=(
'django-setuptest',
'argparse',
),
classifiers=[
"Development Status :: 5 - Production/Stable",
'Environment :: Web Environment',
'Framework :: Django',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
"License :: OSI Approved :: BSD License",
],
install_requires=[
'django>=1.4',
'openpyxl',
'python-dateutil',
'django-report-utils>=0.2.3',
]
)
| Remove south from test requirements | Remove south from test requirements
| Python | bsd-3-clause | altanawealth/django-report-builder,AbhiAgarwal/django-report-builder,BrendanBerkley/django-report-builder,AbhiAgarwal/django-report-builder,BrendanBerkley/django-report-builder,BrendanBerkley/django-report-builder,altanawealth/django-report-builder,AbhiAgarwal/django-report-builder,altanawealth/django-report-builder |
4cfd8159ad33c88ef66837ed4bcb6d5c927d2d2c | setup.py | setup.py | import os.path
from ez_setup import use_setuptools
use_setuptools(min_version='0.6')
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='[email protected]',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'GDAL>=1.7', # Python 3 support.
'GeoAlchemy2>=0.2.1', # Bug fix for schemas other than public.
'pandas>=0.13.1',
'psycopg2>=2.5', # connection and cursor context managers.
'six',
'SQLAlchemy>=0.8' # GeoAlchemy2 support.
],
extras_require={
'rastertoolz': ['numpy>=1.8.0', 'rasterio>=0.12', 'rasterstats>=0.4',
'shapely>=1.3.2']
}
)
| import os.path
from ez_setup import use_setuptools
use_setuptools(min_version='0.6')
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='[email protected]',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'GDAL>=1.7', # Python 3 support.
'GeoAlchemy2>=0.2.1', # Bug fix for schemas other than public.
'pandas>=0.13.1',
'psycopg2>=2.5', # connection and cursor context managers.
'six>=1.4', # Mapping for urllib.
'SQLAlchemy>=0.8' # GeoAlchemy2 support.
],
extras_require={
'rastertoolz': ['numpy>=1.8.0', 'rasterio>=0.12', 'rasterstats>=0.4',
'shapely>=1.3.2']
}
)
| Add six package version specifier | Add six package version specifier
| Python | bsd-3-clause | UDST/spandex,SANDAG/spandex |
21cd0c4358cf97896af03ea05a2f1ee68ef06669 | setup.py | setup.py | import sys
from setuptools import setup, find_packages
from django_summernote import version, PROJECT
MODULE_NAME = 'django_summernote'
PACKAGE_DATA = list()
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
]
setup(
name=PROJECT,
version=version,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
author='django-summernote contributors',
maintainer='django-summernote maintainers',
url='http://github.com/summernote/django-summernote',
description='Summernote plugin for Django',
classifiers=CLASSIFIERS,
install_requires=['django', 'bleach'],
)
| import sys
from setuptools import setup, find_packages
from django_summernote import version, PROJECT
MODULE_NAME = 'django_summernote'
PACKAGE_DATA = list()
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities',
]
setup(
name=PROJECT,
version=version,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
author='django-summernote contributors',
maintainer='django-summernote maintainers',
url='http://github.com/summernote/django-summernote',
description='Summernote plugin for Django',
classifiers=CLASSIFIERS,
install_requires=['django', 'bleach'],
extras_require={
'dev': [
'django-dummy-plug',
'pytest',
'pytest-django',
]
},
)
| Add extra_require for development deps | Add extra_require for development deps
| Python | mit | summernote/django-summernote,summernote/django-summernote,summernote/django-summernote |
de1c551641216157da11039c2a732785b4bf9ce7 | setup.py | setup.py | from setuptools import setup
with open('README.md') as f:
description = f.read()
from beewarn import VERSION
setup(name='beewarn',
version=VERSION,
description='Utility for warning about bees',
author='Alistair Lynn',
author_email='[email protected]',
license='MIT',
long_description=description,
url='https://github.com/prophile/beewarn',
zip_safe=True,
setup_requires=['nose >=1.0, <2.0'],
entry_points = {
'console_scripts': [
'beewarn=beewarn.cli:run_cli'
]
},
packages=['beewarn'],
test_suite='nose.collector')
| from setuptools import setup
from beewarn import VERSION
setup(name='beewarn',
version=VERSION,
description='Utility for warning about bees',
author='Alistair Lynn',
author_email='[email protected]',
license='MIT',
url='https://github.com/prophile/beewarn',
zip_safe=True,
setup_requires=['nose >=1.0, <2.0'],
entry_points = {
'console_scripts': [
'beewarn=beewarn.cli:run_cli'
]
},
packages=['beewarn'],
test_suite='nose.collector')
| Remove the README.md loading step | Remove the README.md loading step
| Python | mit | prophile/beewarn |
89bb9c31e0e35f4d5bdacca094581ff8bcc213d2 | setup.py | setup.py | from setuptools import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='dacite',
version='0.0.14',
description='Simple creation of data classes from dictionaries.',
long_description=long_description,
author='Konrad Hałas',
author_email='[email protected]',
url='https://github.com/konradhalas/dacite',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
python_requires='>=3.6',
keywords='dataclasses',
py_modules=['dacite'],
install_requires=['dataclasses'],
)
| from setuptools import setup
setup(
name='dacite',
version='0.0.15',
description='Simple creation of data classes from dictionaries.',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
author='Konrad Hałas',
author_email='[email protected]',
url='https://github.com/konradhalas/dacite',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
python_requires='>=3.6',
keywords='dataclasses',
py_modules=['dacite'],
install_requires=['dataclasses'],
)
| Use Markdown description instead of pandoc conversion hack. Bump version. | Use Markdown description instead of pandoc conversion hack. Bump version.
| Python | mit | konradhalas/dacite |
b1563fb01ece4b4c01641b01ab33fd5993d4f16a | setup.py | setup.py | #!/usr/bin/env python
# Copyright 2009, 2012 Dietrich Epp <[email protected]>
# See LICENSE.txt for details.
from distutils.core import setup
setup(name='IdioTest',
version='0.1',
description='Idiotic testing framework',
author='Dietrich Epp',
author_email='[email protected]',
packages=['idiotest'])
| #!/usr/bin/env python
# Copyright 2009, 2012 Dietrich Epp <[email protected]>
# See LICENSE.txt for details.
from distutils.core import setup
setup(name='IdioTest',
version='0.1',
description='Simple testing framework',
author='Dietrich Epp',
author_email='[email protected]',
packages=['idiotest'])
| Scrub references to "Idiotic" testing framework | Scrub references to "Idiotic" testing framework
| Python | bsd-2-clause | depp/idiotest,depp/idiotest |
55fced15a69c07dc2b5fea9b25f244a0d1fa88c8 | setup.py | setup.py | # -*- coding: utf-8 -*-
#
# setup.py
# colorific
#
"""
Package information for colorific.
"""
import os
from setuptools import setup
readme = os.path.join(os.path.dirname(__file__), 'README.md')
setup(
name='colorific',
version='0.2.0',
description='Automatic color palette detection',
long_description=open(readme).read(),
author='Lars Yencken',
author_email='[email protected]',
url='http://bitbucket.org/larsyencken/palette-detect',
py_modules=['colorific'],
install_requires=[
'PIL>=1.1.6',
'colormath>=1.0.8',
'numpy>=1.6.1',
],
license='ISC',
entry_points={
'console_scripts': [
'colorific = colorific:main',
],
},
)
| # -*- coding: utf-8 -*-
#
# setup.py
# colorific
#
"""
Package information for colorific.
"""
import os
from setuptools import setup
readme = os.path.join(os.path.dirname(__file__), 'README.md')
setup(
name='colorific',
version='0.2.0',
description='Automatic color palette detection',
long_description=open(readme).read(),
author='Lars Yencken',
author_email='[email protected]',
url='http://github.com/99designs/colorific',
py_modules=['colorific'],
install_requires=[
'PIL>=1.1.6',
'colormath>=1.0.8',
'numpy>=1.6.1',
],
license='ISC',
entry_points={
'console_scripts': [
'colorific = colorific:main',
],
},
)
| Update repo URL to github. | Update repo URL to github.
| Python | isc | 99designs/colorific |
96fd8b71fd425d251e9cc07e8cc65b4fc040d857 | samples/nanomsg/hello_world.py | samples/nanomsg/hello_world.py | import os.path
import shutil
import tempfile
import threading
import sys
import nanomsg as nn
def ping(url, event):
with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
event.wait()
sock.send(b'Hello, World!')
def pong(url, event):
with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
event.set()
message = sock.recv()
print(bytes(message.as_memoryview()).decode('ascii'))
def main():
path = tempfile.mkdtemp()
try:
event = threading.Event()
url = 'ipc://' + os.path.join(path, 'reqrep.ipc')
print('Play ping-pong on %s' % url)
threads = [
threading.Thread(target=ping, args=(url, event)),
threading.Thread(target=pong, args=(url, event)),
]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
finally:
shutil.rmtree(path)
return 0
if __name__ == '__main__':
sys.exit(main())
| import threading
import sys
import nanomsg as nn
def ping(url, barrier):
with nn.Socket(protocol=nn.Protocol.NN_PUSH) as sock, sock.connect(url):
sock.send(b'Hello, World!')
# Shutdown the endpoint after the other side ack'ed; otherwise
# the message could be lost.
barrier.wait()
def pong(url, barrier):
with nn.Socket(protocol=nn.Protocol.NN_PULL) as sock, sock.bind(url):
message = sock.recv()
print(bytes(message.as_memoryview()).decode('ascii'))
barrier.wait()
def main():
barrier = threading.Barrier(2)
url = 'inproc://test'
print('Play ping-pong on %s' % url)
threads = [
threading.Thread(target=ping, args=(url, barrier)),
threading.Thread(target=pong, args=(url, barrier)),
]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
return 0
if __name__ == '__main__':
sys.exit(main())
| Fix message lost issue in samples | Fix message lost issue in samples
| Python | mit | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage |
c464887817334cd1dbc3c4587f185ec7ea598fda | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from codecs import open # To use a consistent encoding
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'README.txt'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='freesif',
version='0.1',
description='Get data from Sesam Interface Files',
long_description=long_description,
# url='https://github.com/agrav/freesif',
author='Audun Gravdal Johansen',
author_email='[email protected]',
license='MIT',
classifiers=[
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
# 'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='sesam structural hydrodynamic',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=['tables', 'numpy'],
)
| # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
# package data
name='freesif',
description='Get data from Sesam Interface Files',
use_scm_version=True,
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
package_data=dict(),
python_requires='~=3.7',
setup_requires=['setuptools_scm'],
install_requires=[
'tables>=3.6,<4',
'numpy>=1.17,<2'
],
zip_safe=True,
# meta data
long_description=read('README.md'),
keywords='sesam structural hydrodynamic',
url='https://github.com/agrav/freesif',
author='Audun Gravdal Johansen',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4',
],
)
| Enable versioning using Git tags, rendering of README in markdown syntax and limit depenencies by version. | Enable versioning using Git tags, rendering of README in markdown syntax and limit depenencies by version.
| Python | mit | agrav/freesif |
77f8e99ca67489caa75aceb76f79fd5a5d32ded8 | setup.py | setup.py | from distutils.core import setup
import re
def get_version():
init_py = open('pykka/__init__.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", init_py))
return metadata['version']
setup(
name='Pykka',
version=get_version(),
author='Stein Magnus Jodal',
author_email='[email protected]',
packages=['pykka'],
url='http://pykka.readthedocs.org/',
license='Apache License, Version 2.0',
description='Pykka is a Python implementation of the actor model',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries',
],
)
| from distutils.core import setup
import re
def get_version():
init_py = open('pykka/__init__.py').read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", init_py))
return metadata['version']
setup(
name='Pykka',
version=get_version(),
author='Stein Magnus Jodal',
author_email='[email protected]',
packages=['pykka'],
url='http://pykka.readthedocs.org/',
license='Apache License, Version 2.0',
description='Pykka is a Python implementation of the actor model',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries',
],
)
| Add more Python version/implementation classifiers | pypi: Add more Python version/implementation classifiers
| Python | apache-2.0 | jodal/pykka,tamland/pykka,tempbottle/pykka |
a0c5589060725004b6baadaa32cc0d23d157bacf | setup.py | setup.py | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.2',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='[email protected]',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.3',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='[email protected]',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| Update the PyPI version to 7.0.3. | Update the PyPI version to 7.0.3.
| Python | mit | Doist/todoist-python |
1c15f3f6aeed2384353c3e1efc5bcf7551b88850 | setup.py | setup.py | from distutils.core import setup
import gutter
setup(
name = u'gutter',
version = gutter.__version__,
author = gutter.__author__,
author_email = u'[email protected]',
url = u'https://gutter.readthedocs.org/',
description = u'Rainwave client framework',
packages = [u'gutter'],
classifiers = [
u'Development Status :: 3 - Alpha',
u'Intended Audience :: Developers',
u'License :: OSI Approved :: MIT License',
u'Natural Language :: English',
u'Programming Language :: Python',
u'Programming Language :: Python :: 2.7',
u'Topic :: Software Development :: Libraries'
],
license = open(u'LICENSE').read()
)
| from distutils.core import setup
import gutter
setup(
name = u'gutter',
version = gutter.__version__,
author = gutter.__author__,
author_email = u'[email protected]',
url = u'https://gutter.readthedocs.org/',
description = u'Rainwave client framework',
packages = ['gutter'],
classifiers = [
u'Development Status :: 3 - Alpha',
u'Intended Audience :: Developers',
u'License :: OSI Approved :: MIT License',
u'Natural Language :: English',
u'Programming Language :: Python',
u'Programming Language :: Python :: 2.7',
u'Topic :: Software Development :: Libraries'
],
license = open(u'LICENSE').read()
)
| Package name must be string, not Unicode | Package name must be string, not Unicode
| Python | mit | williamjacksn/python-rainwave-client |
8e3abcd310b7e932d769f05fa0a7135cc1a53b76 | setup.py | setup.py | from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need
# fine tuning.
build_exe_options = {
"excludes": [
"numpy"
],
"bin_includes": [
"libcrypto.so.1.0.0",
"libssl.so.1.0.0"
],
"packages": [
"_cffi_backend",
"appdirs",
"asyncio",
"bcrypt",
"encodings",
"idna",
"motor",
"packaging",
"raven",
"uvloop"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
setup(name='virtool', executables=executables, options=options)
| from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need
# fine tuning.
build_exe_options = {
"bin_includes": [
"libcrypto.so.1.0.0",
"libssl.so.1.0.0"
],
"includes": [
"numpy",
"numpy.core._methods",
"numpy.lib",
"numpy.lib.format"
],
"packages": [
"_cffi_backend",
"appdirs",
"asyncio",
"bcrypt",
"encodings",
"idna",
"motor",
"packaging",
"raven",
"uvloop"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
setup(name='virtool', executables=executables, options=options)
| Include missing numpy modules in build | Include missing numpy modules in build
| Python | mit | igboyes/virtool,virtool/virtool,igboyes/virtool,virtool/virtool |
f9094d47d138ecd7ece6e921b9a10a7c79eed629 | setup.py | setup.py | from setuptools import setup
setup(
name='cmsplugin-biography',
version='0.0.1',
packages='cmsplugin_biography',
install_requires=[
'django-cms',
'djangocms-text-ckeditor==1.0.9',
'easy-thumbnails==1.2',
],
author='Kevin Richardson',
author_email='[email protected]',
description='A Django CMS plugin that manages and displays biographical information',
long_description=open('README.rst').read(),
license='MIT',
url='http://github.com/kfr2/cmsplugin-biography',
include_package_data=True
)
| from setuptools import setup
setup(
name='cmsplugin-biography',
version='0.0.1',
packages=['cmsplugin_biography', ],
install_requires=[
'django-cms',
'djangocms-text-ckeditor==1.0.9',
'easy-thumbnails==1.2',
],
author='Kevin Richardson',
author_email='[email protected]',
description='A Django CMS plugin that manages and displays biographical information',
long_description=open('README.rst').read(),
license='MIT',
url='http://github.com/kfr2/cmsplugin-biography',
include_package_data=True
)
| Convert `packages` to a list | Convert `packages` to a list
| Python | mit | kfr2/cmsplugin-biography |
15299d7f15a1aa9523c491190880e1aad84cae07 | setup.py | setup.py | from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.5.4',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = '[email protected]',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
| from setuptools import setup
setup(
name = 'brunnhilde',
version = '1.6.0',
url = 'https://github.com/timothyryanwalsh/brunnhilde',
author = 'Tim Walsh',
author_email = '[email protected]',
py_modules = ['brunnhilde'],
scripts = ['brunnhilde.py'],
description = 'A Siegfried-based digital archives reporting tool for directories and disk images',
keywords = 'archives reporting characterization identification diskimages',
platforms = ['POSIX'],
test_suite='test',
classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Topic :: Communications :: File Sharing',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Database',
'Topic :: System :: Archiving',
'Topic :: System :: Filesystems',
'Topic :: Utilities'
],
)
| Update for 1.6.0 - TODO: Add Windows | Update for 1.6.0 - TODO: Add Windows | Python | mit | timothyryanwalsh/brunnhilde |
8e502a8041aed0758093c306c012d34e37e62309 | setup.py | setup.py | import os, sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def main():
setup(
name='stcrestclient',
version= '1.0.2',
author='Andrew Gillis',
author_email='[email protected]',
url='https://github.com/ajgillis/py-stcrestclient',
description='stcrestclient: Client modules for STC ReST API',
long_description = open('README.md').read(),
license='http://www.opensource.org/licenses/mit-license.php',
platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
keywords='Spirent TestCenter API',
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3'],
packages=['stcrestclient'],
zip_safe=True,
)
if __name__ == '__main__':
main()
| import os, sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def main():
setup(
name='stcrestclient',
version= '1.0.2',
author='Andrew Gillis',
author_email='[email protected]',
url='https://github.com/ajgillis/py-stcrestclient',
description='stcrestclient: Client modules for STC ReST API',
long_description = 'See https://github.com/ajgillis/py-stcrestclient#python-stc-rest-api-client-stcrestclient',
license='http://www.opensource.org/licenses/mit-license.php',
platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
keywords='Spirent TestCenter API',
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3'],
packages=['stcrestclient'],
zip_safe=True,
)
if __name__ == '__main__':
main()
| Change long description to URL. | Change long description to URL.
| Python | mit | Spirent/py-stcrestclient |
ac111399e390a5f62b35467b9cf5b9af613317b2 | setup.py | setup.py | from setuptools import setup
from setuptools import find_packages
setup(name='DSPP-Keras',
version='0.0.3',
description='Integration of DSPP database with Keral Machine Learning Library',
author='Jan Domanski',
author_email='[email protected]',
url='https://github.com/PeptoneInc/dspp-keras',
download_url='https://github.com/PeptoneInc/dspp-keras/archive/v0.0.3.tar.gz',
license='MIT',
install_requires=['keras', 'numpy', 'h5py'],
packages=find_packages())
| from setuptools import setup
from setuptools import find_packages
setup(name='DSPP-Keras',
version='0.0.3',
description='Integration of Database of structural propensities of proteins (dSPP) with Keras Machine Learning Library',
author='Jan Domanski',
author_email='[email protected]',
url='https://github.com/PeptoneInc/dspp-keras',
download_url='https://github.com/PeptoneInc/dspp-keras/archive/v0.0.3.tar.gz',
license='MIT',
install_requires=['keras', 'numpy', 'h5py'],
packages=find_packages())
| Change title and fix spelling for pip package | Change title and fix spelling for pip package
| Python | agpl-3.0 | PeptoneInc/dspp-keras |
e14071cb1bb6a331b6d2a32c65c8a71aa7d85e04 | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
try:
long_description = open('README.md', 'r').read()
except:
long_description = ''
setup(
name='samp-client',
version='2.0.2',
packages=['samp_client'],
url='https://github.com/mick88/samp-client',
license='MIT',
author='Michal Dabski',
author_email='[email protected]',
requires=['future'],
description='SA-MP API client for python supporting both query and RCON APIs',
long_description=long_description,
)
| #!/usr/bin/env python
from distutils.core import setup
try:
long_description = open('README.md', 'r').read()
except:
long_description = ''
setup(
name='samp-client',
version='2.0.2',
packages=['samp_client'],
url='https://github.com/mick88/samp-client',
license='MIT',
author='Michal Dabski',
author_email='[email protected]',
install_requires=['future'],
description='SA-MP API client for python supporting both query and RCON APIs',
long_description=long_description,
)
| Install future as a dependency | Install future as a dependency
| Python | mit | mick88/samp-client |
7081e163f9e9c4c11d5a52d2dce6d1ef308bc0ab | setup.py | setup.py | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name='rna',
version='0.0.1',
description='',
author='Josh Mize',
author_email='[email protected]',
#url='',
#license='',
packages=[
'rna', 'rna.migrations'],
install_requires=[
'South',
'Django>=1.4.9',
'djangorestframework==2.3.7',
'django-extensions==1.2.0'],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Programming Language :: Python'],
)
| #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name='rna',
version='0.0.1',
description='',
author='Josh Mize',
author_email='[email protected]',
#url='',
#license='',
packages=[
'rna', 'rna.migrations', 'rna.management.commands'],
install_requires=[
'South',
'Django>=1.4.9',
'djangorestframework==2.3.7',
'django-extensions==1.2.0'],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'Programming Language :: Python'],
)
| Include management commands when installed. | Include management commands when installed. | Python | mpl-2.0 | sylvestre/rna,sylvestre/rna,sylvestre/rna,mozilla/rna,jgmize/rna,jgmize/rna,mozilla/rna,mozilla/rna,jgmize/rna |
7e7b71cc9ea8f79b81ef60d0303e59ad77389c06 | setup.py | setup.py | # I prefer Markdown to reStructuredText. PyPi does not. This allows people to
# install and not get any errors.
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
long_description = (
"Tavi (as in `Rikki Tikki Tavi "
"<http://en.wikipedia.org/wiki/Rikki-Tikki-Tavi>`_) "
"is an extremely thin Mongo object mapper for Python. It is a thin "
"abstraction over `pymongo <http://api.mongodb.org/python/current/>`_ "
"that allows you to easily model your applications and persist your "
"data in MongoDB. See `README.md <http://github.com/bnadlerjr/tavi>`_ "
"for more details."
)
from setuptools import setup
setup(
name='Tavi',
version='0.0.1',
author='Bob Nadler Jr.',
author_email='[email protected]',
packages=['tavi', 'tavi.test'],
url='http://pypi.python.org/pypi/Tavi/',
license='LICENSE.txt',
description='Super thin Mongo object mapper for Python.',
long_description=long_description,
install_requires=[
"inflection >= 0.2.0",
"pymongo >= 2.5.2"
]
)
| # I prefer Markdown to reStructuredText. PyPi does not. This allows people to
# install and not get any errors.
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
long_description = (
"Tavi (as in `Rikki Tikki Tavi "
"<http://en.wikipedia.org/wiki/Rikki-Tikki-Tavi>`_) "
"is an extremely thin Mongo object mapper for Python. It is a thin "
"abstraction over `pymongo <http://api.mongodb.org/python/current/>`_ "
"that allows you to easily model your applications and persist your "
"data in MongoDB. See `README.md <http://github.com/bnadlerjr/tavi>`_ "
"for more details."
)
from setuptools import setup
setup(
name='Tavi',
version='0.0.1',
author='Bob Nadler Jr.',
author_email='[email protected]',
packages=['tavi', 'tavi.test'],
url='https://github.com/bnadlerjr/tavi',
license='LICENSE.txt',
description='Super thin Mongo object mapper for Python.',
long_description=long_description,
install_requires=[
"inflection >= 0.2.0",
"pymongo >= 2.5.2"
]
)
| Update project URL to point to GitHub. | Update project URL to point to GitHub.
| Python | mit | bnadlerjr/tavi |
1fdb94783831bdd8e0608dc95a008f9344753a58 | setup.py | setup.py | """
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.12.0',
url='http://github.com/python-babel/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='[email protected]',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=2.3',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| from setuptools import setup
from os import path
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='Flask-Babel',
version='0.12.0',
url='http://github.com/python-babel/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='[email protected]',
description='Adds i18n/l10n support to Flask applications',
long_description=long_description,
long_description_content_type='text/markdown',
packages=['flask_babel'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel>=2.3',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| Switch to using README.md for project description on pypi. | Switch to using README.md for project description on pypi.
| Python | bsd-3-clause | mitsuhiko/flask-babel,mitsuhiko/flask-babel |
6c001251a4df02aa011187bf1444c94ffc4f92db | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='zorg',
version='0.0.4',
url='https://github.com/zorg/zorg',
description='Python framework for robotics and physical computing.',
long_description=read('README.rst'),
author='Zorg Group',
maintainer_email='[email protected]',
packages=find_packages(),
package_dir={'zorg': 'zorg'},
include_package_data=True,
license='MIT',
zip_safe=True,
platforms=['any'],
keywords=['zorg', 'robotics'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Console',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
test_suite='tests',
tests_require=[]
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='zorg',
version='0.0.5',
url='https://github.com/zorg/zorg',
description='Python framework for robotics and physical computing.',
long_description=read('README.rst'),
author='Zorg Group',
maintainer_email='[email protected]',
packages=find_packages(),
package_dir={'zorg': 'zorg'},
include_package_data=True,
license='MIT',
zip_safe=True,
platforms=['any'],
keywords=['zorg', 'robotics'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Environment :: Console',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
test_suite='tests',
tests_require=[]
)
| Update release version to 0.0.5 | Update release version to 0.0.5
| Python | mit | zorg-framework/zorg,zorg/zorg |
4c90264d744b177aabcaa1cecba4fe17e30cf308 | corehq/apps/accounting/migrations/0026_auto_20180508_1956.py | corehq/apps/accounting/migrations/0026_auto_20180508_1956.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-08 19:56
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
from corehq.sql_db.operations import HqRunPython
def _convert_emailed_to_array_field(apps, schema_editor):
BillingRecord = apps.get_model('accounting', 'BillingRecord')
for record in BillingRecord.objects.all():
if record.emailed_to != '':
record.emailed_to_list = record.emailed_to.split(',')
WireBillingRecord = apps.get_model('accounting', 'WireBillingRecord')
for wirerecord in WireBillingRecord.objects.all():
if wirerecord.emailed_to != '':
wirerecord.emailed_to_list = wirerecord.emailed_to.split(',')
class Migration(migrations.Migration):
dependencies = [
('accounting', '0025_auto_20180508_1952'),
]
operations = [
HqRunPython(_convert_emailed_to_array_field)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-08 19:56
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
from corehq.sql_db.operations import HqRunPython
def noop(*args, **kwargs):
pass
def _convert_emailed_to_array_field(apps, schema_editor):
BillingRecord = apps.get_model('accounting', 'BillingRecord')
for record in BillingRecord.objects.all():
if record.emailed_to != '':
record.emailed_to_list = record.emailed_to.split(',')
WireBillingRecord = apps.get_model('accounting', 'WireBillingRecord')
for wirerecord in WireBillingRecord.objects.all():
if wirerecord.emailed_to != '':
wirerecord.emailed_to_list = wirerecord.emailed_to.split(',')
class Migration(migrations.Migration):
dependencies = [
('accounting', '0025_auto_20180508_1952'),
]
operations = [
HqRunPython(_convert_emailed_to_array_field, reverse_code=noop)
]
| Add noop to migration file | Add noop to migration file
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq |
a0ab64fe0582f6001e39b942bcb1b120e068432d | setup.py | setup.py | import os
from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename, 'r').readlines())
def read_file(filename):
filepath = os.path.join(os.path.dirname(__file__), filename)
return open(filepath, 'r').read()
setup(
name="python-smpp",
version="0.1.7a",
url='http://github.com/praekelt/python-smpp',
license='BSD',
description="Python SMPP Library",
long_description=read_file('README.rst'),
author='Praekelt Foundation',
author_email='[email protected]',
packages=find_packages(),
install_requires=['setuptools'].extend(listify('requirements.pip')),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
| import os
from setuptools import setup, find_packages
def listify(filename):
filepath = os.path.join(os.path.dirname(__file__), filename)
return filter(None, open(filepath, 'r').readlines())
def read_file(filename):
filepath = os.path.join(os.path.dirname(__file__), filename)
return open(filepath, 'r').read()
setup(
name="python-smpp",
version="0.1.7a",
url='http://github.com/praekelt/python-smpp',
license='BSD',
description="Python SMPP Library",
long_description=read_file('README.rst'),
author='Praekelt Foundation',
author_email='[email protected]',
packages=find_packages(),
install_requires=['setuptools'].extend(listify('requirements.pip')),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
| Use full file path for getting list of requirements (thanks @hodgestar) | Use full file path for getting list of requirements (thanks @hodgestar)
| Python | bsd-3-clause | praekelt/python-smpp,praekelt/python-smpp |
30fcfb5fec7933d69d9117be6219945406012ef5 | setup.py | setup.py | from setuptools import setup
description = 'New testament greek app for django.'
long_desc = open('README.rst').read()
setup(
name='django-greekapp',
version='0.0.1',
url='https://github.com/honza/greekapp',
install_requires=['django', 'redis'],
description=description,
long_description=long_desc,
author='Honza Pokorny',
author_email='[email protected]',
maintainer='Honza Pokorny',
maintainer_email='[email protected]',
packages=['greekapp'],
package_data={
'greekapp': [
'templates/greekapp/index.html',
'static/greekapp.min.js',
'static/greekapp.css'
]
}
)
| from setuptools import setup
description = 'New testament greek app for django.'
long_desc = open('README.rst').read()
setup(
name='django-greekapp',
version='0.0.1',
url='https://github.com/honza/greekapp',
install_requires=['django', 'redis'],
description=description,
long_description=long_desc,
author='Honza Pokorny',
author_email='[email protected]',
maintainer='Honza Pokorny',
maintainer_email='[email protected]',
packages=['greekapp'],
package_data={
'greekapp': [
'templates/greekapp/index.html',
'static/greekapp.min.js',
'static/greekapp.css',
'managements/commands/nt.db'
]
}
)
| Include nt.db with package data. | Include nt.db with package data.
| Python | bsd-2-clause | honza/greekapp,honza/greekapp,honza/greekapp |
a7f17a7fa3761126d6edb890f3420556f663b4c0 | setup.py | setup.py | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = "1.0.0b2",
author = "Jan-Christopher Pien",
author_email = "[email protected]",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"djangorestframework",
"schedule",
"django-cache-machine >= 0.8",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
) | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = "1.0.0b2",
author = "Jan-Christopher Pien",
author_email = "[email protected]",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"djangorestframework = 2.3",
"schedule",
"django-cache-machine >= 0.8",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
) | Add specific version of rest framework | Add specific version of rest framework
| Python | mit | fraunhoferfokus/mobile-city-memory,fraunhoferfokus/mobile-city-memory,jessepeng/coburg-city-memory,jessepeng/coburg-city-memory |
14bf00afda2fc052dec0ae1655ba6a14073761cd | data/data.py | data/data.py | from __future__ import print_function, division
import hashlib
import os
import json
d = json.load(open("data/hashList.txt"))
def generate_file_md5(filename, blocksize=2**20):
m = hashlib.md5()
with open(filename, "rb") as f:
while True:
buf = f.read(blocksize)
if not buf:
break
m.update(buf)
return m.hexdigest()
def check_hashes(d):
all_good = True
counter = 0
for k, v in d.items():
digest = generate_file_md5(k)
if v == digest:
counter += 1
#print("The file {0} has the correct hash.".format(k))
else:
print("ERROR: The file {0} has the WRONG hash!".format(k))
all_good = False
print("There are " + str(counter) + " correct files.")
return all_good
if __name__ == "__main__":
check_hashes(d)
| from __future__ import print_function, division
import hashlib
import os
import json
d = json.load(open("./hashList.txt"))
def generate_file_md5(filename, blocksize=2**20):
m = hashlib.md5()
with open(filename, "rb") as f:
while True:
buf = f.read(blocksize)
if not buf:
break
m.update(buf)
return m.hexdigest()
def check_hashes(d):
all_good = True
counter = 0
for k, v in d.items():
digest = generate_file_md5(k)
if v == digest:
counter += 1
#print("The file {0} has the correct hash.".format(k))
else:
print("ERROR: The file {0} has the WRONG hash!".format(k))
all_good = False
print("There are " + str(counter) + " correct files.")
return all_good
if __name__ == "__main__":
check_hashes(d)
| Change path name to './' | Change path name to './'
| Python | bsd-3-clause | berkeley-stat159/project-theta |
73007bf3b2764c464dd475fa62d8c2651efe20eb | setup.py | setup.py | import os
from setuptools import setup
readme_path = os.path.join(os.path.dirname(
os.path.abspath(__file__)),
'README.rst',
)
long_description = open(readme_path).read()
version_path = os.path.join(os.path.dirname(
os.path.abspath(__file__)),
'VERSION',
)
version = open(version_path).read()
setup(
name='flask-ldap3-login',
version=version,
packages=['flask_ldap3_login'],
author="Nick Whyte",
author_email='[email protected]',
description="LDAP Support for Flask in Python3/2",
long_description=long_description,
url='https://github.com/nickw444/flask-ldap3-login',
zip_safe=False,
install_requires=[
"ldap3",
"Flask",
"Flask-wtf",
"enum34"
],
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Flask',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2.6',
],
test_suite="flask_ldap3_login_tests",
)
| import os
from setuptools import setup
readme_path = os.path.join(os.path.dirname(
os.path.abspath(__file__)),
'README.rst',
)
long_description = open(readme_path).read()
version_path = os.path.join(os.path.dirname(
os.path.abspath(__file__)),
'VERSION',
)
version = open(version_path).read()
requires = ['ldap3' ,'Flask', 'Flask-wtf']
try:
import enum
except Exception as e:
requires.append('enum34')
setup(
name='flask-ldap3-login',
version=version,
packages=['flask_ldap3_login'],
author="Nick Whyte",
author_email='[email protected]',
description="LDAP Support for Flask in Python3/2",
long_description=long_description,
url='https://github.com/nickw444/flask-ldap3-login',
zip_safe=False,
install_requires=requires,
classifiers=[
'Intended Audience :: Developers',
'Programming Language :: Python',
'Environment :: Web Environment',
'Framework :: Flask',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2.6',
],
test_suite="flask_ldap3_login_tests",
)
| Fix for when using python3.5. Don't install enum34 if enum already exists (python35) | Fix for when using python3.5. Don't install enum34 if enum already exists (python35)
| Python | mit | mwielgoszewski/flask-ldap3-login,nickw444/flask-ldap3-login |
c53f03c738ad6357ccd87a506cbc05bc1e2a8474 | views.py | views.py | from django.http import HttpResponse
from django.shortcuts import render_to_response
from store.models import FandomHierarchy
def frontpage(request, filter):
return render_to_response('index.html', {'filter': filter, 'nodes': FandomHierarchy.objects.all()})
| from django.http import HttpResponse
from django.shortcuts import render_to_response
from store.models import FandomHierarchy
def frontpage(request, filter=None):
return render_to_response('index.html', {'filter': filter, 'nodes': FandomHierarchy.objects.all()})
| Add default value for filter so things don't break | Add default value for filter so things don't break
| Python | bsd-3-clause | willmurnane/store |
7ac1d811b4f9e56d3dbcab99862c92b2dd6376d7 | webapp-django/crashstats/dataservice/models.py | webapp-django/crashstats/dataservice/models.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.conf import settings
from configman import configuration, ConfigFileFutureProxy, Namespace
from socorro.app.socorro_app import App
from socorro.dataservice.util import (
classes_in_namespaces_converter,
)
SERVICES_LIST = ('socorro.external.postgresql.bugs_service.Bugs')
# Allow configman to dynamically load the configuration and classes
# for our API dataservice objects
def_source = Namespace()
def_source.namespace('services')
def_source.services.add_option(
'service_list',
default=SERVICES_LIST,
from_string_converter=classes_in_namespaces_converter()
)
settings.DATASERVICE_CONFIG = configuration(
definition_source=[
def_source,
App.get_required_config(),
],
values_source_list=[
settings.DATASERVICE_INI,
ConfigFileFutureProxy,
]
)
| # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from django.conf import settings
from configman import configuration, ConfigFileFutureProxy, Namespace
from socorro.app.socorro_app import App
from socorro.dataservice.util import (
classes_in_namespaces_converter,
)
SERVICES_LIST = ('socorro.external.postgresql.bugs_service.Bugs',)
# Allow configman to dynamically load the configuration and classes
# for our API dataservice objects
def_source = Namespace()
def_source.namespace('services')
def_source.services.add_option(
'service_list',
default=','.join(SERVICES_LIST),
from_string_converter=classes_in_namespaces_converter()
)
settings.DATASERVICE_CONFIG = configuration(
definition_source=[
def_source,
App.get_required_config(),
],
values_source_list=[
settings.DATASERVICE_INI,
ConfigFileFutureProxy,
]
)
| Change SERVICE_LIST to a tuple | Change SERVICE_LIST to a tuple
| Python | mpl-2.0 | mozilla/socorro,luser/socorro,cliqz/socorro,twobraids/socorro,cliqz/socorro,adngdb/socorro,mozilla/socorro,mozilla/socorro,pcabido/socorro,Tayamarn/socorro,twobraids/socorro,yglazko/socorro,AdrianGaudebert/socorro,luser/socorro,linearregression/socorro,lonnen/socorro,KaiRo-at/socorro,Tayamarn/socorro,linearregression/socorro,rhelmer/socorro,luser/socorro,twobraids/socorro,m8ttyB/socorro,twobraids/socorro,yglazko/socorro,KaiRo-at/socorro,cliqz/socorro,spthaolt/socorro,Tayamarn/socorro,cliqz/socorro,pcabido/socorro,Tchanders/socorro,Serg09/socorro,Tayamarn/socorro,linearregression/socorro,adngdb/socorro,Tchanders/socorro,linearregression/socorro,Tchanders/socorro,cliqz/socorro,adngdb/socorro,KaiRo-at/socorro,twobraids/socorro,spthaolt/socorro,spthaolt/socorro,Serg09/socorro,AdrianGaudebert/socorro,adngdb/socorro,AdrianGaudebert/socorro,spthaolt/socorro,spthaolt/socorro,rhelmer/socorro,AdrianGaudebert/socorro,lonnen/socorro,rhelmer/socorro,lonnen/socorro,AdrianGaudebert/socorro,Tchanders/socorro,pcabido/socorro,Serg09/socorro,mozilla/socorro,AdrianGaudebert/socorro,m8ttyB/socorro,luser/socorro,Tchanders/socorro,KaiRo-at/socorro,pcabido/socorro,cliqz/socorro,rhelmer/socorro,linearregression/socorro,pcabido/socorro,yglazko/socorro,pcabido/socorro,mozilla/socorro,m8ttyB/socorro,KaiRo-at/socorro,KaiRo-at/socorro,luser/socorro,twobraids/socorro,yglazko/socorro,Tayamarn/socorro,rhelmer/socorro,adngdb/socorro,rhelmer/socorro,Serg09/socorro,adngdb/socorro,yglazko/socorro,luser/socorro,m8ttyB/socorro,Serg09/socorro,linearregression/socorro,m8ttyB/socorro,m8ttyB/socorro,lonnen/socorro,yglazko/socorro,Tchanders/socorro,mozilla/socorro,Serg09/socorro,spthaolt/socorro,Tayamarn/socorro |
49f1715067df0208c79a1af2e73d6aa314b96bef | django_su/utils.py | django_su/utils.py | # -*- coding: utf-8 -*-
import warnings
import collections
from django.conf import settings
from django.utils.module_loading import import_string
def su_login_callback(user):
if hasattr(settings, 'SU_LOGIN'):
warnings.warn(
"SU_LOGIN is deprecated, use SU_LOGIN_CALLBACK",
DeprecationWarning,
)
func = getattr(settings, 'SU_LOGIN_CALLBACK', None)
if func is not None:
if not isinstance(func, collections.Callable):
func = import_string(func)
return func(user)
return user.has_perm('auth.change_user')
def custom_login_action(request, user):
func = getattr(settings, 'SU_CUSTOM_LOGIN_ACTION', None)
if func is None:
return False
if not isinstance(func, collections.Callable):
func = import_string(func)
func(request, user)
return True
| # -*- coding: utf-8 -*-
import warnings
from collections.abc import Callable
from django.conf import settings
from django.utils.module_loading import import_string
def su_login_callback(user):
if hasattr(settings, 'SU_LOGIN'):
warnings.warn(
"SU_LOGIN is deprecated, use SU_LOGIN_CALLBACK",
DeprecationWarning,
)
func = getattr(settings, 'SU_LOGIN_CALLBACK', None)
if func is not None:
if not isinstance(func, Callable):
func = import_string(func)
return func(user)
return user.has_perm('auth.change_user')
def custom_login_action(request, user):
func = getattr(settings, 'SU_CUSTOM_LOGIN_ACTION', None)
if func is None:
return False
if not isinstance(func, Callable):
func = import_string(func)
func(request, user)
return True
| Update collections.Callable typecheck to collections.abc.Callable | Update collections.Callable typecheck to collections.abc.Callable
| Python | mit | adamcharnock/django-su,PetrDlouhy/django-su,PetrDlouhy/django-su,adamcharnock/django-su |
a45c79b10ef5ca6eb4b4e792f2229b2f9b0a7bbf | thinglang/foundation/definitions.py | thinglang/foundation/definitions.py | import itertools
from thinglang.lexer.values.identifier import Identifier
"""
The internal ordering of core types used by the compiler and runtime
"""
INTERNAL_TYPE_COUNTER = itertools.count(1)
# TODO: map dynamically at runtime
INTERNAL_TYPE_ORDERING = {
Identifier("text"): next(INTERNAL_TYPE_COUNTER),
Identifier("number"): next(INTERNAL_TYPE_COUNTER),
Identifier("bool"): next(INTERNAL_TYPE_COUNTER),
Identifier("list"): next(INTERNAL_TYPE_COUNTER),
Identifier("map"): next(INTERNAL_TYPE_COUNTER),
Identifier("iterator"): next(INTERNAL_TYPE_COUNTER),
Identifier("Console"): next(INTERNAL_TYPE_COUNTER),
Identifier("File"): next(INTERNAL_TYPE_COUNTER),
Identifier("Directory"): next(INTERNAL_TYPE_COUNTER),
Identifier("Time"): next(INTERNAL_TYPE_COUNTER),
Identifier("Exception"): next(INTERNAL_TYPE_COUNTER)
}
| import glob
import os
from thinglang.lexer.values.identifier import Identifier
"""
The internal ordering of core types used by the compiler and runtime
"""
CURRENT_PATH = os.path.dirname(os.path.abspath(__file__))
SOURCE_PATTERN = os.path.join(CURRENT_PATH, 'source/**/*.thing')
def list_types():
for path in glob.glob(SOURCE_PATTERN, recursive=True):
name = os.path.basename(path).replace('.thing', '')
yield name, path
PRIMITIVE_TYPES = [
'text',
'number'
]
INTERNAL_SOURCES = {Identifier(name): path for name, path in list_types()} | Remove manual INTERNAL_TYPE_ORDERING map in favor of explicit import tables | Remove manual INTERNAL_TYPE_ORDERING map in favor of explicit import tables
| Python | mit | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang |
3b2fae7875d89adb8537b75c7e9b48a8663a9d4f | src/rnaseq_lib/web/synapse.py | src/rnaseq_lib/web/synapse.py | import os
from synapseclient import Synapse, File
expression = 'syn11311347'
metadata = 'syn11311931'
def upload_file(file_path, login, parent, description=None):
"""
Uploads file to Synapse. Password must be stored in environment variable SYNAPSE_PASS
:param str file_path: Path to file
:param str login: Login (usually an email address)
:param str parent: Parent Synapse ID (example: syn12312415) where file will be placed
:param str description: Optional description to add
"""
description = '' if None else description
f = File(file_path, description=description, parent=parent)
assert 'SYNAPSE_PASS' in os.environ, 'SYNAPSE_PASS must be set as an environment variable'
syn = Synapse()
syn.login(login, os.environ['SYNAPSE_PASS'])
syn.store(f)
| import os
from synapseclient import Synapse, File
expression = 'syn11311347'
metadata = 'syn11311931'
def upload_file(file_path, login, parent, description=None):
"""
Uploads file to Synapse. Password must be stored in environment variable SYNAPSE_PASS
:param str file_path: Path to file
:param str login: Login (usually an email address)
:param str parent: Parent Synapse ID (example: syn12312415) where file will be placed
:param str description: Optional description to add
"""
description = '' if None else description
f = File(file_path, description=description, parent=parent)
syn = _syn_login(login)
syn.store(f)
def download_file(synid, login, download_location='.'):
"""
Synapse ID of file to download
:param str synid: Synapse ID
:param str login: Synapse ID
:param str download_location: Download location for file
"""
syn = _syn_login(login)
syn.get(synid, downloadLocation=download_location)
def _syn_login(login):
"""
Login to synapse. Set environment variable SYNAPSE_PASS to the password for `login`
:param str login:
:return: Synapse instance
:rtype: instance
"""
assert 'SYNAPSE_PASS' in os.environ, 'SYNAPSE_PASS must be set as an environment variable'
syn = Synapse()
syn.login(login, os.environ['SYNAPSE_PASS'])
return syn
| Add download and login functions | Add download and login functions
| Python | mit | jvivian/rnaseq-lib,jvivian/rnaseq-lib |
b7335f5c011d9fad3570a097fb1165cc6fbd3cef | src/python/grpcio_tests/tests/unit/_logging_test.py | src/python/grpcio_tests/tests/unit/_logging_test.py | # Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of gRPC Python's interaction with the python logging module"""
import unittest
import six
import grpc
import logging
class LoggingTest(unittest.TestCase):
def test_logger_not_occupied(self):
self.assertEqual(0, len(logging.getLogger().handlers))
if __name__ == '__main__':
unittest.main(verbosity=2)
| # Copyright 2018 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test of gRPC Python's interaction with the python logging module"""
import unittest
import six
from six.moves import reload_module
import logging
import grpc
import functools
import sys
class LoggingTest(unittest.TestCase):
def test_logger_not_occupied(self):
self.assertEqual(0, len(logging.getLogger().handlers))
def test_handler_found(self):
old_stderr = sys.stderr
sys.stderr = six.StringIO()
try:
reload_module(logging)
logging.basicConfig()
reload_module(grpc)
self.assertFalse("No handlers could be found" in sys.stderr.getvalue())
finally:
sys.stderr = old_stderr
reload_module(logging)
if __name__ == '__main__':
unittest.main(verbosity=2)
| Add test for 'No handlers could be found' problem | Add test for 'No handlers could be found' problem
| Python | apache-2.0 | mehrdada/grpc,sreecha/grpc,stanley-cheung/grpc,vjpai/grpc,mehrdada/grpc,muxi/grpc,pszemus/grpc,stanley-cheung/grpc,jtattermusch/grpc,donnadionne/grpc,grpc/grpc,mehrdada/grpc,pszemus/grpc,ctiller/grpc,nicolasnoble/grpc,firebase/grpc,donnadionne/grpc,ctiller/grpc,jtattermusch/grpc,donnadionne/grpc,vjpai/grpc,muxi/grpc,donnadionne/grpc,grpc/grpc,vjpai/grpc,carl-mastrangelo/grpc,carl-mastrangelo/grpc,stanley-cheung/grpc,ctiller/grpc,jboeuf/grpc,donnadionne/grpc,ejona86/grpc,jboeuf/grpc,carl-mastrangelo/grpc,carl-mastrangelo/grpc,ctiller/grpc,nicolasnoble/grpc,grpc/grpc,pszemus/grpc,stanley-cheung/grpc,sreecha/grpc,jtattermusch/grpc,stanley-cheung/grpc,ctiller/grpc,nicolasnoble/grpc,nicolasnoble/grpc,nicolasnoble/grpc,grpc/grpc,pszemus/grpc,carl-mastrangelo/grpc,mehrdada/grpc,nicolasnoble/grpc,grpc/grpc,jtattermusch/grpc,pszemus/grpc,muxi/grpc,carl-mastrangelo/grpc,ctiller/grpc,vjpai/grpc,grpc/grpc,ctiller/grpc,jtattermusch/grpc,sreecha/grpc,vjpai/grpc,firebase/grpc,donnadionne/grpc,sreecha/grpc,donnadionne/grpc,muxi/grpc,grpc/grpc,muxi/grpc,sreecha/grpc,pszemus/grpc,vjpai/grpc,firebase/grpc,grpc/grpc,jboeuf/grpc,jboeuf/grpc,carl-mastrangelo/grpc,firebase/grpc,ejona86/grpc,pszemus/grpc,ejona86/grpc,stanley-cheung/grpc,stanley-cheung/grpc,ejona86/grpc,vjpai/grpc,ejona86/grpc,vjpai/grpc,vjpai/grpc,mehrdada/grpc,pszemus/grpc,muxi/grpc,jtattermusch/grpc,jtattermusch/grpc,stanley-cheung/grpc,ctiller/grpc,mehrdada/grpc,ctiller/grpc,grpc/grpc,ejona86/grpc,pszemus/grpc,jtattermusch/grpc,firebase/grpc,ejona86/grpc,firebase/grpc,nicolasnoble/grpc,firebase/grpc,ejona86/grpc,nicolasnoble/grpc,mehrdada/grpc,firebase/grpc,donnadionne/grpc,stanley-cheung/grpc,pszemus/grpc,jboeuf/grpc,donnadionne/grpc,vjpai/grpc,donnadionne/grpc,mehrdada/grpc,ctiller/grpc,muxi/grpc,vjpai/grpc,pszemus/grpc,stanley-cheung/grpc,jboeuf/grpc,mehrdada/grpc,carl-mastrangelo/grpc,jtattermusch/grpc,carl-mastrangelo/grpc,mehrdada/grpc,muxi/grpc,jboeuf/grpc,ctiller/grpc,mehrdada/grpc,nicola
snoble/grpc,carl-mastrangelo/grpc,ejona86/grpc,ejona86/grpc,mehrdada/grpc,muxi/grpc,muxi/grpc,pszemus/grpc,donnadionne/grpc,nicolasnoble/grpc,sreecha/grpc,jboeuf/grpc,sreecha/grpc,carl-mastrangelo/grpc,jtattermusch/grpc,donnadionne/grpc,ctiller/grpc,firebase/grpc,vjpai/grpc,carl-mastrangelo/grpc,jboeuf/grpc,firebase/grpc,jtattermusch/grpc,jtattermusch/grpc,muxi/grpc,grpc/grpc,sreecha/grpc,sreecha/grpc,ejona86/grpc,grpc/grpc,sreecha/grpc,stanley-cheung/grpc,firebase/grpc,muxi/grpc,stanley-cheung/grpc,jboeuf/grpc,jboeuf/grpc,sreecha/grpc,nicolasnoble/grpc,grpc/grpc,firebase/grpc,sreecha/grpc,ejona86/grpc,nicolasnoble/grpc,jboeuf/grpc |
8fc504113e12649067fb2bdcc239f8f2260ad4b8 | tests/test_quality/test_restoringbeam.py | tests/test_quality/test_restoringbeam.py | import os
import unittest2 as unittest
from tkp.quality.restoringbeam import beam_invalid
from tkp.testutil.decorators import requires_data
from tkp import accessors
from tkp.testutil.data import DATAPATH
fits_file = os.path.join(DATAPATH,
'quality/noise/bad/home-pcarrol-msss-3C196a-analysis-band6.corr.fits')
@requires_data(fits_file)
class TestRestoringBeam(unittest.TestCase):
def test_header(self):
image = accessors.open(fits_file)
(semimaj, semimin, theta) = image.beam
self.assertFalse(beam_invalid(semimaj, semimin))
# TODO: this is for FOV calculation and checking
#data = tkp.quality.restoringbeam.parse_fits(image)
#frequency = image.freq_eff
#wavelength = scipy.constants.c/frequency
#d = 32.25
#fwhm = tkp.lofar.beam.fwhm(wavelength, d)
#fov = tkp.lofar.beam.fov(fwhm)
if __name__ == '__main__':
unittest.main()
| import os
import unittest2 as unittest
from tkp.quality.restoringbeam import beam_invalid
from tkp.testutil.decorators import requires_data
from tkp import accessors
from tkp.testutil.data import DATAPATH
fits_file = os.path.join(DATAPATH,
'quality/noise/bad/home-pcarrol-msss-3C196a-analysis-band6.corr.fits')
@requires_data(fits_file)
class TestRestoringBeam(unittest.TestCase):
def test_header(self):
image = accessors.open(fits_file)
(semimaj, semimin, theta) = image.beam
self.assertFalse(beam_invalid(semimaj, semimin))
# TODO: this is for FOV calculation and checking
#data = tkp.quality.restoringbeam.parse_fits(image)
#frequency = image.freq_eff
#wavelength = scipy.constants.c/frequency
#d = 32.25
#fwhm = tkp.lofar.beam.fwhm(wavelength, d)
#fov = tkp.lofar.beam.fov(fwhm)
def test_infinite(self):
smaj, smin, theta = float('inf'), float('inf'), float('inf')
self.assertTrue(beam_invalid(smaj, smin, theta))
if __name__ == '__main__':
unittest.main()
| Test for infinite beam QC | Test for infinite beam QC
| Python | bsd-2-clause | transientskp/tkp,mkuiack/tkp,mkuiack/tkp,transientskp/tkp,bartscheers/tkp,bartscheers/tkp |
62d9fdfe0ad3fc37286aa19a87e2890aaf90f639 | tasks/check_rd2_enablement.py | tasks/check_rd2_enablement.py | import simple_salesforce
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class is_rd2_enabled(BaseSalesforceApiTask):
def _run_task(self):
try:
settings = self.sf.query(
"SELECT IsRecurringDonations2Enabled__c "
"FROM npe03__Recurring_Donations_Settings__c "
"WHERE SetupOwnerId IN (SELECT Id FROM Organization)"
)
except simple_salesforce.exceptions.SalesforceMalformedRequest:
# The field does not exist in the target org, meaning it's
# pre-RD2
self.return_values = False
return
if settings.get("records"):
if settings["records"][0]["IsRecurringDonations2Enabled__c"]:
self.return_values = True
self.return_values = False | import simple_salesforce
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
class is_rd2_enabled(BaseSalesforceApiTask):
def _run_task(self):
try:
settings = self.sf.query(
"SELECT IsRecurringDonations2Enabled__c "
"FROM npe03__Recurring_Donations_Settings__c "
"WHERE SetupOwnerId IN (SELECT Id FROM Organization)"
)
except simple_salesforce.exceptions.SalesforceMalformedRequest:
# The field does not exist in the target org, meaning it's
# pre-RD2
self.return_values = False
return
if settings.get("records"):
if settings["records"][0]["IsRecurringDonations2Enabled__c"]:
self.return_values = True
return
self.return_values = False | Correct bug in preflight check | Correct bug in preflight check
| Python | bsd-3-clause | SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus |
7cbd21a050a9e94d0f8f1f5c3ce4f81c812e279c | trump/templating/tests/test_templates.py | trump/templating/tests/test_templates.py |
from ..templates import QuandlFT
class TestTemplates(object):
def test_quandl_ft(self):
ftemp = QuandlFT("xxx", trim_start="yyyy-mm-dd", authtoken="yyy")
assert ftemp.sourcing == {'authtoken': 'yyy',
'trim_start': 'yyyy-mm-dd',
'dataset': 'xxx'}
|
from ..templates import QuandlFT, QuandlSecureFT, GoogleFinanceFT
class TestTemplates(object):
def test_quandl_ft(self):
ftemp = QuandlFT("xxx", trim_start="yyyy-mm-dd", authtoken="yyy")
assert ftemp.sourcing == {'authtoken': 'yyy',
'trim_start': 'yyyy-mm-dd',
'dataset': 'xxx'}
def test_quandl_secure_ft(self):
ftemp = QuandlSecureFT("xxx", trim_start="yyyy-mm-dd")
assert ftemp.sourcing == {'trim_start': 'yyyy-mm-dd',
'dataset': 'xxx'}
assert ftemp.meta == {'sourcing_key' : 'userone',
'stype' : 'Quandl'}
def test_google_finance_ft(self):
ftemp = GoogleFinanceFT("xxx")
assert ftemp.sourcing == {'name': 'xxx',
'start': '2000-01-01,
'end': 'now',
'data_source' : 'google',
'data_column' : 'Close'}
assert ftemp.meta == {'stype' : 'PyDataDataReaderST'}
| Add two tests for templates | Add two tests for templates | Python | bsd-3-clause | Equitable/trump,Asiant/trump,jnmclarty/trump |
ea17a76c4ada65dac9e909b930c938a24ddb99b2 | tests/formatter/test_csver.py | tests/formatter/test_csver.py | import unittest, argparse
from echolalia.formatter.csver import Formatter
class CsverTestCase(unittest.TestCase):
def setUp(self):
self.parser = argparse.ArgumentParser()
self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
self.formatter = Formatter()
def test_add_args(self):
new_parser = self.formatter.add_args(self.parser)
self.assertEqual(new_parser, self.parser)
args = new_parser.parse_args(['--with_header'])
self.assertTrue(args.with_header)
args = new_parser.parse_args([])
self.assertFalse(args.with_header)
def test_marshall_no_header(self):
new_parser = self.formatter.add_args(self.parser)
args = new_parser.parse_args([])
result = self.formatter.marshall(args, self.data)
expect = "a,1\r\nb,2\r\nc,3\r\n"
def test_marshall_with_header(self):
new_parser = self.formatter.add_args(self.parser)
args = new_parser.parse_args(['--with_header'])
result = self.formatter.marshall(args, self.data)
expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
self.assertEqual(result, expect)
| import unittest, argparse
from echolalia.formatter.csver import Formatter
class CsverTestCase(unittest.TestCase):
def setUp(self):
self.parser = argparse.ArgumentParser()
self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
self.formatter = Formatter()
def test_add_args(self):
new_parser = self.formatter.add_args(self.parser)
self.assertEqual(new_parser, self.parser)
args = new_parser.parse_args(['--with_header'])
self.assertTrue(args.with_header)
args = new_parser.parse_args([])
self.assertFalse(args.with_header)
def test_marshall_no_header(self):
new_parser = self.formatter.add_args(self.parser)
args = new_parser.parse_args([])
result = self.formatter.marshall(args, self.data)
expect = "a,1\r\nb,2\r\nc,3\r\n"
self.assertEqual(result, expect)
def test_marshall_with_header(self):
new_parser = self.formatter.add_args(self.parser)
args = new_parser.parse_args(['--with_header'])
result = self.formatter.marshall(args, self.data)
expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
self.assertEqual(result, expect)
| Fix no header test for csv formatter | Fix no header test for csv formatter
| Python | mit | eiri/echolalia-prototype |
1bb03750b997a152a9360be613a15057e59b8a17 | beyonic/__init__.py | beyonic/__init__.py | # Beyonic API Python bindings
#default values if any
DEFAULT_ENDPOINT_BASE = 'https://app.beyonic.com/api/'
#config
api_key = None
api_endpoint_base = None
api_version = None
verify_ssl_certs = True #set to False if you want to bypass SSL checks(mostly useful while testing it on local env).
from beyonic.apis.payment import Payment
from beyonic.apis.webhook import Webhook
from beyonic.apis.collection import Collection
from beyonic.apis.collectionrequest import CollectionRequest
from beyonic.apis.accounts import Account
| # Beyonic API Python bindings
#default values if any
DEFAULT_ENDPOINT_BASE = 'https://app.beyonic.com/api/'
#config
api_key = None
api_endpoint_base = None
api_version = None
verify_ssl_certs = True #set to False if you want to bypass SSL checks(mostly useful while testing it on local env).
from beyonic.apis.payment import Payment
from beyonic.apis.webhook import Webhook
from beyonic.apis.collection import Collection
from beyonic.apis.collectionrequest import CollectionRequest
from beyonic.apis.account import Account
| Fix accounts api wrapper import error | Fix accounts api wrapper import error
| Python | mit | beyonic/beyonic-python,beyonic/beyonic-python,beyonic/beyonic-python |
3a5432e14c18852758afdf92b913c93906808e3e | cinder/db/sqlalchemy/migrate_repo/versions/115_add_shared_targets_to_volumes.py | cinder/db/sqlalchemy/migrate_repo/versions/115_add_shared_targets_to_volumes.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Boolean, Column, MetaData, Table
def upgrade(migrate_engine):
"""Add shared_targets column to Volumes."""
meta = MetaData()
meta.bind = migrate_engine
volumes = Table('volumes', meta, autoload=True)
# NOTE(jdg): We use a default of True because it's harmless for a device
# that does NOT use shared_targets to be treated as if it does
shared_targets = Column('shared_targets',
Boolean,
default=True)
volumes.create_column(shared_targets)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Boolean, Column, MetaData, Table
def upgrade(migrate_engine):
"""Add shared_targets column to Volumes."""
meta = MetaData()
meta.bind = migrate_engine
volumes = Table('volumes', meta, autoload=True)
# NOTE(jdg): We use a default of True because it's harmless for a device
# that does NOT use shared_targets to be treated as if it does
if not hasattr(volumes.c, 'shared_targets'):
volumes.create_column(Column('shared_targets', Boolean, default=True))
| Add 'shared_targets' only when it doesn't exist | Add 'shared_targets' only when it doesn't exist
Add existence check before actually create it.
Change-Id: I96946f736d7263f80f7ad24f8cbbc9a09eb3cc63
| Python | apache-2.0 | phenoxim/cinder,Datera/cinder,mahak/cinder,openstack/cinder,j-griffith/cinder,openstack/cinder,mahak/cinder,j-griffith/cinder,Datera/cinder,phenoxim/cinder |
052042e2f48b7936a6057c18a128f497d5e5b1a4 | folium/__init__.py | folium/__init__.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
__version__ = '0.2.0.dev'
from folium.folium import Map, initialize_notebook, CircleMarker
from folium.map import FeatureGroup, FitBounds,Icon, LayerControl, Marker, Popup, TileLayer
from folium.features import (ClickForMarker, ColorScale, CustomIcon, DivIcon, GeoJson, GeoJsonStyle,
ImageOverlay, LatLngPopup, MarkerCluster, MultiPolyLine, PolyLine,
RegularPolygonMarker, TopoJson, Vega, WmsTileLayer)
| Make features accessible from root | Make features accessible from root
| Python | mit | QuLogic/folium,talespaiva/folium,andrewgiessel/folium,themiurgo/folium,shankari/folium,python-visualization/folium,talespaiva/folium,QuLogic/folium,BibMartin/folium,ocefpaf/folium,themiurgo/folium,talespaiva/folium,andrewgiessel/folium,BibMartin/folium,ocefpaf/folium,python-visualization/folium,shankari/folium,shankari/folium,themiurgo/folium,BibMartin/folium,andrewgiessel/folium,talespaiva/folium,QuLogic/folium |
6e764429961831632d6245f5587250b4772b1474 | gaphas/picklers.py | gaphas/picklers.py | """
Some extra picklers needed to gracefully dump and load a canvas.
"""
from future import standard_library
standard_library.install_aliases()
import copyreg
# Allow instancemethod to be pickled:
import types
def construct_instancemethod(funcname, self, clazz):
func = getattr(clazz, funcname)
return types.MethodType(func, self, clazz)
def reduce_instancemethod(im):
return (
construct_instancemethod,
(im.__func__.__name__, im.__self__, im.__self__.__class__),
)
copyreg.pickle(types.MethodType, reduce_instancemethod, construct_instancemethod)
# Allow cairo.Matrix to be pickled:
import cairo
def construct_cairo_matrix(*args):
return cairo.Matrix(*args)
def reduce_cairo_matrix(m):
return construct_cairo_matrix, tuple(m)
copyreg.pickle(cairo.Matrix, reduce_cairo_matrix, construct_cairo_matrix)
# vim:sw=4:et:ai
| """
Some extra picklers needed to gracefully dump and load a canvas.
"""
from future import standard_library
standard_library.install_aliases()
import copyreg
# Allow instancemethod to be pickled:
import types
def construct_instancemethod(funcname, self, clazz):
func = getattr(clazz, funcname)
return types.MethodType(func, self)
def reduce_instancemethod(im):
return (
construct_instancemethod,
(im.__func__.__name__, im.__self__, im.__self__.__class__),
)
copyreg.pickle(types.MethodType, reduce_instancemethod, construct_instancemethod)
# Allow cairo.Matrix to be pickled:
import cairo
def construct_cairo_matrix(*args):
return cairo.Matrix(*args)
def reduce_cairo_matrix(m):
return construct_cairo_matrix, tuple(m)
copyreg.pickle(cairo.Matrix, reduce_cairo_matrix, construct_cairo_matrix)
# vim:sw=4:et:ai
| Fix MethodType only takes two parameters | Fix MethodType only takes two parameters
Signed-off-by: Dan Yeaw <[email protected]>
| Python | lgpl-2.1 | amolenaar/gaphas |
bb6f4302937e477f23c4de0d6a265d1d6f8985a0 | geometry_export.py | geometry_export.py | print "Loading ", __name__
import geometry, from_poser, to_lux
reload(geometry)
reload(from_poser)
reload(to_lux)
import from_poser, to_lux
class GeometryExporter(object):
def __init__(self, subject, convert_material = None,
write_mesh_parameters = None, options = {}):
geom = from_poser.get(subject)
if geom is None or geom.is_empty:
print "Mesh is empty."
self.write = lambda file: None
else:
print "Mesh has", geom.number_of_polygons, "polygons and",
print geom.number_of_points, "vertices"
mats = geom.materials
key = geom.material_key
if convert_material:
materials = [convert_material(mat, key) for mat in mats]
else:
materials = [' NamedMaterial "%s/%s"' % (key, mat.Name())
for mat in mats]
if options.get('compute_normals', True) in [True, 1, '1', 'true']:
geom.compute_normals()
for i in xrange(int(options.get('subdivisionlevel', 0))):
print " subdividing: pass", (i+1)
geom.subdivide()
to_lux.preprocess(geom)
self.write = lambda file: to_lux.write(file, geom, materials,
write_mesh_parameters)
| print "Loading ", __name__
import geometry, from_poser, to_lux
reload(geometry)
reload(from_poser)
reload(to_lux)
import from_poser, to_lux
def get_materials(geometry, convert = None):
    """Return one statement string per material on *geometry*.

    *convert*, when given, is a callable ``(material, material_key) -> str``;
    otherwise a default Lux ``NamedMaterial`` reference is emitted.
    """
    def default_convert(mat, key):
        return ' NamedMaterial "%s/%s"' % (key, mat.Name())

    make = convert or default_convert
    key = geometry.material_key
    return [make(mat, key) for mat in geometry.materials]
def preprocess(geometry, options = {}):
    # Apply optional mesh preprocessing steps driven by exporter options.
    # NOTE(review): mutable default argument; it is only read here, so it is
    # harmless, but `options=None` with a fallback would be safer.
    # 'compute_normals' accepts several truthy spellings because option
    # values may arrive as strings from the host application's UI.
    if options.get('compute_normals', True) in [True, 1, '1', 'true']:
        geometry.compute_normals()
    # Each pass refines the whole mesh once (Python 2 code: xrange/print).
    for i in xrange(int(options.get('subdivisionlevel', 0))):
        print " subdividing: pass", (i+1)
        geometry.subdivide()
class GeometryExporter(object):
def __init__(self, subject, convert_material = None,
write_mesh_parameters = None, options = {}):
geom = from_poser.get(subject)
if geom is None or geom.is_empty:
print "Mesh is empty."
self.write = lambda file: None
else:
print "Mesh has", geom.number_of_polygons, "polygons and",
print geom.number_of_points, "vertices"
materials = get_materials(geom, convert_material)
preprocess(geom, options)
to_lux.preprocess(geom)
self.write = lambda file: to_lux.write(file, geom, materials,
write_mesh_parameters)
| Split off two functions from GeometryExporter.__init__ | Split off two functions from GeometryExporter.__init__
| Python | mit | odf/pydough |
77b1f64633d2b70e4e4fc490916e2a9ccae7228f | gignore/__init__.py | gignore/__init__.py | __version__ = (2014, 10, 0)
def get_version():
"""
:rtype: str
"""
return '.'.join(str(i) for i in __version__)
class Gignore(object):
BASE_URL = 'https://raw.githubusercontent.com/github/gitignore/master/'
name = None
file_content = None
def get_base_url(self):
"""
:rtype: str
"""
return self.BASE_URL
def set_name(self, name):
"""
:type name: str
"""
self.name = name
def get_name(self):
"""
:rtype: str
"""
return self.name
def set_file_content(self, file_content):
"""
:type file_content: str
"""
self.file_content = file_content
def get_file_content(self):
"""
:rtype: str
"""
return self.file_content
| __version__ = (2014, 10, 0)
def get_version():
"""
:rtype: str
"""
return '.'.join(str(i) for i in __version__)
class Gignore(object):
BASE_URL = 'https://raw.githubusercontent.com/github/gitignore/master/'
name = None
file_content = None
valid = True
def get_base_url(self):
"""
:rtype: str
"""
return self.BASE_URL
def set_name(self, name):
"""
:type name: str
"""
self.name = name
def get_name(self):
"""
:rtype: str
"""
return self.name
def set_file_content(self, file_content):
"""
:type file_content: str
"""
self.file_content = file_content
def get_file_content(self):
"""
:rtype: str
"""
return self.file_content
def is_valid(self):
"""
:rtype: bool
"""
return self.valid
def set_valid(self, valid):
"""
:type valid: bool
"""
self.valid = valid
| Add valid attribute with setter/getter | Add valid attribute with setter/getter
| Python | bsd-3-clause | Alir3z4/python-gignore |
883df49451aaa41ca5e20d1af799af2642615d5a | nineml/examples/AL/demos/demo1b_load_save_izekevich.py | nineml/examples/AL/demos/demo1b_load_save_izekevich.py | import nineml
from nineml.abstraction_layer.testing_utils import RecordValue, TestableComponent
from nineml.abstraction_layer import ComponentClass
from nineml.abstraction_layer.testing_utils import std_pynn_simulation
# Load the Component:
iz_file = '../../../../../../catalog/sample_xml_files/PostTF_izhikevich.xml'
iz = nineml.al.parse(iz_file)
# Write the component back out to XML
nineml.al.writers.XMLWriter.write(iz, 'TestOut_Iz.xml')
nineml.al.writers.DotWriter.write(iz, 'TestOut_Iz.dot')
nineml.al.writers.DotWriter.build('TestOut_Iz.dot')
# Simulate the Neuron:
records = [
RecordValue(what='V', tag='V', label='V'),
#RecordValue(what='U', tag='U', label='U'),
#RecordValue( what='regime', tag='Regime', label='Regime' ),
]
parameters = nineml.al.flattening.ComponentFlattener.flatten_namespace_dict({
'a': 0.02,
'b': 0.2,
'c': -65,
'd': 8,
'iinj_constant': 5.0,
})
res = std_pynn_simulation( test_component = iz,
parameters = parameters,
initial_values = {},
synapse_components = [],
records = records,
)
| import nineml
from nineml.abstraction_layer.testing_utils import RecordValue, TestableComponent
from nineml.abstraction_layer import ComponentClass
from nineml.abstraction_layer.testing_utils import std_pynn_simulation
# Load the Component:
iz_file = '../../../../../../catalog/sample_xml_files/PostTF_izhikevich.xml'
iz = nineml.al.parse(iz_file)
# Write the component back out to XML
nineml.al.writers.XMLWriter.write(iz, 'TestOut_Iz.xml')
nineml.al.writers.DotWriter.write(iz, 'TestOut_Iz.dot')
nineml.al.writers.DotWriter.build('TestOut_Iz.dot')
# Simulate the Neuron:
records = [
RecordValue(what='V', tag='V', label='V'),
#RecordValue(what='U', tag='U', label='U'),
#RecordValue( what='regime', tag='Regime', label='Regime' ),
]
parameters = nineml.al.flattening.ComponentFlattener.flatten_namespace_dict({
'a': 0.02,
'b': 0.2,
'c': -65,
'd': 8,
'iinj_constant': 50.0,
})
res = std_pynn_simulation( test_component = iz,
parameters = parameters,
initial_values = {},
synapse_components = [],
records = records,
)
| Fix to finding paths in nineml2nmodl | Fix to finding paths in nineml2nmodl
git-svn-id: c3c03ae8de67eddb7d242ee89b936c58f5138363@496 e4a6332b-da94-4e19-b8c2-16eed22ecab5
| Python | bsd-3-clause | INCF/lib9ML |
0986bbba02a4bb4d2c13835dd91281cce3bb5f10 | alembic/versions/174eb928136a_gdpr_restrict_processing.py | alembic/versions/174eb928136a_gdpr_restrict_processing.py | """GDPR restrict processing
Revision ID: 174eb928136a
Revises: d5b07c8f0893
Create Date: 2018-05-14 11:21:55.138387
"""
# revision identifiers, used by Alembic.
revision = '174eb928136a'
down_revision = 'd5b07c8f0893'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('restrict', sa.Boolean))
def downgrade():
op.drop_column('user', 'restrict')
| """GDPR restrict processing
Revision ID: 174eb928136a
Revises: d5b07c8f0893
Create Date: 2018-05-14 11:21:55.138387
"""
# revision identifiers, used by Alembic.
revision = '174eb928136a'
down_revision = 'd5b07c8f0893'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # Add the GDPR "restrict processing" flag to the user table.
    # `default=False` is a client-side (SQLAlchemy) default applied to new
    # inserts only, so the explicit UPDATE below backfills existing rows and
    # leaves no user with a NULL value.
    op.add_column('user', sa.Column('restrict', sa.Boolean, default=False))
    sql = 'update "user" set restrict=false'
    op.execute(sql)
def downgrade():
op.drop_column('user', 'restrict')
| Set default to False, and update existing users. | Set default to False, and update existing users.
| Python | agpl-3.0 | Scifabric/pybossa,PyBossa/pybossa,Scifabric/pybossa,PyBossa/pybossa |
0e7a7f960ff970262ffbe1569dc3437dcae2599c | app/main/helpers/presenters.py | app/main/helpers/presenters.py | import re
class Presenters(object):
def __init__(self):
return None
def present(self, value, question_content):
if "type" in question_content:
field_type = question_content["type"]
else:
return value
if hasattr(self, "_" + field_type):
return getattr(self, "_" + field_type)(value)
else:
return value
def _service_id(self, value):
if re.findall("[a-zA-Z]", value):
return [value]
else:
return re.findall("....", str(value))
def _upload(self, value):
return {
"url": value or "",
"filename": value.split("/")[-1] or ""
}
| import re
class Presenters(object):
    """Render raw answer values according to the question's declared type."""

    def __init__(self):
        return None

    def present(self, value, question_content):
        """Dispatch *value* to a ``_<type>`` formatter when one exists.

        Falls back to returning *value* unchanged when the question has no
        type or no matching formatter method is defined.
        """
        if "type" not in question_content:
            return value
        presenter_name = "_" + question_content["type"]
        if not hasattr(self, presenter_name):
            return value
        return getattr(self, presenter_name)(value)

    def _service_id(self, value):
        # Ids containing letters are shown whole; purely numeric ids are
        # split into groups of four characters for readability.
        text = str(value)
        if re.findall("[a-zA-Z]", text):
            return [value]
        return re.findall("....", text)

    def _upload(self, value):
        # *value* is the uploaded document's URL; the display filename is
        # its last path segment.
        return {
            "url": value or "",
            "filename": value.split("/")[-1] or ""
        }
| Fix the thing that @quis broke. | Fix the thing that @quis broke.
| Python | mit | mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend |
75d435e55e42fefe1c28095dadb9abb56284c1fb | marked/__init__.py | marked/__init__.py | import markgen
from bs4 import BeautifulSoup
TAGS = {
'p': 'paragraph',
'div': 'paragraph',
'a': 'link',
'strong': 'emphasis',
'em': 'emphasis',
'b': 'emphasis',
'i': 'emphasis',
'u': 'emphasis',
'img': 'image',
'image': 'image',
'blockquote': 'quote',
'pre': 'pre',
'code': 'pre',
'h1': 'header',
'h2': 'header',
'h3': 'header',
'h4': 'header',
'h5': 'header',
'h6': 'header',
'ul': 'ulist',
'ol': 'olist'
}
def markup_to_markdown(content):
soup = BeautifulSoup(content)
# Account for HTML snippets and full documents alike
contents = soup.body.contents if soup.body is not None else soup.contents
return _iterate_over_contents(contents)
def _iterate_over_contents(contents):
out = u''
for c in contents:
if hasattr(c, 'contents'):
c = _iterate_over_contents(c.contents)
if c.name in TAGS:
wrap = getattr(markgen, TAGS[c.name])
c = wrap(c)
out += u"\n{0}".format(c)
return out
| import markgen
from bs4 import BeautifulSoup
TAGS = {
'p': 'paragraph',
'div': 'paragraph',
'a': 'link',
'strong': 'emphasis',
'em': 'emphasis',
'b': 'emphasis',
'i': 'emphasis',
'u': 'emphasis',
'img': 'image',
'image': 'image',
'blockquote': 'quote',
'pre': 'pre',
'code': 'pre',
'h1': 'header',
'h2': 'header',
'h3': 'header',
'h4': 'header',
'h5': 'header',
'h6': 'header',
'ul': 'ulist',
'ol': 'olist'
}
def markup_to_markdown(content):
soup = BeautifulSoup(content)
# Account for HTML snippets and full documents alike
contents = soup.body.contents if soup.body is not None else soup.contents
return _iterate_over_contents(contents)
def _iterate_over_contents(contents):
out = u''
for c in contents:
if hasattr(c, 'contents'):
c.string = _iterate_over_contents(c.contents)
if c.name in TAGS:
wrap = getattr(markgen, TAGS[c.name])
c = wrap(c.string)
out += u"\n{0}".format(c)
return out
| Use .string so we keep within BS parse tree | Use .string so we keep within BS parse tree
| Python | bsd-3-clause | 1stvamp/marked |
6d964e5ce83b8f07de64ef8ed5b531271725d9c4 | peering/management/commands/deploy_configurations.py | peering/management/commands/deploy_configurations.py | from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from peering.models import InternetExchange
class Command(BaseCommand):
help = ('Deploy configurations each IX having a router and a configuration'
' template attached.')
logger = logging.getLogger('peering.manager.peering')
def handle(self, *args, **options):
self.logger.info('Deploying configurations...')
for ix in InternetExchange.objects.all():
if ix.configuration_template and ix.router:
self.logger.info(
'Deploying configuration on {}'.format(ix.name))
ix.router.set_napalm_configuration(ix.generate_configuration(),
commit=True)
else:
self.logger.info(
'No configuration to deploy on {}'.format(ix.name))
self.logger.info('Configurations deployed')
| from __future__ import unicode_literals
import logging
from django.core.management.base import BaseCommand
from peering.models import InternetExchange
class Command(BaseCommand):
    """Push generated configurations to every eligible IX router."""
    help = ('Deploy configurations each IX having a router and a configuration'
            ' template attached.')
    logger = logging.getLogger('peering.manager.peering')

    def handle(self, *args, **options):
        """Iterate all InternetExchange rows and deploy where possible."""
        self.logger.info('Deploying configurations...')

        for ix in InternetExchange.objects.all():
            # Only deploy config if there are at least a configuration
            # template, a router and a platform for the router (NAPALM
            # needs the platform to pick a driver).
            if ix.configuration_template and ix.router and ix.router.platform:
                self.logger.info(
                    'Deploying configuration on {}'.format(ix.name))
                # commit=True applies the candidate config on the device.
                ix.router.set_napalm_configuration(ix.generate_configuration(),
                                                   commit=True)
            else:
                self.logger.info(
                    'No configuration to deploy on {}'.format(ix.name))

        self.logger.info('Configurations deployed')
| Check for router platform in auto-deploy script. | Check for router platform in auto-deploy script.
| Python | apache-2.0 | respawner/peering-manager,respawner/peering-manager,respawner/peering-manager,respawner/peering-manager |
681f322c4cc57f9fdac1efb59f431360c209232d | backdrop/collector/__init__.py | backdrop/collector/__init__.py | # Namespace package: https://docs.python.org/2/library/pkgutil.html
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
__VERSION__ = "1.0.0"
__AUTHOR__ = "GDS Developers"
__AUTHOR_EMAIL__ = ""
| # Namespace package: https://docs.python.org/2/library/pkgutil.html
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
__VERSION__ = "2.0.1"
__AUTHOR__ = "GDS Developers"
__AUTHOR_EMAIL__ = ""
| Make version number match the latest tag | Make version number match the latest tag
We have a 2.0.0 tag in github which points to code where the version
claims to be 1.0.0:
https://github.com/alphagov/backdrop-collector/blob/2.0.0/backdrop/collector/__init__.py
We definitely have code which specifies 2.0.0 as its dependency.
Upversion to 2.0.1 so we can make a new, correct, tag.
| Python | mit | gds-attic/backdrop-collector,gds-attic/backdrop-collector |
8ef3e88c99602dbdac8fca1b223c7bab8308d820 | backend/backend/serializers.py | backend/backend/serializers.py | from rest_framework import serializers
from .models import Animal
class AnimalSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Animal
fields = ('id', 'name', 'dob', 'active', 'own') | from rest_framework import serializers
from .models import Animal
class AnimalSerializer(serializers.HyperlinkedModelSerializer):
    """DRF serializer exposing an Animal's core fields, gender and parents."""
    class Meta:
        model = Animal
        fields = ('id', 'name', 'dob', 'gender', 'active', 'own', 'father', 'mother')
| Python | apache-2.0 | mmlado/animal_pairing,mmlado/animal_pairing |
c5635d1146fc2c0ff284c41d4b2d1132b25ae270 | composer/workflows/use_local_deps.py | composer/workflows/use_local_deps.py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A DAG consisting of a BashOperator that prints the result of a coin flip."""
import datetime
import airflow
from airflow.operators import bash_operator
# [START composer_dag_local_deps]
from .dependencies import coin_module
# [END composer_dag_local_deps]
default_args = {
'start_date':
datetime.datetime.combine(
datetime.datetime.today() - datetime.timedelta(days=1),
datetime.datetime.min.time()),
}
with airflow.DAG('dependencies_dag', default_args=default_args) as dag:
t1 = bash_operator.BashOperator(
task_id='print_coin_result',
bash_command='echo "{0}"'.format(coin_module.flip_coin()),
dag=dag)
| # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A DAG consisting of a BashOperator that prints the result of a coin flip."""
import datetime
import airflow
from airflow.operators import bash_operator
# [START composer_dag_local_deps]
from dependencies import coin_module
# [END composer_dag_local_deps]
default_args = {
'start_date':
datetime.datetime.combine(
datetime.datetime.today() - datetime.timedelta(days=1),
datetime.datetime.min.time()),
}
with airflow.DAG('dependencies_dag', default_args=default_args) as dag:
t1 = bash_operator.BashOperator(
task_id='print_coin_result',
bash_command='echo "{0}"'.format(coin_module.flip_coin()),
dag=dag)
| Use absolute / implicit relative imports for local deps | Use absolute / implicit relative imports for local deps
Since Composer is Python 2.7 only for now, this sample can use implicit
relative imports. Airflow doesn't seem to support explicit relative
imports when I try to run the use_local_deps.py file in Composer.
Aside: Airflow is using the imp.load_source method to load the DAG
modules. This will be problematic for Python 3 support, see:
https://issues.apache.org/jira/browse/AIRFLOW-2243.
| Python | apache-2.0 | GoogleCloudPlatform/python-docs-samples,GoogleCloudPlatform/python-docs-samples,GoogleCloudPlatform/python-docs-samples,GoogleCloudPlatform/python-docs-samples |
4d7f94e7ee5b2ffdfe58b353688ae5bfc280332c | boris/reporting/management.py | boris/reporting/management.py | '''
Created on 3.12.2011
@author: xaralis
'''
from os.path import dirname, join
from django.db import models, connection
from boris import reporting
from boris.reporting import models as reporting_app
def install_views(app, created_models, verbosity, **kwargs):
if verbosity >= 1:
print "Installing reporting views ..."
cursor = connection.cursor()
sql_file = open(join(dirname(reporting.__file__), 'sql', 'reporting-views.mysql.sql'), 'r')
cursor.execute(sql_file.read())
sql_file.close()
models.signals.post_syncdb.connect(install_views, sender=reporting_app)
| from os.path import dirname, join
from django.db import connection
from south.signals import post_migrate
from boris import reporting
from boris.reporting import models as reporting_app
def install_views(app, **kwargs):
    # Connected to South's post_migrate signal so the reporting SQL views
    # are (re)created after every migration of the reporting app.
    # (Python 2 code: print statement.)
    print "Installing reporting views ..."
    cursor = connection.cursor()
    sql_file = open(join(dirname(reporting.__file__), 'sql', 'reporting-views.mysql.sql'), 'r')
    try:
        cursor.execute(sql_file.read())
    finally:
        # Always release the file handle, even if the SQL fails.
        sql_file.close()
post_migrate.connect(install_views, sender=reporting_app)
| Install views on post_migrate rather than post_syncdb. | Install views on post_migrate rather than post_syncdb.
| Python | mit | fragaria/BorIS,fragaria/BorIS,fragaria/BorIS |
c6ba057d2e8a1b75edb49ce3c007676f4fe46a16 | tv-script-generation/helper.py | tv-script-generation/helper.py | import os
import pickle
def load_data(path):
"""
Load Dataset from File
"""
input_file = os.path.join(path)
with open(input_file, "r") as f:
data = f.read()
return data
def preprocess_and_save_data(dataset_path, token_lookup, create_lookup_tables):
"""
Preprocess Text Data
"""
text = load_data(dataset_path)
token_dict = token_lookup()
for key, token in token_dict.items():
text = text.replace(key, ' {} '.format(token))
text = text.lower()
text = text.split()
vocab_to_int, int_to_vocab = create_lookup_tables(text)
int_text = [vocab_to_int[word] for word in text]
pickle.dump((int_text, vocab_to_int, int_to_vocab, token_dict), open('preprocess.p', 'wb'))
def load_preprocess():
"""
Load the Preprocessed Training data and return them in batches of <batch_size> or less
"""
return pickle.load(open('preprocess.p', mode='rb'))
def save_params(params):
"""
Save parameters to file
"""
pickle.dump(params, open('params.p', 'wb'))
def load_params():
"""
Load parameters from file
"""
return pickle.load(open('params.p', mode='rb'))
| import os
import pickle
def load_data(path):
    """Read and return the entire text contents of the file at *path*."""
    target = os.path.join(path)
    with open(target, "r") as source:
        return source.read()
def preprocess_and_save_data(dataset_path, token_lookup, create_lookup_tables):
    """Tokenize the script text and pickle the preprocessed artifacts.

    Saves (int_text, vocab_to_int, int_to_vocab, token_dict) to
    'preprocess.p' in the working directory.
    """
    text = load_data(dataset_path)

    # Drop the leading copyright/notice header (first 81 characters); it is
    # not part of the script text being analysed.
    text = text[81:]

    # Replace punctuation with surrounding-space tokens so split() keeps
    # them as separate words.
    replacements = token_lookup()
    for punctuation, token in replacements.items():
        text = text.replace(punctuation, ' {} '.format(token))

    words = text.lower().split()

    vocab_to_int, int_to_vocab = create_lookup_tables(words)
    int_text = [vocab_to_int[word] for word in words]
    pickle.dump((int_text, vocab_to_int, int_to_vocab, replacements), open('preprocess.p', 'wb'))
def load_preprocess():
"""
Load the Preprocessed Training data and return them in batches of <batch_size> or less
"""
return pickle.load(open('preprocess.p', mode='rb'))
def save_params(params):
"""
Save parameters to file
"""
pickle.dump(params, open('params.p', 'wb'))
def load_params():
"""
Load parameters from file
"""
return pickle.load(open('params.p', mode='rb'))
| Remove copyright notice during preprocessing | Remove copyright notice during preprocessing
| Python | mit | spencer2211/deep-learning |
0983361e6fba5812416d8fb5b695f6b3034bc927 | registration/management/commands/cleanupregistration.py | registration/management/commands/cleanupregistration.py | """
A management command which deletes expired accounts (e.g.,
accounts which signed up but never activated) from the database.
Calls ``RegistrationProfile.objects.delete_expired_users()``, which
contains the actual logic for determining which accounts are deleted.
"""
from django.core.management.base import NoArgsCommand
from ...models import RegistrationProfile
class Command(NoArgsCommand):
help = "Delete expired user registrations from the database"
def handle_noargs(self, **options):
RegistrationProfile.objects.delete_expired_users()
| """
A management command which deletes expired accounts (e.g.,
accounts which signed up but never activated) from the database.
Calls ``RegistrationProfile.objects.delete_expired_users()``, which
contains the actual logic for determining which accounts are deleted.
"""
from django.core.management.base import BaseCommand
from ...models import RegistrationProfile
class Command(BaseCommand):
    # Deletes accounts that signed up but never activated; the selection
    # logic lives in RegistrationProfile.objects.delete_expired_users().
    help = "Delete expired user registrations from the database"

    def handle(self, *args, **options):
        """Entry point for ``manage.py cleanupregistration``."""
        RegistrationProfile.objects.delete_expired_users()
| Fix deprecated class NoArgsCommand class. | Fix deprecated class NoArgsCommand class.
Solve the warning RemovedInDjango110Warning: NoArgsCommand class is deprecated and will be removed in Django 1.10. Use BaseCommand instead, which takes no arguments by default.
| Python | bsd-3-clause | sergafts/django-registration,timgraham/django-registration,sergafts/django-registration,pando85/django-registration,pando85/django-registration,allo-/django-registration,allo-/django-registration,timgraham/django-registration |
da66b82b4a5d5c0b0bb716b05a8bfd2dae5e2f4c | ookoobah/glutil.py | ookoobah/glutil.py | from contextlib import contextmanager
from pyglet.gl import *
def ptr(*args):
return (GLfloat * len(args))(*args)
@contextmanager
def gl_disable(*bits):
glPushAttrib(GL_ENABLE_BIT)
map(glDisable, bits)
yield
glPopAttrib(GL_ENABLE_BIT)
@contextmanager
def gl_ortho(window):
# clobbers current modelview matrix
glMatrixMode(GL_PROJECTION)
glPushMatrix()
glLoadIdentity()
gluOrtho2D(0, window.width, 0, window.height, -1, 1)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
yield
glMatrixMode(GL_PROJECTION)
glPopMatrix()
glMatrixMode(GL_MODELVIEW)
| from contextlib import contextmanager
from pyglet.gl import *
__all__ = [
'ptr',
'gl_disable',
'gl_ortho',
]
def ptr(*args):
    # Pack the given numbers into a ctypes GLfloat array, as expected by GL
    # entry points that take a float-pointer parameter.
    return (GLfloat * len(args))(*args)
@contextmanager
def gl_disable(*bits):
    """Temporarily disable the given GL capability bits.

    Pushes the enable-state attribute group, disables each capability in
    *bits*, and restores the previous state on exit -- even if the managed
    block raises.
    """
    glPushAttrib(GL_ENABLE_BIT)
    # Use an explicit loop instead of map(): under Python 3, map() returns a
    # lazy iterator, so the glDisable calls would never actually run.
    for bit in bits:
        glDisable(bit)
    try:
        yield
    finally:
        # NOTE(review): glPopAttrib takes no arguments per the GL spec; the
        # extra argument is tolerated by pyglet's ctypes wrapper. Kept as-is.
        glPopAttrib(GL_ENABLE_BIT)
@contextmanager
def gl_ortho(window):
    # clobbers current modelview matrix
    # Switch to a 2D orthographic projection matching the window's pixel
    # coordinates for the duration of the block, then restore the previous
    # projection. The modelview matrix is reset and NOT restored (see the
    # note above).
    # NOTE(review): gluOrtho2D takes 4 args (left, right, bottom, top) per
    # the GLU spec; the trailing -1, 1 pair looks like glOrtho's near/far.
    # Works via the ctypes wrapper, but confirm the intended call.
    glMatrixMode(GL_PROJECTION)
    glPushMatrix()
    glLoadIdentity()
    gluOrtho2D(0, window.width, 0, window.height, -1, 1)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    yield
    glMatrixMode(GL_PROJECTION)
    glPopMatrix()
    glMatrixMode(GL_MODELVIEW)
| Fix pyglet breackage by controlling exports. | Fix pyglet breackage by controlling exports.
| Python | mit | vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah |
04c8a36c5713e4279f8bf52fa45cdb03de721dbb | example/deploy.py | example/deploy.py | from pyinfra import inventory, state
from pyinfra_docker import deploy_docker
from pyinfra_etcd import deploy_etcd
from pyinfra_kubernetes import deploy_kubernetes_master, deploy_kubernetes_node
SUDO = True
FAIL_PERCENT = 0
def get_etcd_nodes():
return [
'http://{0}:2379'.format(
etcd_node.fact.network_devices[etcd_node.data.etcd_interface]
['ipv4']['address'],
)
for etcd_node in inventory.get_group('etcd_nodes')
]
# Install/configure etcd cluster
with state.limit('etcd_nodes'):
deploy_etcd()
# Install/configure the masters (apiserver, controller, scheduler)
with state.limit('kubernetes_masters'):
deploy_kubernetes_master(etcd_nodes=get_etcd_nodes())
# Install/configure the nodes
with state.limit('kubernetes_nodes'):
# Install Docker
deploy_docker()
# Install Kubernetes node components (kubelet, kube-proxy)
first_master = inventory.get_group('kubernetes_masters')[0]
deploy_kubernetes_node(
master_address='http://{0}'.format((
first_master
.fact.network_devices[first_master.data.network_interface]
['ipv4']['address']
)),
)
# deploy_docker(config={
# # Make Docker use the Vagrant provided interface which has it's own /24
# 'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}',
# })
| from pyinfra import inventory, state
from pyinfra_docker import deploy_docker
from pyinfra_etcd import deploy_etcd
from pyinfra_kubernetes import deploy_kubernetes_master, deploy_kubernetes_node
SUDO = True
FAIL_PERCENT = 0
def get_etcd_nodes():
    # Build the etcd client endpoint list from each etcd host's IPv4 address
    # on its configured interface, read from pyinfra facts.
    return [
        'http://{0}:2379'.format(
            etcd_node.fact.network_devices[etcd_node.data.etcd_interface]
            ['ipv4']['address'],
        )
        for etcd_node in inventory.get_group('etcd_nodes')
    ]
# Install/configure etcd cluster
with state.limit('etcd_nodes'):
deploy_etcd()
# Install/configure the masters (apiserver, controller, scheduler)
with state.limit('kubernetes_masters'):
deploy_kubernetes_master(etcd_nodes=get_etcd_nodes())
# Install/configure the nodes
with state.limit('kubernetes_nodes'):
# Install Docker
deploy_docker(config={
# Make Docker use the Vagrant provided interface which has it's own /24
'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}',
})
# Install Kubernetes node components (kubelet, kube-proxy)
first_master = inventory.get_group('kubernetes_masters')[0]
deploy_kubernetes_node(
master_address='http://{0}'.format((
first_master
.fact.network_devices[first_master.data.network_interface]
['ipv4']['address']
)),
)
| Use Docker config pointing at the correct interface/subnect for networking. | Use Docker config pointing at the correct interface/subnect for networking.
| Python | mit | EDITD/pyinfra-kubernetes,EDITD/pyinfra-kubernetes |
4714f803b22eda26eb2fc867c1d9e2c7230bdd11 | pythonforandroid/recipes/pysdl2/__init__.py | pythonforandroid/recipes/pysdl2/__init__.py |
from pythonforandroid.recipe import PythonRecipe
class PySDL2Recipe(PythonRecipe):
version = '0.9.3'
url = 'https://bitbucket.org/marcusva/py-sdl2/downloads/PySDL2-{version}.tar.gz'
depends = ['sdl2']
recipe = PySDL2Recipe()
|
from pythonforandroid.recipe import PythonRecipe
class PySDL2Recipe(PythonRecipe):
    """python-for-android recipe for PySDL2 (Python bindings for SDL2)."""
    version = '0.9.6'
    # Sourced from PyPI; {version} is interpolated by the recipe machinery.
    url = 'https://files.pythonhosted.org/packages/source/P/PySDL2/PySDL2-{version}.tar.gz'
    depends = ['sdl2']
recipe = PySDL2Recipe()
| Fix outdated PySDL2 version and non-PyPI install source | Fix outdated PySDL2 version and non-PyPI install source
| Python | mit | kronenpj/python-for-android,rnixx/python-for-android,germn/python-for-android,PKRoma/python-for-android,germn/python-for-android,kivy/python-for-android,rnixx/python-for-android,rnixx/python-for-android,rnixx/python-for-android,PKRoma/python-for-android,kivy/python-for-android,germn/python-for-android,kronenpj/python-for-android,rnixx/python-for-android,kronenpj/python-for-android,kivy/python-for-android,kivy/python-for-android,kronenpj/python-for-android,germn/python-for-android,PKRoma/python-for-android,rnixx/python-for-android,germn/python-for-android,germn/python-for-android,kronenpj/python-for-android,kivy/python-for-android,PKRoma/python-for-android,PKRoma/python-for-android |
32cc988e81bbbecf09f7e7a801e92c6cfc281e75 | docs/autogen_config.py | docs/autogen_config.py | #!/usr/bin/env python
from os.path import join, dirname, abspath
from IPython.terminal.ipapp import TerminalIPythonApp
from ipykernel.kernelapp import IPKernelApp
here = abspath(dirname(__file__))
options = join(here, 'source', 'config', 'options')
generated = join(options, 'generated.rst')
def write_doc(name, title, app, preamble=None):
filename = '%s.rst' % name
with open(join(options, filename), 'w') as f:
f.write(title + '\n')
f.write(('=' * len(title)) + '\n')
f.write('\n')
if preamble is not None:
f.write(preamble + '\n\n')
f.write(app.document_config_options())
with open(generated, 'a') as f:
f.write(filename + '\n')
if __name__ == '__main__':
# create empty file
with open(generated, 'w'):
pass
write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
write_doc('kernel', 'IPython kernel options', IPKernelApp(),
preamble=("These options can be used in :file:`ipython_kernel_config.py`. "
"The kernel also respects any options in `ipython_config.py`"),
)
| #!/usr/bin/env python
from os.path import join, dirname, abspath
from IPython.terminal.ipapp import TerminalIPythonApp
from ipykernel.kernelapp import IPKernelApp
here = abspath(dirname(__file__))
options = join(here, 'source', 'config', 'options')
def write_doc(name, title, app, preamble=None):
filename = '%s.rst' % name
with open(join(options, filename), 'w') as f:
f.write(title + '\n')
f.write(('=' * len(title)) + '\n')
f.write('\n')
if preamble is not None:
f.write(preamble + '\n\n')
f.write(app.document_config_options())
if __name__ == '__main__':
write_doc('terminal', 'Terminal IPython options', TerminalIPythonApp())
write_doc('kernel', 'IPython kernel options', IPKernelApp(),
preamble=("These options can be used in :file:`ipython_kernel_config.py`. "
"The kernel also respects any options in `ipython_config.py`"),
)
| Remove generation of unnecessary generated.rst file | Remove generation of unnecessary generated.rst file
| Python | bsd-3-clause | ipython/ipython,ipython/ipython |
03caca6932384f08b06bbe5cb3ddc316b7ebf560 | manila_ui/local/local_settings.d/_90_manila_shares.py | manila_ui/local/local_settings.d/_90_manila_shares.py | # Copyright 2016 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# The OPENSTACK_MANILA_FEATURES settings can be used to enable or disable
# the UI for the various services provided by Manila.
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
| # Copyright 2016 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Sample
# settings.LOGGING['loggers'].update({
# 'manilaclient': {
# 'handlers': ['console'],
# 'level': 'DEBUG',
# 'propagate': False,
# }
# })
# The OPENSTACK_MANILA_FEATURES settings can be used to enable or disable
# the UI for the various services provided by Manila.
OPENSTACK_MANILA_FEATURES = {
'enable_share_groups': True,
'enable_replication': True,
'enable_migration': True,
'enable_public_share_type_creation': True,
'enable_public_share_group_type_creation': True,
'enable_public_shares': True,
'enabled_share_protocols': ['NFS', 'CIFS', 'GlusterFS', 'HDFS', 'CephFS',
'MapRFS'],
}
| Add example of logging setting for Manila | Add example of logging setting for Manila
This change demonstrates how we can define logging setting specific to
manila-ui, so that operators can easily understand how to customize
logging level and so on.
Change-Id: Ia8505d988ed75e0358452b5b3c2889b364680f22
| Python | apache-2.0 | openstack/manila-ui,openstack/manila-ui,openstack/manila-ui |
b43b555a7803c6afd50fe5992f455cc5d1ad5d86 | stonemason/service/tileserver/health/views.py | stonemason/service/tileserver/health/views.py | # -*- encoding: utf-8 -*-
__author__ = 'ray'
__date__ = '3/2/15'
from flask import make_response
def health_check():
"""Return a dummy response"""
response = make_response()
response.headers['Content-Type'] = 'text/plain'
response.headers['Cache-Control'] = 'public, max-age=0'
return response
| # -*- encoding: utf-8 -*-
__author__ = 'ray'
__date__ = '3/2/15'
from flask import make_response
import stonemason
import sys
import platform
VERSION_STRING = '''stonemason:%s
Python: %s
Platform: %s''' % (stonemason.__version__,
sys.version,
platform.version())
del stonemason, sys, platform
def health_check():
"""Return a dummy response"""
response = make_response(VERSION_STRING)
response.headers['Content-Type'] = 'text/plain'
response.headers['Cache-Control'] = 'public, max-age=0'
return response
| Return sys/platform version in tileserver health check | FEATURE: Return sys/platform version in tileserver health check
| Python | mit | Kotaimen/stonemason,Kotaimen/stonemason |
a2871c774dd793f5264c1c530a36b10824c435db | cryptography/bindings/openssl/__init__.py | cryptography/bindings/openssl/__init__.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cryptography.bindings.openssl import api
__all__ = ["api"]
| # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cryptography.bindings.openssl.api import api
__all__ = ["api"]
| Make api poitn to the right object | Make api poitn to the right object
| Python | bsd-3-clause | Lukasa/cryptography,dstufft/cryptography,bwhmather/cryptography,skeuomorf/cryptography,Ayrx/cryptography,bwhmather/cryptography,bwhmather/cryptography,dstufft/cryptography,Lukasa/cryptography,skeuomorf/cryptography,Ayrx/cryptography,Lukasa/cryptography,Ayrx/cryptography,kimvais/cryptography,dstufft/cryptography,dstufft/cryptography,sholsapp/cryptography,bwhmather/cryptography,kimvais/cryptography,Hasimir/cryptography,Hasimir/cryptography,sholsapp/cryptography,skeuomorf/cryptography,skeuomorf/cryptography,kimvais/cryptography,Hasimir/cryptography,sholsapp/cryptography,glyph/cryptography,kimvais/cryptography,Ayrx/cryptography,Hasimir/cryptography,sholsapp/cryptography,dstufft/cryptography,glyph/cryptography |
de6ac0596b58fac2efc547fe6f81a48f4a06f527 | tests/grammar_creation_test/TerminalAdding.py | tests/grammar_creation_test/TerminalAdding.py | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
class TerminalAddingTest(TestCase):
pass
if __name__ == '__main__':
main() | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import *
class TerminalAddingTest(TestCase):
def test_shouldAddOneTerminal(self):
g = Grammar(terminals=['asdf'])
self.assertTrue(g.have_term('asdf'))
self.assertFalse(g.have_term('a'))
def test_shouldAddMoreTerminals(self):
g = Grammar(terminals=[0, 1, 2])
self.assertTrue(g.have_term([0, 1, 2]))
self.assertFalse(g.have_term('a'))
self.assertFalse(g.have_term('asdf'))
self.assertFalse(g.have_term(3))
if __name__ == '__main__':
main()
| Add tests of terminal adding when grammar is create | Add tests of terminal adding when grammar is create
| Python | mit | PatrikValkovic/grammpy |
3081fcd1e37520f504804a3efae62c33d3371a21 | temba/msgs/migrations/0034_move_recording_domains.py | temba/msgs/migrations/0034_move_recording_domains.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('msgs', '0033_exportmessagestask_uuid'),
]
def move_recording_domains(apps, schema_editor):
Msg = apps.get_model('msgs', 'Msg')
# this is our new bucket name
bucket_name = settings.AWS_STORAGE_BUCKET_NAME
# our old bucket name had periods instead of dashes
old_bucket_domain = 'http://' + bucket_name.replace('-', '.')
# our new domain is more specific
new_bucket_domain = 'https://' + settings.AWS_BUCKET_DOMAIN
for msg in Msg.objects.filter(msg_type='V').exclude(recording_url=None):
# if our recording URL is on our old bucket
if msg.recording_url.find(old_bucket_domain) >= 0:
# rename it to our new bucket
old_recording_url = msg.recording_url
msg.recording_url = msg.recording_url.replace(old_bucket_domain,
new_bucket_domain)
print "[%d] %s to %s" % (msg.id, old_recording_url, msg.recording_url)
msg.save(update_fields=['recording_url'])
operations = [
migrations.RunPython(move_recording_domains)
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('msgs', '0033_exportmessagestask_uuid'),
]
def move_recording_domains(apps, schema_editor):
Msg = apps.get_model('msgs', 'Msg')
# this is our new bucket name
bucket_name = settings.AWS_STORAGE_BUCKET_NAME
# our old bucket name had periods instead of dashes
old_bucket_domain = 'http://' + bucket_name.replace('-', '.')
# our new domain is more specific
new_bucket_domain = 'https://' + settings.AWS_BUCKET_DOMAIN
for msg in Msg.objects.filter(direction='I', msg_type='V').exclude(recording_url=None):
# if our recording URL is on our old bucket
if msg.recording_url.find(old_bucket_domain) >= 0:
# rename it to our new bucket
old_recording_url = msg.recording_url
msg.recording_url = msg.recording_url.replace(old_bucket_domain,
new_bucket_domain)
print "[%d] %s to %s" % (msg.id, old_recording_url, msg.recording_url)
msg.save(update_fields=['recording_url'])
operations = [
migrations.RunPython(move_recording_domains)
]
| Tweak to migration so it is a bit faster for future migraters | Tweak to migration so it is a bit faster for future migraters
| Python | agpl-3.0 | tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,praekelt/rapidpro,ewheeler/rapidpro,reyrodrigues/EU-SMS,reyrodrigues/EU-SMS,ewheeler/rapidpro,reyrodrigues/EU-SMS,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,praekelt/rapidpro,tsotetsi/textily-web,praekelt/rapidpro,pulilab/rapidpro,ewheeler/rapidpro,pulilab/rapidpro,praekelt/rapidpro,ewheeler/rapidpro,pulilab/rapidpro |
7755dda1449f6264d7d7fe57dc776c731ab22d84 | src/satosa/micro_services/processors/scope_processor.py | src/satosa/micro_services/processors/scope_processor.py | from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor
CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''
class ScopeProcessor(BaseProcessor):
def process(self, internal_data, attribute, **kwargs):
scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)
if scope is None or scope == '':
raise AttributeProcessorError("No scope set.")
attributes = internal_data.attributes
value = attributes.get(attribute, [None])[0]
attributes[attribute][0] = value + '@' + scope
| from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor
CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''
class ScopeProcessor(BaseProcessor):
def process(self, internal_data, attribute, **kwargs):
scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)
if scope is None or scope == '':
raise AttributeProcessorError("No scope set.")
attributes = internal_data.attributes
values = attributes.get(attribute, [])
if not isinstance(values, list):
values = [values]
if values:
new_values=[]
for value in values:
new_values.append(value + '@' + scope)
attributes[attribute] = new_values
| Allow scope processor to handle multivalued attributes | Allow scope processor to handle multivalued attributes
| Python | apache-2.0 | its-dirg/SATOSA,irtnog/SATOSA,SUNET/SATOSA,SUNET/SATOSA,irtnog/SATOSA |
adf3a500e8ab8115520daa16bc008faeec7cfca9 | gitfs/views/view.py | gitfs/views/view.py | import os
from abc import ABCMeta, abstractmethod
from gitfs import FuseMethodNotImplemented
from gitfs.filesystems.passthrough import PassthroughFuse
class View(PassthroughFuse):
__metaclass__ = ABCMeta
def __init__(self, *args, **kwargs):
self.args = args
for attr in kwargs:
setattr(self, attr, kwargs[attr])
def getxattr(self, path, name, position=0):
"""Get extended attributes"""
raise FuseMethodNotImplemented
| import os
from abc import ABCMeta, abstractmethod
from gitfs import FuseMethodNotImplemented
class View(object):
__metaclass__ = ABCMeta
def __init__(self, *args, **kwargs):
self.args = args
for attr in kwargs:
setattr(self, attr, kwargs[attr])
def getxattr(self, path, name, position=0):
"""Get extended attributes"""
raise FuseMethodNotImplemented
| Make View inherit from objects instead of PassthroughFuse | Make View inherit from objects instead of PassthroughFuse
| Python | apache-2.0 | PressLabs/gitfs,PressLabs/gitfs,rowhit/gitfs,bussiere/gitfs,ksmaheshkumar/gitfs |
ee28fdc66fbb0f91821ff18ff219791bf5de8f4d | corehq/apps/fixtures/tasks.py | corehq/apps/fixtures/tasks.py | from __future__ import absolute_import
from __future__ import unicode_literals
from corehq.apps.fixtures.upload import upload_fixture_file
from soil import DownloadBase
from celery.task import task
@task(serializer='pickle')
def fixture_upload_async(domain, download_id, replace):
task = fixture_upload_async
DownloadBase.set_progress(task, 0, 100)
download_ref = DownloadBase.get(download_id)
result = upload_fixture_file(domain, download_ref.get_filename(), replace, task)
DownloadBase.set_progress(task, 100, 100)
return {
'messages': {
'success': result.success,
'messages': result.messages,
'errors': result.errors,
'number_of_fixtures': result.number_of_fixtures,
},
}
@task(serializer='pickle')
def fixture_download_async(prepare_download, *args, **kw):
task = fixture_download_async
DownloadBase.set_progress(task, 0, 100)
prepare_download(task=task, *args, **kw)
DownloadBase.set_progress(task, 100, 100)
| from __future__ import absolute_import, unicode_literals
from celery.task import task
from soil import DownloadBase
from corehq.apps.fixtures.upload import upload_fixture_file
@task
def fixture_upload_async(domain, download_id, replace):
task = fixture_upload_async
DownloadBase.set_progress(task, 0, 100)
download_ref = DownloadBase.get(download_id)
result = upload_fixture_file(domain, download_ref.get_filename(), replace, task)
DownloadBase.set_progress(task, 100, 100)
return {
'messages': {
'success': result.success,
'messages': result.messages,
'errors': result.errors,
'number_of_fixtures': result.number_of_fixtures,
},
}
@task(serializer='pickle')
def fixture_download_async(prepare_download, *args, **kw):
task = fixture_download_async
DownloadBase.set_progress(task, 0, 100)
prepare_download(task=task, *args, **kw)
DownloadBase.set_progress(task, 100, 100)
| Change fixture upload task to json serializer | Change fixture upload task to json serializer
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq |
621858406aaa4d7deb9d8ec6b96459a6a7d25285 | masters/master.chromium.webkit/master_gatekeeper_cfg.py | masters/master.chromium.webkit/master_gatekeeper_cfg.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master import gatekeeper
from master import master_utils
# This is the list of the builder categories and the corresponding critical
# steps. If one critical step fails, gatekeeper will close the tree
# automatically.
# Note: don't include 'update scripts' since we can't do much about it when
# it's failing and the tree is still technically fine.
categories_steps = {
'': ['update', 'runhooks', 'compile'],
}
exclusions = {
}
forgiving_steps = ['update_scripts', 'update', 'gclient_revert']
def Update(config, active_master, c):
c['status'].append(gatekeeper.GateKeeper(
fromaddr=active_master.from_address,
categories_steps=categories_steps,
exclusions=exclusions,
relayhost=config.Master.smtp,
subject='buildbot %(result)s in %(projectName)s on %(builder)s, '
'revision %(revision)s',
extraRecipients=active_master.tree_closing_notification_recipients,
lookup=master_utils.FilterDomain(),
forgiving_steps=forgiving_steps,
public_html='../master.chromium/public_html',
tree_status_url=active_master.tree_status_url,
use_getname=True,
sheriffs=[]))
| # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master import gatekeeper
from master import master_utils
# This is the list of the builder categories and the corresponding critical
# steps. If one critical step fails, gatekeeper will close the tree
# automatically.
# Note: don't include 'update scripts' since we can't do much about it when
# it's failing and the tree is still technically fine.
categories_steps = {
'': ['update', 'runhooks', 'compile'],
}
exclusions = {
'WebKit XP': ['runhooks'], # crbug.com/262577
}
forgiving_steps = ['update_scripts', 'update', 'gclient_revert']
def Update(config, active_master, c):
c['status'].append(gatekeeper.GateKeeper(
fromaddr=active_master.from_address,
categories_steps=categories_steps,
exclusions=exclusions,
relayhost=config.Master.smtp,
subject='buildbot %(result)s in %(projectName)s on %(builder)s, '
'revision %(revision)s',
extraRecipients=active_master.tree_closing_notification_recipients,
lookup=master_utils.FilterDomain(),
forgiving_steps=forgiving_steps,
public_html='../master.chromium/public_html',
tree_status_url=active_master.tree_status_url,
use_getname=True,
sheriffs=[]))
| Change runhooks failures on WebKit XP to not close the chromium.webkit tree. | Change runhooks failures on WebKit XP to not close the chromium.webkit tree.
Since the failures are usually the bot's fault, not the patch's.
[email protected], [email protected], [email protected]
BUG=262577
Review URL: https://chromiumcodereview.appspot.com/19477003
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@213237 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build |
57024104a5951d62ff8a87a281a6d232583dabed | python/new_year_chaos.py | python/new_year_chaos.py | #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the minimumBribes function below.
def minimumBribes(finalLine):
if invalid(finalLine):
return "Too chaotic"
return bubbleSort(finalLine)
def invalid(finalLine):
return any(didBribeMoreThanTwoPeople(person, index) for index, person in enumerate(finalLine))
def didBribeMoreThanTwoPeople(person, index):
return index + 2 < person - 1
def bubbleSort(line):
swaps = 0
numberOfPeople = len(line)
for person in range(numberOfPeople):
for i in range(0, numberOfPeople - person - 1):
if line[i] > line[i + 1]:
line[i], line[i + 1] = line[i + 1], line[i]
swaps += 1
return swaps
if __name__ == '__main__':
t = int(input())
for t_itr in range(t):
n = int(input())
q = list(map(int, input().rstrip().split()))
print(minimumBribes(q))
| #!/bin/python3
import math
import os
import random
import re
import sys
# Complete the minimumBribes function below.
def minimumBribes(finalLine):
if invalid(finalLine):
return "Too chaotic"
return bubbleSort(finalLine)
def invalid(finalLine):
return any(didBribeMoreThanTwoPeople(person, index) for index, person in enumerate(finalLine))
def didBribeMoreThanTwoPeople(person, index):
return index + 2 < person - 1
def bubbleSort(line):
swaps = 0
swappedInCurrentPass = False
for person in range(len(line)):
for i in range(0, len(line) - 1):
if line[i] > line[i + 1]:
line[i], line[i + 1] = line[i + 1], line[i]
swaps += 1
swappedInCurrentPass = True
if swappedInCurrentPass:
swappedInCurrentPass = False
else:
break
return swaps
if __name__ == '__main__':
t = int(input())
for t_itr in range(t):
n = int(input())
q = list(map(int, input().rstrip().split()))
print(minimumBribes(q))
| Improve efficiency of new year chaos | Improve efficiency of new year chaos
| Python | mit | rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank |
d28eba4f9a62ca96bbfe5069f43864a6a71bea71 | examples/advanced/extend_python.py | examples/advanced/extend_python.py | """
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from python_parser import PythonIndenter
GRAMMAR = r"""
%import .python3 (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was add to the implicitely imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
| """
Extend the Python Grammar
==============================
This example demonstrates how to use the `%extend` statement,
to add new syntax to the example Python grammar.
"""
from lark.lark import Lark
from python_parser import PythonIndenter
GRAMMAR = r"""
%import .python3 (compound_stmt, single_input, file_input, eval_input, test, suite, _NEWLINE, _INDENT, _DEDENT, COMMENT)
%extend compound_stmt: match_stmt
match_stmt: "match" test ":" cases
cases: _NEWLINE _INDENT case+ _DEDENT
case: "case" test ":" suite // test is not quite correct.
%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
"""
parser = Lark(GRAMMAR, parser='lalr', start=['single_input', 'file_input', 'eval_input'], postlex=PythonIndenter())
tree = parser.parse(r"""
def name(n):
match n:
case 1:
print("one")
case 2:
print("two")
case _:
print("number is too big")
""", start='file_input')
# Remove the 'python3__' prefix that was added to the implicitly imported rules.
for t in tree.iter_subtrees():
t.data = t.data.rsplit('__', 1)[-1]
print(tree.pretty())
| Fix typos in comment of example code | Fix typos in comment of example code
| Python | mit | lark-parser/lark |
51b28e286bc7f8f272a1f45b47d246976b65ddda | go/apps/rapidsms/tests/test_definition.py | go/apps/rapidsms/tests/test_definition.py | from vumi.tests.helpers import VumiTestCase
from go.apps.rapidsms.definition import ConversationDefinition
class TestConversationDefinition(VumiTestCase):
def test_conversation_type(self):
conv_def = ConversationDefinition()
self.assertEqual(conv_def.conversation_type, "rapidsms")
| Add test for conversation definition. | Add test for conversation definition.
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go |
|
d90edf3b4d8fa714e7e24acbc22fb35bc828911d | services/controllers/interpolator.py | services/controllers/interpolator.py | class Interpolator:
def __init__(self):
self.data = []
def addIndexValue(self, index, value):
self.data.append((index, value))
def valueAtIndex(self, target_index):
if target_index < self.data[0][0]:
return None
elif self.data[-1][0] < target_index:
return None
else:
start = None
end = None
for (index, value) in self.data:
if index == target_index:
return value
else:
if index <= target_index:
start = (index, value)
elif target_index < index:
end = (index, value)
break
index_delta = end[0] - start[0]
percent = (target_index - start[0]) / index_delta
value_delta = end[1] - start[1]
return start[1] + value_delta * percent
if __name__ == "__main__":
pass
| class Interpolator:
def __init__(self):
self.data = []
def addIndexValue(self, index, value):
self.data.append((index, value))
def valueAtIndex(self, target_index):
if target_index < self.data[0][0]:
return None
elif self.data[-1][0] < target_index:
return None
else:
start = None
end = None
for (index, value) in self.data:
if index == target_index:
return value
else:
if index <= target_index:
start = (index, value)
elif target_index < index:
end = (index, value)
break
index_delta = end[0] - start[0]
percent = (target_index - start[0]) / index_delta
value_delta = end[1] - start[1]
return start[1] + value_delta * percent
def to_array(self):
result = []
for (index, value) in self.data:
result.append(index)
result.append(value)
return result
def from_array(self, array):
self.data = []
for i in range(0, len(array), 2):
self.addIndexValue(array[i], array[i + 1])
if __name__ == "__main__":
pass
| Add ability to convert to/from an array | Add ability to convert to/from an array
This is needed as an easy way to serialize an interpolator for sending/receiving over HTTP
| Python | bsd-3-clause | gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2,gizmo-cda/g2x-submarine-v2 |
3deffc39e1a489255272c35f7171b7e85942b108 | shipyard/shipyard/host/node/build.py | shipyard/shipyard/host/node/build.py | """Host-only environment for Node.js."""
from pathlib import Path
from foreman import define_parameter, decorate_rule
from shipyard import install_packages
(define_parameter('npm_prefix')
.with_doc("""Location host-only npm.""")
.with_type(Path)
.with_derive(lambda ps: ps['//base:build'] / 'host/npm-host')
)
@decorate_rule('//base:build')
def install(parameters):
"""Set up host-only environment for Node.js."""
if not Path('/usr/bin/nodejs').exists():
install_packages(['nodejs', 'npm'])
contents = 'prefix = %s\n' % parameters['npm_prefix'].absolute()
(Path.home() / '.npmrc').write_text(contents)
| """Host-only environment for Node.js."""
from pathlib import Path
from foreman import define_parameter, decorate_rule
from shipyard import (
ensure_file,
execute,
install_packages,
)
(define_parameter('npm_prefix')
.with_doc("""Location host-only npm.""")
.with_type(Path)
.with_derive(lambda ps: ps['//base:build'] / 'host/npm-host')
)
@decorate_rule('//base:build')
def install(parameters):
"""Set up host-only environment for Node.js."""
if not Path('/usr/bin/node').exists():
install_packages(['nodejs', 'npm'])
contents = 'prefix = %s\n' % parameters['npm_prefix'].absolute()
(Path.home() / '.npmrc').write_text(contents)
# Ubuntu systems use `nodejs` rather than `node` :(
if not Path('/usr/bin/node').exists():
ensure_file('/usr/bin/nodejs')
execute('sudo ln --symbolic nodejs node'.split(), cwd='/usr/bin')
| Fix node/nodejs name conflict on Ubuntu systems | Fix node/nodejs name conflict on Ubuntu systems
| Python | mit | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage |
df8618c185108aa71e42da7d9569e16fb350b4c0 | hackeriet/doorcontrold/__init__.py | hackeriet/doorcontrold/__init__.py | #!/usr/bin/env python
from hackeriet.mqtt import MQTT
from hackeriet.door import Doors
import threading, os, logging
logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
piface = False
# Determine if piface is used on the Pi
if "PIFACE" in os.environ:
piface = True
logging.info('Using piface configuration')
# Be backwards compatible with old env variable name
gpio_pin = int(os.getenv("DOOR_GPIO_PIN", os.getenv("DOOR_PIN", 0)))
# How many seconds should the door lock remain open
timeout = int(os.getenv("DOOR_TIMEOUT", 2))
door = Doors(piface=piface,pin=gpio_pin,timeout=timeout)
mqtt = MQTT()
door_name = os.getenv("DOOR_NAME", 'hackeriet')
door_topic = "hackeriet/door/%s/open" % door_name
mqtt.subscribe(door_topic, 0)
def on_message(mosq, obj, msg):
door.open()
logging('Door opened: %s' % msg.payload.decode())
mqtt.on_message = on_message
# Block forever
def main():
for t in threading.enumerate():
if t us threading.currentThread():
continue
t.join()
if __name__ == "__main__":
main()
| #!/usr/bin/env python
from hackeriet.mqtt import MQTT
from hackeriet.door import Doors
import threading, os, logging
logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
piface = False
# Determine if piface is used on the Pi
if "PIFACE" in os.environ:
piface = True
logging.info('Using piface configuration')
# Be backwards compatible with old env variable name
gpio_pin = int(os.getenv("DOOR_GPIO_PIN", os.getenv("DOOR_PIN", 0)))
# How many seconds should the door lock remain open
timeout = int(os.getenv("DOOR_TIMEOUT", 2))
door = Doors(piface=piface,pin=gpio_pin,timeout=timeout)
def on_message(mosq, obj, msg):
door.open()
logging.info('Door opened: %s' % msg.payload
door_name = os.getenv("DOOR_NAME", 'hackeriet')
door_topic = "hackeriet/door/%s/open" % door_name
mqtt = MQTT(on_message)
mqtt.subscribe(door_topic, 0)
# Block forever
def main():
for t in threading.enumerate():
if t us threading.currentThread():
continue
t.join()
if __name__ == "__main__":
main()
| Fix incompatibilities with latest paho lib | Fix incompatibilities with latest paho lib
| Python | apache-2.0 | hackeriet/pyhackeriet,hackeriet/pyhackeriet,hackeriet/nfcd,hackeriet/nfcd,hackeriet/pyhackeriet,hackeriet/nfcd |
Subsets and Splits