commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
---|---|---|---|---|---|---|---|
c750cbb65541ea32c2f8904c394469a14fa1e82b
|
add import script for West Dorset
|
polling_stations/apps/data_collection/management/commands/import_west_dorset.py
|
polling_stations/apps/data_collection/management/commands/import_west_dorset.py
|
Python
| 0 |
@@ -0,0 +1,632 @@
+from data_collection.management.commands import BaseXpressDCCsvInconsistentPostcodesImporter%0A%0Aclass Command(BaseXpressDCCsvInconsistentPostcodesImporter):%0A council_id = 'E07000052'%0A addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017WDDC.TSV'%0A stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017WDDC.TSV'%0A elections = %5B'parl.2017-06-08'%5D%0A csv_delimiter = '%5Ct'%0A%0A station_postcode_search_fields = %5B%0A 'polling_place_postcode',%0A 'polling_place_address_4',%0A 'polling_place_address_3',%0A 'polling_place_address_2',%0A 'polling_place_address_1',%0A %5D%0A
|
|
3e4ed4d6624ac0db7838e9aeb7a98710f746b2b8
|
Create solution.py
|
hackerrank/algorithms/strings/easy/mars_exploration/py/solution.py
|
hackerrank/algorithms/strings/easy/mars_exploration/py/solution.py
|
Python
| 0.000018 |
@@ -0,0 +1,310 @@
+#!/bin/python3%0A%0Aimport sys%0A%0Adef solution(signal):%0A import itertools%0A %0A count = 0%0A %0A for expected, received in zip(itertools.cycle('SOS'), signal):%0A if expected != received:%0A count += 1%0A %0A return count%0A%0Asignal = input().strip()%0Acount = solution(signal)%0A%0Aprint(count)%0A
|
|
eb9eb8fd295d8dbba66267e7551f4e6a51687797
|
Set db starting point.
|
snippets/base/migrations/0062_set_asrsnippet_id_autoincrement_starting_point.py
|
snippets/base/migrations/0062_set_asrsnippet_id_autoincrement_starting_point.py
|
Python
| 0 |
@@ -0,0 +1,309 @@
+# Generated by Django 2.1.3 on 2018-11-16 12:30%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('base', '0061_auto_20181116_0810'),%0A %5D%0A%0A operations = %5B%0A migrations.RunSQL(%5B'ALTER TABLE base_asrsnippet AUTO_INCREMENT=10500;'%5D, %5B''%5D)%0A %5D%0A
|
|
68e10dcb52f17aca1482112816062ea15e40097b
|
Create viruscheck.py
|
viruscheck.py
|
viruscheck.py
|
Python
| 0 |
@@ -0,0 +1,303 @@
+#!/usr/bin/env python%0A%0A#Requires ClamAV to be installed%0A%0Aimport sys%0Aimport subprocess%0A%0A%0A%0Adef clamscan():%0A scan = subprocess.check_output(%5B%0A 'clamscan',%0A '-r',%0A starting_dir%0A %5D)%0A %0A print scan%0A %0A%0Astarting_dir = sys.argv%5B1%5D%0A%0Aprint %22Running scan..........%22%0Aclamscan()%0A%0A%0A%0A
|
|
70e14187ecd2567894e5e8183341a63835d6839c
|
Create pldm related specific constants file.
|
data/pldm_variables.py
|
data/pldm_variables.py
|
Python
| 0 |
@@ -0,0 +1,1337 @@
+#!/usr/bin/python%0A%0Ar%22%22%22%0AContains PLDM-related constants.%0A%22%22%22%0A%0A%0APLDM_TYPE_BASE = '00'%0APLDM_TYPE_PLATFORM = '02'%0APLDM_TYPE_BIOS = '03'%0APLDM_TYPE_OEM = '3F'%0A%0APLDM_BASE_CMD = %7B%0A 'GET_TID': '2',%0A 'GET_PLDM_VERSION': '3',%0A 'GET_PLDM_TYPES': '4',%0A 'GET_PLDM_COMMANDS': '5'%7D%0A%0APLDM_SUCCESS = '00'%0APLDM_ERROR = '01'%0APLDM_ERROR_INVALID_DATA = '02'%0APLDM_ERROR_INVALID_LENGTH = '03'%0APLDM_ERROR_NOT_READY = '04'%0APLDM_ERROR_UNSUPPORTED_PLDM_CMD = '05'%0APLDM_ERROR_INVALID_PLDM_TYPE = '20'%0A%0ABIOS_TABLE_UNAVAILABLE = '83',%0AINVALID_BIOS_TABLE_DATA_INTEGRITY_CHECK = '84',%0AINVALID_BIOS_TABLE_TYPE = '85'%0A%0APLDM_BIOS_CMD = %7B%0A 'GET_BIOS_TABLE': '01',%0A 'SET_BIOS_ATTRIBUTE_CURRENT_VALUE': '07',%0A 'GET_BIOS_ATTRIBUTE_CURRENT_VALUE_BY_HANDLE': '08',%0A 'GET_DATE_TIME': '0c'%7D%0A%0APLDM_PLATFORM_CMD = %7B%0A 'SET_STATE_EFFECTER_STATES': '39',%0A 'GET_PDR': '51'%7D%0A%0APLDM_PDR_TYPES = %7B%0A 'STATE_EFFECTER_PDR': '11'%7D%0A%0A# PLDM OEM related variables.%0APLDM_FILEIO_CMD = %7B%0A 'GET_FILE_TABLE': '1',%0A 'READ_FILE': '4',%0A 'WRITE_FILE': '5',%0A 'READ_FILE_INTO_MEMORY': '6',%0A 'WRITE_FILE_FROM_MEMORY': '7'%7D%0A%0APLDM_FILEIO_COMPLETION_CODES = %7B%0A 'INVALID_FILE_HANDLE': '80',%0A 'DATA_OUT_OF_RANGE': '81',%0A 'INVALID_READ_LENGTH': '82',%0A 'INVALID_WRITE_LENGTH': '83',%0A 'FILE_TABLE_UNAVAILABLE': '84',%0A 'INVALID_FILE_TABLE_TYPE': '85'%7D%0A
|
|
f6a4b230d3ee98d906920c2e1cd671208a5b3e96
|
Python 1.5.4
|
python/jsbeautifier/__version__.py
|
python/jsbeautifier/__version__.py
|
__version__ = '1.5.3'
|
Python
| 0.998796 |
@@ -16,7 +16,7 @@
1.5.
-3
+4
'%0A
|
5669de3e5331add3fe2ee27cfad312115463a5d6
|
Use str instead of repr for exception message
|
py/selenium/common/exceptions.py
|
py/selenium/common/exceptions.py
|
# Copyright 2008-2009 WebDriver committers
# Copyright 2008-2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Exceptions that may happen in all the webdriver code.
"""
class WebDriverException(Exception):
"""
Base webdriver exception.
"""
def __init__(self, msg=None, screen=None, stacktrace=None):
self.msg = msg
self.screen = screen
self.stacktrace = stacktrace
def __str__(self):
exception_msg = "Message: %s " % repr(self.msg)
if self.screen is not None:
exception_msg = "%s; Screenshot: available via screen " \
% exception_msg
if self.stacktrace is not None:
exception_msg = "%s; Stacktrace: %s " \
% (exception_msg, str("\n" + "\n".join(self.stacktrace)))
return exception_msg
class ErrorInResponseException(WebDriverException):
"""
Thrown when an error has occurred on the server side.
This may happen when communicating with the firefox extension
or the remote driver server.
"""
def __init__(self, response, msg):
WebDriverException.__init__(self, msg)
self.response = response
class InvalidSwitchToTargetException(WebDriverException):
"""
Thrown when frame or window target to be switched doesn't exist.
"""
pass
class NoSuchFrameException(InvalidSwitchToTargetException):
"""
Thrown when frame target to be switched doesn't exist.
"""
pass
class NoSuchWindowException(InvalidSwitchToTargetException):
"""
Thrown when window target to be switched doesn't exist.
To find the current set of active window handles, you can get a list
of the active window handles in the following way::
print driver.window_handles
"""
pass
class NoSuchElementException(WebDriverException):
"""
Thrown when element could not be found.
If you encounter this exception, you may want to check the following:
* Check your selector used in your find_by...
* Element may not yet be on the screen at the time of the find operation,
(webpage is still loading) see selenium.webdriver.support.wait.WebDriverWait()
for how to write a wait wrapper to wait for an element to appear.
"""
pass
class NoSuchAttributeException(WebDriverException):
"""
Thrown when the attribute of element could not be found.
You may want to check if the attribute exists in the particular browser you are
testing against. Some browsers may have different property names for the same
property. (IE8's .innerText vs. Firefox .textContent)
"""
pass
class StaleElementReferenceException(WebDriverException):
"""
Thrown when a reference to an element is now "stale".
Stale means the element no longer appears on the DOM of the page.
Possible causes of StaleElementReferenceException include, but not limited to:
* You are no longer on the same page, or the page may have refreshed since the element
was located.
* The element may have been removed and re-added to the screen, since it was located.
Such as an element being relocated.
This can happen typically with a javascript framework when values are updated and the
node is rebuilt.
* Element may have been inside an iframe or another context which was refreshed.
"""
pass
class InvalidElementStateException(WebDriverException):
"""
"""
pass
class UnexpectedAlertPresentException(WebDriverException):
"""
Thrown when an unexpected alert is appeared.
Usually raised when when an expected modal is blocking webdriver form executing any
more commands.
"""
def __init__(self, msg=None, screen=None, stacktrace=None, alert_text=None):
super(UnexpectedAlertPresentException, self).__init__(msg, screen, stacktrace)
self.alert_text = alert_text
def __str__(self):
return "Alert Text: %s\n%s" % (self.alert_text, str(super(WebDriverException, self)))
class NoAlertPresentException(WebDriverException):
"""
Thrown when switching to no presented alert.
This can be caused by calling an operation on the Alert() class when an alert is
not yet on the screen.
"""
pass
class ElementNotVisibleException(InvalidElementStateException):
"""
Thrown when an element is present on the DOM, but
it is not visible, and so is not able to be interacted with.
Most commonly encountered when trying to click or read text
of an element that is hidden from view.
"""
pass
class ElementNotSelectableException(InvalidElementStateException):
"""
Thrown when trying to select an unselectable element.
For example, selecting a 'script' element.
"""
pass
class InvalidCookieDomainException(WebDriverException):
"""
Thrown when attempting to add a cookie under a different domain
than the current URL.
"""
pass
class UnableToSetCookieException(WebDriverException):
"""
Thrown when a driver fails to set a cookie.
"""
pass
class RemoteDriverServerException(WebDriverException):
"""
"""
pass
class TimeoutException(WebDriverException):
"""
Thrown when a command does not complete in enough time.
"""
pass
class MoveTargetOutOfBoundsException(WebDriverException):
"""
Thrown when the target provided to the `ActionsChains` move()
method is invalid, i.e. out of document.
"""
pass
class UnexpectedTagNameException(WebDriverException):
"""
Thrown when a support class did not get an expected web element.
"""
pass
class InvalidSelectorException(NoSuchElementException):
"""
Thrown when the selector which is used to find an element does not return
a WebElement. Currently this only happens when the selector is an xpath
expression and it is either syntactically invalid (i.e. it is not a
xpath expression) or the expression does not select WebElements
(e.g. "count(//input)").
"""
pass
class ImeNotAvailableException(WebDriverException):
"""
Thrown when IME support is not available. This exception is thrown for every IME-related
method call if IME support is not available on the machine.
"""
pass
class ImeActivationFailedException(WebDriverException):
"""
Thrown when activating an IME engine has failed.
"""
pass
|
Python
| 0 |
@@ -981,26 +981,22 @@
: %25s
-
+%5Cn
%22 %25
-repr(
self.msg
)%0A
@@ -991,17 +991,16 @@
self.msg
-)
%0A
@@ -1054,23 +1054,20 @@
ion_msg
++
= %22
-%25s;
Screensh
@@ -1094,44 +1094,11 @@
reen
- %22 %5C%0A %25 exception_msg
+%5Cn%22
%0A
@@ -1150,30 +1150,37 @@
-exception_msg = %22%25s; S
+stacktrace = %22%5Cn%22.join(self.s
tack
@@ -1188,16 +1188,9 @@
race
-: %25s %22 %5C
+)
%0A
@@ -1202,15 +1202,8 @@
- %25 (
exce
@@ -1215,36 +1215,32 @@
_msg
-, str(%22%5Cn%22 + %22%5Cn%22.join(self.
+ += %22Stacktrace:%5Cn%25s%22 %25
stac
@@ -1245,19 +1245,16 @@
acktrace
-)))
%0A
|
8ff3b74df83055068b1f8abe05e8ce186ab6eb18
|
implement strStr with KMP. Kana我喜欢你啊!!!
|
python/string/ImplementstrStr.py
|
python/string/ImplementstrStr.py
|
Python
| 0 |
@@ -0,0 +1,1212 @@
+#KMP algorithm. can't get it. Just a simple implementation of%0A#0. https://www.youtube.com/watch?v=2ogqPWJSftE%0A#and%0A#1. http://www.cnblogs.com/zuoyuan/p/3698900.html%0Aclass Solution:%0A # @param haystack, a string%0A # @param needle, a string%0A # @return an integer%0A def strStr(self, haystack, needle):%0A n = len(haystack)%0A m = len(needle)%0A if m == 0 or haystack == needle:%0A return 0%0A prefix = self.prefixMap(needle)%0A q = 0%0A for i in range(0, n):%0A while q %3E 0 and needle%5Bq%5D != haystack%5Bi%5D:%0A q = prefix%5Bq - 1%5D%0A if needle%5Bq%5D == haystack%5Bi%5D:%0A q = q + 1%0A if q == m:%0A return i - m + 1%0A return -1%0A%0A def prefixMap(self, needle):%0A prefix = %5B0 for i in xrange(len(needle))%5D%0A a = 0%0A for b in xrange(2, len(needle)+1):%0A while a %3E 0 and needle%5Ba%5D != needle%5Bb-1%5D:%0A a = prefix%5Ba-1%5D%0A if needle%5Ba%5D == needle%5Bb-1%5D:%0A a += 1%0A prefix%5Bb-1%5D = a%0A return prefix%0A%0Aif __name__ == %22__main__%22:%0A solution = Solution()%0A%0A print solution.strStr(%22mississippi%22, %22pi%22)%0A print solution.strStr(%22a%22, %22a%22)%0A
|
|
a5044eedc9efcd5294b55bc565e7ec1685fd9e17
|
Add return post (mirrors add_membership)
|
pupa/scrape/popolo.py
|
pupa/scrape/popolo.py
|
import copy
from .base import (BaseModel, SourceMixin, LinkMixin, ContactDetailMixin, OtherNameMixin,
IdentifierMixin)
from .schemas.post import schema as post_schema
from .schemas.person import schema as person_schema
from .schemas.membership import schema as membership_schema
from .schemas.organization import schema as org_schema
# a copy of the org schema without sources
org_schema_no_sources = copy.deepcopy(org_schema)
org_schema_no_sources['properties'].pop('sources')
class Post(BaseModel, LinkMixin, ContactDetailMixin):
"""
A popolo-style Post
"""
_type = 'post'
_schema = post_schema
def __init__(self, label, role, organization_id, start_date='', end_date=''):
super(Post, self).__init__()
self.label = label
self.role = role
self.organization_id = organization_id
self.start_date = start_date
self.end_date = end_date
def __str__(self):
return self.label
__unicode__ = __str__
class Membership(BaseModel, ContactDetailMixin, LinkMixin):
"""
A popolo-style Membership.
"""
_type = 'membership'
_schema = membership_schema
def __init__(self, person_id, organization_id, post_id=None,
role='', label='', start_date='', end_date='', on_behalf_of_id=None):
"""
Constructor for the Membership object.
We require a person ID and organization ID, as required by the
popolo spec. Additional arguments may be given, which match those
defined by popolo.
"""
super(Membership, self).__init__()
self.person_id = person_id
self.organization_id = organization_id
self.post_id = post_id
self.start_date = start_date
self.end_date = end_date
self.role = role
self.label = label
self.on_behalf_of_id = on_behalf_of_id
def __str__(self):
return self.person_id + ' membership in ' + self.organization_id
__unicode__ = __str__
class Person(BaseModel, SourceMixin, ContactDetailMixin, LinkMixin, IdentifierMixin,
OtherNameMixin):
"""
Details for a Person in Popolo format.
"""
_type = 'person'
_schema = person_schema
def __init__(self, name, birth_date='', death_date='', biography='', summary='', image='',
gender='', national_identity=''):
super(Person, self).__init__()
self.name = name
self.birth_date = birth_date
self.death_date = death_date
self.biography = biography
self.summary = summary
self.image = image
self.gender = gender
self.national_identity = national_identity
def add_membership(self, organization, role='member', **kwargs):
"""
add a membership in an organization and return the membership
object in case there are more details to add
"""
membership = Membership(self._id, organization._id, role=role, **kwargs)
self._related.append(membership)
return membership
def __str__(self):
return self.name
__unicode__ = __str__
class Organization(BaseModel, SourceMixin, ContactDetailMixin, LinkMixin, IdentifierMixin,
OtherNameMixin):
"""
A single popolo-style Organization
"""
_type = 'organization'
_schema = org_schema
def __init__(self, name, classification=None, parent_id=None, chamber='',
founding_date='', dissolution_date='', image=''):
"""
Constructor for the Organization object.
"""
super(Organization, self).__init__()
self.name = name
self.classification = classification
self.chamber = chamber
self.founding_date = founding_date
self.dissolution_date = dissolution_date
self.parent_id = parent_id
self.image = image
def __str__(self):
return self.name
__unicode__ = __str__
def validate(self):
schema = None
if self.classification in ['party']:
schema = org_schema_no_sources
return super(Organization, self).validate(schema=schema)
#@property
#def parent(self):
# return self.parent_id
#@parent.setter
#def parent(self, val):
# self.parent_id = val._id
def add_post(self, label, role, **kwargs):
post = Post(label=label, role=role, organization_id=self._id, **kwargs)
self._related.append(post)
|
Python
| 0 |
@@ -4490,8 +4490,28 @@
d(post)%0A
+ return post%0A
|
8b42b0825d5cbb6becef9669b43a2c8229ea8642
|
Add script to remove unpaired fasta entries.
|
remove_unpaired_fasta_entries.py
|
remove_unpaired_fasta_entries.py
|
Python
| 0 |
@@ -0,0 +1,1026 @@
+#!/usr/bin/env python%0A%0A%22%22%22%0ARemove unpaired reads from a fasta file.%0A%0AThis script can be used for the case that unpaired reads (e.g. as%0Areads were removed during quality trimming) in a pair of fasta files%0Afrom paired-end sequencing need to be removed.%0A%0A%22%22%22%0A%0Aimport argparse%0Afrom Bio import SeqIO%0Afrom Bio.SeqIO.FastaIO import FastaWriter%0A%0Aparser = argparse.ArgumentParser()%0Aparser.add_argument(%22fasta_file_to_filter%22)%0Aparser.add_argument(%22reference_fasta_file%22)%0Aparser.add_argument(%22--output_fasta%22, default=%22output.fa%22)%0Aargs = parser.parse_args()%0A%0A# Read reference file header%0Areference_headers = %7B%7D%0Afor seq_record in SeqIO.parse(args.reference_fasta_file, %22fasta%22):%0A reference_headers%5Bseq_record.id.split()%5B0%5D%5D = 1%0A%0A# Read fasta file to filter and write output%0Awith open(args.output_fasta, 'w') as output_fh:%0A writer = FastaWriter(output_fh, wrap=0)%0A writer.write_file(%0A filter(lambda seq_record: seq_record.id.split()%5B0%5D in reference_headers,%0A SeqIO.parse(args.fasta_file_to_filter, %22fasta%22)))%0A
|
|
b84af881f800bfad13b5e90379c5f4ec0445239a
|
Add setup.py.
|
setup.py
|
setup.py
|
Python
| 0 |
@@ -0,0 +1,369 @@
+#!/usr/bin/env python%0A%0Afrom distutils.core import setup%0A%0Asetup(name = 'pymoira',%0A version = '1.0',%0A description = 'Client library for MIT Moira service managment system protocol',%0A author = 'Victor Vasiliev',%0A author_email = '[email protected]',%0A url = 'https://github.com/vasilvv/pymoira',%0A license = 'MIT',%0A py_modules = %5B'pymoira'%5D)%0A
|
|
52d3a5a20c7f1bf4c874e4210fd17753a67d5c71
|
Add ID command
|
commands/cmd_id.py
|
commands/cmd_id.py
|
Python
| 0.000012 |
@@ -0,0 +1,439 @@
+from lib.command import Command%0A%0A%0Aclass IdCommand(Command):%0A name = 'id'%0A description = 'Returns your user ID, or the ID of the current chat when -c or %5C'chat%5C' is passed as an argument.'%0A%0A def run(self, message, args):%0A reply = 'Your Telegram ID is %7B0%7D'.format(message.from_user.id)%0A if '-c' or 'chat' in args:%0A reply = 'This chat%5C's ID is %7B0%7D'.format(message.chat.id)%0A self.reply(message, reply)%0A
|
|
9bc26f8a0d2c209fc3e73cd0f267164bfd49fef3
|
Update setup.py
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
from distutils.core import setup
from wok import version
setup(name='wok',
version=version.encode("utf8"),
description='Static site generator',
install_requires=['pyyaml', 'jinja2'],
author='Mike Cooper',
author_email='[email protected]',
url='https://www.github.com/mythmon/wok',
packages=['wok'],
scripts=['scripts/wok'],
)
|
Python
| 0.000001 |
@@ -82,16 +82,21 @@
%0A%0Asetup(
+%0A
name='wo
@@ -95,26 +95,24 @@
name='wok',%0A
-
version=
@@ -143,210 +143,816 @@
- description='Static site generator',%0A install_requires=%5B'pyyaml', 'jinja2'%5D,%0A author='Mike Cooper',%0A author_email='[email protected]',%0A
+author='Mike Cooper',%0A author_email='[email protected]',%0A url='http://wok.mythmon.com',%0A description='Static site generator',%0A long_description=%0A %22Wok is a static website generator. It turns a pile of templates, %22%0A %22content, and resources (like CSS and images) into a neat stack of %22%0A %22plain HTML. You run it on your local computer, and it generates a %22%0A %22directory of web files that you can upload to your web server, or %22%0A %22serve directly.%22%0A download_
url=
-'
+%22
http
-s
://w
-ww.github.com/mythmon/wok',%0A
+ok.mythmon.com/download%22,%0A classifiers=%5B%0A %22Development Status :: 4 - Beta%22,%0A %22License :: OSI Approved :: MIT License%22,%0A 'Operating System :: POSIX',%0A 'Programming Language :: Python',%0A %5D%0A requires=%5B'pyyaml', 'jinja2', 'Markdown', 'docutils', 'Pygments'%5D,%0A
@@ -965,26 +965,24 @@
es=%5B'wok'%5D,%0A
-
scripts=
@@ -998,16 +998,10 @@
/wok'%5D,%0A
-
)%0A
|
8238e0476097af0afed1443391370285dd61d8ca
|
Add setup.py
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,1460 @@
+#!/usr/bin/env python%0A%0Aimport setuptools%0Aimport os%0A%0Awith open(os.path.join('fs', 'sshfs', '__metadata__.py')) as f:%0A exec(f.read())%0A%0ACLASSIFIERS = %5B%0A 'Development Status :: 5 - Production/Stable',%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved :: MIT License',%0A 'Operating System :: OS Independent',%0A 'Programming Language :: Python',%0A 'Programming Language :: Python :: 2.7',%0A 'Programming Language :: Python :: 3.3',%0A 'Programming Language :: Python :: 3.4',%0A 'Programming Language :: Python :: 3.5',%0A 'Programming Language :: Python :: 3.6',%0A 'Topic :: System :: Filesystems',%0A%5D%0A%0Awith open('README.rst', 'rt') as f:%0A DESCRIPTION = f.read()%0A%0Awith open('requirements.txt') as f:%0A REQUIREMENTS = f.read().splitlines()%0A%0Awith open(os.path.join('tests', 'requirements.txt')) as f:%0A TEST_REQUIREMENTS = %5Bl for l in f if not l.startswith('-r')%5D%0A TEST_REQUIREMENTS.extend(REQUIREMENTS)%0A%0A%0Asetuptools.setup(%0A author=__author__,%0A author_email=__author_email__,%0A classifiers=CLASSIFIERS,%0A description=%22Pyfilesystem2 implementation for SSH/SFTP using paramiko %22,%0A install_requires=REQUIREMENTS,%0A license=__license__,%0A long_description=DESCRIPTION,%0A name='fs.sshfs',%0A packages=setuptools.find_packages(exclude=(%22tests%22,)),%0A platforms=%5B'any'%5D,%0A test_suite=%22tests%22,%0A tests_require=TEST_REQUIREMENTS,%0A url=%22https://github.com/althonos/fs.sshfs%22,%0A version=__version__,%0A)%0A
|
|
d348a3f5a1d86dbc5bf38782953fcc8510c010d6
|
version change
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='jieba',
version='0.15.1',
description='Chinese Words Segementation Utilities',
author='Sun, Junyi',
author_email='[email protected]',
url='http://github.com/fxsjy',
packages=['jieba'],
package_dir={'jieba':'jieba'},
package_data={'jieba':['*.*','finalseg/*']}
)
|
Python
| 0.000001 |
@@ -72,11 +72,9 @@
'0.1
-5.1
+6
',
|
72416d5bf4308c10bc9b2ab31464ad2853042402
|
Use the official package django select2 that finally support py3
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""
Installation script:
To release a new version to PyPi:
- Ensure the version is correctly set in accounting.__init__.py
- Run:
`python setup.py sdist`
`twine upload dist/*`
"""
from setuptools import setup, find_packages
import os
import sys
from accounting import get_version
PROJECT_DIR = os.path.dirname(__file__)
setup(name='django-accounting',
version=get_version().replace(' ', '-'),
url='https://github.com/dulaccc/django-accounting',
author="Pierre Dulac",
author_email="[email protected]",
description="Accounting made accessible for small businesses and "
"sole proprietorships through a simple Django project",
long_description=open(os.path.join(PROJECT_DIR, 'README.rst')).read(),
keywords="Accounting, Django, Money, Cashflow",
license='MIT',
platforms=['linux'],
packages=find_packages(exclude=["tests*"]),
include_package_data=True,
install_requires=[
'django>=1.7.0,<1.8',
# Used to render the forms
'django-bootstrap3==4.11.0',
# Used to improve the forms
'Django_Select2_Py3>=4.2.1',
# Used for date/time form fields
'django-datetime-widget>=0.9,<1.0',
# Define beautiful tags
'django-classy-tags==0.5.1',
# Internationalization
'Babel>=1.0,<1.4',
# Date utilities
'python-dateutil>=2.2,<2.3',
],
dependency_links=[
'http://github.com/applegrew/django-select2@python3#egg=Django_Select2_Py3-4.2.1',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Application Frameworks']
)
|
Python
| 0.000001 |
@@ -1472,126 +1472,57 @@
-%5D,%0A
- dependency_links=%5B%0A 'http://github.com/applegrew/django-select2@python3#egg=Django_Select2_Py3-4.2.1
+# Select2%0A 'django-select2%3E=4.3,%3C4.4
',%0A
|
e941103913c1f18b60cbc4c3ba9edfe2da9ca9f0
|
Version bump.
|
setup.py
|
setup.py
|
import os
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
setup(
name='django-ajaximage',
version='0.1.18',
description='Add ajax image upload functionality with a progress bar to file input fields within Django admin. Images are optionally resized.',
long_description=readme,
author="Bradley Griffiths",
author_email='[email protected]',
url='https://github.com/bradleyg/django-ajaximage',
packages=['ajaximage'],
include_package_data=True,
install_requires=[
'Django',
'Pillow',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
|
Python
| 0 |
@@ -180,12 +180,11 @@
='0.
-1.18
+2.0
',%0A
@@ -203,104 +203,30 @@
on='
-Add ajax image upload functionality with a progress bar to file input fields within Django admin
+Upload images via ajax
. Im
@@ -502,25 +502,16 @@
quires=%5B
-%0A
'Django'
@@ -511,24 +511,16 @@
Django',
-%0A
'Pillow
@@ -521,21 +521,16 @@
Pillow',
-%0A
%5D,%0A z
@@ -760,11 +760,11 @@
::
-BSD
+MIT
Lic
@@ -855,16 +855,72 @@
: Python
+ :: 2.7',%0A 'Programming Language :: Python :: 3.4
',%0A %5D
|
a79fcf2786df38f84b065ff579f83f03c1d5a20b
|
Add setup.py file
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,363 @@
+#!/usr/bin/env python%0A%0Afrom setuptools import setup, find_packages%0A%0Asetup(%0A name='django-cyclebufferfield',%0A description=%22Field to manage Django fields in a fixed-size ring buffer.%22,%0A version='0.1',%0A url='http://code.playfire.com/',%0A%0A author='Playfire.com',%0A author_email='[email protected]',%0A license='BSD',%0A%0A packages=find_packages(),%0A)%0A
|
|
5d6f52d2b89eda2aa070faafad2fd89eeaf599ec
|
add setup py
|
setup.py
|
setup.py
|
Python
| 0 |
@@ -0,0 +1,827 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0Afrom setuptools import setup, find_packages%0A%0Asetup(%0A name='selectel_cloud_api',%0A version='1.0',%0A packages=find_packages(),%0A install_requires='selectel_cloud_api',%0A url='https://github.com/RustoriaRu/SelectelCloudApi',%0A license='MIT',%0A author='vir-mir',%0A keywords='selectel.ru selectel api, cloud',%0A author_email='[email protected]',%0A description='api select cloud api',%0A classifiers=%5B%0A 'Environment :: Web Environment',%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved :: MIT License',%0A 'Programming Language :: Python :: 2.7',%0A 'Programming Language :: Python :: 3.3',%0A 'Programming Language :: Python :: 3.4',%0A 'Topic :: Software Development :: Libraries :: Python Modules',%0A %5D%0A)%0A
|
|
ab3728405be94c071c353374735b97f207479c00
|
Add setup.py to make an .exe with py2exe
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,97 @@
+#!/c/Anaconda/python%0A%0Afrom distutils.core import setup%0Aimport py2exe%0A%0Asetup(console=%5B%22tmpr.py%22%5D)%0A
|
|
2e91c826a72e3f240f6d010678d68bab0bab5749
|
Add setup.py for packaging
|
setup.py
|
setup.py
|
Python
| 0 |
@@ -0,0 +1,513 @@
+from setuptools import setup%0Afrom sirsi import __version__, __author__%0A%0Asetup(%0A name='sirsi',%0A version=__version__,%0A author=__author__,%0A author_email='[email protected]',%0A description='Manage a sirsi enterprise-based library account',%0A url='https://github.com/-winny/sirsi',%0A license='MIT',%0A packages=%5B'sirsi'%5D,%0A install_requires=%5B%0A 'argparse==1.2.1',%0A 'beautifulsoup4==4.3.2',%0A 'mechanize==0.2.5',%0A 'python-dateutil==2.2',%0A 'tabulate==0.7.2',%0A %5D,%0A)%0A
|
|
76601be760f0aa15637f65164c5e595b218fc2b9
|
Add setup.py
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,317 @@
+from setuptools import find_packages%0Afrom setuptools import setup%0A%0AVERSION = '0.0.1'%0A%0Asetup(%0A name='gae-utils',%0A version=VERSION,%0A packages=find_packages(),%0A install_requires=%5B%5D,%0A include_package_data=True,%0A zip_safe=False,%0A maintainer='Tyler Treat',%0A maintainer_email='[email protected]'%0A)%0A%0A
|
|
609bc6fbd1284c1b769c2e0548f6c65a97d144cd
|
Add initial attempt at a setup.py file
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,340 @@
+from setuptools import setup%0A%0Aimport pygametemplate%0A%0A%0Asetup(%0A name=%22pygametemplate%22,%0A version=pygametemplate.__version__,%0A description=pygametemplate.__doc__,%0A url=%22https://github.com/AndyDeany/pygame-template%22,%0A author=pygametemplate.__author__,%0A author_email=%[email protected]%22,%0A packages=%5B%22pygametemplate%22%5D%0A)%0A
|
|
b5b503229789c61af5bb47d6bb587bafb2ada562
|
Fix setup.py, bump version.
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""
# pykafka
pykafka allows you to produce messages to the Kafka distributed publish/subscribe messaging service.
## Requirements
You need to have access to your Kafka instance and be able to connect through
TCP. You can obtain a copy and instructions on how to setup kafka at
https://github.com/kafka-dev/kafka
## Installation
pip install pykafka
## Usage
### Sending a simple message
import kafka
producer = kafka.producer.Producer('test')
message = kafka.message.Message("Foo!")
producer.send(message)
### Sending a sequence of messages
import kafka
producer = kafka.producer.Producer('test')
message1 = kafka.message.Message("Foo!")
message2 = kafka.message.Message("Bar!")
producer.send([message1, message2])
### Batching a bunch of messages using a context manager.
import kafka
producer = kafka.producer.Producer('test')
with producer.batch() as messages:
print "Batching a send of multiple messages.."
messages.append(kafka.message.Message("first message to send")
messages.append(kafka.message.Message("second message to send")
* they will be sent all at once, after the context manager execution.
### Consuming messages one by one
import kafka
consumer = kafka.consumer.Consumer('test')
messages = consumer.consume()
### Consuming messages using a generator loop
import kafka
consumer = kafka.consumer.Consumer('test')
for message in consumer.loop():
print message
Contact:
Please use the GitHub issues: https://github.com/dsully/pykafka/issues
* Inspiried from Alejandro Crosa's kafka-rb: https://github.com/acrosa/kafka-rb
"""
import setuptools
# Don't install deps for development mode.
setuptools.bootstrap_install_from = None
setuptools.setup(
name = 'pykafka',
version = '0.1',
license = 'MIT',
long_description = __doc__
author = "Dan Sully",
author_email = "[email protected]",
url = 'http://github.com/dsully/pykafka',
platforms = 'any',
# What are we packaging up?
packages = setuptools.find_packages('kafka'),
zip_safe = True,
verbose = False,
)
|
Python
| 0 |
@@ -1836,16 +1836,18 @@
n = '0.1
+.1
',%0A lic
@@ -1888,16 +1888,17 @@
__doc__
+,
%0A autho
|
a1e35b73b5e10a885e78e965242c5b1b6e92aa16
|
Add a setup.py file
|
setup.py
|
setup.py
|
Python
| 0.000002 |
@@ -0,0 +1,110 @@
+from setuptools import setup%0A%0Asetup(%0A name='wellknown',%0A version='0.1dev',%0A packages=%5B'wellknown'%5D%0A)%0A
|
|
65ecc0145406e7d8e20a281c0e5c04b26208646d
|
Add a setup.py file.
|
setup.py
|
setup.py
|
Python
| 0 |
@@ -0,0 +1,560 @@
+try:%0A from setuptools import setup%0Aexcept ImportError:%0A from distutils.core import setup%0A%0Aconfig = %7B%0A 'name': 'ultracold-ions',%0A 'description': 'A library for the simulation of ultracold neutral plasmas.',%0A 'author': 'Tech-X Corporation',%0A 'url': 'https://github.com/Tech-XCorp/ultracold-ions',%0A 'download_url': 'https://github.com/Tech-XCorp/ultracold-ions',%0A 'author_email': '[email protected]',%0A 'version': '0.1',%0A 'install_requires': %5B'numpy','pyopencl','nose'%5D,%0A 'packages': %5B'uci'%5D,%0A 'scripts': %5B%5D%0A%7D%0A%0Asetup(**config)%0A
|
|
d9d3ae4a1d4007a0aa1dafe09102cb7414c338db
|
Remove extracting HG revision from setup.py.
|
setup.py
|
setup.py
|
from setuptools import setup
import subprocess
import os.path
try:
# don't get confused if our sdist is unzipped in a subdir of some
# other hg repo
if os.path.isdir('.hg'):
p = subprocess.Popen(['hg', 'parents', r'--template={rev}\n'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if not p.returncode:
fh = open('HGREV', 'wb')
fh.write(p.communicate()[0].splitlines()[0])
fh.close()
except (OSError, IndexError):
pass
try:
hgrev = open('HGREV').read()
except IOError:
hgrev = ''
long_description = (open('README.rst').read() +
open('CHANGES.rst').read() +
open('TODO.rst').read())
def _static_files(prefix):
return [prefix+'/'+pattern for pattern in [
'markitup/*.*',
'markitup/sets/*/*.*',
'markitup/sets/*/images/*.png',
'markitup/skins/*/*.*',
'markitup/skins/*/images/*.png',
'markitup/templates/*.*'
]]
setup(
name='django-markitup',
version='2.2.2.post%s' % hgrev,
description='Markup handling for Django using the MarkItUp! universal markup editor',
long_description=long_description,
author='Carl Meyer',
author_email='[email protected]',
url='http://bitbucket.org/carljm/django-markitup/',
packages=['markitup', 'markitup.templatetags'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
zip_safe=False,
test_suite='runtests.runtests',
tests_require='Django>=1.3',
package_data={'markitup': ['templates/markitup/*.html'] +
_static_files('static')}
)
|
Python
| 0 |
@@ -26,568 +26,8 @@
tup%0A
-import subprocess%0Aimport os.path%0A%0Atry:%0A # don't get confused if our sdist is unzipped in a subdir of some%0A # other hg repo%0A if os.path.isdir('.hg'):%0A p = subprocess.Popen(%5B'hg', 'parents', r'--template=%7Brev%7D%5Cn'%5D,%0A stdout=subprocess.PIPE, stderr=subprocess.PIPE)%0A if not p.returncode:%0A fh = open('HGREV', 'wb')%0A fh.write(p.communicate()%5B0%5D.splitlines()%5B0%5D)%0A fh.close()%0Aexcept (OSError, IndexError):%0A pass%0A%0Atry:%0A hgrev = open('HGREV').read()%0Aexcept IOError:%0A hgrev = ''
%0A%0Alo
@@ -513,19 +513,10 @@
post
-%25s' %25 hgrev
+0'
,%0A
|
b82dee62e325d83f8aeaede406de24973ee42b42
|
Update project url in setup.py
|
setup.py
|
setup.py
|
#!/usr/bin/env python
#
# Copyright 2010 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='closure_linter',
version='2.3.17',
description='Closure Linter',
license='Apache',
author='The Closure Linter Authors',
author_email='[email protected]',
url='http://code.google.com/p/closure-linter',
install_requires=['python-gflags'],
package_dir={'closure_linter': 'closure_linter'},
packages=['closure_linter', 'closure_linter.common'],
entry_points = {
'console_scripts': [
'gjslint = closure_linter.gjslint:main',
'fixjsstyle = closure_linter.fixjsstyle:main'
]
}
)
|
Python
| 0 |
@@ -946,28 +946,29 @@
http
+s
://
-code.google.com/p
+github.com/google
/clo
|
45a7a979d687b75851d3901171b826faa965389e
|
Add setup script
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,213 @@
+#!/usr/bin/env python%0A%0Afrom distutils.core import setup%0A%0Asetup(name='ambra',%0A version='0.1dev',%0A description='Temporal prediction by pairwise comparisons',%0A packages=%5B'ambra'%5D,%0A)%0A%0A
|
|
20a5ccf55c9292d3c360a34d190e583b84594a37
|
Add zeeman energy tests.
|
pyoommf/test_zeeman.py
|
pyoommf/test_zeeman.py
|
Python
| 0 |
@@ -0,0 +1,665 @@
+from zeeman import Zeeman%0A%0Adef test_zeeman_mif():%0A H = (0.1, -0.5, -8.9e6)%0A zeeman = Zeeman(H)%0A mif_string = zeeman.get_mif()%0A lines = mif_string.split('%5Cn')%0A assert 'Specify Oxs_FixedZeeman %7B' in lines%5B0%5D%0A assert '%7B Oxs_UniformVectorField %7B' in lines%5B1%5D%0A assert 'vector' in lines%5B2%5D%0A line2 = lines%5B2%5D.split()%0A assert float(line2%5B1%5D%5B1:%5D) == H%5B0%5D%0A assert float(line2%5B2%5D) == H%5B1%5D%0A assert float(line2%5B3%5D%5B0:-1%5D) == H%5B2%5D%0A%0Adef test_zeeman_formatting():%0A H = (0.1, -0.5, -8.9e6)%0A zeeman = Zeeman(H)%0A mif_string = zeeman.get_mif()%0A assert mif_string%5B0%5D == 'S'%0A assert mif_string%5B-1%5D == '%5Cn'%0A assert mif_string%5B-2%5D == '%5Cn'%0A
|
|
692d215f26a2f31bd9ae1d56f7e493299d6481da
|
fix for virtualenv install
|
setup.py
|
setup.py
|
#!/usr/bin/python
# python setup.py sdist --format=zip,gztar
from setuptools import setup
import os
import sys
import platform
import imp
version = imp.load_source('version', 'lib/version.py')
util = imp.load_source('util', 'lib/util.py')
if sys.version_info[:3] < (2, 6, 0):
sys.exit("Error: Electrum requires Python version >= 2.6.0...")
usr_share = util.usr_share_dir()
if not os.access(usr_share, os.W_OK):
sys.exit("Error: cannot write to %s.\nIf you do not have root permissions, you may install Electrum a virtualenv.\nAlso, please note that you can run Electrum without installing it on your system."%usr_share)
data_files = []
if (len(sys.argv) > 1 and (sys.argv[1] == "sdist")) or (platform.system() != 'Windows' and platform.system() != 'Darwin'):
print "Including all files"
data_files += [
(os.path.join(usr_share, 'applications/'), ['electrum.desktop']),
(os.path.join(usr_share, 'app-install', 'icons/'), ['icons/electrum.png'])
]
if not os.path.exists('locale'):
os.mkdir('locale')
for lang in os.listdir('locale'):
if os.path.exists('locale/%s/LC_MESSAGES/electrum.mo' % lang):
data_files.append((os.path.join(usr_share, 'locale/%s/LC_MESSAGES' % lang), ['locale/%s/LC_MESSAGES/electrum.mo' % lang]))
appdata_dir = os.path.join(usr_share, "electrum")
data_files += [
(appdata_dir, ["data/README"]),
(os.path.join(appdata_dir, "cleanlook"), [
"data/cleanlook/name.cfg",
"data/cleanlook/style.css"
]),
(os.path.join(appdata_dir, "sahara"), [
"data/sahara/name.cfg",
"data/sahara/style.css"
]),
(os.path.join(appdata_dir, "dark"), [
"data/dark/name.cfg",
"data/dark/style.css"
])
]
for lang in os.listdir('data/wordlist'):
data_files.append((os.path.join(appdata_dir, 'wordlist'), ['data/wordlist/%s' % lang]))
setup(
name="Electrum",
version=version.ELECTRUM_VERSION,
install_requires=[
'slowaes',
'ecdsa>=0.9',
'pbkdf2',
'requests',
'pyasn1',
'pyasn1-modules',
'qrcode',
'SocksiPy-branch',
'tlslite'
],
package_dir={
'electrum': 'lib',
'electrum_gui': 'gui',
'electrum_plugins': 'plugins',
},
scripts=['electrum'],
data_files=data_files,
py_modules=[
'electrum.account',
'electrum.bitcoin',
'electrum.blockchain',
'electrum.bmp',
'electrum.commands',
'electrum.daemon',
'electrum.i18n',
'electrum.interface',
'electrum.mnemonic',
'electrum.msqr',
'electrum.network',
'electrum.network_proxy',
'electrum.old_mnemonic',
'electrum.paymentrequest',
'electrum.paymentrequest_pb2',
'electrum.plugins',
'electrum.qrscanner',
'electrum.simple_config',
'electrum.synchronizer',
'electrum.transaction',
'electrum.util',
'electrum.verifier',
'electrum.version',
'electrum.wallet',
'electrum.x509',
'electrum_gui.gtk',
'electrum_gui.qt.__init__',
'electrum_gui.qt.amountedit',
'electrum_gui.qt.console',
'electrum_gui.qt.history_widget',
'electrum_gui.qt.icons_rc',
'electrum_gui.qt.installwizard',
'electrum_gui.qt.lite_window',
'electrum_gui.qt.main_window',
'electrum_gui.qt.network_dialog',
'electrum_gui.qt.password_dialog',
'electrum_gui.qt.paytoedit',
'electrum_gui.qt.qrcodewidget',
'electrum_gui.qt.qrtextedit',
'electrum_gui.qt.receiving_widget',
'electrum_gui.qt.seed_dialog',
'electrum_gui.qt.transaction_dialog',
'electrum_gui.qt.util',
'electrum_gui.qt.version_getter',
'electrum_gui.stdio',
'electrum_gui.text',
'electrum_plugins.btchipwallet',
'electrum_plugins.coinbase_buyback',
'electrum_plugins.cosigner_pool',
'electrum_plugins.exchange_rate',
'electrum_plugins.greenaddress_instant',
'electrum_plugins.labels',
'electrum_plugins.trezor',
'electrum_plugins.virtualkeyboard',
],
description="Lightweight Bitcoin Wallet",
author="Thomas Voegtlin",
author_email="[email protected]",
license="GNU GPLv3",
url="https://electrum.org",
long_description="""Lightweight Bitcoin Wallet"""
)
|
Python
| 0 |
@@ -414,16 +414,69 @@
.W_OK):%0A
+ try:%0A os.mkdir(usr_share)%0A except:%0A
sys.
@@ -571,16 +571,19 @@
lectrum
+in
a virtua
|
331aecb334f4e4ff4c38b4a2b12d3a80d7327de1
|
Remove unused URL from setup.py
|
setup.py
|
setup.py
|
#! /usr/bin/python
# Copyright (C) 2007-2010 Michael Foord & the mock team
# E-mail: fuzzyman AT voidspace DOT org DOT uk
# http://www.voidspace.org.uk/python/mock/
from mock import __version__
from distutils.core import setup
import os
NAME = 'mock'
MODULES = ['mock']
DESCRIPTION = 'A Python Mocking and Patching Library for Testing'
URL = "http://www.voidspace.org.uk/python/mock/"
'http://www.voidspace.org.uk/downloads/mock-%s.zip' % __version__
readme = os.path.join(os.path.dirname(__file__), 'README.txt')
LONG_DESCRIPTION = open(readme).read()
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.4',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing',
]
AUTHOR = 'Michael Foord'
AUTHOR_EMAIL = '[email protected]'
KEYWORDS = "testing test mock mocking unittest patching stubs fakes doubles".split(' ')
setup(
name=NAME,
version=__version__,
py_modules=MODULES,
# metadata for upload to PyPI
author=AUTHOR,
author_email=AUTHOR_EMAIL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
keywords=KEYWORDS,
url=URL,
classifiers=CLASSIFIERS,
)
|
Python
| 0.000001 |
@@ -387,74 +387,8 @@
ck/%22
-%0A'http://www.voidspace.org.uk/downloads/mock-%25s.zip' %25 __version__
%0A%0Are
|
98b2114199b04678cd41e25deb9a3478e0f76e45
|
say hello to python 3.3
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
from setuptools import setup, find_packages, Command
import sys
from gunicorn import __version__
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content']
# read long description
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as f:
long_description = f.read()
# read dev requirements
fname = os.path.join(os.path.dirname(__file__), 'requirements_dev.txt')
with open(fname) as f:
tests_require = list(map(lambda l: l.strip(), f.readlines()))
class PyTest(Command):
user_options = [
("cov", None, "measure coverage")
]
def initialize_options(self):
self.cov = None
def finalize_options(self):
pass
def run(self):
import sys,subprocess
basecmd = [sys.executable, '-m', 'py.test']
if self.cov:
basecmd += ['--cov', 'gunicorn']
errno = subprocess.call(basecmd + ['tests'])
raise SystemExit(errno)
setup(
name = 'gunicorn',
version = __version__,
description = 'WSGI HTTP Server for UNIX',
long_description = long_description,
author = 'Benoit Chesneau',
author_email = '[email protected]',
license = 'MIT',
url = 'http://gunicorn.org',
classifiers = CLASSIFIERS,
zip_safe = False,
packages = find_packages(exclude=['examples', 'tests']),
include_package_data = True,
tests_require = tests_require,
cmdclass = {'test': PyTest},
entry_points="""
[console_scripts]
gunicorn=gunicorn.app.wsgiapp:run
gunicorn_django=gunicorn.app.djangoapp:run
gunicorn_paster=gunicorn.app.pasterapp:run
[gunicorn.workers]
sync=gunicorn.workers.sync:SyncWorker
eventlet=gunicorn.workers.geventlet:EventletWorker
gevent=gunicorn.workers.ggevent:GeventWorker
gevent_wsgi=gunicorn.workers.ggevent:GeventPyWSGIWorker
gevent_pywsgi=gunicorn.workers.ggevent:GeventPyWSGIWorker
tornado=gunicorn.workers.gtornado:TornadoWorker
[gunicorn.loggers]
simple=gunicorn.glogging:Logger
[paste.server_runner]
main=gunicorn.app.pasterapp:paste_server
"""
)
|
Python
| 0.999756 |
@@ -696,16 +696,187 @@
: 2.7',%0A
+ 'Programming Language :: Python',%0A 'Programming Language :: Python :: 3',%0A 'Programming Language :: Python :: 3.2',%0A 'Programming Language :: Python :: 3.3',%0A
'Top
|
d00f9fd43cfc45747a9479f00db5d67fda658e55
|
Add initial distutils configuration
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,1665 @@
+# Copyright 2009 Google Inc. All Rights Reserved.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%0A%22%22%22distutils configuration.%22%22%22%0A%0A__author__ = '[email protected] (Thomas Stromberg)'%0A%0Afrom namebench import VERSION%0Afrom distutils.core import setup%0Asetup(name='namebench',%0A version=VERSION,%0A py_modules=%5B'namebench'%5D,%0A description='DNS service benchmarking tool',%0A author='Thomas Stromberg',%0A author_email='[email protected]',%0A url='http://namebench.googlecode.com/',%0A packages=('libnamebench',),%0A platforms=('Any',),%0A requires=%5B'graphy', 'dnspython', 'jinja2'%5D,%0A license='Apache 2.0',%0A scripts=%5B'namebench.py'%5D,%0A package_data = %7B'libnamebench': %5B'data/alexa-top-10000-global.txt',%0A 'templates/ascii.tmpl',%0A 'templates/html.tmpl',%0A 'namebench.cfg'%5D%7D,%0A# package_data=%5B('data', %5B'data/alexa-top-10000-global.txt'%5D),%0A# ('templates', %5B'templates/ascii.tmpl',%0A# 'templates/html.tmpl'%5D),%0A# ('config', %5B'namebench.cfg'%5D)%5D%0A )%0A
|
|
8439263d6ff66e659a8051d3efc0475020048629
|
update v.# make tag and set to release
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# (C) 2013-2014 முத்தையா அண்ணாமலை
# open-tamil project
from distutils.core import setup
from codecs import open
setup(name='Open-Tamil',
version='0.2.4',
description='Tamil language text processing tools',
author='Muthiah Annamalai',
author_email='[email protected]',
url='https://github.com/arcturusannamalai/open-tamil',
packages=['tamil','transliterate','ngram'],
license='GPLv3',
platforms='PC,Linux,Mac',
classifiers='Natural Language :: Tamil',
long_description=open('README.md','r','UTF-8').read(),
download_url='https://github.com/arcturusannamalai/open-tamil/archive/latest.zip',#pip
)
|
Python
| 0 |
@@ -201,17 +201,17 @@
on='0.2.
-4
+8
',%0A
@@ -282,14 +282,9 @@
r='M
-uthiah
+.
Ann
@@ -289,16 +289,30 @@
nnamalai
+, T. Arulalan,
',%0A
@@ -723,8 +723,9 @@
)%0A
+%0A
|
c75ee6a0ee2f542463b5ca8cb81b06a6a6650d4c
|
Add initial setup file
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,359 @@
+from setuptools import setup%0A%0Asetup(%0A name='python2-consul',%0A packages=%5B'python2-consul'%5D,%0A version='0.0.1',%0A install_requires=%5B%0A 'certifi==2017.4.17',%0A 'chardet==3.0.4',%0A 'idna==2.5',%0A 'PyYAML==3.12',%0A 'requests==2.18.1',%0A 'urllib3==1.21.1',%0A 'validators==0.12.0',%0A 'pytest==3.2.2'%0A %5D%0A)%0A
|
|
af2effaf147b8e473f7b9c655842617a91414278
|
Upgrade the requirement on taskotron-python-versions to include latest changes in shared functions
|
setup.py
|
setup.py
|
import sys
from setuptools import setup
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
requires = [
'sqlalchemy >= 1.0, < 2.0',
'PyYAML >= 3.11, < 4.0',
'click >= 3.3, < 7.0',
'flask >= 0.10, < 1.0',
'markdown >= 2.4, < 3.0',
'dogpile.cache >= 0.5.5, < 1.0',
'taskotron-python-versions',
]
tests_require = ['pytest']
setup_args = dict(
name='portingdb',
version='0.1',
packages=['portingdb'],
url='https://github.com/fedora-python/portingdb',
description="""Database of packages that need Python 3 porting""",
author='Petr Viktorin',
author_email='[email protected]',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=requires,
tests_require=tests_require,
cmdclass={'test': PyTest},
)
if __name__ == '__main__':
setup(**setup_args)
|
Python
| 0 |
@@ -590,16 +590,28 @@
versions
+ %3E= 0.1.dev2
',%0A%5D%0A%0Ate
|
47b2e9890a0f3022ffbbf83a6e722b2e77e3443b
|
Fix dajax setup.py
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='django-dajax',
version='0.9',
author='Jorge Bastida',
author_email='[email protected]',
description=('Easy to use library to create asynchronous presentation '
'logic with django and dajaxice'),
url='http://dajaxproject.com',
license='BSD',
packages=['dajax'],
package_data={'dajax': ['static/*']},
long_description=('dajax is a powerful tool to easily and super-quickly '
'develop asynchronous presentation logic in web '
'applications using python and almost no JS code. It '
'supports up to four of the most popular JS frameworks: '
'jQuery, Prototype, Dojo and mootols.'),
install_requires=[
'django-dajaxice>=0.5'
],
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Utilities']
)
|
Python
| 0.000003 |
@@ -388,16 +388,22 @@
'static/
+dajax/
*'%5D%7D,%0A
|
330650e7fe7c1a9aa0178812d08af332e927fe98
|
add minimal setup.py
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,342 @@
+from setuptools import setup%0A%0Asetup(name='Cohorte Micronode',%0A version='0.9',%0A description='Cohorte Micronode Repository',%0A url='https://github.com/librallu/cohorte-herald',%0A author='Luc Libralesso',%0A author_email='[email protected]',%0A license='Apache License 2.0',%0A packages=%5B%5D,%0A zip_safe=False)%0A
|
|
a90162a43e4e1817bd818b66e4ad6e377ab8af92
|
Update the setup.py version.
|
setup.py
|
setup.py
|
from distutils.core import setup
import sys
requires = ['feedgenerator', 'jinja2', 'pygments']
if sys.version_info < (2,7):
requires.append('argparse')
setup(
name = "pelican",
version = '1.1.1',
url = 'http://hg.lolnet.org/pelican/',
author = 'Alexis Metaireau',
author_email = '[email protected]',
description = "A tool to generate a static blog, with restructured text input files.",
long_description=open('README.rst').read(),
packages = ['pelican'],
package_data = {'pelican': ['themes/templates/*']},
requires = requires,
scripts = ['bin/pelican'],
classifiers = ['Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Python
| 0 |
@@ -197,19 +197,17 @@
on = '1.
-1.1
+2
',%0A u
|
06c67a7df4e2fd5cbc221f2a9c3f64179af91344
|
Add setup.py
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,759 @@
+from setuptools import setup, find_packages%0A%0Asetup(%0A name='django-exadmin',%0A version='0.1.0',%0A description='New style and free plugin django admin module, UI base bootstrap2.',%0A author='TM (sshwsfc)',%0A author_email='[email protected]',%0A url='http://github.com/sshwsfc/django-exadmin',%0A download_url='',%0A packages=find_packages(),%0A include_package_data=True,%0A zip_safe=False,%0A classifiers=%5B%0A 'Development Status :: 3 - Alpha',%0A 'Environment :: Web Environment',%0A 'Framework :: Django',%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved :: BSD License',%0A 'Operating System :: OS Independent',%0A 'Programming Language :: Python',%0A 'Framework :: Django',%0A %5D%0A)%0A
|
|
2c874c09e7bf35a0ea6a7a5029c9b17ec5f057af
|
Fix mongoengine version.
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import mongonaut
LONG_DESCRIPTION = open('README.rst').read()
setup(
name='django-mongonaut',
version=mongonaut.__version__,
description="An introspective interface for Django and MongoDB",
long_description=LONG_DESCRIPTION,
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: JavaScript",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='mongodb,django',
author=mongonaut.__author__,
author_email='[email protected]',
url='http://github.com/pydanny/django-mongonaut',
license='MIT',
packages=find_packages(),
include_package_data=True,
install_requires=['mongoengine==0.5.2'],
zip_safe=False,
)
|
Python
| 0 |
@@ -1105,17 +1105,17 @@
goengine
-=
+%3E
=0.5.2'%5D
|
ccbb7e11edc63a128b7006e015539fdabd8f3a7f
|
Set up frontend for longpolling
|
bitHopper/LongPoll.py
|
bitHopper/LongPoll.py
|
Python
| 0 |
@@ -0,0 +1,306 @@
+from gevent.event import AsyncResult%0A%0A_event = AsyncResult()%0A%0Adef wait():%0A %22%22%22%0A Gets the New Block work unit to send to clients%0A %22%22%22%0A return _event.get()%0A%0Adef trigger(work):%0A %22%22%22%0A Call to trigger a LP%0A %22%22%22%0A%0A old = self._event%0A self._event = event.AsyncResult()%0A old.set(work)%0A
|
|
34ad457ab831173efd3758af926deb17daf53feb
|
Add sitemap
|
resources/sitemaps.py
|
resources/sitemaps.py
|
Python
| 0.000002 |
@@ -0,0 +1,380 @@
+from django.contrib.sitemaps import Sitemap%0Afrom resources.models import Resource%0Afrom django.utils import translation%0A%0Aclass ResourceSitemap(Sitemap):%0A%0A def items(self):%0A return Resource.objects.filter(noindex=False, is_published=True,%0A language=translation.get_language())%0A%0A def lastmod(self, obj):%0A return obj.modified%0A
|
|
b38cdd6c42114b3e43c04744300c78c213093def
|
Improve the SSHTCPForwarder __init__ logic.
|
king_phisher/ssh_forward.py
|
king_phisher/ssh_forward.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/ssh_forward.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import select
import SocketServer
import threading
import paramiko
__all__ = ['SSHTCPForwarder']
class ForwardServer(SocketServer.ThreadingTCPServer):
daemon_threads = True
allow_reuse_address = True
def __init__(self, remote_server, ssh_transport, *args, **kwargs):
self.remote_server = remote_server
self.ssh_transport = ssh_transport
SocketServer.ThreadingTCPServer.__init__(self, *args, **kwargs)
class ForwardHandler(SocketServer.BaseRequestHandler):
def __init__(self, *args, **kwargs):
self.server = args[2]
self.chain_host = self.server.remote_server[0]
self.chain_port = self.server.remote_server[1]
self.ssh_transport = self.server.ssh_transport
SocketServer.BaseRequestHandler.__init__(self, *args, **kwargs)
def handle(self):
try:
chan = self.ssh_transport.open_channel('direct-tcpip', (self.chain_host, self.chain_port), self.request.getpeername())
except Exception as err:
return
if chan is None:
return
while True:
r, w, x = select.select([self.request, chan], [], [])
if self.request in r:
data = self.request.recv(1024)
if len(data) == 0:
break
chan.send(data)
if chan in r:
data = chan.recv(1024)
if len(data) == 0:
break
self.request.send(data)
peername = self.request.getpeername()
chan.close()
self.request.close()
class SSHTCPForwarder(threading.Thread):
def __init__(self, server, username, password, local_port, remote_server, preferred_private_key = None):
super(SSHTCPForwarder, self).__init__()
self.local_port = local_port
self.server = server
self.remote_server = remote_server
client = paramiko.SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.client = client
self.__connected = False
# an issue seems to exist in paramiko when multiple keys are present through the ssh-agent
ssh_agent = paramiko.Agent()
ssh_keys = ssh_agent.get_keys()
if len(ssh_keys) == 1:
self.__try_connect(username = username, look_for_keys = False, allow_agent = False, pkey = ssh_keys[0])
if not self.__connected and preferred_private_key:
preferred_private_key = preferred_private_key.strip()
preferred_private_key = preferred_private_key.replace(':', '')
preferred_private_key = preferred_private_key.lower()
preferred_private_key = filter(lambda k: k.get_fingerprint().encode('hex').lower() == preferred_private_key, ssh_keys)
if len(preferred_private_key) == 1:
self.__try_connect(username = username, look_for_keys = False, allow_agent = False, pkey = preferred_private_key[0])
if not self.__connected:
self.client.connect(self.server[0], self.server[1], username = username, password = password, allow_agent = False, look_for_keys = True)
def __try_connect(self, *args, **kwargs):
try:
self.client.connect(self.server[0], self.server[1], *args, **kwargs)
except paramiko.SSHException:
return False
self.__connected = True
return True
def run(self):
transport = self.client.get_transport()
self.server = ForwardServer(self.remote_server, transport, ('', self.local_port), ForwardHandler)
self.server.serve_forever()
def stop(self):
if isinstance(self.server, ForwardServer):
self.server.shutdown()
self.join()
self.client.close()
|
Python
| 0.000005 |
@@ -3327,16 +3327,43 @@
client%0A
+%09%09self.username = username%0A
%09%09self._
@@ -3587,37 +3587,16 @@
connect(
-username = username,
look_for
@@ -4069,37 +4069,16 @@
connect(
-username = username,
look_for
@@ -4178,39 +4178,38 @@
ed:%0A%09%09%09self.
-client.
+__try_
connect(
self.server%5B
@@ -4200,61 +4200,8 @@
ect(
-self.server%5B0%5D, self.server%5B1%5D, username = username,
pass
@@ -4253,16 +4253,36 @@
r_keys =
+ True, raise_error =
True)%0A%0A
@@ -4324,16 +4324,136 @@
wargs):%0A
+%09%09raise_error = False%0A%09%09if 'raise_error' in kwargs:%0A%09%09%09raise_error = kwargs%5B'raise_error'%5D%0A%09%09%09del kwargs%5B'raise_error'%5D%0A
%09%09try:%0A%09
@@ -4505,16 +4505,42 @@
rver%5B1%5D,
+ username = self.username,
*args,
@@ -4579,17 +4579,61 @@
xception
-:
+ as error:%0A%09%09%09if raise_error:%0A%09%09%09%09raise error
%0A%09%09%09retu
|
ed8447534ce028a308669c139cb8b0d5a7f2ef0d
|
Fix Nordic nRF51822 board to work on Linux
|
pyOCD/interface/pyusb_backend.py
|
pyOCD/interface/pyusb_backend.py
|
"""
mbed CMSIS-DAP debugger
Copyright (c) 2006-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from interface import Interface
import logging, os, threading
try:
import usb.core
import usb.util
except:
if os.name == "posix" and not os.uname()[0] == 'Darwin':
logging.error("PyUSB is required on a Linux Machine")
isAvailable = False
else:
isAvailable = True
class PyUSB(Interface):
    """
    This class provides basic functions to access
    a USB HID device using pyusb:
        - write/read an endpoint
    """
    vid = 0
    pid = 0
    intf_number = 0
    # False when pyusb could not be imported on this platform.
    isAvailable = isAvailable

    def __init__(self):
        super(PyUSB, self).__init__()
        self.ep_out = None      # OUT endpoint; may stay None (control EP0 used instead)
        self.ep_in = None       # IN endpoint read by the rx thread
        self.dev = None         # underlying pyusb device handle
        self.closed = False     # set True by close() to stop the rx thread
        self.rcv_data = []      # packets queued by the rx thread, drained by read()
        self.read_sem = threading.Semaphore(0)  # released once per expected response

    def start_rx(self):
        # Spawn the background reader; daemon so it cannot block interpreter exit.
        self.thread = threading.Thread(target = self.rx_task)
        self.thread.daemon = True
        self.thread.start()

    def rx_task(self):
        # Background loop: block until write() signals that a response is
        # expected, then read one packet from the IN endpoint.
        while not self.closed:
            self.read_sem.acquire()
            if not self.closed:
                # Timeouts appear to corrupt data occasionally. Because of this the
                # timeout is set to infinite.
                self.rcv_data.append(self.ep_in.read(self.ep_in.wMaxPacketSize, -1))

    @staticmethod
    def getAllConnectedInterface(vid, pid):
        """
        returns all the connected devices which matches PyUSB.vid/PyUSB.pid.
        returns an array of PyUSB (Interface) objects
        """
        # find all devices matching the vid/pid specified
        all_devices = usb.core.find(find_all=True, idVendor=vid, idProduct=pid)

        if not all_devices:
            logging.debug("No device connected")
            return None

        boards = []

        # iterate on all devices found
        for board in all_devices:
            interface_number = -1

            # get active config
            config = board.get_active_configuration()

            # iterate on all interfaces:
            #    - if we found a HID interface -> CMSIS-DAP
            for interface in config:
                if interface.bInterfaceClass == 0x03:
                    interface_number = interface.bInterfaceNumber
                    break

            if interface_number == -1:
                continue

            try:
                # The kernel's generic HID driver may have claimed the
                # interface; detach it so pyusb can take over.
                if board.is_kernel_driver_active(interface_number):
                    board.detach_kernel_driver(interface_number)
            except Exception as e:
                print e

            ep_in, ep_out = None, None
            for ep in interface:
                # Bit 7 of the endpoint address distinguishes IN from OUT.
                if ep.bEndpointAddress & 0x80:
                    ep_in = ep
                else:
                    ep_out = ep

            product_name = usb.util.get_string(board, 2)
            vendor_name = usb.util.get_string(board, 1)

            """If there is no EP for OUT then we can use CTRL EP"""
            if not ep_in or not ep_out:
                logging.error('Endpoints not found')
                return None

            new_board = PyUSB()
            new_board.ep_in = ep_in
            new_board.ep_out = ep_out
            new_board.dev = board
            new_board.vid = vid
            new_board.pid = pid
            new_board.intf_number = interface_number
            new_board.product_name = product_name
            new_board.vendor_name = vendor_name
            new_board.start_rx()
            boards.append(new_board)

        return boards

    def write(self, data):
        """
        write data on the OUT endpoint associated to the HID interface
        """
        report_size = 64
        if self.ep_out:
            report_size = self.ep_out.wMaxPacketSize

        # Pad the packet out to the full HID report size.
        for _ in range(report_size - len(data)):
            data.append(0)

        # Tell the rx thread one response packet is now expected.
        self.read_sem.release()

        if not self.ep_out:
            # No interrupt OUT endpoint: send the report over control EP0.
            bmRequestType = 0x21              #Host to device request of type Class of Recipient Interface
            bmRequest = 0x09                  #Set_REPORT (HID class-specific request for transferring data over EP0)
            wValue = 0x200                    #Issuing an OUT report
            wIndex = self.intf_number         #mBed Board interface number for HID
            self.dev.ctrl_transfer(bmRequestType,bmRequest,wValue,wIndex,data)
            return
            #raise ValueError('EP_OUT endpoint is NULL')

        self.ep_out.write(data)
        #logging.debug('sent: %s', data)
        return

    def read(self):
        """
        read data on the IN endpoint associated to the HID interface
        """
        # Busy-wait until the rx thread has queued a packet.
        while len(self.rcv_data) == 0:
            pass
        return self.rcv_data.pop(0)

    def setPacketCount(self, count):
        # No interface level restrictions on count
        self.packet_count = count

    def close(self):
        """
        close the interface
        """
        logging.debug("closing interface")
        self.closed = True
        # Wake the rx thread so it can observe self.closed and exit.
        self.read_sem.release()
        self.thread.join()
        usb.util.dispose_resources(self.dev)
|
Python
| 0 |
@@ -3611,22 +3611,8 @@
p_in
- or not ep_out
:%0A
|
67cca3176d1e2b5def3ebbd64f4bd56a8976529b
|
add res.company file
|
l10n_br_sale/res_company.py
|
l10n_br_sale/res_company.py
|
Python
| 0.000001 |
@@ -0,0 +1,1662 @@
+# -*- encoding: utf-8 -*-%0A###############################################################################%0A# #%0A# Copyright (C) 2014 Renato Lima - Akretion #%0A# #%0A#This program is free software: you can redistribute it and/or modify #%0A#it under the terms of the GNU Affero General Public License as published by #%0A#the Free Software Foundation, either version 3 of the License, or #%0A#(at your option) any later version. #%0A# #%0A#This program is distributed in the hope that it will be useful, #%0A#but WITHOUT ANY WARRANTY; without even the implied warranty of #%0A#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #%0A#GNU Affero General Public License for more details. #%0A# #%0A#You should have received a copy of the GNU Affero General Public License #%0A#along with this program. If not, see %3Chttp://www.gnu.org/licenses/%3E. #%0A###############################################################################%0A%0Afrom openerp import models, fields%0A%0A%0Aclass ResCompany(models.Model):%0A _inherit = 'res.company'%0A%0A sale_fiscal_category_id = fields.Many2one(%0A 'l10n_br_account.fiscal.category', u'Categoria Fiscal Padr%C3%A3o Compras',%0A domain=%22%5B('journal_type', '=', 'sale')%5D%22)%0A
|
|
f1830a1ada87a21d851290398feac326e015dc3d
|
Fix bug when prompt is none (#456) (#458)
|
openhtf/plugs/user_input.py
|
openhtf/plugs/user_input.py
|
# Copyright 2015 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""User input module for OpenHTF.
Allows tests to prompt for user input using the framework, so prompts can be
presented via the CLI interface, the included web frontend, and custom
frontends alike. Any other part of the framework that needs to access shared
prompt state should use the openhtf.prompts pseudomodule.
"""
import collections
import functools
import logging
import platform
import select
import sys
import threading
import uuid
from openhtf import PhaseOptions
from openhtf import plugs
from openhtf.util import argv
if platform.system() != 'Windows':
import termios
_LOG = logging.getLogger(__name__)
class PromptInputError(Exception):
    """Raised in the event that a prompt returns without setting the response."""
class MultiplePromptsError(Exception):
    """Raised if a prompt is invoked while there is an existing prompt."""
class PromptUnansweredError(Exception):
    """Raised when a prompt times out or otherwise comes back unanswered."""
Prompt = collections.namedtuple('Prompt', 'id message text_input')
class ConsolePrompt(threading.Thread):
    """Thread that displays a prompt to the console and waits for a response.

    Args:
      message: The text displayed to the user on the console.
      callback: Invoked with the user's response string once one is read.
    """

    def __init__(self, message, callback):
        super(ConsolePrompt, self).__init__()
        self.daemon = True  # never block interpreter shutdown
        self._message = message
        self._callback = callback
        self._stopped = False

    def Stop(self):
        """Mark this ConsolePrompt as stopped.

        If this prompt was already stopped, do nothing.
        """
        if not self._stopped:
            print "Nevermind; prompt was answered from elsewhere."
            self._stopped = True

    def run(self):
        """Main logic for this thread to execute."""
        try:
            if platform.system() == 'Windows':
                # Windows doesn't support file-like objects for select(), so fall back
                # to raw_input().
                self._callback(raw_input(self._message + '\n\r'))
            else:
                # First, display the prompt to the console.
                print self._message

                # Before reading, clear any lingering buffered terminal input.
                termios.tcflush(sys.stdin, termios.TCIFLUSH)

                while not self._stopped:
                    # Poll stdin with a short timeout so Stop() is noticed promptly.
                    inputs, _, _ = select.select([sys.stdin], [], [], 0.001)
                    for stream in inputs:
                        if stream is sys.stdin:
                            response = sys.stdin.readline().rstrip()
                            self._callback(response)
                            return
        finally:
            self._stopped = True
class UserInput(plugs.BasePlug):
    """Get user input from inside test phases."""
    enable_remote = True

    def __init__(self):
        self._prompt = None       # active Prompt, or None when no prompt is up
        self._response = None     # response for the active prompt
        self._cond = threading.Condition()  # guards _prompt/_response

    def _asdict(self):
        """Return a dict representation of the current prompt.

        Returns:
          None when there is no active prompt; previously this raised
          AttributeError by dereferencing self._prompt unconditionally.
        """
        if self._prompt is None:
            return None
        return {'id': self._prompt.id.hex,
                'message': self._prompt.message,
                'text-input': self._prompt.text_input}

    def prompt(self, message, text_input=False, timeout_s=None):
        """Prompt for a user response by showing the message.

        Args:
          message: The message to display to the user.
          text_input: True iff the user needs to provide a string back.
          timeout_s: Seconds to wait before raising a PromptUnansweredError.

        Returns:
          The string input by the user.

        Raises:
          MultiplePromptsError: If a prompt is already outstanding.
          PromptUnansweredError: If the prompt times out unanswered.
        """
        with self._cond:
            if self._prompt is not None:
                self._prompt = None
                raise MultiplePromptsError
            self._prompt = Prompt(id=uuid.uuid4(),
                                  message=message,
                                  text_input=text_input)
            self._response = None

            _LOG.debug('Displaying prompt (%s): "%s"%s%s', self._prompt.id,
                       message, ', Expects text' if text_input else '',
                       ', Timeout: %s sec' % timeout_s if timeout_s else '')
            console_prompt = ConsolePrompt(
                message, functools.partial(self.respond, self._prompt.id))
            console_prompt.start()
            # Block until respond() notifies us, or the timeout elapses.
            self._cond.wait(timeout_s)
            console_prompt.Stop()
            self._prompt = None
            if self._response is None:
                self._prompt = None
                raise PromptUnansweredError
            return self._response

    def respond(self, prompt_id, response):
        """Respond to the prompt that has the given ID.

        Args:
          prompt_id: Either a UUID instance, or a string representing a UUID.
          response: A string response to the given prompt.

        If there is no active prompt or the prompt id being responded to doesn't
        match the active prompt, do nothing.

        Returns:
          True if the response was used, False otherwise.
        """
        if type(prompt_id) == str:
            prompt_id = uuid.UUID(prompt_id)
        _LOG.debug('Responding to prompt (%s): "%s"', prompt_id.hex, response)
        with self._cond:
            if self._prompt is not None and prompt_id == self._prompt.id:
                self._response = response
                self._cond.notifyAll()
                return True  # The response was used.
        return False  # The response was not used.
def prompt_for_test_start(
    message='Enter a DUT ID in order to start the test.', timeout_s=60*60*24):
    """Return an OpenHTF phase for use as a prompt-based start trigger.

    Args:
      message: Prompt text shown to the operator.
      timeout_s: Seconds before the prompt (and the phase) times out;
          defaults to 24 hours.
    """

    @PhaseOptions(timeout_s=timeout_s)
    @plugs.plug(prompts=UserInput)
    def trigger_phase(test, prompts):
        """Test start trigger that prompts the user for a DUT ID."""
        test.test_record.dut_id = prompts.prompt(message=message, text_input=True,
                                                 timeout_s=timeout_s)

    return trigger_phase
|
Python
| 0.000002 |
@@ -3375,24 +3375,71 @@
prompt.%22%22%22%0A
+ if self._prompt is None:%0A return None%0A
return %7B
|
df84cf964214420987c51813b8960ce068223adf
|
Add request handler
|
request_handler/request_handler.py
|
request_handler/request_handler.py
|
Python
| 0.000001 |
@@ -0,0 +1,2210 @@
+#!flask/bin/python%0Afrom flask import Flask, jsonify, abort%0Afrom flask import make_response%0Afrom flask import request%0Afrom flask import url_for%0Aimport psycopg2 as pg%0A%0A%0Aapp = Flask(__name__)%0A%0Adef make_public_request(request):%0A new_request = %7B%7D%0A new_request%5B'uri'%5D = url_for('get_requests', request_id=request%5B0%5D, _external=True)%0A new_request%5B'source'%5D = request%5B1%5D%0A new_request%5B'destination'%5D = request%5B2%5D%0A%0A return new_request%0A%[email protected]('/clientapp/requests', methods=%5B'GET'%5D)%0Adef get_requests():%0A ''' Get requests from the database%0A '''%0A conn = pg.connect(database=%22ngot%22, host=%22127.0.0.1%22, port=%225432%22)%0A cursor = conn.cursor()%0A cursor.execute(%22SELECT request_id, source, destination from requests%22)%0A rows = list(cursor.fetchall())%0A cursor.close()%0A conn.close()%0A%0A return jsonify(%7B'requests': %5Bmake_public_request(req) for req in rows%5D%7D)%0A%[email protected]('/clientapp/vehicle_trips', methods=%5B'GET'%5D)%0Adef get_vehicle_trips():%0A ''' Query the database and return generated vehicle trips%0A '''%0A conn = pg.connect(database=%22ngot%22, host=%22127.0.0.1%22, port=%225432%22)%0A cursor = conn.cursor()%0A pg.extensions.register_type(%0A pg.extensions.new_array_type(%0A (1017,), 'PICKUP_POINTS%5B%5D', pg.STRING))%0A cursor.execute(%22SELECT pickup_points from vehicletrips%22)%0A rows = cursor.fetchone()%0A cursor.close()%0A conn.close()%0A%0A return jsonify(%7B'vehicle_trips': rows%7D)%0A%[email protected]('/clientapp/requests', methods=%5B'POST'%5D)%0Adef create_request():%0A #if not request.json in request.json:%0A #abort(404)%0A%0A conn = pg.connect(database=%22ngot%22, host=%22127.0.0.1%22, port=%225432%22)%0A cursor = conn.cursor()%0A #request_id = request.json%5B'request_id'%5D%0A source = request.json%5B'source'%5D%0A destination = request.json%5B'destination'%5D%0A cursor.execute(%22INSERT INTO requests (source, destination) values (%25s, %25s)%22, 
(source, destination))%0A rows = cursor.rowcount%0A conn.commit()%0A cursor.close()%0A conn.close()%0A%0A return jsonify(%7B'rows': rows%7D), 201%0A%[email protected](404)%0Adef not_found(error):%0A return make_response(jsonify(%7B'error': 'Not found'%7D), 404)%0A%0A%0Aif __name__ == '__main__':%0A app.run(host='0.0.0.0', debug=True)%0A #app.run(debug=True)%0A
|
|
4e54128e5c0b9c762e5f93ae0d8791eeddde2264
|
Add JSON serializer
|
dxr/json.py
|
dxr/json.py
|
Python
| 0 |
@@ -0,0 +1,2045 @@
+#!/usr/bin/env python2%0A%0Aclass JsonOutput:%0A need_separator = False%0A content = ''%0A%0A def open(self):%0A self.content += '%7B'%0A self.need_separator = False%0A%0A def close(self):%0A self.content += '%7D'%0A self.need_separator = True%0A%0A def open_list(self):%0A self.content += '%5B'%0A self.need_separator = False%0A%0A def close_list(self):%0A self.content += '%5D'%0A self.need_separator = True%0A%0A def key_value(self, key, value, quote_value):%0A if self.need_separator is True:%0A self.content += ','%0A%0A if key is not None:%0A self.content += '%22' + key + '%22'%0A self.content += ' : '%0A%0A if quote_value is True:%0A self.content += '%22' + value + '%22'%0A else:%0A self.content += value%0A%0A self.need_separator = True%0A%0A def key_dict(self, key, nested_values):%0A if self.need_separator is True:%0A self.content += ','%0A%0A if key is not None:%0A self.content += '%22' + key + '%22'%0A self.content += ' : '%0A%0A self.open()%0A%0A for subkey in nested_values.keys():%0A self.add(subkey, nested_values%5Bsubkey%5D)%0A%0A self.close()%0A self.need_separator = True%0A%0A def key_list(self, key, values):%0A if self.need_separator is True:%0A self.content += ','%0A%0A self.content += '%22' + key + '%22'%0A self.content += ' : '%0A%0A self.open_list()%0A%0A for subvalue in values:%0A self.add(None, subvalue)%0A%0A self.close_list()%0A self.need_separator = True%0A%0A def add(self, key, value):%0A if isinstance(value, dict):%0A self.key_dict(key, value)%0A elif isinstance(value, list):%0A self.key_list(key, value)%0A elif isinstance(value, int):%0A self.key_value(key, str(value), False)%0A else:%0A self.key_value(key, str(value), True)%0A%0A def print_str(self):%0A return '%7B' + self.content + '%7D'%0A%0A#if __name__ == '__main__':%0A# json = JsonOutput()%0A#%0A# json.add('foo', 'bar')%0A# json.add('age', 666)%0A# json.add('hash', %7B 'aa': 'bb', 'cc': 'dd', 'zz': %5B 1, 3, 5%5D%7D)%0A# json.add('list', %5B1, 2, 3%5D)%0A# json.add('mixed', %5B 
%7B'Foo': 'bar', 'Tu': 'ruru' %7D, %7B 'lala': 'whee', 'pi': 3 %7D %5D)%0A#%0A# print json.print_str();%0A
|
|
30eec7bb18285b82a7d67a0a3d9098afc5b9e286
|
Create QRfactorization.py
|
effective_quadratures/QRfactorization.py
|
effective_quadratures/QRfactorization.py
|
Python
| 0.000001 |
@@ -0,0 +1,74 @@
+# A set of functions just for QR factorization, pivoting and iterative-QR%0A
|
|
324243dfd61afd8ce244a9a02ffc800c5c73ce55
|
Add modified chart with better values
|
charts/daniels_designing_great_beers/appendix_two_course_grind_potential_extract_modified.py
|
charts/daniels_designing_great_beers/appendix_two_course_grind_potential_extract_modified.py
|
Python
| 0 |
@@ -0,0 +1,1221 @@
+%0Afrom brew.utilities import sg_from_dry_basis%0A%0A%0A%22%22%22%0ARay Daniels%0ADesigning Great Beers%0A%0AAppendix 2: Course Grind Potential Extract (modified)%0A%0ANotes:%0A The chart appears to have been developed with the moisture content set%0A to zero (0.0) and the Brew House Efficiency set to 100%25 (1.0). This%0A is not typical and the book even states that you should expect moisture%0A content at around 4.0%25 and Brew House Efficiency at arount 90.0%25.%0A%0AThis version has been modified with more typical values.%0A%22%22%22%0A%0A%0Adef get_chart():%0A mc = 4%0A bhe = 0.9%0A%0A chart = %5B%5D%0A for dbcg in range(5000, 7600, 100) + range(7600, 8025, 25):%0A gu = sg_from_dry_basis(%0A dbcg / 100.0,%0A moisture_content=mc,%0A brew_house_efficiency=bhe)%0A sg = 1 + (gu / 1000.0)%0A chart.append(%5Bround(dbcg / 100.0, 2), round(gu, 2), round(sg, 4)%5D)%0A return chart%0A%0A%0Adef print_chart():%0A chart = get_chart()%0A print(%22DBCG%5CtGU%5Ct1 lb./gallon%22)%0A print(%22'As-Is'%5Ct%5CtYields SG%22)%0A print(%22-------%5Ct-----%5Ct------------%22)%0A for dbcg, gu, sg in chart:%0A print(%22%7B0:0.2f%7D%5Ct%7B1:0.2f%7D%5Ct%7B2:0.4f%7D%22.format(dbcg, gu, sg))%0A%0A%0Adef main():%0A print_chart()%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
7172d06ced60b2c69b9ac2762019ff95f3fd7da5
|
Create twice.py
|
twice.py
|
twice.py
|
Python
| 0.000002 |
@@ -0,0 +1,502 @@
+#!/usr/bin/env python%0Aimport rospy%0Afrom std_msgs.msg import Int32%0A%0An = 0%0A%0Adef cb(message):%0A //rospy.loginfo(message.data*2)%0A global n%0A n = message.data*2%0A%0Aif __name__ == '__main__':%0A rospy.init_node('twice')%0A sub = rospy.Subscriber('count_up', Int32, cb)%0A //rospy.spin()%0A pub = rospy.Publisher('twice', Int32, queue_size=1)%0A rate = rospy.Rate(10)%0A while not rospy.is_shutdown():%0A pub.publish(n)%0A rate.sleep()%0A
|
|
e075fe93dc8261c780aa6e2a1b4e643719e4d941
|
Update factory.py
|
skbio/parse/sequences/factory.py
|
skbio/parse/sequences/factory.py
|
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
import os
from gzip import open as gzip_open
from itertools import chain
from .iterator import FastaIterator, FastqIterator
# Map file extensions (including gzipped variants) to the
# (iterator class, file opener) pair used to parse them.
FILEEXT_MAP = {'fna': (FastaIterator, open),
               'fna.gz': (FastaIterator, gzip_open),
               'fasta': (FastaIterator, open),
               'fasta.gz': (FastaIterator, gzip_open),
               'qual': (FastaIterator, open),
               'qual.gz': (FastaIterator, gzip_open),
               'fastq': (FastqIterator, open),
               'fastq.gz': (FastqIterator, gzip_open),
               'fq': (FastqIterator, open),
               'fq.gz': (FastqIterator, gzip_open)}
def _determine_types_and_openers(files):
    """Resolve the iterator class and opener for each file path.

    Returns two parallel lists (iterators, openers); both are empty when
    ``files`` is None. Raises IOError for an unrecognized extension.
    """
    iterators, openers = [], []
    for path in files or []:
        # Gzipped files key on the last two extension components
        # (e.g. "fastq.gz"); plain files key on the last one.
        if path.endswith('.gz'):
            suffix = '.'.join(path.rsplit('.', 2)[-2:])
        else:
            suffix = path.rsplit('.', 1)[-1]
        iterator, opener = FILEEXT_MAP.get(suffix, (None, None))
        if iterator is None:
            raise IOError("Unknown filetype for %s" % path)
        iterators.append(iterator)
        openers.append(opener)
    return iterators, openers
def _is_single_iterator_type(iters):
"""Determine if there is a single or multiple type of iterator
If iters is [], this method returns True it considers the null case to be
a single iterator type.
"""
if iters:
return len(set(iters)) == 1
else:
return True
def _open_or_none(opener, f):
"""Open a file or returns None"""
opened = None
if not opener:
return None
else:
name = opener.__name__
if not os.path.exists(f):
raise IOError("%s does not appear to exist!" % f)
try:
with opener(f) as opened:
return opened
except IOError:
raise IOError("Could not open %s with %s!" % (f, name))
def load(seqs, qual=None, constructor=None, **kwargs):
    """Construct the appropriate iterator for all your processing needs

    This method will attempt to open all files correctly and to feed the
    appropriate objects into the correct iterators.

    Seqs can list multiple types of files (e.g., FASTA and FASTQ), but if
    multiple file types are specified, qual must be None

    Parameters
    ----------
    seqs : str or list of sequence file paths
    qual : str or list of qual file paths or None
    constructor : force a constructor on seqs
    kwargs : dict
        passed into the subsequent generators.

    Returns
    -------
    SequenceIterator
        the return is ``Iterable``

    See Also
    --------
    skbio.parse.sequences.iterator.SequenceIterator
    skbio.parse.sequences.iterator.FastaIterator
    skbio.parse.sequences.iterator.FastqIterator
    """
    if not seqs:
        raise ValueError("Must pass in sequences!")

    # Normalize single paths into lists so the zip logic below is uniform.
    if isinstance(seqs, str):
        seqs = [seqs]

    if isinstance(qual, str):
        qual = [qual]

    # i -> iters, o -> openers
    if constructor is not None:
        # A forced constructor applies to every file; plain open() is assumed.
        i_seqs = [constructor] * len(seqs)
        o_seqs = [open] * len(seqs)
    else:
        i_seqs, o_seqs = _determine_types_and_openers(seqs)

    i_qual, o_qual = _determine_types_and_openers(qual)

    # Open every sequence (and optional qual) file up front.
    seqs = [_open_or_none(o, f) for f, o in zip(seqs, o_seqs)]
    qual = [_open_or_none(o, f) for f, o in zip(qual or [], o_qual or [])]

    if not qual:
        qual = None

    if not _is_single_iterator_type(i_seqs) and qual is not None:
        # chaining Fasta/Fastq for sequence is easy, but it gets nasty quick
        # if seqs is a mix of fasta/fastq, with qual coming in as there aren't
        # 1-1 mappings. This could be addressed if necessary, but seems like
        # an unnecessary block of code right now
        raise ValueError("Cannot handle multiple sequence file types and qual "
                         "at the sametime!")

    if _is_single_iterator_type(i_seqs):
        # Homogeneous input: one iterator consumes all files together.
        seqs_constructor = i_seqs[0]
        gen = seqs_constructor(seq=seqs, qual=qual, **kwargs)
    else:
        # Mixed input: chain one iterator per file.
        gen = chain(*[c(seq=[fp], **kwargs) for c, fp in zip(i_seqs, seqs)])

    return gen
|
Python
| 0.000001 |
@@ -2121,59 +2121,26 @@
-with opener(f) as opened:%0A return opened
+opened = opener(f)
%0A
@@ -2220,16 +2220,39 @@
name))%0A
+ %0A return opened%0A
%0A%0Adef lo
|
a882409ede1898a3b4e2fb4619089b33c1427315
|
Add migration
|
apps/conditions/migrations/0005_empty_relation.py
|
apps/conditions/migrations/0005_empty_relation.py
|
Python
| 0.000002 |
@@ -0,0 +1,746 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9 on 2016-08-10 14:23%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('conditions', '0004_condition_title'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='condition',%0A name='relation',%0A field=models.CharField(choices=%5B('eq', 'is equal to (==)'), ('neq', 'is not equal to (!=)'), ('contains', 'contains'), ('gt', 'is greater than (%3E)'), ('gte', 'is greater than or equal (%3E=)'), ('lt', 'is lesser than (%3C)'), ('lte', 'is lesser than or equal (%3C=)'), ('empty', 'is empty'), ('notempty', 'is not empty')%5D, max_length=8),%0A ),%0A %5D%0A
|
|
673a6ee654d7e540fe9c473904b6d1e326928c58
|
Create run_test.py
|
recipes/django-storages/run_test.py
|
recipes/django-storages/run_test.py
|
Python
| 0.000004 |
@@ -0,0 +1,191 @@
+import django%0Afrom django.conf import settings%0Asettings.configure(INSTALLED_APPS=%5B'storages', 'django.contrib.contenttypes', 'django.contrib.auth'%5D) %0Adjango.setup() %0A %0Aimport storages%0A
|
|
397bc67a5a214a4cad5eef20f3a13c53f90964c5
|
Modify tms_nw_svr
|
scripts/tms_nw_svr.py
|
scripts/tms_nw_svr.py
|
Python
| 0 |
@@ -0,0 +1,808 @@
+#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0Aimport rospy%0Aimport requests%0Afrom BaseHTTPServer import HTTPServer%0Afrom BaseHTTPServer import BaseHTTPRequestHandler%0Aimport urlparse%0A%0Adef svr_start(port, callback):%0A def handler(*args):%0A CallbackServer(callback, *args)%0A server = HTTPServer(('', int(port)), handler)%0A server.serve_forever()%0A%0Aclass tms_nw_svr(BaseHTTPRequestHandler):%0A def __init__(self, callback, *args):%0A self.callback = callback%0A BaseHTTPRequestHandler.__init__(self, args)%0A%0A def do_GET(self):%0A parsed_path = urlparse.urlparse(self.path)%0A query = parsed_path.query%0A self.send_response(200)%0A self.end_headers()%0A result = self.callback(query)%0A message = '%5Cr%5Cn'.join(result)%0A self.wfile.write(message)%0A return
|
|
fec45cfaee6c5e5d02b6c3979179cdad153d5076
|
add ds18b20 rpi implementation to examples
|
examples/raspberrypi/platform/ds18b20.py
|
examples/raspberrypi/platform/ds18b20.py
|
Python
| 0 |
@@ -0,0 +1,1421 @@
+import os%0Aimport re%0Aimport subprocess%0A%0AW1_DEVICES = '/sys/bus/w1/devices/'%0AW1_SENSOR_PATTERN = re.compile('(10%7C22%7C28)-.+', re.IGNORECASE)%0A%0A%0Adef modprobe(module):%0A return subprocess.check_call(%5B'modprobe', module%5D)%0A%0A%0Adef init_w1():%0A modprobe('w1-gpio')%0A modprobe('w1-therm')%0A%0A%0Adef is_w1_sensor(path):%0A return %5C%0A W1_SENSOR_PATTERN.match(path) and %5C%0A os.path.isfile(sensor_full_path(path))%0A%0A%0Adef sensor_full_path(sensor):%0A return os.path.join(W1_DEVICES, sensor, 'w1_slave')%0A%0A%0Adef read_whole_file(path):%0A with open(path, 'r') as f:%0A return f.read()%0A%0A%0Aclass InvalidW1Address(Exception):%0A def __init__(self, address):%0A super(InvalidW1Address, self).__init__()%0A self.address = address%0A%0A%0Adef guard_against_invalid_address(address):%0A if not W1_SENSOR_PATTERN.match(address):%0A raise InvalidW1Address(address)%0A%0A%0Aclass DS18b20(object):%0A @staticmethod%0A def find_all():%0A return %5BDS18b20(x) for x in os.listdir(W1_DEVICES) if is_w1_sensor(x)%5D%0A%0A def __init__(self, address):%0A guard_against_invalid_address(address)%0A self.address = address%0A%0A def read(self):%0A readings = read_whole_file(sensor_full_path(self.address))%0A temp_token = 't='%0A temp_index = readings.find(temp_token)%0A if temp_index %3C 0:%0A return None%0A temp = readings%5Btemp_index + len(temp_token):%5D%0A return float(temp) / 1000%0A
|
|
4f3e0663ce07669d1a6e975fb720c19f72243d78
|
Update Mojo gtest script for Android changes.
|
mojo/tools/mopy/gtest.py
|
mojo/tools/mopy/gtest.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import re
import subprocess
import sys
import time
from mopy.config import Config
from mopy.paths import Paths
def set_color():
    """Run gtests with color on TTY, unless its environment variable is set."""
    # Respect an explicit user setting and skip non-interactive output.
    if "GTEST_COLOR" in os.environ or not sys.stdout.isatty():
        return
    logging.getLogger().debug("Setting GTEST_COLOR=yes")
    os.environ["GTEST_COLOR"] = "yes"
def run_apptest(config, shell, args, apptest, isolate):
    """Run the apptest; optionally isolating fixtures across shell invocations.

    Args:
      config: The mopy.config.Config for the build.
      shell: The mopy.android.AndroidShell, if Android is the target platform.
      args: The arguments for the shell or apptest.
      apptest: The application test URL.
      isolate: True if the test fixtures should be run in isolation.

    Returns:
      True iff every (possibly isolated) run passed.
    """
    if not isolate:
        return _run_apptest(config, shell, args, apptest)

    fixtures = _get_fixtures(config, shell, args, apptest)
    # An empty fixture list means listing failed; count that as a failure.
    result = True if fixtures else False
    for fixture in fixtures:
        # Run each fixture in its own shell invocation via a gtest filter.
        arguments = args + ["--gtest_filter=%s" % fixture]
        if not _run_apptest(config, shell, arguments, apptest):
            result = False
    return result
def _run_apptest(config, shell, args, apptest):
    """Runs an apptest and checks the output for signs of gtest failure.

    Returns:
      True iff the captured output indicates the gtest run passed.
    """
    command = _build_command_line(config, args, apptest)
    logging.getLogger().debug("Command: %s" % " ".join(command))
    start_time = time.time()

    try:
        output = _run_test(config, shell, args, apptest)
    except Exception as e:
        _print_error(command, e)
        return False
    # Fail on output with gtest's "[ FAILED ]" or a lack of "[ PASSED ]".
    # The latter condition ensures failure on broken command lines or output.
    # Check output instead of exit codes because mojo shell always exits with 0.
    if output.find("[ FAILED ]") != -1 or output.find("[ PASSED ]") == -1:
        _print_error(command, output)
        return False

    ms = int(round(1000 * (time.time() - start_time)))
    logging.getLogger().debug("Passed with output (%d ms):\n%s" % (ms, output))
    return True
def _get_fixtures(config, shell, args, apptest):
    """Returns an apptest's "Suite.Fixture" list via --gtest_list_tests output.

    Returns an empty list when listing or parsing fails.
    """
    try:
        arguments = args + ["--gtest_list_tests"]
        tests = _run_test(config, shell, arguments, apptest)
        logging.getLogger().debug("Tests for %s:\n%s" % (apptest, tests))
        # Remove log lines from the output and ensure it matches known formatting.
        tests = re.sub("^(\[|WARNING: linker:).*\n", "", tests, flags=re.MULTILINE)
        if not re.match("^(\w*\.\r?\n( \w*\r?\n)+)+", tests):
            raise Exception("Unrecognized --gtest_list_tests output:\n%s" % tests)
        tests = tests.split("\n")
        test_list = []
        for line in tests:
            if not line:
                continue
            if line[0] != " ":
                # Unindented lines name a suite; remember it for its fixtures.
                suite = line.strip()
                continue
            # Indented lines name fixtures within the current suite.
            test_list.append(suite + line.strip())
        return test_list
    except Exception as e:
        _print_error(_build_command_line(config, arguments, apptest), e)
    return []
def _print_error(command_line, error):
    """Properly format an exception raised from a failed command execution."""
    exit_code = ""
    # subprocess.CalledProcessError carries the child's exit code.
    if hasattr(error, 'returncode'):
        exit_code = " (exit code %d)" % error.returncode
    print "\n[ FAILED ] Command%s: %s" % (exit_code, " ".join(command_line))
    print 72 * "-"
    # CalledProcessError also carries the captured output; otherwise print
    # the error itself.
    print error.output if hasattr(error, 'output') else error
    print 72 * "-"
def _build_command_line(config, args, apptest):
    """Build the apptest command line. This value isn't executed on Android."""
    # mojo_runner is the shell launcher binary for the given build config.
    return [Paths(config).mojo_runner] + args + [apptest]
def _run_test(config, shell, args, apptest):
    """Run the given test and return the output."""
    if (config.target_os != Config.OS_ANDROID):
        command = _build_command_line(config, args, apptest)
        return subprocess.check_output(command, stderr=subprocess.STDOUT)

    assert shell
    # On Android, capture the shell's output through an OS pipe: the shell
    # writes to the write end and closes it when done, then we read it all.
    (r, w) = os.pipe()
    with os.fdopen(r, "r") as rf:
        with os.fdopen(w, "w") as wf:
            shell.StartShell(args + [apptest], wf, wf.close)
            return rf.read()
|
Python
| 0.000039 |
@@ -4166,14 +4166,38 @@
tart
-Shell(
+Activity('MojoShellActivity',
args
|
053974bc96ef34075612495a7eb537ff691ff38e
|
Add test to see if legacy files are renamed
|
tests/Settings/TestCuraContainerRegistry.py
|
tests/Settings/TestCuraContainerRegistry.py
|
# Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import os #To find the directory with test files and find the test files.
import pytest #This module contains unit tests.
import unittest.mock #To mock and monkeypatch stuff.
from cura.Settings.CuraContainerRegistry import CuraContainerRegistry #The class we're testing.
from cura.Settings.ExtruderStack import ExtruderStack #Testing for returning the correct types of stacks.
from cura.Settings.GlobalStack import GlobalStack #Testing for returning the correct types of stacks.
from UM.Resources import Resources #Mocking some functions of this.
import UM.Settings.ContainerRegistry #Making empty container stacks.
import UM.Settings.ContainerStack #Setting the container registry here properly.
from UM.Settings.DefinitionContainer import DefinitionContainer #Checking against the DefinitionContainer class.
## Gives a fresh CuraContainerRegistry instance.
@pytest.fixture()
def container_registry():
    """Provide a fresh CuraContainerRegistry instance for each test."""
    return CuraContainerRegistry()
## Tests whether loading gives objects of the correct type.
@pytest.mark.parametrize("filename, output_class", [
("ExtruderLegacy.stack.cfg", ExtruderStack),
("MachineLegacy.stack.cfg", GlobalStack),
("Left.extruder.cfg", ExtruderStack),
("Global.global.cfg", GlobalStack),
("Global.stack.cfg", GlobalStack)
])
def test_loadTypes(filename, output_class, container_registry):
#Mock some dependencies.
UM.Settings.ContainerStack.setContainerRegistry(container_registry)
Resources.getAllResourcesOfType = unittest.mock.MagicMock(return_value = [os.path.join(os.path.dirname(os.path.abspath(__file__)), "stacks", filename)]) #Return just this tested file.
def findContainers(id, container_type = 0):
if id == "some_instance" or id == "some_definition":
return [UM.Settings.ContainerRegistry._EmptyInstanceContainer(id)]
else:
return []
container_registry.findContainers = findContainers
with unittest.mock.patch("cura.Settings.GlobalStack.GlobalStack.findContainer"):
with unittest.mock.patch("os.remove"):
container_registry.load()
#Check whether the resulting type was correct.
stack_id = filename.split(".")[0]
for container in container_registry._containers: #Stupid ContainerRegistry class doesn't expose any way of getting at this except by prodding the privates.
if container.getId() == stack_id: #This is the one we're testing.
assert type(container) == output_class
break
else:
assert False #Container stack with specified ID was not loaded.
|
Python
| 0 |
@@ -212,16 +212,71 @@
tests.%0A
+import shutil #To copy files to make a temporary file.%0A
import u
@@ -2836,8 +2836,1177 @@
loaded.
+%0A%0A## Tests whether loading a legacy file moves the upgraded file properly.%0Adef test_loadLegacyFileRenamed(container_registry):%0A #Create a temporary file for the registry to load.%0A temp_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), %22stacks%22, %22temporary.stack.cfg%22)%0A temp_file_source = os.path.join(os.path.dirname(os.path.abspath(__file__)), %22stacks%22, %22MachineLegacy.stack.cfg%22)%0A shutil.copyfile(temp_file_source, temp_file)%0A%0A #Mock some dependencies.%0A UM.Settings.ContainerStack.setContainerRegistry(container_registry)%0A Resources.getAllResourcesOfType = unittest.mock.MagicMock(return_value = %5Btemp_file%5D) #Return a temporary file that we'll make for this test.%0A def findContainers(id, container_type = 0):%0A return %5BUM.Settings.ContainerRegistry._EmptyInstanceContainer(id)%5D%0A container_registry.findContainers = findContainers%0A%0A with unittest.mock.patch(%22cura.Settings.GlobalStack.GlobalStack.findContainer%22):%0A container_registry.load()%0A%0A assert not os.path.isfile(temp_file)%0A new_filename = os.path.splitext(os.path.splitext(temp_file)%5B0%5D)%5B0%5D + %22.global.cfg%22%0A assert os.path.isfile(new_filename)
|
01a659318644ef47cfe0c9ad3c484a974fb31e25
|
Create __init__.py
|
__init__.py
|
__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
cb454d310431700e5ac9883a32f0b36e2e50e0fe
|
Add a check for keystone expired tokens buildup.
|
sensu/plugins/check-keystone-expired-tokens.py
|
sensu/plugins/check-keystone-expired-tokens.py
|
Python
| 0 |
@@ -0,0 +1,2692 @@
+#!/opt/openstack/current/keystone/bin/python%0A#%0A# Copyright 2015, Jesse Keating %[email protected]%3E%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A#%0A%0Aimport os%0Aimport sys%0A%0Apossible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv%5B0%5D),%0A os.pardir,%0A os.pardir))%0Aif os.path.exists(os.path.join(possible_topdir,%0A 'keystone',%0A '__init__.py')):%0A sys.path.insert(0, possible_topdir)%0A%0Afrom keystone import cli%0Afrom keystone.common import environment%0Afrom keystone import token%0Afrom keystone.common import sql%0Afrom oslo.utils import timeutils%0A%0AWATERMARK=1000%0A%0A# Monkeypatch the sql Token class to add a method%0Afrom keystone.token.persistence.backends.sql import TokenModel%0Afrom keystone.token.persistence.backends.sql import Token%0A%0Adef monkeypatch_method(cls):%0A def decorator(func):%0A setattr(cls, func.__name__, func)%0A return func%0A return decorator%0A%0A@monkeypatch_method(Token)%0Adef list_tokens(self):%0A session = sql.get_session()%0A with session.begin():%0A now = timeutils.utcnow()%0A query = session.query(TokenModel)%0A query = query.filter(TokenModel.expires %3C now)%0A tokens = query.all()%0A if len(tokens) %3E WATERMARK:%0A print(%22Too many expired keystone tokens: %25s%22 %25 len(tokens))%0A sys.exit(1)%0A%0A# Create a class for listing the tokens and add it to the keystone-manage%0A# command list%0Aclass TokenList(cli.BaseApp):%0A %22%22%22List tokens in the 
DB%22%22%22%0A%0A name = %22token_list%22%0A%0A @classmethod%0A def main(cls):%0A token_manager = token.persistence.PersistenceManager()%0A token_manager.driver.list_tokens()%0A%0Acli.CMDS.append(TokenList)%0A%0A# Now do our thing%0Aif __name__ == '__main__':%0A environment.use_stdlib()%0A%0A dev_conf = os.path.join(possible_topdir,%0A 'etc',%0A 'keystone.conf')%0A config_files = None%0A if os.path.exists(dev_conf):%0A config_files = %5Bdev_conf%5D%0A%0A # keystone-manage wants a command as a argv, so give it token_list%0A sys.argv.append('token_list')%0A cli.main(argv=sys.argv, config_files=config_files)%0A
|
|
ac40e54d22717fbf1a2444a67198cdba66506df8
|
Add test for input setup workflow
|
cea/tests/test_inputs_setup_workflow.py
|
cea/tests/test_inputs_setup_workflow.py
|
Python
| 0 |
@@ -0,0 +1,1315 @@
+import os%0Aimport unittest%0A%0Aimport cea.config%0Afrom cea.utilities import create_polygon%0Afrom cea.datamanagement import zone_helper, surroundings_helper, terrain_helper, streets_helper, data_initializer, %5C%0A archetypes_mapper%0A%0A# Zug site coordinates%0APOLYGON_COORDINATES = %5B(8.513465734818856, 47.178027239429234), (8.515472027162078, 47.177895971877604),%0A (8.515214535096632, 47.175496635565885), (8.513139577193424, 47.175600066313542),%0A (8.513465734818856, 47.178027239429234)%5D%0A%0A%0Aclass TestInputSetupWorkflowCase(unittest.TestCase):%0A def setUp(self):%0A self.config = cea.config.Configuration(cea.config.DEFAULT_CONFIG)%0A self.config.project = os.path.expandvars(%22$%7BTEMP%7D/reference-case-open%22)%0A%0A def test_input_setup_workflow(self):%0A self.config.create_polygon.coordinates = POLYGON_COORDINATES%0A self.config.create_polygon.filename = 'site'%0A%0A data_initializer.main(self.config)%0A create_polygon.main(self.config)%0A # TODO: Mock osmnx.create_footprints_download%0A zone_helper.main(self.config)%0A surroundings_helper.main(self.config)%0A terrain_helper.main(self.config)%0A streets_helper.main(self.config)%0A archetypes_mapper.main(self.config)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
8465d9a9b2c30b0b493bdf9ba24a29e39a51c1df
|
add dbutil to compute archive_begin for HADS sites
|
scripts/dbutil/compute_hads_sts.py
|
scripts/dbutil/compute_hads_sts.py
|
Python
| 0 |
@@ -0,0 +1,1569 @@
+%22%22%22Compute the archive start time of a HADS/DCP network%22%22%22%0Afrom pyiem.network import Table as NetworkTable%0Aimport sys%0Aimport psycopg2%0Aimport datetime%0A%0ATHISYEAR = datetime.datetime.now().year%0AHADSDB = psycopg2.connect(database='hads', host='iemdb')%0AMESOSITEDB = psycopg2.connect(database='mesosite', host='iemdb')%0A%0A%0Adef do(network, sid):%0A cursor = HADSDB.cursor()%0A running = None%0A # We work backwards%0A for yr in range(THISYEAR, 2001, -1):%0A cursor.execute(%22%22%22%0A SELECT min(valid) from raw%22%22%22 + str(yr) + %22%22%22%0A WHERE station = %25s%0A %22%22%22, (sid,))%0A minv = cursor.fetchone()%5B0%5D%0A if minv is None:%0A return running%0A running = minv%0A return running%0A%0A%0Adef main(argv):%0A %22%22%22Go main Go%22%22%22%0A network = argv%5B1%5D%0A nt = NetworkTable(network)%0A for sid in nt.sts.keys():%0A sts = do(network, sid)%0A if sts is None:%0A continue%0A if (nt.sts%5Bsid%5D%5B'archive_begin'%5D is None or%0A nt.sts%5Bsid%5D%5B'archive_begin'%5D != sts):%0A osts = nt.sts%5Bsid%5D%5B'archive_begin'%5D%0A f = %22%25Y-%25m-%25d %25H:%25M%22%0A print((%22%25s %5B%25s%5D new sts: %25s OLD sts: %25s%22%0A ) %25 (sid, network, sts.strftime(f),%0A osts.strftime(f) if osts is not None else 'null'))%0A cursor = MESOSITEDB.cursor()%0A cursor.execute(%22%22%22UPDATE stations SET archive_begin = %25s%0A WHERE id = %25s and network = %25s%22%22%22, (sts, sid, network))%0A cursor.close()%0A MESOSITEDB.commit()%0A%0Aif __name__ == '__main__':%0A main(sys.argv)%0A
|
|
582b5c598da5b35032447f0eb7888051b84f844c
|
Add datetime to fast cache
|
alembic/versions/20860ffde766_add_datetime_to_fastcache.py
|
alembic/versions/20860ffde766_add_datetime_to_fastcache.py
|
Python
| 0.000001 |
@@ -0,0 +1,855 @@
+%22%22%22Add datetime to fastcache%0A%0ARevision ID: 20860ffde766%0ARevises: 471e6f7722a7%0ACreate Date: 2015-04-14 07:44:36.507406%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '20860ffde766'%0Adown_revision = '471e6f7722a7'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.add_column('TranslationFastCaches', sa.Column('datetime', sa.DateTime(), nullable=True))%0A op.create_index(u'ix_TranslationFastCaches_datetime', 'TranslationFastCaches', %5B'datetime'%5D, unique=False)%0A ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.drop_index(u'ix_TranslationFastCaches_datetime', table_name='TranslationFastCaches')%0A op.drop_column('TranslationFastCaches', 'datetime')%0A ### end Alembic commands ###%0A
|
|
f54f427c16b394ff1ea0f55875bfb9d02e7264b0
|
add SiD calculator.
|
src/get_SiD.py
|
src/get_SiD.py
|
Python
| 0 |
@@ -0,0 +1,1653 @@
+#!/usr/bin/python%0A# -*- coding: UTF-8 -*-%0A# Introduction: This script is used to calculate similarity index (SiD)%0A# Created by Xiangchen Li on 2017/3/19 21:15%0A%0Afrom collections import defaultdict%0Afrom src.global_items import genetic_code%0A%0A%0Adef get_sid(virus_rscu_file, host_rscu_file):%0A for pass_codon in %5B%22TAG%22, %22TAA%22, %22TGA%22, %22ATG%22, %22TGG%22%5D:%0A del genetic_code%5Bpass_codon%5D%0A virus_rscu_dict = defaultdict()%0A with open(virus_rscu_file, 'r') as f1:%0A for each_line in f1.readlines()%5B1:%5D:%0A v_list = each_line.strip().split('%5Ct')%0A v_codon = v_list%5B0%5D%0A v_rscu = v_list%5B1%5D%0A virus_rscu_dict%5Bv_codon%5D = float(v_rscu)%0A host_rscu_dict = defaultdict()%0A with open(host_rscu_file, 'r') as f2:%0A for each_line in f2.readlines()%5B1:%5D:%0A h_list = each_line.strip().split('%5Ct')%0A h_codon = h_list%5B0%5D%0A h_rscu = h_list%5B1%5D%0A host_rscu_dict%5Bh_codon%5D = float(h_rscu)%0A aa = 0%0A bb = 0%0A cc = 0%0A for codon in genetic_code.keys():%0A aa += virus_rscu_dict%5Bcodon%5D * host_rscu_dict%5Bcodon%5D%0A bb += pow(virus_rscu_dict%5Bcodon%5D, 2)%0A cc += pow(host_rscu_dict%5Bcodon%5D, 2)%0A %22%22%22%0A R(A,B) is defined as the cosine value of the angle included%0A between the A and B spatial vectors, and represents the degree of%0A similarity between the virus and host overall codon usage patterns.%0A%0A D(A,B) represents the potential effect of the overall codon usage%0A of the host on that of virus, and its value ranges from 0 to 1.0.%0A %22%22%22%0A rr = aa / pow(bb * cc, 0.5) # rr -%3E R(A,B)%0A dd = (1 - rr) / 2 # dd -%3E D(A,B)%0A return dd%0A
|
|
cd9f80c1567c945fe40e02af56433c49c6ddad65
|
Create lintcode_二进制求和.py
|
lintcode_二进制求和.py
|
lintcode_二进制求和.py
|
Python
| 0.000005 |
@@ -0,0 +1,1017 @@
+/**%0A * http://www.lintcode.com/zh-cn/problem/add-binary/%0A * %E7%BB%99%E5%AE%9A%E4%B8%A4%E4%B8%AA%E4%BA%8C%E8%BF%9B%E5%88%B6%E5%AD%97%E7%AC%A6%E4%B8%B2%EF%BC%8C%E8%BF%94%E5%9B%9E%E4%BB%96%E4%BB%AC%E7%9A%84%E5%92%8C%EF%BC%88%E7%94%A8%E4%BA%8C%E8%BF%9B%E5%88%B6%E8%A1%A8%E7%A4%BA%E3%80%82%0A * %E6%A0%B7%E4%BE%8B a = 11 b = 1 %E8%BF%94%E5%9B%9E 100%0A */%0A%0Aclass Solution:%0A # @param %7Bstring%7D a a number%0A # @param %7Bstring%7D b a number%0A # @return %7Bstring%7D the result%0A def addBinary(self, a, b):%0A # Write your code here%0A a = a%5B::-1%5D%0A b = b%5B::-1%5D%0A index = 0%0A result = %5B%5D%0A flag = 0%0A while (index %3C len(a)) and (index %3C len(b)):%0A res = int(a%5Bindex%5D) + int(b%5Bindex%5D) + flag%0A result.append(str(res%252))%0A flag = res / 2%0A index = index + 1%0A while index %3C len(a):%0A res = int(a%5Bindex%5D) + flag%0A result.append(str(res%252))%0A flag = res / 2%0A index = index + 1%0A while index %3C len(b):%0A res = int(b%5Bindex%5D) + flag%0A result.append(str(res%252))%0A flag = res / 2%0A index = index + 1%0A if flag != 0:%0A result.append(str(flag))%0A return ''.join(result%5B::-1%5D)%0A
|
|
65b362985d502440b12efc8a6a49ab0603354fd2
|
Add script to count emotional sentences according to LIWC
|
liwc_emotional_sentences.py
|
liwc_emotional_sentences.py
|
Python
| 0 |
@@ -0,0 +1,3294 @@
+%22%22%22Count the numbers of annotated entities and emotional sentences in the%0Acorpus that was manually annotated.%0A%0AUsage: python annotation_statistics.py %3Cdir containing the folia files with%0AEmbodiedEmotions annotations%3E%0A%22%22%22%0Afrom lxml import etree%0Afrom bs4 import BeautifulSoup%0Afrom emotools.bs4_helpers import sentence, note%0Aimport argparse%0Aimport os%0Afrom collections import Counter%0Aimport json%0Aimport codecs%0A%0Aif __name__ == '__main__':%0A parser = argparse.ArgumentParser()%0A parser.add_argument('dir_name', help='the name of the dir containing the '%0A 'FoLiA XML files that should be processed.')%0A args = parser.parse_args()%0A%0A dir_name = args.dir_name%0A%0A act_tag = '%7Bhttp://ilk.uvt.nl/folia%7Ddiv'%0A%0A cur_dir = os.getcwd()%0A os.chdir(dir_name)%0A%0A folia_counter = 0%0A num_sent = 0%0A num_emotional = 0%0A stats = Counter()%0A entity_words = %7B%7D%0A text_stats = %7B%7D%0A emotional_cats = %5B'liwc-Posemo', 'liwc-Negemo'%5D%0A%0A print 'Files'%0A for file_name in os.listdir(dir_name):%0A folia_counter += 1%0A print '%7B%7D'.format(file_name)%0A%0A text_id = file_name%5B0:13%5D%0A text_stats%5Btext_id%5D = Counter()%0A%0A sents = set()%0A # load document%0A context = etree.iterparse(file_name,%0A events=('start', 'end'),%0A tag=act_tag,%0A huge_tree=True)%0A for event, elem in context:%0A if event == 'end' and elem.get('class') == 'act':%0A # load act into memory%0A act_xml = BeautifulSoup(etree.tostring(elem), 'xml')%0A sentences = act_xml.find_all(sentence)%0A s = None%0A for sent in sentences:%0A if not note(sent.parent):%0A # some t elements appear to be empty (this is not%0A # allowed, but it happens). 
So, check whether there is%0A # a string to add before adding it.%0A if sent.t:%0A if sent.t.string:%0A s = sent.t.string%0A%0A # calculate stats only for unique sentences in text%0A if s and s not in sents:%0A sents.add(s)%0A num_sent += 1%0A%0A entities = sent.find_all('entity')%0A emotional = False%0A for entity in entities:%0A e = entity.attrs.get('class')%0A if e in emotional_cats:%0A emotional = True%0A%0A if emotional:%0A num_emotional += 1%0A%0A del context%0A # clear memory%0A # results in segmentation fault (for some reason)%0A #if delete:%0A # elem.clear()%0A # while elem.getprevious() is not None:%0A # del elem.getparent()%5B0%5D%0A # del context%0A%0A # print stats%0A print '%5CnBasic stats'%0A print '%7B%7D sentences in %7B%7D files'.format(num_sent, folia_counter)%0A perc = float(num_emotional)/float(num_sent)*100.0%0A print '%7B%7D emotional sentences (%7B:.2f%7D%25)'.format(num_emotional, perc)%0A
|
|
c910e1898c1e49c60877e092032daebd289c6f31
|
add scripts to export from env file to profile
|
scripts/env2profile.py
|
scripts/env2profile.py
|
Python
| 0 |
@@ -0,0 +1,719 @@
+#!/usr/bin/evn python%0Aimport os%0Aimport re%0Aimport sys%0A%0A%0Aline_re = re.compile('(%5CS+?)%5Cs*?=%5Cs*?(%5CS+?)$')%0A%0A%0Adef env2profile(env_path, out_path):%0A out_lines = list()%0A with open(env_path, 'r') as env_file:%0A for line in env_file.readlines():%0A matched = line_re.findall(line)%0A if matched and len(matched%5B0%5D) == 2:%0A name, value = matched%5B0%5D%0A out_lines.append('export %25s=%25s' %25 (name, value))%0A with open(out_path, 'w') as out_file:%0A out_file.write('%5Cn'.join(out_lines))%0A%0A%0Aif __name__ == '__main__':%0A if len(sys.argv) == 3:%0A _, env_path, out_path = sys.argv%0A env2profile(env_path, out_path)%0A else:%0A print 'Wrong numbers of args'%0A
|
|
8351d98c3036021507a75b65e424d02942f09633
|
Add alembic upgrade info
|
alembic/versions/3d3c72ecbc0d_add_rtp_task_resource_record_table.py
|
alembic/versions/3d3c72ecbc0d_add_rtp_task_resource_record_table.py
|
Python
| 0 |
@@ -0,0 +1,1123 @@
+%22%22%22Add rtp_task_resource_record table%0A%0ARevision ID: 3d3c72ecbc0d%0ARevises: c9a1ff35c6ed%0ACreate Date: 2018-01-20 21:35:16.716477+00:00%0A%0A%22%22%22%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0A# revision identifiers, used by Alembic.%0Arevision = '3d3c72ecbc0d'%0Adown_revision = 'c9a1ff35c6ed'%0Abranch_labels = None%0Adepends_on = None%0A%0A%0Adef upgrade():%0A # ### commands auto generated by Alembic - please adjust! ###%0A op.create_table('rtp_task_resource_record',%0A sa.Column('obsid', sa.BigInteger(), nullable=False),%0A sa.Column('task_name', sa.Text(), nullable=False),%0A sa.Column('start_time', sa.BigInteger(), nullable=False),%0A sa.Column('stop_time', sa.BigInteger(), nullable=False),%0A sa.Column('max_memory', sa.Float(), nullable=True),%0A sa.Column('avg_cpu_load', sa.Float(), nullable=True),%0A sa.ForeignKeyConstraint(%5B'obsid'%5D, %5B'hera_obs.obsid'%5D, ),%0A sa.PrimaryKeyConstraint('obsid', 'task_name')%0A )%0A # ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A # ### commands auto generated by Alembic - please adjust! ###%0A op.drop_table('rtp_task_resource_record')%0A # ### end Alembic commands ###%0A
|
|
843e6f0ccb73a387e151d7f40ef7a2b4fc1597e0
|
test getmap
|
pathmap/test/test_getmap.py
|
pathmap/test/test_getmap.py
|
Python
| 0.000001 |
@@ -0,0 +1,148 @@
+import unittest%0Afrom .. getmap import MapDownloader%0A%0Aclass TestGetmap(unittest.TestCase):%0A pass%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
cc89c5222ec7f6d6f95b5efdce3958b3ca33814e
|
Add basic functionality and regression tests for ACA dark cal module
|
mica/archive/tests/test_aca_dark_cal.py
|
mica/archive/tests/test_aca_dark_cal.py
|
Python
| 0 |
@@ -0,0 +1,1363 @@
+%22%22%22%0ABasic functionality and regression tests for ACA dark cal module.%0A%22%22%22%0A%0Aimport numpy as np%0A%0Afrom ..aca_dark import dark_cal%0A%0A%0Adef test_date_to_dark_id():%0A assert dark_cal.date_to_dark_id('2011-01-15T12:00:00') == '2011015'%0A%0A%0Adef test_dark_id_to_date():%0A assert dark_cal.dark_id_to_date('2011015') == '2011:015'%0A%0A%0Adef test_dark_temp_scale():%0A scale = dark_cal.dark_temp_scale(-10., -14)%0A assert np.allclose(scale, 0.70)%0A%0A%0Adef test_get_dark_cal_id():%0A assert dark_cal.get_dark_cal_id('2007:008', 'nearest') == '2007006'%0A assert dark_cal.get_dark_cal_id('2007:008', 'before') == '2007006'%0A assert dark_cal.get_dark_cal_id('2007:008', 'after') == '2007069'%0A%0A%0Adef test_get_dark_cal_image():%0A image = dark_cal.get_dark_cal_image('2007:008')%0A assert image.shape == (1024, 1024)%0A%0A%0Adef test_get_dark_cal_props():%0A props = dark_cal.get_dark_cal_props('2007:008')%0A assert len(props%5B'replicas'%5D) == 5%0A assert props%5B'start'%5D == '2007:006:01:56:46.817'%0A%0A props = dark_cal.get_dark_cal_props('2007:008', include_image=True)%0A assert len(props%5B'replicas'%5D) == 5%0A assert props%5B'start'%5D == '2007:006:01:56:46.817'%0A assert props%5B'image'%5D.shape == (1024, 1024)%0A%0A%0Adef test_get_dark_cal_props_table():%0A props = dark_cal.get_dark_cal_props_table('2007:001', '2008:001')%0A assert np.allclose(props%5B'eb'%5D, %5B24.6, 25.89, 51.13, 1.9%5D)%0A
|
|
dd3ed1c8fdf9024a7978a1443baf8ca101f21642
|
add demo object for channel
|
server/Mars/MarsRpc/ChannelObjs.py
|
server/Mars/MarsRpc/ChannelObjs.py
|
Python
| 0.000001 |
@@ -0,0 +1,2327 @@
+#!/usr/bin/env python%0A# -*- encoding: utf-8 -*-%0A#%0A# Copyright (c) 2016 ASMlover. All rights reserved.%0A#%0A# Redistribution and use in source and binary forms, with or without%0A# modification, are permitted provided that the following conditions%0A# are met:%0A#%0A# * Redistributions of source code must retain the above copyright%0A# notice, this list ofconditions and the following disclaimer.%0A#%0A# * Redistributions in binary form must reproduce the above copyright%0A# notice, this list of conditions and the following disclaimer in%0A# the documentation and/or other materialsprovided with the%0A# distribution.%0A#%0A# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS%0A# %22AS IS%22 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT%0A# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS%0A# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE%0A# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,%0A# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,%0A# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;%0A# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER%0A# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT%0A# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN%0A# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE%0A# POSSIBILITY OF SUCH DAMAGE.%0A%0Afrom MarsLog.LogManager import LogManager%0Afrom Utils.Const import *%0A%0Aclass EchoChannelObj(object):%0A def __init__(self, connector):%0A super(EchoChannelObj, self).__init__()%0A self.logger = LogManager.getLogger('MarsRpc.EchoChannelObj')%0A self.connector = connector%0A%0A def onDisconnected(self):%0A self.logger.info('connector closed')%0A%0A def onRead(self, data):%0A self.logger.info('received data: %25s', data)%0A self.connector.writeData(data)%0A return MARS_RC_SUCCESSED%0A%0Aclass LoggingChannelObj(object):%0A def __init__(self, 
connector):%0A super(LoggingChannelObj, self).__init__()%0A self.loggero = LogManager.getLogger('MarsRpc.LoggingChannelObj')%0A self.connector = connector%0A%0A def onDisconnected(self):%0A self.logger.info('connector closed')%0A%0A def onRead(self, data):%0A self.logger.info('received data: %25s', data)%0A return MARS_RC_SUCCESSED%0A
|
|
81b9d141295ee2a8b31974aa86d89b80dfefe3ca
|
Create question5.py
|
chengjun/question5.py
|
chengjun/question5.py
|
Python
| 0.99937 |
@@ -0,0 +1,1117 @@
+#!usr/bin/python%0Aimport re%0Aclass extrac_url():%0A%09def __init__(self,url):%0A%09%09self.url = url%0A%09def pater(self):%0A%09%09url = self.url%0A%09%09%5Bscheme1,url_rest%5D = url.split('//')%0A%09%09scheme = re.search(r'(.+)//',url).group(1)%0A%09%09#print %22scheme is %25s %22 %25 scheme%0A%09%09netloc = re.search(r'//(.+)/',url).group(1)%0A%09%09#print %22netloc is %25s %22 %25 netloc%0A%09%09path = re.search(r'(/.+)%5C?',url_rest).group(1)%0A%09%09#print 'path is %25s'%25path%0A%09%09#tt =re.compile(r'%5C?.+')%0A%09%09query_param = re.search(r'%5C?(.+)#',url).group(1)%0A%09%09query_params=%7B%7D%0A%09%09for item in re.split(r'&', query_param):%0A%09%09%09#print item%0A%09%09%09index = item.find('=')%0A%09%09%09query_params%5Bitem%5B:index%5D%5D = item%5Bindex+1:%5D%0A%09%09#print %22query_params is %25s %22 %25query_params%0A%09%09fragment = re.search(r'#(.+)',url).group(1)%0A%09%09#print %22fragment is %25s %22 %25self.fragment%0A%09%09return %5Bscheme,netloc,path,query_params,fragment%5D%0A%09%09%0Aif __name__==%22__main__%22:%0A%09ttt = extrac_url(%22http://mp.weixin.qq.com/s?__biz=MzA4MjEyNTA5Mw==&mid=2652566513#wechat_redirect%22).pater()%0A%09print %22scheme is %25s %22 %25 ttt%5B0%5D%0A%09print %22netloc is %25s %22 %25 ttt%5B1%5D%0A%09print 'path is %25s'%25ttt%5B2%5D%0A%09print 'query_params is %25s'%25ttt%5B3%5D%0A%09print 'fragment is %25s'%25ttt%5B4%5D%0A%09#rint ttt%0A
|
|
5eefc407b8f51c017a3f4193c88f6dc188a88601
|
Include OpenCV based Python CLAHE script
|
src/CLAHE_dir.py
|
src/CLAHE_dir.py
|
Python
| 0 |
@@ -0,0 +1,1125 @@
+from PIL import Image%0Aimport numpy as np%0Aimport h5py%0Aimport os%0Aimport sys%0Aimport cv2%0A%0A# Maybe consider implemeting more involved auto-balancing%0A# http://wiki.cmci.info/documents/120206pyip_cooking/python_imagej_cookbook#automatic_brightnesscontrast_button%0A%0Adef apply_clahe_to_H5(fn, clahe):%0A%09f = h5py.File(fn, %22r+%22)%0A%09img = f%5B%22/img%22%5D%0A%09# apply clahe%0A%09arr = clahe.apply(np.array(img))%0A%09# stretch distribution across 0-255 range%0A%09max_a = np.max(arr)%0A%09min_a = np.min(arr)%0A%09alpha = 255.0/(max_a - min_a)%0A%09beta = -alpha*min_a%0A%09arr = (alpha*arr + beta).astype(np.uint8)%0A%09# resave image%0A%09img%5B...%5D = arr%0A%09f.close()%0A%0Adef get_H5_array(fn):%0A%09f = h5py.File(fn, %22r%22)%0A%09return np.array(f%5B%22/img%22%5D)%0A%0Adef main():%0A%09%22%22%22Make TIF images of all H5 matrices in directory%0A%09%22%22%22%0A%09dir = os.getcwd()%0A%09# file = sys.argv%5B1%5D%0A%09files = os.listdir(dir)%0A%09clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(63,63))%0A%09for file in files:%0A%09%09if file.endswith(%221,1_prealigned.h5%22):%0A%09%09%09print %22Applying CLAHE to %22 + file%0A%09%09# if file == 'Tile_r1-c7_S2-W001_sec15.h5':%0A%09%09%09fn = os.path.join(dir, file)%0A%09%09%09apply_clahe_to_H5(fn, clahe)%0A%0A# if __name__ == '__main__':%0A# %09main()
|
|
5fbd3d187c0a1c164c34320ad504030206429c19
|
Use --first-parent when collecting commits.
|
asv/plugins/git.py
|
asv/plugins/git.py
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Supports git repositories for the benchmarked project.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
import re
from ..console import log
from ..repo import Repo
from .. import util
class Git(Repo):
def __init__(self, url, path):
self._git = util.which("git")
self._path = os.path.abspath(path)
if not os.path.exists(self._path):
log.info("Cloning project")
self._run_git(['clone', url, self._path], chdir=False)
log.info("Fetching recent changes")
self.pull()
@property
def path(self):
return self._path
@classmethod
def url_match(cls, url):
regexes = [
'^https?://.*?\.git$',
'^git@.*?\.git$']
for regex in regexes:
if re.match(regex, url):
return True
return False
def _run_git(self, args, chdir=True, **kwargs):
if chdir:
orig_dir = os.getcwd()
os.chdir(self._path)
try:
return util.check_output(
[self._git] + args, **kwargs)
finally:
if chdir:
os.chdir(orig_dir)
def pull(self):
self._run_git(['fetch', 'origin'])
self.checkout('master')
self._run_git(['pull'])
def checkout(self, branch='master'):
self._run_git(['checkout', branch])
self.clean()
def clean(self):
self._run_git(['clean', '-fxd'])
def get_date(self, hash):
# TODO: This works on Linux, but should be extended for other platforms
return int(self._run_git(
['show', hash, '--quiet', '--format=format:%ct'],
dots=False).strip().split()[0]) * 1000
def get_hashes_from_range(self, range_spec):
if range_spec == 'master':
range_spec = 'master^!'
return self._run_git(
['log', '--quiet', '--format=format:%H', range_spec], dots=False
).strip().split()
def get_hash_from_tag(self, tag):
return self._run_git(
['show', tag, '--quiet', '--format=format:%H'],
dots=False).strip().split()[0]
def get_tags(self):
return self._run_git(
['tag', '-l']).strip().split()
def get_date_from_tag(self, tag):
return self.get_date(tag + "^{commit}")
|
Python
| 0 |
@@ -2047,32 +2047,50 @@
log', '--quiet',
+ '--first-parent',
'--format=forma
@@ -2095,16 +2095,29 @@
mat:%25H',
+%0A
range_s
|
d4d9d9ac478bdaf2385ecff0a43bfc8fe4bb11c7
|
Add decorator for ignoring DeprecationWarnings
|
oscar/test/decorators.py
|
oscar/test/decorators.py
|
import mock
def dataProvider(fn_data_provider):
"""
Data provider decorator, allows another callable to provide the data for
the test. This is a nice feature from PHPUnit which is very useful. Am
sticking with the JUnit style naming as unittest does this already.
Implementation based on:
http://melp.nl/2011/02/phpunit-style-dataprovider-in-python-unit-test/#more-525
"""
def test_decorator(test_method):
def execute_test_method_with_each_data_set(self):
for data in fn_data_provider():
if (len(data) == 2 and isinstance(data[0], tuple) and
isinstance(data[1], dict)):
# Both args and kwargs being provided
args, kwargs = data[:]
else:
args, kwargs = data, {}
try:
test_method(self, *args, **kwargs)
except AssertionError, e:
self.fail("%s (Provided data: %s, %s)" % (e, args, kwargs))
return execute_test_method_with_each_data_set
return test_decorator
# This will be in Oscar 0.6 - it should be functools though!
def compose(*functions):
"""
Compose functions
This is useful for combining decorators.
"""
def _composed(*args):
for fn in functions:
try:
args = fn(*args)
except TypeError:
# args must be scalar so we don't try to expand it
args = fn(args)
return args
return _composed
no_database = mock.patch(
'django.db.backends.util.CursorWrapper', mock.Mock(
side_effect=RuntimeError("Using the database is not permitted!")))
no_filesystem = mock.patch('__builtin__.open', mock.Mock(
side_effect=RuntimeError("Using the filesystem is not permitted!")))
no_sockets = mock.patch('socket.getaddrinfo', mock.Mock(
side_effect=RuntimeError("Using sockets is not permitted!")))
no_externals = no_diggity = compose(
no_database, no_filesystem, no_sockets) # = no doubt
|
Python
| 0 |
@@ -1,8 +1,53 @@
+import warnings%0Afrom functools import wraps%0A%0A
import m
@@ -2107,8 +2107,1061 @@
o doubt%0A
+%0A%0Adef ignore_deprecation_warnings(target):%0A %22%22%22%0A Ignore deprecation warnings for the wrapped TestCase or test method%0A%0A This is useful as the test runner can be set to raise an exception on a%0A deprecation warning. Using this decorator allows tests to exercise%0A deprecated code without an exception.%0A %22%22%22%0A if not target.__class__.__name__ == 'instancemethod':%0A # Decorate every test method in class%0A for attr in dir(target):%0A if not attr.startswith('test'):%0A continue%0A attr_value = getattr(target, attr)%0A if not hasattr(attr_value, '__call__'):%0A continue%0A setattr(target, attr, ignore_deprecation_warnings(attr_value))%0A return target%0A else:%0A # Decorate single test method%0A @wraps(target)%0A def _wrapped(*args, **kwargs):%0A with warnings.catch_warnings():%0A warnings.filterwarnings(%22ignore%22, category=DeprecationWarning)%0A return target(*args, **kwargs)%0A return _wrapped%0A
|
f0af14b8fcd420b63a47e18938664e14cf9ea968
|
Add generic asynchronous/synchronous run command
|
subiquity/utils.py
|
subiquity/utils.py
|
Python
| 0 |
@@ -0,0 +1,3067 @@
+# Copyright 2015 Canonical, Ltd.%0A#%0A# This program is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as%0A# published by the Free Software Foundation, either version 3 of the%0A# License, or (at your option) any later version.%0A#%0A# This program is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the GNU Affero General Public License%0A# along with this program. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0Aimport errno%0Aimport subprocess%0Aimport os%0Aimport codecs%0Aimport pty%0Afrom tornado.process import Subprocess%0Afrom subiquity.async import Async%0Aimport shlex%0Aimport logging%0A%0Alog = logging.getLogger(%22subiquity.utils%22)%0ASTREAM = Subprocess.STREAM%0A%0A%0Adef run_command_async(cmd, streaming_callback=None):%0A return Async.pool.submit(run_command, cmd, streaming_callback)%0A%0A%0Adef run_command(cmd, streaming_callback=None):%0A %22%22%22 Executes %60cmd%60 sending its output to %60streaming_callback%60%0A %22%22%22%0A if isinstance(cmd, str):%0A cmd = shlex.split(cmd)%0A log.debug(%22Running command: %7B%7D%22.format(cmd))%0A stdoutm, stdouts = pty.openpty()%0A proc = subprocess.Popen(cmd,%0A stdout=stdouts,%0A stderr=subprocess.PIPE)%0A os.close(stdouts)%0A decoder = codecs.getincrementaldecoder('utf-8')()%0A%0A def last_ten_lines(s):%0A chunk = s%5B-1500:%5D%0A lines = chunk.splitlines(True)%0A return ''.join(lines%5B-10:%5D).replace('%5Cr', '')%0A%0A decoded_output = %22%22%0A try:%0A while proc.poll() is None:%0A try:%0A b = os.read(stdoutm, 512)%0A except OSError as e:%0A if e.errno != errno.EIO:%0A raise%0A break%0A else:%0A final = False%0A if not b:%0A final = True%0A decoded_chars = decoder.decode(b, final)%0A if decoded_chars is 
None:%0A continue%0A%0A decoded_output += decoded_chars%0A if streaming_callback:%0A ls = last_ten_lines(decoded_output)%0A%0A streaming_callback(ls)%0A if final:%0A break%0A finally:%0A os.close(stdoutm)%0A if proc.poll() is None:%0A proc.kill()%0A proc.wait()%0A%0A errors = %5Bl.decode('utf-8') for l in proc.stderr.readlines()%5D%0A if streaming_callback:%0A streaming_callback(last_ten_lines(decoded_output))%0A%0A errors = ''.join(errors)%0A%0A if proc.returncode == 0:%0A return decoded_output.strip()%0A else:%0A log.debug(%22Error with command: %22%0A %22%5BOutput%5D '%7B%7D' %5BError%5D '%7B%7D'%22.format(%0A decoded_output.strip(),%0A errors.strip()))%0A raise Exception(%22Problem running command: %5BError%5D '%7B%7D'%22.format(%0A errors.strip()))%0A
|
|
2aab90ab9e4a32bef1496149a2780b7385318043
|
Add tests
|
symengine/tests/test_cse.py
|
symengine/tests/test_cse.py
|
Python
| 0.000001 |
@@ -0,0 +1,469 @@
+from symengine import cse, sqrt, symbols%0A%0Adef test_cse_single():%0A x, y, x0 = symbols(%22x, y, x0%22)%0A e = pow(x + y, 2) + sqrt(x + y)%0A substs, reduced = cse(%5Be%5D)%0A assert substs == %5B(x0, x + y)%5D%0A assert reduced == %5Bsqrt(x0) + x0**2%5D%0A%0A%0Adef test_multiple_expressions():%0A w, x, y, z, x0 = symbols(%22w, x, y, z, x0%22)%0A e1 = (x + y)*z%0A e2 = (x + y)*w%0A substs, reduced = cse(%5Be1, e2%5D)%0A assert substs == %5B(x0, x + y)%5D%0A assert reduced == %5Bx0*z, x0*w%5D%0A
|
|
5a21b66f7ab77f419245d8c07d7473a6e1600fc4
|
Add crawler for 'Hark, A Vagrant'
|
comics/crawler/crawlers/harkavagrant.py
|
comics/crawler/crawlers/harkavagrant.py
|
Python
| 0.000002 |
@@ -0,0 +1,982 @@
+from comics.crawler.base import BaseComicCrawler%0Afrom comics.crawler.meta import BaseComicMeta%0A%0Aclass ComicMeta(BaseComicMeta):%0A name = 'Hark, A Vagrant!'%0A language = 'en'%0A url = 'http://www.harkavagrant.com/'%0A start_date = '2008-05-01'%0A history_capable_days = 120%0A schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'%0A time_zone = -8%0A rights = 'Kate Beaton'%0A%0Aclass ComicCrawler(BaseComicCrawler):%0A def _get_url(self):%0A self.parse_feed('http://www.rsspect.com/rss/vagrant.xml')%0A%0A for entry in self.feed.entries:%0A if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:%0A pieces = entry.summary.split('%22')%0A for i, piece in enumerate(pieces):%0A if piece.count('src='):%0A self.url = pieces%5Bi + 1%5D%0A if piece.count('title='):%0A self.title = pieces%5Bi + 1%5D%0A if self.url and self.title:%0A return%0A
|
|
2f188d3d43741821126e381af9753e0e3d7be231
|
test hello python file
|
t/library/hello.py
|
t/library/hello.py
|
Python
| 0.00043 |
@@ -0,0 +1,43 @@
+import ngx%0A%0Angx.echo(%22Hello, Ngx_python%5Cn%22)
|
|
c2a0b66ec1ad7f32e1291fc6a2312d2a4a06a6e3
|
Add class-file to right location
|
src/mmhandler.py
|
src/mmhandler.py
|
Python
| 0.000001 |
@@ -0,0 +1,24 @@
+class MmHandler:%0A pass%0A
|
|
4f155252bf9d9508b955d7eecf589da347bff817
|
Add a setup.cfg.
|
setup.py
|
setup.py
|
Python
| 0 |
@@ -0,0 +1,714 @@
+import setuptools%0D%0A%0D%0Awith open(%22README.md%22, %22r%22) as fh:%0D%0A long_description = fh.read()%0D%0A%0D%0Asetuptools.setup(%0D%0A name=%22imperial-painter-adam-thomas%22,%0D%0A version=%221.0.0%22,%0D%0A author=%22Adam Thomas%22,%0D%0A author_email=%[email protected]%22,%0D%0A description=%22A tool for generating prototype cards from Excel files and Django templates%22,%0D%0A long_description=long_description,%0D%0A long_description_content_type=%22text/markdown%22,%0D%0A url=%22https://github.com/adam-thomas/imperial-painter%22,%0D%0A packages=setuptools.find_packages(),%0D%0A classifiers=%5B%0D%0A %22Programming Language :: Python :: 3%22,%0D%0A %22License :: OSI Approved :: MIT License%22,%0D%0A %22Operating System :: OS Independent%22,%0D%0A %5D,%0D%0A)
|
|
3d59614f45cb14e7a053d62f274cb42fbedccc12
|
Add test for parsing the tasks with optional modstrings
|
tests/test_vwtask_parsing.py
|
tests/test_vwtask_parsing.py
|
# -*- coding: utf-8 -*-
from datetime import datetime
from base import MockVim, MockCache
import sys
from tasklib import local_zone
class TestParsingVimwikiTask(object):
def setup(self):
self.mockvim = MockVim()
self.cache = MockCache()
sys.modules['vim'] = self.mockvim
from taskwiki.vwtask import VimwikiTask
self.VimwikiTask = VimwikiTask
def teardown(self):
self.mockvim.reset()
self.cache.reset()
def test_simple(self):
self.mockvim.current.buffer[0] = "* [ ] This is task description"
vwtask = self.VimwikiTask.from_line(self.cache, 0)
assert vwtask['description'] == "This is task description"
assert vwtask['uuid'] == None
assert vwtask['priority'] == None
assert vwtask['due'] == None
assert vwtask['indent'] == ''
def test_simple_with_unicode(self):
self.mockvim.current.buffer[0] = "* [ ] This is täsk description"
vwtask = self.VimwikiTask.from_line(self.cache, 0)
assert vwtask['description'] == u"This is täsk description"
assert vwtask['uuid'] == None
assert vwtask['priority'] == None
assert vwtask['due'] == None
assert vwtask['indent'] == ''
def test_due_full(self):
self.mockvim.current.buffer[0] = "* [ ] Random task (2015-08-08 15:15)"
vwtask = self.VimwikiTask.from_line(self.cache, 0)
assert vwtask['description'] == u"Random task"
assert vwtask['due'] == local_zone.localize(datetime(2015,8,8,15,15))
assert vwtask['uuid'] == None
assert vwtask['priority'] == None
assert vwtask['indent'] == ''
def test_due_short(self):
self.mockvim.current.buffer[0] = "* [ ] Random task (2015-08-08)"
vwtask = self.VimwikiTask.from_line(self.cache, 0)
assert vwtask['description'] == u"Random task"
assert vwtask['due'] == local_zone.localize(datetime(2015,8,8,0,0))
assert vwtask['uuid'] == None
assert vwtask['priority'] == None
assert vwtask['indent'] == ''
def test_priority_low(self):
self.mockvim.current.buffer[0] = "* [ ] Semi-Important task !"
vwtask = self.VimwikiTask.from_line(self.cache, 0)
assert vwtask['description'] == u"Semi-Important task"
assert vwtask['priority'] == 'L'
assert vwtask['uuid'] == None
def test_priority_medium(self):
self.mockvim.current.buffer[0] = "* [ ] Important task !!"
vwtask = self.VimwikiTask.from_line(self.cache, 0)
assert vwtask['description'] == u"Important task"
assert vwtask['priority'] == 'M'
assert vwtask['uuid'] == None
def test_priority_high(self):
self.mockvim.current.buffer[0] = "* [ ] Very important task !!!"
vwtask = self.VimwikiTask.from_line(self.cache, 0)
assert vwtask['description'] == u"Very important task"
assert vwtask['priority'] == 'H'
assert vwtask['uuid'] == None
assert vwtask['due'] == None
def test_priority_and_due(self):
self.mockvim.current.buffer[0] = "* [ ] Due today !!! (2015-08-08)"
vwtask = self.VimwikiTask.from_line(self.cache, 0)
assert vwtask['description'] == u"Due today"
assert vwtask['priority'] == 'H'
assert vwtask['due'] == local_zone.localize(datetime(2015,8,8))
assert vwtask['uuid'] == None
|
Python
| 0 |
@@ -3393,28 +3393,413 @@
ert vwtask%5B'uuid'%5D == None%0A%0A
+ def test_added_modstring(self):%0A self.mockvim.current.buffer%5B0%5D = %22* %5B %5D Home task -- project:Home%22%0A vwtask = self.VimwikiTask.from_line(self.cache, 0)%0A%0A assert vwtask%5B'description'%5D == u%22Home task%22%0A assert vwtask%5B'project'%5D == u%22Home%22%0A assert vwtask%5B'priority'%5D == None%0A assert vwtask%5B'due'%5D == None%0A assert vwtask%5B'uuid'%5D == None%0A
|
f874c337e0d0bb8cce8cfe6523c0d06c37b93198
|
add basic setup.py definition
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,258 @@
+from distutils.core import setup%0A%0Asetup(%0A name='SaasyDjangoRestFramework',%0A version='0.1dev',%0A packages=%5B%5D,%0A license='Creative Commons Attribution-Noncommercial-Share Alike license',%0A description=%22SaaS plugin for the django rest framework%22,%0A)%0A
|
|
63143c94cef353d7bae13f7b13650801bb901c94
|
Test for explicit start/end args to str methods for unicode.
|
tests/unicode/unicode_pos.py
|
tests/unicode/unicode_pos.py
|
Python
| 0 |
@@ -0,0 +1,169 @@
+# str methods with explicit start/end pos%0Aprint(%22%D0%9F%D1%80%D0%B8%D0%B2%D0%B5%D1%82%22.startswith(%22%D0%9F%22))%0Aprint(%22%D0%9F%D1%80%D0%B8%D0%B2%D0%B5%D1%82%22.startswith(%22%D1%80%22, 1))%0Aprint(%22%D0%B0%D0%B1%D0%B2%D0%B1%D0%B0%22.find(%22%D0%B0%22, 1))%0Aprint(%22%D0%B0%D0%B1%D0%B2%D0%B1%D0%B0%22.find(%22%D0%B0%22, 1, -1))%0A
|
|
e0b17a1778fb8946adff14614098ba6d34014746
|
add some more testing
|
test/test_route.py
|
test/test_route.py
|
import unittest
import bottle
from tools import api
class TestRoute(unittest.TestCase):
@api('0.12')
def test_callback_inspection(self):
def x(a, b): pass
def d(f):
def w():
return f()
return w
route = bottle.Route(None, None, None, d(x))
self.assertEqual(route.get_undecorated_callback(), x)
self.assertEqual(set(route.get_callback_args()), set(['a', 'b']))
def d2(foo):
def d(f):
def w():
return f()
return w
return d
route = bottle.Route(None, None, None, d2('foo')(x))
self.assertEqual(route.get_undecorated_callback(), x)
self.assertEqual(set(route.get_callback_args()), set(['a', 'b']))
def test_callback_inspection_multiple_args(self):
# decorator with argument, modifying kwargs
def d2(f="1"):
def d(fn):
def w(*args, **kwargs):
# modification of kwargs WITH the decorator argument
# is necessary requirement for the error
kwargs["a"] = f
return fn(*args, **kwargs)
return w
return d
@d2(f='foo')
def x(a, b):
return
route = bottle.Route(None, None, None, x)
# triggers the "TypeError: 'foo' is not a Python function"
self.assertEqual(set(route.get_callback_args()), set(['a', 'b']))
if bottle.py3k:
def test_callback_inspection_newsig(self):
env = {}
eval(compile('def foo(a, *, b=5): pass', '<foo>', 'exec'), env, env)
route = bottle.Route(None, None, None, env['foo'])
self.assertEqual(set(route.get_callback_args()), set(['a', 'b']))
|
Python
| 0 |
@@ -45,16 +45,466 @@
ort api%0A
+from bottle import _re_flatten%0A%0A%0Aclass TestReFlatten(unittest.TestCase):%0A%0A def test_re_flatten(self):%0A self.assertEqual(_re_flatten(r%22(?:aaa)(_bbb)%22), '(?:aaa)(?:_bbb)')%0A self.assertEqual(_re_flatten(r%22(aaa)(_bbb)%22), '(?:aaa)(?:_bbb)')%0A self.assertEqual(_re_flatten(r%22aaa)(_bbb)%22), 'aaa)(?:_bbb)')%0A self.assertEqual(_re_flatten(r%22aaa(_bbb)%22), 'aaa(?:_bbb)')%0A self.assertEqual(_re_flatten(r%22aaa_bbb%22), 'aaa_bbb')%0A
%0A%0Aclass
@@ -704,24 +704,16 @@
eturn w%0A
-
%0A
|
5a3971a3048adec420796ad5a781f0a84eca7d31
|
Remove transifex-client dev dependency
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import re
import sys
from setuptools import setup, find_packages
# requirements
setup_requirements = ['pytest-runner'] if {'pytest', 'test', 'ptr'}.intersection(sys.argv) else []
install_requirements = ['guessit>=2.0.1', 'babelfish>=0.5.2', 'enzyme>=0.4.1', 'beautifulsoup4>=4.2.0',
'requests>=2.0', 'click>=4.0', 'dogpile.cache>=0.5.4', 'stevedore>=1.0.0',
'chardet>=2.3.0', 'pysrt>=1.0.1', 'six>=1.9.0']
test_requirements = ['sympy', 'vcrpy>=1.6.1', 'pytest', 'pytest-pep8', 'pytest-flakes', 'pytest-cov']
if sys.version_info < (3, 3):
test_requirements.append('mock')
dev_requirements = ['tox', 'sphinx', 'transifex-client', 'wheel']
# package informations
with io.open('subliminal/__init__.py', 'r') as f:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]$', f.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with io.open('README.rst', 'r', encoding='utf-8') as f:
readme = f.read()
with io.open('HISTORY.rst', 'r', encoding='utf-8') as f:
history = f.read()
setup(name='subliminal',
version=version,
license='MIT',
description='Subtitles, faster than your thoughts',
long_description=readme + '\n\n' + history,
keywords='subtitle subtitles video movie episode tv show',
url='https://github.com/Diaoul/subliminal',
author='Antoine Bertin',
author_email='[email protected]',
packages=find_packages(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Multimedia :: Video'
],
entry_points={
'subliminal.providers': [
'addic7ed = subliminal.providers.addic7ed:Addic7edProvider',
'opensubtitles = subliminal.providers.opensubtitles:OpenSubtitlesProvider',
'podnapisi = subliminal.providers.podnapisi:PodnapisiProvider',
'subscenter = subliminal.providers.subscenter:SubsCenterProvider',
'thesubdb = subliminal.providers.thesubdb:TheSubDBProvider',
'tvsubtitles = subliminal.providers.tvsubtitles:TVsubtitlesProvider'
],
'babelfish.language_converters': [
'addic7ed = subliminal.converters.addic7ed:Addic7edConverter',
'thesubdb = subliminal.converters.thesubdb:TheSubDBConverter',
'tvsubtitles = subliminal.converters.tvsubtitles:TVsubtitlesConverter'
],
'console_scripts': [
'subliminal = subliminal.cli:subliminal'
]
},
setup_requires=setup_requirements,
install_requires=install_requirements,
tests_require=test_requirements,
extras_require={
'test': test_requirements,
'dev': dev_requirements
})
|
Python
| 0 |
@@ -718,28 +718,8 @@
nx',
- 'transifex-client',
'wh
|
601fd8a7b4fea5db2f23741735e6e7f1332b4417
|
Fix issue #949 - Add mock as dependency
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from __future__ import unicode_literals
from setuptools import setup, find_packages
install_requires = [
"Jinja2>=2.8",
"boto>=2.36.0",
"boto3>=1.2.1",
"cookies",
"requests>=2.0",
"xmltodict",
"dicttoxml",
"six",
"werkzeug",
"pyaml",
"pytz",
"python-dateutil",
]
extras_require = {
'server': ['flask'],
}
setup(
name='moto',
version='1.0.0',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',
author_email='[email protected]',
url='https://github.com/spulec/moto',
entry_points={
'console_scripts': [
'moto_server = moto.server:main',
],
},
packages=find_packages(exclude=("tests", "tests.*")),
install_requires=install_requires,
extras_require=extras_require,
license="Apache",
test_suite="tests",
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
],
)
|
Python
| 0 |
@@ -325,16 +325,28 @@
eutil%22,%0A
+ %22mock%22,%0A
%5D%0A%0Aextra
|
84c1ee14e1717ec63782dd5a159fe5848fce1cc4
|
Add Python 3.6 and 3.7 to PyPI page
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.version_info < (2, 7):
raise NotImplementedError("Sorry, you need at least Python 2.7 or Python 3.2+ to use bottle.")
import bottle
setup(name='bottle',
version=bottle.__version__,
description='Fast and simple WSGI-framework for small web-applications.',
long_description=bottle.__doc__,
author=bottle.__author__,
author_email='[email protected]',
url='http://bottlepy.org/',
py_modules=['bottle'],
scripts=['bottle.py'],
license='MIT',
platforms='any',
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware',
'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
Python
| 0 |
@@ -1705,16 +1705,136 @@
: 3.5',%0A
+ 'Programming Language :: Python :: 3.6',%0A 'Programming Language :: Python :: 3.7',%0A
|
f539736b563fb6859a6bffb03aed42b57880744f
|
create module
|
test_vbn_parser.py
|
test_vbn_parser.py
|
Python
| 0.000001 |
@@ -0,0 +1,500 @@
+import networkx as nx%0Aimport matplotlib.pyplot as plt%0Aimport vbn_parser as p%0A%0A# initialize%0AG = nx.Graph()%0Alink = 'http://vbn.aau.dk/da/organisations/antennas-propagation-and-radio-networking(c2c38bb3-3d28-4b2c-8bc4-949211d2d486)/publications.rss?altordering=publicationOrderByPublicationYearThenCreated&pageSize=500'%0A%0A# populate the graph%0Ap.parse_vbn(link, G)%0A%0A# visualize the graph%0Alabels = nx.get_node_attributes(G, 'name')%0Anx.draw(G, labels=labels)%0A%0Aplt.show()%0Anx.write_graphml(G,%22test1.graphml%22)%0A
|
|
30fc52d77170844c5b3820d997286df744eb56db
|
Add setup.py for packaging and PyPI submission.
|
setup.py
|
setup.py
|
Python
| 0 |
@@ -0,0 +1,1089 @@
+from setuptools import setup%0D%0Aname = 'hsalf'%0D%0Asetup(%0D%0A name=name,%0D%0A version='0.0.1',%0D%0A author='Nam T. Nguyen',%0D%0A author_email='[email protected]',%0D%0A url='https://bitbucket.org/namn/hsalf/overview',%0D%0A description='Hsalf is a pure Python library to read and write Flash files (SWF).',%0D%0A long_description='Hsalf is a pure Python library to read and write Flash files (SWF).',%0D%0A platforms='Any',%0D%0A package_dir=%7B'':'.'%7D,%0D%0A packages=%5B'hsalf'%5D,%0D%0A package_data=%7B'': %5B'README', 'LICENSE'%5D%7D,%0D%0A license='MIT',%0D%0A classifiers=%5B%0D%0A 'Development Status :: 3 - Alpha',%0D%0A 'Intended Audience :: Developers',%0D%0A 'License :: OSI Approved :: MIT License',%0D%0A 'Operating System :: OS Independent',%0D%0A 'Programming Language :: Python',%0D%0A 'Topic :: Internet :: WWW/HTTP',%0D%0A 'Topic :: Multimedia',%0D%0A 'Topic :: Security',%0D%0A 'Topic :: Software Development :: Assemblers',%0D%0A 'Topic :: Software Development :: Disassemblers',%0D%0A 'Topic :: Software Development :: Libraries :: Python Modules',%0D%0A %5D%0D%0A)%0D%0A
|
|
d433d16a0375669c1664bbe8f20a8db5924fa92e
|
Add basic benchmark for length
|
tests/benchmark.py
|
tests/benchmark.py
|
Python
| 0.000026 |
@@ -0,0 +1,535 @@
+from random import choice%0Afrom string import ascii_lowercase%0Aimport timeit%0A%0Aimport grapheme%0A%0Adef random_ascii_string(n):%0A return %22%22.join(choice(ascii_lowercase) for i in range(n))%0A%0Along_ascii_string = random_ascii_string(1000)%0A%0Astatements = %5B%0A %22len(long_ascii_string)%22,%0A %22grapheme.length(long_ascii_string)%22,%0A%5D%0Afor statement in statements:%0A n = 100%0A result = timeit.timeit(statement, setup=%22from __main__ import long_ascii_string; import grapheme%22, number=n) / 100%0A print(%22%7B%7D: %7B%7D seconds%22.format(statement, result))%0A%0A
|
|
dd015a7bf9c69e2f96488c9239be694303b30176
|
Create setup.py
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,985 @@
+import setuptools%0A%0Awith open(%22README.md%22, %22r%22) as fh:%0A long_description = fh.read()%0A%0Asetuptools.setup(%0A name=%22deepctr%22,%0A version=%220.1.3%22,%0A author=%22Weichen Shen%22,%0A author_email=%[email protected]%22,%0A description=%22DeepCTR is a Easy-to-use,Modular and Extendible package of deep-learning based CTR models ,including serval DNN-based CTR models and lots of core components layer of the models which can be used to build your own custom model.%22,%0A long_description=long_description,%0A long_description_content_type=%22text/markdown%22,%0A url=%22https://github.com/shenweichen/deepctr%22,%0A packages=setuptools.find_packages(),%0A install_requires=%5B%5D,%0A extras_require=%7B%0A %22tf%22: %5B%22tensorflow%3E=1.4.0,%3C1.7.0%22%5D,%0A %22tf_gpu%22: %5B%22tensorflow-gpu%3E=1.4.0,%3C1.7.0%22%5D,%0A %7D,%0A entry_points=%7B%0A %7D,%0A classifiers=(%0A %22Programming Language :: Python :: 3%22,%0A %22License :: OSI Approved :: MIT License%22,%0A %22Operating System :: OS Independent%22,%0A ),%0A)%0A
|
|
5a2a2aa33a2e206042b3d28a830d00bdae2f5ad8
|
Add setup.py for distribution
|
setup.py
|
setup.py
|
Python
| 0 |
@@ -0,0 +1,450 @@
+from ez_setup import use_setuptools%0Ause_setuptools()%0A%0Afrom setuptools import setup, find_packages%0Asetup(%0A name = %22rw%22,%0A version = %220.0.1%22,%0A packages = find_packages(),%0A scripts = %5B'scripts/rw'%5D,%0A%0A install_requires = %5B'docopt'%5D,%0A %0A author = %22Ben Pringle%22,%0A author_email = %[email protected]%22,%0A url = %22http://github.com/Pringley/rw%22,%0A description = %22Generate random words (e.g. for passwords)%22,%0A license = %22MIT%22,%0A)%0A
|
|
91affa8b785e0b5261f69448c1c08de429460bb9
|
Add setup.py
|
setup.py
|
setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,185 @@
+from setuptools import setup%0A%0Asetup(%0A name='django-yadt',%0A packages=(%0A 'django_yadt',%0A 'django_yadt.management',%0A 'django_yadt.management.commands',%0A ),%0A)%0A
|
|
090568a6e31fd8de1975d0e2cecb2fcd559acd3e
|
Add natsort to setup.py
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -----------------------------------------------------------------------------
# Copyright (c) 2013, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
__version__ = "0.1.0-dev"
from setuptools import setup
from glob import glob
classes = """
Development Status :: 4 - Beta
License :: OSI Approved :: BSD License
Topic :: Scientific/Engineering :: Bio-Informatics
Topic :: Software Development :: Libraries :: Application Frameworks
Topic :: Software Development :: Libraries :: Python Modules
Programming Language :: Python
Programming Language :: Python :: 2.7
Programming Language :: Python :: Implementation :: CPython
Operating System :: OS Independent
Operating System :: POSIX :: Linux
Operating System :: MacOS :: MacOS X
"""
long_description = """Qiita is a databasing and UI effort for QIIME"""
classifiers = [s.strip() for s in classes.split('\n') if s]
setup(name='qiita',
version=__version__,
long_description=long_description,
license="BSD",
description='Qiita',
author="Qiita development team",
author_email="[email protected]",
url='http://biocore.github.io/qiita',
test_suite='nose.collector',
packages=['qiita_core',
'qiita_db',
'qiita_pet',
'qiita_ware',
],
package_data={'qiita_core': ['support_files/config_test.txt'],
'qiita_db': ['support_files/*sql',
'support_files/test_data/preprocessed_data/*',
'support_files/test_data/processed_data/*',
'support_files/test_data/raw_data/*',
'support_files/test_data/analysis/*',
'support_files/test_data/reference/*',
'support_files/test_data/job/*.txt',
'support_files/test_data/job/2_test_folder/*',
'support_files/work_data/*']},
scripts=glob('scripts/*'),
extras_require={'test': ["nose >= 0.10.1", "pep8"],
'doc': ["Sphinx >= 1.2.2", "sphinx-bootstrap-theme"]},
install_requires=['psycopg2', 'click == 1.0', 'future >= 0.13.0',
'bcrypt', 'pandas', 'numpy >= 1.7', 'tornado==3.1.1',
'tornado_redis', 'redis', 'ipython[all]', 'pyparsing',
'mock', 'h5py', 'biom-format',
'scikit-bio == 0.2.0'],
classifiers=classifiers
)
|
Python
| 0 |
@@ -2722,16 +2722,26 @@
format',
+ 'natsort'
%0A
|
631afff160077cc629054613d59cb47747f6c20d
|
Fix setup to exclude tests
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014, 2015 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from setuptools import setup, find_packages
from pip.req import parse_requirements
from pip.download import PipSession
LONG_DESCRIPTION = open('README.md').read()
REQUIREMENTS = [str(ir.req) for ir in parse_requirements('requirements.txt', session=PipSession())
if not (getattr(ir, 'link', False) or getattr(ir, 'url', False))]
setup(
name='Superdesk-Core',
version='0.0.1-dev',
description='Superdesk Core library',
long_description=LONG_DESCRIPTION,
author='petr jasek',
author_email='[email protected]',
url='https://github.com/superdesk/superdesk-core',
license='GPLv3',
platforms=['any'],
packages=find_packages(),
install_requires=REQUIREMENTS,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
]
)
|
Python
| 0.000001 |
@@ -1016,16 +1016,33 @@
ackages(
+exclude=%5B'tests'%5D
),%0A i
|
5b251911d112abea610477a3f552a78be6b5b1e1
|
add utils module
|
utils.py
|
utils.py
|
Python
| 0 |
@@ -0,0 +1,567 @@
+from flask import Response, request%0A%0A%0Adef add_basic_auth(blueprint, username, password, realm='RQ Dashboard'):%0A '''Add HTTP Basic Auth to a blueprint.%0A Note this is only for casual use!%0A '''%0A @blueprint.before_request%0A def basic_http_auth(*args, **kwargs):%0A auth = request.authorization%0A if (auth is None or auth.password != password or auth%0A .username != username):%0A%0A return Response(%0A 'Please login',%0A 401,%0A %7B'WWW-Authenticate': 'Basic realm=%22%7B0%7D%22'.format(realm)%7D)%0A
|
|
58b0b12db2139c6eba5efac5caf9b9dd18170de6
|
Add unit tests for "ironic node-delete" shell cmd
|
ironicclient/tests/v1/test_node_shell.py
|
ironicclient/tests/v1/test_node_shell.py
|
# -*- coding: utf-8 -*-
#
# Copyright 2013 IBM Corp
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from ironicclient.openstack.common import cliutils
from ironicclient.tests import utils
import ironicclient.v1.node_shell as n_shell
class NodeShellTest(utils.BaseTestCase):
def test_node_show(self):
actual = {}
fake_print_dict = lambda data, *args, **kwargs: actual.update(data)
with mock.patch.object(cliutils, 'print_dict', fake_print_dict):
node = object()
n_shell._print_node_show(node)
exp = ['chassis_uuid',
'created_at',
'console_enabled',
'driver',
'driver_info',
'extra',
'instance_info',
'instance_uuid',
'last_error',
'maintenance',
'power_state',
'properties',
'provision_state',
'reservation',
'target_power_state',
'target_provision_state',
'updated_at',
'uuid']
act = actual.keys()
self.assertEqual(sorted(exp), sorted(act))
def test_do_node_vendor_passthru_with_args(self):
client_mock = mock.MagicMock()
args = mock.MagicMock()
args.node = 'node_uuid'
args.method = 'method'
args.arguments = [['arg1=val1', 'arg2=val2']]
n_shell.do_node_vendor_passthru(client_mock, args)
kwargs = {
'node_id': 'node_uuid',
'method': 'method',
'args': {'arg1': 'val1', 'arg2': 'val2'}
}
client_mock.node.vendor_passthru.assert_called_once_with(**kwargs)
def test_do_node_vendor_passthru_without_args(self):
client_mock = mock.MagicMock()
args = mock.MagicMock()
args.node = 'node_uuid'
args.method = 'method'
args.arguments = [[]]
n_shell.do_node_vendor_passthru(client_mock, args)
kwargs = {
'node_id': 'node_uuid',
'method': 'method',
'args': {}
}
client_mock.node.vendor_passthru.assert_called_once_with(**kwargs)
def test_do_node_set_boot_device(self):
client_mock = mock.MagicMock()
args = mock.MagicMock()
args.node = 'node_uuid'
args.persistent = False
args.device = 'pxe'
n_shell.do_node_set_boot_device(client_mock, args)
client_mock.node.set_boot_device.assert_called_once_with(
'node_uuid', 'pxe', False)
def test_do_node_get_boot_device(self):
client_mock = mock.MagicMock()
args = mock.MagicMock()
args.node = 'node_uuid'
n_shell.do_node_get_boot_device(client_mock, args)
client_mock.node.get_boot_device.assert_called_once_with('node_uuid')
def test_do_node_get_supported_boot_devices(self):
client_mock = mock.MagicMock()
args = mock.MagicMock()
args.node = 'node_uuid'
n_shell.do_node_get_supported_boot_devices(client_mock, args)
client_mock.node.get_supported_boot_devices.assert_called_once_with(
'node_uuid')
|
Python
| 0.000002 |
@@ -1698,16 +1698,607 @@
(act))%0A%0A
+ def test_do_node_delete(self):%0A client_mock = mock.MagicMock()%0A args = mock.MagicMock()%0A args.node = %5B'node_uuid'%5D%0A%0A n_shell.do_node_delete(client_mock, args)%0A client_mock.node.delete.assert_called_once_with('node_uuid')%0A%0A def test_do_node_delete_multiple(self):%0A client_mock = mock.MagicMock()%0A args = mock.MagicMock()%0A args.node = %5B'node_uuid1', 'node_uuid2'%5D%0A%0A n_shell.do_node_delete(client_mock, args)%0A client_mock.node.delete.assert_has_calls(%0A %5Bmock.call('node_uuid1'), mock.call('node_uuid2')%5D)%0A%0A
def
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.