commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
5e51cda3a7441f6e31477988b1288d1497fe23d9
|
Add arguments snippet
|
code/python/snippets/arguments.py
|
code/python/snippets/arguments.py
|
Python
| 0.000007 |
@@ -0,0 +1,861 @@
+%22%22%22%0AAdd command line arguments to your script.%0A%0AThis snippet adds the default command line arguments required for any interaction with the UpGuard API.%0A%0ATo Use:%0A%0A1. Copy snippet to the top of your script%0A2. Populate description (this is shown when running %60--help%60)%0A3. Access arguments with %60args%60 object, for example: %60args.target_url%60%0A%22%22%22%0A%0Aimport argparse%0A%0Aparser = argparse.ArgumentParser(description='Retrieve a list of open User Tasks and their associated nodes')%0Aparser.add_argument('--target-url', required=True, help='URL for the UpGuard instance')%0Aparser.add_argument('--api-key', required=True, help='API key for the UpGuard instance')%0Aparser.add_argument('--secret-key', required=True, help='Secret key for the UpGuard instance')%0Aparser.add_argument('--insecure', action='store_true', help='Ignore SSL certificate checks')%0Aargs = parser.parse_args()%0A
|
|
8bdc9c0685500b822787779b5ebffa46b00d8138
|
Add script
|
lightshow.py
|
lightshow.py
|
Python
| 0.000002 |
@@ -0,0 +1,1322 @@
+#!/usr/bin/sudo / usr/bin/python%0A%0Aimport RPi.GPIO as GPIO%0Afrom time import sleep%0A%0AGPIO.setmode(GPIO.BOARD)%0AGPIO.setwarnings(False)%0A%0Aleds = %7B'floor':%5B%5D, 'top-left':%5B%5D%7D%0A%0Adef setupled(name, pins):%0A%09for i in range(0, 3):%0A%09%09GPIO.setup(pins%5Bi%5D, GPIO.OUT)%0A%09%09leds%5Bname%5D.append(GPIO.PWM(pins%5Bi%5D, 100))%0A%0Asetupled('floor', %5B11, 13, 15%5D)%0Asetupled('top-left', %5B12, 16, 18%5D)%0A%0Afor key, value in leds.items():%0A%09for i in value:%0A%09%09i.start(0)%0A%0AWHITE = %5B255, 255, 255%5D%0ABLACK = %5B0, 0, 0%5D%0ARED = %5B255, 0, 0%5D%0AGREEN = %5B0, 255, 0%5D%0ABLUE = %5B0, 0, 255%5D%0AYELLOW = %5B255, 255, 0%5D%0APURPLE = %5B255, 0, 255%5D%0ACYAN = %5B0, 255, 255%5D%0A%0Adef setcolor(led, color):%0A%09for i in xrange(0, 3):%0A%09%09leds%5Bled%5D%5Bi%5D.ChangeDutyCycle((255 - color%5Bi%5D) * 100 / 255)%0A%09print('Setting %7B%7D to %7B%7D'.format(led, color))%0A%0A# Start program here%0Awhile True:%0A%09setcolor('floor', RED)%0A%09sleep(1)%0A%09setcolor('top-left', GREEN)%0A%09sleep(1)%0A%09setcolor('floor', BLUE)%0A%09sleep(1)%0A%09setcolor('top-left', YELLOW)%0A%09sleep(1)%0A%09setcolor('floor', PURPLE)%0A%09sleep(1)%0A%09setcolor('top-left', CYAN)%0A%09sleep(1)%0A%09setcolor('floor', WHITE)%0A%09sleep(1)%0A%09setcolor('top-left', BLACK)%0A%09sleep(1)%0A%09%0A%09for i in xrange(0, 256):%0A%09%09setcolor('floor', %5Bi, i, i%5D)%0A%09%09sleep(0.01)%0A%09%0A%09for x in xrange(0, 256):%0A%09%09y = 255 - x%0A%09%09setcolor('top-left', %5By, y, y%5D)%0A%09%09sleep(0.01)%0A%0Afor key, value in rooms.items():%0A%09for i in value:%0A%09%09i.stop()%0A%0AGPIO.cleanup()%0A
|
|
1a4052deb8e0ab2deb7038220ae23d7bb9311ce9
|
Add initial version of the script
|
ovf_to_facter.py
|
ovf_to_facter.py
|
Python
| 0.000001 |
@@ -0,0 +1,1900 @@
+#!/usr/bin/python%0A%0A#stdlib%0Aimport json%0Aimport os%0Aimport subprocess%0Afrom xml.dom.minidom import parseString%0A%0Adef which(cmd):%0A %22%22%22Python implementation of %60which%60 command.%22%22%22%0A for path in os.environ%5B%22PATH%22%5D.split(os.pathsep):%0A file = os.path.join(path, cmd)%0A if os.path.exists(file) and os.access(file, os.X_OK):%0A return file%0A elif os.name == %22nt%22:%0A for ext in os.environ%5B%22PATHEXT%22%5D.split(os.pathsep):%0A full = file + ext%0A if os.path.exists(full) and os.access(full, os.X_OK):%0A return full%0A return None%0A%0AFACTER = which(%22facter%22)%0AVMTOOLS = which(%22vmtoolsd%22)%0A%0Adef facter(*args):%0A facts = json.loads(subprocess.check_output(%5BFACTER, '--json', '--no-external'%5D + %5B arg for arg in args %5D))%0A return facts%0A%0Adef findXmlSection(dom, sectionName):%0A sections = dom.getElementsByTagName(sectionName)%0A return sections%5B0%5D%0A%0Adef getOVFProperties(ovfEnv):%0A dom = parseString(ovfEnv)%0A section = findXmlSection(dom, %22PropertySection%22)%0A propertyMap = %7B%7D%0A for property in section.getElementsByTagName(%22Property%22):%0A key = property.getAttribute(%22oe:key%22)%0A value = property.getAttribute(%22oe:value%22)%0A propertyMap%5Bkey%5D = value%0A dom.unlink()%0A return propertyMap%0A%0Adef getVMWareOvfEnv():%0A if VMTOOLS == None:%0A raise Exception(%22VMWare Tools not installed.%22)%0A try:%0A ovf = subprocess.check_output(%5BVMTOOLS, '--cmd', 'info-get guestinfo.ovfenv'%5D, stderr=subprocess.STDOUT)%0A properties = getOVFProperties(ovf)%0A print %22ovf=true%22%0A for key, value in properties.iteritems():%0A print %22ovf_%22 + key + %22=%22 + value%0A except:%0A print %22ovf=false%22%0A return%0A%0Aif __name__ == %22__main__%22:%0A facts = facter(%22is_virtual%22, %22virtual%22)%0A if (facts%5B'is_virtual'%5D == 'true') and (facts%5B'virtual'%5D == 'vmware'):%0A getVMWareOvfEnv()%0A
|
|
2f0ba9368bc44cffce1dcf2ec483aabf04c2e127
|
add python #5
|
python/5.py
|
python/5.py
|
Python
| 0.000032 |
@@ -0,0 +1,1270 @@
+#!/usr/bin/env python %0A%0A'''%0AProblem%0A=======%0A%0A2520 is the smallest number that can be divided by each of the numbers from 1 to 10 %0Awithout any remainder. What is the smallest positive number that is evenly divisible%0Aby all of the numbers from 1 to 20?%0A%0ALatest Run Stats%0A====== === =====%0A%0A'''%0Afrom math import ceil%0Afrom math import sqrt%0A%0Adef primeSieve(num):%0A noprimes = %7Bj for i in range(2, int(ceil(sqrt(num)))) for j in range(i*2, num, i)%7D %0A return %7Bi for i in range(2, num) if i not in noprimes%7D%0A%0Adef anyDivisible(nums, divisor):%0A for i in nums:%0A if i%25divisor == 0:%0A return True%0A return False%0A%0Alimit = 20 #upper limit for divisors%0A%0Adivisors = range(2,limit+1)%0Aprimes = primeSieve(max(divisors))%0A%0AprimeFactors = %5B%5D%0A%0A# Use a LCM table to determine the prime factors that make up the solution%0Afor prime in primes:%0A if divisors == %5B%5D:%0A break%0A while True:%0A divisible = anyDivisible(divisors, prime)%0A if not divisible:%0A break%0A divisors = %5Bi if i%25 prime != 0 else i/prime for i in divisors%5D%0A divisors = %5Bi for i in divisors if i %3E 1%5D%0A primeFactors.append(prime)%0A%0Aanswer = reduce(lambda primeFactor, total: primeFactor*total, primeFactors)%0Aprint answer #should be only print statement%0A%0A
|
|
459f87be465e0f5554c708fe60679494d152c8fd
|
Create permissions.py
|
templates/root/main/permissions.py
|
templates/root/main/permissions.py
|
Python
| 0.000001 |
@@ -0,0 +1,499 @@
+from rest_framework import permissions%0A%0Aclass IsOwnerOrReadOnly(permissions.BasePermission):%0A%09%22%22%22%0A%09Custom permission to only allow owners of an object to edit it.%0A%09%22%22%22%0A%0A%09def has_object_permissions(self, request, view, obj):%0A%09%09# Read permissions are allowed to any request,%0A%09%09# so we'll always allow GET, HEAD, or OPTIONS requests.%0A%09%09if request.method in permissions.SAFE_METHODS:%0A%09%09%09return True%0A%0A%09%09# Write permissions are only allowed to the owner of the snippet.%0A%09%09return obj.owner == request.user%0A
|
|
4a98686b63563b209456a8933ef34477adcdae43
|
extend Phabricator class and do nothing
|
phabricate/phab.py
|
phabricate/phab.py
|
Python
| 0 |
@@ -0,0 +1,92 @@
+from phabricator import Phabricator as _Phabricator%0A%0Aclass Phabricator(_Phabricator):%0A%09pass%0A
|
|
cca6b0c28747a3b0307fccd33dee60fcb42d910d
|
Test Fix.
|
tests/components/garage_door/test_demo.py
|
tests/components/garage_door/test_demo.py
|
"""
tests.components.garage_door.test_demo
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests demo garage door component.
"""
import unittest
import homeassistant.core as ha
import homeassistant.components.garage_door as gd
LEFT = 'garage_door.left_garage_door'
RIGHT = 'garage_door.right_garage_door'
class TestGarageDoorDemo(unittest.TestCase):
""" Test the demo garage door. """
def setUp(self): # pylint: disable=invalid-name
self.hass = ha.HomeAssistant()
self.assertTrue(gd.setup(self.hass, {
'garage_door': {
'platform': 'demo'
}
}))
def tearDown(self): # pylint: disable=invalid-name
""" Stop down stuff we started. """
self.hass.stop()
def test_is_closed(self):
self.assertTrue(gd.is_closed(self.hass, LEFT))
self.hass.states.is_state(LEFT, 'close')
self.assertFalse(gd.is_closed(self.hass, RIGHT))
self.hass.states.is_state(RIGHT, 'open')
def test_open_door(self):
gd.open_door(self.hass, LEFT)
self.hass.pool.block_till_done()
self.assertTrue(gd.is_closed(self.hass, LEFT))
def test_close_door(self):
gd.close_door(self.hass, RIGHT)
self.hass.pool.block_till_done()
self.assertFalse(gd.is_closed(self.hass, RIGHT))
|
Python
| 0 |
@@ -1092,35 +1092,36 @@
self.assert
-Tru
+Fals
e(gd.is_closed(s
@@ -1263,36 +1263,35 @@
self.assert
-Fals
+Tru
e(gd.is_closed(s
|
67d0d381003dc02d5e1eae9d0c8591daee4b93b3
|
Migrate SnafuComics to single-class module.
|
dosagelib/plugins/snafu.py
|
dosagelib/plugins/snafu.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2004-2005 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2016 Tobias Gruetzmacher
from __future__ import absolute_import, division, print_function
from ..scraper import _ParserScraper
from ..helpers import indirectStarter
class _Snafu(_ParserScraper):
# Next and Previous are swapped...
prevSearch = '//a[@class="next"]'
imageSearch = '//div[@class="comicpage"]/img'
latestSearch = '//div[@id="feed"]/a'
starter = indirectStarter
def __init__(self, name):
super(_Snafu, self).__init__('SnafuComics/' + name)
def namer(self, image_url, page_url):
year, month, name = image_url.rsplit('/', 3)[1:]
return "%04s_%02s_%s" % (year, month, name)
@property
def url(self):
return 'http://snafu-comics.com/swmseries/' + self.path
class Braindead(_Snafu):
path = 'braindead'
class Bunnywith(_Snafu):
path = 'bunnywith'
class DeliverUsEvil(_Snafu):
path = 'deliverusevil'
class DigitalPurgatory(_Snafu):
path = 'digital-purgatory'
class EA(_Snafu):
path = 'ea'
class FT(_Snafu):
path = 'ft'
class GrimTalesFromDownBelow(_Snafu):
path = 'grimtales'
class KOF(_Snafu):
path = 'kof'
class MyPanda(_Snafu):
path = 'mypanda'
class NarutoHeroesPath(_Snafu):
path = 'naruto'
class NewSuperMarioAdventures(_Snafu):
path = 'nsma'
class PowerPuffGirls(_Snafu):
path = 'powerpuffgirls'
class PSG2(_Snafu):
path = 'psg2'
class SatansExcrement(_Snafu):
path = 'satansexcrement'
class SF(_Snafu):
path = 'sf'
class SkullBoy(_Snafu):
path = 'skullboy'
class Snafu(_Snafu):
path = 'snafu'
class Soul(_Snafu):
path = 'soul'
class Sugar(_Snafu):
path = 'sugarbits'
class SureToBeBanD(_Snafu):
path = 'stbb'
class TheLeague(_Snafu):
path = 'league'
class Tin(_Snafu):
path = 'tin'
class Titan(_Snafu):
path = 'titan'
class TrunksAndSoto(_Snafu):
path = 'trunks-and-soto'
class TW(_Snafu):
path = 'tw'
class Zim(_Snafu):
path = 'zim'
|
Python
| 0 |
@@ -322,17 +322,16 @@
%0A%0Aclass
-_
Snafu(_P
@@ -571,16 +571,22 @@
lf, name
+, path
):%0A
@@ -594,17 +594,16 @@
super(
-_
Snafu, s
@@ -634,24 +634,87 @@
cs/' + name)
+%0A self.url = 'http://snafu-comics.com/swmseries/' + path
%0A%0A def na
@@ -864,1334 +864,1088 @@
@
-property%0A def url(self):%0A return 'http://snafu-comics.com/swmseries/' + self.path%0A%0A%0Aclass Braindead(_Snafu):%0A path = 'braindead'%0A%0A%0Aclass Bunnywith(_Snafu):%0A path = 'bunnywith'%0A%0A%0Aclass DeliverUsEvil(_Snafu):%0A path = 'deliverusevil'%0A%0A%0Aclass DigitalPurgatory(_Snafu):%0A path = 'digital-purgatory'%0A%0A%0Aclass EA(_Snafu):%0A path = 'ea'%0A%0A%0Aclass FT(_Snafu):%0A path = 'ft'%0A%0A%0Aclass GrimTalesFromDownBelow(_Snafu):%0A path = 'grimtales'%0A%0A%0Aclass KOF(_Snafu):%0A path = 'kof'%0A%0A%0Aclass MyPanda(_Snafu):%0A path = 'mypanda'%0A%0A%0Aclass NarutoHeroesPath(_Snafu):%0A path = 'naruto'%0A%0A%0Aclass NewSuperMarioAdventures(_Snafu):%0A path = 'nsma'%0A%0A%0Aclass PowerPuffGirls(_Snafu):%0A path = 'powerpuffgirls'%0A%0A%0Aclass PSG2(_Snafu):%0A path = 'psg2'%0A%0A%0Aclass SatansExcrement(_Snafu):%0A path = 'satansexcrement'%0A%0A%0Aclass SF(_Snafu):%0A path = 'sf'%0A%0A%0Aclass SkullBoy(_Snafu):%0A path = 'skullboy'%0A%0A%0Aclass Snafu(_S
+classmethod%0A def getmodules(cls):%0A return %5B%0A cls('Braindead', 'braindead'),%0A cls('Bunnywith', 'bunnywith'),%0A cls('DeliverUsEvil', 'deliverusevil'),%0A cls('EA', 'ea'),%0A cls('FT', 'ft'),%0A cls('GrimTalesFromDownBelow', 'grimtales'),%0A cls('KOF', 'kof'),%0A cls('MyPanda', 'mypanda'),%0A cls('NarutoHeroesPath', 'naruto'),%0A cls('NewSuperMarioAdventures', 'nsma'),%0A cls('PowerPuffGirls', 'powerpuffgirls'),%0A # cls('PSG2', 'psg2'), -- Strangely broken%0A cls('SatansExcrement', 'satansexcrement'),%0A cls('SF', 'sf'),%0A cls('SkullBoy', 'skullboy'),%0A cls('Snafu', 's
nafu
-):
+'),
%0A
-path = 'snafu'%0A%0A%0Aclass Soul(_Snafu):%0A path = 'soul'%0A%0A%0Aclass Sugar(_Snafu):%0A path = 'sugarbits'%0A%0A%0Aclass SureToBeBanD(_Snafu):%0A path = 'stbb'%0A%0A%0Aclass TheLeague(_Snafu):%0A path = 'league'%0A%0A%0Aclass Tin(_Snafu):%0A path = 'tin'%0A%0A%0Aclass Titan(_Snafu):%0A path = 'titan'%0A%0A%0Aclass TrunksAndSoto(_Snafu):%0A path = 'trunks-and-soto'%0A%0A%0Aclass TW(_Snafu):%0A path = 'tw'%0A%0A%0Aclass Zim(_Snafu):%0A path = 'zim'
+ cls('Soul', 'soul'),%0A cls('Sugar', 'sugarbits'),%0A cls('SureToBeBanD', 'stbb'),%0A cls('TheLeague', 'league'),%0A cls('Tin', 'tin'),%0A cls('Titan', 'titan'),%0A cls('TrunksAndSoto', 'trunks-and-soto'),%0A cls('TW', 'tw'),%0A cls('Zim', 'zim'),%0A %5D
%0A
|
43cf23e793794fd45322471a52c83785070ac243
|
add simple_graph
|
simple_graph.py
|
simple_graph.py
|
Python
| 0.999579 |
@@ -0,0 +1,1558 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0A%0Aclass Graph(object):%0A%0A def __init__(self):%0A self.gdict = %7B%7D%0A%0A def nodes(self):%0A return self.gdict.keys()%0A%0A def edges(self):%0A self.edges = %5B%5D%0A for node in self.gdict:%0A for end in self.gdict%5Bnode%5D:%0A self.edges.append((node, end))%0A return self.edges%0A%0A def add_node(self, n):%0A self.gdict.setdefault(n, %5B%5D)%0A%0A def add_edge(self, n1, n2):%0A self.gdict%5Bn1%5D.setdefault(n2, %5B%5D)%0A try:%0A self.gdict%5Bn1%5D.append(n2)%0A except KeyError:%0A self.gdict%5Bn1%5D = %5Bn2%5D%0A%0A def del_node(self, n):%0A try:%0A del self.gdict%5Bn1%5D%0A except KeyError:%0A raise KeyError('%7B%7D not in the graph.'.format(n1))%0A for nodelist in self.gdit.values():%0A try:%0A nodelist.remove(n)%0A except ValueError:%0A continue%0A%0A def del_edge(self, n1, n2):%0A try:%0A self.gdict%5Bn1%5D.remove%5Bn2%5D%0A except KeyError, ValueError:%0A raise ValueError('Edge %7B%7D, %7B%7D not in the graph.'.format(n1, n2))%0A%0A def has_node(self, n):%0A return n in self.gdict%0A%0A def neighbors(self, n):%0A try:%0A return self.gdict%5Bn%5D%0A except KeyError:%0A raise KeyError('%7B%7D not in the graph.'.format(n1))%0A%0A def adjacent(self, n1, n2):%0A if n1 not in self.dict or n2 not in self.gdict:%0A raise KeyError('One of these nodes is not in the graph.')%0A return n2 in self.gdict%5Bn1%5D%0A
|
|
d7e6291564a5d5683a8b03fc9a761ad3e3dd70ea
|
Bump version to stable.
|
usb/__init__.py
|
usb/__init__.py
|
# Copyright (C) 2009-2014 Wander Lairson Costa
#
# The following terms apply to all files associated
# with the software unless explicitly disclaimed in individual files.
#
# The authors hereby grant permission to use, copy, modify, distribute,
# and license this software and its documentation for any purpose, provided
# that existing copyright notices are retained in all copies and that this
# notice is included verbatim in any distributions. No written agreement,
# license, or royalty fee is required for any of the authorized uses.
# Modifications to this software may be copyrighted by their authors
# and need not follow the licensing terms described here, provided that
# the new terms are clearly indicated on the first page of each file where
# they apply.
#
# IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY
# FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
# ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY
# DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THIS SOFTWARE
# IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE
# NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
# MODIFICATIONS.
r"""PyUSB - Easy USB access in Python
This package exports the following modules and subpackages:
core - the main USB implementation
legacy - the compatibility layer with 0.x version
backend - the support for backend implementations.
control - USB standard control requests.
libloader - helper module for backend library loading.
Since version 1.0, main PyUSB implementation lives in the 'usb.core'
module. New applications are encouraged to use it.
"""
import logging
import os
__author__ = 'Wander Lairson Costa'
# Use Semantic Versioning, http://semver.org/
version_info = (1, 0, 0, 'rc1')
__version__ = '%d.%d.%d%s' % version_info
__all__ = ['legacy', 'control', 'core', 'backend', 'util', 'libloader']
def _setup_log():
from usb import _debug
logger = logging.getLogger('usb')
debug_level = os.getenv('PYUSB_DEBUG')
if debug_level is not None:
_debug.enable_tracing(True)
filename = os.getenv('PYUSB_LOG_FILENAME')
LEVELS = {'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL}
level = LEVELS.get(debug_level, logging.CRITICAL + 10)
logger.setLevel(level = level)
try:
handler = logging.FileHandler(filename)
except:
handler = logging.StreamHandler()
fmt = logging.Formatter('%(asctime)s %(levelname)s:%(name)s:%(message)s')
handler.setFormatter(fmt)
logger.addHandler(handler)
else:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# We set the log level to avoid delegation to the
# parent log handler (if there is one).
# Thanks to Chris Clark to pointing this out.
logger.setLevel(logging.CRITICAL + 10)
logger.addHandler(NullHandler())
_setup_log()
# We import all 'legacy' module symbols to provide compatibility
# with applications that use 0.x versions.
from usb.legacy import *
|
Python
| 0 |
@@ -2073,15 +2073,8 @@
0, 0
-, 'rc1'
)%0A__
@@ -2098,10 +2098,8 @@
d.%25d
-%25s
' %25
|
59c9f7d63e333a9e00ffbb3089dfde3fe2d34826
|
Fix check on empty body in serialization
|
src/sentry/interfaces/http.py
|
src/sentry/interfaces/http.py
|
"""
sentry.interfaces.http
~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
__all__ = ('Http',)
from django.utils.translation import ugettext as _
from urllib import urlencode
from urlparse import parse_qsl, urlsplit, urlunsplit
from sentry.constants import HTTP_METHODS
from sentry.interfaces.base import Interface
from sentry.utils.safe import trim, trim_dict
from sentry.web.helpers import render_to_string
class Http(Interface):
"""
The Request information is stored in the Http interface. Two arguments
are required: ``url`` and ``method``.
The ``env`` variable is a compounded dictionary of HTTP headers as well
as environment information passed from the webserver. Sentry will explicitly
look for ``REMOTE_ADDR`` in ``env`` for things which require an IP address.
The ``data`` variable should only contain the request body (not the query
string). It can either be a dictionary (for standard HTTP requests) or a
raw request body.
>>> {
>>> "url": "http://absolute.uri/foo",
>>> "method": "POST",
>>> "data": {
>>> "foo": "bar"
>>> },
>>> "query_string": "hello=world",
>>> "cookies": "foo=bar",
>>> "headers": {
>>> "Content-Type": "text/html"
>>> },
>>> "env": {
>>> "REMOTE_ADDR": "192.168.0.1"
>>> }
>>> }
.. note:: This interface can be passed as the 'request' key in addition
to the full interface path.
"""
display_score = 1000
score = 800
FORM_TYPE = 'application/x-www-form-urlencoded'
@classmethod
def to_python(cls, data):
assert data.get('url')
kwargs = {}
if data.get('method'):
method = data['method'].upper()
assert method in HTTP_METHODS
kwargs['method'] = method
else:
kwargs['method'] = None
scheme, netloc, path, query_bit, fragment_bit = urlsplit(data['url'])
query_string = data.get('query_string') or query_bit
if query_string:
# if querystring was a dict, convert it to a string
if isinstance(query_string, dict):
query_string = urlencode(query_string.items())
else:
query_string = query_string
if query_string[0] == '?':
# remove '?' prefix
query_string = query_string[1:]
kwargs['query_string'] = trim(query_string, 1024)
else:
kwargs['query_string'] = ''
fragment = data.get('fragment') or fragment_bit
cookies = data.get('cookies')
# if cookies were [also] included in headers we
# strip them out
headers = data.get('headers')
if headers:
if 'Cookie' in headers and not cookies:
cookies = headers.pop('Cookie')
headers = trim_dict(headers)
else:
headers = {}
body = data.get('data')
if isinstance(body, (list, tuple)):
body = trim_dict(dict(enumerate(body)))
elif isinstance(body, dict):
body = trim_dict(body)
else:
body = trim(body, 2048)
if headers.get('Content-Type') == cls.FORM_TYPE and '=' in body:
body = dict(parse_qsl(body))
# if cookies were a string, convert to a dict
# parse_qsl will parse both acceptable formats:
# a=b&c=d
# and
# a=b;c=d
if isinstance(cookies, basestring):
cookies = dict(parse_qsl(cookies, keep_blank_values=True))
elif not cookies:
cookies = {}
kwargs['cookies'] = trim_dict(cookies)
kwargs['env'] = trim_dict(data.get('env') or {})
kwargs['headers'] = headers
kwargs['data'] = body
kwargs['url'] = urlunsplit((scheme, netloc, path, '', ''))
kwargs['fragment'] = trim(fragment, 256)
return cls(**kwargs)
@property
def full_url(self):
url = self.url
if self.query_string:
url = url + '?' + self.query_string
if self.fragment:
url = url + '#' + self.fragment
return url
def to_email_html(self, event, **kwargs):
return render_to_string('sentry/partial/interfaces/http_email.html', {
'event': event,
'url': self.full_url,
'short_url': self.url,
'method': self.method,
'query_string': self.query_string,
})
def to_html(self, event, is_public=False, **kwargs):
context = {
'is_public': is_public,
'event': event,
'url': self.full_url,
'short_url': self.url,
'method': self.method,
'query_string': self.query_string,
'fragment': self.fragment,
'headers': self.headers,
}
if not is_public:
# It's kind of silly we store this twice
context.update({
'cookies': self.cookies,
'env': self.env,
'data': self.data,
})
return render_to_string('sentry/partial/interfaces/http.html', context)
def get_alias(self):
return 'request'
def get_title(self):
return _('Request')
|
Python
| 0.000001 |
@@ -3276,34 +3276,39 @@
body)%0A el
-se
+if body
:%0A bo
|
7d9b004b3fb33ed9f16ca657ddb6ee3ddf452802
|
add dump2pe (t2_08 sample)
|
elfesteem/t2_08_dump2pe.py
|
elfesteem/t2_08_dump2pe.py
|
Python
| 0 |
@@ -0,0 +1,1241 @@
+#! /usr/bin/env python%0A%0Aimport pe%0Afrom pe_init import PE%0Aimport rlcompleter,readline,pdb, sys%0Afrom pprint import pprint as pp%0Areadline.parse_and_bind(%22tab: complete%22)%0Aimport shlex%0A%0Af = open('my_dump.txt', 'r')%0A%0Afor i in xrange(27):%0A f.readline()%0A%0Astate = 0%0Afuncs = %5B%5D%0Adll = %22%22%0A%0A#parse imprec output%0Anew_dll = %5B%5D%0Awhile True:%0A l = f.readline()%0A if not l:%0A break%0A l = l.strip()%0A if state == 0 and l.startswith(%22FThunk%22):%0A t = %5Br for r in shlex.shlex(l)%5D%0A ad = int(t%5B2%5D, 16)%0A state = 1%0A continue%0A if state == 1:%0A t = %5Br for r in shlex.shlex(l)%5D%0A if not len(t):%0A new_dll.append((%7B%22name%22:dll,%0A %22firstthunk%22:ad%7D,funcs%5B:%5D ))%0A dll = %22%22%0A funcs, state = %5B%5D, 0%0A else:%0A dll = t%5B2%5D%0A funcs.append(t%5B6%5D) %0A continue%0A%0A%0App(new_dll)%0A%0Adata = open('DUMP_00401000-00479000', 'rb').read()%0A%0Ae = PE()%0Ae.DirImport.add_dlldesc(new_dll)%0As_text = e.SHList.add_section(name = %22text%22, addr = 0x1000, data = data)%0As_myimp = e.SHList.add_section(name = %22myimp%22, rawsize = len(e.DirImport))%0Ae.DirImport.set_rva(s_myimp.addr)%0A%0Ae.Opthdr.Opthdr.AddressOfEntryPoint = s_text.addr%0A%0Aopen('uu.bin', 'wb').write(str(e))%0A %0A
|
|
062b4d045580adaebf30376cae1b88387dc7f3bb
|
add test_db
|
www/test_deb.py
|
www/test_deb.py
|
Python
| 0.000001 |
@@ -0,0 +1,496 @@
+# coding=utf-8%0Afrom www.models import User%0Afrom www.transwarp import db%0A%0A__author__ = 'xubinggui'%0A%0Adb.create_engine(user='www-data', password='www-data', database='awesome')%0A%0Au = User(name='Test', email='[email protected]', password='1234567890', image='about:blank')%0A%0Au.insert()%0A%0Aprint 'new user id:', u.id%0A%0Au1 = User.find_first('where email=?', '[email protected]')%0Aprint 'find user%5C's name:', u1.name%0A%0Au1.delete()%0A%0Au2 = User.find_first('where email=?', '[email protected]')%0Aprint 'find user:', u2
|
|
64c24ee2813e5d85866d14cfdee8258b91c09df6
|
add debug topology file
|
evaluation/topo-fattree.py
|
evaluation/topo-fattree.py
|
Python
| 0.000001 |
@@ -0,0 +1,1891 @@
+%22%22%22Custom topology example%0A%0ATwo directly connected switches plus a host for each switch:%0A%0A host --- switch --- switch --- host%0A%0AAdding the 'topos' dict with a key/value pair to generate our newly defined%0Atopology enables one to pass in '--topo=mytopo' from the command line.%0A%22%22%22%0A%0Afrom mininet.topo import Topo%0A%0Aclass MyTopo( Topo ):%0A %22Simple topology example.%22%0A%0A def __init__( self ):%0A %22Create custom topo.%22%0A%0A # Initialize topology%0A Topo.__init__( self )%0A%0A # Add hosts and switches%0A%09host1 = self.addHost('h1')%0A%09host2 = self.addHost('h2')%0A%09host3 = self.addHost('h3')%0A%09host4 = self.addHost('h4')%0A%09host5 = self.addHost('h5')%0A%09host6 = self.addHost('h6')%0A%09host7 = self.addHost('h7')%0A%09host8 = self.addHost('h8')%0A%09%0A%09switch1 = self.addSwitch('s1')%0A%09switch2 = self.addSwitch('s2')%0A%09switch3 = self.addSwitch('s3')%0A%09switch4 = self.addSwitch('s4')%0A%09switch5 = self.addSwitch('s5')%0A%09switch6 = self.addSwitch('s6')%0A%09switch7 = self.addSwitch('s7')%0A%09switch8 = self.addSwitch('s8')%0A%09switch9 = self.addSwitch('s9')%0A%09switch10 = self.addSwitch('s10')%0A%0A # Add links%0A%09self.addLink(host1, switch1)%0A%09self.addLink(host2, switch1)%0A%09self.addLink(host3, switch2)%0A%09self.addLink(host4, switch2)%0A%09self.addLink(switch1, switch3)%0A%09self.addLink(switch1, switch4)%0A%09self.addLink(switch2, switch3)%0A%09self.addLink(switch2, switch4)%0A%09%0A%09self.addLink(host5, switch5)%0A%09self.addLink(host6, switch5)%0A%09self.addLink(host7, switch6)%0A%09self.addLink(host8, switch6)%0A%09self.addLink(switch5, switch7)%0A%09self.addLink(switch5, switch8)%0A%09self.addLink(switch6, switch7)%0A%09self.addLink(switch6, switch8)%0A%0A%09self.addLink(switch3, switch9)%0A%09self.addLink(switch3, switch10)%0A%09self.addLink(switch4, switch9)%0A%09self.addLink(switch4, switch10)%0A%09self.addLink(switch7, switch9)%0A%09self.addLink(switch7, 
switch10)%0A%09self.addLink(switch8, switch9)%0A%09self.addLink(switch8, switch10)%0A%0Atopos = %7B 'fattree': ( lambda: MyTopo() ) %7D%0A
|
|
d0474ea69c9bcc5b07829603778e0277d1fd733a
|
fix moved Glottolog identifier of nepa1252
|
migrations/versions/1715ee79365_fix_missing_nepa1252_identifier.py
|
migrations/versions/1715ee79365_fix_missing_nepa1252_identifier.py
|
Python
| 0 |
@@ -0,0 +1,1654 @@
+# coding=utf-8%0A%22%22%22fix missing nepa1252 identifier%0A%0ARevision ID: 1715ee79365%0ARevises: 506dcac7d75%0ACreate Date: 2015-04-15 19:34:27.655000%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '1715ee79365'%0Adown_revision = '506dcac7d75'%0A%0Aimport datetime%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A id, name = 'nepa1252', 'Nepali'%0A%0A insert_ident = sa.text('INSERT INTO identifier '%0A '(created, updated, active, version, type, description, lang, name) '%0A 'SELECT now(), now(), true, 1, :type, :description, :lang, :name '%0A 'WHERE NOT EXISTS (SELECT 1 FROM identifier WHERE type = :type '%0A 'AND description = :description AND lang = :lang AND name = :name)'%0A ).bindparams(type='name', description='Glottolog', lang='en')%0A%0A insert_lang_ident = sa.text('INSERT INTO languageidentifier '%0A '(created, updated, active, version, language_pk, identifier_pk) '%0A 'SELECT now(), now(), true, 1, '%0A '(SELECT pk FROM language WHERE id = :id), '%0A '(SELECT pk FROM identifier WHERE type = :type '%0A 'AND description = :description AND lang = :lang AND name = :name) '%0A 'WHERE NOT EXISTS (SELECT 1 FROM languageidentifier '%0A 'WHERE language_pk = (SELECT pk FROM language WHERE id = :id) '%0A 'AND identifier_pk = (SELECT pk FROM identifier WHERE type = :type '%0A 'AND description = :description AND lang = :lang AND name = :name))'%0A ).bindparams(type='name', description='Glottolog', lang='en')%0A%0A op.execute(insert_ident.bindparams(name=name))%0A op.execute(insert_lang_ident.bindparams(id=id, name=name))%0A%0A%0Adef downgrade():%0A pass%0A
|
|
18e66983c49c68e9000acd331d6888c4c72a99b3
|
Fix mypy error.
|
zerver/signals.py
|
zerver/signals.py
|
from __future__ import absolute_import
from django.dispatch import receiver
from django.contrib.auth.signals import user_logged_in
from django.core.mail import send_mail
from django.conf import settings
from django.template import loader
from django.utils import timezone
from typing import Any, Dict, Optional
def get_device_browser(user_agent):
# type: (str) -> Optional[str]
user_agent = user_agent.lower()
if "chrome" in user_agent and "chromium" not in user_agent:
return 'Chrome'
elif "firefox" in user_agent and "seamonkey" not in user_agent and "chrome" not in user_agent:
return "Firefox"
elif "chromium" in user_agent:
return "Chromium"
elif "safari" in user_agent and "chrome" not in user_agent and "chromium" not in user_agent:
return "Safari"
elif "opera" in user_agent:
return "Opera"
elif "msie" in user_agent or "trident" in user_agent:
return "Internet Explorer"
elif "edge" in user_agent:
return "Edge"
else:
return None
def get_device_os(user_agent):
# type: (str) -> Optional[str]
user_agent = user_agent.lower()
if "windows" in user_agent:
return "Windows"
elif "macintosh" in user_agent:
return "MacOS"
elif "linux" in user_agent and "android" not in user_agent:
return "Linux"
elif "android" in user_agent:
return "Android"
elif "like mac os x" in user_agent:
return "iOS"
else:
return None
@receiver(user_logged_in, dispatch_uid="only_on_login")
def email_on_new_login(sender, user, request, **kwargs):
# type: (Any, UserProfile, Any, Any) -> None
# We import here to minimize the dependencies of this module,
# since it runs as part of `manage.py` initialization
from zerver.context_processors import common_context
from zerver.models import UserProfile
if not settings.SEND_LOGIN_EMAILS:
return
if request:
# Login emails are for returning users, not new registrations.
# Determine if login request was from new registration.
path = request.META.get('PATH_INFO', None)
if path:
if path == "/accounts/register/":
return
login_time = timezone.now().strftime('%A, %B %d, %Y at %I:%M%p ') + \
timezone.get_current_timezone_name()
user_agent = request.META.get('HTTP_USER_AGENT', "").lower()
device_browser = get_device_browser(user_agent)
device_os = get_device_os(user_agent)
device_ip = request.META.get('REMOTE_ADDR') or "Uknown IP address"
device_info = {"device_browser": device_browser,
"device_os": device_os,
"device_ip": device_ip,
"login_time": login_time
}
context = common_context(user)
context['device_info'] = device_info
context['zulip_support'] = settings.ZULIP_ADMINISTRATOR
context['user'] = user
text_template = 'zerver/emails/new_login/new_login_alert.txt'
html_template = 'zerver/emails/new_login/new_login_alert.html'
text_content = loader.render_to_string(text_template, context)
html_content = loader.render_to_string(html_template, context)
sender = settings.NOREPLY_EMAIL_ADDRESS
recipients = [user.email]
subject = loader.render_to_string('zerver/emails/new_login/new_login_alert.subject').strip()
send_mail(subject, text_content, sender, recipients, html_message=html_content)
|
Python
| 0 |
@@ -304,16 +304,54 @@
Optional
+%0Afrom zerver.models import UserProfile
%0A%0Adef ge
@@ -1882,50 +1882,8 @@
text
-%0A from zerver.models import UserProfile
%0A%0A
|
2749b4b754562c45a54b3df108c5c40c8d548038
|
Create __init__.py
|
web/__init__.py
|
web/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
1396ff4ab4e6664c265f97958951815a525f7823
|
Remove confusing navigation tabs from header.
|
reddit_donate/pages.py
|
reddit_donate/pages.py
|
from r2.lib.pages import Reddit
from r2.lib.wrapped import Templated
class DonatePage(Reddit):
    """Shared shell for donate-app pages.

    Pulls in the donate stylesheet and renders without the sidebar.
    """

    extra_stylesheets = Reddit.extra_stylesheets + ["donate.less"]

    def __init__(self, title, content, **kwargs):
        Reddit.__init__(self, title=title, content=content,
                        show_sidebar=False, **kwargs)
class DonateLanding(Templated):
    # Landing view; all rendering is driven by the associated template.
    pass
|
Python
| 0 |
@@ -369,16 +369,102 @@
)%0A%0A
+ def build_toolbars(self):%0A # get rid of tabs on the top%0A return %5B%5D%0A%0A
%0Aclass D
|
eefa1f039d935a7242bb14bdb6f672db1ff24302
|
Create omega-virus.py
|
omega-virus.py
|
omega-virus.py
|
Python
| 0 |
@@ -0,0 +1,906 @@
+#!/usr/bin/python%0A%0A#%0A# Insert docopt user help menu here?%0A#%0A%0A%0A#%0A# End docopt%0A#%0A%0Adef sectors()%0A%09# Blue%0A%09# Green%0A%09# Red%0A%09# Yellow%0A%09%0Adef roomList()%0A%09# List of rooms%0A%09# Green (open rooms)%0A%09# Blue (requires blue key)%0A%09# Red (requires red key)%0A%09# Yellow (requires yellow key)%0A%0Adef roomContents()%0A%09# Each room can have one of:%0A%09# 1. An ADV%0A%09# 2. An access card%0A%09# 3. A Probe (more on this in a little bit)%0A%09# 4. A hazard%0A%09# 5. The virus (presumably chilling out)%0A%09# 6. Nothing at all%0A%0Adef items()%0A%09# Access keys (Blue, Red, Yellow)%0A%09# Decoder - yellow%0A%09# Disruptor - blue%0A%09# Negatron - red%0A%09# Probe%0A%09%0Adef players()%0A%09# Blue%0A%09# Green%0A%09# Red%0A%09# Yellow%0A%09%0Adef rng()%0A%09# Random number generator%0A%09# Values 0,1,2%0A%09%0Adef secretCode()%0A%09# Secret codes let players know where the virus is provided:%0A%09#%09a) They enter a room where the virus is%0A%09#%09b) They do not have all three weapons%0A%09#%09c) Should we let probes find the virus?%0A%0A
|
|
04f937a24279699164278d47fc5d0790a9062132
|
add gunicorn.py
|
wsgi_gunicorn.py
|
wsgi_gunicorn.py
|
Python
| 0.007978 |
@@ -0,0 +1,234 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Afrom werkzeug.contrib.fixers import ProxyFix%0Afrom app import create_app%0A%0Aapp = create_app()%0A%0Aapp.wsgi_app = ProxyFix(app.wsgi_app)%0A%0Aif __name__ == '__main__':%0A app.run(host='0.0.0.0')%0A
|
|
7a9a25f5e1f57d4cdd4d324c5937714c74976a56
|
Fix test_get_creds_from_env_vars_when_required_vars_missing
|
tests/unit/cli/test_envutils.py
|
tests/unit/cli/test_envutils.py
|
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from six import moves
from rally.cli import envutils
from rally import exceptions
from tests.unit import test
class EnvUtilsTestCase(test.TestCase):
    """Unit tests for rally.cli.envutils.

    os.environ is controlled with mock.patch.dict in every test so the
    developer's real shell environment cannot influence the outcome
    (and is restored when each test finishes).
    """

    def test_default_from_global(self):
        @envutils.default_from_global("test_arg_name",
                                      "test_env_name",
                                      "test_missing_arg")
        def test_function(test_arg_name=None):
            pass
        with mock.patch("sys.stdout",
                        new_callable=moves.StringIO) as mock_stdout:
            test_function()
            self.assertEqual(mock_stdout.getvalue(),
                             "Missing argument: --test_missing_arg\n")

    @mock.patch.dict(os.environ,
                     values={envutils.ENV_DEPLOYMENT: "my_deployment_id"},
                     clear=True)
    def test_get_deployment_id_in_env(self):
        deployment_id = envutils.get_global(envutils.ENV_DEPLOYMENT)
        self.assertEqual("my_deployment_id", deployment_id)

    @mock.patch.dict(os.environ, values={}, clear=True)
    @mock.patch("rally.cli.envutils.fileutils.load_env_file")
    def test_get_deployment_id_with_exception(self, mock_load_env_file):
        self.assertRaises(exceptions.InvalidArgumentsException,
                          envutils.get_global, envutils.ENV_DEPLOYMENT, True)
        mock_load_env_file.assert_called_once_with(os.path.expanduser(
            "~/.rally/globals"))

    @mock.patch.dict(os.environ, values={}, clear=True)
    @mock.patch("rally.cli.envutils.fileutils.load_env_file")
    def test_get_deployment_id_with_none(self, mock_load_env_file):
        self.assertIsNone(envutils.get_global(envutils.ENV_DEPLOYMENT))
        mock_load_env_file.assert_called_once_with(os.path.expanduser(
            "~/.rally/globals"))

    @mock.patch.dict(os.environ, values={envutils.ENV_TASK: "my_task_id"},
                     clear=True)
    def test_get_task_id_in_env(self):
        self.assertEqual("my_task_id", envutils.get_global(envutils.ENV_TASK))

    @mock.patch.dict(os.environ, values={}, clear=True)
    @mock.patch("rally.cli.envutils.fileutils.load_env_file")
    def test_get_task_id_with_exception(self, mock_load_env_file):
        self.assertRaises(exceptions.InvalidArgumentsException,
                          envutils.get_global, envutils.ENV_TASK, True)
        mock_load_env_file.assert_called_once_with(os.path.expanduser(
            "~/.rally/globals"))

    @mock.patch.dict(os.environ, values={}, clear=True)
    @mock.patch("rally.cli.envutils.fileutils.load_env_file")
    def test_get_task_id_with_none(self, mock_load_env_file):
        self.assertIsNone(envutils.get_global("RALLY_TASK"))
        mock_load_env_file.assert_called_once_with(os.path.expanduser(
            "~/.rally/globals"))

    @mock.patch.dict(os.environ,
                     values={envutils.ENV_DEPLOYMENT: "test_deployment_id"},
                     clear=True)
    @mock.patch("os.path.exists")
    @mock.patch("rally.cli.envutils.fileutils.update_env_file",
                return_value=True)
    def test_clear_global(self, mock_update_env_file, mock_path_exists):
        envutils.clear_global(envutils.ENV_DEPLOYMENT)
        mock_update_env_file.assert_called_once_with(os.path.expanduser(
            "~/.rally/globals"), envutils.ENV_DEPLOYMENT, "\n")
        self.assertEqual(os.environ, {})

    @mock.patch.dict(os.environ,
                     values={envutils.ENV_DEPLOYMENT: "test_deployment_id",
                             envutils.ENV_TASK: "test_task_id"},
                     clear=True)
    @mock.patch("os.path.exists")
    @mock.patch("rally.cli.envutils.fileutils.update_env_file",
                return_value=True)
    def test_clear_env(self, mock_update_env_file, mock_path_exists):
        envutils.clear_env()
        self.assertEqual(os.environ, {})

    @mock.patch.dict(os.environ, {"OS_AUTH_URL": "fake_auth_url",
                                  "OS_USERNAME": "fake_username",
                                  "OS_PASSWORD": "fake_password",
                                  "OS_TENANT_NAME": "fake_tenant_name",
                                  "OS_REGION_NAME": "fake_region_name",
                                  "OS_ENDPOINT": "fake_endpoint",
                                  "OS_INSECURE": "True",
                                  "OS_CACERT": "fake_cacert"})
    def test_get_creds_from_env_vars(self):
        expected_creds = {
            "auth_url": "fake_auth_url",
            "admin": {
                "username": "fake_username",
                "password": "fake_password",
                "tenant_name": "fake_tenant_name"
            },
            "endpoint": "fake_endpoint",
            "region_name": "fake_region_name",
            "https_cacert": "fake_cacert",
            "https_insecure": True
        }
        creds = envutils.get_creds_from_env_vars()
        self.assertEqual(expected_creds, creds)

    @mock.patch.dict(os.environ, {"OS_AUTH_URL": "fake_auth_url",
                                  "OS_PASSWORD": "fake_password",
                                  "OS_REGION_NAME": "fake_region_name",
                                  "OS_ENDPOINT": "fake_endpoint",
                                  "OS_INSECURE": "True",
                                  "OS_CACERT": "fake_cacert"})
    def test_get_creds_from_env_vars_when_required_vars_missing(self):
        # patch.dict without clear=True only *merges* keys into the real
        # environment, so OS_USERNAME may still be present from the
        # developer's shell and the "required variable missing" premise
        # would not hold.  Drop it for the duration of the test;
        # mock.patch.dict restores the original environment afterwards.
        if "OS_USERNAME" in os.environ:
            del os.environ["OS_USERNAME"]
        self.assertRaises(exceptions.ValidationError,
                          envutils.get_creds_from_env_vars)
|
Python
| 0.000003 |
@@ -6060,32 +6060,114 @@
_missing(self):%0A
+ if %22OS_USERNAME%22 in os.environ:%0A del os.environ%5B%22OS_USERNAME%22%5D%0A
self.ass
|
14baa70de3353975b6240d470c2406e1a889358c
|
Update project.py
|
erpnext/projects/doctype/project/project.py
|
erpnext/projects/doctype/project/project.py
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt, getdate, get_url
from frappe import _
from frappe.model.document import Document
class Project(Document):
	"""DocType controller for a Project.

	Keeps the child "tasks" table in sync with standalone Task documents
	and maintains the derived progress/costing fields.
	"""
	def get_feed(self):
		# Text shown for this document in the activity feed.
		return '{0}: {1}'.format(_(self.status), self.project_name)
	def onload(self):
		"""Load project tasks for quick view"""
		# Populate the (cleared-on-validate) child table from the
		# standalone Task documents linked to this project.
		if not self.get("tasks"):
			for task in self.get_tasks():
				self.append("tasks", {
					"title": task.subject,
					"status": task.status,
					"start_date": task.exp_start_date,
					"end_date": task.exp_end_date,
					"description": task.description,
					"task_id": task.name
				})
	def __setup__(self):
		# Delegate to onload() so the tasks table is rebuilt here too.
		self.onload()
	def get_tasks(self):
		# All Task docs linked to this project, earliest start date first.
		return frappe.get_all("Task", "*", {"project": self.name}, order_by="exp_start_date asc")
	def validate(self):
		self.validate_dates()
		self.sync_tasks()
		# The child table is only a scratch view; it is rebuilt in onload().
		self.tasks = []
		self.send_welcome_email()
	def validate_dates(self):
		# End date must not precede the start date (both optional).
		if self.expected_start_date and self.expected_end_date:
			if getdate(self.expected_end_date) < getdate(self.expected_start_date):
				frappe.throw(_("Expected End Date can not be less than Expected Start Date"))
	def sync_tasks(self):
		"""sync tasks and remove table"""
		# Mirror each child-table row into a standalone Task document,
		# then delete Tasks that no longer appear in the table.
		if self.flags.dont_sync_tasks: return
		task_names = []
		for t in self.tasks:
			if t.task_id:
				task = frappe.get_doc("Task", t.task_id)
			else:
				task = frappe.new_doc("Task")
				task.project = self.name
			task.update({
				"subject": t.title,
				"status": t.status,
				"exp_start_date": t.start_date,
				"exp_end_date": t.end_date,
				"description": t.description,
			})
			# Skip link validation / feed generation; the project drives
			# this save and holds the authoritative data.
			task.flags.ignore_links = True
			task.flags.from_project = True
			task.flags.ignore_feed = True
			task.save(ignore_permissions = True)
			task_names.append(task.name)
		# delete Task docs that are no longer present in the child table
		for t in frappe.get_all("Task", ["name"], {"project": self.name, "name": ("not in", task_names)}):
			frappe.delete_doc("Task", t.name)
		self.update_percent_complete()
		self.update_costing()
	def update_project(self):
		# Refresh derived fields and persist without re-syncing tasks.
		self.update_percent_complete()
		self.update_costing()
		self.flags.dont_sync_tasks = True
		self.save()
	def update_percent_complete(self):
		# Percent complete = share of tasks that are Closed/Cancelled.
		total = frappe.db.sql("""select count(*) from tabTask where project=%s""", self.name)[0][0]
		if total:
			completed = frappe.db.sql("""select count(*) from tabTask where
				project=%s and status in ('Closed', 'Cancelled')""", self.name)[0][0]
			self.percent_complete = flt(flt(completed) / total * 100, 2)
	def update_costing(self):
		# Aggregate submitted Time Logs and approved Expense Claims into
		# the project's costing/billing fields.
		from_time_log = frappe.db.sql("""select
			sum(costing_amount) as costing_amount,
			sum(billing_amount) as billing_amount,
			min(from_time) as start_date,
			max(to_time) as end_date,
			sum(hours) as time
			from `tabTime Log` where project = %s and docstatus = 1""", self.name, as_dict=1)[0]
		from_expense_claim = frappe.db.sql("""select
			sum(total_sanctioned_amount) as total_sanctioned_amount
			from `tabExpense Claim` where project = %s and approval_status='Approved'
			and docstatus = 1""",
			self.name, as_dict=1)[0]
		self.actual_start_date = from_time_log.start_date
		self.actual_end_date = from_time_log.end_date
		self.total_costing_amount = from_time_log.costing_amount
		self.total_billing_amount = from_time_log.billing_amount
		self.actual_time = from_time_log.time
		self.total_expense_claim = from_expense_claim.total_sanctioned_amount
		self.gross_margin = flt(self.total_billing_amount) - flt(self.total_costing_amount)
		if self.total_billing_amount:
			self.per_gross_margin = (self.gross_margin / flt(self.total_billing_amount)) *100
	def update_purchase_costing(self):
		total_purchase_cost = frappe.db.sql("""select sum(base_net_amount)
			from `tabPurchase Invoice Item` where project = %s and docstatus=1""", self.name)
		# old-style conditional: falls back to 0 when the query yields no value
		self.total_purchase_cost = total_purchase_cost and total_purchase_cost[0][0] or 0
	def send_welcome_email(self):
		# Invite each project user (once) to the project's web view.
		url = get_url("/project/?name={0}".format(self.name))
		messages = (
			_("You have been invited to collaborate on the project: {0}".format(self.name)),
			url,
			_("Join")
		)
		content = """
		<p>{0}.</p>
		<p><a href="{1}">{2}</a></p>
		"""
		for user in self.users:
			if user.welcome_email_sent==0:
				frappe.sendmail(user.user, subject=_("Project Collaboration Invitation"), content=content.format(*messages), bulk=True)
				user.welcome_email_sent=1
def get_project_list(doctype, txt, filters, limit_start, limit_page_length=20):
	# Projects visible to the session user: those where they are listed
	# as a Project User, plus those they own; newest-modified first.
	# NOTE(review): limit_start/limit_page_length are str.format-ed into
	# the SQL rather than parameterized -- assumes the website list
	# framework always passes integers; verify at the call site.
	return frappe.db.sql('''select distinct project.*
		from tabProject project, `tabProject User` project_user
		where
			(project_user.user = %(user)s
			and project_user.parent = project.name)
			or project.owner = %(user)s
		order by project.modified desc
		limit {0}, {1}
		'''.format(limit_start, limit_page_length),
		{'user':frappe.session.user},
		as_dict=True,
		update={'doctype':'Project'})
def get_list_context(context=None):
	"""Website list-view settings for projects: page title, the row
	fetcher, and the row template."""
	list_context = {
		"title": _("My Projects"),
		"get_list": get_project_list,
		"row_template": "templates/includes/projects/project_row.html"
	}
	return list_context
@frappe.whitelist()
def get_cost_center_name(project):
	"""Return the Cost Center configured on the given Project."""
	cost_center = frappe.db.get_value("Project", project, "cost_center")
	return cost_center
|
Python
| 0.000001 |
@@ -2224,16 +2224,41 @@
lf.save(
+ignore_permissions = True
)%0A%0A%09def
|
d254428e484172ccd0a0763eb989241b08a26c3b
|
string compress kata second day
|
string-compress-kata/day-2.py
|
string-compress-kata/day-2.py
|
Python
| 0.999077 |
@@ -0,0 +1,1316 @@
+# -*- codeing: utf-8 -*-%0A%0Aclass Compressor(object):%0A%0A def compress(self, toCompress):%0A if toCompress is None:%0A return %22%22%0A else:%0A compressed = %5B%5D%0A index = 0%0A length = len(toCompress)%0A while index %3C length:%0A counter = 1%0A index += 1%0A while index %3C length and toCompress%5Bindex%5D == toCompress%5Bindex - 1%5D:%0A counter += 1%0A index += 1%0A compressed.append(str(counter))%0A compressed.append(toCompress%5Bindex - 1%5D)%0A return ''.join(compressed)%0A%0A%0Aimport unittest%0A%0A%0Aclass StringComperssorTest(unittest.TestCase):%0A%0A def setUp(self):%0A self.compressor = Compressor()%0A%0A def test_none_compresses_to_empty_string(self):%0A self.assertEqual(%22%22, self.compressor.compress(None))%0A%0A def test_one_char_string(self):%0A self.assertEqual(%221a%22, self.compressor.compress(%22a%22))%0A%0A def test_string_of_unique_chars(self):%0A self.assertEqual(%221a1b1c%22, self.compressor.compress(%22abc%22))%0A%0A def test_string_of_duobled_chars(self):%0A self.assertEqual(%222a2b2c%22, self.compressor.compress(%22aabbcc%22))%0A%0A def test_empty_string_compressed_into_empty_string(self):%0A self.assertEqual(%22%22, self.compressor.compress(%22%22))%0A
|
|
a81f39089b4c60e2cb05ea892afacbcbea6f1c5d
|
add tests for oxml_parser
|
tests/oxml/test___init__.py
|
tests/oxml/test___init__.py
|
Python
| 0.000001 |
@@ -0,0 +1,1355 @@
+# encoding: utf-8%0A%0A%22%22%22%0ATest suite for pptx.oxml.__init__.py module, primarily XML parser-related.%0A%22%22%22%0A%0Afrom __future__ import print_function, unicode_literals%0A%0Aimport pytest%0A%0Afrom lxml import etree, objectify%0A%0Afrom pptx.oxml import oxml_parser%0A%0A%0Aclass DescribeOxmlParser(object):%0A%0A def it_enables_objectified_xml_parsing(self, xml_bytes):%0A foo = objectify.fromstring(xml_bytes, oxml_parser)%0A assert foo.bar == 'foobar'%0A%0A def it_strips_whitespace_between_elements(self, foo, stripped_xml_bytes):%0A xml_bytes = etree.tostring(foo)%0A assert xml_bytes == stripped_xml_bytes%0A%0A%0A# ===========================================================================%0A# fixtures%0A# ===========================================================================%0A%[email protected]%0Adef foo(xml_bytes):%0A return objectify.fromstring(xml_bytes, oxml_parser)%0A%0A%[email protected]%0Adef stripped_xml_bytes():%0A return (%0A '%3Ca:foo xmlns:a=%22http://schemas.openxmlformats.org/drawingml/2006/ma'%0A 'in%22%3E%3Ca:bar%3Efoobar%3C/a:bar%3E%3C/a:foo%3E'%0A ).encode('utf-8')%0A%0A%[email protected]%0Adef xml_bytes():%0A return (%0A '%3C?xml version=%221.0%22 encoding=%22UTF-8%22 standalone=%22yes%22?%3E%5Cn'%0A '%3Ca:foo xmlns:a=%22http://schemas.openxmlformats.org/drawingml/2006/ma'%0A 'in%22%3E%5Cn'%0A ' %3Ca:bar%3Efoobar%3C/a:bar%3E%5Cn'%0A '%3C/a:foo%3E%5Cn'%0A ).encode('utf-8')%0A
|
|
f8c3feaf3f400cbcf3e04d9705f0cb36d083c6d7
|
Include migratio for ProductPlan.
|
conductor/accounts/migrations/0012_productplan.py
|
conductor/accounts/migrations/0012_productplan.py
|
Python
| 0 |
@@ -0,0 +1,819 @@
+# Generated by Django 2.0.9 on 2018-11-08 02:50%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B(%22accounts%22, %220011_auto_20180831_0320%22)%5D%0A%0A operations = %5B%0A migrations.CreateModel(%0A name=%22ProductPlan%22,%0A fields=%5B%0A (%0A %22id%22,%0A models.AutoField(%0A auto_created=True,%0A primary_key=True,%0A serialize=False,%0A verbose_name=%22ID%22,%0A ),%0A ),%0A (%22active%22, models.BooleanField(default=False)),%0A (%22stripe_plan_id%22, models.CharField(max_length=32)),%0A (%22trial_days%22, models.IntegerField(default=0)),%0A %5D,%0A )%0A %5D%0A
|
|
4db13bdab18934bebcfe5b102044f936e0eab892
|
Add a place to put random stuff and a list of components as a python module.
|
etc/component_list.py
|
etc/component_list.py
|
Python
| 0 |
@@ -0,0 +1,1510 @@
+COMPONENTS = %5B%09%0A %22AdaBoost%22,%0A%09%22AutoInvert%22,%0A%09%22AutoMlpClassifier%22,%0A%09%22BiggestCcExtractor%22,%0A%09%22BinarizeByHT%22,%0A%09%22BinarizeByOtsu%22,%0A%09%22BinarizeByRange%22,%0A%09%22BinarizeBySauvola%22,%0A%09%22BitDataset%22,%0A%09%22BitNN%22,%0A%09%22BookStore%22,%0A%09%22CascadedMLP%22,%0A%09%22CenterFeatureMap%22,%0A%09%22ConnectedComponentSegmenter%22,%0A%09%22CurvedCutSegmenter%22,%0A%09%22CurvedCutWithCcSegmenter%22,%0A%09%22Degradation%22,%0A%09%22DeskewGrayPageByRAST%22,%0A%09%22DeskewPageByRAST%22,%0A%09%22DocClean%22,%0A%09%22DpSegmenter%22,%0A%09%22EnetClassifier%22,%0A%09%22EuclideanDistances%22,%0A%09%22KnnClassifier%22,%0A%09%22LatinClassifier%22,%0A%09%22Linerec%22,%0A%09%22LinerecExtracted%22,%0A%09%22MetaLinerec%22,%0A%09%22NullLinerec%22,%0A%09%22OcroFST%22,%0A%09%22OldBookStore%22,%0A%09%22PageFrameRAST%22,%0A%09%22Pages%22,%0A%09%22RaggedDataset8%22,%0A%09%22RaveledExtractor%22,%0A%09%22RmBig%22,%0A%09%22RmHalftone%22,%0A%09%22RmUnderline%22,%0A%09%22RowDataset8%22,%0A%09%22ScaledImageExtractor%22,%0A%09%22SegmentLineByCCS%22,%0A%09%22SegmentLineByGCCS%22,%0A%09%22SegmentLineByProjection%22,%0A%09%22SegmentPageBy1CP%22,%0A%09%22SegmentPageByMorphTrivial%22,%0A%09%22SegmentPageByRAST%22,%0A%09%22SegmentPageByRAST1%22,%0A%09%22SegmentPageByVORONOI%22,%0A%09%22SegmentPageByXYCUTS%22,%0A%09%22SegmentWords%22,%0A%09%22SimpleFeatureMap%22,%0A%09%22SimpleGrouper%22,%0A%09%22SkelSegmenter%22,%0A%09%22SmartBookStore%22,%0A%09%22SqliteBuffer%22,%0A%09%22SqliteDataset%22,%0A%09%22StandardExtractor%22,%0A%09%22StandardGrouper%22,%0A%09%22StandardPreprocessing%22,%0A%09%22TextImageSegByLogReg%22,%0A%09%22adaboost%22,%0A%09%22biggestcc%22,%0A%09%22bitdataset%22,%0A%09%22bitnn%22,%0A%09%22cfmap%22,%0A%09%22cmlp%22,%0A%09%22dpseg%22,%0A%09%22edist%22,%0A%09%22enet%22,%0A%09%22knn%22,%0A%09%22latin%22,%0A%09%22linerec%22,%0A%09%22linerec_extracted%22,%0A%09%22mappedmlp%22,%0A%09%22metalinerec%22,%0A%09%22mlp%22,%0A%09%22nulllinerec%
22,%0A%09%22raggeddataset8%22,%0A%09%22raveledfe%22,%0A%09%22rowdataset8%22,%0A%09%22scaledfe%22,%0A%09%22sfmap%22,%0A%09%22simplegrouper%22,%0A%09%22sqlitebuffer%22,%0A%09%22sqliteds%22,%0A%5D%0A
|
|
f7aeb7a708ef2e40546d27d480073fdc113d639e
|
Add check_babel_syntax ; see note below
|
unnaturalcode/check_babel_syntax.py
|
unnaturalcode/check_babel_syntax.py
|
Python
| 0 |
@@ -0,0 +1,2414 @@
+#!/usr/bin/python%0A# Copyright 2017 Dhvani Patel%0A#%0A# This file is part of UnnaturalCode.%0A# %0A# UnnaturalCode is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A#%0A# UnnaturalCode is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the GNU Affero General Public License%0A# along with UnnaturalCode. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0A# Takes in a string of JavaScript code and checks for errors%0A# NOTE: FOR BABEL%0A%0Aimport os%0Aimport subprocess%0Aimport sys%0Aimport tempfile%0Afrom compile_error import CompileError%0A%0A%0A# Method for finding index of certain characters in a string, n being the n'th occurence of the character/string%0Adef find_nth(haystack, needle, n):%0A start = haystack.find(needle)%0A while start %3E= 0 and n %3E 1:%0A start = haystack.find(needle, start+len(needle))%0A n -= 1%0A return start%09%0A%0A# Main method%0Adef checkBabelSyntax(src):%0A%09%09myFile = open(%22toCheck.js%22, %22w%22)%0A%09%09myFile.write(src)%0A%09%09myFile.close()%0A%09%09proc = subprocess.Popen(%5B'node_modules/.bin/babel', 'toCheck.js', '-o', '/dev/null'%5D, stderr=subprocess.PIPE)%0A%09%09streamdata, err = proc.communicate()%0A%09%09rc = proc.returncode%0A%09%09if rc == 0:%0A%09%09%09# No errors, all good%0A%09%09%09os.remove(%22toCheck.js%22)%0A%09%09%09return None%0A%09%09else:%0A%09%09%09# Error, disect data for constructor%09%09%0A%09%09%09colonFirInd = find_nth(err, ':', 1)%0A%09%09%09colonSecInd = find_nth(err, ':', 2)%0A%09%09%09colonThirInd = find_nth(err, ':', 3)%09%09%0A%09%09%09lineBegin = find_nth(err, '(', 
1)%09%09%0A%09%09%09lineEnd = find_nth(err, ')', 1)%09%0A%0A%09%09%09fileName = err%5BcolonFirInd+2:colonSecInd%5D%0A%09%09%09line = int(err%5BlineBegin+1:colonThirInd%5D)%0A%09%09%09column = int(err%5BcolonThirInd+1:lineEnd%5D)%0A%0A%09%09%09errorname = err%5B0:colonFirInd%5D%0A%0A%09%09%09flagStart = find_nth(err, '%3E', 1)%0A%0A%09%09%09temp = err%5BflagStart:%5D%0A%09%09%09ind = find_nth(temp, '%5Cn', 1)%0A%0A%09%09%09textBefore = err%5BcolonSecInd+2:lineBegin-1%5D%0A%09%09%09textAfter = err%5BflagStart+26:flagStart+ind%5D%0A%09%09%09text = textBefore + ' ' + textAfter%0A%09%09%09%0A%09%09%09errorObj = CompileError(fileName, line, column, None, text, errorname)%0A%09%09%09os.remove(%22toCheck.js%22)%0A%09%09%09return %5BerrorObj%5D%09%0A%0A
|
|
b29fe95eb2cb86a7ae9170fbf8ceb2533bc84578
|
Add the photo.index module (with minimal functionality so far).
|
photo/index.py
|
photo/index.py
|
Python
| 0 |
@@ -0,0 +1,2324 @@
+%22%22%22Provide the class Index which represents an index of photos.%0A%22%22%22%0A%0Aimport os%0Aimport os.path%0Aimport fnmatch%0Afrom collections import MutableSequence%0Aimport yaml%0A%0A%0Aclass Index(MutableSequence):%0A%0A defIdxFilename = %22.index.yaml%22%0A%0A def __init__(self, idxfile=None, imgdir=None):%0A super(Index, self).__init__()%0A self.directory = None%0A self.idxfilename = None%0A self.items = %5B%5D%0A if idxfile:%0A self.read(idxfile)%0A elif imgdir:%0A self.readdir(imgdir)%0A%0A def __len__(self):%0A return len(self.items)%0A%0A def __getitem__(self, index):%0A return self.items.__getitem__(index)%0A%0A def __setitem__(self, index, value):%0A self.items.__setitem__(index, value)%0A%0A def __delitem__(self, index):%0A self.items.__delitem__(index)%0A%0A def insert(self, index, value):%0A self.items.insert(index, value)%0A%0A def _idxfilename(self, idxfile):%0A %22%22%22Determine the index file name for reading and writing.%0A %22%22%22%0A if idxfile is not None:%0A return os.path.abspath(idxfile)%0A elif self.idxfilename is not None:%0A return self.idxfilename%0A else:%0A d = self.directory if self.directory is not None else os.getcwd()%0A return os.path.abspath(os.path.join(d, self.defIdxFilename))%0A%0A def readdir(self, imgdir):%0A %22%22%22Create a new index of all image files in a directory.%0A %22%22%22%0A self.directory = os.path.abspath(imgdir)%0A self.items = %5B%5D%0A for f in sorted(os.listdir(self.directory)):%0A if (os.path.isfile(os.path.join(self.directory,f)) and %0A fnmatch.fnmatch(f, '*.jpg')):%0A self.items.append(%7B'filename':f, 'tags':%5B%5D%7D)%0A%0A def read(self, idxfile=None):%0A %22%22%22Read the index from a file.%0A %22%22%22%0A self.idxfilename = self._idxfilename(idxfile)%0A self.directory = os.path.dirname(self.idxfilename)%0A with open(self.idxfilename, 'rt') as f:%0A self.items = yaml.load(f)%0A%0A def write(self, idxfile=None):%0A %22%22%22Write the index to a file.%0A %22%22%22%0A self.idxfilename = 
self._idxfilename(idxfile)%0A self.directory = os.path.dirname(self.idxfilename)%0A with open(self.idxfilename, 'wt') as f:%0A yaml.dump(self.items, f, default_flow_style=False)%0A%0A
|
|
eff993eac0924299cd273d0c582e24c57f2c4a84
|
Add 263-ugly-number.py
|
263-ugly-number.py
|
263-ugly-number.py
|
Python
| 0.999999 |
@@ -0,0 +1,1457 @@
+%22%22%22%0AQuestion:%0A Ugly Number%0A%0A Write a program to check whether a given number is an ugly number.%0A%0A Ugly numbers are positive numbers whose prime factors only include 2, 3, 5. For example, 6, 8 are ugly while 14 is not ugly since it includes another prime factor 7.%0A%0A Note that 1 is typically treated as an ugly number.%0A%0A Credits:%0A Special thanks to @jianchao.li.fighter for adding this problem and creating all test cases.%0A%0APerformance:%0A 1. Total Accepted: 19816 Total Submissions: 60714 Difficulty: Easy%0A 2. Your runtime beats 60.64%25 of python submissions.%0A%22%22%22%0A%0A%0Aclass Solution(object):%0A def isUgly(self, num):%0A %22%22%22%0A :type num: int%0A :rtype: bool%0A %22%22%22%0A if num == 0:%0A return False%0A%0A for ugly_divisor in %5B2, 3, 5%5D:%0A while (num %25 ugly_divisor) == 0:%0A num /= ugly_divisor%0A return num == 1%0A%0Aassert Solution().isUgly(0) is False%0Aassert Solution().isUgly(1) is True%0Aassert Solution().isUgly(2) is True%0Aassert Solution().isUgly(3) is True%0Aassert Solution().isUgly(4) is True%0Aassert Solution().isUgly(5) is True%0Aassert Solution().isUgly(6) is True%0Aassert Solution().isUgly(7) is False%0Aassert Solution().isUgly(8) is True%0Aassert Solution().isUgly(9) is True%0Aassert Solution().isUgly(10) is True%0Aassert Solution().isUgly(11) is False%0Aassert Solution().isUgly(12) is True%0Aassert Solution().isUgly(14) is False%0Aassert Solution().isUgly(-2147483648) is False%0A
|
|
cd44e4a62e8c8f8ddba0634ccc0bb157f7745726
|
add 129
|
vol3/129.py
|
vol3/129.py
|
Python
| 0.999994 |
@@ -0,0 +1,266 @@
+def A(n):%0A if n %25 5 == 0:%0A return 1%0A x = 1%0A ret = 1%0A while x != 0:%0A x = (x * 10 + 1) %25 n%0A ret += 1%0A return ret%0A%0Aif __name__ == %22__main__%22:%0A LIMIT = 10 ** 6%0A i = LIMIT + 1%0A while A(i) %3C= LIMIT:%0A i += 2%0A print i %0A
|
|
a8663257ad4b4d0688c54d0e94949ab602c61561
|
allow validation for empty/missing @brief.
|
util/py_lib/seqan/dox/validation.py
|
util/py_lib/seqan/dox/validation.py
|
#!/usr/env/bin python
"""Some validation for proc_doc.Proc*"""
__author__ = 'Manuel Holtgrewe <[email protected]>'
class ProcDocValidator(object):
    """Base class for proc_doc.Proc* validators (visitor pattern).

    Subclasses override validate() and report any problems through the
    message printer supplied at construction time.
    """

    def __init__(self, msg_printer):
        # Sink used by subclasses to emit warnings for an entry's tokens.
        self.msg_printer = msg_printer

    def validate(self, proc_entry):
        """Do nothing; concrete subclasses implement the actual checks."""
        pass
class MissingSignatureValidator(ProcDocValidator):
    """Validates for missing or empty signature."""

    def validate(self, proc_entry):
        skipped_kinds = ('variable', 'member_variable', 'tag', 'grouped_tag',
                         'typedef', 'grouped_typedef', 'signature', 'concept',
                         'member_typedef', 'enum', 'grouped_enum')
        if not hasattr(proc_entry, 'signatures'):
            return  # Entry type carries no signatures at all.
        if proc_entry.kind in skipped_kinds:
            return  # These kinds legitimately have no @signature.
        if proc_entry.signatures:
            return  # At least one signature given; nothing to report.
        self.msg_printer.printTokenError(proc_entry.raw.first_token,
                                         'Missing @signature for this entry!',
                                         'warning')
class MissingParameterDescriptionValidator(ProcDocValidator):
    """Warns if the description is missing for a @param or @return."""

    # Attribute names that hold parameter-like members.
    _KEYS = ('params', 'tparams', 'returns')

    def validate(self, proc_entry):
        if not any(hasattr(proc_entry, k) for k in self._KEYS):
            return  # Entry has no parameter-like members at all.
        # Pass 1: members whose own name or type is missing.
        for key in self._KEYS:
            for item in getattr(proc_entry, key, []):
                if hasattr(item, 'name') and not item.name:
                    text = 'Missing name for @%s' % key[:-1]
                elif hasattr(item, 'type') and not item.type:
                    text = 'Missing type for @%s' % key[:-1]
                else:
                    continue  # Name/type present; nothing to report here.
                self.msg_printer.printTokenError(item.raw.first_token,
                                                 text, 'warning')
        # Pass 2: members whose description body is empty.
        for key in self._KEYS:
            for item in getattr(proc_entry, key, []):
                if item.desc.empty:
                    self.msg_printer.printTokenError(
                        item.raw.first_token,
                        'Missing description for @%s' % key[:-1], 'warning')
class ReturnVoidValidator(ProcDocValidator):
    """Warns if there is a (superflous) @return void entry."""

    def validate(self, proc_entry):
        # Entries without a returns member are not our concern.
        for ret in getattr(proc_entry, 'returns', ()):
            if ret.type != 'void':
                continue
            self.msg_printer.printTokenError(
                ret.raw.first_token,
                '@return superflous for "void" type -- simply show "void" in signature.',
                'warning')
# Validator classes applied to every entry; the no-op base class
# ProcDocValidator is intentionally not listed.
VALIDATORS = [MissingSignatureValidator,
              MissingParameterDescriptionValidator,
              ReturnVoidValidator]
|
Python
| 0 |
@@ -2939,16 +2939,580 @@
ning')%0A%0A
+%0Aclass EmptyBriefValidator(ProcDocValidator):%0A %22%22%22Warns if there is no non-empty @brief section for an entry.%22%22%22%0A%0A def validate(self, proc_entry):%0A IGNORED = %5B'mainpage', 'page'%5D%0A if proc_entry.kind in IGNORED:%0A return # Skip.%0A if not hasattr(proc_entry, 'brief'):%0A return # Skip if type has no returns member.%0A if not proc_entry.brief or proc_entry.brief.empty:%0A msg = 'Missing non-empty @brief clause.'%0A self.msg_printer.printTokenError(proc_entry.raw.first_token, msg, 'warning')%0A%0A
# Array
@@ -3672,10 +3672,45 @@
alidator
+,%0A EmptyBriefValidator
%5D%0A
|
0ede4e22370a3f8217fee8ff995a9c7057d8b00b
|
Add test for redis test helper
|
vumi_http_retry/tests/test_redis.py
|
vumi_http_retry/tests/test_redis.py
|
Python
| 0 |
@@ -0,0 +1,910 @@
+import json%0A%0Afrom twisted.trial.unittest import TestCase%0Afrom twisted.internet.defer import inlineCallbacks%0A%0Afrom vumi_http_retry.tests.redis import create_client, zitems%0A%0A%0Aclass TestRedis(TestCase):%0A @inlineCallbacks%0A def setUp(self):%0A self.redis = yield create_client()%0A%0A @inlineCallbacks%0A def tearDown(self):%0A yield self.redis.delete('foo')%0A yield self.redis.transport.loseConnection()%0A%0A @inlineCallbacks%0A def test_add_request(self):%0A self.assertEqual((yield zitems(self.redis, 'foo')), %5B%5D)%0A%0A yield self.redis.zadd('foo', 1, json.dumps(%7B'bar': 23%7D))%0A%0A self.assertEqual((yield zitems(self.redis, 'foo')), %5B%0A (1, %7B'bar': 23%7D),%0A %5D)%0A%0A yield self.redis.zadd('foo', 2, json.dumps(%7B'baz': 42%7D))%0A%0A self.assertEqual((yield zitems(self.redis, 'foo')), %5B%0A (1, %7B'bar': 23%7D),%0A (2, %7B'baz': 42%7D),%0A %5D)%0A
|
|
d0237f2b77a49933a4b22b43f967e414be196ff4
|
Add sysmod module to replace old introspection modules
|
salt/modules/sysmod.py
|
salt/modules/sysmod.py
|
Python
| 0 |
@@ -0,0 +1,1480 @@
+'''%0AThe sys module provides information about the available functions on the%0Aminion.%0A'''%0A%0Adef __virtual__():%0A '''%0A Return as sys%0A '''%0A return 'sys'%0A%0A%0Adef doc(module=''):%0A '''%0A Return the docstrings for all modules, these strings are aggregated into%0A a single document on the master for easy reading.%0A%0A CLI Example::%0A%0A salt %5C* sys.doc%0A '''%0A docs = %7B%7D%0A for fun in __salt__:%0A if fun.startswith(module):%0A docs%5Bfun%5D = __salt__%5Bfun%5D.__doc__%0A return docs%0A%0A%0Adef list_functions(module=''):%0A '''%0A List the functions. Optionally, specify a module to list from.%0A%0A CLI Example::%0A%0A salt %5C* sys.list_functions%0A '''%0A names = set()%0A for func in __salt__:%0A if module:%0A if func.startswith('%7B0%7D.'.format(module)):%0A names.add(func)%0A else:%0A names.add(func)%0A return sorted(names)%0A%0A%0Adef list_modules():%0A '''%0A List the modules loaded on the minion%0A%0A CLI Example::%0A%0A salt %5C* sys.list_modules%0A '''%0A modules = set()%0A for func in __salt__:%0A comps = func.split('.')%0A if len(comps) %3C 2:%0A continue%0A modules.add(comps%5B0%5D)%0A return sorted(modules)%0A%0A%0Adef reload_modules():%0A '''%0A Tell the minion to reload the execution modules%0A%0A CLI Example::%0A%0A salt %5C* sys.reload_modules%0A '''%0A # This is handled inside the minion.py file, the function is caught before%0A # it ever gets here%0A return True%0A
|
|
fa55ceb71ff254f8ed3413a35acfe20da7c03a91
|
Create BT Comm wrapper class
|
rxbtcomm.py
|
rxbtcomm.py
|
Python
| 0 |
@@ -0,0 +1,1613 @@
+#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0A# Copyright (c) 2016 F Dou%[email protected]%3E%0A# See LICENSE for details.%0A%0A%0Aimport bluetooth%0Aimport logging%0A%0Aclass RxBtComm(object):%0A %22%22%22BT communication wrapper:%0A%0A Attributes:%0A addy: A string representing the device address.%0A name: A string representing the device name.%0A %22%22%22%0A logging.basicConfig(level=logging.DEBUG)%0A%0A def __init__(self, addr, name=None):%0A %22%22%22Return a RxBtComm object%0A param *addr* device address%0A param *name* device name%0A %22%22%22%0A self.addr = addr%0A self.name = name%0A self.sock = None%0A%0A %22%22%22connect:%0A Connect to BT addr%0A %22%22%22%0A def connect(self):%0A try:%0A port = 1%0A self.sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )%0A self.sock.connect((self.addr, port))%0A return bluetooth.lookup_name(self.addr)%0A except Exception as e:%0A logging.exception(e)%0A return ''%0A%0A %22%22%22disconnect:%0A Disconnect from BT addr%0A %22%22%22%0A def disconnect(self):%0A try:%0A self.sock.close()%0A except Exception as e:%0A logging.exception(e)%0A self.sock = None%0A%0A %22%22%22send:%0A Send a command to host%0A %22%22%22%0A def send(self, cmd):%0A self.sock.send(cmd)%0A%0A %22%22%22recieve:%0A Recieve a response from host%0A %22%22%22%0A def recieve(self):%0A self.sock.recieve(cmd)%0A%0A### Replace xx:xx:xx:xx:xx:xx with your test device address%0A#test = RXComm('xx:xx:xx:xx:xx:xx', 'Test Device')%0A#test.connect()%0A#test.send('date')%0A#test.disconnect()%0A%0A
|
|
4053aa99100e2fdc1a342a472492f53138a66d6b
|
Add internal utils module
|
pies/_utils.py
|
pies/_utils.py
|
Python
| 0.000001 |
@@ -0,0 +1,2341 @@
+%22%22%22%0A pies/_utils.py%0A%0A Utils internal to the pies library and not meant for direct external usage.%0A%0A Copyright (C) 2013 Timothy Edmund Crosley%0A%0A Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated%0A documentation files (the %22Software%22), to deal in the Software without restriction, including without limitation%0A the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and%0A to permit persons to whom the Software is furnished to do so, subject to the following conditions:%0A%0A The above copyright notice and this permission notice shall be included in all copies or%0A substantial portions of the Software.%0A%0A THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED%0A TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL%0A THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF%0A CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR%0A OTHER DEALINGS IN THE SOFTWARE.%0A%22%22%22%0A%0A%0Adef with_metaclass(meta, *bases):%0A %22%22%22%0A Enables use of meta classes across Python Versions.%0A taken from jinja2/_compat.py%0A%0A Use it like this::%0A%0A class BaseForm(object):%0A pass%0A%0A class FormType(type):%0A pass%0A%0A class Form(with_metaclass(FormType, BaseForm)):%0A pass%0A %22%22%22%0A class metaclass(meta):%0A __call__ = type.__call__%0A __init__ = type.__init__%0A def __new__(cls, name, this_bases, d):%0A if this_bases is None:%0A return type.__new__(cls, name, (), d)%0A return meta(name, bases, d)%0A return metaclass('temporary_class', None, %7B%7D)%0A%0A%0Adef unmodified_isinstance(*bases):%0A %22%22%22%0A When called in the form MyOverrideClass(unmodified_isinstance(BuiltInClass))%0A%0A it allows calls against 
passed in built in instances to pass even if there not a subclass%0A %22%22%22%0A class UnmodifiedIsInstance(type):%0A def __instancecheck__(cls, instance):%0A return isinstance(instance, bases)%0A%0A return with_metaclass(UnmodifiedIsInstance, *bases)%0A
|
|
b52b4cb39029d55a06e15b527cb4789e2988093d
|
Add word2vec example
|
word2vec.py
|
word2vec.py
|
Python
| 0.002897 |
@@ -0,0 +1,1191 @@
+from pyspark.sql import SparkSession%0Afrom pyspark.ml.feature import Word2Vec%0A%0Adef main():%0A spark = SparkSession.builder %5C%0A .appName(%22Spark CV-job ad matching%22) %5C%0A .config(%22spark.some.config.option%22, %22some-value%22) %5C%0A .master(%22local%5B*%5D%22) %5C%0A .getOrCreate()%0A%0A%0A # Input data: Each row is a bag of words from a sentence or document.%0A documentDF = spark.createDataFrame(%5B%0A (%22Hi I heard about Spark%22.split(%22 %22), ),%0A (%22I wish Java could use case classes%22.split(%22 %22), ),%0A (%22Logistic regression models are neat%22.split(%22 %22), )%0A %5D, %5B%22text%22%5D)%0A%0A documentDF2 = spark.createDataFrame(%5B%0A (%22Hi I heard about Spark%22.split(%22 %22), ),%0A (%22I wish Java could use case classes%22.split(%22 %22), )%0A %5D, %5B%22text%22%5D)%0A%0A # Learn a mapping from words to Vectors.%0A word2Vec = Word2Vec(vectorSize=3, minCount=0, inputCol=%22text%22, outputCol=%22result%22)%0A model = word2Vec.fit(documentDF)%0A model2 = word2Vec.fit(documentDF2)%0A%0A result = model.transform(documentDF)%0A for row in result.collect():%0A text, vector = row%0A print(%22Text: %5B%25s%5D =%3E %5CnVector: %25s%5Cn%22 %25 (%22, %22.join(text), str(vector)))%0A%0A%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
9607c55eacfd58704a4e83a2476471aa2da6124c
|
add package py-doxypypy (#3284)
|
var/spack/repos/builtin/packages/py-doxypypy/package.py
|
var/spack/repos/builtin/packages/py-doxypypy/package.py
|
Python
| 0 |
@@ -0,0 +1,1635 @@
+##############################################################################%0A# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/llnl/spack%0A# Please also see the LICENSE file for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass PyDoxypypy(PythonPackage):%0A %22%22%22A Doxygen filter for Python.%0A%0A A more Pythonic version of doxypy, a Doxygen filter for Python.%0A %22%22%22%0A%0A homepage = %22https://github.com/Feneric/doxypypy%22%0A url = %22https://pypi.io/packages/source/d/doxypypy/doxypypy-0.8.8.6.tar.gz%22%0A%0A version('0.8.8.6', '6b3fe4eff5d459400071b626333fe15f')%0A%0A depends_on('py-setuptools', type='build')%0A
|
|
83fa6b23563903192376a3419b460b9b06479248
|
Add procstat.py
|
src/procstat.py
|
src/procstat.py
|
Python
| 0 |
@@ -0,0 +1,1714 @@
+import os.path%0Aimport argparse%0Aimport logging%0A%0Alogging.basicConfig(level=logging.DEBUG)%0A%0A%0Aclass Procstat():%0A%09PROCSTATPATH = '/proc/%25d/stat'%0A%09STATLIST = (%0A%09%09'pid',%0A%09%09'comm',%0A%09%09'state',%0A%09%09'ppid',%0A%09%09'pgrp',%0A%09%09'session',%0A%09%09'tty_nr',%0A%09%09'tpgid',%0A%09%09'flags',%0A%09%09'minflt',%0A%09%09'cminflt',%0A%09%09'mjflt',%0A%09%09'cmajflt',%0A%09%09'utime',%0A%09%09'stime',%0A%09%09'cutime',%0A%09%09'cstime',%0A%09%09'priority',%0A%09%09'nice',%0A%09%09'num_threads',%0A%09%09'itrealvalue',%0A%09%09'starttime',%0A%09%09'vsize',%0A%09%09'rss',%0A%09%09'rsslim',%0A%09%09'startcode',%0A%09%09'endcode',%0A%09%09'startstack',%0A%09%09'kstkesp',%0A%09%09'kstkeip',%0A%09%09'signal',%0A%09%09'blocked',%0A%09%09'sigignore',%0A%09%09'sigcatch',%0A%09%09'wchan',%0A%09%09'nswap',%0A%09%09'cnswap',%0A%09%09'exit_signal',%0A%09%09'processor',%0A%09%09'rt_priority',%0A%09%09'policy',%0A%09%09'delayacct_blkio_ticks',%0A%09%09'guest_time',%0A%09%09'cguest_time')%0A%0A%09def __init__(self, pid):%0A%09%09fstat = self.PROCSTATPATH %25 args.pid%0A%0A%09%09if not os.path.exists(fstat):%0A%09%09 logging.error('PID is not valid')%0A%09%09 return None%0A%0A%09%09with open(fstat, 'r') as f:%0A%09%09 procStat = f.readline().split()%0A%0A%09%09self.stat = %7B%7D%0A%09%09for i in range(len(self.STATLIST)):%0A%09%09 self.stat%5Bself.STATLIST%5Bi%5D%5D = procStat%5Bi%5D%0A%0A%09%09strComm = self.stat%5B'comm'%5D%0A%09%09self.stat%5B'comm'%5D = str(strComm%5B1:len(strComm) - 1%5D)%0A%0A%09def __str__(self):%0A%09%09rl = ''%0A%09%09for i in self.STATLIST:%0A%09%09 rl += '%25s(%25s)' %25 (i, self.stat%5Bi%5D)%0A%0A%09%09return rl%0A%0A%09def getStat(self, name):%0A%09%09return self.stat%5Bname%5D if self.stat%5Bname%5D else ''%0A%0A%09def printStat(self, readable=False):%0A%09%09l = ''%0A%09%09for i in self.STATLIST:%0A%09%09%09v = self.stat%5Bi%5D%0A%09%09%09%0A%09%09%09l += '%25-12s : %25s%5Cn' %25 (i, v)%0A%0A%09%09print(l)%0A%0Aif 
__name__ == '__main__':%0A%09parser = argparse.ArgumentParser(description='Process stat information parser')%0A%09parser.add_argument('pid', type=int, help='Pid')%0A%0A%09args = parser.parse_args()%0A%0A%09pstat = Procstat(args.pid)%0A%09pstat.printStat()%0A
|
|
0728e6a4f8f06e1d4d137259f76796d1dbfa1a9d
|
add a wsgi.py that eagerly reads in POSTdata
|
edx_ora/wsgi_eager.py
|
edx_ora/wsgi_eager.py
|
Python
| 0 |
@@ -0,0 +1,2185 @@
+%22%22%22%0AWSGI config for ora project.%0A%0AThis module contains the WSGI application used by Django's development server%0Aand any production WSGI deployments. It should expose a module-level variable%0Anamed %60%60application%60%60. Django's %60%60runserver%60%60 and %60%60runfcgi%60%60 commands discover%0Athis application via the %60%60WSGI_APPLICATION%60%60 setting.%0A%0AUsually you will have the standard Django WSGI application here, but it also%0Amight make sense to replace the whole Django WSGI application with a custom one%0Athat later delegates to the Django one. For example, you could introduce WSGI%0Amiddleware here, or combine a Django application with an application of another%0Aframework.%0A%0A%22%22%22%0Aimport os%0A%0Aos.environ.setdefault(%22DJANGO_SETTINGS_MODULE%22, %22edx_ora.settings%22)%0A%0A# This application object is used by any WSGI server configured to use this%0A# file. This includes Django's development server, if the WSGI_APPLICATION%0A# setting points here.%0Afrom django.core.wsgi import WSGIHandler%0A%0A%0Aclass ForceReadPostHandler(WSGIHandler):%0A %22%22%22WSGIHandler that forces reading POST data before forwarding to the%0A application.%0A%0A nginx as a proxy expects the backend to respond only after the%0A whole body of the request has been read. In some cases (see below)%0A the backend starts responding before reading the request. 
This%0A causes nginx to return a 502 error, instead of forwarding the%0A proper response to the client, which makes very hard to debug%0A problems with the backend.%0A%0A Cases where the backend responds early:%0A%0A - Early errors from django, for example errors from view decorators.%0A - POST request with large payloads, which may get chunked by nginx.%0A django sends a 100 Continue response before reading the whole body.%0A%0A For more information:%0A http://kudzia.eu/b/2012/01/switching-from-apache2-to-nginx-as-reverse-proxy%0A%0A %22%22%22%0A%0A def get_response(self, request):%0A data = request.POST.copy() # read the POST data passing it%0A return super(ForceReadPostHandler, self).get_response(request)%0A%0Aapplication = ForceReadPostHandler()%0A%0A# Apply WSGI middleware here.%0A# from helloworld.wsgi import HelloWorldApplication%0A# application = HelloWorldApplication(application)%0A
|
|
fb336764f1a95d591e04f0061009c555b7217274
|
Create FoodDiversity.py
|
FoodDiversity.py
|
FoodDiversity.py
|
Python
| 0 |
@@ -0,0 +1,1449 @@
+import csv%0Aimport math%0Aimport collections%0Afrom collections import Counter%0A%0A# EntFunc calculates the Shannon index for the diversity of venues in a given zip code.%0Adef EntFunc(list,list2):%0A k = 0%0A Entropy = 0%0A for k in range(0, len(BusinessName)):%0A if BusinessName%5Bk%5D != BusinessName%5Bk - 1%5D:%0A p = float(BusinessName.count(BusinessName%5Bk%5D)) / float(len(BusinessName))%0A Entropy = -1.0 * math.log(p) * p + Entropy%0A k = k + 1%0A if Entropy != 0: print zip%5Bj%5D,k,Entropy%0A%0A#Take in data from ESRI business lists by zip code.%0A#The entry matrix takes in values by zip code then the business name within the zip code.%0A#The BusinessName list is there simply to take in business names and determine how often unique values repeat for diversity calculations.%0AReadFile = 'SIC581208.csv'%0Ainf = csv.reader(open(ReadFile, %22rU%22))%0Ai = 0%0Aentry=%5B%5B%5D,%5B%5D%5D%0ABusinessName=%5B%5D%0A%0A#Store zip and business name data from ESRI file.%0Afor row in inf:%0A i = i + 1%0A if i %3E 1:%0A entry%5B0%5D.append(long(row%5B6%5D))%0A entry%5B1%5D.append(row%5B1%5D)%0A%0A#Sort the zip code values by zip code.%0Azip = sorted(list(set(entry%5B0%5D)),key=float)%0A%0A#Sort all stored information by zip code.%0A#Output business diversity by zip code.%0Aj=0%0Aentry.sort(key=lambda x: x%5B0%5D)%0Afor i in range(0,len(entry%5B0%5D)):%0A if entry%5B0%5D%5Bi%5D == zip%5Bj%5D:%0A BusinessName.append(entry%5B1%5D%5Bi%5D)%0A else:%0A EntFunc(BusinessName,zip%5Bj%5D)%0A j=j+1%0A BusinessName=%5B%5D%0A
|
|
40d687be843e3de56eb00a028e07866391593315
|
Add defaults.py
|
salt/defaults.py
|
salt/defaults.py
|
Python
| 0 |
@@ -0,0 +1,294 @@
+# -*- coding: utf-8 -*-%0A'''%0ADefault values, to be imported elsewhere in Salt code%0A%0ADo NOT, import any salt modules (salt.utils, salt.config, etc.) into this file,%0Aas this may result in circular imports.%0A'''%0A%0A# Default delimiter for multi-level traversal in targeting%0ADEFAULT_TARGET_DELIM = ':'%0A
|
|
3de2e625af9047b64cc2718e6e79be0c428b6ae7
|
Solve Code Fights extract each kth problem
|
CodeFights/extractEachKth.py
|
CodeFights/extractEachKth.py
|
Python
| 0.000853 |
@@ -0,0 +1,752 @@
+#!/usr/local/bin/python%0A# Code Fights Extract Each Kth Problem%0A%0A%0Adef extractEachKth(inputArray, k):%0A return %5Be for i, e in enumerate(inputArray) if (i + 1) %25 k != 0%5D%0A%0A%0Adef main():%0A tests = %5B%0A %5B%5B1, 2, 3, 4, 5, 6, 7, 8, 9, 10%5D, 3, %5B1, 2, 4, 5, 7, 8, 10%5D%5D,%0A %5B%5B1, 1, 1, 1, 1%5D, 1, %5B%5D%5D,%0A %5B%5B1, 2, 1, 2, 1, 2, 1, 2%5D, 2, %5B1, 1, 1, 1%5D%5D%0A %5D%0A%0A for t in tests:%0A res = extractEachKth(t%5B0%5D, t%5B1%5D)%0A ans = t%5B2%5D%0A if ans == res:%0A print(%22PASSED: extractEachKth(%7B%7D, %7B%7D) returned %7B%7D%22%0A .format(t%5B0%5D, t%5B1%5D, res))%0A else:%0A print((%22FAILED: extractEachKth(%7B%7D, %7B%7D) returned %7B%7D,%22%0A %22answer: %7B%7D%22).format(t%5B0%5D, t%5B1%5D, res, ans))%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
0f1f96ce23ab89c8de3cf24645c4ea77fa2a9196
|
add first test with random data
|
test_window.py
|
test_window.py
|
Python
| 0.000001 |
@@ -0,0 +1,417 @@
+from telescope import LST%0Afrom windows import TelescopeEventView%0Aimport tkinter as tk%0Aimport numpy as np%0A%0Alst = LST(0, 0, 0)%0A%0Aroot = tk.Tk()%0Aviewer1 = TelescopeEventView(root, lst, np.random.normal(size=lst.n_pixel))%0Aviewer2 = TelescopeEventView(root, lst, np.random.normal(size=lst.n_pixel))%0Aviewer1.pack(side=tk.LEFT, expand=True, fill=tk.BOTH)%0Aviewer2.pack(side=tk.LEFT, expand=True, fill=tk.BOTH)%0Aroot.mainloop()%0A
|
|
60b5948508a67cb213ca04b5faacb77e27d8f84c
|
Add fields expicitly declared in form
|
samples/forms.py
|
samples/forms.py
|
import datetime #for checking renewal date range.
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from .models import (Patient, AdmissionNote, FluVaccine,
CollectionType, CollectedSample,
Symptom, ObservedSymptom,
)
from fiocruz.settings.base import DATE_INPUT_FORMATS
class AdmissionNoteForm(forms.ModelForm):
class Meta:
model = AdmissionNote
fields = [
'id_gal_origin',
]
class PatientForm(forms.ModelForm):
class Meta:
model = Patient
fields = [
'name',
]
class FluVaccineForm(forms.ModelForm):
date_applied = forms.DateField(input_formats=DATE_INPUT_FORMATS)
class Meta:
model = FluVaccine
exclude = ['admission_note', ]
|
Python
| 0 |
@@ -798,34 +798,46 @@
-exclude = %5B'admission_note
+fields = %5B'was_applied', 'date_applied
', %5D
|
5ca9e468b9709ae2c7358551a19e668e580ea396
|
add deserialized json object validation functions
|
src/validate.py
|
src/validate.py
|
Python
| 0 |
@@ -0,0 +1,814 @@
+from collections import Counter%0A%0Amodeltypes = set(%5B%22asymmetric_beta_bernoulli%22, %22normal_inverse_gamma%22, %22pitmanyor_atom%22, %22symmetric_dirichlet_discrete%22, %22poisson_gamma%22%5D)%5D)%0A%0Adef assert_map_consistency(map_1, map_2):%0A assert(len(map_1)==len(map_2))%0A for key in map_1:%0A assert(key == map_2%5Bmap_1%5Bkey%5D%5D)%0A%0Adef assert_mc_consistency(mc):%0A assert_map_consistency(mc%5B%22name_to_idx%22%5D, mc%5B%22idx_to_name%22%5D)%0A assert(len(mc%5B%22name_to_idx%22%5D)==len(mc%5B%22column_metadata%22%5D))%0A for column_metadata_i in column_metadata:%0A assert(column_metadata_i%5B%22modeltype%22%5D in modeltypes)%0A assert_map_consistency(column_metadata_i%5B%22value_to_code%22%5D,%0A column_metadata_i%5B%22code_to_value%22%5D)%0A%0Adef assert_mr_consistency(mr):%0A assert_map_consistency(mr%5B%22name_to_idx%22%5D, mr%5B%22idx_to_name%22%5D)%0A%0A
|
|
fc017a578a402b3d24523d1a41b7a4fdc0b107ef
|
add a starter proxy script
|
scripts/proxy.py
|
scripts/proxy.py
|
Python
| 0 |
@@ -0,0 +1,1136 @@
+#!/usr/bin/env python%0A%0A'''%0A%09Copyright (C) Kalan MacRow, 2013%0A%0A%09This code is distributed with jquery.instagram.js %0A%09under the MIT license.%0A%0A%09https://github.com/kmacrow/jquery.instagram.js%0A'''%0A%0Aimport os%0Aimport cgi%0Aimport sys%0Aimport cgitb%0Aimport urllib2%0A%0A %0A# Base URL for Instagram API endpoints%0AINSTAGRAM_BASE = 'https://api.instagram.com/v1/'%0A%0A# Add acceptable origins here...%0AACCEPT_ORIGINS = %5B'http://localhost',%0A%09%09 %09%09 'http://localhost:8888',%0A%09%09 %09%09 'http://localhost:8080'%5D%0A%0A# Initialize CGI with JSON output%0Acgitb.enable()%0Aform = cgi.FieldStorage()%0Aprint %22Content-Type: application/json%22%0A%0A# Support cross origin resource sharing%0Aorigin = os.environ.get('HTTP_ORIGIN')%0Aif origin in ACCEPT_ORIGINS:%0A%09print %22Access-Control-Allow-Origin: %25s%22 %25 origin%0A%0A# empty line after headers%0Aprint%0A%0Aclient_id = form.getfirst('client_id', None)%0Atag_name = form.getfirst('tag', None)%0A%0Aif not client_id or not tag_name:%0A%09print '%7B%22error%22:%22client_id and tag required.%22%7D'%0A%09sys.exit(0)%0A%0A# Get the data from Instagram%0Astream = urllib2.urlopen(INSTAGRAM_BASE + 'tags/' + tag_name %5C%0A%09%09%09%09%09%09%09+ '/media/recent/?client_id=' + client_id)%0A%0Aprint stream.read()%0A
|
|
a319f2f1606a5c4d33e846b496e555140607c98d
|
Add track_name script
|
track_names.py
|
track_names.py
|
Python
| 0.000002 |
@@ -0,0 +1,529 @@
+import midi%0Aimport sys%0A%0Adef track_name(track):%0A for ev in track:%0A if isinstance(ev, midi.TrackNameEvent):%0A return ''.join(map(chr, ev.data))%0A name = 'no name, first 6 events:'%0A for ev in track%5B:6%5D:%0A name += '%5Cn %25s' %25 ev%0A return name%0A%0Adef main(argv):%0A if len(argv) %3C 2:%0A print 'usage: track_names.py file.mid'%0A return%0A%0A mid = midi.read_midifile(argv%5B1%5D)%0A print '%25d tracks' %25 len(mid)%0A for i, t in enumerate(mid):%0A print ' %2503d: %25s' %25 (i, track_name(t))%0A%0Aif __name__ == '__main__':%0A main(sys.argv)%0A
|
|
877a7b7449a1d88c14633376a2dfaca8c619c26a
|
Add solution to exercis 3.6.
|
exercises/chapter_03/exercise_03_06/exercise_03_06.py
|
exercises/chapter_03/exercise_03_06/exercise_03_06.py
|
Python
| 0 |
@@ -0,0 +1,1897 @@
+# 3-6 Guest List%0A%0Aguest_list = %5B%22Albert Einstein%22, %22Isac Newton%22, %22Marie Curie%22, %22Galileo Galilei%22%5D%0A%0Amessage = %22Hi %22 + guest_list%5B0%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B1%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B2%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B3%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Acancelation_message = guest_list%5B1%5D + %22 can not attend the dinner.%22%0Aprint(cancelation_message)%0A%0Aguest_list%5B1%5D = %22Charles Darwin%22%0A%0Amessage = %22Hi %22 + guest_list%5B0%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B1%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B2%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B3%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22I have a bigger table now so three more people will be invited.%22%0Aprint(message)%0A%0Aguest_list.insert(0, %22Stephen Hawking%22)%0Aguest_list.insert(2, %22Louis Pasteur%22)%0Aguest_list.append(%22Nicolaus Copernicus%22)%0A%0Amessage = %22Hi %22 + guest_list%5B0%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B1%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B2%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B3%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B4%5D + %22 you are invited to dinner at 7 on 
saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B5%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A%0Amessage = %22Hi %22 + guest_list%5B6%5D + %22 you are invited to dinner at 7 on saturday.%22%0Aprint(message)%0A
|
|
5c7538ca1e43eb4529c04169a9a15c513bc3e659
|
Add segment_tangent_angle tests module
|
tests/plantcv/morphology/test_segment_tangent_angle.py
|
tests/plantcv/morphology/test_segment_tangent_angle.py
|
Python
| 0.000001 |
@@ -0,0 +1,595 @@
+import pytest%0Aimport cv2%0Afrom plantcv.plantcv import outputs%0Afrom plantcv.plantcv.morphology import segment_tangent_angle%0A%0A%[email protected](%22size%22, %5B3, 100%5D)%0Adef test_segment_tangent_angle(size, morphology_test_data):%0A # Clear previous outputs%0A outputs.clear()%0A skel = cv2.imread(morphology_test_data.skel_img, -1)%0A leaf_obj = morphology_test_data.load_segments(morphology_test_data.segments_file, %22leaves%22)%0A _ = segment_tangent_angle(segmented_img=skel, objects=leaf_obj, size=size)%0A assert len(outputs.observations%5B'default'%5D%5B'segment_tangent_angle'%5D%5B'value'%5D) == 4%0A
|
|
6d2d224d246569a35a7b4ae5d8086e83bbb67155
|
move server.py to project dir
|
server/server.py
|
server/server.py
|
Python
| 0 |
@@ -0,0 +1,1802 @@
+from datetime import datetime%0Aimport json%0Afrom http.server import BaseHTTPRequestHandler, HTTPServer%0A%0ASERVER_PORT = 90%0AHOST_ADDRESS = ''%0A%0A%0Adef save_data(user_email):%0A file = open('users.txt', 'a+')%0A current_time = datetime.now().strftime('%25Y-%25m-%25d %25H:%25M:%25S')%0A file.write(%22%7B%7D, %7B%7D%22.format(user_email, current_time))%0A file.write(%22%5Cn%22)%0A print(%22save %7B%7D%22.format(user_email))%0A%0A%0Adef get_json(data):%0A try:%0A return json.loads(data)%0A except ValueError:%0A # if user send not json --%3E ignore all that he sent%0A return %5B%5D%0A%0A%0Aclass S(BaseHTTPRequestHandler):%0A def _set_headers(self):%0A self.send_response(200)%0A self.send_header('Content-type', 'application/json')%0A self.end_headers()%0A%0A def do_GET(self):%0A self._set_headers()%0A self.wfile.write(%22%7B%5C%22hello%5C%22:%5C%22friend%5C%22%7D%22.encode(%22utf-8%22))%0A%0A def do_HEAD(self):%0A self._set_headers()%0A%0A def do_POST(self):%0A content_length = int(self.headers%5B'Content-Length'%5D) # %3C--- Gets the size of data%0A post_data_str = self.rfile.read(content_length).decode()%0A post_data_json = get_json(post_data_str)%0A%0A email_key = %22email%22%0A # if client didn't send email as param%0A user_email = post_data_json%5Bemail_key%5D if email_key in post_data_json else None%0A%0A self._set_headers()%0A if user_email is not None:%0A save_data(user_email)%0A self.wfile.write(%22%7B%5C%22successfully%5C%22:%5C%22registered%5C%22%7D%22.encode(%22utf-8%22))%0A else:%0A self.wfile.write(%22%7B%5C%22error%5C%22:%5C%22invalid request%5C%22%7D%22.encode(%22utf-8%22))%0A%0A%0Adef run(server_class=HTTPServer, handler_class=S, port=SERVER_PORT):%0A server_address = (HOST_ADDRESS, port)%0A httpd = server_class(server_address, handler_class)%0A print('Starting httpd...')%0A httpd.serve_forever()%0A%0A%0Arun()%0A
|
|
a8db8c0448d98e2de0e662581542bd644e673c7c
|
Add migration removing generated objects with factories
|
geotrek/core/migrations/0018_remove_other_objects_from_factories.py
|
geotrek/core/migrations/0018_remove_other_objects_from_factories.py
|
Python
| 0 |
@@ -0,0 +1,886 @@
+# Generated by Django 2.0.13 on 2020-04-06 13:40%0A%0Afrom django.conf import settings%0Afrom django.contrib.gis.geos import Point, LineString%0Afrom django.db import migrations%0A%0A%0Adef remove_generated_objects_factories(apps, schema_editor):%0A ComfortModel = apps.get_model('core', 'Comfort')%0A PathSourceModel = apps.get_model('core', 'PathSource')%0A StakeModel = apps.get_model('core', 'Stake')%0A ComfortModel.objects.filter(paths__isnull=True, comfort__icontains=%22Comfort %22).delete()%0A PathSourceModel.objects.filter(paths__isnull=True, comfort__icontains=%22PathSource %22).delete()%0A StakeModel.objects.filter(paths__isnull=True, comfort__icontains=%22Stake %22).delete()%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('core', '0017_remove_path_from_factories'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(remove_generated_objects_factories)%0A %5D%0A
|
|
3ddf1f4a2bcae247978b66fd63848b3ed9782234
|
add donwloader
|
MistDownloader.py
|
MistDownloader.py
|
Python
| 0.000014 |
@@ -0,0 +1,1078 @@
+#!/usr/bin/python%0D%0A# -*- coding: utf-8 -*-%0D%0Aimport urllib%0D%0Aimport os%0D%0Aimport time%0D%0Aimport sys%0D%0Aimport traceback%0D%0A%0D%0Acnt=0%0D%0Aleast_cnt=0%0D%0Aif len(sys.argv)==2:%0D%0A%09least_cnt=int(sys.argv%5B1%5D)%0D%0Aprint least_cnt%0D%0A%0D%0Aif not os.path.exists(%22mp3%22):%0D%0A%09os.mkdir(%22mp3%22)%0D%0Afor path,dirname,filenames in os.walk(%22outdir%22):%0D%0A%09for filename in filenames:%0D%0A%09%09if filename.startswith(%22mp3_url_%22):%0D%0A%09%09%09cnt+=1%0D%0A%09%09%09if cnt%25100==0:%0D%0A%09%09%09%09print (%22Has already downloaded %25d songs!%22 %25 cnt)%0D%0A%09%09%09f=open(%22outdir/%22+filename)%0D%0A%09%09%09for line in f:%0D%0A%09%09%09%09values=line.split()%0D%0A%09%09%09%09if len(values)!=3:%0D%0A%09%09%09%09%09sys.stderr.write(%22Bad line '%25s' in file %25s%5Cn%22 %25 (line,filename))%0D%0A%09%09%09%09sid=values%5B0%5D%0D%0A%09%09%09%09play_cnt=int(values%5B1%5D)%0D%0A%09%09%09%09url=values%5B2%5D%0D%0A%09%09%09%09if play_cnt%3Cleast_cnt:%0D%0A%09%09%09%09%09continue%0D%0A%09%09%09%09fn=%22mp3/%25s.mp3%22 %25 sid%0D%0A%09%09%09%09if not os.path.exists(fn):%0D%0A%09%09%09%09%09try:%0D%0A%09%09%09%09%09%09urllib.urlretrieve(url, fn)%0D%0A%09%09%09%09%09%09print(sid)%0D%0A%09%09%09%09%09except Exception as e:%0D%0A%09%09%09%09%09%09exc_type, exc_value, exc_traceback = sys.exc_info()%0D%0A%09%09%09%09%09%09traceback.print_exception(exc_type, exc_value, exc_traceback,limit=None, file=sys.stderr)%0D%0A%09%09%09%09%09time.sleep(2)%0D%0A%09%09%09f.close()%0D%0A
|
|
54e3d3147feb33f21c5bc78a8f3b4721574fcbb9
|
Create A.py
|
Google-Code-Jam/2017-1B/A.py
|
Google-Code-Jam/2017-1B/A.py
|
Python
| 0.000004 |
@@ -0,0 +1,1054 @@
+import os%0Aimport sys%0A%0A%0Ascript = __file__%0AscriptPath = os.path.dirname(script)%0AscriptFile = os.path.basename(script)%5B0%5D%0Afiles = %5Bf for f in os.listdir(scriptPath) if scriptFile in f and '.in' in f%5D%0Aif '%7B%7D-large'.format(scriptFile) in str(files):%0A size = 'large'%0Aelif '%7B%7D-small'.format(scriptFile) in str(files):%0A size = 'small'%0Aelif '%7B%7D-test'.format(scriptFile) in str(files):%0A size = 'test'%0Aelse:%0A print('%7B%7D-test not found'.format(scriptFile))%0A sys.exit()%0Alatest = sorted(f for f in files if size in f)%5B-1%5D%5B:-3%5D%0AF = '%7B%7D/%7B%7D'.format(scriptPath, latest)%0AI = open(F + '.in', 'r')%0AO = open(F + '.out', 'w')%0Aprint(F)%0AT = int(I.readline()) # nb of test cases%0A%0A# https://code.google.com/codejam/contest/8294486/dashboard%0A# Problem A. %0A%0A%0Afor x in range(T):%0A D, N = map(int, I.readline().rstrip().split())%0A horses = %5Btuple(map(int, I.readline().split())) for _ in range(N)%5D%0A slowpoke = max((D-K)/S for K, S in horses)%0A y = D/slowpoke%0A%0A result = '%7B%7DCase #%7B%7D: %7B%7D'.format('%5Cn' if x else '', x + 1, y)%0A print(result)%0A O.write(result)%0A%0AI.close()%0AO.close()%0A
|
|
a067c18f8534d79a85538eaf11e34e99f9e17286
|
develop update to pair master, going to rename master now
|
oh_shit.py
|
oh_shit.py
|
Python
| 0 |
@@ -0,0 +1,1331 @@
+from app import app, db%0Afrom app.mod_sms.models import *%0A%0Aug1 = UserGroup(name='Canyon Time', phone='+17868378095', active=True)%0Aug2 = UserGroup(name='test', phone='+18503783607', active=True)%0A%0Aryan = User(fname='Ryan', lname='Kuhl', phone='+13058985985', active=True)%0Asimon = User(fname='Simon', lname='', phone='+13109264989', active=True)%0Adan = User(fname='Dan' , lname='Malik', phone='+14152718694', active=True)%0Atom = User(fname='Tom' , lname='Scorer', phone='+13109022760', active=True)%0Asteve = User(fname='Steve', lname='Makuch', phone='+16164609893', active=True)%0Achris = User(fname='Chris', lname='', phone='+16269882527', active=True)%0Aben = User(fname='Ben' , lname='Eisenbise', phone='+13234017625', active=True)%0Aalex = User(fname='Alex', lname='Thorpe', phone='+14243869550', active=True)%0A%0Aug1.groups_to_users.append(ryan)%0Aug1.groups_to_users.append(simon)%0Aug1.groups_to_users.append(dan)%0Aug1.groups_to_users.append(tom)%0Aug1.groups_to_users.append(steve)%0Aug1.groups_to_users.append(chris)%0Aug1.groups_to_users.append(ben)%0Aug1.groups_to_users.append(alex)%0A%0Aug2.groups_to_users.append(ryan)%0A%0Adb.session.add(ug1)%0Adb.session.add(ug2)%0Adb.session.add(ryan)%0Adb.session.add(simon)%0Adb.session.add(dan)%0Adb.session.add(tom)%0Adb.session.add(steve)%0Adb.session.add(chris)%0Adb.session.add(ben)%0Adb.session.add(alex)%0A%0Adb.session.commit()%0A
|
|
01c98087541828421da49295abedd3d894cdb3b5
|
Create luz.py
|
opt/luz.py
|
opt/luz.py
|
Python
| 0.000028 |
@@ -0,0 +1,874 @@
+#!/usr/bin/env python%0A%0A#%09Realizado por: Roberto Arias (@bettocr)%0A#%09%0A#%09Permite encender y apagar luces leds%0A#%09%0A%0Aimport RPi.GPIO as GPIO, time, os %0A%0AGPIO.setmode(GPIO.BCM)%0Aon = 0 # luces encendidas%0AMAX=5200 # luminocidad maxima antes de encender el led, entre mayor mas oscuro%0APIN=23 # pin al relay%0APINRC=24 #pin que lee la photocell%0A%0AGPIO.setup(PIN,GPIO.OUT)%0Adef RCtime (RCpin):%0A reading = 0%0A GPIO.setup(RCpin, GPIO.OUT)%0A GPIO.output(RCpin, GPIO.LOW)%0A time.sleep(0.1)%0A%0A GPIO.setup(RCpin, GPIO.IN)%0A %0A while (GPIO.input(RCpin) == GPIO.LOW):%0A reading += 1%0A return reading%0A%0Awhile True: %0A #print RCtime(24) %0A%09luz = RCtime(PINRC)%0A%09if luz %3E MAX:%0A%09 %09GPIO.output(PIN,True)%0A%09%09on = 1%0A %0A%09if luz %3C MAX and on == 1:%0A%09%09GPIO.output(PIN,False)%0A%09%09%0A%09%09on = 0%0A%09%0A%09%0A
|
|
fe186bf85472cf4e683d9838e36e60c680e6dc77
|
Add test
|
python/ql/test/library-tests/PointsTo/new/code/w_function_values.py
|
python/ql/test/library-tests/PointsTo/new/code/w_function_values.py
|
Python
| 0.000319 |
@@ -0,0 +1,425 @@
+def test_conditoinal_function(cond):%0A def foo():%0A return %22foo%22%0A%0A def bar():%0A return %22bar%22%0A%0A if cond:%0A f = foo%0A else:%0A f = bar%0A%0A sink = f()%0A return sink%0A%0A%0Af_false = test_conditoinal_function(False)%0Af_true = test_conditoinal_function(True)%0A%0A%0Adef foo():%0A return %22foo%22%0A%0A%0Adef test_redefinition():%0A f = foo%0A%0A def foo():%0A return %22refined%22%0A%0A sink = f()%0A return sink
|
|
1afb7bb7b1f3e8ef3070f1100dac683b2b8254ee
|
remove unused table
|
xbrowse_server/base/migrations/0003_delete_xhmmfile.py
|
xbrowse_server/base/migrations/0003_delete_xhmmfile.py
|
Python
| 0.000013 |
@@ -0,0 +1,318 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('base', '0002_auto_20160117_1843'),%0A %5D%0A%0A operations = %5B%0A migrations.DeleteModel(%0A name='XHMMFile',%0A ),%0A %5D%0A
|
|
49c673c5c8374867fc9bf026717fe137bdba84bc
|
Add test file for graph.py and add test of Greengraph class constructor
|
greengraph/test/test_graph.py
|
greengraph/test/test_graph.py
|
Python
| 0 |
@@ -0,0 +1,622 @@
+from greengraph.map import Map%0Afrom greengraph.graph import Greengraph%0Afrom mock import patch%0Aimport geopy%0Afrom nose.tools import assert_equal%0A%0Astart = %22London%22%0Aend = %22Durham%22%0A%0Adef test_Greengraph_init():%0A with patch.object(geopy.geocoders,'GoogleV3') as mock_GoogleV3:%0A test_Greengraph = Greengraph(start,end)%0A #Test that GoogleV3 is called with the correct parameters%0A mock_GoogleV3.assert_called_with(domain=%22maps.google.co.uk%22)%0A #Test that the start and end fields are initialised correctly%0A assert_equal(test_Greengraph.start,start)%0A assert_equal(test_Greengraph.end,end)%0A
|
|
54553efa024d74ec60647ea7616191a52fe9948f
|
Add a command to create collaborator organisations
|
akvo/rsr/management/commands/create_collaborator_organisation.py
|
akvo/rsr/management/commands/create_collaborator_organisation.py
|
Python
| 0 |
@@ -0,0 +1,1364 @@
+# -*- coding: utf-8 -*-%0A%0A# Akvo Reporting is covered by the GNU Affero General Public License.%0A# See more details in the license.txt file located at the root folder of the Akvo RSR module.%0A# For additional details on the GNU license please see %3C http://www.gnu.org/licenses/agpl.html %3E.%0A%0A%22%22%22Create a collaborator organisation for a given organisation.%0A%0AUsage:%0A%0A python manage.py create_collaborator_organisation %3Corg-id%3E%0A%0A%22%22%22%0A%0Aimport sys%0A%0Afrom django.core.management.base import BaseCommand%0A%0Afrom akvo.rsr.models import Organisation%0A%0A%0Aclass Command(BaseCommand):%0A help = __doc__%0A%0A def add_arguments(self, parser):%0A parser.add_argument('org_id', type=int)%0A%0A def handle(self, *args, **options):%0A org_id = options%5B'org_id'%5D%0A try:%0A organisation = Organisation.objects.get(id=org_id)%0A except Organisation.DoesNotExist:%0A sys.exit('Could not find organisation with ID: %7B%7D'.format(org_id))%0A%0A collaborator, _ = Organisation.objects.get_or_create(%0A content_owner=organisation,%0A original=organisation,%0A defaults=dict(%0A name='Collaborator: %7B%7D'.format(organisation.name),%0A long_name='Collaborator: %7B%7D'.format(organisation.long_name),%0A )%0A )%0A print('Collaborator Organisation created with ID: %7B%7D'.format(collaborator.id))%0A
|
|
2b7de72ef67d3a7216902cfc6073760336d9d67a
|
use separate test project from ForgeSVN's tests
|
ForgeImporters/forgeimporters/google/tests/test_code.py
|
ForgeImporters/forgeimporters/google/tests/test_code.py
|
from unittest import TestCase
from mock import Mock, patch
from allura.tests import TestController
from forgesvn.tests import with_svn
from forgeimporters.google.code import (
get_repo_url,
GoogleRepoImporter,
GoogleRepoImportController,
)
class TestGetRepoUrl(TestCase):
def test_svn(self):
r = get_repo_url('projname', 'svn')
self.assertEqual(r, 'http://projname.googlecode.com/svn/')
def test_git(self):
r = get_repo_url('projname', 'git')
self.assertEqual(r, 'https://code.google.com/p/projname/')
def test_hg(self):
r = get_repo_url('projname', 'hg')
self.assertEqual(r, 'https://code.google.com/p/projname/')
class TestGoogleRepoImporter(TestCase):
def _make_project(self, gc_proj_name=None):
project = Mock()
project.get_tool_data.side_effect = lambda *args: gc_proj_name
return project
@patch('forgeimporters.google.code.GoogleCodeProjectExtractor.get_repo_type')
@patch('forgeimporters.google.code.get_repo_url')
def test_import_tool_happy_path(self, get_repo_url, get_repo_type):
get_repo_type.return_value = 'git'
get_repo_url.return_value = 'http://remote/clone/url/'
p = self._make_project(gc_proj_name='myproject')
GoogleRepoImporter().import_tool(p)
p.install_app.assert_called_once_with('Git',
mount_point='code',
mount_label='Code',
init_from_url='http://remote/clone/url/',
)
def test_no_project(self):
with self.assertRaises(Exception) as cm:
GoogleRepoImporter().import_tool()
self.assertEqual(str(cm.exception), "You must supply a project")
def test_no_google_code_project_name(self):
p = self._make_project()
with self.assertRaises(Exception) as cm:
GoogleRepoImporter().import_tool(p)
self.assertEqual(str(cm.exception), "Missing Google Code project name")
class TestGoogleRepoImportController(TestController, TestCase):
def setUp(self):
"""Mount Google Code importer on the SVN admin controller"""
super(TestGoogleRepoImportController, self).setUp()
from forgesvn.svn_main import SVNRepoAdminController
SVNRepoAdminController._importer = GoogleRepoImportController()
@with_svn
def test_index(self):
r = self.app.get('/p/test/admin/src/_importer/')
self.assertIsNotNone(r.html.find(attrs=dict(name="gc_project_name")))
self.assertIsNotNone(r.html.find(attrs=dict(name="mount_label")))
self.assertIsNotNone(r.html.find(attrs=dict(name="mount_point")))
@with_svn
@patch('forgeimporters.google.code.GoogleRepoImporter')
def test_create(self, gri):
from allura import model as M
gri.import_tool.return_value = Mock()
gri.import_tool.return_value.url.return_value = '/p/test/mymount'
params = dict(gc_project_name='poop',
mount_label='mylabel',
mount_point='mymount',
)
r = self.app.post('/p/test/admin/src/_importer/create', params,
status=302)
project = M.Project.query.get(shortname='test')
self.assertEqual(r.location, 'http://localhost/p/test/mymount')
self.assertEqual(project.get_tool_data('google-code', 'project_name'),
'poop')
self.assertEqual(project._id, gri.import_tool.call_args[0][0]._id)
self.assertEqual(u'mymount', gri.import_tool.call_args[1]['mount_point'])
self.assertEqual(u'mylabel', gri.import_tool.call_args[1]['mount_label'])
|
Python
| 0 |
@@ -102,38 +102,275 @@
rom
-forgesvn.tests import with_svn
+allura.tests.decorators import with_tool%0A%0A%0A# important to be distinct from 'test' which ForgeSVN uses, so that the tests can run in parallel and not clobber each other%0Atest_project_with_repo = 'test2'%0Awith_svn = with_tool(test_project_with_repo, 'SVN', 'src', 'SVN')%0A
%0A%0Afr
@@ -2646,28 +2646,26 @@
app.get('/p/
-test
+%7B%7D
/admin/src/_
@@ -2674,16 +2674,47 @@
porter/'
+.format(test_project_with_repo)
)%0A
@@ -3184,20 +3184,18 @@
e = '/p/
-test
+%7B%7D
/mymount
@@ -3195,16 +3195,47 @@
mymount'
+.format(test_project_with_repo)
%0A
@@ -3399,20 +3399,18 @@
ost('/p/
-test
+%7B%7D
/admin/s
@@ -3429,17 +3429,64 @@
/create'
-,
+.format(test_project_with_repo),%0A
params,
@@ -3562,22 +3562,38 @@
ortname=
-'
test
-'
+_project_with_repo
)%0A
@@ -3643,20 +3643,18 @@
lhost/p/
-test
+%7B%7D
/mymount
@@ -3654,16 +3654,47 @@
mymount'
+.format(test_project_with_repo)
)%0A
|
a3d837afe6662edb10395baa8851de551d0915a5
|
add email templates tests
|
auth0/v3/test/management/test_email_endpoints.py
|
auth0/v3/test/management/test_email_endpoints.py
|
Python
| 0.000001 |
@@ -0,0 +1,1337 @@
+import unittest%0Aimport mock%0Afrom ...management.email_templates import EmailTemplates%0A%0A%0Aclass TestClients(unittest.TestCase):%0A%0A @mock.patch('auth0.v3.management.email_templates.RestClient')%0A def test_create(self, mock_rc):%0A mock_instance = mock_rc.return_value%0A%0A c = EmailTemplates(domain='domain', token='jwttoken')%0A c.create(%7B'a': 'b', 'c': 'd'%7D)%0A%0A mock_instance.post.assert_called_with(%0A 'https://domain/api/v2/email-templates',%0A data=%7B'a': 'b', 'c': 'd'%7D%0A )%0A%0A @mock.patch('auth0.v3.management.email_templates.RestClient')%0A def test_get(self, mock_rc):%0A mock_instance = mock_rc.return_value%0A%0A c = EmailTemplates(domain='domain', token='jwttoken')%0A c.get('this-template-name')%0A%0A mock_instance.get.assert_called_with(%0A 'https://domain/api/v2/email-templates/this-template-name'%0A )%0A%0A @mock.patch('auth0.v3.management.email_templates.RestClient')%0A def test_update(self, mock_rc):%0A mock_instance = mock_rc.return_value%0A%0A c = EmailTemplates(domain='domain', token='jwttoken')%0A c.update('this-template-name', %7B'a': 'b', 'c': 'd'%7D)%0A%0A mock_instance.patch.assert_called_with(%0A 'https://domain/api/v2/email-templates/this-template-name',%0A data=%7B'a': 'b', 'c': 'd'%7D%0A )%0A
|
|
cee2683d3c0a60739b8e4f1c1dbaa74981a42392
|
add class skeleton for schedule generator
|
angular_flask/classtime/scheduler.py
|
angular_flask/classtime/scheduler.py
|
Python
| 0 |
@@ -0,0 +1,1038 @@
+class Scheduler(object):%0A %22%22%22%0A Helper class which builds optimal schedules out of %0A class listings.%0A%0A Use static methods only - do not create instances of%0A the class.%0A %22%22%22%0A def __init__(self):%0A pass%0A%0A @staticmethod%0A def generate_schedule(classtimes):%0A %22%22%22%0A Generates one good schedule based on the classtimes%0A provided.%0A%0A classtimes should be in the following format:%0A %5B%0A %7B%0A 'course_name' : 'somename',%0A 'course_attr_a' : 'someattr',%0A ...%0A 'day' : '%3Cdaystring%3E',%0A 'startTime' : '%3Ctime%3E',%0A 'endTime' : '%3Ctime%3E'%0A %7D,%0A ...%0A %7B %0A ...%0A %7D%0A %5D%0A%0A Where %3Cdaystring%3E is a string containing the days the%0A class is scheduled on:%0A - UMTWRFS is Sunday...Saturday%0A - eg 'MWF' or 'TR'%0A%0A And %3Ctime%3E is a time of format 'HH:MM XM'%0A - eg '08:00 AM'%0A %22%22%22%0A pass%0A
|
|
82e4c67bd7643eed06e7cd170ca1d0de41c70912
|
Add a data analyzer class.
|
core/data/DataAnalyzer.py
|
core/data/DataAnalyzer.py
|
Python
| 0 |
@@ -0,0 +1,744 @@
+%22%22%22%0ADataAnalyzer%0A%0A:Authors:%0A%09Berend Klein Haneveld%0A%22%22%22%0A%0A%0Aclass DataAnalyzer(object):%0A%09%22%22%22%0A%09DataAnalyzer%0A%09%22%22%22%0A%0A%09def __init__(self):%0A%09%09super(DataAnalyzer, self).__init__()%0A%0A%09@classmethod%0A%09def histogramForData(cls, data, nrBins):%0A%09%09%22%22%22%0A%09%09Samples the image data in order to create bins%0A%09%09for making a histogram of the data.%0A%09%09%22%22%22%0A%09%09dims = data.GetDimensions()%0A%09%09minVal, maxVal = data.GetScalarRange()%0A%09%09bins = %5B0 for x in range(nrBins)%5D%0A%0A%09%09stepSize = 3%0A%09%09for z in range(0, dims%5B2%5D, stepSize):%0A%09%09%09for y in range(0, dims%5B1%5D, stepSize):%0A%09%09%09%09for x in range(0, dims%5B0%5D, stepSize):%0A%09%09%09%09%09element = data.GetScalarComponentAsFloat(x, y, z, 0)%0A%09%09%09%09%09index = int(((element - minVal) / float(maxVal - minVal)) * (nrBins-1))%0A%09%09%09%09%09bins%5Bindex%5D += 1%0A%0A%09%09return bins%0A
|
|
3f85610873d88592970c64661e526b2a576e300f
|
Add new sms message generator
|
sms_generator.py
|
sms_generator.py
|
Python
| 0.000003 |
@@ -0,0 +1,1145 @@
+def generate_new_procedure_message(procedure, ward, timeframe, doctor):%0A unique_reference = str(1)%0A message = str.format(%22%7B0%7D is available on %7B1%7D. Attend the ward in %7B2%7D and meet %7B3%7D in the junior doctors' office. %22%0A %22To accept this opportunity reply with %7B4%7D%22,%0A procedure,%0A ward,%0A timeframe,%0A doctor,%0A unique_reference)%0A print(message)%0A return message%0A%0A%0Adef generate_success_response_message(procedure, ward, timeframe, doctor):%0A message = str.format(%22Please attend %7B0%7D in %7B1%7D and ask for %7B2%7D to complete this supervised %22%0A %22procedure. This learning opportunity has been reserved exclusively for you, please make %22%0A %22every effort to attend.%22,%0A ward,%0A timeframe,%0A doctor)%0A print(message)%0A return message%0A%0A%0Adef generate_not_success_response_message():%0A message = str.format(%22Sorry - procedure already taken this time.%22)%0A%0A print(message)%0A return message
|
|
926631d068a223788714cd645ae5336881c6853f
|
Update messageable.py
|
praw/models/reddit/mixins/messageable.py
|
praw/models/reddit/mixins/messageable.py
|
"""Provide the MessageableMixin class."""
from ....const import API_PATH
class MessageableMixin:
"""Interface for classes that can be messaged."""
def message(self, subject, message, from_subreddit=None):
"""
Send a message to a redditor or a subreddit's moderators (mod mail).
:param subject: The subject of the message.
:param message: The message content.
:param from_subreddit: A :class:`~.Subreddit` instance or string to send the
message from. When provided, messages are sent from the subreddit
rather than from the authenticated user. Note that the
authenticated user must be a moderator of the subreddit and have
the ``mail`` moderator permission.
For example, to send a private message to ``/u/spez``, try:
.. code:: python
reddit.redditor('spez').message('TEST', 'test message from PRAW')
To send a message to ``u/spez`` from the moderators of ``r/test`` try:
.. code:: python
reddit.redditor('spez').message('TEST', 'test message from r/test',
from_subreddit='test')
To send a message to the moderators of ``/r/test``, try:
.. code:: python
reddit.subreddit('test').message('TEST', 'test PM from PRAW')
"""
data = {
"subject": subject,
"text": message,
"to": "{}{}".format(
getattr(self.__class__, "MESSAGE_PREFIX", ""), self
),
}
if from_subreddit:
data["from_sr"] = str(from_subreddit)
self._reddit.post(API_PATH["compose"], data=data)
|
Python
| 0.000001 |
@@ -475,17 +475,8 @@
g to
- send the
%0A
@@ -484,16 +484,25 @@
+send the
message
@@ -540,24 +540,36 @@
re sent from
+%0A
the subredd
@@ -570,28 +570,16 @@
ubreddit
-%0A
rather
@@ -611,16 +611,28 @@
ed user.
+%0A
Note th
@@ -637,28 +637,16 @@
that the
-%0A
authent
@@ -679,24 +679,36 @@
rator of the
+%0A
subreddit a
@@ -714,28 +714,16 @@
and have
-%0A
the %60%60m
@@ -803,17 +803,16 @@
ge to %60%60
-/
u/spez%60%60
@@ -1224,17 +1224,16 @@
rs of %60%60
-/
r/test%60%60
|
73bd8200f6ad23c60a05831e3b79497b830f19cd
|
Update old lithium comments about llvm-symbolizer 3.6 to 3.8 versions.
|
interestingness/envVars.py
|
interestingness/envVars.py
|
#!/usr/bin/env python
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import copy
import os
import platform
isLinux = (platform.system() == 'Linux')
isMac = (platform.system() == 'Darwin')
isWin = (platform.system() == 'Windows')
ENV_PATH_SEPARATOR = ';' if os.name == 'nt' else ':'
def envWithPath(path, runningEnv=os.environ):
"""Append the path to the appropriate library path on various platforms."""
if isLinux:
libPath = 'LD_LIBRARY_PATH'
elif isMac:
libPath = 'DYLD_LIBRARY_PATH'
elif isWin:
libPath = 'PATH'
env = copy.deepcopy(runningEnv)
if libPath in env:
if path not in env[libPath]:
env[libPath] += ENV_PATH_SEPARATOR + path
else:
env[libPath] = path
return env
def findLlvmBinPath():
"""Return the path to compiled LLVM binaries, which differs depending on compilation method."""
if isLinux:
# Assumes clang was installed through apt-get. Works with version 3.6.2.
# Create a symlink at /usr/bin/llvm-symbolizer for: /usr/bin/llvm-symbolizer-3.6
if os.path.isfile('/usr/bin/llvm-symbolizer'):
return ''
else:
print 'WARNING: Please install clang via `apt-get install clang` if using Ubuntu.'
print 'then create a symlink at /usr/bin/llvm-symbolizer for: /usr/bin/llvm-symbolizer-3.6.'
print 'Try: `ln -s /usr/bin/llvm-symbolizer-3.6 /usr/bin/llvm-symbolizer`'
return ''
if isMac:
# Assumes LLVM was installed through Homebrew. Works with at least version 3.6.2.
brewLLVMPath = '/usr/local/opt/llvm/bin'
if os.path.isdir(brewLLVMPath):
return brewLLVMPath
else:
print 'WARNING: Please install llvm from Homebrew via `brew install llvm`.'
print 'ASan stacks will not have symbols as Xcode does not install llvm-symbolizer.'
return ''
# https://developer.mozilla.org/en-US/docs/Building_Firefox_with_Address_Sanitizer#Manual_Build
if isWin:
return None # The harness does not yet support Clang on Windows
|
Python
| 0 |
@@ -1124,32 +1124,76 @@
th version 3.6.2
+,%0A # assumed to work with clang 3.8.0
.%0A # Crea
@@ -1265,17 +1265,17 @@
lizer-3.
-6
+8
%0A
@@ -1554,17 +1554,17 @@
lizer-3.
-6
+8
.'%0A
@@ -1616,17 +1616,17 @@
lizer-3.
-6
+8
/usr/bi
|
378cb69d413eb8ffaf811b607fc037be923a2aba
|
Write tests for SSLRedirectMiddleware
|
iogt/tests/test_middleware.py
|
iogt/tests/test_middleware.py
|
Python
| 0 |
@@ -0,0 +1,1629 @@
+from django.test import (%0A TestCase,%0A Client,%0A RequestFactory,%0A override_settings,%0A)%0A%0Afrom molo.core.tests.base import MoloTestCaseMixin%0Afrom molo.core.models import Main%0A%0Afrom iogt.middleware import SSLRedirectMiddleware%0A%0A%0APERMANENT_REDIRECT_STATUS_CODE = 301%0A%0A%0A@override_settings(HTTPS_PATHS=%5B'admin'%5D)%0Aclass TestSSLRedirectMiddleware(TestCase, MoloTestCaseMixin):%0A def setUp(self):%0A self.mk_main()%0A self.main = Main.objects.all().first()%0A self.factory = RequestFactory()%0A%0A def test_no_redirect_for_home_page(self):%0A request = self.factory.get('/')%0A%0A middleware = SSLRedirectMiddleware()%0A response = middleware.process_request(request)%0A%0A self.assertEqual(response, None)%0A%0A def test_no_redirect_with_https(self):%0A headers = %7B'HTTP_X_FORWARDED_PROTO': 'https'%7D%0A request = self.factory.get('/', **headers)%0A%0A middleware = SSLRedirectMiddleware()%0A response = middleware.process_request(request)%0A%0A self.assertEqual(response, None)%0A%0A def test_no_redirect_when_secure(self):%0A headers = %7B'HTTP_X_FORWARDED_PROTO': 'https'%7D%0A request = self.factory.get('/admin/', **headers)%0A%0A middleware = SSLRedirectMiddleware()%0A response = middleware.process_request(request)%0A%0A self.assertEqual(response, None)%0A%0A def test_redirect_when_not_secure(self):%0A request = self.factory.get('/admin/')%0A%0A middleware = SSLRedirectMiddleware()%0A response = middleware.process_request(request)%0A%0A self.assertEqual(response.status_code,%0A PERMANENT_REDIRECT_STATUS_CODE)%0A
|
|
e0ac456eae45a1b7e1482ff712be600b384f94b3
|
Include new example to show group circle connectivity.
|
examples/connectivity/plot_custom_grouped_connectivity_circle.py
|
examples/connectivity/plot_custom_grouped_connectivity_circle.py
|
Python
| 0 |
@@ -0,0 +1,2695 @@
+#!/usr/bin/env python%0A%0A%22%22%22%0AExample how to create a custom label groups and plot grouped connectivity%0Acircle with these labels.%0A%0AAuthor: Praveen Sripad %[email protected]%3E%0A Christian Kiefer %[email protected]%3E%0A%22%22%22%0A%0Aimport matplotlib.pyplot as plt%0Afrom jumeg import get_jumeg_path%0Afrom jumeg.connectivity import (plot_grouped_connectivity_circle,%0A generate_random_connectivity_matrix)%0A%0Aimport yaml%0A%0Alabels_fname = get_jumeg_path() + '/data/desikan_label_names.yaml'%0Areplacer_dict_fname = get_jumeg_path() + '/data/replacer_dictionaries.yaml'%0A%0Awith open(labels_fname, 'r') as f:%0A label_names = yaml.safe_load(f)%5B'label_names'%5D%0A%0Awith open(replacer_dict_fname, 'r') as f:%0A replacer_dict = yaml.safe_load(f)%5B'replacer_dict_aparc'%5D%0A%0A# make a random matrix with 68 nodes%0A# use simple seed for reproducibility%0Acon = generate_random_connectivity_matrix(size=(68, 68), symmetric=True)%0A%0A# make groups based on lobes%0Aoccipital = %5B'lateraloccipital', 'lingual', 'cuneus', 'pericalcarine'%5D%0Aparietal = %5B'superiorparietal', 'inferiorparietal', 'precuneus',%0A 'postcentral', 'supramarginal'%5D%0Atemporal = %5B'bankssts', 'temporalpole', 'superiortemporal', 'middletemporal',%0A 'transversetemporal', 'inferiortemporal', 'fusiform',%0A 'entorhinal', 'parahippocampal'%5D%0Ainsula = %5B'insula'%5D%0Acingulate = %5B'rostralanteriorcingulate', 'caudalanteriorcingulate',%0A 'posteriorcingulate', 'isthmuscingulate'%5D%0Afrontal = %5B'superiorfrontal', 'rostralmiddlefrontal', 'caudalmiddlefrontal',%0A 'parsopercularis', 'parsorbitalis', 'parstriangularis',%0A 'lateralorbitofrontal', 'medialorbitofrontal', 'precentral',%0A 'paracentral', 'frontalpole'%5D%0A%0A# we need a list of dictionaries, one dict for each group to denote grouping%0Alabel_groups = %5B%7B'occipital': occipital%7D, %7B'parietal': parietal%7D,%0A %7B'temporal': temporal%7D, %7B'insula': insula%7D,%0A %7B'cingulate': cingulate%7D,%0A %7B'frontal': 
frontal%7D%5D%0A%0An_colors = len(label_groups)%0Acmap = plt.get_cmap('Pastel1')%0Acortex_colors = cmap.colors%5B:n_colors%5D + cmap.colors%5B:n_colors%5D%5B::-1%5D%0A%0A# plot simple connectivity circle with cortex based grouping and colors%0Aplot_grouped_connectivity_circle(label_groups, con, label_names,%0A labels_mode='replace',%0A replacer_dict=replacer_dict,%0A cortex_colors=cortex_colors, vmin=0., vmax=1.,%0A out_fname='fig_grouped_con_circle_cortex.png',%0A colorbar_pos=(0.1, 0.1), n_lines=50, colorbar=True,%0A colormap='viridis')%0A
|
|
f50efeb78d9b503a7d6e97db8b1cd68b429aa2c4
|
allow to run tox as 'python -m tox', which is handy on Windoze
|
tox/__main__.py
|
tox/__main__.py
|
Python
| 0 |
@@ -0,0 +1,38 @@
+from tox._cmdline import main%0A%0Amain()%0A
|
|
f7e504652707b09c0a0b7e7b1691094ef6d35509
|
add proper tomography example
|
examples/solvers/conjugate_gradient_tomography.py
|
examples/solvers/conjugate_gradient_tomography.py
|
Python
| 0.998527 |
@@ -0,0 +1,2901 @@
+# Copyright 2014-2016 The ODL development group%0A#%0A# This file is part of ODL.%0A#%0A# ODL is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A#%0A# ODL is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU General Public License for more details.%0A#%0A# You should have received a copy of the GNU General Public License%0A# along with ODL. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0A%22%22%22Total variation tomography using the %60conjugate_gradient_normal%60 solver.%0A%0ASolves the inverse problem%0A%0A A(x) = g%0A%0AWhere %60%60A%60%60 is a parallel beam forward projector, %60%60x%60%60 the result and%0A %60%60g%60%60 is given noisy data.%0A%22%22%22%0A%0Aimport numpy as np%0Aimport odl%0A%0A%0A# --- Set up the forward operator (ray transform) --- #%0A%0A%0A# Discrete reconstruction space: discretized functions on the rectangle%0A# %5B-20, 20%5D%5E2 with 300 samples per dimension.%0Areco_space = odl.uniform_discr(%0A min_corner=%5B-20, -20%5D, max_corner=%5B20, 20%5D, nsamples=%5B300, 300%5D,%0A dtype='float32')%0A%0A# Make a parallel beam geometry with flat detector%0A# Angles: uniformly spaced, n = 360, min = 0, max = 2 * pi%0Aangle_partition = odl.uniform_partition(0, 2 * np.pi, 360)%0A%0A# Detector: uniformly sampled, n = 558, min = -30, max = 30%0Adetector_partition = odl.uniform_partition(-30, 30, 558)%0Ageometry = odl.tomo.Parallel2dGeometry(angle_partition, detector_partition)%0A%0A# The implementation of the ray transform to use, options:%0A# 'scikit' Requires scikit-image (can be installed by%0A# running %60%60pip install scikit-image%60%60).%0A# 'astra_cpu', 'astra_cuda' Require astra tomography to be installed.%0A# 
Astra is much faster than scikit. Webpage:%0A# https://github.com/astra-toolbox/astra-toolbox%0Aimpl = 'scikit'%0A%0A# Ray transform aka forward projection.%0Aray_trafo = odl.tomo.RayTransform(reco_space, geometry, impl=impl)%0A%0A%0A# --- Generate artificial data --- #%0A%0A%0A# Create phantom%0Adiscr_phantom = odl.util.shepp_logan(reco_space, modified=True)%0A%0A# Create sinogram of forward projected phantom with noise%0Adata = ray_trafo(discr_phantom)%0Adata += odl.util.white_noise(ray_trafo.range) * np.mean(data) * 0.1%0A%0A# Optionally pass partial to the solver to display intermediate results%0Apartial = (odl.solvers.PrintIterationPartial() &%0A odl.solvers.ShowPartial())%0A%0A# Choose a starting point%0Ax = ray_trafo.domain.zero()%0A%0A# Run the algorithm%0Aodl.solvers.conjugate_gradient_normal(%0A ray_trafo, x, data, niter=20, partial=partial)%0A%0A# Display images%0Adiscr_phantom.show(title='original image')%0Adata.show(title='convolved image')%0Ax.show(title='deconvolved image', show=True)%0A
|
|
236d7d885dadcb681357212a5c6b53c28eac0aa1
|
Create d1-1.py
|
2018/d1-1.py
|
2018/d1-1.py
|
Python
| 0.000001 |
@@ -0,0 +1,167 @@
+with open(%22completed/input_c1-1.txt%22, %22r%22) as f:%0A line = %220%22%0A sum = 0%0A while line:%0A sum += int(line)%0A line = f.readline()%0A print(%22Final Frequency: %7B%7D%22, sum)%0A
|
|
8de92e74317a74b53991bdcbb3594f0e94e4cf17
|
Add Monty Hall simulation
|
montyhall.py
|
montyhall.py
|
Python
| 0 |
@@ -0,0 +1,1230 @@
+import random%0Aimport sys%0A%0Adef game():%0A # Place car behind one door%0A car = random.randint(1, 3)%0A # Player selects a door%0A first_choice = random.randint(1, 3)%0A%0A reveal_options = %5B1, 2, 3%5D%0A # Don't reveal the car%0A reveal_options.remove(car)%0A # Don't reveal the player's choice%0A if first_choice in reveal_options: reveal_options.remove(first_choice)%0A # Reveal a door with a goat%0A reveal = random.choice(reveal_options)%0A%0A second_options = %5B1, 2, 3%5D%0A # Don't select your first choice%0A second_options.remove(first_choice)%0A # Don't select the revealed door%0A second_options.remove(reveal)%0A # Choose the remaining door%0A second_choice = second_options%5B0%5D%0A%0A # Collect and return result%0A first_succ = 1 if first_choice == car else 0%0A second_succ = 1 if second_choice == car else 0%0A return (first_succ, second_succ)%0A%0Adef simulate(rounds):%0A first, second = 0, 0%0A for i in range(rounds):%0A res = game()%0A first += res%5B0%5D%0A second += res%5B1%5D%0A print(%22First choice wins %7B:.1f%7D%25 of cases%22.format(first / rounds * 100))%0A print(%22Second choice wins %7B:.1f%7D%25 of cases%22.format(second / rounds * 100))%0A%0Aif __name__ == '__main__':%0A simulate(int(sys.argv%5B1%5D))%0A
|
|
b177a0f2e9b42347f56c4499aaa080af97e0e530
|
add validity check
|
2018/04.10/python/jya_gAPIclass.2.py
|
2018/04.10/python/jya_gAPIclass.2.py
|
Python
| 0.000001 |
@@ -0,0 +1,1843 @@
+import requests, base64%0Aimport config%0A%0Aid = config.GAPI_CONFIG%5B'client_id'%5D%0Asecret = config.GAPI_CONFIG%5B'client_secret'%5D%0Atype = config.GAPI_CONFIG%5B'grant_type'%5D%0A%0Aclass GapiClass:%0A def __init__(self, host='https://gapi.gabia.com'):%0A self.__host = host%0A self.__headers = self.__encoded_token()%0A self.__max_retry = 5%0A self.__p = 1%0A%0A def __Requests_get(self, url):%0A r = requests.get('%7B0%7D%7B1%7D'.format(self.__host, url), headers = self.__headers)%0A # print(r.status_code)%0A if (r.status_code == 401):%0A print(%22%EC%9C%A0%ED%9A%A8%ED%95%98%EC%A7%80 %EC%95%8A%EC%9D%80 %ED%86%A0%ED%81%B0%EC%9E%85%EB%8B%88%EB%8B%A4%22)%0A while self.__p %3C self.__max_retry:%0A self.__p += 1%0A self.__headers = self.__encoded_token()%0A self.__Requests_get(url)%0A elif (r.status_code == 200):%0A j = r.json()%0A return j%0A else:%0A print(%22%EB%8B%A4%EC%9D%8C %EA%B8%B0%ED%9A%8C%EC%97%90%22)%0A%0A def __Requests_post(self, url, data):%0A r = requests.post('%7B0%7D%7B1%7D'.format(self.__host, url), data = data)%0A j = r.json()%0A return j%0A%0A def __getToken(self):%0A j = self.__Requests_post('/oauth/token', %7B'client_id': id, 'client_secret': secret, 'grant_type': type%7D)%0A token_1 = j%5B'access_token'%5D%0A token_2 = 'www_front:%7B0%7D'.format(token_1) %0A return token_2%0A%0A def __makeHeadersAuth(self, token):%0A encoded_text = token.encode()%0A k = base64.b64encode(encoded_text)%0A l = k.decode()%0A return %7B'Authorization': 'Basic %7B0%7D'.format(l)%7D%0A%0A def __encoded_token(self):%0A return self.__makeHeadersAuth(self.__getToken())%0A%0A def getMember(self, id):%0A j = self.__Requests_get('/members?user_id=%7B0%7D'.format(id))%0A hanname = j%5B'client_info'%5D%5B'hanadmin'%5D%0A return hanname%0A%0A# api1 = GapiClass()%0A# a = api1.getMember('planning_d')%0A# if __name__ == %22__main__%22:%0A# print(a)
|
|
8e8e11990e430302eca24d32ba0b88dcc66233d6
|
Add connect2 wifi via pyobjc
|
clburlison_scripts/connect2_wifi_pyobjc/connect2_wifi_pyobjc.py
|
clburlison_scripts/connect2_wifi_pyobjc/connect2_wifi_pyobjc.py
|
Python
| 0 |
@@ -0,0 +1,546 @@
+#!/usr/bin/python%0A%22%22%22%0AI didn't create this but I'm storing it so I can reuse it.%0Ahttp://stackoverflow.com/a/34967364/4811765%0A%22%22%22%0Aimport objc%0A%0ASSID = %22MyWifiNetwork%22%0APASSWORD = %22MyWifiPassword%22%0A%0Aobjc.loadBundle('CoreWLAN',%0A bundle_path='/System/Library/Frameworks/CoreWLAN.framework',%0A module_globals=globals())%0Aiface = CWInterface.interface()%0Anetworks, err = iface.scanForNetworksWithName_err_(SSID, None)%0Anetwork = networks.anyObject()%0Asuccess, err = iface.associateToNetwork_password_err_(network, PASSWORD, None)%0A
|
|
a6d6b833e33dc465b0fa828018e2cbba748f8282
|
Add utility class for evaluation
|
pygraphc/evaluation/EvaluationUtility.py
|
pygraphc/evaluation/EvaluationUtility.py
|
Python
| 0 |
@@ -0,0 +1,457 @@
+%0Aclass EvaluationUtility(object):%0A @staticmethod%0A def convert_to_text(graph, clusters):%0A # convert clustering result from graph to text%0A new_clusters = %7B%7D%0A for cluster_id, nodes in clusters.iteritems():%0A for node in nodes:%0A members = graph.node%5Bnode%5D%5B'member'%5D%0A for member in members:%0A new_clusters.setdefault(cluster_id, %5B%5D).append(member)%0A%0A return new_clusters%0A
|
|
a2a2d6ab7edaa6fab9d2fb95586fde8f1f74b1cc
|
add new package (#24672)
|
var/spack/repos/builtin/packages/py-aniso8601/package.py
|
var/spack/repos/builtin/packages/py-aniso8601/package.py
|
Python
| 0 |
@@ -0,0 +1,557 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PyAniso8601(PythonPackage):%0A %22%22%22A library for parsing ISO 8601 strings.%22%22%22%0A%0A homepage = %22https://bitbucket.org/nielsenb/aniso8601%22%0A pypi = %22aniso8601/aniso8601-9.0.1.tar.gz%22%0A%0A version('9.0.1', sha256='72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973')%0A%0A depends_on('py-setuptools', type='build')%0A
|
|
f9a8642e3c5cfbce2e949c019dce3d538eefcd43
|
Juan Question
|
JuanQuestion/Juan.py
|
JuanQuestion/Juan.py
|
Python
| 0.999983 |
@@ -0,0 +1,626 @@
+from string import uppercase%0D%0Afrom string import lowercase%0D%0A%0D%0Aif __name__ == '__main__':%0D%0A asking = True%0D%0A%0D%0A print(%22Juan Questions%22)%0D%0A print(%22Presione 1 para salir%22)%0D%0A%0D%0A while asking == True:%0D%0A response = input(%22Pregunta algo: %22)%0D%0A%0D%0A if response.endswith(%22?%22) :%0D%0A print(%22Ofi%22)%0D%0A elif response %3E= 'A' and response %3C= 'Z':%0D%0A print(%22Chillea%22)%0D%0A elif response == %22%22 :%0D%0A print(%22mmm%22)%0D%0A elif response == %22 %22 :%0D%0A print(%22Me da igual%22)%0D%0A elif response == %221%22 :%0D%0A print(%22Salir%22)%0D%0A asking = False%0D%0A break%0D%0A
|
|
44eaf0fda528dcf2260f01fd8fd1ac12c138c594
|
Remove Duplicate code.
|
oneflow/profiles/models.py
|
oneflow/profiles/models.py
|
# -*- coding: utf-8 -*-
import uuid
import base64
from jsonfield import JSONField
from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
#from django.db.models.signals import post_save
#from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from sparks.django.mail import send_mail
class AbstractUserProfile(models.Model):
""" A mixin for any User class (even not real Django `User`)
which adds primitives to get/set if a given email was sent
to the user, and various other methods based on profile data.
It's understood that the given user model which will use
this mixin should either have a `.data` attribute of type
``JSONField``, or a `.profile.data` (JSONField too) attribute.
Using this class allow many User classes to work in a similar
way, be they having an dedicated profile, or not.
"""
email_announcements = models.BooleanField(_('Email announcements'),
default=True, blank=True)
last_modified = models.DateTimeField(_('Last modified'), auto_now_add=True)
register_data = JSONField(_('Register data, as JSON'),
default=lambda: {}, blank=True)
hash_codes = JSONField(_(u'Validation codes, as JSON'),
default=lambda: {}, blank=True)
sent_emails = JSONField(_('sent emails names, as JSON'),
default=lambda: {}, blank=True)
data = JSONField(_('Other user data, as JSON'),
default=lambda: {}, blank=True)
class Meta:
abstract = True
def email_user(self, subject, message, from_email=None):
""" Sends an email to this User, [TODO: if not already done ?]. """
send_mail(subject, message, from_email, [self.email])
def has_email_sent(self, email_name):
return self.sent_emails.get('email_sent_' + email_name, False)
def log_email_sent(self, email_name):
return self.sent_emails.setdefault('email_sent_' + email_name, True)
def renew_hash_code(self, name, commit=True):
self.hash_codes[name] = uuid.uuid4().hex
if commit:
self.save(update_fields=('hash_codes', ))
def unsubscribe_url(self):
return u'http://{0}{1}'.format(
settings.SITE_DOMAIN, reverse('unsubscribe', kwargs={
'email': base64.b64encode(self.email),
'hash_code': self.hash_codes.setdefault(
'unsubscribe', uuid.uuid4().hex)}))
class AbstractUserProfile(models.Model):
""" A mixin for any User class (even not real Django `User`)
which adds primitives to get/set if a given email was sent
to the user, and various other methods based on profile data.
It's understood that the given user model which will use
this mixin should either have a `.data` attribute of type
``JSONField``, or a `.profile.data` (JSONField too) attribute.
Using this class allow many User classes to work in a similar
way, be they having an dedicated profile, or not.
"""
email_announcements = models.BooleanField(_('Email announcements'),
default=True, blank=True)
last_modified = models.DateTimeField(_('Last modified'), auto_now_add=True)
register_data = JSONField(_('Register data, as JSON'),
default=lambda: {}, blank=True)
hash_codes = JSONField(_(u'Validation codes, as JSON'),
default=lambda: {}, blank=True)
sent_emails = JSONField(_('sent emails names, as JSON'),
default=lambda: {}, blank=True)
data = JSONField(_('Other user data, as JSON'),
default=lambda: {}, blank=True)
class Meta:
abstract = True
def has_email_sent(self, email_name):
return self.sent_emails.get('email_sent_' + email_name, False)
def log_email_sent(self, email_name):
return self.sent_emails.setdefault('email_sent_' + email_name, True)
def renew_hash_code(self, commit=True):
self.hash_code = uuid.uuid4().hex
if commit:
self.save(update_fields=('hash_code',))
def unsubscribe_url(self):
return u'http://{0}{1}'.format(
settings.SITE_DOMAIN, reverse('unsubscribe', kwargs={
'hash_code': self.hash_codes.setdefault(
'unsubscribe', uuid.uuid4().hex)}))
class UserProfile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL,
related_name='profile',
on_delete=models.CASCADE,
primary_key=True)
email_announcements = models.BooleanField(_('Email announcements'),
default=True, blank=True)
register_request_data = JSONField(_('Register data'),
default=lambda: {}, blank=True)
last_modified = models.DateTimeField(_('Last modified'), auto_now_add=True)
hash_code = models.CharField(_(u'Current validation code'), max_length=32,
default=lambda: uuid.uuid4().hex)
data = JSONField(_('profile data, as JSON'),
default=lambda: {}, blank=True)
class Meta:
verbose_name = _(u'User profile')
verbose_name_plural = _(u'User profiles')
def __unicode__(self):
return u'Profile for User %s' % self.user.username
def renew_hash_code(self, commit=True):
self.hash_code = uuid.uuid4().hex
if commit:
self.save(update_fields=('hash_code',))
def unsubscribe_url(self):
return u'http://{0}{1}'.format(settings.SITE_DOMAIN,
reverse('unsubscribe',
kwargs={'hash_code': self.hash_code}))
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.get_or_create(user=instance)
#post_save.connect(create_user_profile, sender=get_user_model())
|
Python
| 0.000004 |
@@ -2640,1992 +2640,8 @@
)%0A%0A%0A
-class AbstractUserProfile(models.Model):%0A %22%22%22 A mixin for any User class (even not real Django %60User%60)%0A which adds primitives to get/set if a given email was sent%0A to the user, and various other methods based on profile data.%0A%0A It's understood that the given user model which will use%0A this mixin should either have a %60.data%60 attribute of type%0A %60%60JSONField%60%60, or a %60.profile.data%60 (JSONField too) attribute.%0A%0A Using this class allow many User classes to work in a similar%0A way, be they having an dedicated profile, or not.%0A %22%22%22%0A email_announcements = models.BooleanField(_('Email announcements'),%0A default=True, blank=True)%0A last_modified = models.DateTimeField(_('Last modified'), auto_now_add=True)%0A%0A register_data = JSONField(_('Register data, as JSON'),%0A default=lambda: %7B%7D, blank=True)%0A hash_codes = JSONField(_(u'Validation codes, as JSON'),%0A default=lambda: %7B%7D, blank=True)%0A sent_emails = JSONField(_('sent emails names, as JSON'),%0A default=lambda: %7B%7D, blank=True)%0A data = JSONField(_('Other user data, as JSON'),%0A default=lambda: %7B%7D, blank=True)%0A%0A class Meta:%0A abstract = True%0A%0A def has_email_sent(self, email_name):%0A return self.sent_emails.get('email_sent_' + email_name, False)%0A%0A def log_email_sent(self, email_name):%0A return self.sent_emails.setdefault('email_sent_' + email_name, True)%0A%0A def renew_hash_code(self, commit=True):%0A self.hash_code = uuid.uuid4().hex%0A if commit:%0A self.save(update_fields=('hash_code',))%0A%0A def unsubscribe_url(self):%0A return u'http://%7B0%7D%7B1%7D'.format(%0A settings.SITE_DOMAIN, reverse('unsubscribe', kwargs=%7B%0A 'hash_code': self.hash_codes.setdefault(%0A 'unsubscribe', uuid.uuid4().hex)%7D))%0A%0A%0A
clas
|
6a6abadc2395810076b89fb38c759f85426a0304
|
Add framework for own SVM from scratch
|
supportVectorMachine/howItWorksSupportVectorMachine.py
|
supportVectorMachine/howItWorksSupportVectorMachine.py
|
Python
| 0 |
@@ -0,0 +1,781 @@
+# -*- coding: utf-8 -*-%0A%22%22%22Support Vector Machine (SVM) classification for machine learning.%0A%0ASVM is a binary classifier. The objective of the SVM is to find the best%0Aseparating hyperplane in vector space which is also referred to as the%0Adecision boundary. And it decides what separating hyperplane is the 'best'%0Abecause the distance from it and the associating data it is separating is the%0Agreatest at the plane in question.%0A%0AThis is the file where I create the algorithm from scratch.%0A%0Adataset is breast cancer data from: http://archive.ics.uci.edu/ml/datasets.html%0A%0AExample:%0A%0A $ python howItWorksSupportVectorMachine.py%0A%0ATodo:%0A * Sketch out the framework%0A%22%22%22%0A%0A# minimize magnitude(w) and maximize b%0A# with constraint y_i*(x_i*w+b)%3E=1%0A# or Class*(KnownFeatures*w+b)%3E=1%0A
|
|
2e1c257c0215f398e4ac5cc7d2d20ffa62492817
|
Create NewChatAlert.pyw
|
NewChatAlert.pyw
|
NewChatAlert.pyw
|
Python
| 0 |
@@ -0,0 +1,2760 @@
+# TODO: Check for cookie expiration%0A# TODO: Check for failed request%0A# TODO: Check for rejected cookie%0A# TODO: Get Cookie from other browsers (IE and Firefox)%0A# - See https://bitbucket.org/richardpenman/browser_cookie (and perhaps contribute)?%0A%0Afrom os import getenv%0Afrom sqlite3 import connect%0Afrom win32crypt import CryptUnprotectData%0Afrom requests import post%0Afrom ctypes import windll%0Afrom time import sleep, ctime%0A%0A# Function that displays a message box%0Adef MsgBox(title, text, style):%0A windll.user32.MessageBoxW(0, text, title, style)%0A%0A# Function that returns session cookie from chrome%0Adef GetSecureCookie(name):%0A # Connect to Chrome's cookies db%0A cookies_database_path = getenv(%0A %22APPDATA%22) + r%22%5C..%5CLocal%5CGoogle%5CChrome%5CUser Data%5CDefault%5CCookies%22%0A conn = connect(cookies_database_path)%0A cursor = conn.cursor()%0A # Get the encrypted cookie%0A cursor.execute(%0A %22SELECT encrypted_value FROM cookies WHERE name IS %5C%22%22 + name + %22%5C%22%22)%0A results = cursor.fetchone()%0A # Close db%0A conn.close()%0A if results == None:%0A decrypted = None%0A else:%0A decrypted = CryptUnprotectData(results%5B0%5D, None, None, None, 0)%5B%0A 1%5D.decode(%22utf-8%22)%0A return decrypted%0A%0A# Function that returns chat status using a provided session cookie%0Adef GetChatRequestCount(cookie):%0A # Ask TeamSupport for the chat status using cookie%0A response = post(%0A %22https://app.teamsupport.com/chatstatus%22,%0A cookies=%7B%22TeamSupport_Session%22: cookie%7D,%0A data='%7B%22lastChatMessageID%22: -1, %22lastChatRequestID%22: -1%7D'%0A )%0A return response.json()%5B%22ChatRequestCount%22%5D%0A%0Adef main():%0A # Loop forever - checking for new chat requests%0A while True:%0A cookie = GetSecureCookie(%22TeamSupport_Session%22)%0A if cookie == None:%0A MsgBox(%22Session cookie not found%22,%0A %22%22%22TeamSupport session cookie could not be found in Chrome store%0A%0A New chat notifications will not work until 
this is resolved%0A%0A Log in to TeamSupport using Chrome to fix this%22%22%22,%0A 16)%0A # Pause for 30 seconds before trying again%0A sleep(30)%0A else:%0A chat_request_count = GetChatRequestCount(cookie)%0A # Alert if there are new chat requests or log if none%0A if chat_request_count == 0:%0A print(ctime() + %22 - No new chat requests%22)%0A elif chat_request_count == 1:%0A MsgBox(%22New Chat Request%22, %22There is 1 new chat request%22, 64)%0A else:%0A MsgBox(%22New Chat Requests%22, %22There are %22 +%0A str(chat_request_count) + %22 chat requests%22, 48)%0A # Pause for 10 seconds before checking again%0A sleep(10)%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
1d4938232aa103ea2a919796e9fa35e2699d41d9
|
Create PythonAnswer2.py
|
PythonAnswer2.py
|
PythonAnswer2.py
|
Python
| 0.999202 |
@@ -0,0 +1,370 @@
+def fibonacci(x):%0A a = 0 #first number%0A b = 1 #second number%0A for x in range(x - 1):%0A a, b = b, a + b #a becomes b and b becomes a and b added together%0A return a #returns the next number in the sequence%0A%0Aprint %22Fibonacci Answer%22%0Afor x in range(1, 35): #number of times I need the sequence to run to reach 4million%0A print fibonacci(x)%0A
|
|
5c60be411e61d5edfbf658509b437973d596a3ba
|
Create server.py
|
Networking/server.py
|
Networking/server.py
|
Python
| 0.000001 |
@@ -0,0 +1,2006 @@
+# -*- coding: utf-8 -*-%0A%0Aimport socket, math%0A%0A# demarrage du serveur%0Aserver = %22127.0.0.1%22%0Aport = 55042%0Amysock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)%0Amysock.bind((server, port))%0A%0AJOG_IP = %5BNone,None,None,None,None,None,None,None,None,None%5D%0AJOG_coordinates= %5BNone,None,None,None,None,None,None,None,None,None%5D%0Aennemy_coordinates = %5B0.0,0.0%5D%0A%0A# ne peut etre appelee qu'a condition que tous les tableaux soient remplis%0Adef update_coordinates() :%0A%09global JOG_IP, JOG_coordinates, ennemy_coordinates%0A%09for e in JOG_IP :%0A%09%09# determination des deux plus proches voisins%0A%09%09JOG_ID = JOG_IP.index(e)%0A%09%09current_coordinates = JOG_coordinates%5BJOG_ID%5D%0A%09%09distances = %5Bfloat(%22inf%22),float(%22inf%22),float(%22inf%22),float(%22inf%22),float(%22inf%22),float(%22inf%22),float(%22inf%22),float(%22inf%22),float(%22inf%22),float(%22inf%22)%5D%0A%09%09for c in JOG_coordinates :%0A%09%09%09if c != current_coordinates :%0A%09%09%09%09distances%5BJOG_coordinates.index(c)%5D = math.sqrt( (c%5B0%5D-current_coordinates%5B0%5D)**2 + (c%5B1%5D-current_coordinates%5B1%5D)**2 )%0A%09%09neighbour1_ID = distances.index(min(distances))%0A%09%09distances%5Bdistances.index(min(distances))%5D = max(distances)%0A%09%09neighbour2_ID = distances.index(min(distances))%0A%09%09%0A%09%09# formatage et envoi du message%0A%09%09msg_coordinates = 'C'+' '+'A'+str(JOG_coordinates%5Bneighbour1_ID%5D%5B0%5D)+' '+'A'+str(JOG_coordinates%5Bneighbour1_ID%5D%5B1%5D)+' '+'B'+str(JOG_coordinates%5Bneighbour2_ID%5D%5B0%5D)+' '+'B'+str(JOG_coordinates%5Bneighbour1_ID%5D%5B1%5D)+' '+'T'+str(ennemy_coordinates%5B0%5D)+'T'+str(ennemy_coordinates%5B1%5D)+' '+'T'+str(ennemy_velocity%5B1%5D)+'V'+str(ennemy_velocity%5B1%5D)%0A%09%09mysock.sendto(msg_coordinates, e)%0A%0Awhile True :%0A%09msg, client = mysock.recvfrom(255)%0A%09if msg :%0A%09%09msg_parts = msg.split()%0A%09%09JOG_IP%5Bmsg_parts%5B0%5D%5D = client%0A%09%09if msg_parts%5B1%5D == 
'C' :%09# cas o%C3%B9 le message re%C3%A7u est une mise %C3%A0 jour de la position%0A%09%09%09JOG_coordinates%5Bmsg_parts%5B0%5D%5D = %5Bfloat(msg_parts%5B2%5D), float(msg_parts%5B3%5D)%5D%0A%09%09elif msg_parts%5B1%5D == 'E' :%09# cas o%C3%B9 le message re%C3%A7u est une erreur%0A%09%09%09# TODO%0A%09%09%09pass%0A%09%09if not ((None in JOG_IP) %7C (None in JOG_coordinates)) :%0A%09%09%09update_coordinates()%0A
|
|
92ec849fc18d7cb610839abe2213ce30ceced46b
|
Add ci settings file for postgresql database
|
InvenTree/InvenTree/ci_postgresql.py
|
InvenTree/InvenTree/ci_postgresql.py
|
Python
| 0 |
@@ -0,0 +1,457 @@
+%22%22%22%0AConfiguration file for running tests against a MySQL database.%0A%22%22%22%0A%0Afrom InvenTree.settings import *%0A%0A# Override the 'test' database%0Aif 'test' in sys.argv:%0A eprint('InvenTree: Running tests - Using MySQL test database')%0A %0A DATABASES%5B'default'%5D = %7B%0A # Ensure postgresql backend is being used%0A 'ENGINE': 'django.db.backends.postgresql',%0A 'NAME': 'inventree_test_db',%0A 'USER': 'postgres',%0A 'PASSWORD': '',%0A %7D%0A
|
|
f7db5d9cac80432a7016043a1b2781fbaa7f040e
|
Create new package. (#6891)
|
var/spack/repos/builtin/packages/r-rappdirs/package.py
|
var/spack/repos/builtin/packages/r-rappdirs/package.py
|
Python
| 0 |
@@ -0,0 +1,1765 @@
+##############################################################################%0A# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/spack/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass RRappdirs(RPackage):%0A %22%22%22An easy way to determine which directories on the users computer%0A you should use to save data, caches and logs. A port of Python's%0A 'Appdirs' to R.%22%22%22%0A%0A homepage = %22https://cran.r-project.org/package=rappdirs%22%0A url = %22https://cran.rstudio.com/src/contrib/rappdirs_0.3.1.tar.gz%22%0A list_url = %22https://cran.rstudio.com/src/contrib/Archive/rappdirs%22%0A%0A version('0.3.1', 'fbbdceda2aa49374e61c7d387bf9ea21')%0A%0A depends_on('[email protected]:', type=('build', 'run'))%0A
|
|
6d2efcea281775c31cd1df29eac63054e3fe51df
|
Create solution.py
|
data_structures/linked_list/problems/delete_n_after_m/py/solution.py
|
data_structures/linked_list/problems/delete_n_after_m/py/solution.py
|
Python
| 0.000018 |
@@ -0,0 +1,899 @@
+import LinkedList%0A%0A# Problem description: %0A# Solution time complexity: %0A# Comments: %0A%0A# Linked List Node inside the LinkedList module is declared as:%0A#%0A# class Node:%0A# def __init__(self, val, nxt=None):%0A# self.val = val%0A# self.nxt = nxt%0A#%0A%0Adef DeleteNAfterMNodes(head: LinkedList.Node, n: int, m: int) -%3E LinkedList.Node:%0A if head == None:%0A return None%0A%0A slow = head%0A%0A while slow != None:%0A for _ in range(m - 1):%0A if slow == None:%0A break%0A else:%0A slow = slow.nxt%0A%0A if slow == None:%0A break%0A else:%0A fast = slow.nxt%0A%0A for _ in range(n):%0A if fast == None:%0A break%0A else:%0A fast = fast.nxt%0A%0A slow.nxt = fast%0A slow = slow.nxt%0A%0A return head%0A
|
|
c84ce4b2494771c48890c122420e4665828ac4f8
|
Solve Code Fights different rightmost bit problem
|
CodeFights/differentRightmostBit.py
|
CodeFights/differentRightmostBit.py
|
Python
| 0.00005 |
@@ -0,0 +1,711 @@
+#!/usr/local/bin/python%0A# Code Different Right-most Bit (Core) Problem%0A%0A%0Adef differentRightmostBit(n, m):%0A return (n %5E m) & -(n %5E m)%0A%0A%0Adef main():%0A tests = %5B%0A %5B11, 13, 2%5D,%0A %5B7, 23, 16%5D,%0A %5B1, 0, 1%5D,%0A %5B64, 65, 1%5D,%0A %5B1073741823, 1071513599, 131072%5D,%0A %5B42, 22, 4%5D%0A %5D%0A%0A for t in tests:%0A res = differentRightmostBit(t%5B0%5D, t%5B1%5D)%0A if t%5B2%5D == res:%0A print(%22PASSED: differentRightmostBit(%7B%7D, %7B%7D) returned %7B%7D%22%0A .format(t%5B0%5D, t%5B1%5D, res))%0A else:%0A print((%22FAILED: differentRightmostBit(%7B%7D, %7B%7D) returned %7B%7D,%22%0A %22answer: %7B%7D%22).format(t%5B0%5D, t%5B1%5D, res, t%5B2%5D))%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
8d0d564eae53a10b98b488b8c13eb952134cfc5e
|
Create 0408_country_body_part.py
|
2018/0408_country_body_part.py
|
2018/0408_country_body_part.py
|
Python
| 0.000023 |
@@ -0,0 +1,613 @@
+#!/usr/bin/python%0A'''%0ANPR 2018-04-08%0Ahttp://www.npr.org/puzzle%0A%0AName part of the human body, insert a speech hesitation, and you'll name a country %E2%80%94 what is it?%0A'''%0A%0Afrom nltk.corpus import gazetteers%0Aimport nprcommontools as nct%0A%0A#%25%25%0ABODY_PARTS = nct.get_category_members('body_part')%0A%0A# COUNTRIES%0ACOUNTRIES = frozenset(%5Bx.lower() for x in gazetteers.words('countries.txt')%5D)%0A%0A#%25%25%0Afor c in COUNTRIES:%0A for b in BODY_PARTS:%0A if c.startswith(b%5B0%5D) and c.endswith(b%5B-1%5D):%0A for i in range(1,len(b)-1):%0A if c.startswith(b%5B:i%5D) and c.endswith(b%5Bi:%5D):%0A print b,c%0A%0A
|
|
61ec74a685deec0b1ddc0a9274e5df0a597c6b6b
|
Create TweetStreamer.py
|
TweetStreamer.py
|
TweetStreamer.py
|
Python
| 0.000001 |
@@ -0,0 +1,2337 @@
+import tweepy%0Afrom tweepy import Stream%0Afrom tweepy import OAuthHandler%0Afrom tweepy.streaming import StreamListener%0Aimport json%0Afrom elasticsearch import Elasticsearch%0Aimport datetime%0Afrom watson_developer_cloud import NaturalLanguageUnderstandingV1%0Aimport watson_developer_cloud.natural_language_understanding.features.v1 as Features%0A%0A%22%22%22%0AThis twitter code uses a user's numerical ID and will track their tweets live as the come in. Runs through watson's NLU%0AAPI and then uploads to ES.%0A%22%22%22%0A%0Aconsumer_key=%22YBFMgErZkiN8MWqBGcHXm2dCp%22%0Aconsumer_secret=%22fmuMKwya4XyyjegvSyYAwBalZYI8heom3Ds56hkxVZmBuRNQ6t%22%0A%0Aaccess_token=%22918660934528155648-InbzRO92y5NFmhGEmiGI7NGc0wxZhAO%22%0Aaccess_token_secret=%22mn3PehlsuJwJnQ4dlMC3cASwMyqlC0GHPT2uok8KbJltt%22%0A%0Aauth = OAuthHandler(consumer_key, consumer_secret)%0Aauth.set_access_token(access_token, access_token_secret)%0Aapi = tweepy.API(auth)%0A%0A# Setup elasticsearch%0Aes = Elasticsearch(%2210.0.2.81:9200%22)%0A%0A# Setup watson NLU API%0Anatural_language_understanding = NaturalLanguageUnderstandingV1(%0A version='2017-05-19',%0A username='3efc3d64-d9ee-43b3-a289-e530bad6347b',%0A password='uDs5p3a4CPyd')%0A%0A%0Adef natural_language(tweet):%0A response = natural_language_understanding.analyze(%0A text=tweet,%0A features=%5BFeatures.Sentiment(), Features.Emotion()%5D)%0A return response%0A%0A%0Adef fix_tstamp(tstamp):%0A # Mon Oct 16 12:57:50 +0000 2017%0A date = tstamp.replace(%22 +0000%22, %22%22)%0A date = datetime.datetime.strptime(date, '%25a %25b %25d %25H:%25M:%25S %25Y')%0A return str(date)%0A%0A%0A%0Aclass listener(StreamListener):%0A def on_data(self, data):%0A print(data)%0A data = json.loads(data)%0A if not data%5B'retweeted'%5D and '@realDonaldTrump' not in data%5B'text'%5D:%0A data%5B%22created_at%22%5D = fix_tstamp(data%5B%22created_at%22%5D)%0A indexdate = data%5B%22created_at%22%5D%5B:7%5D%0A try:%0A data%5B%22watson_natural_lang%22%5D = 
(natural_language(data%5B%22text%22%5D))%0A except:%0A print data%5B%22text%22%5D%0A pass%0A print data%0A #es.index(index='presidentialtweets-' + indexdate, doc_type='twitter', id=data%5B%22id%22%5D, body=data)%0A return(True)%0A def on_error(self, status):%0A print status%0A%0Adef main():%0A twitterStream = Stream(auth, listener())%0A twitterStream.filter(follow=%5B'25073877'%5D)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
3345dc2f1ac15f06d3e95b5ead894ee9d3a27d9e
|
Add file writer utility script
|
piwrite.py
|
piwrite.py
|
Python
| 0.000001 |
@@ -0,0 +1,498 @@
+#!/bin/env python%0A%0Aimport argparse%0Aimport sys%0Aimport os%0A%0Aparser = argparse.ArgumentParser(description=%22Write multiple svgs from stdin to files%22)%0Aparser.add_argument('-o', '--outfile', metavar='OUTFILE', default='output.svg')%0A%0Aargs = parser.parse_args()%0A%0Abase, extension = os.path.splitext(args.outfile)%0A%0A%0Adef write_files(collection):%0A for i,s in enumerate(collection):%0A f = open(base + %22%2506d%22 %25 i + extension, 'w')%0A f.write(s)%0A f.close()%0A%0Awrite_files(sys.stdin.readlines())%0A
|
|
7147dfc237acb64a8e655e63681a387282043994
|
Add lc0031_next_permutation.py
|
lc0031_next_permutation.py
|
lc0031_next_permutation.py
|
Python
| 0.000001 |
@@ -0,0 +1,839 @@
+%22%22%22Leetcode 31. Next Permutation%0AMedium%0A%0AURL: https://leetcode.com/problems/next-permutation/%0A%0AImplement next permutation, which rearranges numbers into the lexicographically%0Anext greater permutation of numbers.%0A%0AIf such arrangement is not possible, it must rearrange it as the lowest possible%0Aorder (ie, sorted in ascending order).%0A%0AThe replacement must be in-place and use only constant extra memory.%0A%0AHere are some examples. Inputs are in the left-hand column and its corresponding%0Aoutputs are in the right-hand column.%0A1,2,3 -%3E 1,3,2%0A3,2,1 -%3E 1,2,3%0A1,1,5 -%3E 1,5,1%0A%22%22%22%0A%0Aclass Solution(object):%0A def nextPermutation(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: None Do not return anything, modify nums in-place instead.%0A %22%22%22%0A pass%0A%0A%0Adef main():%0A pass%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
46d5b197e022815c2074fbc94ca324d31d470dd0
|
Implement a fasttext example (#3446)
|
examples/imdb_fasttext.py
|
examples/imdb_fasttext.py
|
Python
| 0 |
@@ -0,0 +1,2003 @@
+'''This example demonstrates the use of fasttext for text classification%0A%0ABased on Joulin et al's paper:%0A%0ABags of Tricks for Efficient Text Classification%0Ahttps://arxiv.org/abs/1607.01759%0A%0ACan achieve accuracy around 88%25 after 5 epochs in 70s.%0A%0A'''%0A%0Afrom __future__ import print_function%0Aimport numpy as np%0Anp.random.seed(1337) # for reproducibility%0A%0Afrom keras.preprocessing import sequence%0Afrom keras.models import Sequential%0Afrom keras.layers import Dense, Activation, Flatten%0Afrom keras.layers import Embedding%0Afrom keras.layers import AveragePooling1D%0Afrom keras.datasets import imdb%0Afrom keras import backend as K%0A%0A%0A# set parameters:%0Amax_features = 20000%0Amaxlen = 400%0Abatch_size = 32%0Aembedding_dims = 20%0Anb_epoch = 5%0A%0Aprint('Loading data...')%0A(X_train, y_train), (X_test, y_test) = imdb.load_data(nb_words=max_features)%0Aprint(len(X_train), 'train sequences')%0Aprint(len(X_test), 'test sequences')%0A%0Aprint('Pad sequences (samples x time)')%0AX_train = sequence.pad_sequences(X_train, maxlen=maxlen)%0AX_test = sequence.pad_sequences(X_test, maxlen=maxlen)%0Aprint('X_train shape:', X_train.shape)%0Aprint('X_test shape:', X_test.shape)%0A%0Aprint('Build model...')%0Amodel = Sequential()%0A%0A# we start off with an efficient embedding layer which maps%0A# our vocab indices into embedding_dims dimensions%0Amodel.add(Embedding(max_features,%0A embedding_dims,%0A input_length=maxlen))%0A%0A# we add a AveragePooling1D, which will average the embeddings%0A# of all words in the document%0Amodel.add(AveragePooling1D(pool_length=model.output_shape%5B1%5D))%0A%0A# We flatten the output of the AveragePooling1D layer%0Amodel.add(Flatten())%0A%0A# We project onto a single unit output layer, and squash it with a sigmoid:%0Amodel.add(Dense(1, activation = 'sigmoid'))%0A%0Amodel.compile(loss='binary_crossentropy',%0A optimizer='adam',%0A metrics=%5B'accuracy'%5D)%0A%0Amodel.fit(X_train, y_train,%0A 
batch_size=batch_size,%0A nb_epoch=nb_epoch,%0A validation_data=(X_test, y_test))%0A
|
|
dde0efeec1aca8ed3ec2e444bbb4c179be89fec5
|
Create MooreNeightbourhood.py
|
Checkio/MooreNeightbourhood.py
|
Checkio/MooreNeightbourhood.py
|
Python
| 0 |
@@ -0,0 +1,1787 @@
+def count_neighbours(grid, row, col):%0A neig = 0%0A if (col - 1 %3E= 0):%0A if (grid%5Brow%5D%5Bcol - 1%5D == 1):%0A neig += 1%0A if (col - 1 %3E= 0 and row - 1 %3E= 0):%0A if (grid%5Brow - 1%5D%5Bcol -1%5D == 1):%0A neig += 1%0A if (row - 1 %3E= 0):%0A if (grid%5Brow - 1%5D%5Bcol%5D == 1):%0A neig += 1%0A if (col + 1 %3C len(grid%5B0%5D) and row - 1 %3E= 0):%0A if (grid%5Brow - 1%5D%5Bcol + 1%5D == 1):%0A neig += 1%0A if (col + 1 %3C len(grid%5B0%5D)):%0A if (grid%5Brow%5D%5Bcol + 1%5D == 1):%0A neig += 1%0A if (col + 1 %3C len(grid%5B0%5D) and row + 1 %3C len(grid)):%0A if (grid%5Brow + 1%5D%5Bcol + 1%5D == 1):%0A neig += 1%0A if (row + 1 %3C len(grid)):%0A if (grid%5Brow + 1%5D%5Bcol%5D == 1):%0A neig += 1%0A if (col - 1 %3E= 0 and row + 1 %3C len(grid)):%0A if (grid%5Brow + 1%5D%5Bcol - 1%5D == 1):%0A neig += 1%0A return neig%0A%0A%0Aif __name__ == '__main__':%0A #These %22asserts%22 using only for self-checking and not necessary for auto-testing%0A assert count_neighbours(((1, 0, 0, 1, 0),%0A (0, 1, 0, 0, 0),%0A (0, 0, 1, 0, 1),%0A (1, 0, 0, 0, 0),%0A (0, 0, 1, 0, 0),), 1, 2) == 3, %221st example%22%0A assert count_neighbours(((1, 0, 0, 1, 0),%0A (0, 1, 0, 0, 0),%0A (0, 0, 1, 0, 1),%0A (1, 0, 0, 0, 0),%0A (0, 0, 1, 0, 0),), 0, 0) == 1, %222nd example%22%0A assert count_neighbours(((1, 1, 1),%0A (1, 1, 1),%0A (1, 1, 1),), 0, 2) == 3, %22Dense corner%22%0A assert count_neighbours(((0, 0, 0),%0A (0, 1, 0),%0A (0, 0, 0),), 1, 1) == 0, %22Single%22%0A
|
|
a3df0567c295f0b2879c9a4f095a31108359d531
|
Add missing migration for invoice status
|
nodeconductor/billing/migrations/0003_invoice_status.py
|
nodeconductor/billing/migrations/0003_invoice_status.py
|
Python
| 0 |
@@ -0,0 +1,439 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('billing', '0002_pricelist'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='invoice',%0A name='status',%0A field=models.CharField(max_length=80, blank=True),%0A preserve_default=True,%0A ),%0A %5D%0A
|
|
269a34e87797a3271013e23d504a6f6a159ae48e
|
Index testgroup_test.test_id
|
migrations/versions/3a3366fb7822_index_testgroup_test.py
|
migrations/versions/3a3366fb7822_index_testgroup_test.py
|
Python
| 0.000011 |
@@ -0,0 +1,432 @@
+%22%22%22Index testgroup_test.test_id%0A%0ARevision ID: 3a3366fb7822%0ARevises: 139e272152de%0ACreate Date: 2014-01-02 22:20:55.132222%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '3a3366fb7822'%0Adown_revision = '139e272152de'%0A%0Afrom alembic import op%0A%0A%0Adef upgrade():%0A op.create_index('idx_testgroup_test_test_id', 'testgroup_test', %5B'test_id'%5D)%0A%0A%0Adef downgrade():%0A op.drop_index('idx_testgroup_test_test_id', 'testgroup_test')%0A
|
|
7b40a4902d1dc43c73a7858fc9286a641b3a9666
|
Add validation function removed from main script.
|
assess_isoform_quantification/options.py
|
assess_isoform_quantification/options.py
|
Python
| 0 |
@@ -0,0 +1,191 @@
+from schema import Schema%0A%0A%0Adef validate_file_option(file_option, msg):%0A msg = %22%7Bmsg%7D '%7Bfile%7D'.%22.format(msg=msg, file=file_option)%0A return Schema(open, error=msg).validate(file_option)%0A
|
|
d04118acc5421d4b48e31c78874a740eb469c3d7
|
fix boan1244 'Boëng'
|
migrations/versions/506dcac7d75_fix_boan1244_mojibake.py
|
migrations/versions/506dcac7d75_fix_boan1244_mojibake.py
|
Python
| 0.000001 |
@@ -0,0 +1,821 @@
+# coding=utf-8%0A%22%22%22fix boan1244 mojibake%0A%0ARevision ID: 506dcac7d75%0ARevises: 4513ba6253e1%0ACreate Date: 2015-04-15 19:20:59.059000%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '506dcac7d75'%0Adown_revision = '4513ba6253e1'%0A%0Aimport datetime%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A id, before, after = 'boan1244', u'Bo%5Cxc3%5Cxabng', u'Bo%5Cxebng'%0A%0A update_name = sa.text('UPDATE language SET updated = now(), '%0A 'name = :after WHERE id = :id AND name = :before')%0A update_ident = sa.text('UPDATE identifier SET updated = now(), '%0A 'name = :after WHERE type = :type AND name = :before ')%0A%0A op.execute(update_name.bindparams(id=id, before=before, after=after))%0A op.execute(update_ident.bindparams(type='name', before=before, after=after))%0A%0A%0Adef downgrade():%0A pass%0A
|
|
85e4a327ba641fbe9c275b4760c60683ca215d61
|
Add unit tests.
|
test_pto.py
|
test_pto.py
|
Python
| 0 |
@@ -0,0 +1,1613 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%22%22%22Tests for PTO.%22%22%22%0Aimport unittest%0A%0A%0Aimport pto%0Aimport time%0A%0A%0A_TIMEOUT = 5%0A_FUZZ_FACTOR = 1%0A%0A%0Aclass SlowClass(object):%0A @pto.timeout(_TIMEOUT)%0A def slow_instance_method(self):%0A cut_off = time.time() + _TIMEOUT%0A while time.time() %3C cut_off + _FUZZ_FACTOR:%0A pass%0A return True%0A%0A @classmethod%0A @pto.timeout(_TIMEOUT)%0A def slow_class_method(cls):%0A cut_off = time.time() + _TIMEOUT%0A while time.time() %3C cut_off + _FUZZ_FACTOR:%0A pass%0A return True%0A%0A @staticmethod%0A @pto.timeout(_TIMEOUT)%0A def slow_static_method():%0A cut_off = time.time() + _TIMEOUT%0A while time.time() %3C cut_off + _FUZZ_FACTOR:%0A pass%0A return True%0A%0A%0Aclass PtoTestCase(unittest.TestCase):%0A def setUp(self):%0A self.slowInstance = SlowClass()%0A%0A def tearDown(self):%0A pass%0A%0A def test_function(self):%0A @pto.timeout(_TIMEOUT)%0A def slow_func():%0A cut_off = time.time() + _TIMEOUT%0A while time.time() %3C cut_off + _FUZZ_FACTOR:%0A pass%0A return True%0A self.assertRaises(pto.TimedOutException, slow_func)%0A%0A def test_instance_method(self):%0A self.assertRaises(pto.TimedOutException, self.slowInstance.slow_instance_method)%0A%0A def test_class_method(self):%0A self.assertRaises(pto.TimedOutException, self.slowInstance.slow_class_method)%0A%0A def test_static_method(self):%0A self.assertRaises(pto.TimedOutException, SlowClass.slow_static_method)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
2067bdb9d0f9947a674cb94d0c988049f3038ea4
|
create test stubs
|
test_viz.py
|
test_viz.py
|
Python
| 0.000002 |
@@ -0,0 +1,636 @@
+def test_create_distance_matrix():%0A pass%0A%0Adef test_get_translation_table():%0A pass%0A%0A%0Adef test_naive_backtranslate():%0A pass%0A%0Adef test_get_peptide_index():%0A pass%0A%0Adef test_demo_dna_features_viewer():%0A pass%0A%0Adef test_ngrams():%0A pass%0A%0Adef test_make_trigrams():%0A pass%0A%0Adef test_nucleotide_distribution():%0A pass%0A%0Adef test_get_peptide_toplot():%0A pass%0A%0Adef test_peptide_distribution():%0A pass%0A%0Adef test_plot_ABI():%0A pass%0A%0Adef test_get_genbank_sequence():%0A pass%0A%0Adef test_get_fasta_sequence():%0A pass%0A%0Adef test_calc_sequence_similarity():%0A pass%0A%0Adef test_make_parser():%0A pass%0A%0Adef test_main():%0A pass%0A
|
|
e304aae71617cdba0ffcb720a24406375fb866a1
|
Copy of Ryan's PCMToWave component.
|
Sketches/MH/audio/ToWAV.py
|
Sketches/MH/audio/ToWAV.py
|
Python
| 0.000002 |
@@ -0,0 +1,2775 @@
+from Axon.Component import component%0Aimport string%0Aimport struct%0Afrom Axon.Ipc import producerFinished, shutdown%0A%0Aclass PCMToWave(component):%0A def __init__(self, bytespersample, samplingfrequency):%0A super(PCMToWave, self).__init__()%0A self.bytespersample = bytespersample%0A self.samplingfrequency = samplingfrequency%0A %0A if self.bytespersample not in %5B2,4%5D:%0A print %22Currently bytespersample must be 2 or 4%22%0A raise ValueError%0A %0A bytestofunction = %7B 2: self.sample2Byte, 4: self.sample4Byte %7D%0A self.pack = bytestofunction%5Bself.bytespersample%5D%0A %0A def sample2Byte(self, value):%0A return struct.pack(%22%3Ch%22, int(value * 32768.0))%0A%0A def sample4Byte(self, value):%0A return struct.pack(%22%3Cl%22, int(value * 2147483648.0))%0A %0A def main(self):%0A #we don't know the length yet, so we say the file lasts an arbitrary (long) time %0A riffchunk = %22RIFF%22 + struct.pack(%22%3CL%22, 0xEFFFFFFF) + %22WAVE%22%0A %0A bytespersecond = self.bytespersample * self.samplingfrequency%0A %0A formatchunk = %22fmt %22%0A formatchunk += struct.pack(%22%3CL%22, 0x10) #16 for PCM%0A formatchunk += struct.pack(%22%3CH%22, 0x01) #PCM/Linear quantization%0A formatchunk += struct.pack(%22%3CH%22, 0x01) #mono%0A formatchunk += struct.pack(%22%3CL%22, self.samplingfrequency)%0A formatchunk += struct.pack(%22%3CL%22, bytespersecond)%0A formatchunk += struct.pack(%22%3CH%22, self.bytespersample)%0A formatchunk += struct.pack(%22%3CH%22, self.bytespersample * 8)%0A %0A self.send(riffchunk, %22outbox%22)%0A self.send(formatchunk, %22outbox%22)%0A datachunkheader = %22data%22 + struct.pack(%22%3CL%22, 0xEFFFFFFF) #again, an arbitrary (large) value%0A self.send(datachunkheader, %22outbox%22)%0A %0A running = True%0A while running:%0A yield 1%0A %0A codedsamples = %5B%5D%0A while self.dataReady(%22inbox%22): # we accept lists of floats%0A samplelist = self.recv(%22inbox%22)%0A %0A for sample in samplelist:%0A %0A if sample %3C -1:%0A sample = -1%0A 
elif sample %3E 1:%0A sample = 1%0A %0A codedsamples.append(self.pack(sample))%0A %0A del samplelist%0A %0A if codedsamples:%0A self.send(string.join(codedsamples, %22%22), %22outbox%22)%0A %0A while self.dataReady(%22control%22): # we accept lists of floats%0A msg = self.recv(%22control%22)%0A if isinstance(msg, producerFinished) or isinstance(msg, shutdown):%0A return%0A %0A self.pause()%0A
|
|
0ac91856caf274cee92c1261dfd9cb652f1c665f
|
Fix 'test_help' for shell client.
|
glanceclient/tests/functional/test_readonly_glance.py
|
glanceclient/tests/functional/test_readonly_glance.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from tempest_lib import exceptions
from glanceclient.tests.functional import base
class SimpleReadOnlyGlanceClientTest(base.ClientTestBase):
"""Read only functional python-glanceclient tests.
This only exercises client commands that are read only.
"""
def test_list_v1(self):
out = self.glance('--os-image-api-version 1 image-list')
endpoints = self.parser.listing(out)
self.assertTableStruct(endpoints, [
'ID', 'Name', 'Disk Format', 'Container Format',
'Size', 'Status'])
def test_list_v2(self):
out = self.glance('--os-image-api-version 2 image-list')
endpoints = self.parser.listing(out)
self.assertTableStruct(endpoints, ['ID', 'Name'])
def test_fake_action(self):
self.assertRaises(exceptions.CommandFailed,
self.glance,
'this-does-not-exist')
def test_member_list_v1(self):
tenant_name = '--tenant-id %s' % self.creds['project_name']
out = self.glance('--os-image-api-version 1 member-list',
params=tenant_name)
endpoints = self.parser.listing(out)
self.assertTableStruct(endpoints,
['Image ID', 'Member ID', 'Can Share'])
def test_member_list_v2(self):
try:
# NOTE(flwang): If set disk-format and container-format, Jenkins
# will raise an error said can't recognize the params, thouhg it
# works fine at local. Without the two params, Glance will
# complain. So we just catch the exception can skip it.
self.glance('--os-image-api-version 2 image-create --name temp')
except Exception:
pass
out = self.glance('--os-image-api-version 2 image-list'
' --visibility private')
image_list = self.parser.listing(out)
# NOTE(flwang): Because the member-list command of v2 is using
# image-id as required parameter, so we have to get a valid image id
# based on current environment. If there is no valid image id, we will
# pass in a fake one and expect a 404 error.
if len(image_list) > 0:
param_image_id = '--image-id %s' % image_list[0]['ID']
out = self.glance('--os-image-api-version 2 member-list',
params=param_image_id)
endpoints = self.parser.listing(out)
self.assertTableStruct(endpoints,
['Image ID', 'Member ID', 'Status'])
else:
param_image_id = '--image-id fake_image_id'
self.assertRaises(exceptions.CommandFailed,
self.glance,
'--os-image-api-version 2 member-list',
params=param_image_id)
def test_help(self):
help_text = self.glance('--os-image-api-version 2 help')
lines = help_text.split('\n')
self.assertFirstLineStartsWith(lines, 'usage: glance')
commands = []
cmds_start = lines.index('Positional arguments:')
cmds_end = lines.index('Optional arguments:')
command_pattern = re.compile('^ {4}([a-z0-9\-\_]+)')
for line in lines[cmds_start:cmds_end]:
match = command_pattern.match(line)
if match:
commands.append(match.group(1))
commands = set(commands)
wanted_commands = set(('image-create', 'image-delete', 'help',
'image-download', 'image-show', 'image-update',
'member-create', 'member-delete',
'member-list', 'image-list'))
self.assertFalse(wanted_commands - commands)
def test_version(self):
self.glance('', flags='--version')
def test_debug_list(self):
self.glance('--os-image-api-version 2 image-list', flags='--debug')
def test_no_ssl_compression(self):
# Test deprecating this hasn't broken anything
out = self.glance('--os-image-api-version 1 '
'--no-ssl-compression image-list')
endpoints = self.parser.listing(out)
self.assertTableStruct(endpoints, [
'ID', 'Name', 'Disk Format', 'Container Format',
'Size', 'Status'])
|
Python
| 0 |
@@ -4309,16 +4309,87 @@
ge-list'
+, 'image-deactivate',%0A 'image-reactivate'
))%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.