commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
1c7b9c1ed1f4d6a8ee201ba109db95449181fee1
|
Make operation tests timezone-aware
|
pylxd/tests/test_operation.py
|
pylxd/tests/test_operation.py
|
# Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from ddt import ddt
import mock
import unittest
from pylxd import api
from pylxd import connection
from pylxd.tests import annotated_data
from pylxd.tests import fake_api
@ddt
class LXDUnitTestOperation(unittest.TestCase):
def setUp(self):
super(LXDUnitTestOperation, self).setUp()
self.lxd = api.API()
def test_list_operations(self):
with mock.patch.object(connection.LXDConnection, 'get_object') as ms:
ms.return_value = ('200', fake_api.fake_operation_list())
self.assertEqual(
['1234'],
self.lxd.list_operations())
ms.assert_called_with('GET',
'/1.0/operations')
def test_operation_info(self):
with mock.patch.object(connection.LXDConnection, 'get_object') as ms:
ms.return_value = ('200', fake_api.fake_operation())
self.assertEqual({
'operation_create_time':
datetime.datetime.fromtimestamp(1433869644)
.strftime('%Y-%m-%d %H:%M:%S'),
'operation_update_time':
datetime.datetime.fromtimestamp(1433869643)
.strftime('%Y-%m-%d %H:%M:%S'),
'operation_status_code':
'Running'
}, self.lxd.operation_info('1234'))
ms.assert_called_with('GET',
'/1.0/operations/1234')
@annotated_data(
('create_time',
datetime.datetime.fromtimestamp(1433869644)
.strftime('%Y-%m-%d %H:%M:%S')),
('update_time',
datetime.datetime.fromtimestamp(1433869643)
.strftime('%Y-%m-%d %H:%M:%S')),
('status', 'Running'),
)
def test_operation_show(self, method, expected):
with mock.patch.object(connection.LXDConnection, 'get_object') as ms:
ms.return_value = ('200', fake_api.fake_operation())
self.assertEqual(
expected, getattr(self.lxd,
'operation_show_' + method)('1234'))
ms.assert_called_with('GET',
'/1.0/operations/1234')
@annotated_data(
('operation_delete', 'DELETE', ''),
('wait_container_operation', 'GET',
'/wait?status_code=200&timeout=30', ('200', '30')),
)
def test_operation_actions(self, method, http, path, args=()):
with mock.patch.object(connection.LXDConnection, 'get_status') as ms:
ms.return_value = True
self.assertTrue(
getattr(self.lxd, method)('1234', *args))
ms.assert_called_with(http,
'/1.0/operations/1234' + path)
|
Python
| 0.000106 |
@@ -1607,32 +1607,35 @@
tetime.datetime.
+utc
fromtimestamp(14
@@ -1629,35 +1629,35 @@
mtimestamp(14338
-696
+768
44)%0A
@@ -1767,32 +1767,35 @@
tetime.datetime.
+utc
fromtimestamp(14
@@ -1789,35 +1789,35 @@
mtimestamp(14338
-696
+768
43)%0A
@@ -2139,32 +2139,35 @@
tetime.datetime.
+utc
fromtimestamp(14
@@ -2169,19 +2169,19 @@
mp(14338
-696
+768
44)%0A
@@ -2269,16 +2269,19 @@
atetime.
+utc
fromtime
@@ -2295,11 +2295,11 @@
4338
-696
+768
43)%0A
|
36af45d88f01723204d9b65d4081e74a80f0776b
|
Add test for layers module.
|
test/layers_test.py
|
test/layers_test.py
|
Python
| 0 |
@@ -0,0 +1,1043 @@
+import theanets%0Aimport numpy as np%0A%0A%0Aclass TestLayer:%0A def test_build(self):%0A layer = theanets.layers.build('feedforward', nin=2, nout=4)%0A assert isinstance(layer, theanets.layers.Layer)%0A%0A%0Aclass TestFeedforward:%0A def test_create(self):%0A l = theanets.layers.Feedforward(nin=2, nout=4)%0A assert l.reset() == 12%0A%0Aclass TestTied:%0A def test_create(self):%0A l0 = theanets.layers.Feedforward(nin=2, nout=4)%0A l = theanets.layers.Tied(partner=l0)%0A assert l.reset() == 2%0A%0Aclass TestClassifier:%0A def test_create(self):%0A l = theanets.layers.Classifier(nin=2, nout=4)%0A assert l.reset() == 12%0A%0Aclass TestRecurrent:%0A def test_create(self):%0A l = theanets.layers.Recurrent(nin=2, nout=4)%0A assert l.reset() == 28%0A%0Aclass TestMRNN:%0A def test_create(self):%0A l = theanets.layers.MRNN(nin=2, nout=4, factors=3)%0A assert l.reset() == 42%0A%0Aclass TestLSTM:%0A def test_create(self):%0A l = theanets.layers.LSTM(nin=2, nout=4)%0A assert l.reset() == 124%0A
|
|
3dcf251276060b43ac888e0239f26a0cf2531832
|
Add tests for proxy drop executable
|
tests/test_proxy_drop_executable.py
|
tests/test_proxy_drop_executable.py
|
Python
| 0 |
@@ -0,0 +1,2859 @@
+# This Source Code Form is subject to the terms of the Mozilla Public%0A# License, v. 2.0. If a copy of the MPL was not distributed with this%0A# file, You can obtain one at http://mozilla.org/MPL/2.0/.%0A# Copyright (c) 2017 Mozilla Corporation%0Afrom positive_alert_test_case import PositiveAlertTestCase%0Afrom negative_alert_test_case import NegativeAlertTestCase%0Afrom alert_test_suite import AlertTestSuite%0A%0A%0Aclass TestAlertSSHPasswordAuthViolation(AlertTestSuite):%0A alert_filename = %22proxy_drop_non_standard_port%22%0A # This event is the default positive event that will cause the%0A # alert to trigger%0A default_event = %7B%0A %22_type%22: %22event%22,%0A %22_source%22: %7B%0A %22category%22: %22squid%22,%0A %22tags%22: %5B%22squid%22%5D,%0A %22details%22: %7B%0A %22details.sourceipaddress%22: %221.2.3.4%22,%0A %22details.destination%22: %22http://evil.com/evil.exe%22,%0A %22details.proxyaction%22: %22TCP_DENIED/-%22,%0A %7D%0A %7D%0A %7D%0A # This alert is the expected result from running this task%0A default_alert = %7B%0A %22category%22: %22squid%22,%0A %22tags%22: %5B'squid', 'proxy'%5D,%0A %22severity%22: %22WARNING%22,%0A %22summary%22: 'Multiple Proxy DROP events detected from 1.2.3.4 to the following executable file destinations: http://evil.com/evil.exe',%0A %7D%0A test_cases = %5B%5D%0A test_cases.append(%0A PositiveAlertTestCase(%0A description=%22Positive test with default events and default alert expected%22,%0A events=AlertTestSuite.create_events(default_event, 1),%0A expected_alert=default_alert%0A )%0A )%0A events = AlertTestSuite.create_events(default_event, 10)%0A for event in events:%0A event%5B'_source'%5D%5B'category'%5D = 'bad'%0A test_cases.append(%0A NegativeAlertTestCase(%0A description=%22Negative test case with events with incorrect category%22,%0A events=events,%0A )%0A )%0A events = AlertTestSuite.create_events(default_event, 10)%0A for event in events:%0A event%5B'_source'%5D%5B'tags'%5D = 'bad tag example'%0A 
test_cases.append(%0A NegativeAlertTestCase(%0A description=%22Negative test case with events with incorrect tags%22,%0A events=events,%0A )%0A )%0A events = AlertTestSuite.create_events(default_event, 10)%0A for event in events:%0A event%5B'_source'%5D%5B'utctimestamp'%5D = AlertTestSuite.subtract_from_timestamp_lambda(%7B%0A 'minutes': 241%7D)%0A event%5B'_source'%5D%5B'receivedtimestamp'%5D = AlertTestSuite.subtract_from_timestamp_lambda(%7B%0A 'minutes': 241%7D)%0A test_cases.append(%0A NegativeAlertTestCase(%0A description=%22Negative test case with old timestamp%22,%0A events=events,%0A )%0A )%0A
|
|
30d4301a04081f3d7a4fdba835a56aa0adac1375
|
fix latent slaves started serially with monkey patch instead
|
monkeypatch.py
|
monkeypatch.py
|
Python
| 0 |
@@ -0,0 +1,1884 @@
+from twisted.python import log%0Afrom twisted.internet import reactor%0A%0A%0Adef botmaster_maybeStartBuildsForSlave(self, slave_name):%0A %22%22%22%0A We delay this for 10 seconds, so that if multiple slaves start at the same%0A time, builds will be distributed between them.%0A %22%22%22%0A def do_start():%0A log.msg(format=%22Really starting builds on %25(slave_name)s%22,%0A slave_name=slave_name)%0A builders = self.getBuildersForSlave(slave_name)%0A self.brd.maybeStartBuildsOn(%5Bb.name for b in builders%5D)%0A log.msg(format=%22Waiting to start builds on %25(slave_name)s%22,%0A slave_name=slave_name)%0A reactor.callLater(10, do_start)%0A%0A%0Afrom buildbot.process.slavebuilder import AbstractSlaveBuilder%0A%0A%0Adef slavebuilder_buildStarted(self):%0A AbstractSlaveBuilder.buildStarted(self)%0A if self.slave and hasattr(self.slave, 'buildStarted'):%0A self.slave.buildStarted(self)%0A%0A%0Afrom buildbot.process.buildrequestdistributor import BasicBuildChooser%0A%0A%0Aclass NoFallBackBuildChooser(BasicBuildChooser):%0A %22%22%22%0A BuildChooser that doesn't fall back to rejected slaves.%0A In particular, builds with locks won't be assigned before a lock is ready.%0A %22%22%22%0A%0A def __init__(self, bldr, master):%0A BasicBuildChooser.__init__(self, bldr, master)%0A self.rejectedSlaves = None%0A%0A%0Adef apply_patches():%0A log.msg(%22Apply flocker_bb.monkeypatch.%22)%0A from buildbot.process.botmaster import BotMaster%0A BotMaster.maybeStartBuildsForSlave = botmaster_maybeStartBuildsForSlave%0A from buildbot.process.slavebuilder import SlaveBuilder%0A SlaveBuilder.buildStarted = slavebuilder_buildStarted%0A from buildbot.steps.master import MasterShellCommand%0A MasterShellCommand.renderables += %5B'path'%5D%0A from buildbot.process.buildrequestdistributor import (%0A BuildRequestDistributor)%0A BuildRequestDistributor.BuildChooser = NoFallBackBuildChooser%0A
|
|
379aef7e3aebc05352cacd274b43b156e32de18b
|
Add script to run tests
|
runtests.py
|
runtests.py
|
Python
| 0.000001 |
@@ -0,0 +1,588 @@
+#!/usr/bin/env python%0Aimport argparse%0Aimport sys%0A%0Aimport django%0Afrom django.conf import settings%0Afrom django.test.utils import get_runner%0A%0A%0Adef runtests(test_labels):%0A settings.configure(INSTALLED_APPS=%5B'tests'%5D)%0A django.setup()%0A TestRunner = get_runner(settings)%0A test_runner = TestRunner()%0A failures = test_runner.run_tests(test_labels)%0A sys.exit(failures)%0A%0A%0Aif __name__ == '__main__':%0A parser = argparse.ArgumentParser()%0A parser.add_argument('test_labels', nargs='*', default=%5B'tests'%5D)%0A args = parser.parse_args()%0A runtests(test_labels=args.test_labels)%0A
|
|
abf39931331f54aff5f10345939420041bd2039d
|
Add test for APS2 instruction merging.
|
tests/test_APS2Pattern.py
|
tests/test_APS2Pattern.py
|
Python
| 0 |
@@ -0,0 +1,1563 @@
+import h5py%0Aimport unittest%0Aimport numpy as np%0Afrom copy import copy%0A%0Afrom QGL import *%0Afrom instruments.drivers import APS2Pattern%0A%0Aclass APSPatternUtils(unittest.TestCase):%0A def setUp(self):%0A self.q1gate = Channels.LogicalMarkerChannel(label='q1-gate')%0A self.q1 = Qubit(label='q1', gateChan=self.q1gate)%0A self.q1 = Qubit(label='q1')%0A self.q1.pulseParams%5B'length'%5D = 30e-9%0A%0A Compiler.channelLib = %7B'q1': self.q1, 'q1-gate': self.q1gate%7D%0A%0A def test_synchronize_control_flow(self):%0A q1 = self.q1%0A%0A pulse = Compiler.Waveform()%0A pulse.length = 24%0A pulse.key = 12345%0A delay = Compiler.Waveform()%0A delay.length = 100%0A delay.isTimeAmp = True%0A blank = Compiler.Waveform( BLANK(q1, pulse.length) )%0A%0A seq_1 = %5Bqwait(), delay, copy(pulse), qwait(), copy(pulse)%5D%0A seq_2 = %5Bqwait(), copy(blank), qwait(), copy(blank)%5D%0A offsets = %7B APS2Pattern.wf_sig(pulse) : 0 %7D%0A %0A instructions = APS2Pattern.create_seq_instructions(%5Bseq_1, seq_2, %5B%5D, %5B%5D, %5B%5D%5D, offsets)%0A%0A instr_types = %5B%0A APS2Pattern.SYNC,%0A APS2Pattern.WAIT,%0A APS2Pattern.WFM,%0A APS2Pattern.MARKER,%0A APS2Pattern.WFM,%0A APS2Pattern.WAIT,%0A APS2Pattern.WFM,%0A APS2Pattern.MARKER%0A %5D%0A%0A for actual, expected in zip(instructions, instr_types):%0A instrOpCode = (actual.header %3E%3E 4) & 0xf%0A assert(instrOpCode == expected)%0A%0Aif __name__ == %22__main__%22: %0A unittest.main()%0A
|
|
df05088b5a6233cb262017b8489723c23000eb17
|
Add variable
|
src/robotide/ui/images.py
|
src/robotide/ui/images.py
|
# Copyright 2008-2009 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import wx
_SIZE = (16, 16)
_BASE = os.path.dirname(__file__)
class TreeImageList(wx.ImageList):
def __init__(self):
wx.ImageList.__init__(self, *_SIZE)
self._images = {
'TestDataDirectory': _TreeImage(self, wx.ART_FOLDER, wx.ART_FOLDER_OPEN),
'TestCaseFile': _TreeImage(self, wx.ART_NORMAL_FILE),
'TestCase': _TreeImage(self, 'robot.png'),
'UserKeyword': _TreeImage(self, 'process.png'),
'ResourceFile': _TreeImage(self, wx.ART_NORMAL_FILE)
}
def __getitem__(self, key):
return self._images[key]
class _TreeImage(object):
def __init__(self, image_list, normal, expanded=None):
self.normal = self._get_image(image_list, normal)
self.expanded = self._get_image(image_list, expanded) if expanded else self.normal
def _get_image(self, image_list, source):
if source.startswith('wx'):
img = wx.ArtProvider_GetBitmap(source, wx.ART_OTHER, _SIZE)
else:
path = os.path.join(_BASE, source)
img = wx.Image(path, wx.BITMAP_TYPE_PNG).ConvertToBitmap()
return image_list.Add(img)
|
Python
| 0.000005 |
@@ -619,16 +619,57 @@
port wx%0A
+from robot.parsing.model import Variable%0A
%0A%0A_SIZE
@@ -1175,16 +1175,73 @@
AL_FILE)
+,%0A 'Variable': _TreeImage(self, 'process.png')
%0A
|
aeaf2e1a1207f2094ea4298b1ecff015f5996b5a
|
Add test cases for gabor filter
|
skimage/filter/tests/test_gabor.py
|
skimage/filter/tests/test_gabor.py
|
Python
| 0 |
@@ -0,0 +1,1235 @@
+import numpy as np%0Afrom numpy.testing import assert_almost_equal, assert_array_almost_equal%0A%0Afrom skimage.filter import gabor_kernel, gabor_filter%0A%0A%0Adef test_gabor_kernel_sum():%0A for sigmax in range(1, 10, 2):%0A for sigmay in range(1, 10, 2):%0A for frequency in range(0, 10, 2):%0A kernel = gabor_kernel(sigmax, sigmay, frequency+0.1, 0)%0A # make sure gaussian distribution is covered nearly 100%25%0A assert_almost_equal(np.abs(kernel).sum(), 1, 2)%0A%0A%0Adef test_gabor_kernel_theta():%0A for sigmax in range(1, 10, 2):%0A for sigmay in range(1, 10, 2):%0A for frequency in range(0, 10, 2):%0A for theta in range(0, 10, 2):%0A kernel0 = gabor_kernel(sigmax, sigmay, frequency+0.1, theta)%0A kernel180 = gabor_kernel(sigmax, sigmay, frequency,%0A theta+np.pi)%0A%0A assert_array_almost_equal(np.abs(kernel0),%0A np.abs(kernel180))%0A%0A%0Adef test_gabor_filter():%0A real, imag = gabor_filter(np.random.random((100, 100)), 1, 1, 1, 1)%0A%0A%0Aif __name__ == %22__main__%22:%0A from numpy import testing%0A testing.run_module_suite()%0A
|
|
a70f46aac52be5b38b869cfbe18c0421a0032aee
|
Add script to count parameters of PyTorch model
|
count_params.py
|
count_params.py
|
Python
| 0 |
@@ -0,0 +1,208 @@
+import sys%0Aimport numpy as np%0Aimport torch%0A%0Amodel = torch.load(sys.argv%5B1%5D)%0Aparams = 0%0Afor key in model:%0A params += np.multiply.reduce(model%5Bkey%5D.shape)%0Aprint('Total number of parameters: ' + str(params))%0A
|
|
fd4398b1e811aaa2b876c120f99ca7fff08618ca
|
install on windows via gohlke wheels
|
scripts/install_on_windows.py
|
scripts/install_on_windows.py
|
Python
| 0 |
@@ -0,0 +1,975 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A%22%22%22Script for installing on Microsoft Windows%0A%0AWheels from %5BGOHLKE WINDOWS REPOSITORY%5D(https://www.lfd.uci.edu/~gohlke/pythonlibs/)%0A%0A%22%22%22%0A%0Atry:%0A from gohlkegrabber import GohlkeGrabber%0Aexcept ImportError:%0A print(%22gohlkegrabber not installed -%3E 'pip install gohlkegrabber%22)%0A pass%0A%0Aimport subprocess%0Aimport tempfile%0Aimport shutil%0A%0APACKAGES = ('numpy')%0A%0A%0Adef install_packages(packages, remove_tmpdir=True):%0A %22%22%22main script%22%22%22%0A%0A _py = '3.7'%0A _platform = 'win_amd64'%0A%0A _tmpdir = tempfile.mkdtemp(prefix='py37w')%0A print(f%22Temporary directory is: %7B_tmpdir%7D%22)%0A%0A gg = GohlkeGrabber()%0A%0A for pkg in packages:%0A print(f%22retreiving %7Bpkg%7D...%22)%0A pkwhl = gg.retrieve(_tmpdir, pkg, python=_py, platform=_platform)%0A subprocess.call(f%22pip install %7Bpkwhl%5B0%5D%7D%22)%0A%0A if remove_tmpdir:%0A shutil.rmtree(_tmpdir)%0A print(%22temporary directory removed%22)%0A%0A%0Aif __name__ == %22__main__%22:%0A pass%0A%0A
|
|
495c937d39da1902948065a38502f9d582fa2b3b
|
Add darkobject tests.
|
tests/darkobject.py
|
tests/darkobject.py
|
Python
| 0 |
@@ -0,0 +1,325 @@
+%22%22%22%0A.. moduleauthor:: Adam Gagorik %[email protected]%3E%0A%22%22%22%0Aimport unittest%0Aimport pydarkstar.logutils%0Aimport pydarkstar.darkobject%0A%0Apydarkstar.logutils.setDebug()%0A%0Aclass TestDarkObject(unittest.TestCase):%0A def test_init(self):%0A pydarkstar.darkobject.DarkObject()%0A%0Aif __name__ == '__main__':%0A unittest.main()
|
|
6f8699288f79ff856ed58595169cb08956cd210d
|
Create toeplitz-matrix.py
|
Python/toeplitz-matrix.py
|
Python/toeplitz-matrix.py
|
Python
| 0.000017 |
@@ -0,0 +1,343 @@
+# Time: O(m * n)%0A# Space: O(1)%0A%0Aclass Solution(object):%0A def isToeplitzMatrix(self, matrix):%0A %22%22%22%0A :type matrix: List%5BList%5Bint%5D%5D%0A :rtype: bool%0A %22%22%22%0A return all(i == 0 or j == 0 or matrix%5Bi-1%5D%5Bj-1%5D == val%0A for i, row in enumerate(matrix)%0A for j, val in enumerate(row))%0A
|
|
76c040e9da5d94dfcb68d3e9a8003b894c1cf1dc
|
test file for vimba.py
|
tests/test_vimba.py
|
tests/test_vimba.py
|
Python
| 0 |
@@ -0,0 +1,1050 @@
+import pytest%0A%0Afrom pymba import Vimba, VimbaException%0A%0A%0Adef test_version():%0A version = Vimba().version.split('.')%0A assert int(version%5B0%5D) %3E= 1%0A assert int(version%5B1%5D) %3E= 7%0A assert int(version%5B2%5D) %3E= 0%0A%0A%0Adef test_startup_shutdown():%0A with pytest.raises(VimbaException) as e:%0A Vimba().system().feature_names()%0A assert e.value.error_code == VimbaException.ERR_STARTUP_NOT_CALLED%0A%0A # manual%0A Vimba().startup()%0A Vimba().system().feature_names()%0A Vimba().shutdown()%0A%0A # context manager%0A with Vimba() as vmb:%0A vmb.system().feature_names()%0A%0A%[email protected]%0Adef vmb() -%3E Vimba:%0A with Vimba() as v:%0A yield v%0A%0A%0A# works best with camera(s) attached%0Adef test_interface_camera_ids(vmb: Vimba):%0A # for ethernet camera discovery%0A if vmb.system().GeVTLIsPresent:%0A vmb.system().run_feature_command(%22GeVDiscoveryAllOnce%22)%0A%0A for func in (vmb.interface_ids, vmb.camera_ids):%0A ids = func()%0A assert isinstance(ids, list)%0A for x in ids:%0A assert isinstance(x, str)%0A
|
|
295b83d466b90ea812e8c0bda56b4d38a31c956a
|
Create reversedArrayNum.py
|
CodeWars/8kyu/reversedArrayNum.py
|
CodeWars/8kyu/reversedArrayNum.py
|
Python
| 0.000106 |
@@ -0,0 +1,59 @@
+def digitize(n):%0A return %5Bint(i) for i in str(n)%5D%5B::-1%5D%0A
|
|
7b279117da06af5cf21b61ad810a9c3177de8e3e
|
Update fabfile.py
|
fabfile.py
|
fabfile.py
|
from fabric.api import local,run
import os
from os import path
#Add settings module so fab file can see it
os.environ['DJANGO_SETTINGS_MODULE'] = "adl_lrs.settings"
from django.conf import settings
adldir = settings.MEDIA_ROOT
actor_profile = 'actor_profile'
activity_profile = 'activity_profile'
activity_state = 'activity_state'
INSTALL_STEPS = ['yes | sudo apt-get install python-setuptools libmysqlclient-dev python-dev python-mysqldb python-libxml2 python-libxslt1 libxml2-dev libxslt1-dev',
'sudo easy_install pip',
'pip install -r requirements.txt']
def deps_local():
for step in INSTALL_STEPS:
local(step)
def create_dirs():
#Create media directories and give them open permissions
if not os.path.exists(path.join(adldir,activity_profile)):
os.makedirs(path.join(adldir,activity_profile))
os.chmod(path.join(adldir,activity_profile), 0777)
if not os.path.exists(path.join(adldir,activity_state)):
os.makedirs(path.join(adldir,activity_state))
os.chmod(path.join(adldir,activity_state), 0777)
if not os.path.exists(path.join(adldir,actor_profile)):
os.makedirs(path.join(adldir,actor_profile))
os.chmod(path.join(adldir,actor_profile), 0777)
def deps_remote():
for step in INSTALL_STEPS:
run(step)
|
Python
| 0 |
@@ -553,16 +553,98 @@
'
+sudo pip install virtualenv',%0A 'virtualenv env;. env/bin/activate;
pip inst
@@ -666,16 +666,27 @@
ents.txt
+;deactivate
'%5D%0Adef d
@@ -751,27 +751,8 @@
p)%0A%0A
-def create_dirs():%0A
|
86418c4f3ea786c6eb1aad6579dadfb286dec0a3
|
Create InMoov2.minimal.py
|
toSort/InMoov2.minimal.py
|
toSort/InMoov2.minimal.py
|
Python
| 0.000001 |
@@ -0,0 +1,1940 @@
+# a very minimal script for InMoov%0A# although this script is very short you can still%0A# do voice control of a right hand or finger box%0A# for any command which you say - you will be required to say a confirmation%0A# e.g. you say -%3E open hand, InMoov will ask -%3E %22Did you say open hand?%22, you will need to%0A# respond with a confirmation (%22yes%22,%22correct%22,%22yeah%22,%22ya%22)%0A %0ArightPort = %22COM8%22%0A %0Ai01 = Runtime.createAndStart(%22i01%22, %22InMoov%22)%0A# starting parts%0Ai01.startEar()%0Ai01.startMouth()%0A#to tweak the default voice%0Ai01.mouth.setGoogleURI(%22http://thehackettfamily.org/Voice_api/api2.php?voice=Ryan&txt=%22)%0A##############%0Ai01.startRightHand(rightPort)%0A# tweaking defaults settings of right hand%0A#i01.rightHand.thumb.setMinMax(55,135)%0A#i01.rightHand.index.setMinMax(0,160)%0A#i01.rightHand.majeure.setMinMax(0,140)%0A#i01.rightHand.ringFinger.setMinMax(48,145)%0A#i01.rightHand.pinky.setMinMax(45,146)%0A#i01.rightHand.thumb.map(0,180,55,135)%0A#i01.rightHand.index.map(0,180,0,160)%0A#i01.rightHand.majeure.map(0,180,0,140)%0A#i01.rightHand.ringFinger.map(0,180,48,145)%0A#i01.rightHand.pinky.map(0,180,45,146)%0A#################%0A %0A# verbal commands%0Aear = i01.ear%0A %0Aear.addCommand(%22attach right hand%22, %22i01.rightHand%22, %22attach%22)%0Aear.addCommand(%22disconnect right hand%22, %22i01.rightHand%22, %22detach%22)%0Aear.addCommand(%22rest%22, i01.getName(), %22rest%22)%0Aear.addCommand(%22open hand%22, %22python%22, %22handopen%22)%0Aear.addCommand(%22close hand%22, %22python%22, %22handclose%22)%0Aear.addCommand(%22capture gesture%22, ear.getName(), %22captureGesture%22)%0Aear.addCommand(%22manual%22, ear.getName(), %22lockOutAllGrammarExcept%22, %22voice control%22)%0Aear.addCommand(%22voice control%22, ear.getName(), %22clearLock%22)%0A %0Aear.addComfirmations(%22yes%22,%22correct%22,%22yeah%22,%22ya%22)%0Aear.addNegations(%22no%22,%22wrong%22,%22nope%22,%22nah%22)%0A %0Aear.startListening()%0A 
%0Adef handopen():%0A i01.moveHand(%22left%22,0,0,0,0,0)%0A i01.moveHand(%22right%22,0,0,0,0,0)%0A %0Adef handclose():%0A i01.moveHand(%22left%22,180,180,180,180,180)%0A i01.moveHand(%22right%22,180,180,180,180,180)%0A
|
|
35e76ec99a3710a20b17a5afddaa14389af65098
|
Add some simple MediaWiki importer.
|
tools/import_mediawiki.py
|
tools/import_mediawiki.py
|
Python
| 0 |
@@ -0,0 +1,2141 @@
+import os%0Aimport os.path%0Aimport argparse%0Afrom sqlalchemy import create_engine%0A%0A%0Adef main():%0A parser = argparse.ArgumentParser()%0A parser.add_argument('url')%0A parser.add_argument('-o', '--out', default='wikked_import')%0A parser.add_argument('--prefix', default='wiki')%0A parser.add_argument('-v', '--verbose', action='store_true')%0A parser.add_argument('--ext', default='.md')%0A args = parser.parse_args()%0A%0A prefix = args.prefix%0A out_dir = args.out%0A ext = '.' + args.ext.lstrip('.')%0A%0A if not out_dir:%0A parser.print_help()%0A return 1%0A%0A if os.path.isdir(out_dir):%0A print(%22The output directory already exists!%22)%0A return 1%0A%0A engine = create_engine(args.url, echo=args.verbose)%0A conn = engine.connect()%0A%0A query = (%0A 'SELECT '%0A 'p.page_id,p.page_title,p.page_latest,'%0A 'r.rev_id,r.rev_text_id,t.old_id,t.old_text '%0A 'from %25(prefix)s_page p '%0A 'INNER JOIN %25(prefix)s_revision r ON p.page_latest = r.rev_id '%0A 'INNER JOIN %25(prefix)s_text t ON r.rev_text_id = t.old_id;' %25%0A %7B'prefix': prefix%7D)%0A q = conn.execute(query)%0A for p in q:%0A title = p%5B'page_title'%5D.decode('utf8')%0A text = p%5B'old_text'%5D.decode('utf8')%0A%0A path_noext = os.path.join(out_dir, title)%0A path = path_noext + ext%0A dirname = os.path.dirname(path)%0A if not os.path.isdir(dirname):%0A os.makedirs(dirname)%0A%0A if os.path.exists(path):%0A suffnum = 2%0A while True:%0A new_path = '%25s_%25d%25s' %25 (path_noext, suffnum, ext)%0A if not os.path.exists(new_path):%0A break%0A suffnum += 1%0A if suffnum %3E 100:%0A raise Exception(%22Can't find available path for: %22 %25%0A path)%0A%0A print(%22WARNING: %25s exists%22 %25 path)%0A print(%22WARNING: creating %25s instead%22 %25 new_path)%0A path = new_path%0A%0A print(p%5B'page_id'%5D, title)%0A with open(path, 'w', encoding='utf8') as fp:%0A fp.write(text)%0A%0A conn.close()%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
237041aff9d99ac840572742467772edf1f4d5ef
|
Add image download example
|
examples/image/download.py
|
examples/image/download.py
|
Python
| 0.000003 |
@@ -0,0 +1,2262 @@
+# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0A%0Aimport hashlib%0A%0A%22%22%22%0ADownload an image with the Image service.%0A%0AFor a full guide see%0Ahttp://developer.openstack.org/sdks/python/openstacksdk/users/guides/image.html%0A%22%22%22%0A%0A%0Adef download_image_stream(conn):%0A print(%22Download Image via streaming:%22)%0A%0A # Find the image you would like to download.%0A image = conn.image.find_image(%22myimage%22)%0A%0A # As the actual download now takes place outside of the library%0A # and in your own code, you are now responsible for checking%0A # the integrity of the data. 
Create an MD5 has to be computed%0A # after all of the data has been consumed.%0A md5 = hashlib.md5()%0A%0A with open(%22myimage.qcow2%22, %22wb%22) as local_image:%0A response = conn.image.download_image(image, stream=True)%0A%0A # Read only 1024 bytes of memory at a time until%0A # all of the image data has been consumed.%0A for chunk in response.iter_content(chunk_size=1024):%0A%0A # With each chunk, add it to the hash to be computed.%0A md5.update(chunk)%0A%0A local_image.write(chunk)%0A%0A # Now that you've consumed all of the data the response gave you,%0A # ensure that the checksums of what the server offered and%0A # what you downloaded are the same.%0A if response.headers%5B%22Content-MD5%22%5D != md5.hexdigest():%0A raise Exception(%22Checksum mismatch in downloaded content%22)%0A%0A%0Adef download_image(conn):%0A print(%22Download Image:%22)%0A%0A # Find the image you would like to download.%0A image = conn.image.find_image(%22myimage%22)%0A%0A with open(%22myimage.qcow2%22, %22w%22) as local_image:%0A response = conn.image.download_image(image)%0A%0A # Response will contain the entire contents of the Image.%0A local_image.write(response)%0A
|
|
ab60bd4f31a185884e0c05fa1a5f70c39a9d903a
|
add 52
|
python/p052.py
|
python/p052.py
|
Python
| 0.998596 |
@@ -0,0 +1,207 @@
+def same(a, b):%0A return sorted(str(a)) == sorted(str(b))%0A%0Afor i in xrange(1, 1000000):%0A if same(i, 2 * i) and same(3 * i, 4 * i) and same(5 * i, 6 * i) and same(i, 6 * i):%0A print i%0A break
|
|
40288df82cd8bf8f253284875c5da62a60975118
|
Fix path
|
services.py
|
services.py
|
"""
A toolkit for identifying and advertising service resources.
Uses a specific naming convention for the Task Definition of services. If you
name the Task Definition ending with "-service", no configuration is needed.
This also requires that you not use that naming convention for task definitions
that are not services.
For example:
A Task Definition with the family name of 'cache-service' will have its
hosting Container Instance's internal ip added to a Route53 private Zone as
cache.local and other machines on the same subnet can address it that way.
"""
import argparse
import os
import re
import boto
ecs = boto.connect_ec2containerservice()
ec2 = boto.connect_ec2()
route53 = boto.connect_route53()
if 'ECS_CLUSTER' in os.environ:
cluster = os.environ['ECS_CLUSTER']
elif os.path.exists('/etc/ecs/ecs.config'):
pat = re.compile(r'\bECS_CLUSTER\b\s*=\s*(\w*)')
cluster = pat.findall(open('/ecs/ecs.config').read())[-1]
else:
cluster = None
class MultipleTasksRunningForService(Exception):
"""It's assumed that there should be only 1 task running for a service.
Am I wrong? Tell me.
"""
pass
def get_task_definition_arns():
"""Request all API pages needed to get Task Definition ARNS."""
next_token = []
arns = []
while next_token is not None:
detail = ecs.list_task_definitions(next_token=next_token)
detail = detail['ListTaskDefinitionsResponse']
detail = detail['ListTaskDefinitionsResult']
arns.extend(detail['taskDefinitionArns'])
next_token = detail['nextToken']
return arns
def get_task_definition_families():
"""Ignore duplicate tasks in the same family."""
arns = get_task_definition_arns()
families = {}
for arn in arns:
match = pattern_arn.match(arn)
if match:
groupdict = match.groupdict()
families[groupdict['family']] = True
return families.keys()
def get_task_arn(family):
"""Get the ARN of running task, given the family name."""
response = ecs.list_tasks(cluster=cluster, family=family)
arns = response['ListTasksResponse']['ListTasksResult']['taskArns']
if len(arns) == 0:
return None
if len(arns) > 1:
raise MultipleTasksRunningForService
return arns[0]
def get_task_container_instance_arn(task_arn):
"""Get the ARN for the container instance a give task is running on."""
response = ecs.describe_tasks(task_arn, cluster=cluster)
response = response['DescribeTasksResponse']
return response['DescribeTasksResult']['tasks'][0]['containerInstanceArn']
def get_container_instance_ec2_instance_id(container_instance_arn):
"""Id the EC2 instance serving as the container instance."""
detail = ecs.describe_container_instances(
container_instances=container_instance_arn, cluster=cluster)
detail = detail['DescribeContainerInstancesResponse']
detail = detail['DescribeContainerInstancesResult']['containerInstances']
return detail[0]['ec2InstanceId']
def get_ec2_interface(ec2_instance_id):
"""Get the primary interface for the given EC2 instance."""
return ec2.get_all_instances(filters={
'instance-id': ec2_instance_id})[0].instances[0].interfaces[0]
def get_zone_for_vpc(vpc_id):
"""Identify the Hosted Zone for the given VPC.
Assumes a 1 to 1 relationship.
NOTE: There is an existing bug.
https://github.com/boto/boto/issues/3061
When that changes, I expect to have to search ['VPCs'] as a list of
dictionaries rather than a dictionary. This has the unfortunate side
effect of not working for Hosted Zones that are associated with more than
one VPC. (But, why would you expect internal DNS for 2 different private
networks to be the same anyway?)
"""
response = route53.get_all_hosted_zones()['ListHostedZonesResponse']
for zone in response['HostedZones']:
zone_id = zone['Id'].split('/')[-1]
detail = route53.get_hosted_zone(zone_id)['GetHostedZoneResponse']
if detail['VPCs']['VPC']['VPCId'] == vpc_id:
return {'zone_id': zone_id, 'zone_name': zone['Name']}
def get_info():
    """Gather service and network details for every running '-service' task.

    Returns a dict with a 'services' list (one entry per running service
    task) and a 'network' dict (cluster, vpc_id, zone_id, zone_name).
    """
    info = {'services': [], 'network': {'cluster': cluster}}
    for family in get_task_definition_families():
        # Only task-definition families following the '<name>-service'
        # naming convention are discoverable.
        if not family.endswith('-service'):
            continue
        task_arn = get_task_arn(family)
        if not task_arn:
            # Task is not running; skip it.
            continue
        entry = {
            'service_arn': task_arn,
            'family': family,
            'name': family[:-8],
        }
        entry['container_instance_arn'] = (
            get_task_container_instance_arn(task_arn))
        entry['ec2_instance_id'] = (
            get_container_instance_ec2_instance_id(
                entry['container_instance_arn']))
        interface = get_ec2_interface(entry['ec2_instance_id'])
        entry['container_instance_internal_ip'] = (
            interface.private_ip_address)
        # Common network info only needs to be resolved once.
        if 'vpc_id' not in info['network']:
            info['network']['vpc_id'] = interface.vpc_id
            info['network'].update(get_zone_for_vpc(interface.vpc_id))
        info['services'].append(entry)
    return info
def dns(zone_id, zone_name, service_name, service_ip, ttl=20):
    """Insert or update the A record '<service_name>.<zone_name>'.

    Issues a Route53 UPSERT so the record is created if missing and
    overwritten if present.  Returns the committed ResourceRecordSets.
    """
    rrs = boto.route53.record.ResourceRecordSets(route53, zone_id)
    # Explicit format arguments instead of the fragile `**locals()` trick,
    # which silently depends on local variable names.
    record_name = '{0}.{1}'.format(service_name, zone_name)
    change = rrs.add_change('UPSERT', record_name, 'A', ttl)
    change.add_value(service_ip)
    rrs.commit()
    return rrs
def update_services(service_names=[], verbose=False):
"""Update DNS to allow discovery of properly named task definitions.
If service_names are provided only update those services.
Otherwise update all.
"""
info = get_info()
for service in info['services']:
if (service_names and
service['family'] not in service_names and
service['name'] not in service_names):
continue
if verbose:
print "registering {0}".format(service['name'])
dns(info['network']['zone_id'], info['network']['zone_name'],
service['name'], service['container_instance_internal_ip'])
def cli():
    """Command-line entry point: register DNS for the named services."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'service_names', nargs='*', help='list of services to start')
    args = parser.parse_args()
    update_services(args.service_names, True)
# Decompose an Amazon Resource Name (ARN) into named components.  The
# resource part may itself be '<family>:<version>' (task-definition style),
# in which case family/version are captured; otherwise they are None.
pattern_arn = re.compile(
    r'''arn:
        (?P<partition>[^:]+):
        (?P<service>[^:]+):
        (?P<region>[^:]*):          # region is optional
        (?P<account>[^:]*):         # account is optional
        (?P<resourcetype>[^:/]+)([:/])
        (?P<resource>(
            (?P<family>[^:]+):
            (?P<version>[^:]+)|.*
        ))''',
    re.VERBOSE)
|
Python
| 0.000018 |
@@ -923,16 +923,20 @@
(open('/
+etc/
ecs/ecs.
|
26bc79d7ed478872f615e80fa177f0c4582c3631
|
reverse string ii
|
src/main/python/pyleetcode/reverse_string_ii.py
|
src/main/python/pyleetcode/reverse_string_ii.py
|
Python
| 0.999999 |
@@ -0,0 +1,966 @@
+%22%22%22%0AGiven a string and an integer k, you need to reverse the first k characters for every 2k characters counting from the%0Astart of the string. If there are less than k characters left, reverse all of them. If there are less than 2k but%0Agreater than or equal to k characters, then reverse the first k characters and left the other as original.%0A%0AExample:%0A Input: s = %22abcdefg%22, k = 2%0A%0A Output: %22bacdfeg%22%0ARestrictions:%0A * The string consists of lower English letters only.%0A * Length of the given string and k will in the range %5B1, 10000%5D%0A%22%22%22%0A%0A%0Adef reverse_str(s, k):%0A %22%22%22%0A :type s: str%0A :type k: int%0A :rtype: str%0A %22%22%22%0A if k == 0:%0A return s%0A%0A o = ''%0A for idx in range(0, len(s), k):%0A print idx, s%5Bidx:idx+k%5D%0A if idx/k %25 2 == 0:%0A o += s%5Bidx:idx+k%5D%5B::-1%5D%0A else:%0A o += s%5Bidx:idx+k%5D%0A idx += k%0A return o%0A%0A%0Adef test_reverse_str():%0A assert reverse_str('abcdefg', 2) == %22bacdfeg%22
|
|
fe6ece236e684d76441280ba700565f7fbce40cc
|
Create masked version based on pbcov cutogg
|
14B-088/HI/analysis/pbcov_masking.py
|
14B-088/HI/analysis/pbcov_masking.py
|
Python
| 0 |
@@ -0,0 +1,1314 @@
+%0A'''%0ACut out noisy regions by imposing a mask of the primary beam coverage.%0A'''%0A%0Afrom astropy.io import fits%0Afrom spectral_cube import SpectralCube%0Afrom spectral_cube.cube_utils import beams_to_bintable%0Afrom astropy.utils.console import ProgressBar%0Aimport os%0A%0Afrom analysis.paths import fourteenB_HI_data_path%0A%0A# execfile(os.path.expanduser(%22~/Dropbox/code_development/ewky_scripts/write_huge_fits.py%22))%0A%0Apbcov = fits.open(fourteenB_HI_data_path(%22M33_14B-088_pbcov.fits%22))%5B0%5D%0A%0Acube = SpectralCube.read(fourteenB_HI_data_path(%22M33_14B-088_HI.clean.image.fits%22))%0A%0A# Apply the mask, using a cut-off of 0.3. This retains all of the regions with%0A# emission.%0Apblim = 0.3%0Amasked_cube = cube.with_mask(pbcov.data %3E pblim)%0A%0Amasked_cube = masked_cube.minimal_subcube()%0A%0Anew_fitsname = fourteenB_HI_data_path(%22M33_14B-088_HI.clean.image.pbcov_gt_0.3_masked.fits%22,%0A no_check=True)%0A%0Amasked_cube.write(new_fitsname)%0A%0A# create_huge_fits(new_fitsname, cube.header)%0A%0A# save_hdu = fits.open(new_fitsname, mode='update')%0A%0A# Save per channel%0A# for chan in ProgressBar(cube.shape%5B0%5D):%0A# save_hdu%5B0%5D.data%5Bchan%5D = cube%5Bchan%5D.value%0A%0A# if chan %25 50 == 0:%0A# save_hdu.flush()%0A%0A# Save the beam table!%0A# save_hdu.append(beams_to_bintable(cube.beams))%0A%0A# save_hdu.flush()%0A# save_hdu.close()%0A
|
|
d8ddd6a843000c8b4125f166645a41443b6c06ba
|
Add kms_decrypt module
|
kms_decrypt.py
|
kms_decrypt.py
|
Python
| 0.000001 |
@@ -0,0 +1,2406 @@
+#!/usr/bin/python%0Aimport base64%0A%0ADOCUMENTATION = '''%0Ashort_description: Decrypt a secret that was generated by KMS%0Adescription:%0A - This module decrypts the given secret using AWS KMS, and returns it as the Plaintext property%0Aversion_added: null%0Aauthor: Ben Bridts%0Anotes:%0A - Make sure you read http://docs.aws.amazon.com/kms/latest/developerguide/control-access.html to learn how to restrict%0A access to your keys%0Arequirements:%0A - the boto3 python package%0Aoptions:%0A aws_secret_key:%0A description:%0A - AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.%0A required: false%0A default: null%0A aliases: %5B 'ec2_secret_key', 'secret_key' %5D%0A version_added: %221.5%22%0A aws_access_key:%0A description:%0A - AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.%0A required: false%0A default: null%0A aliases: %5B 'ec2_access_key', 'access_key' %5D%0A version_added: %221.5%22%0A region:%0A description:%0A - The AWS region to use. 
If not specified then the value of the EC2_REGION environment variable, if any, is used.%0A required: false%0A aliases: %5B'aws_region', 'ec2_region'%5D%0A version_added: %221.5%22%0A secret:%0A description:%0A - The encrypted string you want to decode%0A required: false%0A default: CAT%0A'''%0A%0AEXAMPLES = '''%0A- name: Decrypt secret%0A kms_decrypt:%0A secret: %22%7B%7B secret %7D%7D%22%0A register: result%0A delegate_to: 127.0.0.1%0A- name: Show plaintext%0A debug: var=result.plaintext%0A delegate_to: 127.0.0.1%0A'''%0A%0Aimport sys%0A%0Atry:%0A import boto3%0Aexcept ImportError:%0A print %22failed=True msg='boto3 required for this module'%22%0A sys.exit(1)%0A%0A%0Adef main():%0A argument_spec = ec2_argument_spec()%0A argument_spec.update(dict(%0A secret=dict(required=True),%0A ))%0A%0A module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)%0A secret = module.params.get('secret')%0A secret = base64.decodestring(secret)%0A%0A client = boto3.client('kms')%0A%0A response = client.decrypt(%0A CiphertextBlob=secret%0A )%0A%0A status_code = response%5B'ResponseMetadata'%5D%5B'HTTPStatusCode'%5D%0A if status_code != 200:%0A module.fail_json(msg='Failed with http status code %25s' %25 status_code)%0A%0A module.exit_json(changed=True, plaintext=response%5B'Plaintext'%5D, key_id=response%5B'KeyId'%5D)%0A%0A%0Afrom ansible.module_utils.basic import *%0Afrom ansible.module_utils.ec2 import *%0A%0Amain()%0A
|
|
4af087e4920124eddb0342d0f22978872f9ba5dc
|
add landuse_sql.py which convert the .csv files from ArcMap to a SQL database
|
landuse_sql.py
|
landuse_sql.py
|
Python
| 0.000006 |
@@ -0,0 +1,890 @@
+import sqlite3%0Aimport glob%0Aimport pandas%0A%0A#Name of SQL database%0Asql_schema = 'LandUse_Approx.db'%0A%0Afiles = %5Bf for f in glob.glob(%22*.csv%22) if %22LandUseApprox_%22 in f%5D%0A%0A#Create table names for the SQL database. %0A#Table names will have 'landuse_' as prefix and the year and length as the ending in the format 'YYYY_Length'%0A#Store table names in a dictonary (table_names) with the .csv file name as key and SQL table name as value%0Atable_names = %7B%7D%0Afor f in files:%0A%09table_names%5Bf%5D = 'landuse_' + f%5B-13:-4%5D%0A%0Aconn = sqlite3.connect(sql_schema)%0Ac = conn.cursor()%0A%0A#Convert each .csv file into a SQL database%0A#Iterate through all .csv file, convert each file into a Pandas DataFrame and then insert into SQL schema%0Afor f in %5Bfiles%5B0%5D%5D:%0A%09print f%0A%09raw_dataset = pandas.read_csv(f, index_col = 0)%0A%09print raw_dataset%0A%09raw_dataset.to_sql(table_names%5Bf%5D,conn)%0A%0A#TODO: Apply Primary Key constraint on OBJECTID
|
|
110179832ff8ccdda81599f7d6b0675ba8feac24
|
Fix document of gaussian
|
chainer/functions/gaussian.py
|
chainer/functions/gaussian.py
|
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class Gaussian(function.Function):

    """Gaussian sampling function.

    In forward calculation, this function takes a mean and the logarithm of
    a variance as inputs, and draws a sample from the corresponding Gaussian
    distribution using the reparameterization:
    sample = mean + exp(ln_var * 0.5) * eps, where eps ~ N(0, 1).
    """

    def __init__(self):
        # eps is sampled lazily on the first forward call and cached, so
        # repeated forward passes reuse the same noise.
        self.eps = None

    def check_type_forward(self, in_types):
        # Expect exactly two float32 inputs (mean, ln_var) of equal shape.
        type_check.expect(in_types.size() == 2)
        m_type, v_type = in_types
        type_check.expect(
            m_type.dtype == numpy.float32,
            v_type.dtype == numpy.float32,
            m_type.shape == v_type.shape,
        )

    def check_type_backward(self, in_types, out_types):
        # Expect a single float32 gradient with the same shape as the mean.
        type_check.expect(out_types.size() == 1)
        m_type, v_type = in_types
        g_type, = out_types
        type_check.expect(
            g_type.dtype == numpy.float32,
            g_type.shape == m_type.shape,
        )

    def forward_cpu(self, inputs):
        mean, ln_var = inputs
        if self.eps is None:
            self.eps = numpy.random.normal(0, 1, ln_var.shape) \
                .astype(numpy.float32)
        # noise = sigma * eps, with sigma = exp(ln_var / 2).
        self.noise = numpy.exp(ln_var * 0.5) * self.eps
        return mean + self.noise,

    def forward_gpu(self, inputs):
        mean, ln_var = inputs
        if self.eps is None:
            self.eps = cuda.empty(ln_var.shape, numpy.float32)
            cuda.get_generator().fill_normal(self.eps)
        noise = cuda.empty_like(ln_var)
        # Same computation as forward_cpu, fused into one CUDA kernel.
        cuda.elementwise(
            'float* noise, const float* v, const float* e',
            'noise[i] = __expf(v[i] * 0.5f) * e[i];',
            'gaussian_forward'
        )(noise, ln_var, self.eps)
        self.noise = noise
        return mean + self.noise,

    def backward(self, inputs, grad_output):
        g, = grad_output
        # d(sample)/d(mean) = 1 and d(sample)/d(ln_var) = 0.5 * noise.
        return g, g * self.noise * 0.5,
def gaussian(mean, ln_var):
    """Gaussian sampling function.

    It takes mean :math:`\\mu` and logarithm of variance
    :math:`\\log(\\sigma^2)` as input and outputs a sample drawn from the
    gaussian :math:`N(\\mu, \\sigma^2)`.

    Args:
        mean (~chainer.Variable): Input variable representing mean
            :math:`\\mu`.
        ln_var (~chainer.Variable): Input variable representing logarithm of
            variance :math:`\\log(\\sigma^2)`.

    Returns:
        ~chainer.Variable: Output variable.
    """
    # NOTE: the previous docstring claimed ln_var was log(sigma) and the
    # distribution N(mu, sigma); the implementation computes
    # std = exp(ln_var * 0.5), i.e. ln_var is the log *variance*.
    return Gaussian()(mean, ln_var)
|
Python
| 0.000005 |
@@ -1975,16 +1975,21 @@
t takes
+mean
:math:%60%5C
@@ -1997,16 +1997,42 @@
mu%60 and
+logarithm of variance%0A
:math:%60%5C
@@ -2043,16 +2043,18 @@
(%5C%5Csigma
+%5E2
)%60 as in
@@ -2072,18 +2072,16 @@
put
+a
sample
-%0A
dra
@@ -2096,16 +2096,20 @@
gaussian
+%0A
:math:%60
@@ -2200,24 +2200,36 @@
senting mean
+%0A
:math:%60%5C%5Cmu
@@ -2338,21 +2338,30 @@
math:%60%5C%5C
+log(%5C%5C
sigma
+%5E2)
%60.%0A%0A
|
e23ccb850a6aef017ae91e35f672e6c6b5184e23
|
Add image preprocessing functions
|
skan/pre.py
|
skan/pre.py
|
Python
| 0.000004 |
@@ -0,0 +1,2052 @@
+import numpy as np%0Afrom scipy import spatial, ndimage as ndi%0Afrom skimage import filters, img_as_ubyte%0A%0A%0Adef hyperball(ndim, radius):%0A %22%22%22Return a binary morphological filter containing pixels within %60radius%60.%0A%0A Parameters%0A ----------%0A ndim : int%0A The number of dimensions of the filter.%0A radius : int%0A The radius of the filter.%0A%0A Returns%0A -------%0A ball : array of bool, shape %5B2 * radius + 1,%5D * ndim%0A The required structural element%0A %22%22%22%0A size = 2 * radius + 1%0A center = %5B(radius,) * ndim%5D%0A%0A coords = np.mgrid%5B%5Bslice(None, size),%5D * ndim%5D.reshape(ndim, -1).T%0A distances = np.ravel(spatial.distance_matrix(coords, center))%0A selector = distances %3C= radius%0A%0A ball = np.zeros((size,) * ndim, dtype=bool)%0A ball.ravel()%5Bselector%5D = True%0A return ball%0A%0A%0A%0Adef threshold(image, *, sigma=0., radius=0, offset=0.):%0A %22%22%22Use scikit-image filters to %22intelligently%22 threshold an image.%0A%0A Parameters%0A ----------%0A image : array, shape (M, N, ...%5B, 3%5D)%0A Input image, conformant with scikit-image data type%0A specification %5B1%5D_.%0A sigma : float, optional%0A If positive, use Gaussian filtering to smooth the image before%0A thresholding.%0A radius : int, optional%0A If given, use local median thresholding instead of global.%0A offset : float, optional%0A If given, reduce the threshold by this amount. Higher values%0A result in more pixels above the threshold.%0A%0A Returns%0A -------%0A thersholded : image of bool, same shape as %60image%60%0A The thresholded image.%0A%0A References%0A ----------%0A .. 
%5B1%5D http://scikit-image.org/docs/dev/user_guide/data_types.html%0A %22%22%22%0A if sigma %3E 0:%0A image = filters.gaussian(image, sigma=sigma)%0A image = img_as_ubyte(image)%0A if radius %3E 0:%0A footprint = hyperball(image.ndim, radius=radius)%0A t = ndi.median_filter(image, footprint=footprint) - offset%0A else:%0A t = filters.threshold_otsu(image) - offset%0A thresholded = image %3E t%0A return thresholded%0A
|
|
f8823429d1bc548e4a91fe8ea64086d35dd66676
|
Add race migration.
|
tvdordrecht/race/migrations/0003_auto_20150730_2250.py
|
tvdordrecht/race/migrations/0003_auto_20150730_2250.py
|
Python
| 0 |
@@ -0,0 +1,1848 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0Afrom django.conf import settings%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A migrations.swappable_dependency(settings.AUTH_USER_MODEL),%0A ('race', '0002_auto_20150729_1906'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelOptions(%0A name='distance',%0A options=%7B'ordering': %5B'order'%5D, 'verbose_name': 'Afstand', 'verbose_name_plural': 'Afstanden'%7D,%0A ),%0A migrations.AlterModelOptions(%0A name='result',%0A options=%7B'ordering': %5B'date', 'event', 'distance', 'time'%5D, 'verbose_name': 'Wie wat waar / Uitslag', 'verbose_name_plural': 'Wie wat waars / Uitslagen'%7D,%0A ),%0A migrations.RemoveField(%0A model_name='distance',%0A name='default',%0A ),%0A migrations.AddField(%0A model_name='distance',%0A name='last_modified',%0A field=models.DateTimeField(auto_now=True, verbose_name=b'laatst bewerkt', null=True),%0A ),%0A migrations.AddField(%0A model_name='distance',%0A name='last_modified_by',%0A field=models.ForeignKey(related_name='distance_last_modified_by', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name=b'Laatst bewerkt door'),%0A ),%0A migrations.AddField(%0A model_name='distance',%0A name='owner',%0A field=models.ForeignKey(related_name='distance_owner', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name=b'Eigenaar'),%0A ),%0A migrations.AddField(%0A model_name='distance',%0A name='pub_date',%0A field=models.DateTimeField(null=True, verbose_name=b'publicatie datum', blank=True),%0A ),%0A %5D%0A
|
|
564851a1a7f1378c9ef0e936640b690300a112fb
|
Add synthtool scripts (#3765)
|
java-containeranalysis/google-cloud-containeranalysis/synth.py
|
java-containeranalysis/google-cloud-containeranalysis/synth.py
|
Python
| 0.000001 |
@@ -0,0 +1,1329 @@
+# Copyright 2018 Google LLC%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22This script is used to synthesize generated parts of this library.%22%22%22%0A%0Aimport synthtool as s%0Aimport synthtool.gcp as gcp%0A%0Agapic = gcp.GAPICGenerator()%0Acommon_templates = gcp.CommonTemplates()%0A%0Alibrary = gapic.java_library(%0A service='container',%0A version='v1beta1',%0A config_path='/google/devtools/containeranalysis/artman_containeranalysis_v1beta1.yaml',%0A artman_output_name='')%0A%0As.copy(library / 'gapic-google-cloud-containeranalysis-v1beta1/src', 'src')%0As.copy(library / 'grpc-google-cloud-containeranalysis-v1beta1/src', '../../google-api-grpc/grpc-google-cloud-containeranalysis-v1beta1/src')%0As.copy(library / 'proto-google-cloud-containeranalysis-v1beta1/src', '../../google-api-grpc/proto-google-cloud-containeranalysis-v1beta1/src')%0A
|
|
e2124aef9cb91dac3a597d353cd217ed328221e5
|
Add gyp file to build cpu_features static library.
|
ndk/sources/android/cpufeatures/cpu_features.gyp
|
ndk/sources/android/cpufeatures/cpu_features.gyp
|
Python
| 0 |
@@ -0,0 +1,425 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.%0A# Use of this source code is governed by a BSD-style license that can be%0A# found in the LICENSE file.%0A%0A%7B%0A 'targets': %5B%0A %7B%0A 'target_name': 'cpu_features',%0A 'type': 'static_library',%0A 'direct_dependent_settings': %7B%0A 'include_dirs': %5B%0A '.',%0A %5D,%0A %7D,%0A 'sources': %5B%0A 'cpu-features.c',%0A %5D,%0A %7D,%0A %5D,%0A%7D%0A
|
|
f4aad329c445415f1306882d386abe43969ba6a9
|
Add test for API ticket basics.
|
Allura/allura/tests/functional/test_rest_api_tickets.py
|
Allura/allura/tests/functional/test_rest_api_tickets.py
|
Python
| 0 |
@@ -0,0 +1,2218 @@
+from pprint import pprint%0Afrom datetime import datetime, timedelta%0Aimport json%0A%0Afrom pylons import c%0Afrom ming.orm import session%0A%0Afrom allura import model as M%0Afrom allura.lib import helpers as h%0Afrom alluratest.controller import TestController, TestRestApiBase%0A%0A%0Aclass TestApiTicket(TestRestApiBase):%0A%0A def set_api_ticket(self, expire=None):%0A if not expire:%0A expire = timedelta(days=1)%0A api_ticket = M.ApiTicket(user_id=self.user._id, capabilities=%7B'import': 'test'%7D,%0A expires=datetime.utcnow() + expire)%0A session(api_ticket).flush()%0A self.set_api_token(api_ticket)%0A%0A def test_bad_signature(self):%0A self.set_api_ticket()%0A r = self.api_post('/rest/p/test/home/', api_signature='foo')%0A assert r.status_int == 403%0A%0A def test_bad_token(self):%0A self.set_api_ticket()%0A r = self.api_post('/rest/p/test/home/', api_key='foo')%0A assert r.status_int == 403%0A%0A def test_bad_timestamp(self):%0A self.set_api_ticket()%0A r = self.api_post('/rest/p/test/home/', api_timestamp=(datetime.utcnow() + timedelta(days=1)).isoformat())%0A assert r.status_int == 403%0A%0A def test_bad_path(self):%0A self.set_api_ticket()%0A r = self.api_post('/rest/1/test/home/')%0A assert r.status_int == 404%0A r = self.api_post('/rest/p/1223/home/')%0A assert r.status_int == 404%0A r = self.api_post('/rest/p/test/12home/')%0A assert r.status_int == 404%0A%0A def test_no_api(self):%0A self.set_api_ticket()%0A r = self.api_post('/rest/p/test/admin/')%0A assert r.status_int == 404%0A%0A def test_project_ping(self):%0A self.set_api_ticket()%0A r = self.api_post('/rest/p/test/home/')%0A assert r.status_int == 200%0A assert r.json%5B'shortname'%5D == 'test'%0A%0A def test_project_ping_expired_ticket(self):%0A self.set_api_ticket(timedelta(seconds=-1))%0A r = self.api_post('/rest/p/test/home/')%0A assert r.status_int == 403%0A%0A def test_subproject_ping(self):%0A self.set_api_ticket()%0A r = self.api_post('/rest/p/test/sub1/home/')%0A assert r.status_int 
== 200%0A assert r.json%5B'shortname'%5D == 'test/sub1'%0A
|
|
38a5a1d5bd5bcccf52a66a84377429bdecdfa4a2
|
Replace g.next() with next(g)
|
troveclient/tests/test_common.py
|
troveclient/tests/test_common.py
|
from testtools import TestCase
from mock import Mock
from troveclient import common
class CommonTest(TestCase):

    def test_check_for_exceptions(self):
        # Every client/server error status must raise.
        for status_code in (400, 422, 500):
            resp = Mock()
            resp.status_code = status_code
            self.assertRaises(Exception,
                              common.check_for_exceptions, resp, "body")

    def test_limit_url(self):
        url = "test-url"
        # Without pagination arguments the URL is returned untouched.
        # (Removed dead `limit = None` / `marker = None` assignments that
        # were immediately overwritten and never used.)
        self.assertEqual(url, common.limit_url(url))
        expected = "test-url?marker=test-marker&limit=test-limit"
        self.assertEqual(
            expected,
            common.limit_url(url, limit="test-limit", marker="test-marker"))
class PaginatedTest(TestCase):

    def setUp(self):
        super(PaginatedTest, self).setUp()
        self.items_ = ["item1", "item2"]
        self.next_marker_ = "next-marker"
        self.links_ = ["link1", "link2"]
        self.pgn = common.Paginated(self.items_, self.next_marker_,
                                    self.links_)

    def tearDown(self):
        super(PaginatedTest, self).tearDown()

    def test___init__(self):
        self.assertEqual(self.items_, self.pgn.items)
        self.assertEqual(self.next_marker_, self.pgn.next)
        self.assertEqual(self.links_, self.pgn.links)

    def test___len__(self):
        self.assertEqual(len(self.items_), self.pgn.__len__())

    def test___iter__(self):
        itr_expected = self.items_.__iter__()
        itr = self.pgn.__iter__()
        # Use the builtin next() rather than iterator.next(), which only
        # exists on Python 2 iterators.
        self.assertEqual(next(itr_expected), next(itr))
        self.assertEqual(next(itr_expected), next(itr))
        self.assertRaises(StopIteration, next, itr_expected)
        self.assertRaises(StopIteration, next, itr)

    def test___getitem__(self):
        self.assertEqual(self.items_[0], self.pgn.__getitem__(0))

    def test___setitem__(self):
        self.pgn.__setitem__(0, "new-item")
        self.assertEqual("new-item", self.pgn.items[0])

    def test___delitem(self):
        del self.pgn[0]
        self.assertEqual(1, self.pgn.__len__())

    def test___reversed__(self):
        # Reversal yields the items back-to-front, then exhausts.
        itr = self.pgn.__reversed__()
        self.assertEqual("item2", next(itr))
        self.assertEqual("item1", next(itr))
        self.assertRaises(StopIteration, next, itr)

    def test___contains__(self):
        self.assertTrue(self.pgn.__contains__("item1"))
        self.assertTrue(self.pgn.__contains__("item2"))
        self.assertFalse(self.pgn.__contains__("item3"))
|
Python
| 0.000939 |
@@ -1597,32 +1597,37 @@
elf.assertEqual(
+next(
itr_expected.nex
@@ -1622,34 +1622,27 @@
expected
-.next(), itr.
+),
next(
+itr
))%0A
@@ -1661,16 +1661,21 @@
rtEqual(
+next(
itr_expe
@@ -1682,26 +1682,19 @@
cted
-.next(), itr.
+),
next(
+itr
))%0A
@@ -1728,24 +1728,30 @@
opIteration,
+ next,
itr_expecte
@@ -1751,21 +1751,16 @@
expected
-.next
)%0A
@@ -1790,32 +1790,33 @@
pIteration,
-itr.
next
+, itr
)%0A%0A def t
@@ -2284,25 +2284,24 @@
item2%22,
-itr.
next(
+itr
))%0A
@@ -2333,17 +2333,16 @@
1%22,
-itr.
next(
+itr
))%0A
@@ -2385,16 +2385,17 @@
on,
-itr.
next
+, itr
)%0A%0A
|
3eb8e73faf56bf3e3e3eb7cc8209c780d0f71b62
|
create nanoparticle class
|
nanoparticle.py
|
nanoparticle.py
|
Python
| 0.000002 |
@@ -0,0 +1,2726 @@
+from scipy.constants import pi%0Aimport numpy as np%0Afrom math import cos, sin%0A%0Aclass NanoParticle(object):%0A def __init__(self, r, n_acceptors, tau_D, R_Forster):%0A %22%22%22%0A Create a nanoparticle object%0A %0A Parameters%0A ----------%0A R : float%0A Radio of nanoparticule%0A n_acceptors : float%0A Number of acceptors in the nanoparticle%0A tau_D : float%0A Lifetime of the donor%0A R_Forster : float%0A Radio de Forster%0A %22%22%22%0A self.R = r%0A self.n_acceptors = n_acceptors%0A self.acceptors_positions = np.zeros((n_acceptors,3))%0A self.tau_D = tau_D%0A self.R_Forster = R_Forster%0A %0A def deposit_superficial_acceptors(self):%0A %22%22%22%0A Generate random number of acceptors (n_acceptors) on the surface of the nanoparticle.%0A %22%22%22%0A for i in range(self.n_acceptors):%0A #Generate in spheric%0A theta = np.random.uniform(low=0, high=2*pi)%0A phi = np.random.uniform(low=0, high=pi)%0A #Transform to cartesian%0A self.acceptors_positions%5Bi%5D%5B0%5D = sin(phi)*cos(theta)*self.R%0A self.acceptors_positions%5Bi%5D%5B1%5D = sin(phi)*sin(theta)*self.R%0A self.acceptors_positions%5Bi%5D%5B2%5D = cos(phi)*self.R%0A %0A def deposit_volumetrically_acceptors(self):%0A %22%22%22%0A Generate random number of acceptors (n_acceptors) anywhere in the nanoparticle.%0A %0A Is not easy generate random point usin spherical coordinates.%0A For now, we generate random point in cartesian coordinates.%0A Reference link to implement in sphereic: http://mathworld.wolfram.com/SpherePointPicking.html%0A %22%22%22%0A for i in range(self.n_acceptors):%0A self.acceptors_positions%5Bi%5D%5B0%5D = np.random.uniform(low=-self.R, high=self.R)%0A self.acceptors_positions%5Bi%5D%5B1%5D = np.random.uniform(low=-self.R, high=self.R)%0A self.acceptors_positions%5Bi%5D%5B2%5D = np.random.uniform(low=-self.R, high=self.R)%0A %0A def photon(self):%0A %22%22%22%0A Generate random position of a photon in the nanoparticle.%0A %22%22%22%0A x = np.random.uniform(low=-self.R, 
high=self.R)%0A y = np.random.uniform(low=-self.R, high=self.R)%0A z = np.random.uniform(low=-self.R, high=self.R)%0A self.photon = np.array(%5Bx, y, z%5D)%0A %0A def walk(self):%0A pass%0A %0A def distance(self):%0A %22%22%22%0A Calculate, for all acceptor, 1/(r**6), where r are the distace bewteen the photon and acceptors%0A %22%22%22%0A self.dist = np.zeros(self.n_acceptors)%0A for i in range(self.n_acceptors):%0A self.dist%5Bi%5D = (sum((self.photon - self.acceptors_positions%5Bi%5D)**2))**3%0A
|
|
b3e6855489eba5d59507ef6fb4c92f8284526ec1
|
Check consecutive elements in an array
|
Arrays/check_consecutive_elements.py
|
Arrays/check_consecutive_elements.py
|
Python
| 0.000024 |
@@ -0,0 +1,1626 @@
+import unittest%0A%22%22%22%0AGiven an unsorted array of numbers, return true if the array only contains consecutive elements.%0AInput: 5 2 3 1 4%0AOuput: True (consecutive elements from 1 through 5)%0AInput: 83 78 80 81 79 82%0AOutput: True (consecutive elements from 78 through 83)%0AInput: 34 23 52 12 3%0AOutput: False%0A%22%22%22%0A%0A%22%22%22%0AApproach:%0A1. First check that there are (max - min + 1) elements in the array.%0A2. Second, check that all elements are unique.%0A3. If all elements are consecutive, we can use arr%5Bi%5D-min as an index into the array.%0A4. If element is positive, make it negative, else if its negative, there is repetition.%0ANOTE: This only works if all numbers are positive, otherwise use a hashmap to check for dupes.%0AO(n) time complexity and O(1) space complexity.%0A%22%22%22%0A%0A%0Adef check_consecutive_only(list_of_numbers):%0A min_val = min(list_of_numbers)%0A max_val = max(list_of_numbers)%0A%0A if len(list_of_numbers) != (max_val - min_val + 1):%0A return False%0A%0A for num in list_of_numbers:%0A index = abs(num) - min_val%0A if list_of_numbers%5Bindex%5D %3C 0:%0A return False%0A list_of_numbers%5Bindex%5D = -list_of_numbers%5Bindex%5D%0A%0A return True%0A%0A%0Aclass TestConsecutiveElements(unittest.TestCase):%0A%0A def test_consecutive_true(self):%0A list_of_numbers = %5B83, 78, 80, 81, 79, 82%5D%0A self.assertTrue(check_consecutive_only(list_of_numbers))%0A%0A def test_consecutive_false(self):%0A list_of_numbers = %5B7, 6, 5, 5, 3, 4%5D%0A self.assertFalse(check_consecutive_only(list_of_numbers))%0A list_of_numbers = %5B34, 23, 52, 12, 3%5D%0A self.assertFalse(check_consecutive_only(list_of_numbers))%0A
|
|
55b33bff9856cc91943f0a5ae492db1fdc7d8d5a
|
Add missing python 3 only file.
|
numba/tests/jitclass_usecases.py
|
numba/tests/jitclass_usecases.py
|
Python
| 0 |
@@ -0,0 +1,453 @@
+%22%22%22%0AUsecases with Python 3 syntax in the signatures. This is a separate module%0Ain order to avoid syntax errors with Python 2.%0A%22%22%22%0A%0A%0Aclass TestClass1(object):%0A def __init__(self, x, y, z=1, *, a=5):%0A self.x = x%0A self.y = y%0A self.z = z%0A self.a = a%0A%0A%0Aclass TestClass2(object):%0A def __init__(self, x, y, z=1, *args, a=5):%0A self.x = x%0A self.y = y%0A self.z = z%0A self.args = args%0A self.a = a%0A
|
|
e251aff9a232a66b2d24324f394da2ad9345ce79
|
Add migration script for changing users with None as email_verifications to {}
|
scripts/migration/migrate_none_as_email_verification.py
|
scripts/migration/migrate_none_as_email_verification.py
|
Python
| 0.000001 |
@@ -0,0 +1,1446 @@
+%22%22%22 Ensure that users with User.email_verifications == None now have %7B%7D instead%0A%22%22%22%0A%0Aimport logging%0Aimport sys%0Afrom tests.base import OsfTestCase%0Afrom tests.factories import UserFactory%0Afrom modularodm import Q%0Afrom nose.tools import *%0Afrom website import models%0Afrom website.app import init_app%0Afrom scripts import utils as scripts_utils%0Alogger = logging.getLogger(__name__)%0Alogger.setLevel(logging.INFO)%0A%0Adef main():%0A init_app(routes=False)%0A dry_run = 'dry' in sys.argv%0A count = 0%0A%0A if not dry_run:%0A scripts_utils.add_file_logger(logger, __file__)%0A logger.info(%22Iterating users with None as their email_verification%22)%0A for user in get_users_with_none_in_email_verifications():%0A user.email_verifications = %7B%7D%0A count += 1%0A logger.info(repr(user))%0A if not dry_run:%0A user.save()%0A%0A print('%7B%7D users migrated'.format(count))%0A%0Adef get_users_with_none_in_email_verifications():%0A return models.User.find( Q('email_verifications', 'eq', None))%0A%0Aclass TestMigrateDates(OsfTestCase):%0A def setUp(self):%0A super(TestMigrateDates, self).setUp()%0A self.user1 = UserFactory(email_verfications=None)%0A self.user2 = UserFactory(email_verfications=%7B%7D)%0A%0A def test_migrate_none_as_email(self):%0A main()%0A assert_equal(self.user1.email_verifications, %7B%7D)%0A assert_not_equal(self.user2.email_verifications, None)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
97bf6ba36b27822a9bd73cb9a27d9878e48945e2
|
add a decorator to ignore signals from fixture loading
|
project/apps/utils/signal_decorators.py
|
project/apps/utils/signal_decorators.py
|
Python
| 0 |
@@ -0,0 +1,381 @@
+%0A%0Afrom functools import wraps%0A%0Adef disable_for_loaddata(signal_handler):%0A %22%22%22%0A Decorator that turns off signal handlers when loading fixture data.%0A%0A based on http://stackoverflow.com/a/15625121%0A %22%22%22%0A%0A @wraps(signal_handler)%0A def wrapper(*args, **kwargs):%0A if kwargs.get('raw'):%0A return%0A signal_handler(*args, **kwargs)%0A return wrapper%0A
|
|
f8b5e413b46350f25bd7d231a8102c706fbf34f8
|
Add new package: py-devlib (#16982)
|
var/spack/repos/builtin/packages/py-devlib/package.py
|
var/spack/repos/builtin/packages/py-devlib/package.py
|
Python
| 0 |
@@ -0,0 +1,1903 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PyDevlib(PythonPackage):%0A %22%22%22Library for interaction with and instrumentation of remote devices.%22%22%22%0A%0A homepage = %22https://github.com/ARM-software/devlib%22%0A url = %22https://github.com/ARM-software/devlib/archive/v1.2.tar.gz%22%0A%0A version('1.2', sha256='4cdb6767a9430b49eecffe34e2b9fcbcfc7e65328122d909aa71c3d11a86503d')%0A version('1.1.2', sha256='c900420cb97239b4642f5e333e43884fb09507b530edb55466e7b82103b4deaa')%0A version('1.1.1', sha256='eceb7a2721197a6023bbc2bbf346663fc117e4f54e1eb8334a3085dead9c8036')%0A version('1.1.0', sha256='317e9be2303ebb6aebac9a2ec398c622ea16d6e46079dc9e37253b37d739ca9d')%0A version('1.0.0', sha256='2f78278bdc9731a4fa13c41c74f08e0b8c5143de5fa1e1bdb2302673aec45862')%0A version('0.0.4', sha256='0f55e684d43fab759d0e74bd8f0d0260d9546a8b8d853d286acfe5e00c86da05')%0A version('0.0.3', sha256='29ec5f1de481783ab0b9efc111dfeb67c890187d56fca8592b25ee756ff32902')%0A version('0.0.2', sha256='972f33be16a06572a19b67d909ee0ed6cb6f21f9a9da3c43fd0ff5851421051d')%0A%0A depends_on('py-setuptools', type='build')%0A depends_on('py-python-dateutil', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('py-pyserial', type=('build', 'run'))%0A depends_on('py-wrapt', type=('build', 'run'))%0A depends_on('py-future', type=('build', 'run'))%0A depends_on('py-pandas', type=('build', 'run'))%0A depends_on('py-enum34', type=('build', 'run'), when='%5Epython@:3.3')%0A depends_on('py-contextlib2', type=('build', 'run'), when='%5Epython@:2.999')%0A depends_on('py-numpy@:1.16.4', type=('build', 'run'), when='%5Epython@:2.999')%0A depends_on('py-numpy', type=('build', 'run'), when='%5Epython@:3.0')%0A
|
|
de39aa257d845ecb6e1c2e7c4c4911497d00cdcf
|
add sample, non working, test_wsgi
|
os_loganalyze/tests/test_wsgi.py
|
os_loganalyze/tests/test_wsgi.py
|
Python
| 0 |
@@ -0,0 +1,1167 @@
+#!/usr/bin/python%0A#%0A# Copyright (c) 2013 IBM Corp.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0A%0A%22%22%22%0ATest the ability to convert files into wsgi generators%0A%22%22%22%0A%0Afrom os_loganalyze.tests import base%0Aimport os_loganalyze.wsgi as log_wsgi%0A%0A%0Adef _start_response(*args):%0A return%0A%0A%0Aclass TestWsgi(base.TestCase):%0A%0A def test_nofile(self):%0A gen = log_wsgi.application(None, _start_response)%0A self.assertTrue(False)%0A self.assertEqual(gen, %5B'Invalid file url'%5D)%0A%0A environ = %7B%0A 'path': '/htmlify/foo.txt'%0A %7D%0A gen = log_wsgi.application(environ, _start_response)%0A self.assertEqual(gen, %5B'Invalid file url1'%5D)%0A
|
|
4c8ea40eeec6df07cf8721c256ad8cc3d35fb23e
|
Add intial unit test file
|
src/test_main.py
|
src/test_main.py
|
Python
| 0 |
@@ -0,0 +1,1699 @@
+import pytest%0Afrom main import *%0A%0Atest_files = %5B %22examples/C/filenames/script%22, %22examples/Clojure/index.cljs.hl%22, %0A %22examples/Chapel/lulesh.chpl%22, %22examples/Forth/core.fth%22, %0A %22examples/GAP/Magic.gd%22, %22examples/JavaScript/steelseries-min.js%22,%0A %22examples/Matlab/FTLE_reg.m%22, %22examples/Perl6/for.t%22,%0A %22examples/VimL/solarized.vim%22, %22examples/C/cpu.c%22,%0A %22examples/CSS/bootstrap.css%22, %22examples/D/mpq.d%22,%0A %22examples/Go/api.pb.go%22, %22examples/HTML+ERB/index.html.erb%22%5D%0A%0Anumber_of_comments = %5B%0A 423,# examples/C/filenames/script%0A 13, # examples/Clojure/index.cljs.hl%0A 609,# examples/Chapel/lulesh.chpl%0A 0, # examples/Forth/core.fth%0A 3, # examples/GAP/Magic.gd %0A 2, # examples/JavaScript/steelseries-min.js%0A 6, # examples/Matlab/FTLE_reg.m%0A 586,# examples/Perl6/for.t%0A 20, # examples/VimL/solarized.vim%0A 39, # examples/C/cpu.c%0A 680,# examples/CSS/bootstrap.css%0A 167,# examples/D/mpq.d %0A 0, # examples/Go/api.pb.go%0A 10 # examples/HTML+ERB/index.html.erb%0A%5D%0Adef test_get_comment_tokens():%0A from pygments.lexers.c_cpp import CLexer%0A%0A file_text_test = %22int main(int argc, char%5B%5D argv)%7B%5Cn//This is a comment%5Cn%7D%5Cn%22%0A c_lexer = CLexer()%0A%0A results = %5B%5D%0A for comment in get_comment_tokens(file_text_test, c_lexer):%0A results.append(comment)%0A%0A assert len(results) == 1%0A assert results%5B0%5D == %22//This is a comment%5Cn%22%0A%0Adef test_get_tokens_from_file():%0A for index,file in enumerate(test_files, 0):%0A result = get_tokens_from_file(%22../%22 + file)%0A #print(index)%0A print(file)%0A assert number_of_comments%5Bindex%5D == len(result.keys())
|
|
33a3e4a8adc6b3284de18fe02c67eafa3a391226
|
Create tinycrypt.py
|
tinycrypt.py
|
tinycrypt.py
|
Python
| 0.000091 |
@@ -0,0 +1,2 @@
+ %0A
|
|
4a179825234b711a729fce5bc9ffc8de029c0999
|
Test for invalid data when loading
|
utest/controller/test_loading.py
|
utest/controller/test_loading.py
|
import unittest
from robot.utils.asserts import assert_true, assert_raises
from robotide.application.chiefcontroller import ChiefController
from robotide.namespace import Namespace
from resources import MINIMAL_SUITE_PATH, RESOURCE_PATH
from robot.errors import DataError
class _FakeObserver(object):
def notify(self):
pass
def finished(self):
self.finished = True
class TestDataLoading(unittest.TestCase):
    """Exercise ChiefController data loading for suites, resources and
    invalid paths."""

    def setUp(self):
        self.ctrl = ChiefController(Namespace())
        self.load_observer = _FakeObserver()

    def _load(self, path):
        # Drive the controller and confirm the observer saw completion.
        self.ctrl.load_data(self.load_observer, path)
        assert_true(self.load_observer.finished)

    def test_loading_suite(self):
        self._load(MINIMAL_SUITE_PATH)
        assert_true(self.ctrl._controller is not None)

    def test_loading_resource(self):
        self._load(RESOURCE_PATH)
        assert_true(self.ctrl.resources != [])

    def test_loading_invalid_data(self):
        assert_raises(DataError, self._load, 'invalid')
# Allow running this module directly as a test script.
if __name__ == "__main__":
    unittest.main()
|
Python
| 0 |
@@ -71,41 +71,48 @@
ises
-%0A%0Afrom robotide.application.chief
+, assert_raises_with_msg%0A%0Afrom robotide.
cont
@@ -238,16 +238,34 @@
RCE_PATH
+, FakeLoadObserver
%0Afrom ro
@@ -298,129 +298,8 @@
r%0A%0A%0A
-class _FakeObserver(object):%0A%0A def notify(self):%0A pass%0A%0A def finished(self):%0A self.finished = True%0A%0A%0A
clas
@@ -440,13 +440,16 @@
r =
-_
Fake
+Load
Obse
@@ -933,16 +933,443 @@
ished)%0A%0A
+ def test_loading_invalid_datafile(self):%0A assert_raises_with_msg(DataError, 'Invalid data file: invalid.',%0A self.ctrl.load_datafile, FakeLoadObserver(),%0A 'invalid')%0A%0A def test_loading_invalid_resource(self):%0A assert_raises_with_msg(DataError, 'Invalid resource file: invalid.',%0A self.ctrl.load_resource, 'invalid')%0A%0A
%0Aif __na
|
f8d49af459fb3b751f44ecf625521c62fa68df0a
|
Check in script to delete existing autochecked tasks
|
bin/ext_service/historical/fix_autocheck_tasks.py
|
bin/ext_service/historical/fix_autocheck_tasks.py
|
Python
| 0 |
@@ -0,0 +1,2737 @@
+import logging%0Aimport argparse%0Aimport uuid%0Aimport emission.core.wrapper.user as ecwu%0Aimport emission.core.get_database as edb%0Aimport emission.net.ext_service.habitica.proxy as proxy%0A%0Adef fix_autocheck_for_user(uuid):%0A auto_tasks = find_existing_auto_tasks(uuid)%0A delete_tasks(uuid, auto_tasks)%0A create_new_tasks(uuid)%0A%0A# I wanted to reuse existing code, but it is unclear how to do so.%0A# in particular, I will have either the format of the old tests or of%0A# the new tests. Most PRs will not keep the old and the new around side%0A# to side. Since this is a historical, as opposed to ongoing script, I%0A# think this is fine.%0A%0Adef find_existing_auto_tasks(uuid):%0A method_uri = %22/api/v3/tasks/user%22%0A get_habits_uri = method_uri + %22?type=habits%22%0A #First, get all habits and check if the habit requested already exists%0A result = proxy.habiticaProxy(uuid, 'GET', get_habits_uri, None)%0A habits = result.json()%0A auto_tasks = %5B%5D%0A for habit in habits%5B'data'%5D:%0A print habit%5B'text'%5D, habit%5B%22notes%22%5D, habit%5B%22id%22%5D%0A if %22automatically%22 in habit%5B'notes'%5D:%0A logging.debug(%22Found auto task %25s, %25s, %25s%22 %25%0A (habit%5B'text'%5D, habit%5B'notes'%5D, habit%5B'id'%5D))%0A auto_tasks.append(habit)%0A else:%0A if len(habit%5B%22challenge%22%5D) %3E 0:%0A logging.info(%22Found challenge task %25s, %25s, %25s, unsure what to do%22 %25%0A (habit%5B'text'%5D, habit%5B'notes'%5D, habit%5B'id'%5D))%0A else:%0A logging.debug(%22Found manual task %25s, %25s, %25s%22 %25%0A (habit%5B'text'%5D, habit%5B'notes'%5D, habit%5B'id'%5D))%0A return auto_tasks%0A%0Adef delete_tasks(uuid, task_list):%0A method_uri = %22/api/v3/tasks/%22%0A%0A for task in task_list:%0A curr_task_del_uri = method_uri + str(task%5B%22id%22%5D)%0A result = proxy.habiticaProxy(uuid, 'DELETE', curr_task_del_uri, %7B%7D)%0A logging.debug(%22Result of deleting %25s = %25s%22 %25 (task%5B%22id%22%5D, result.json()))%0A%0Adef 
create_new_tasks(uuid):%0A pass%0A%0Aif __name__ == '__main__':%0A logging.basicConfig(level=logging.DEBUG)%0A%0A parser = argparse.ArgumentParser()%0A group = parser.add_mutually_exclusive_group(required=True)%0A group.add_argument(%22-e%22, %22--user_email%22)%0A group.add_argument(%22-u%22, %22--user_uuid%22)%0A group.add_argument(%22-a%22, %22--all%22, action=%22store_true%22)%0A%0A args = parser.parse_args()%0A%0A if args.all:%0A for uuid in edb.get_habitica_db().distinct(%22user_id%22):%0A logging.debug(%22About to check user %25s%22 %25 uuid)%0A fix_autocheck_for_user(uuid)%0A else:%0A if args.user_uuid:%0A del_uuid = uuid.UUID(args.user_uuid)%0A else:%0A del_uuid = ecwu.User.fromEmail(args.user_email).uuid%0A%0A fix_autocheck_for_user(del_uuid)%0A
|
|
89ef576ba4e707eef653c670b32fa40d862e79ec
|
Add package for the Python regex library (#4771)
|
var/spack/repos/builtin/packages/py-regex/package.py
|
var/spack/repos/builtin/packages/py-regex/package.py
|
Python
| 0 |
@@ -0,0 +1,1595 @@
+##############################################################################%0A# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/llnl/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass PyRegex(PythonPackage):%0A %22%22%22Alternative regular expression module, to replace re.%22%22%22%0A%0A homepage = %22https://pypi.python.org/pypi/regex/%22%0A url = %22https://pypi.io/packages/source/r/regex/regex-2017.07.11.tar.gz%22%0A%0A version('2017.07.11', '95f81ebb5273c7ad9a0c4d1ac5a94eb4')%0A%0A depends_on('py-setuptools', type='build')%0A
|
|
0e6a7a805ff08f191c88bda67992cb874f538c2f
|
Add migration for unitconnection section types
|
services/migrations/0097_alter_unitconnection_section_type.py
|
services/migrations/0097_alter_unitconnection_section_type.py
|
Python
| 0 |
@@ -0,0 +1,932 @@
+# Generated by Django 4.0.5 on 2022-06-22 05:40%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A (%22services%22, %220096_create_syllables_fi_columns%22),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name=%22unitconnection%22,%0A name=%22section_type%22,%0A field=models.PositiveSmallIntegerField(%0A choices=%5B%0A (1, %22PHONE_OR_EMAIL%22),%0A (2, %22LINK%22),%0A (3, %22TOPICAL%22),%0A (4, %22OTHER_INFO%22),%0A (5, %22OPENING_HOURS%22),%0A (6, %22SOCIAL_MEDIA_LINK%22),%0A (7, %22OTHER_ADDRESS%22),%0A (8, %22HIGHLIGHT%22),%0A (9, %22ESERVICE_LINK%22),%0A (10, %22PRICE%22),%0A (11, %22SUBGROUP%22),%0A %5D,%0A null=True,%0A ),%0A ),%0A %5D%0A
|
|
9009315381edd69adac3319b973b3bcdb16f23e4
|
Add missing module wirecloud.live.utils
|
src/wirecloud/live/utils.py
|
src/wirecloud/live/utils.py
|
Python
| 0.000024 |
@@ -0,0 +1,1012 @@
+# -*- coding: utf-8 -*-%0A%0A# Copyright (c) 2016 CoNWeT Lab., Universidad Polit%C3%A9cnica de Madrid%0A%0A# This file is part of Wirecloud.%0A%0A# Wirecloud is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A%0A# Wirecloud is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A%0A# You should have received a copy of the GNU Affero General Public License%0A# along with Wirecloud. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0Afrom __future__ import unicode_literals%0A%0Afrom base64 import b64encode%0A%0A%0Adef build_group_name(name):%0A return b%22wc-%25s%22 %25 b64encode(name.encode('utf-8'), b'-_').replace(b'=', b'.')%0A%0A%0AWIRECLOUD_BROADCAST_GROUP = build_group_name('live-*')%0A
|
|
5e49eb4fb6bce9cdeae515590530b78e4dde89d9
|
Add alternate example for `match_template`.
|
doc/examples/plot_match_face_template.py
|
doc/examples/plot_match_face_template.py
|
Python
| 0 |
@@ -0,0 +1,1043 @@
+%22%22%22%0A=================%0ATemplate Matching%0A=================%0A%0AIn this example, we use template matching to identify the occurrence of an%0Aimage patch (in this case, a sub-image centered on the camera man's head).%0ASince there's only a single match, the maximum value in the %60match_template%60%0Aresult%60 corresponds to the head location. If you expect multiple matches, you%0Ashould use a proper peak-finding function.%0A%0A%22%22%22%0A%0Aimport numpy as np%0Aimport matplotlib.pyplot as plt%0Afrom skimage import data%0Afrom skimage.feature import match_template%0A%0Aimage = data.camera()%0Ahead = image%5B70:170, 180:280%5D%0A%0Aresult = match_template(image, head)%0A%0Afig, (ax1, ax2) = plt.subplots(ncols=2, figsize=(8, 4))%0A%0Aax1.imshow(head)%0Aax1.set_axis_off()%0Aax1.set_title('template')%0A%0Aax2.imshow(image)%0Aax2.set_axis_off()%0Aax2.set_title('image')%0A%0A# highlight matched region%0Axy = np.unravel_index(np.argmax(result), image.shape)%5B::-1%5D # -1 flips ij to xy%0Awface, hface = head.shape%0Arect = plt.Rectangle(xy, wface, hface, edgecolor='r', facecolor='none')%0Aax2.add_patch(rect)%0A%0Aplt.show()%0A%0A
|
|
f284bb85a0b28142850f980a33f38a3cf25d9da8
|
Solve Knowit 2017/08
|
knowit2017/08.py
|
knowit2017/08.py
|
Python
| 0.000118 |
@@ -0,0 +1,840 @@
+memoized = %7B%7D%0A%0A%0Adef christmas_number(n):%0A in_sequence = %7B1: True%7D%0A%0A while True:%0A if n %3E 10000000:%0A for k in in_sequence:%0A memoized%5Bk%5D = False%0A%0A return False%0A%0A in_sequence%5Bn%5D = True%0A%0A if n in memoized:%0A return memoized%5Bn%5D%0A%0A n = sum(%5Bint(d)**2 for d in str(n)%5D)%0A%0A if n == 1:%0A for k in in_sequence:%0A memoized%5Bk%5D = True%0A%0A return True%0A%0A if n in in_sequence:%0A for k in in_sequence:%0A memoized%5Bk%5D = False%0A%0A return False%0A%0A%0Adef test_christmas_number():%0A assert christmas_number(13) is True%0A%0A%0Aif __name__ == %22__main__%22:%0A s = 0%0A%0A for n in range(1, 10000001):%0A if n %25 100000 == 0:%0A print(n)%0A%0A if christmas_number(n):%0A s += n%0A%0A print(s)
|
|
2dd0efce803c4dfcc4c5d61cf6fec1d5ee64e1b3
|
test for btcSpecialTx.py
|
test/test_btcSpecialTx.py
|
test/test_btcSpecialTx.py
|
Python
| 0.000001 |
@@ -0,0 +1,1701 @@
+from pyethereum import tester%0Afrom datetime import datetime, date%0Aimport math%0A%0Aimport pytest%0Aslow = pytest.mark.slow%0A%0Aclass TestBtcSpecialTx(object):%0A%0A CONTRACT = 'btcSpecialTx.py'%0A CONTRACT_GAS = 55000%0A%0A ETHER = 10 ** 18%0A%0A def setup_class(cls):%0A tester.gas_limit = 2 * 10**6%0A cls.s = tester.state()%0A cls.c = cls.s.abi_contract(cls.CONTRACT, endowment=2000*cls.ETHER)%0A cls.snapshot = cls.s.snapshot()%0A cls.seed = tester.seed%0A%0A def setup_method(self, method):%0A self.s.revert(self.snapshot)%0A tester.seed = self.seed%0A%0A%0A def test_testnetTx(self):%0A # testnet tx a51a71f8094f9b4e266fcccd55068e809277ec79bfa44b7bdb8f1355e9bb8460%0A # tx%5B9%5D of block 350559%0A txStr = '010000000158115acce0e68bc58ecb89e6452380bd68da56dc0a163d9806c04b24dfefe269000000008a47304402207a0bf036d5c78d6910d608c47c9e59cbf5708df51fd22362051b8f1ecd9691d1022055ee6ace9f12f02720ce91f62916570dbd93b2aa1e91be7da8e5230f62606db7014104858527cb6bf730cbd1bcf636bc7e77bbaf0784b9428ec5cca2d8378a0adc75f5ca893d14d9db2034cbb7e637aacf28088a68db311ff6f1ebe6d00a62fed9951effffffff0210980200000000001976a914a0dc485fc3ade71be5e1b68397abded386c0adb788ac10270000000000001976a914d3193ccb3564d5425e4875fe763e26e2fce1fd3b88ac00000000'%0A res = self.c.getFirst2Outputs(txStr)%0A assert res%5B0%5D == 170000%0A%0A out1stScriptIndex = res%5B1%5D%0A btcAddrIndex = out1stScriptIndex*2 + 6%0A assert txStr%5BbtcAddrIndex:btcAddrIndex+40%5D == 'a0dc485fc3ade71be5e1b68397abded386c0adb7'%0A%0A out2ndScriptIndex = res%5B2%5D%0A ethAddrIndex = out2ndScriptIndex*2 + 6%0A assert txStr%5BethAddrIndex:ethAddrIndex+40%5D == 'd3193ccb3564d5425e4875fe763e26e2fce1fd3b'%0A
|
|
edc5116472c49370e5bf3ff7f9f7872732b0285e
|
Add a solution to the phone number problem: can a phone number be represented as words in a dictionary?
|
phone_numbers.py
|
phone_numbers.py
|
Python
| 0.999746 |
@@ -0,0 +1,1829 @@
+#!/usr/bin/env python%0A%0Aimport unittest%0A%0Awords = set(%5B%22dog%22, %22clog%22, %22cat%22, %22mouse%22, %22rat%22, %22can%22,%0A %22fig%22, %22dig%22, %22mud%22, %22a%22, %22an%22, %22duh%22, %22sin%22,%0A %22get%22, %22shit%22, %22done%22, %22all%22, %22glory%22, %22comes%22,%0A %22from%22, %22daring%22, %22to%22, %22begin%22, %5D)%0A%0Adialmap = %7B%0A 'a':2, 'b':2, 'c':2,%0A 'd':3, 'e':3, 'f':3,%0A 'g':4, 'h':4, 'i':4,%0A 'j':5, 'k':5, 'l':5,%0A 'm':6, 'n':6, 'o':6,%0A 'p':7, 'q':7, 'r':7, 's':7,%0A 't':8, 'u':8, 'v':8,%0A 'w':9, 'x':9, 'y':9, 'z':9,%0A %7D%0A%0Adef tonumbers(word):%0A %22%22%22Convert the string 'word' into the equivalent string of phone-dailing numbers%22%22%22%0A numstr = ''%0A for c in word.lower():%0A numstr += str(dialmap%5Bc%5D)%0A return numstr%0A %0Awordsnum = set()%0Afor w in words:%0A wordsnum.add(tonumbers(w))%0A%0Adef isword(number):%0A %22%22%22Return True if the string of decimal digits 'number' can be represented%0A as the concatenation of words in the 'words' set, otherwise False.%22%22%22%0A if number in wordsnum:%0A return True%0A if number in isword.memoized:%0A return isword.memoized%5Bnumber%5D%0A for i in range(1, len(number)):%0A a = number%5Bi:%5D%0A b = number%5B:i%5D%0A #print locals()%0A if isword(a) and isword(b):%0A isword.memoized%5Bnumber%5D = True%0A return True%0A isword.memoized%5Bnumber%5D = False%0A return False%0Aisword.memoized = %7B%7D%0A%0A%0Aclass TestIsWord(unittest.TestCase):%0A def testGetShitDone(self):%0A self.assertTrue(isword(tonumbers('getshitdone')))%0A def testHas1(self):%0A self.assertFalse(isword('1092340345'))%0A def testDogDog(self):%0A self.assertTrue(isword(tonumbers('dogdog')))%0A def testMyNumber1(self):%0A self.assertFalse(isword('7342393309'))%0A def testMyNumber2(self):%0A self.assertFalse(isword('4082434090'))%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()%0A
|
|
904ac79bd278634c97f6f43f4d85bc0c2316117b
|
add configuration example
|
scripts/exchange-bots/config-example.py
|
scripts/exchange-bots/config-example.py
|
Python
| 0.000001 |
@@ -0,0 +1,2040 @@
+from bot.strategies.maker import MakerRamp, MakerSellBuyWalls%0A%0Awallet_host = %22localhost%22%0Awallet_port = 8092%0Awallet_user = %22%22%0Awallet_password = %22%22%0Awitness_url = %22ws://testnet.bitshares.eu/ws%22%0Awitness_user = %22%22%0Awitness_password = %22%22%0A%0Awatch_markets = %5B%22PEG.PARITY : TEST%22, %22PEG.RANDOM : TEST%22%5D%0Amarket_separator = %22 : %22%0A%0Abots = %7B%7D%0A%0A#############################%0A# Ramps%0A#############################%0Abots%5B%22MakerRexp%22%5D = %7B%22bot%22 : MakerRamp,%0A %22markets%22 : %5B%22PEG.PARITY : TEST%22%5D,%0A %22target_price%22 : %22feed%22,%0A %22spread_percentage%22 : 0.2,%0A %22volume_percentage%22 : 30,%0A %22ramp_price_percentage%22 : 2,%0A %22ramp_step_percentage%22 : 0.1,%0A %22ramp_mode%22 : %22linear%22%0A %7D%0Abots%5B%22MakerRamp%22%5D = %7B%22bot%22 : MakerRamp,%0A %22markets%22 : %5B%22PEG.PARITY : TEST%22%5D,%0A %22target_price%22 : %22feed%22,%0A %22spread_percentage%22 : 4,%0A %22volume_percentage%22 : 30,%0A %22ramp_price_percentage%22 : 4,%0A %22ramp_step_percentage%22 : 0.5,%0A %22ramp_mode%22 : %22exponential%22%0A %7D%0A#############################%0A# Walls%0A#############################%0Abots%5B%22MakerWall%22%5D = %7B%22bot%22 : MakerSellBuyWalls,%0A %22markets%22 : %5B%22PEG.PARITY : TEST%22%5D,%0A %22target_price%22 : %22feed%22,%0A %22spread_percentage%22 : 5,%0A %22volume_percentage%22 : 10,%0A %22symmetric_sides%22 : True,%0A %7D%0Abots%5B%22MakerBridge%22%5D = %7B%22bot%22 : MakerSellBuyWalls,%0A %22markets%22 : %5B%22PEG.PARITY : TEST%22%5D,%0A %22target_price%22 : 1.0,%0A %22spread_percentage%22 : 90,%0A %22volume_percentage%22 : 10,%0A %22symmetric_sides%22 : True,%0A %7D%0A%0A%0Aaccount = %22xeroc%22%0Asafe_mode = False%0A
|
|
d5cfa59c586053d911f8725dfd321d8ad0eecce6
|
Fix context check: it must exist before being accessed
|
account_voucher_payment_method/account_voucher.py
|
account_voucher_payment_method/account_voucher.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields, orm
import time
from openerp.tools.translate import _
from lxml import etree
class accountVoucherinherit(orm.Model):
    """Extend account.voucher with supplier payment-method filtering and a
    computed commercial exchange rate."""
    _inherit = 'account.voucher'

    def fields_view_get(self, cr, uid, view_id=None, view_type=False,
                        context=None, toolbar=False, submenu=False):
        """Customize the voucher view for supplier payments.

        For purchase/payment vouchers, restrict the selectable journals to
        those flagged as supplier payment methods and drop the selection
        widget so stale customer values are not shown.
        """
        res = super(accountVoucherinherit, self).fields_view_get(
            cr, uid, view_id=view_id, view_type=view_type, context=context,
            toolbar=toolbar, submenu=submenu)
        doc = etree.XML(res['arch'])
        # In this section is when some differences between supplier and
        # customer are established.
        # BUG FIX: context defaults to None, so it must be checked before
        # calling .get() on it.
        if context and context.get('type', 'sale') in ('purchase', 'payment'):
            # Separate the journal types.
            nodes = doc.xpath("//field[@name='journal_id']")
            for node in nodes:
                # Add a domain when the view is from supplier.
                node.set('domain', "[('payment_method_supplier','=', True)]")
                # Remove selection widget (if the widget is added, values
                # from customer are showed in supplier wizard; the wizard
                # doesn't refresh values).
                node.set('widget', '')
            res['arch'] = etree.tostring(doc)
        return res

    def _compute_exchange_rate(self, cr, uid, ids, field_names, args,
                               context=None):
        """Functional-field getter: today's exchange rate between the
        voucher currency and the company currency.

        Returns a dict mapping voucher id -> rate (float).
        """
        res_user_obj = self.pool.get('res.users')
        currency_obj = self.pool.get('res.currency')
        res = {}
        # Company currency for the logged-in user.
        res_user = res_user_obj.browse(cr, uid, uid, context=context)
        company_currency = res_user.company_id.currency_id
        # Today's date.
        now = time.strftime('%Y-%m-%d')
        for voucher in self.browse(cr, uid, ids, context=context):
            # Depending on the sequence, set initial and final currency;
            # the currency with the lower sequence is the conversion base.
            if company_currency.sequence < voucher.currency_id.sequence:
                initial_currency = company_currency
                final_currency = voucher.currency_id
            else:
                initial_currency = voucher.currency_id
                final_currency = company_currency
            # Get the exchange rate in the order established above.
            res[voucher.id] = currency_obj.get_exchange_rate(
                cr, uid, initial_currency, final_currency, now,
                context=context)
        return res

    _columns = {
        'voucher_payment_rate': fields.function(
            _compute_exchange_rate, string='Exchange Rate Commercial',
            type='float',),
        'voucher_payment_rate_currency_id': fields.related(
            'company_id', 'currency_id', string='Company Currency',
            type='many2one', relation='res.currency',),
    }
|
Python
| 0.994592 |
@@ -1649,16 +1649,28 @@
if
+context and
context.
@@ -3878,28 +3878,29 @@
%0A %0A
+%0A
|
60efa5bbab4463714df8dd93c1c7c606bee4dbaf
|
add giphy plugin
|
plugins/giphy.py
|
plugins/giphy.py
|
Python
| 0 |
@@ -0,0 +1,378 @@
+from util import http, hook%0A%[email protected]_key('giphy')%[email protected]('giphy', autohelp=False)%[email protected]('gif', autohelp=False)%[email protected](autohelp=False)%0Adef giphy(inp, api_key=None):%0A %22.giphy %5Bterm%5D -- gets random gif for a term%22%0A%0A data = http.get_json(%22http://api.giphy.com/v1/gifs/random%22, %7B %22api_key%22: api_key, %22tag%22: inp %7D)%0A%0A return data%5B'data'%5D%5B'image_url'%5D%0A%0A
|
|
227a8e0f654c9797a7dedf863f7568d55a6c2f8e
|
add download sample from go-sciter port
|
examples/download.py
|
examples/download.py
|
Python
| 0 |
@@ -0,0 +1,1598 @@
+%22%22%22Go sciter example port.%22%22%22%0A%0Aimport sciter%0A%0Aclass MyEventHandler(sciter.EventHandler):%0A%0A def document_complete(self):%0A print(%22content loaded.%22)%0A %0A pass%0A%0A def on_data_arrived(self, nm):%0A print(%22data arrived, uri:%22, nm.uri, nm.dataSize)%0A pass%0A%0A%0Aclass Frame(sciter.Window):%0A def __init__(self):%0A super().__init__(ismain=True, uni_theme=False, debug=True)%0A pass%0A%0A def on_data_loaded(self, nm):%0A print(%22data loaded, uri:%22, nm.uri, nm.dataSize)%0A pass%0A%0A%0A def load(self, url):%0A self.set_title(%22Download Element Content%22)%0A self.load_html(b'''%3Chtml%3E%3Cbody%3E%3Cspan id='url'%3EUrl To Load%3C/span%3E%3Cframe id='content'%3E%3C/frame%3E%3C/body%3E%3C/html%3E''', %22/%22)%0A%0A # get root element%0A root = self.get_root()%0A%0A # get span#url and frame#content:%0A span = root.find_first('#url')%0A content = root.find_first('#content')%0A%0A # replace span text with url provided%0A text = span.get_text()%0A span.set_text(url)%0A print(%22span:%22, text)%0A%0A # install event handler to content frame to print data_arrived events%0A self.handler = MyEventHandler(element=content)%0A %0A print(%22load content%22)%0A content.request_html(url)%0A pass%0A pass%0A%0Aif __name__ == '__main__':%0A import sys%0A%0A print(%22Sciter version:%22, %22.%22.join(map(str, sciter.version())))%0A%0A if len(sys.argv) %3C 2:%0A sys.exit(%22at least one Sciter compatible page url is needed%22)%0A print(sys.argv%5B1%5D)%0A%0A frame = Frame()%0A frame.load(sys.argv%5B1%5D)%0A frame.expand()%0A frame.run_app(False)%0A
|
|
dff9d7a05e2a522b3dbbd7ea18866c5ba1fc0476
|
add a !stock plugin for stock images
|
plugins/stock.py
|
plugins/stock.py
|
Python
| 0 |
@@ -0,0 +1,698 @@
+%22%22%22!stock %3Csearch term%3E return a stock photo for %3Csearch term%3E%22%22%22%0A%0Afrom random import shuffle%0Aimport re%0A%0Aimport requests%0Afrom bs4 import BeautifulSoup%0A%0Adef stock(searchterm):%0A url = %22http://www.shutterstock.com/cat.mhtml?searchterm=%7B%7D&search_group=&lang=en&language=en&search_source=search_form&version=llv1%22.format(searchterm)%0A r = requests.get(url)%0A soup = BeautifulSoup(r.text)%0A images = %5Bx%5B%22src%22%5D for x in soup.select(%22.gc_clip img%22)%5D%0A shuffle(images)%0A%0A return images%5B0%5D if images else %22%22%0A%0Adef on_message(msg, server):%0A text = msg.get(%22text%22, %22%22)%0A match = re.findall(r%22!stock (.*)%22, text)%0A if not match: return%0A%0A searchterm = match%5B0%5D%0A return stock(searchterm)%0A
|
|
d26a78d3e0695e0bf492910c530beb54b30cdbbc
|
bump version number for development
|
stdeb/__init__.py
|
stdeb/__init__.py
|
# setuptools is required for distutils.commands plugin we use
import logging
import setuptools
# Package version string for the stdeb distribution.
__version__ = '0.4.2'

# Module-wide logger: INFO-level messages written via a stream handler
# with a bare "%(message)s" format (no timestamps or level prefixes).
log = logging.getLogger('stdeb')
log.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)
|
Python
| 0 |
@@ -108,16 +108,20 @@
= '0.4.2
+.git
'%0A%0Alog =
|
1cab72ac3c5f3cea8326ebc97ccae1a8068eb839
|
Add http responses collection module.
|
superview/http.py
|
superview/http.py
|
Python
| 0 |
@@ -0,0 +1,1317 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0AThe various HTTP responses for use in returning proper HTTP codes.%0A%22%22%22%0A%0Afrom django.http import HttpResponse, StreamingHttpResponse%0A%0A%0Aclass HttpCreated(HttpResponse):%0A status_code = 201%0A%0A def __init__(self, *args, **kwargs):%0A location = kwargs.pop('location', '')%0A%0A super(HttpCreated, self).__init__(*args, **kwargs)%0A self%5B'Location'%5D = location%0A%0A%0Aclass HttpAccepted(HttpResponse):%0A status_code = 202%0A%0A%0Aclass HttpNoContent(HttpResponse):%0A status_code = 204%0A%0A%0Aclass HttpMultipleChoices(HttpResponse):%0A status_code = 300%0A%0A%0Aclass HttpSeeOther(HttpResponse):%0A status_code = 303%0A%0A%0Aclass HttpNotModified(HttpResponse):%0A status_code = 304%0A%0A%0Aclass HttpBadRequest(HttpResponse):%0A status_code = 400%0A%0A%0Aclass HttpUnauthorized(HttpResponse):%0A status_code = 401%0A%0A%0Aclass HttpForbidden(HttpResponse):%0A status_code = 403%0A%0A%0Aclass HttpNotFound(HttpResponse):%0A status_code = 404%0A%0A%0Aclass HttpMethodNotAllowed(HttpResponse):%0A status_code = 405%0A%0A%0Aclass HttpConflict(HttpResponse):%0A status_code = 409%0A%0A%0Aclass HttpGone(HttpResponse):%0A status_code = 410%0A%0A%0Aclass HttpTooManyRequests(HttpResponse):%0A status_code = 429%0A%0A%0Aclass HttpApplicationError(HttpResponse):%0A status_code = 500%0A%0A%0Aclass HttpNotImplemented(HttpResponse):%0A status_code = 501%0A%0A%0A%0A
|
|
b6c80ac9bc585602b943e8823f21465e663c9201
|
Modify ccnet_rpc
|
profile/views.py
|
profile/views.py
|
# encoding: utf-8
import simplejson as json
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.template import Context, RequestContext
from django.contrib.auth.decorators import login_required
from seaserv import ccnet_rpc, ccnet_threaded_rpc, get_binding_peerids
from pysearpc import SearpcError
from forms import ProfileForm
from models import Profile
from utils import go_error
from seahub.contacts.models import Contact
@login_required
def list_userids(request):
peer_list = []
try:
peers = ccnet_threaded_rpc.get_peers_by_email(request.user.username)
except:
peers = None
return render_to_response('profile/user_ids.html',
{'peers': peers},
context_instance=RequestContext(request))
def logout_relay(request):
peer_id = request.GET.get('peer_id', '')
try:
ccnet_threaded_rpc.remove_one_binding(request.user.username, peer_id)
except SearpcError, e:
return go_error(request, e.msg)
return HttpResponseRedirect(reverse('list_userids'))
@login_required
def edit_profile(request):
modified = False
if request.method == 'POST':
form = ProfileForm(request.POST)
if form.is_valid():
modified = True
nickname = form.cleaned_data['nickname']
intro = form.cleaned_data['intro']
try:
profile = Profile.objects.get(user=request.user.username)
except Profile.DoesNotExist:
profile = Profile()
profile.user = request.user.username
profile.nickname = nickname
profile.intro = intro
profile.save()
else:
try:
profile = Profile.objects.get(user=request.user.username)
form = ProfileForm({
'nickname': profile.nickname,
'intro': profile.intro,
})
except Profile.DoesNotExist:
form = ProfileForm()
return render_to_response('profile/set_profile.html', {
'form': form,
'modified': modified,
}, context_instance=RequestContext(request))
@login_required
def user_profile(request, user):
user_nickname = ''
user_intro = ''
err_msg = ''
try:
user_check = ccnet_threaded_rpc.get_emailuser(user)
except:
user_check = None
if user_check:
profile = Profile.objects.filter(user=user)
if profile:
profile = profile[0]
user_nickname = profile.nickname
user_intro = profile.intro
else:
err_msg = '该用户不存在'
if user == request.user.username or \
Contact.objects.filter(user_email=request.user.username,
contact_email=user).count() > 0:
new_user = False
else:
new_user = True
return render_to_response('profile/user_profile.html', {
'email': user,
'nickname': user_nickname,
'intro': user_intro,
'new_user': new_user,
'err_msg': err_msg,
},
context_instance=RequestContext(request))
def get_user_profile(request, user):
data = {
'email': user,
'user_nickname': '',
'user_intro': '',
'err_msg': '',
'new_user': ''
}
content_type = 'application/json; charset=utf-8'
try:
user_check = ccnet_rpc.get_emailuser(user)
except:
user_check = None
if user_check:
profile = Profile.objects.filter(user=user)
if profile:
profile = profile[0]
data['user_nickname'] = profile.nickname
data['user_intro'] = profile.intro
else:
data['err_msg'] = '该用户不存在'
if user == request.user.username or \
Contact.objects.filter(user_email=request.user.username,
contact_email=user).count() > 0:
data['new_user'] = False
else:
data['new_user'] = True
return HttpResponse(json.dumps(data), content_type=content_type)
|
Python
| 0.000002 |
@@ -3779,16 +3779,25 @@
= ccnet_
+threaded_
rpc.get_
|
86baa4f437cf3892c15a56e8331c19b6d2e63b1d
|
Add a script for generating unicode name table
|
lib/gen-names.py
|
lib/gen-names.py
|
Python
| 0 |
@@ -0,0 +1,1486 @@
+#!/usr/bin/python3%0A%0A# Input: https://www.unicode.org/Public/UNIDATA/UnicodeData.txt%0A%0Aimport io%0Aimport re%0A%0Aclass Builder(object):%0A def __init__(self):%0A pass%0A%0A def read(self, infile):%0A names = %5B%5D%0A for line in infile:%0A if line.startswith('#'):%0A continue%0A line = line.strip()%0A if len(line) == 0:%0A continue%0A (codepoint, name, _other) = line.split(';', 2)%0A%0A # Names starting with %3C are signifying controls and special blocks,%0A # they aren't useful for us%0A if name%5B0%5D == '%3C':%0A continue%0A%0A names.append((codepoint, name))%0A%0A return names%0A%0A def write(self, data):%0A print('''%5C%0Astruct CharacterName%0A%7B%0A gunichar uc;%0A const char *name;%0A%7D;''')%0A print('static const struct CharacterName character_names%5B%5D =%5Cn %7B')%0A s = ''%0A offset = 0%0A for codepoint, name in data:%0A print(' %7B%7B 0x%7B0%7D, %22%7B1%7D%22 %7D%7D,'.format(codepoint, name))%0A print(' %7D;')%0A%0Aif __name__ == '__main__':%0A import argparse%0A%0A parser = argparse.ArgumentParser(description='build')%0A parser.add_argument('infile', type=argparse.FileType('r'),%0A help='input file')%0A args = parser.parse_args()%0A%0A builder = Builder()%0A # FIXME: argparse.FileType(encoding=...) is available since Python 3.4%0A data = builder.read(io.open(args.infile.name, encoding='utf_8_sig'))%0A builder.write(data)%0A
|
|
a50190fe04e434ce70f6b02027e281a896dbb81b
|
Create Python password hasher
|
passwordhash.py
|
passwordhash.py
|
Python
| 0.000107 |
@@ -0,0 +1,851 @@
+#!/usr/bin/env python%0A# Password Hashing Module for Linux%0A# Author: Dave Russell Jr (drussell393)%0A%0Afrom getpass import getpass%0Aimport crypt%0A%0A# If you like Python 2, please to be importing.%0Aimport os%0Aimport binascii%0A%0Apassword = getpass('Enter your desired password, Harry: ')%0ApasswordConfirm = getpass('Confirm your password: ')%0A%0Aif (password == passwordConfirm):%0A # Python 2 alternative, os.urandom()%0A passwordHash = crypt.crypt(password, '$6$' + binascii.hexlify(os.urandom(4)))%0A%0A # Python 3 likes my crypt (mksalt doesn't work in Python 2)%0A #passwordHash = crypt.crypt(password, crypt.mksalt(crypt.METHOD_SHA512))%0A%0A print('You%5C're a wizard, Harry: ' + passwordHash)%0Aelse:%0A print('Dobby has heard of your greatness, sir. But of your goodness, Dobby never knew.')%0A print('Your confirmation password didn%5C't match, Oh Great One.')%0A
|
|
961e9b031b94a0533c53c29787660ab954b6db37
|
Add patch-weight.py, patch weight into phrase segs.
|
patch-weight.py
|
patch-weight.py
|
Python
| 0.000001 |
@@ -0,0 +1,2422 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8%0A%0Afrom codecs import open%0Afrom argparse import ArgumentParser%0A%0ADEBUG_FLAG = False%0A%0Adef load_weight_dict(weight_file):%0A%0A weight_dict = %7B%7D%0A with open(weight_file, 'r') as fd:%0A for line in fd:%0A splited_line = line.strip().split()%0A if len(splited_line) != 2:%0A continue%0A word, weight = splited_line%0A if word not in weight_dict:%0A weight_dict%5Bword%5D = float(weight)%0A return weight_dict%0A%0Adef main():%0A%0A parser = ArgumentParser()%0A parser.add_argument(%22weight_file%22, help = %22word-weight in tsv format%22)%0A parser.add_argument(%22phrase_file%22, help = %22phrase segment file (original phrase and segmented phrase) one phrase per line in tsv format%22)%0A args = parser.parse_args()%0A%0A phrase_file = args.phrase_file%0A weight_file = args.weight_file%0A%0A weight_dict = load_weight_dict(weight_file)%0A word_set = set(weight_dict)%0A%0A with open(phrase_file, 'r') as fd:%0A for line in fd:%0A splited_line = line.strip().split(%22%5Ct%22)%0A if len(splited_line) != 2:%0A continue%0A phrase_str, phrase_seg = splited_line%0A phrase_seg_list = phrase_seg.split()%0A phrase_seg_set = set(phrase_seg_list)%0A outside_word_set = phrase_seg_set - word_set%0A if len(outside_word_set) %3E 0:%0A if DEBUG_FLAG:%0A print %22###outsidewords###%22, %22 %22.join(list(outside_word_set))%0A for word in outside_word_set:%0A weight_dict%5Bword%5D = 0.0%0A weight_sum = sum(%5Bweight_dict%5Bword%5D for word in phrase_seg_list%5D)%0A if DEBUG_FLAG:%0A if weight_sum == 0.0:%0A res_list = %5B%22%25s/%25s%22 %25 (word, weight_dict%5Bword%5D) for word in phrase_seg_list%5D%0A else:%0A res_list = %5B%22%25s/%25s%22 %25 (word, weight_dict%5Bword%5D / weight_sum) for word in phrase_seg_list%5D%0A print %22%25s%5Ct%25s%22 %25 (phrase_str, %22 %22.join(res_list))%0A else:%0A if weight_sum == 0.0:%0A res_list = %5B%22%25s%02%25s%22 %25 (word, weight_dict%5Bword%5D) for word in phrase_seg_list%5D%0A else:%0A res_list 
= %5B%22%25s%02%25s%22 %25 (word, weight_dict%5Bword%5D / weight_sum) for word in phrase_seg_list%5D%0A print %22%25s%5Ct%25s%22 %25 (phrase_str, %22%01%22.join(res_list))%0A %0A %0Aif __name__ == %22__main__%22:%0A%0A main()%0A%0A
|
|
9e51fc305f21a4031b6ec94ccfa39ef1e611da9e
|
add script to compare DFAs.
|
src/trusted/validator_ragel/unreviewed/compare_dfa.py
|
src/trusted/validator_ragel/unreviewed/compare_dfa.py
|
Python
| 0.000011 |
@@ -0,0 +1,1241 @@
+#!/usr/bin/python%0A# Copyright (c) 2013 The Native Client Authors. All rights reserved.%0A# Use of this source code is governed by a BSD-style license that can be%0A# found in the LICENSE file.%0A%0Aimport sys%0Aimport os%0Asys.path.append(os.path.join(os.path.dirname(__file__), '..'))%0Aimport dfa_parser%0A%0A%0Avisited_pairs = set()%0A%0A%0Adef Traverse(state1, state2, path):%0A if (state1, state2) in visited_pairs:%0A return%0A%0A if state1.is_accepting != state2.is_accepting:%0A print map(hex, path)%0A print state1.is_accepting%0A print state2.is_accepting%0A sys.exit(1)%0A%0A visited_pairs.add((state1, state2))%0A%0A for byte in range(256):%0A new_path = path + %5Bbyte%5D%0A%0A t1 = state1.forward_transitions.get(byte)%0A t2 = state2.forward_transitions.get(byte)%0A if (t1 is None) != (t2 is None):%0A print map(hex, new_path)%0A print t1 is not None%0A print t2 is not None%0A sys.exit(1)%0A%0A if t1 is None:%0A continue%0A%0A Traverse(t1.to_state, t2.to_state, new_path)%0A%0A%0Adef main():%0A filename1, filename2 = sys.argv%5B1:%5D%0A%0A _, start_state1 = dfa_parser.ParseXml(filename1)%0A _, start_state2 = dfa_parser.ParseXml(filename2)%0A%0A Traverse(start_state1, start_state2, %5B%5D)%0A%0A print 'automata are equivalent'%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
3cdee1d40d3370686c9bff435a4575e985c121e9
|
Create __init__.py
|
pfc/__init__.py
|
pfc/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1,10 @@
+%22%22%22pfc%22%22%22%0A
|
|
438471a4a3b41637c5c1eb3c2e07d9d8ca81ee09
|
Add a stats ./manage.py command
|
www/management/commands/stats.py
|
www/management/commands/stats.py
|
Python
| 0.000048 |
@@ -0,0 +1,1737 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0A# Copyright 2014, Cercle Informatique ASBL. All rights reserved.%0A#%0A# This program is free software: you can redistribute it and/or modify it%0A# under the terms of the GNU Affero General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or (at%0A# your option) any later version.%0A#%0A# This software was made by hast, C4, ititou at UrLab, ULB's hackerspace%0A%0Afrom django.core.management.base import BaseCommand%0A%0Afrom users.models import User%0Afrom telepathy.models import Thread, Message%0Afrom documents.models import Document%0A%0A%0Aclass Command(BaseCommand):%0A%0A help = 'Numbers on b402'%0A%0A def handle(self, *args, **options):%0A Print = self.stdout.write%0A%0A Print(%22User summary :%5Cn%22)%0A Print(%22%7B%7D users%5Cn%22.format(User.objects.count()))%0A Print(%22%5Cn%22)%0A%0A Print(%22Document summary :%5Cn%22)%0A Print(%22%7B%7D documents%5Cn%22.format(Document.objects.count()))%0A Print(%22 - %7B%7D IN_QUEUE%5Cn%22.format(Document.objects.filter(state=%22IN_QUEUE%22).count()))%0A Print(%22 - %7B%7D PROCESSING%5Cn%22.format(Document.objects.filter(state=%22PROCESSING%22).count()))%0A Print(%22 - %7B%7D PREPARING%5Cn%22.format(Document.objects.filter(state=%22PREPARING%22).count()))%0A Print(%22 - %7B%7D READY_TO_QUEUE%5Cn%22.format(Document.objects.filter(state=%22READY_TO_QUEUE%22).count()))%0A Print(%22 - %7B%7D ERROR%5Cn%22.format(Document.objects.filter(state=%22ERROR%22).count()))%0A Print(%22 - %7B%7D DONE%5Cn%22.format(Document.objects.filter(state=%22DONE%22).count()))%0A Print(%22%5Cn%22)%0A%0A Print(%22Thread summary :%5Cn%22)%0A Print(%22%7B%7D threads%5Cn%22.format(Thread.objects.count()))%0A Print(%22%7B%7D messages%5Cn%22.format(Message.objects.count()))%0A Print(%22%5Cn%22)%0A
|
|
633e540a1718a5cc515725b13d3f1740bb950bb6
|
Use GitHub URL for ImageMagick
|
var/spack/repos/builtin/packages/ImageMagick/package.py
|
var/spack/repos/builtin/packages/ImageMagick/package.py
|
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Imagemagick(Package):
"""ImageMagick is a software suite to create, edit, compose,
or convert bitmap images."""
homepage = "http://www.imagemagick.org"
url = "http://www.imagemagick.org/download/ImageMagick-7.0.2-6.tar.gz"
version('7.0.2-6', 'c29c98d2496fbc66adb05a28d8fad21a')
depends_on('jpeg')
depends_on('libtool', type='build')
depends_on('libpng')
depends_on('freetype')
depends_on('fontconfig')
depends_on('libtiff')
depends_on('ghostscript')
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix))
make()
make('check')
make('install')
|
Python
| 0 |
@@ -1420,51 +1420,41 @@
url
-
= %22http
+s
://
-www.i
+github.com/I
mage
-m
+M
agick
-.org/download
/Ima
@@ -1465,16 +1465,24 @@
gick
--
+/archive/
7.0.2-
-6
+7
.tar
@@ -1510,45 +1510,104 @@
0.2-
-6
+7
', 'c
-29c98d2496fbc66adb05a28d8fad21a
+59cdc8df50e481b2bd1afe09ac24c08')%0A version('7.0.2-6', 'aa5689129c39a5146a3212bf5f26d478
')%0A%0A
@@ -1803,24 +1803,160 @@
stscript')%0A%0A
+ def url_for_version(self, version):%0A return %22https://github.com/ImageMagick/ImageMagick/archive/%7B0%7D.tar.gz%22.format(version)%0A%0A
def inst
|
3ba109622c24bd52f32e605c523249e1c26b0207
|
Add regression test with non ' ' space character as token
|
spacy/tests/regression/test_issue834.py
|
spacy/tests/regression/test_issue834.py
|
Python
| 0.99974 |
@@ -0,0 +1,327 @@
+# coding: utf-8%0A%0Afrom io import StringIO%0A%0Aword2vec_str = %22%22%22, -0.046107 -0.035951 -0.560418%0Ade -0.648927 -0.400976 -0.527124%0A. 0.113685 0.439990 -0.634510%0A%C2%A0 -1.499184 -0.184280 -0.598371%22%22%22%0A%0A%0Adef test_issue834(en_vocab):%0A f = StringIO(word2vec_str)%0A vector_length = en_vocab.load_vectors(f)%0A assert vector_length == 3%0A
|
|
d11707e651d4b44ef706f62677ba6a617102f239
|
Add test-code
|
test/post_test.py
|
test/post_test.py
|
Python
| 0.000083 |
@@ -0,0 +1,228 @@
+import json%0Aimport urllib2%0A%0Adata = %7B %0A %22cells%22:%5B%22ECT%22,%22VISC%22, %22AAA%22%5D%0A%7D%0A%0Areq = urllib2.Request('http://localhost:5000/api')%0Areq.add_header('Content-Type', 'application/json')%0A%0Aresponse = urllib2.urlopen(req, json.dumps(data))%0A
|
|
480852bb1dd6796b7fb12e40edc924b9a4dbee60
|
Add tests to cover no framework, no problem
|
test/test_misc.py
|
test/test_misc.py
|
Python
| 0 |
@@ -0,0 +1,385 @@
+import unittest%0A%0Afrom .helpers import run_module%0A%0A%0Aclass MiscTests(unittest.TestCase):%0A def setUp(self):%0A self.name = %22benchmarker%22%0A%0A def test_no_framework(self):%0A with self.assertRaises(Exception):%0A run_module(self.name)%0A%0A def test_no_problem(self):%0A with self.assertRaises(Exception):%0A run_module(self.name, %22--framework=pytorch%22)%0A
|
|
431760d7a840543901fc1ebc0069ecd384302101
|
Add tests/conftest.py for py.test
|
tests/conftest.py
|
tests/conftest.py
|
Python
| 0 |
@@ -0,0 +1,1759 @@
+import decimal%0Aimport os%0Atry:%0A # Python 2%0A from ConfigParser import ConfigParser%0Aexcept ImportError:%0A # Python 3%0A from configparser import ConfigParser%0A%0Aimport tests.helpers as th%0Afrom .helpers import cfgpath, clear_db, get_app_lock, release_app_lock%0A%0A_parser = ConfigParser(%7B%0A 'server': 'localhost',%0A 'username': 'sa',%0A 'password': '',%0A 'database': 'tempdb',%0A 'port': '1433',%0A 'ipaddress': '127.0.0.1',%0A 'instance': '',%0A%7D)%0A%0Adef pytest_addoption(parser):%0A parser.addoption(%0A %22--pymssql-section%22,%0A type=%22string%22,%0A default=os.environ.get('PYMSSQL_TEST_CONFIG', 'DEFAULT'),%0A help=%22The name of the section to use from tests.cfg%22%0A )%0A%0Adef pytest_configure(config):%0A _parser.read(cfgpath)%0A section = config.getoption('--pymssql-section')%0A%0A if not _parser.has_section(section) and section != 'DEFAULT':%0A raise ValueError('the tests.cfg file does not have section: %25s' %25 section)%0A%0A th.config.server = os.getenv('PYMSSQL_TEST_SERVER') or _parser.get(section, 'server')%0A th.config.user = os.getenv('PYMSSQL_TEST_USERNAME') or _parser.get(section, 'username')%0A th.config.password = os.getenv('PYMSSQL_TEST_PASSWORD') or _parser.get(section, 'password')%0A th.config.database = os.getenv('PYMSSQL_TEST_DATABASE') or _parser.get(section, 'database')%0A th.config.port = os.getenv('PYMSSQL_TEST_PORT') or _parser.get(section, 'port')%0A th.config.ipaddress = os.getenv('PYMSSQL_TEST_IPADDRESS') or _parser.get(section, 'ipaddress')%0A th.config.instance = os.getenv('PYMSSQL_TEST_INSTANCE') or _parser.get(section, 'instance')%0A th.config.orig_decimal_prec = decimal.getcontext().prec%0A%0A get_app_lock()%0A clear_db()%0A%0Adef pytest_unconfigure(config):%0A release_app_lock()%0A
|
|
bceee12d94924931ff73b45d2ed3de8b3d71522c
|
Add case fixture to top-level conftest.py in tests
|
tests/conftest.py
|
tests/conftest.py
|
Python
| 0.000001 |
@@ -0,0 +1,131 @@
+import pytest%0A%0Afrom gaphor.conftest import Case%0A%0A%[email protected]%0Adef case():%0A case = Case()%0A yield case%0A case.shutdown()%0A
|
|
0146058fe8a5c91ce33102bb55f5f087428a03a3
|
Add tests for get_keeper_token
|
tests/test_cli.py
|
tests/test_cli.py
|
Python
| 0.000001 |
@@ -0,0 +1,1228 @@
+%22%22%22Test the ltd-mason CLI features.%22%22%22%0A%0Afrom base64 import b64encode%0A%0Aimport responses%0Aimport pytest%0A%0Afrom ltdmason.cli import get_keeper_token%0A%0A%[email protected]%0Adef test_get_keeper_token():%0A %22%22%22Test getting a token from LTD Keeper.%22%22%22%0A expected_json = %7B'token': 'shake-it-off-shake-it-off'%7D%0A responses.add(%0A responses.GET,%0A 'http://localhost:5000/token',%0A json=expected_json,%0A status=200)%0A%0A _auth_header = 'Basic ' + b64encode(('user:pass')%0A .encode('utf-8')).decode('utf-8')%0A%0A token = get_keeper_token('http://localhost:5000', 'user', 'pass')%0A%0A assert responses.calls%5B0%5D.request.url == 'http://localhost:5000/token'%0A assert responses.calls%5B0%5D.request.headers%5B'Authorization'%5D %5C%0A == _auth_header%0A assert token == 'shake-it-off-shake-it-off'%0A%0A%[email protected]%0Adef test_get_keeper_token_error():%0A %22%22%22Test with server error.%22%22%22%0A expected_json = %7B'token': 'shake-it-off-shake-it-off'%7D%0A responses.add(%0A responses.GET,%0A 'http://localhost:5000/token',%0A json=expected_json,%0A status=401)%0A%0A with pytest.raises(RuntimeError):%0A get_keeper_token('http://localhost:5000', 'user', 'pass')%0A
|
|
e1e8bef8c2c916505e9bdc0ea37c81a7626db6af
|
Add int tests
|
tests/test_int.py
|
tests/test_int.py
|
Python
| 0.000061 |
@@ -0,0 +1,839 @@
+import pytest%0A%0Aimport parsenvy%0A%0A%0Adef test_int_positive(monkeypatch):%0A %22%22%22'13'%22%22%22%0A monkeypatch.setenv(%22foo%22, %2213%22)%0A%0A assert parsenvy.int(%22foo%22) == 13%0A%0A%0Adef test_int_negative(monkeypatch):%0A %22%22%22'-42'%22%22%22%0A monkeypatch.setenv(%22foo%22, %22-42%22)%0A%0A assert parsenvy.int(%22foo%22) == -42%0A%0A%0Adef test_int_zero(monkeypatch):%0A %22%22%22'0'%22%22%22%0A monkeypatch.setenv(%22foo%22, %220%22)%0A%0A assert parsenvy.int(%22foo%22) == 0%0A%0A%0Adef test_int_negative_zero(monkeypatch):%0A %22%22%22'-0'%22%22%22%0A monkeypatch.setenv(%22foo%22, %22-0%22)%0A%0A assert parsenvy.int(%22foo%22) == 0%0A%0A%0Adef test_int_invalid(monkeypatch):%0A %22%22%22'bar'%22%22%22%0A monkeypatch.setenv(%22foo%22, %22bar%22)%0A%0A with pytest.raises(TypeError):%0A parsenvy.int(%22foo%22)%0A%0A%0Adef test_int_empty(monkeypatch):%0A %22%22%22''%22%22%22%0A monkeypatch.setenv(%22foo%22, %22%22)%0A%0A with pytest.raises(TypeError):%0A parsenvy.int(%22foo%22)%0A
|
|
3b66fbc844b023003420db7a9986811110f55489
|
Add tests for the run() function
|
tests/test_run.py
|
tests/test_run.py
|
Python
| 0.000011 |
@@ -0,0 +1,1720 @@
+import sys%0Aimport tempfile%0Aimport unittest%0A%0Atry:%0A from StringIO import StringIO%0Aexcept ImportError:%0A from io import StringIO%0A%0Aimport icon_font_to_png%0A%0Aclass TestRun(unittest.TestCase):%0A def create_css_file(self, contents):%0A css_file = tempfile.NamedTemporaryFile()%0A css_file.write(contents.encode('utf-8'))%0A css_file.flush()%0A return css_file%0A%0A def test_usage(self):%0A orig_stderr = sys.stderr%0A sys.stderr = StringIO()%0A%0A self.assertRaises(SystemExit, icon_font_to_png.run,%0A %5B'icon_font_to_png.py'%5D)%0A %0A err = sys.stderr.getvalue().strip()%0A self.assertRegexpMatches(err, '%5Eusage: .*')%0A %0A sys.stderr = orig_stderr%0A%0A def test_list(self):%0A css_file = self.create_css_file(%0A %22.foo-xyzzy:before %7B content: '%5C%5Cf003' %7D%5Cn%22%0A %22.foo-baz:before %7B content: '%5C%5Cf002' %7D%5Cn%22%0A %22.foo-bar:before %7B content: '%5C%5Cf001' %7D%5Cn%22%0A )%0A%0A orig_stdout = sys.stdout%0A sys.stdout = StringIO()%0A%0A self.assertRaisesRegexp(SystemExit, '%5E0', %0A icon_font_to_png.run, %5B'foo.ttf', css_file.name, 'bar', '--list'%5D)%0A%0A out = sys.stdout.getvalue()%0A self.assertEqual(out,%0A %22bar%5Cn%22%0A %22baz%5Cn%22%0A %22xyzzy%5Cn%22%0A )%0A%0A sys.stdout = StringIO()%0A%0A self.assertRaisesRegexp(SystemExit, '%5E0',%0A icon_font_to_png.run, %5B'foo.ttf', css_file.name, 'bar', '--list',%0A '--keep-prefix'%5D)%0A%0A out = sys.stdout.getvalue()%0A self.assertEqual(out,%0A %22foo-bar%5Cn%22%0A %22foo-baz%5Cn%22%0A %22foo-xyzzy%5Cn%22%0A )%0A%0A sys.stdout = orig_stdout%0A%0Aif __name__ == '__main__':%0A unittest.main%0A
|
|
578de6c57f9698c7e273af06d1e815f71269bb18
|
Add a sample python file interesting to debug
|
tests/to_debug.py
|
tests/to_debug.py
|
Python
| 0 |
@@ -0,0 +1,2414 @@
+import sys%0Aimport os%0Aimport time%0Aimport threading%0Aimport ikpdb%0A%0ATEST_MULTI_THREADING = False%0ATEST_EXCEPTION_PROPAGATION = False%0ATEST_POSTMORTEM = True%0ATEST_SYS_EXIT = 0%0ATEST_STEPPING = False%0A%0A# Note that ikpdb.set_trace() will reset/mess breakpoints set using GUI%0ATEST_SET_TRACE = False %0A%0ATCB = TEST_CONDITIONAL_BREAKPOINT = True%0A%0Aclass Worker(object):%0A def __init__(self):%0A self._running = True%0A %0A def terminate(self):%0A self._running = False%0A %0A def run(self, n):%0A work_count = n%0A while self._running and n %3E 0:%0A print %22Worker: Doing iteration: %25s%22 %25 (work_count - n)%0A if n == 3:%0A pass # ikpdb.set_trace()%0A n -= 1%0A time.sleep(2)%0A%0Aga = 5%0Agb =%22coucou%22%0Ag_dict = %7B%22Genesis%22: 1, %22Don't Look Back%22: 2, 'array': %5B1,3,%7B'coucou': 3.14%7D%5D%7D%0Aa_tuple = (1,'e', 3.14, %5B'a', 'b'%5D)%0A%0Aclass BigBear:%0A color = %22white%22%0A def __init__(self, name='unknown'):%0A self._name = name%0A %0A def grumble(self):%0A print %22Roaaarrrrrrr%22%0A%0Adef sub_function():%0A return True%0A%0Adef the_function(p_nb_seconds):%0A a_var = 18.3%0A the_function_local_list = %5B1,2,3,'cyril'%5D%0A a_beast = BigBear()%0A print %22ga=%25s%22 %25 ga%0A %0A print %22Hello World%22%0A print %22Ceci est la ligne avec le point d'arret%22%0A for loop_idx in range(p_nb_seconds):%0A print %22hello @ %25s seconds%22 %25 loop_idx%0A time.sleep(1)%0A if loop_idx == 12:%0A if TEST_SET_TRACE:%0A ikpdb.set_trace() # will break on next line%0A pass # Need this for set_trace()%0A a_var = 98.3%0A sub_function() %0A%0A%0Adef sub_raiser():%0A raise Exception(%22Prends ca dans ta bouille%22)%0A%0A%0Adef raiser():%0A try:%0A sub_raiser()%0A except Exception as e:%0A raise e%0A%0A%0Aif __name__=='__main__':%0A b = 0%0A main_bear = BigBear(%22Cyril%22)%0A print %22Type of main_bear=%25s%22 %25 type(main_bear)%0A print %22sys.argv=%25s%22 %25 sys.argv%0A %0A if TEST_SYS_EXIT:%0A sys.exit(TEST_SYS_EXIT)%0A %0A if 
TEST_EXCEPTION_PROPAGATION:%0A raiser()%0A %0A if TEST_MULTI_THREADING:%0A w = Worker()%0A t = threading.Thread(target=w.run, args=(5,))%0A t.start()%0A%0A duration = 2 if TEST_STEPPING else 15%0A the_function(duration)%0A%0A if TEST_MULTI_THREADING:%0A w.terminate()%0A t.join()%0A %0A print %22finished%22%0A %0A if TEST_POSTMORTEM:%0A print 5 / b%0A %0A
|
|
329270ddef5f4da4528750ebc463ffc910325ec8
|
add migration
|
temba/channels/migrations/0066_auto_20170306_1713.py
|
temba/channels/migrations/0066_auto_20170306_1713.py
|
Python
| 0.000001 |
@@ -0,0 +1,749 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.5 on 2017-03-06 17:13%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('channels', '0065_auto_20170228_0837'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='channelsession',%0A name='status',%0A field=models.CharField(choices=%5B('P', 'Pending'), ('Q', 'Queued'), ('R', 'Ringing'), ('I', 'In Progress'), ('D', 'Complete'), ('B', 'Busy'), ('F', 'Failed'), ('N', 'No Answer'), ('C', 'Canceled'), ('X', 'Interrupted'), ('T', 'Triggered'), ('A', 'Initiated')%5D, default='P', help_text='The status of this session', max_length=1),%0A ),%0A %5D%0A
|
|
a8caef202ba0fd6909359241ff385eca762aca1f
|
Add echo effect
|
quack/effects.py
|
quack/effects.py
|
Python
| 0.000002 |
@@ -0,0 +1,660 @@
+# Author: Martin McBride%0A# Created: 2018-09-25%0A# Copyright (C) 2018, Martin McBride%0A# License: MIT%0A%0Aimport math%0Aimport numpy as np%0Afrom quack.buffer import create_buffer%0A%0A%0Adef echo(params, source, delay, strength):%0A '''%0A Create an echo%0A :param params:%0A :param source:%0A :param delay:%0A :param strength:%0A :return:%0A '''%0A source = create_buffer(params, source)%0A delay = create_buffer(params, delay)%0A strength = create_buffer(params, strength)%0A output = source%5B:%5D%0A for i in range(params.length):%0A d = int(i - delay%5Bi%5D)%0A if 0 %3C= d %3C params.length:%0A output%5Bi%5D += source%5Bd%5D*strength%5Bi%5D%0A return output%0A
|
|
bdd7016fe8f41abdc8562d114efc41622916a675
|
Create startBackEnd.py
|
startBackEnd.py
|
startBackEnd.py
|
Python
| 0.000002 |
@@ -0,0 +1,261 @@
+#!/usr/bin/python%0Aimport boto.ec2%0Aconn = boto.ec2.connect_to_region(%22eu-central-1%22, aws_access_key_id='AKIAI111111111111111', aws_secret_access_key='keyyyyy')%0Ainstance = conn.get_all_instances(instance_ids=%5B'i-40eb8111'%5D)%0Aprint instance%5B0%5D.instances%5B0%5D.start()%0A
|
|
f6f2c6fc2a51bb3243d9b99ab1093809a2d1a5bb
|
Add script that tests AI players
|
test_players.py
|
test_players.py
|
Python
| 0.000001 |
@@ -0,0 +1,920 @@
+from AI import *%0Aimport random%0A%0Adef RandomPlayer(game):%0A return 0, random.choice(game.get_available_moves())%0A%0Adef ABPlayer(game):%0A return alpha_beta_search(game, 8, -np.inf, np.inf, True, evaluate_base)%0A%0Adef ABChainPlayer1(game):%0A return alpha_beta_search(game, 7, -np.inf, np.inf, True, evaluate_chain_len)%0A%0Adef ABChainPlayer2(game):%0A return alpha_beta_search(game, 7, -np.inf, np.inf, True, evaluate_chain_count)%0A%0A%0Aplayers = %5BABChainPlayer2, ABPlayer%5D%0Aplayer_names = tuple((map(lambda x: x.__name__, players)))%0A%0Aprint %22%25s v. %25s%22 %25 player_names%0Agame = DotsAndBoxes(-1, 4, 4)%0A%0Awhile not game.is_over():%0A play_fn = players%5Bgame.turn == 1%5D%0A print %22%5CtTurn: %25s%22 %25 (player_names%5Bgame.turn == 1%5D)%0A score, move = play_fn(game)%0A game.play(move)%0A print %22%5CtPlayed: %25d %25d%22 %25 (move)%0A print %22%5CtEvaluated score: %25d%5Cn%22 %25 (score)%0A%0Aprint %22Winner: %25s%22 %25 (player_names%5Bnp.argmax(game.score)%5D)%0Aprint game.score
|
|
0cf85c1ab68ddc50787e6a09f3604320d18118b4
|
Add UniqueForFieldsMixin
|
django_more/mixins.py
|
django_more/mixins.py
|
Python
| 0 |
@@ -0,0 +1,1719 @@
+from django.db.models.options import normalize_together%0Afrom django.utils.functional import cached_property%0A%0A%0A# Used by OrderByField to allow for unique_together constraints to be field declared%0Aclass UniqueForFieldsMixin:%0A %22%22%22 Mixin first to a Field to add a unique_for_fields field option %22%22%22%0A unique_for_fields = None%0A%0A def __init__(self, unique_for_fields=None, *args, **kwargs):%0A if unique_for_fields:%0A self.unique_for_fields = tuple(unique_for_fields)%0A # If unique_for_fields then any unique option is irrelevant%0A kwargs.pop('unique', None)%0A super().__init__(*args, **kwargs)%0A%0A def deconstruct(self):%0A name, path, args, kwargs = super().deconstruct()%0A if self.unique_for_fields:%0A kwargs%5B'unique_for_fields'%5D = self.unique_for_fields%0A return name, path, args, kwargs%0A%0A def contribute_to_class(self, cls, *args, **kwargs):%0A super().contribute_to_class(cls, *args, **kwargs)%0A%0A # Add any necessary unique_together index to the model%0A if self.unique_for_fields:%0A # Alter only original_attr to fake being a declared unique_together%0A # Cannot modify cls._meta.unique_together as it breaks state consistency for migrations%0A ut = set((self.unique_together, )).union(normalize_together(cls._meta.original_attrs.get('unique_together')))%0A cls._meta.original_attrs%5B'unique_together'%5D = ut%0A%0A @cached_property%0A def unique_together(self):%0A return self.unique_for_fields + (self.attname, )%0A%0A @cached_property%0A def unique_for_attnames(self):%0A return %5Bself.model._meta.get_field(field_name).get_attname() for field_name in self.unique_for_fields%5D%0A
|
|
419f86f5c50f812f19dd731e9c33f66e57f51a48
|
Test matrix - work in progress
|
tests/matrix.py
|
tests/matrix.py
|
Python
| 0 |
@@ -0,0 +1,2499 @@
+import os.path, urllib, subprocess, shutil%0A%0Apython_versions = %5B'2.4.6', '2.5.6', '2.6.8', '2.7.5'%5D%0Alibcurl_versions = %5B'7.19.0', '7.32.0'%5D%0A%0Aclass in_dir:%0A def __init__(self, dir):%0A self.dir = dir%0A %0A def __enter__(self):%0A self.oldwd = os.getcwd()%0A os.chdir(self.dir)%0A %0A def __exit__(self, type, value, traceback):%0A os.chdir(self.oldwd)%0A%0Adef fetch(url, archive):%0A if not os.path.exists(archive):%0A print %22Fetching %25s%22 %25 url%0A io = urllib.urlopen(url)%0A with open('.tmp.%25s' %25 archive, 'w') as f:%0A while True:%0A chunk = io.read(65536)%0A if len(chunk) == 0:%0A break%0A f.write(chunk)%0A os.rename('.tmp.%25s' %25 archive, archive)%0A%0Adef build(archive, dir, prefix):%0A if not os.path.exists(dir):%0A print %22Building %25s%22 %25 archive%0A subprocess.check_call(%5B'tar', 'xf', archive%5D)%0A with in_dir(dir):%0A subprocess.check_call(%5B'./configure', '--prefix=%25s' %25 prefix%5D)%0A subprocess.check_call(%5B'make'%5D)%0A subprocess.check_call(%5B'make', 'install'%5D)%0A%0Afor python_version in python_versions:%0A url = 'http://www.python.org/ftp/python/%25s/Python-%25s.tgz' %25 (python_version, python_version)%0A archive = os.path.basename(url)%0A fetch(url, archive)%0A %0A dir = archive.replace('.tgz', '')%0A prefix = os.path.abspath('i/%25s' %25 dir)%0A build(archive, dir, prefix)%0A%0Afor libcurl_version in libcurl_versions:%0A url = 'http://curl.haxx.se/download/curl-%25s.tar.gz' %25 libcurl_version%0A archive = os.path.basename(url)%0A fetch(url, archive)%0A %0A dir = archive.replace('.tar.gz', '')%0A prefix = os.path.abspath('i/%25s' %25 dir)%0A build(archive, dir, prefix)%0A%0Afetch('https://raw.github.com/pypa/virtualenv/1.7/virtualenv.py', 'virtualenv-1.7.py')%0A%0Aif not os.path.exists('venv'):%0A os.mkdir('venv')%0A%0Afor python_version in python_versions:%0A for libcurl_version in libcurl_versions:%0A python_prefix = os.path.abspath('i/Python-%25s' %25 python_version)%0A libcurl_prefix = 
os.path.abspath('i/curl-%25s' %25 libcurl_version)%0A venv = os.path.abspath('venv/Python-%25s-curl-%25s' %25 (python_version, libcurl_version))%0A if os.path.exists(venv):%0A shutil.rmtree(venv)%0A subprocess.check_call(%5B'python', 'virtualenv-1.7.py', venv, '-p', '%25s/bin/python' %25 python_prefix%5D)%0A with in_dir('pycurl'):%0A subprocess.check_call('make clean && . %25s/bin/activate && make test' %25 venv, shell=True)%0A
|
|
c24647a921c64cfc8a1385f7e735622514e199c3
|
make it clear that we don't depend on gabble version for the test
|
tests/test-caps-update.py
|
tests/test-caps-update.py
|
"""
Test that CapabilitiesChanged signal is emitted only once after
all the caps in the presence have been analyzed.
"""
import dbus
from twisted.words.xish import domish
from servicetest import match, unwrap, lazy
from gabbletest import go, make_result_iq
def make_presence(from_jid, type, status):
    """Build a <presence> stanza, setting only the fields that were supplied."""
    stanza = domish.Element((None, 'presence'))
    for attr, value in (('from', from_jid), ('type', type)):
        if value is not None:
            stanza[attr] = value
    if status is not None:
        stanza.addElement('status', content=status)
    return stanza
def caps_iface(proxy):
    """Return the Telepathy Capabilities D-Bus interface wrapper for proxy."""
    iface_name = ('org.freedesktop.Telepathy.'
                  'Connection.Interface.Capabilities')
    return dbus.Interface(proxy, iface_name)
@match('dbus-signal', signal='StatusChanged', args=[0, 1])
def expect_connected(event, data):
    """Once connected, push a presence stanza carrying XEP-0115 caps info."""
    stanza = make_presence('[email protected]/Foo', None, 'hello')
    stanza.addElement('priority', None, '0')
    caps = stanza.addElement(('http://jabber.org/protocol/caps', 'c'))
    for key, value in (('node', 'http://telepathy.freedesktop.org/caps'),
                       ('ver', '0.5.14'),
                       ('ext', 'voice-v1 jingle-audio jingle-video')):
        caps[key] = value
    data['stream'].send(stanza)
    return True
@lazy
@match('dbus-signal', signal='CapabilitiesChanged',
       args=[[(2, u'org.freedesktop.Telepathy.Channel.Type.StreamedMedia', 0,
               3, 0, 3)]])
def expect_CapabilitiesChanged(event, data):
    """The caps-change signal arrived; tear the connection down."""
    conn = data['conn_iface']
    conn.Disconnect()
    return True
@match('dbus-signal')
def expect_disconnected(event, data):
    """After Disconnect(), no further CapabilitiesChanged may be emitted;
    finish once the StatusChanged disconnect signal is seen."""
    assert event.signal != 'CapabilitiesChanged'
    return event.signal == 'StatusChanged' and event.args == [2, 1]
if __name__ == '__main__':
    # Hand control to the servicetest event loop, which drives the
    # expect_* callbacks defined above in sequence.
    go()
|
Python
| 0.000001 |
@@ -1,13 +1,12 @@
-%0A
%22%22%22%0ATest tha
@@ -1055,12 +1055,22 @@
'0.
-5.14
+4.test-version
'%0A
@@ -1087,17 +1087,8 @@
= '
-voice-v1
jing
|
7311f8f2a8a7ab285669dc02d26d7e2248583ff5
|
Add tests for 'rle_compress'
|
test_rle.py
|
test_rle.py
|
Python
| 0.000013 |
@@ -0,0 +1,358 @@
+import pypolycomp%0Aimport numpy as np%0A%0Adef test_compression():%0A for cur_type in (np.int8, np.int16, np.int32, np.int64,%0A np.uint8, np.uint16, np.uint32, np.uint64):%0A compressed = pypolycomp.rle_compress(np.array(%5B1, 1, 1, 2, 3%5D, dtype=cur_type))%0A assert np.all(compressed == np.array(%5B3, 1, 1, 2, 1, 3%5D, dtype=cur_type))%0A%0A
|
|
da2b773bf6e669b3ec50bbd6af73e1d80bb0b5a5
|
Add tsstats/event.py for easy event-initialization
|
tsstats/events.py
|
tsstats/events.py
|
Python
| 0 |
@@ -0,0 +1,845 @@
+from collections import namedtuple%0A%0AEvent = namedtuple(%0A 'Event', %5B'timestamp', 'identifier', 'action', 'arg', 'arg_is_client'%5D%0A)%0A%0A%0Adef nick(timestamp, identifier, nick):%0A return Event(timestamp, identifier, 'set_nick', nick, arg_is_client=False)%0A%0A%0Adef connect(timestamp, identifier):%0A return Event(%0A timestamp, identifier, 'connect', arg=timestamp, arg_is_client=False%0A )%0A%0A%0Adef disconnect(timestamp, identifier):%0A return Event(%0A timestamp, identifier, 'disconnect', arg=timestamp, arg_is_client=False%0A )%0A%0A%0Adef kick(timestamp, identifier, target_identifier):%0A return Event(%0A timestamp, identifier, 'kick', target_identifier, arg_is_client=True%0A )%0A%0A%0Adef ban(timestamp, identifier, target_identifier):%0A return Event(%0A timestamp, identifier, 'ban', target_identifier, arg_is_client=True%0A )%0A
|
|
99f5c2a9cd44ac8ed301a781460816e8f0dffdb8
|
add killall.py example script
|
examples/killall.py
|
examples/killall.py
|
Python
| 0.000001 |
@@ -0,0 +1,503 @@
+#!/usr/bin/env python%0A%0A%22%22%22%0AKill a process by name.%0A%22%22%22%0A%0Aimport os%0Aimport sys%0Aimport psutil%0A%0Adef main():%0A if len(sys.argv) != 2:%0A sys.exit('usage: %25s name' %25 __file__)%0A else:%0A NAME = sys.argv%5B1%5D%0A%0A killed = %5B%5D%0A for proc in psutil.process_iter():%0A if proc.name == NAME and proc.pid != os.getpid():%0A proc.kill()%0A killed.append(proc.pid)%0A if not killed:%0A sys.exit('%25s: no process found' %25 NAME)%0A else:%0A sys.exit(0)%0A%0Asys.exit(main())%0A
|
|
20c08b96ce7a5377576e45953266c51079b5bdeb
|
Create testfile.py
|
testfile.py
|
testfile.py
|
Python
| 0.000005 |
@@ -0,0 +1,22 @@
+print(%22Tess is cool%22)%0A
|
|
d75eebbcb6b1922d37a97550bc4cbead6e50cfdb
|
add localdb.py
|
united/localdb.py
|
united/localdb.py
|
Python
| 0.000001 |
@@ -0,0 +1,1359 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0ACopyright (C) 2015, MuChu Hsu%0AContributed by Muchu Hsu ([email protected])%0AThis file is part of BSD license%0A%3Chttps://opensource.org/licenses/BSD-3-Clause%3E%0A%22%22%22%0Aimport sqlite3%0Aimport os%0Aimport logging%0Afrom pkg_resources import resource_filename%0A%22%22%22%0A%E8%B3%87%E6%96%99%E5%BA%AB%E5%AD%98%E5%8F%96 %E9%A1%9E%E5%88%A5%0A%22%22%22%0Aclass SQLite3Db:%0A%0A #%E5%BB%BA%E6%A7%8B%E5%AD%90%0A def __init__(self):%0A logging.basicConfig(level=logging.INFO)%0A dbPath = resource_filename(%22cameo_res%22, %22local.db%22)%0A if os.path.exists(dbPath):#%E5%BB%BA%E7%AB%8B%E9%80%A3%E7%B7%9A%0A logging.info(%22connect to sqlite3 db.%22)%0A self.conn = sqlite3.connect(dbPath)%0A else: #%E5%88%9D%E5%A7%8B%E5%8C%96%E8%B3%87%E6%96%99%E5%BA%AB%E4%B8%A6%E5%BB%BA%E7%AB%8B%E9%80%A3%E7%B7%9A%0A logging.info(%22connect to sqlite3 db with initialization.%22)%0A self.conn = sqlite3.connect(dbPath)%0A c = self.conn.cursor()%0A c.execute(%22%22%22CREATE TABLE table%0A (id INTEGER PRIMARY KEY)%22%22%22)%0A self.conn.commit()%0A%0A #%E8%A7%A3%E6%A7%8B%E5%AD%90%0A def __del__(self):%0A logging.info(%22close sqlite3 db connection.%22)%0A self.conn.close() #%E9%97%9C%E9%96%89%E8%B3%87%E6%96%99%E5%BA%AB%E9%80%A3%E7%B7%9A%0A%0A # %E5%9F%B7%E8%A1%8C SQL %E4%B8%A6 commit (%E9%81%A9%E7%94%A8%E6%96%BC INSERT%E3%80%81UPDATE%E3%80%81DELETE)%0A def commitSQL(self, strSQL=None):%0A c = self.conn.cursor()%0A c.execute(strSQL)%0A self.conn.commit()%0A%0A # %E5%9F%B7%E8%A1%8C SQL %E4%B8%A6 fetchall %E8%B3%87%E6%96%99 (%E9%81%A9%E7%94%A8%E6%96%BC SELECT)%0A def fetchallSQL(self, strSQL=None):%0A c = self.conn.cursor()%0A c.execute(strSQL)%0A return c.fetchall()
|
|
b6d1b9365c356a14f0f9ef478247d498845a2b2c
|
add script to process normal vectors
|
coastline/data/vectors.py
|
coastline/data/vectors.py
|
Python
| 0 |
@@ -0,0 +1,1421 @@
+import matplotlib.pyplot as plt%0Aimport glob%0Aimport math%0A%0A%0Adef extract_data(file_name):%0A points = %5B%5D%0A with open(file_name, 'r') as f:%0A for i, line in enumerate(f):%0A if i %3E 2:%0A s = line.split()%0A point = (float(s%5B0%5D), float(s%5B1%5D))%0A points.append(point)%0A return points%0A%0A%0Adef normalize(vector, s):%0A norm = math.sqrt(vector%5B0%5D**2.0 + vector%5B1%5D**2.0)%0A return (s*vector%5B0%5D/norm, s*vector%5B1%5D/norm)%0A%0A%0Adef get_normal_vectors(points):%0A num_points = len(points)%0A vectors = %5B%5D%0A for i in range(num_points):%0A i_before = i - 1%0A i_after = (i + 1)%25num_points%0A vector = (points%5Bi_after%5D%5B1%5D - points%5Bi_before%5D%5B1%5D, -(points%5Bi_after%5D%5B0%5D - points%5Bi_before%5D%5B0%5D))%0A vector = normalize(vector, 5000.0)%0A vectors.append(vector)%0A return vectors%0A%0A%0Adef add_plot(file_name, style):%0A points = extract_data(file_name)%0A if len(points) %3E 3: # for the moment cannot handle linear islands%0A ax = plt.axes()%0A vectors = get_normal_vectors(points)%0A for i in range(len(points)):%0A ax.arrow(points%5Bi%5D%5B0%5D, points%5Bi%5D%5B1%5D, vectors%5Bi%5D%5B0%5D, vectors%5Bi%5D%5B1%5D, head_width=0.1, head_length=0.1, fc='k', ec='k')%0A (xs, ys) = zip(*points)%0A plt.plot(xs, ys, style)%0A%0A%0Afor f in glob.glob('*.txt'):%0A add_plot(f, 'r-')%0A%0A%0A#axes = plt.gca()%0A#axes.set_xlim(%5B-20.0, 0.0%5D)%0A#axes.set_ylim(%5B40.0, 60.0%5D)%0Aplt.show()%0A
|
|
cb9cbe28f0dfb8c98122ba52b7b050c5727ce1f2
|
Add debug logging for GetChildPids() on windows.
|
tools/telemetry/telemetry/core/chrome/win_platform_backend.py
|
tools/telemetry/telemetry/core/chrome/win_platform_backend.py
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import ctypes
import subprocess
try:
import pywintypes # pylint: disable=F0401
import win32api # pylint: disable=F0401
import win32con # pylint: disable=F0401
import win32process # pylint: disable=F0401
except ImportError:
pywintypes = None
win32api = None
win32con = None
win32process = None
from telemetry.core.chrome import platform_backend
class WinPlatformBackend(platform_backend.PlatformBackend):
  """Windows implementation of PlatformBackend using pywin32 and ctypes."""

  def _GetProcessHandle(self, pid):
    # Open the process with only the rights needed by the memory/IO
    # queries below.
    mask = (win32con.PROCESS_QUERY_INFORMATION |
            win32con.PROCESS_VM_READ)
    return win32api.OpenProcess(mask, False, pid)

  # pylint: disable=W0613
  def StartRawDisplayFrameRateMeasurement(self):
    raise NotImplementedError()

  def StopRawDisplayFrameRateMeasurement(self):
    raise NotImplementedError()

  def GetRawDisplayFrameRateMeasurements(self):
    raise NotImplementedError()

  def IsThermallyThrottled(self):
    raise NotImplementedError()

  def HasBeenThermallyThrottled(self):
    raise NotImplementedError()

  def GetSystemCommitCharge(self):
    """Returns the system-wide commit charge, in kilobytes."""
    class PerformanceInfo(ctypes.Structure):
      """Struct for GetPerformanceInfo() call
      http://msdn.microsoft.com/en-us/library/ms683210
      """
      _fields_ = [('size', ctypes.c_ulong),
                  ('CommitTotal', ctypes.c_size_t),
                  ('CommitLimit', ctypes.c_size_t),
                  ('CommitPeak', ctypes.c_size_t),
                  ('PhysicalTotal', ctypes.c_size_t),
                  ('PhysicalAvailable', ctypes.c_size_t),
                  ('SystemCache', ctypes.c_size_t),
                  ('KernelTotal', ctypes.c_size_t),
                  ('KernelPaged', ctypes.c_size_t),
                  ('KernelNonpaged', ctypes.c_size_t),
                  ('PageSize', ctypes.c_size_t),
                  ('HandleCount', ctypes.c_ulong),
                  ('ProcessCount', ctypes.c_ulong),
                  ('ThreadCount', ctypes.c_ulong)]

      def __init__(self):
        # GetPerformanceInfo requires the struct size to be filled in.
        self.size = ctypes.sizeof(self)
        super(PerformanceInfo, self).__init__()

    performance_info = PerformanceInfo()
    ctypes.windll.psapi.GetPerformanceInfo(
        ctypes.byref(performance_info), performance_info.size)
    # CommitTotal is a page count; PageSize converts it to bytes, /1024 to KiB.
    return performance_info.CommitTotal * performance_info.PageSize / 1024

  def GetMemoryStats(self, pid):
    """Returns a dict of memory statistics for |pid|, or {} if it exited."""
    try:
      memory_info = win32process.GetProcessMemoryInfo(
          self._GetProcessHandle(pid))
    except pywintypes.error, e:
      errcode = e[0]
      if errcode == 87:  # The process may have been closed.
        return {}
      raise
    return {'VM': memory_info['PagefileUsage'],
            'VMPeak': memory_info['PeakPagefileUsage'],
            'WorkingSetSize': memory_info['WorkingSetSize'],
            'WorkingSetSizePeak': memory_info['PeakWorkingSetSize']}

  def GetIOStats(self, pid):
    """Returns a dict of I/O counters for |pid|, or {} if it exited."""
    try:
      io_stats = win32process.GetProcessIoCounters(
          self._GetProcessHandle(pid))
    except pywintypes.error, e:
      errcode = e[0]
      if errcode == 87:  # The process may have been closed.
        return {}
      raise
    return {'ReadOperationCount': io_stats['ReadOperationCount'],
            'WriteOperationCount': io_stats['WriteOperationCount'],
            'ReadTransferCount': io_stats['ReadTransferCount'],
            'WriteTransferCount': io_stats['WriteTransferCount']}

  def GetChildPids(self, pid):
    """Returns a list of child pids of |pid|, direct and transitive."""
    # 'wmic' prints one (ParentProcessId, ProcessId) pair per process.
    pid_ppid_list = subprocess.Popen(['wmic', 'process', 'get',
                                      'ParentProcessId,ProcessId'],
                                     stdout=subprocess.PIPE).communicate()[0]
    ppid_map = collections.defaultdict(list)
    for pid_ppid in pid_ppid_list.splitlines()[1:]:  # skip header
      if not pid_ppid:
        continue
      curr_ppid, curr_pid = pid_ppid.split()
      ppid_map[int(curr_ppid)].append(int(curr_pid))

    def _GetChildrenPids(ppid_map, pid):
      # Depth-first walk of the parent->children map; the child == pid
      # check guards against self-parented entries causing infinite
      # recursion.
      if not pid or pid not in ppid_map:
        return []
      ret = ppid_map[pid]
      for child in ppid_map[pid]:
        if child == pid:
          continue
        ret.extend(_GetChildrenPids(ppid_map, child))
      return ret

    return _GetChildrenPids(ppid_map, pid)
|
Python
| 0.00004 |
@@ -193,16 +193,31 @@
ctypes%0A
+import logging%0A
import s
@@ -3799,24 +3799,83 @@
nicate()%5B0%5D%0A
+ logging.info('wmic process output:%5Cn' + pid_ppid_list)%0A
ppid_map
@@ -4366,24 +4366,86 @@
ap, child))%0A
+ logging.info('Found child pids %25s for %25d' %25 (ret, pid))%0A
return
|
1ac09013e8cf89e83418de0be9d83b87a0a20634
|
Create mp3_exploit.py
|
mp3_exploit.py
|
mp3_exploit.py
|
Python
| 0 |
@@ -0,0 +1,711 @@
+#!/usr/bin/env python%0A'''%0AAuthor: Chris Duffy%0ADate: May 2015%0APurpose: To provide a means to demonstrate a simple file upload proof of concept related to%0A exploiting Free MP3 CD Ripper.%0A'''%0Aimport struct%0Afilename=%22exploit.wav%22%0Afill =%22A%22*4112%0A#eip = struct.pack('%3CI',0x42424242) # EIP overwrite verfication%0Aeip = struct.pack('%3CI',0x7C874413) # JMP ESP instruction from Kernel32.dll%0Anop = %22%5Cx90%22*16%0A# Place for calc.exe shellcode%0Acalc = ()%0A# Place for actual shellcode%0Ashell =()%0A#exploit = fill + eip + nop + calc #loader for simple proof of concept for shell cdoe%0Aexploit = fill + eip + nop + shell #loader for real shell access%0AwriteFile = open (filename, %22w%22)%0AwriteFile.write(exploit)%0AwriteFile.close()%0A
|
|
68ea60fd87e3e0240f82a42f0f6b4dcd65732f97
|
Add MQTT server example
|
mqtt-server.py
|
mqtt-server.py
|
Python
| 0.000001 |
@@ -0,0 +1,990 @@
+#!/usr/bin/python3%0A# %0A# Copyright (c) 2015-2016, Fabian Affolter %[email protected]%3E%0A# Released under the MIT license. See LICENSE file for details.%0A#%0A# Source: https://github.com/beerfactory/hbmqtt/blob/develop/samples/broker_start.py%0A#%0Aimport logging%0Aimport asyncio%0Aimport os%0Afrom hbmqtt.broker import Broker%0A%0Alogger = logging.getLogger(__name__)%0A%0Aconfig = %7B%0A 'listeners': %7B%0A 'default': %7B%0A 'type': 'tcp',%0A 'bind': '0.0.0.0:1883',%0A %7D,%0A 'ws-mqtt': %7B%0A 'bind': '127.0.0.1:3000',%0A 'type': 'ws',%0A 'max_connections': 10,%0A %7D,%0A %7D,%0A%7D%0A%0Abroker = Broker(config)%0A%[email protected]%0Adef test_coro():%0A yield from broker.start()%0A%0Aif __name__ == '__main__':%0A formatter = %22%5B%25(asctime)s%5D :: %25(levelname)s :: %25(name)s :: %25(message)s%22%0A logging.basicConfig(level=logging.INFO, format=formatter)%0A asyncio.get_event_loop().run_until_complete(test_coro())%0A asyncio.get_event_loop().run_forever()%0A
|
|
32a839a3b191582b7075229002026ef131416e17
|
Allow first data cell to be escaped when continuing lines with '...' (issue 420)
|
src/robot/parsing/rawdatatables.py
|
src/robot/parsing/rawdatatables.py
|
# Copyright 2008-2009 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from robot import utils
from robot.errors import DataError
from robot.output import LOGGER
_ERR = "Error in file '%s' in table '%s' in element on row %d: %s"
class _Table:
    """Base class for raw parsed data tables.

    Subclasses implement ``_add_row`` to interpret one data row; this base
    class tracks the current row number and reports syntax errors with
    file, table and row context.
    """

    def __init__(self, name, source, data):
        self._name = name
        self._source = source
        self._row = 0          # 1-based row counter, advanced in add_row
        self._data = data

    def add_row(self, cells, repeat=1):
        """Add `cells` to the table `repeat` times.

        Empty rows only advance the row counter so that reported row
        numbers stay in sync with the source file.
        """
        if not cells:
            self._row += repeat
            return
        try:
            for _ in range(repeat):
                self._row += 1
                self._add_row(cells[0], cells[1:])
        # NOTE(review): bare except also swallows SystemExit and
        # KeyboardInterrupt; kept as-is because utils.get_error_message()
        # is expected to process the active exception -- confirm before
        # narrowing to `except Exception:`.
        except:
            self.report_invalid_syntax(self._row, utils.get_error_message())

    def report_invalid_syntax(self, row, error, level='ERROR'):
        """Log `error` for `row` with file and table context attached."""
        msg = _ERR % (self._source, self._name, row, error)
        LOGGER.write(msg, level)
class SimpleTable(_Table):
    """Table whose rows are independent name/value items."""

    def _add_row(self, name, value):
        # A '...' marker continues the value of the previous item.
        if name != '...':
            self._data.append(SimpleItem(name, value, self._row, self))
            return
        try:
            last_item = self._data[-1]
        except IndexError:
            raise DataError('Invalid multirow usage: No item started')
        last_item.extend(value)
class ComplexTable(_Table):
    """Table whose rows belong to named multi-row items (tests/keywords)."""

    def __init__(self, name, source, data):
        _Table.__init__(self, name, source, data)
        self._item = None     # item currently collecting rows

    def _add_row(self, name, data):
        # A non-empty first cell starts a new item; empty first cells
        # continue the current one.
        if name != '':
            self._item = ComplexItem(name, self._row, self)
            self._data.append(self._item)
        elif self._item is None:
            raise DataError('No name specified')
        self._item.add_subitem(data)
class _Item:
    """Base class for parsed items; delegates error reporting to the
    owning table with this item's row number attached."""

    def __init__(self, name, row, parent):
        self.name = name
        self._row = row
        self._parent = parent

    def report_invalid_syntax(self, error=None, level='ERROR'):
        message = error if error is not None else utils.get_error_message()
        self._parent.report_invalid_syntax(self._row, message, level)
class SimpleItem(_Item):
    """A name with a list of value cells, e.g. one setting-table row."""

    def __init__(self, name, value, row, parent):
        _Item.__init__(self, name, row, parent)
        self.value = value

    def extend(self, value):
        # Continuation rows ('...') append their cells to this item.
        self.value.extend(value)

    def copy(self):
        # Duplicate the value list so the copy can be mutated freely;
        # row and parent are shared.
        duplicated_values = self.value[:]
        return SimpleItem(self.name, duplicated_values, self._row,
                          self._parent)
class ComplexItem(_Item):
    """Represents one item in Test Case or Keyword table.

    Collects its rows into ``metadata`` (bracketed settings such as
    ``[Documentation]``) and ``keywords`` (plain keyword rows), and
    supports '...' continuation rows that extend the previous entry.
    """

    def __init__(self, name, row, parent):
        _Item.__init__(self, name, row, parent)
        self.metadata = []
        self.keywords = []
        self._previous = None            # list the last row was added to
        self._current_row = self._row - 1

    def add_subitem(self, data):
        """Process one row of cells belonging to this item."""
        self._current_row += 1
        if not data:
            return
        name = data[0]
        if name == '...':
            self._add_to_previous(data[1:])
        # The first cell may be empty or an escaped empty cell ('\')
        # before the '...' continuation marker (issue 420).
        elif name in ('', '\\') and len(data) > 1 and data[1] == '...':
            self._add_to_previous(data[2:])
        elif name.startswith('[') and name.endswith(']'):
            name = name[1:-1].strip()    # removes '[' and ']'
            item = SimpleItem(name, data[1:], self._current_row, self._parent)
            self.metadata.append(item)
            self._previous = self.metadata
        else:
            self.keywords.append(data)
            self._previous = self.keywords

    def _add_to_previous(self, data):
        """Extend the most recently added entry with continuation cells."""
        if self._previous is None:
            raise DataError('Invalid multirow usage: No item started')
        self._previous[-1].extend(data)
|
Python
| 0 |
@@ -1080,17 +1080,17 @@
for
-i
+_
in rang
@@ -3465,13 +3465,21 @@
ame
-== ''
+in %5B'', '%5C%5C'%5D
and
|
c0b05a43e10693f8aab87a7f86726d512b7494fc
|
Add tenant exporter for accounting
|
bluebottle/clients/management/commands/export_tenants.py
|
bluebottle/clients/management/commands/export_tenants.py
|
Python
| 0 |
@@ -0,0 +1,1743 @@
+import json%0A%0Afrom rest_framework.authtoken.models import Token%0Afrom django.contrib.contenttypes.models import ContentType%0Afrom django.core.management.base import BaseCommand%0A%0Afrom bluebottle.clients import properties%0Afrom bluebottle.clients.models import Client%0Afrom bluebottle.clients.utils import LocalTenant%0A%0A%0Aclass Command(BaseCommand):%0A help = 'Export tenants, so that we can import them into the accounting app'%0A%0A def add_arguments(self, parser):%0A parser.add_argument('--file', type=str, default=None, action='store')%0A%0A def handle(self, *args, **options):%0A results = %5B%5D%0A for client in Client.objects.all():%0A properties.set_tenant(client)%0A with LocalTenant(client, clear_tenant=True):%0A ContentType.objects.clear_cache()%0A accounts = %5B%5D%0A for merchant in properties.MERCHANT_ACCOUNTS:%0A if merchant%5B'merchant'%5D == 'docdata':%0A accounts.append(%0A %7B%0A 'service_type': 'docdata',%0A 'username': merchant%5B'merchant_name'%5D%0A %7D%0A )%0A%0A api_key = Token.objects.get(user__username='accounting').key%0A results.append(%7B%0A %22name%22: client.schema_name,%0A %22domain%22: properties.TENANT_MAIL_PROPERTIES%5B'website'%5D,%0A %22api_key%22: api_key,%0A %22accounts%22: accounts%0A %7D)%0A if options%5B'file'%5D:%0A text_file = open(options%5B'file'%5D, %22w%22)%0A text_file.write(json.dumps(results))%0A text_file.close()%0A else:%0A print json.dumps(results)%0A
|
|
dfbf888ca0b56448a4f211900b16e3c85648b241
|
Add migration for changing docstring of Note.is_private to unicode
|
editorsnotes/main/migrations/0025_auto_20160628_0913.py
|
editorsnotes/main/migrations/0025_auto_20160628_0913.py
|
Python
| 0 |
@@ -0,0 +1,532 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.4 on 2016-06-28 09:13%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('main', '0024_topic_ld'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='note',%0A name='is_private',%0A field=models.BooleanField(default=False, help_text=%22If true, will only be be viewable to users who belong to the note's project.%22),%0A ),%0A %5D%0A
|
|
a45a0bb366ae28d38d543ce71f32f625e9b80042
|
add tools module
|
modules/tools.py
|
modules/tools.py
|
Python
| 0.000001 |
@@ -0,0 +1,1550 @@
+from pandas import DataFrame%0Afrom pandas.tseries.tools import to_datetime%0A%0A#%7CCreate time series from trade history DataFrame%0Adef time_series(df, period):%0A%09ts = DataFrame(columns=('timestamp', 'price', 'high',%0A%09%09%09%09'low', 'open', 'amount'))%09%0A%09tmin = int(df%5B'timestamp'%5D.min())%0A%09tmax = int(df%5B'timestamp'%5D.max())%0A%09for tsmp in range(tmin, tmax, period):%0A%09%09slic = time_slice(df, tsmp, period)%09%09%0A%09%09ts = ts.append(slic)%0A%09ts = date_index(ts)%0A%09return ts%0A%0Adef time_slice(df, tsmp, period):%0A%09lprice = df%5Bdf%5B'timestamp'%5D %3C tsmp%5D.tail(1)%5B'price'%5D%0A%09df = df%5Bdf%5B'timestamp'%5D %3E= tsmp%5D%0A%09df = df%5Bdf%5B'timestamp'%5D %3C (tsmp + period)%5D%09%0A%09if len(df.index) == 0:%0A%09%09slic = DataFrame(%7B'timestamp' : %5Btsmp%5D, 'price': lprice, %0A%09%09%09%09'high': lprice, 'low': lprice,%0A%09%09%09%09'open': lprice, 'amount': 0.0%7D)%09%09%0A%09else:%09%09%09%0A%09%09slic = DataFrame(%7B'timestamp' : %5Btsmp%5D, %0A%09%09%09%09'price': round(df%5B'price'%5D.iloc%5B-1%5D, 3),%0A%09%09%09%09'high': round(df%5B'price'%5D.max(), 3), %0A%09%09%09%09'low': round(df%5B'price'%5D.min(), 3),%0A%09%09%09%09'open': round(df%5B'price'%5D.iloc%5B0%5D, 3), %0A%09%09%09%09'amount': round(df%5B'amount'%5D.sum(), 4)%7D)%09%09%0A%09return slic%0A%0A#%7CCreate datetime index for DataFrame using %22timestamp%22 column%0Adef date_index(df):%0A%09date = df%5B'timestamp'%5D%0A%09date = to_datetime(date, unit='s')%0A%09df%5B'date'%5D = date%0A%09df = df.set_index('date')%0A%09return df%0A%0A#Outputs number of seconds in provided number of days/hours/minutes%0Adef seconds(days=0, hours=0, minutes=0, typ=''):%0A%09if typ == '':%09%0A%09%09total = 86400*days + 3600*hours + 60*minutes%0A%09elif typ == 'd':%0A%09%09total = 86400%0A%09elif typ == 'h':%0A%09%09total = 3600%0A%09elif typ == 'm':%0A%09%09total = 50%0A%09return total%0A%0A%0A
|
|
2f4d413e14011847138d6afd27a210fc58823c8a
|
add certificate and key migrations
|
rootfs/api/migrations/0004_auto_20160124_2134.py
|
rootfs/api/migrations/0004_auto_20160124_2134.py
|
Python
| 0.000001 |
@@ -0,0 +1,629 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.1 on 2016-01-24 21:34%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('api', '0003_auto_20160114_0310'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='certificate',%0A name='expires',%0A field=models.DateTimeField(editable=False),%0A ),%0A migrations.AlterField(%0A model_name='key',%0A name='fingerprint',%0A field=models.CharField(editable=False, max_length=128),%0A ),%0A %5D%0A
|
|
d1aa553f739e91cd470eea23042b6c8bcebe9b6f
|
add mocked integrationtest for the deprecationwarning of makeitem
|
testing/python/test_deprecations.py
|
testing/python/test_deprecations.py
|
Python
| 0 |
@@ -0,0 +1,461 @@
+import pytest%0A%0Afrom _pytest.python import PyCollector%0A%0A%0Aclass PyCollectorMock(PyCollector):%0A %22%22%22evil hack%22%22%22%0A%0A def __init__(self):%0A self.called = False%0A%0A def _makeitem(self, *k):%0A %22%22%22hack to disable the actual behaviour%22%22%22%0A self.called = True%0A%0A%0Adef test_pycollector_makeitem_is_deprecated():%0A%0A collector = PyCollectorMock()%0A with pytest.deprecated_call():%0A collector.makeitem('foo', 'bar')%0A assert collector.called%0A
|
|
e348ec573a4882258466cdc2ab73da8b4dbbe256
|
Create pillu.py
|
modules/pillu.py
|
modules/pillu.py
|
Python
| 0.000003 |
@@ -0,0 +1,6 @@
+#test%0A
|
|
1eb2e1390d41c65943e777a66918df87b4ee8799
|
Add constructive_hierarchy
|
constructive_hierarchy.py
|
constructive_hierarchy.py
|
Python
| 0.999379 |
@@ -0,0 +1,2223 @@
+'''Reason about a directed graph in which the (non-)existance of some edges%0Amust be inferred by the disconnectedness of certain vertices. Collect (truthy)%0Aevidence for boolean function return values.'''%0A%0Adef transitive_closure_set(vertices, edges):%0A '''Find the transitive closure of a set of vertices.'''%0A neighbours = %7Bb: (a, b) for a, b in edges if a in vertices%7D%0A if set(neighbours).issubset(vertices):%0A return vertices%0A return transitive_closure_set(vertices %7C neighbours, edges)%0A%0A#def downward_closure(vertex, edges):%0A# '''Find the downward closure of a vertex.'''%0A# return transitive_closure_set(%7Bvertex%7D, edges)%0A#%0A#def upward_closure(vertex, edges):%0A# '''Find the upward closure of a vertex.'''%0A# return transitive_closure_set(%7Bvertex%7D, %7B(b, a) for a, b in edges%7D)%0A#%0A#def is_connected(a, b, edges):%0A# '''Check if there is a path from a to b.'''%0A# return b in downward_closure(a, edges)%0A#%0A#def is_separated(a, b, edges, disconnections):%0A# '''Check that a and b will remain not connected even if edges are added to%0A# the graph, as long as the vertex pairs listed in disconnections remain%0A# disconected.'''%0A# return any((p, q) in disconnections%0A# for p in upward_closure(a, edges)%0A# for q in downward_closure(b, edges))%0A#%0A#def find_possible_connections(vertices, edges, disconnections):%0A# '''Find which edges can be added to create new connections, without%0A# connecting any pairs in disconnections.'''%0A# return %7B(a, b) for a in vertices for b in vertices if%0A# not is_connected(a, b, edges) and%0A# not is_separated(a, b, edges, disconnections)%7D%0A#%0A#def is_isthmus(edge, edges):%0A# return not is_connected(*edge, edges - %7Bedge%7D)%0A#%0A#def spanning_tree(edges):%0A# for edge in edges:%0A# if not is_isthmus(edge, edges):%0A# return spanning_tree(edges - %7Bedge%7D)%0A# return edges%0A#%0A#def rank_possible_edge(edge, vertices, edges, disconnections):%0A# evaluator = lambda x, y: 
len(find_possible_connections(vertices, x, y))%0A# exists_rank = evaluator(edges %7C %7Bedge%7D, disconnections)%0A# not_exists_rank = evaluator(edges, disconnections %7C %7Bedge%7D)%0A# return abs(exists_rank) + abs(not_exists_rank)%0A
|
|
25ff8c6f8bc9d70886d004f8b64f08facb8c12cf
|
Create Find the Celebrity sol for Leetcode
|
leetcode/277-Find-the-Celebrity/FindtheCelebrity_sol.py
|
leetcode/277-Find-the-Celebrity/FindtheCelebrity_sol.py
|
Python
| 0 |
@@ -0,0 +1,626 @@
+# The knows API is already defined for you.%0A# @param a, person a%0A# @param b, person b%0A# @return a boolean, whether a knows b%0A# def knows(a, b):%0A%0Aclass Solution(object):%0A def findCelebrity(self, n):%0A %22%22%22%0A :type n: int%0A :rtype: int%0A %22%22%22%0A if n %3C 2:%0A return -1%0A%0A candidate = 0%0A for i in range(1, n):%0A if not knows(i, candidate):%0A candidate = i%0A for i in range(n):%0A if i == candidate:%0A continue%0A if not knows(i, candidate) or knows(candidate, i):%0A return -1%0A return candidate%0A
|
|
3eeaa890f0a7afcf7a6f470055c5bc0fda20ae5c
|
create moistureCaptor.py
|
captors-enabled/moistureCaptor.py
|
captors-enabled/moistureCaptor.py
|
Python
| 0.000034 |
@@ -0,0 +1,214 @@
+%0A%0Aclass Captor():%0A id = 5%0A def Captor():%0A self.id = 5%0A %0A %0A def callback(self):%0A moisture = 0%0A #start communication with server%0A %0A return moisture%0A %0A def getiId(self):%0A return self.id%0A
|
|
25aa486fcba631a251db4f0366d4d4f713a86f37
|
Add missing migration file
|
SigmaPi/UserInfo/migrations/0003_auto_20170204_1342.py
|
SigmaPi/UserInfo/migrations/0003_auto_20170204_1342.py
|
Python
| 0.000002 |
@@ -0,0 +1,457 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('UserInfo', '0002_auto_20161208_1712'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelOptions(%0A name='pledgeclass',%0A options=%7B'ordering': %5B'dateInitiated'%5D, 'verbose_name': 'Pledge Class', 'verbose_name_plural': 'Pledge Classes'%7D,%0A ),%0A %5D%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.