commit (string, len 40) | subject (string, len 1-3.25k) | old_file (string, len 4-311) | new_file (string, len 4-311) | old_contents (string, len 0-26.3k) | lang (string, 3 classes) | proba (float64, 0-1) | diff (string, len 0-7.82k)
---|---|---|---|---|---|---|---|
319d74685a0bd44ca0c62bf41dae2f9515b5e327
|
Add tests for nilrt_ip._load_config
|
tests/pytests/unit/modules/test_nilrt_ip.py
|
tests/pytests/unit/modules/test_nilrt_ip.py
|
Python
| 0 |
@@ -0,0 +1,1861 @@
+import io%0A%0Aimport pytest%0Aimport salt.modules.nilrt_ip as nilrt_ip%0Afrom tests.support.mock import patch%0A%0A%[email protected](autouse=True)%0Adef setup_loader(request):%0A setup_loader_modules = %7Bnilrt_ip: %7B%7D%7D%0A with pytest.helpers.loader_mock(request, setup_loader_modules) as loader_mock:%0A yield loader_mock%0A%0A%[email protected]%0Adef patched_config_file():%0A config_file = io.StringIO(%0A %22%22%22%0A %5Bsome_section%5D%0A name = thing%0A fnord = bar%0A icanhazquotes = %22this string is quoted%22%0A icannothazquotes = this string is unquoted%0A number_value = 42%0A %22%22%22%0A )%0A with patch(%22salt.utils.files.fopen%22, return_value=config_file):%0A yield%0A%0A%0Adef test_when_config_has_quotes_around_string_they_should_be_removed(%0A patched_config_file,%0A):%0A expected_value = %22this string is quoted%22%0A option = %22icanhazquotes%22%0A%0A actual_value = nilrt_ip._load_config(%22some_section%22, %5Boption%5D)%5Boption%5D%0A%0A assert actual_value == expected_value%0A%0A%0Adef test_when_config_has_no_quotes_around_string_it_should_be_returned_as_is(%0A patched_config_file,%0A):%0A expected_value = %22this string is unquoted%22%0A option = %22icannothazquotes%22%0A%0A actual_value = nilrt_ip._load_config(%22some_section%22, %5Boption%5D)%5Boption%5D%0A%0A assert actual_value == expected_value%0A%0A%[email protected](%0A %22default_value%22,%0A %5B%0A 42,%0A -99.9,%0A ('%22', %22some value%22, 42, '%22'),%0A %5B'%22', %22a weird list of values%22, '%22'%5D,%0A %7B%22this%22: %22dictionary%22, %22has%22: %22multiple values%22, 0: '%22', -1: '%22'%7D,%0A %5D,%0A)%0Adef test_when_default_value_is_not_a_string_and_option_is_missing_the_default_value_should_be_returned(%0A patched_config_file, default_value%0A):%0A option = %22non existent option%22%0A%0A actual_value = nilrt_ip._load_config(%0A %22some_section%22, options=%5Boption%5D, default_value=default_value%0A )%5Boption%5D%0A%0A assert actual_value == default_value%0A
|
|
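For readability, a percent-decoded excerpt of the diff above (abridged to the fixture and the first test; the full cell also covers unquoted strings and parametrized non-string defaults):

```python
import io

import pytest
import salt.modules.nilrt_ip as nilrt_ip
from tests.support.mock import patch


@pytest.fixture
def patched_config_file():
    config_file = io.StringIO(
        """
        [some_section]
        icanhazquotes = "this string is quoted"
        icannothazquotes = this string is unquoted
        """
    )
    with patch("salt.utils.files.fopen", return_value=config_file):
        yield


def test_when_config_has_quotes_around_string_they_should_be_removed(
    patched_config_file,
):
    expected_value = "this string is quoted"
    option = "icanhazquotes"

    actual_value = nilrt_ip._load_config("some_section", [option])[option]

    assert actual_value == expected_value
```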
e810ecb5362496f72485220ab4e9cecd5467b3a6
|
kill leftover webpagereplay servers.
|
build/android/pylib/utils/test_environment.py
|
build/android/pylib/utils/test_environment.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import os
import psutil

from pylib import android_commands


def _KillWebServers():
  for retry in xrange(5):
    for server in ['lighttpd', 'web-page-replay']:
      pids = [p.pid for p in psutil.process_iter() if server in p.name]
      for pid in pids:
        try:
          logging.warning('Killing %s %s', server, pid)
          os.kill(pid, signal.SIGQUIT)
        except Exception as e:
          logging.warning('Failed killing %s %s %s', server, pid, e)


def CleanupLeftoverProcesses():
  """Clean up the test environment, restarting fresh adb and HTTP daemons."""
  _KillWebServers()
  did_restart_host_adb = False
  for device in android_commands.GetAttachedDevices():
    adb = android_commands.AndroidCommands(device, api_strict_mode=True)
    # Make sure we restart the host adb server only once.
    if not did_restart_host_adb:
      adb.RestartAdbServer()
      did_restart_host_adb = True
    adb.RestartAdbdOnDevice()
    adb.EnableAdbRoot()
    adb.WaitForDevicePm()
|
Python
| 0.000002 |
@@ -194,16 +194,30 @@
t psutil
+%0Aimport signal
%0A%0Afrom p
@@ -246,16 +246,17 @@
mmands%0A%0A
+%0A
def _Kil
@@ -280,27 +280,96 @@
for
-retry in xrange(5):
+s in %5Bsignal.SIGTERM, signal.SIGINT, signal.SIGQUIT, signal.SIGKILL%5D:%0A signalled = %5B%5D
%0A
@@ -404,14 +404,12 @@
'web
--
page
--
repl
@@ -424,22 +424,8 @@
-pids = %5Bp.pid
for
@@ -454,65 +454,91 @@
er()
- if server in p.name%5D%0A for pid in pids:%0A try:
+:%0A try:%0A if not server in ' '.join(p.cmdline):%0A continue
%0A
@@ -552,23 +552,20 @@
logging.
-warning
+info
('Killin
@@ -565,28 +565,34 @@
'Killing %25s
+%25s
%25s',
+ s,
server, pid
@@ -588,16 +588,18 @@
server,
+p.
pid)%0A
@@ -609,35 +609,53 @@
-os.kill(pid, signal.SIGQUIT
+p.send_signal(s)%0A signalled.append(p
)%0A
@@ -748,16 +748,18 @@
er,
+p.
pid, e)%0A
%0A%0Ade
@@ -754,16 +754,169 @@
pid, e)%0A
+ for p in signalled:%0A try:%0A p.wait(1)%0A except Exception as e:%0A logging.warning('Failed waiting for %25s to die. %25s', p.pid, e)%0A%0A
%0A%0Adef Cl
|
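The word-level hunks above are hard to follow in percent-encoded form. Note the old version referenced `signal.SIGQUIT` without importing `signal`; the patch adds the import. Assuming the hunks apply as recorded, the patched function reads roughly as follows (a decoded sketch, not copied verbatim from the repository):

```python
import logging
import os
import psutil
import signal

from pylib import android_commands


def _KillWebServers():
  # Escalate through progressively stronger signals instead of retrying blindly.
  for s in [signal.SIGTERM, signal.SIGINT, signal.SIGQUIT, signal.SIGKILL]:
    signalled = []
    for server in ['lighttpd', 'webpagereplay']:
      for p in psutil.process_iter():
        try:
          # Match on the full command line instead of the process name.
          if not server in ' '.join(p.cmdline):
            continue
          logging.info('Killing %s %s %s', s, server, p.pid)
          p.send_signal(s)
          signalled.append(p)
        except Exception as e:
          logging.warning('Failed killing %s %s %s', server, p.pid, e)
    # Give each signalled process a second to exit before escalating.
    for p in signalled:
      try:
        p.wait(1)
      except Exception as e:
        logging.warning('Failed waiting for %s to die. %s', p.pid, e)
```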
d77cb643c7762401209f1f9d9693ee352e6672cb
|
Create mqttEampleRemoteBrain.py
|
home/kyleclinton/mqttEampleRemoteBrain.py
|
home/kyleclinton/mqttEampleRemoteBrain.py
|
Python
| 0.000013 |
@@ -0,0 +1,942 @@
+from java.lang import String%0Afrom time import sleep%0Api = Runtime.createAndStart(%22pi%22,%22RasPi%22)%0A%0A#Load Pub/Sub Service (MQTT)%0Aexecfile(%22../py_scripts/mqttPubSubConfig.py%22)%0A%0A%0A# Add in controller for head, neck and antenna servos SHOULD be using i2c 16 servo controller%0A#Load Juniors mouth!%0Aexecfile(%22../py_scripts/juniors_voice.py%22)%0A%0A#Load Juniors Eyes!%0Aexecfile(%22../py_scripts/juniors_eyes_4.py%22)%0A%0A#####for testing%0Amouth.speakBlocking(%22Testing 1, 2, 3%22)%0A%0A%0AdrawEyes()%0Asleep(2)%0AdrawClosedEyes()%0Asleep(1)%0AdrawEyes()%0A%0Amqtt.subscribe(%22myrobotlab/speaking%22, 0)%0A#mqtt.publish(%22hello myrobotlab world%22)%0Apython.subscribe(%22mqtt%22, %22publishMqttMsgString%22)%0A# or mqtt.addListener(%22publishMqttMsgString%22, %22python%22)%0A %0A# MQTT call-back%0A# publishMqttMsgString --%3E onMqttMsgString(msg)%0Adef onMqttMsgString(msg):%0A # print %22message : %22, msg%0A mouth.speakBlocking(msg%5B0%5D)%0A print %22message : %22,msg%5B0%5D%0A print %22topic : %22,msg%5B1%5D%0A%0A%0A%0Amqtt.publish(%22What is your name?%22)%0A
|
|
3fb4d7b630fb7a4b34dcc4e1b72947e61f73a80f
|
Create script to dowload requisite test urls.
|
TestData/download_test_data.py
|
TestData/download_test_data.py
|
Python
| 0 |
@@ -0,0 +1,2125 @@
+def set_test_db():%0A from sys import path%0A path.insert(0, %22..%22)%0A from MyEdgarDb import get_list_sec_filings, get_cik_ticker_lookup_db, lookup_cik_ticker%0A get_list_sec_filings (7, 'test_idx.db')%0A get_cik_ticker_lookup_db ('test_idx.db')%0A%0Adef download_test_data():%0A import sqlite3%0A from datetime import datetime%0A import pandas as pd%0A testDir = %22..%5C%5CTestData%5C%5C%22%0A testTickers = %7B%0A %22AAPL%22: %5Bdatetime(2014, 8, 1), datetime(2018, 8, 1)%5D,%0A %22ACLS%22: %5Bdatetime(2014, 8, 31), datetime(2018, 8, 31)%5D,%0A %22ADSK%22: %5Bdatetime(2014, 4, 15), datetime(2018, 4, 15)%5D,%0A %22ALEX%22: %5Bdatetime(2015, 12, 31), datetime(2019, 12, 31)%5D,%0A %22MMM%22: %5Bdatetime(2015, 7, 1), datetime(2019, 7, 1)%5D,%0A %22NRP%22: %5Bdatetime(2015, 12, 31), datetime(2019, 12, 31)%5D,%0A %22NVDA%22: %5Bdatetime(2015, 12, 31), datetime(2019, 12, 31)%5D%0A %7D%0A conn3 = sqlite3.connect('test_idx.db')%0A cursor = conn3.cursor()%0A for ticker in testTickers:%0A #cursor.execute('''SELECT * FROM idx WHERE Symbol=?;''', (%22ABBV%22,))%0A cursor.execute('''SELECT * FROM cik_ticker_name WHERE ticker=?;''',(ticker,))%0A res = cursor.fetchall()%0A print(res)%0A cursor.execute('''SELECT * FROM idx WHERE cik=?;''', (res%5B0%5D%5B0%5D,))%0A recs = cursor.fetchall()%0A print(len(recs))%0A names = list(map(lambda x: x%5B0%5D, cursor.description))%0A #print(names)%0A df = pd.DataFrame(data=recs, columns=names)%0A df%5B'date'%5D = pd.to_datetime(df%5B'date'%5D)%0A beginDate = testTickers%5Bticker%5D%5B0%5D%0A endDate = testTickers%5Bticker%5D%5B1%5D%0A df1 = df%5B(df.date %3E= beginDate) & (df.date %3C= endDate)%5D%0A ## Sort by date in descending order (most recent is first)%0A df1.sort_values(by=%5B'date'%5D, inplace=True, ascending=False)%0A df1%5Bdf1.type == %2210-Q%22%5D.to_csv(testDir+ticker.lower()+%22_all_10qs.csv%22, index=None)%0A df1%5Bdf1.type == %2210-K%22%5D.to_csv(testDir+ticker.lower()+%22_all_10ks.csv%22, index=None)%0A %0A conn3.close()%0A %0A%0Aif __name__ == %22__main__%22:%0A #set_test_db()%0A download_test_data()
|
|
df8206b01eb2298651099c5e701d269a0e6cd8c6
|
add test case for tuple attribute error #35
|
test/test_flake8.py
|
test/test_flake8.py
|
Python
| 0 |
@@ -0,0 +1,233 @@
+import subprocess%0A%0A%0Adef test_call_flake8(tmpdir):%0A tmp = tmpdir.join('tmp.py')%0A tmp.write('')%0A output = subprocess.check_output(%0A %5B'flake8', str(tmp)%5D,%0A stderr=subprocess.STDOUT,%0A )%0A assert output == b''%0A
|
|
ecae1fa205c88d1d503663c5fbec80a1943146ad
|
add resources comparator
|
pynodegl-utils/pynodegl_utils/tests/cmp_resources.py
|
pynodegl-utils/pynodegl_utils/tests/cmp_resources.py
|
Python
| 0.000001 |
@@ -0,0 +1,2489 @@
+#!/usr/bin/env python%0A#%0A# Copyright 2020 GoPro Inc.%0A#%0A# Licensed to the Apache Software Foundation (ASF) under one%0A# or more contributor license agreements. See the NOTICE file%0A# distributed with this work for additional information%0A# regarding copyright ownership. The ASF licenses this file%0A# to you under the Apache License, Version 2.0 (the%0A# %22License%22); you may not use this file except in compliance%0A# with the License. You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing,%0A# software distributed under the License is distributed on an%0A# %22AS IS%22 BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY%0A# KIND, either express or implied. See the License for the%0A# specific language governing permissions and limitations%0A# under the License.%0A#%0A%0Aimport os%0Aimport csv%0Aimport tempfile%0Aimport pynodegl as ngl%0A%0Afrom .cmp import CompareSceneBase, get_test_decorator%0A%0A%0A_COLS = (%0A 'Textures memory',%0A 'Buffers count',%0A 'Buffers total',%0A 'Blocks count',%0A 'Blocks total',%0A 'Medias count',%0A 'Medias total',%0A 'Textures count',%0A 'Textures total',%0A 'Computes',%0A 'GraphicCfgs',%0A 'Renders',%0A 'RTTs',%0A)%0A%0Aclass _CompareResources(CompareSceneBase):%0A%0A def __init__(self, scene_func, columns=_COLS, **kwargs):%0A super(_CompareResources, self).__init__(scene_func, width=320, height=240,%0A scene_wrap=self._scene_wrap,%0A **kwargs)%0A self._columns = columns%0A%0A def _scene_wrap(self, scene):%0A # We can't use NamedTemporaryFile because we may not be able to open it%0A # twice on some systems%0A fd, self._csvfile = tempfile.mkstemp(suffix='.csv', prefix='ngl-test-resources-')%0A os.close(fd)%0A return ngl.HUD(scene, export_filename=self._csvfile)%0A%0A def get_out_data(self):%0A for frame in self.render_frames():%0A pass%0A%0A # filter columns%0A with open(self._csvfile) as csvfile:%0A reader = csv.DictReader(csvfile)%0A data = %5Bself._columns%5D%0A for row in reader:%0A data.append(%5Bv for k, v in row.items() if k in self._columns%5D)%0A%0A # rely on base string diff%0A ret = ''%0A for row in data:%0A ret += ','.join(row) + '%5Cn'%0A%0A os.remove(self._csvfile)%0A%0A return ret%0A%0A%0Atest_resources = get_test_decorator(_CompareResources)%0A
|
|
885aac79c2e31fc74dc143fc2527e02c2c0a8941
|
add duckduckgo crawler
|
scholarly_citation_finder/api/crawler/Duckduckgo.py
|
scholarly_citation_finder/api/crawler/Duckduckgo.py
|
Python
| 0.999999 |
@@ -0,0 +1,1829 @@
+#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0Aimport requests%0Afrom bs4 import BeautifulSoup%0A%0Aclass Duckduckgo:%0A %0A API_URL = 'https://duckduckgo.com/html/'%0A CSS_RESULT_ELEMENT = 'a'%0A CSS_RESULT_ELEMENT_CLASS = 'large'%0A CSS_RESULT_TYPE_ELEMENT = 'span'%0A CSS_RESULT_TYPE_ELEMENT_CLASS = 'result__type'%0A %0A def __init__(self):%0A pass%0A %0A %0A def query(self, keywords, filetype='pdf'):%0A '''%0A %0A %0A :see: https://duck.co/help/results/syntax%0A :see: https://duckduckgo.com/params%0A :param keywords:%0A :param filetype:%0A '''%0A %0A if filetype:%0A keywords = 'filetype:%7B%7D %7B%7D'.format(filetype, keywords)%0A %0A r = requests.get(self.API_URL, %7B'q': keywords%7D)%0A if r.status_code != 200:%0A raise Exception('Expected response code 200, but is %7B%7D'.format(r.status_code))%0A %0A self.__get_links(r.text)%0A %0A def __get_links(self, html):%0A soup = BeautifulSoup(html, 'lxml')%0A for link in soup.findAll(self.CSS_RESULT_ELEMENT, class_=self.CSS_RESULT_ELEMENT_CLASS):%0A url, title, type = self.__get_link_items(link)%0A print('%25s: %25s %22%25s%22' %25 (type, url, title))%0A %0A def __get_link_items(self, html_a_element):%0A url = html_a_element.get('href')%0A title = html_a_element.text%0A if url:%0A soup = BeautifulSoup(str(html_a_element), 'lxml')%0A type = soup.find(self.CSS_RESULT_TYPE_ELEMENT, class_=self.CSS_RESULT_TYPE_ELEMENT_CLASS)%0A if type:%0A type = type.text%0A return url, title, type%0A %0Aif __name__ == '__main__':%0A searchengine = Duckduckgo()%0A searchengine.query('kernel completion for learning consensus support vector machines in bandwidth limited sensor networks')%0A
|
|
60f01c055405fea9e3672821a1188774f7517707
|
add 140
|
vol3/140.py
|
vol3/140.py
|
Python
| 0.999989 |
@@ -0,0 +1,191 @@
+if __name__ == %22__main__%22:%0A L = 30%0A sqrt5 = 5 ** 0.5%0A f = %5B7, 14, 50, 97%5D%0A for i in range(L - 4):%0A f.append(7 * f%5B-2%5D - f%5B-4%5D)%0A print sum(int(x / sqrt5) - 1 for x in f)%0A
|
|
7d6b04bc60270d357fdf9401174ece249f9f3568
|
add 153
|
vol4/153.py
|
vol4/153.py
|
Python
| 0.999996 |
@@ -0,0 +1,540 @@
+import math%0Aimport fractions%0A%0Aif __name__ == %22__main__%22:%0A L = 10 ** 8%0A ans = 0%0A for i in xrange(1, L + 1):%0A ans += (L / i) * i%0A if i * i %3C L:%0A j = 1%0A while j %3C= i:%0A if fractions.gcd(i, j) == 1:%0A div = i * i + j * j%0A k = 1%0A v = 2 * i if i == j else 2 * (i + j)%0A while div * k %3C= L:%0A ans += (L / (div * k)) * k * v%0A k += 1%0A j += 1%0A print ans%0A
|
|
f6f12b1194fde3fc4dc355535ca88f472962d3a3
|
add camera color histogram
|
python/ocv4/camera_color_histogram.py
|
python/ocv4/camera_color_histogram.py
|
Python
| 0 |
@@ -0,0 +1,1631 @@
+#!/usr/bin/env python%0A%0A'''%0AVideo histogram sample to show live histogram of video%0A%0AKeys:%0A ESC - exit%0A%0A'''%0A%0A# Python 2/3 compatibility%0Afrom __future__ import print_function%0A%0Aimport numpy as np%0Aimport cv2 as cv%0A%0A# built-in modules%0Aimport sys%0A%0A# local modules%0Aimport video%0A%0Aclass App():%0A%0A def set_scale(self, val):%0A self.hist_scale = val%0A%0A def run(self):%0A hsv_map = np.zeros((180, 256, 3), np.uint8)%0A h, s = np.indices(hsv_map.shape%5B:2%5D)%0A hsv_map%5B:,:,0%5D = h%0A hsv_map%5B:,:,1%5D = s%0A hsv_map%5B:,:,2%5D = 255%0A hsv_map = cv.cvtColor(hsv_map, cv.COLOR_HSV2BGR)%0A cv.imshow('hsv_map', hsv_map)%0A%0A cv.namedWindow('hist', 0)%0A self.hist_scale = 10%0A%0A cv.createTrackbar('scale', 'hist', self.hist_scale, 32, self.set_scale)%0A%0A try:%0A fn = sys.argv%5B1%5D%0A except:%0A fn = 0%0A cam = video.create_capture(fn, fallback='synth:bg=baboon.jpg:class=chess:noise=0.05')%0A%0A while True:%0A flag, frame = cam.read()%0A cv.imshow('camera', frame)%0A%0A small = cv.pyrDown(frame)%0A%0A hsv = cv.cvtColor(small, cv.COLOR_BGR2HSV)%0A dark = hsv%5B...,2%5D %3C 32%0A hsv%5Bdark%5D = 0%0A h = cv.calcHist(%5Bhsv%5D, %5B0, 1%5D, None, %5B180, 256%5D, %5B0, 180, 0, 256%5D)%0A%0A h = np.clip(h*0.005*self.hist_scale, 0, 1)%0A vis = hsv_map*h%5B:,:,np.newaxis%5D / 255.0%0A cv.imshow('hist', vis)%0A%0A ch = cv.waitKey(1)%0A if ch == 27:%0A break%0A%0A print('Done')%0A%0A%0Aif __name__ == '__main__':%0A print(__doc__)%0A App().run()%0A cv.destroyAllWindows()%0A
|
|
3ebb2731d6389170e0bef0dab66dc7c4ab41152e
|
Add a unit-test for thread_pool.py.
|
thread_pool_test.py
|
thread_pool_test.py
|
Python
| 0 |
@@ -0,0 +1,706 @@
+import thread_pool%0Aimport unittest%0Afrom six.moves import queue%0A%0Aclass TestThreadPool(unittest.TestCase):%0A%0A def _producer_thread(self, results):%0A for i in range(10):%0A results.put(i)%0A%0A def _consumer_thread(self, results):%0A for i in range(10):%0A self.assertEqual(results.get(), i)%0A%0A def testContextManager(self):%0A results = queue.Queue(maxsize=1)%0A with thread_pool.ThreadPool(2) as pool:%0A pool.add(self._producer_thread, results)%0A pool.add(self._consumer_thread, results)%0A%0A def testJoin(self):%0A results = queue.Queue(maxsize=1)%0A pool = thread_pool.ThreadPool(2)%0A pool.add(self._producer_thread, results)%0A pool.add(self._consumer_thread, results)%0A pool.join()%0A
|
|
f6d4116ed5122868dbc10bf41dfc44053d0a0edf
|
write annotation parser for PASCAL VOC 2006
|
src/pascal_utils.py
|
src/pascal_utils.py
|
Python
| 0 |
@@ -0,0 +1,1806 @@
+import re%0A%0A%0Adef which_one(str, arr):%0A for a in arr:%0A if a in str:%0A return a%0A return ''%0A%0A%0Aclass VOC2006AnnotationParser(object):%0A SKIP_CHARACTER = '#'%0A OBJECT_SUMMARY = 'Objects with ground truth'%0A PREPEND = 'PAS'%0A TRUNC = 'Trunc'%0A DIFFICULT = 'Difficult'%0A CLASSES = %5B'bicycle', 'bus', 'car', 'motorbike', 'cat', 'cow', 'dog', 'horse', 'sheep', 'person'%5D%0A VIEWS = %5B'Frontal', 'Rear', 'Left', 'Right'%5D%0A RE_OBJECT_DEF = r%22Original label for object (%5Cd+) %5C%22(%5CS+)%5C%22 : %5C%22(%5CS+)%5C%22%22%0A RE_OBJECT_BB = r%22Bounding box for object %25d %5C%22%25s%5C%22 %5C(Xmin, Ymin%5C) - %5C(Xmax, Ymax%5C) : %5C((%5Cd+), (%5Cd+)%5C) - %5C((%5Cd+), (%5Cd+)%5C)%22%0A%0A def __init__(self, annotation_file_contet):%0A self.annotation_file_contet = annotation_file_contet%0A%0A def get_objects(self, trunc=True, difficult=False):%0A objects = %5B%5D%0A for match in re.finditer(self.RE_OBJECT_DEF, self.annotation_file_contet):%0A obj_index, obj_label, original_obj_label = match.groups()%0A obj_index = int(obj_index)%0A xmin, ymin, xmax, ymax = re.search(self.RE_OBJECT_BB %25 (obj_index, obj_label), self.annotation_file_contet).groups()%0A xmin, ymin, xmax, ymax = int(xmin), int(ymin), int(xmax), int(ymax)%0A%0A if not trunc and self.TRUNC in original_obj_label:%0A continue%0A if not difficult and self.DIFFICULT in original_obj_label:%0A continue%0A objects.append(%7B'ind': obj_index, 'label': obj_label, 'original_label': original_obj_label, 'xmin': xmin, 'ymin': ymin, 'xmax': xmax, 'ymax': ymax, 'trunc': self.TRUNC in original_obj_label, 'difficult': self.DIFFICULT in original_obj_label, 'class': which_one(original_obj_label, self.CLASSES), 'view': which_one(original_obj_label, self.VIEWS)%7D)%0A%0A return objects%0A
|
|
c6b1cbddec20fae0daeece0ea859a7227e16e3bf
|
Add primitive name space registry for event types
|
common/shregistry.py
|
common/shregistry.py
|
Python
| 0.000001 |
@@ -0,0 +1,2558 @@
+#!/usr/local/bin/python3 -u%0A__author__ = 'Oliver Ratzesberger %3Chttps://github.com/fxstein%3E'%0A__copyright__ = 'Copyright (C) 2015 Oliver Ratzesberger'%0A__license__ = 'Apache License, Version 2.0'%0A%0A# ALL event types need to be registered here%0A# ATTENTION: type must be unique to avoid event name space polution%0A%0A# Registry structure:%0A# key = unique identifier of event type%0A# name = unique namespace of event should be same as key in most cases%0A# class = class of events for grouping and statistics%0A# desc = description of event types%0A# tags = list of freeform tags%0A#%0A# Notes:%0A# keys and types should be kept short and shpuld be kept constant for the life%0A# of the project%0A%0AshRegistry = %7B%0A 'autelis' : %7B'name': 'autelis', 'class': 'Pool', 'desc': 'Autelis Pool Controller', 'tags': %5B'Pool', 'Autelis', 'Pentair'%5D%7D,%0A 'eagle' : %7B'name': 'eagle', 'class': 'Power', 'desc': 'Rainforest Eagle Gateway', 'tags':%5B'Rinaforest', 'Eagle', 'Power', 'Electricity'%5D%7D,%0A 'gfinance' : %7B'name': 'gfinance', 'class': 'Finance', 'desc': 'Google Finance', 'tags': %5B'Google', 'Finance', 'Stock', 'Currency', 'Index'%5D%7D,%0A 'isy' : %7B'name': 'isy', 'class': 'Automation', 'desc': 'ISY994 Home Automation Controller', 'tags':%5B'ISY', 'Insteon', 'X10'%5D%7D,%0A 'nesttherm': %7B'name': 'nesttherm','class': 'Climate', 'desc': 'Nest Thermostat', 'tags':%5B'Nest', 'Thermostat'%5D%7D,%0A 'nestfire' : %7B'name': 'nestfire', 'class': 'Protection', 'desc': 'Nest Fire & CO Alarm', 'tags':%5B'Nest', 'Protect', 'Fire Alarm', 'CO Alarm'%5D%7D,%0A 'netatmo' : %7B'name': 'netatmo', 'class': 'Climate', 'desc': 'Netatmo Climate Station', 'tags':%5B'Climate', 'Indoor', 'Outdoor'%5D%7D,%0A 'twitter' : %7B'name': 'twitter', 'class': 'Social', 'desc': 'Twitter Feed', 'tags':%5B'Twitter', 'Social', 'Tweet'%5D%7D,%0A 'usgsquake': %7B'name': 'usgsquake','class': 'Geological', 'desc': 'USGS Earthquakes', 'tags':%5B'USGS', 'Earthquake'%5D%7D,%0A 'zillow' : %7B'name': 'zillow', 'class': 'Finance', 'desc': 'Zillow Home Valuation', 'tags':%5B'Zillow', 'House', 'Home', 'Value', 'Fiance'%5D%7D,%0A%0A # Sentient Home Internal Event Types%0A 'tracer' : %7B'name': 'tracer', 'class': 'Internal', 'name': 'Sentient Home Periodic Tracer', 'tags': %5B'Sentient Home', 'Tracer'%5D%7D,%0A 'loadtest' : %7B'name': 'loadtest', 'class': 'Internal', 'name': 'Sentient Home Load Test Event Generator', 'tags': %5B'Sentient Home', 'Test'%5D%7D,%0A%7D%0A%0A#%0A# Do nothing%0A# (syntax check)%0A#%0Aif __name__ == %22__main__%22:%0A import __main__%0A print(__main__.__file__)%0A%0A print(%22syntax ok%22)%0A%0A exit(0)%0A
|
|
167872381e16090b1b47184a1a80bbe948d5fd91
|
Add test and test_suite function to svm module
|
scikits/learn/machine/svm/__init__.py
|
scikits/learn/machine/svm/__init__.py
|
"""
A Support Vector Machine, this module defines the following classes:
- `LibSvmCClassificationModel`, a model for C-SV classification
- `LibSvmNuClassificationModel`, a model for nu-SV classification
- `LibSvmEpsilonRegressionModel`, a model for epsilon-SV regression
- `LibSvmNuRegressionModel`, a model for nu-SV regression
- `LibSvmOneClassModel`, a model for distribution estimation
(one-class SVM)
Kernel classes:
- `LinearKernel`, a linear kernel
- `PolynomialKernel`, a polynomial kernel
- `RBFKernel`, a radial basis function kernel
- `SigmoidKernel`, a sigmoid kernel
- `CustomKernel`, a kernel that wraps any callable
Dataset classes:
- `LibSvmClassificationDataSet`, a dataset for training classification
models
- `LibSvmRegressionDataSet`, a dataset for training regression models
- `LibSvmOneClassDataSet`, a dataset for training distribution
estimation (one-class SVM) models
- `LibSvmTestDataSet`, a dataset for testing with any model
Data type classes:
- `svm_node_dtype`, the libsvm data type for its arrays
How To Use This Module
======================
(See the individual classes, methods, and attributes for details.)
1. Import it: ``import svm`` or ``from svm import ...``.
2. Create a training dataset for your problem::
traindata = LibSvmClassificationDataSet(labels, x)
traindata = LibSvmRegressionDataSet(y, x)
traindata = LibSvmOneClassDataSet(x)
where x is sequence of NumPy arrays containing scalars or
svm_node_dtype entries.
3. Create a test dataset::
testdata = LibSvmTestDataSet(u)
4. Create a model and fit it to the training data::
model = LibSvmCClassificationModel(kernel)
results = model.fit(traindata)
5. Use the results to make predictions with the test data::
p = results.predict(testdata)
v = results.predict_values(testdata)
"""
from classification import *
from regression import *
from oneclass import *
from dataset import *
from kernel import *
from predict import *
|
Python
| 0.000006 |
@@ -1988,16 +1988,367 @@
redict import *%0A
+%0Afrom numpy.testing import NumpyTest%0Atest = NumpyTest().test%0A%0Adef test_suite(*args):%0A # XXX: this is to avoid recursive call to itself. This is an horrible hack,%0A # I have no idea why infinite recursion happens otherwise.%0A if len(args) %3E 0:%0A import unittest%0A return unittest.TestSuite()%0A return NumpyTest().test(level = -10)%0A
|
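Decoded from the hunk above, the lines appended after `from predict import *` are:

```python
from numpy.testing import NumpyTest
test = NumpyTest().test

def test_suite(*args):
    # XXX: this is to avoid recursive call to itself. This is an horrible hack,
    # I have no idea why infinite recursion happens otherwise.
    if len(args) > 0:
        import unittest
        return unittest.TestSuite()
    return NumpyTest().test(level = -10)
```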
5a27a8e0c7ae2e0cef787db107305251d096d81f
|
Add test runner.
|
lib/rapidsms/tests/runtests.py
|
lib/rapidsms/tests/runtests.py
|
Python
| 0 |
@@ -0,0 +1,97 @@
+#!/usr/bin/python %0A%0Afrom test_component import *%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()%0A
|
|
2f9152d5cc0ad4123522b054dd2b6458c602b1fd
|
add script for dataset
|
moses/scripts/download_dataset.py
|
moses/scripts/download_dataset.py
|
Python
| 0.000001 |
@@ -0,0 +1,2144 @@
+import argparse%0Aimport os%0Aimport pandas as pd%0Afrom urllib import request%0A%0A%0Adef get_parser():%0A parser = argparse.ArgumentParser()%0A%0A parser.add_argument('--output_dir', type=str, default='./data',%0A help='Directory for downloaded dataset')%0A parser.add_argument('--dataset_url', type=str, default='https://media.githubusercontent.com/media/neuromation/mnist4molecules/master-fixes/data/dataset.csv?token=AORf9_1XuBXJjCmYV--t6f1Ui5aVe-WEks5bvLClwA%253D%253D',%0A help='URL of dataset')%0A parser.add_argument('--no_subset', action='store_true',%0A help='Do not create subsets for training and testing')%0A parser.add_argument('--train_size', type=int, default=200000,%0A help='Size of training dataset')%0A parser.add_argument('--test_size', type=int, default=10000,%0A help='Size of testing dataset')%0A parser.add_argument('--seed', type=int, default=0,%0A help='Random state')%0A%0A return parser%0A%0A%0Adef main(config):%0A if not os.path.exists(config.output_dir):%0A os.mkdir(config.output_dir)%0A%0A dataset_path = os.path.join(config.output_dir, 'dataset.csv')%0A request.urlretrieve(config.dataset_url, dataset_path)%0A%0A if config.no_subset:%0A return%0A%0A data = pd.read_csv(dataset_path)%0A%0A train_data = data%5Bdata%5B'SPLIT'%5D == 'train'%5D%0A test_data = data%5Bdata%5B'SPLIT'%5D == 'test'%5D%0A test_scaffolds_data = data%5Bdata%5B'SPLIT'%5D == 'test_scaffolds'%5D%0A%0A train_data = train_data.sample(config.train_size, random_state=config.seed)%0A test_data = test_data.sample(config.test_size, random_state=config.seed)%0A test_scaffolds_data = test_scaffolds_data.sample(config.test_size, random_state=config.seed)%0A%0A train_data.to_csv(os.path.join(config.output_dir, 'train.csv'), index=False)%0A test_data.to_csv(os.path.join(config.output_dir, 'test.csv'), index=False)%0A test_scaffolds_data.to_csv(os.path.join(config.output_dir, 'test_scaffolds.csv'), index=False)%0A%0A%0Aif __name__ == '__main__':%0A parser = get_parser()%0A config = parser.parse_known_args()%5B0%5D%0A main(config)
|
|
19712e8e7b9423d4cb4bb22c37c7d8d2ea0559c5
|
Add example to show listing of USB devices
|
examples/list-usb.py
|
examples/list-usb.py
|
Python
| 0 |
@@ -0,0 +1,1316 @@
+#!/usr/bin/env python2%0A#%0A# This file is Public Domain and provided only for documentation purposes.%0A#%0A# Run : python2 ./list-usb.py%0A#%0A# Note: This will happily run with Python3 too, I just picked a common baseline%0A#%0A%0Aimport gi%0Agi.require_version('Ldm', '0.1')%0Afrom gi.repository import Ldm, GObject%0A%0Aclass PretendyPlugin(Ldm.Plugin):%0A%0A # Not really needed but good practice%0A __gtype_name__ = %22PretendyPlugin%22%0A%0A def __init__(self):%0A Ldm.Plugin.__init__(self)%0A%0A def do_get_provider(self, device):%0A %22%22%22 Demonstrate basic matching with custom plugins %22%22%22%0A if not device.has_type(Ldm.DeviceType.AUDIO):%0A return None%0A return Ldm.Provider.new(self, device, %22pretendy-package%22)%0A%0Adef main():%0A manager = Ldm.Manager()%0A manager.add_plugin(PretendyPlugin())%0A%0A for device in manager.get_devices(Ldm.DeviceType.USB):%0A # Use gobject properties or methods%0A print(%22USB Device: %7B%7D %7B%7D%22.format(%0A device.props.vendor,%0A device.get_name()))%0A%0A if device.has_type(Ldm.DeviceType.HID):%0A print(%22%5CtHID Device!%22)%0A%0A for provider in manager.get_providers(device):%0A plugin = provider.get_plugin()%0A print(%22%5CtSuggested package: %7B%7D%22.format(provider.get_package()))%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
b56690d046021e036b5b15c484d86c92f3519600
|
Add partial evaluation tool to replace functools module for python < 2.5
|
scikits/learn/common/myfunctools.py
|
scikits/learn/common/myfunctools.py
|
Python
| 0.000009 |
@@ -0,0 +1,548 @@
+# Last Change: Mon Aug 20 01:00 PM 2007 J%0A# Implement partial application (should only be used if functools is not%0A# available (eg python %3C 2.5)%0A%0Aclass partial:%0A def __init__(self, fun, *args, **kwargs):%0A self.fun = fun%0A self.pending = args%5B:%5D%0A self.kwargs = kwargs.copy()%0A%0A def __call__(self, *args, **kwargs):%0A if kwargs and self.kwargs:%0A kw = self.kwargs.copy()%0A kw.update(kwargs)%0A else:%0A kw = kwargs or self.kwargs%0A%0A return self.fun(*(self.pending + args), **kw)%0A
|
|
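The recorded file implements partial application for Pythons predating `functools` (< 2.5). A hypothetical usage sketch of the decoded `partial` class; the `add` function below is illustrative, not part of the record:

```python
def add(a, b, c=0):
    return a + b + c

add_one_two = partial(add, 1, 2)  # pre-binds the positional arguments a=1, b=2
print(add_one_two())              # 3
print(add_one_two(c=10))          # 13: call-time keywords merge with the bound ones
```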
6219211d529d2dd58693ea93e6b799fd36259fee
|
Add tests
|
djangae/tests/test_async_multi_query.py
|
djangae/tests/test_async_multi_query.py
|
Python
| 0.000001 |
@@ -0,0 +1,1471 @@
+from django.test import override_settings%0Afrom django.db import NotSupportedError%0Afrom django.db import models%0Afrom djangae.test import TestCase%0A%0A%0Aclass MultiQueryModel(models.Model):%0A field1 = models.IntegerField()%0A%0A%0Aclass AsyncMultiQueryTest(TestCase):%0A %22%22%22%0A Specific tests for multiquery%0A %22%22%22%0A%0A def test_hundred_or(self):%0A for i in range(100):%0A MultiQueryModel.objects.create(field1=i)%0A%0A self.assertEqual(%0A len(MultiQueryModel.objects.filter(field1__in=list(range(100)))),%0A 100%0A )%0A%0A self.assertEqual(%0A MultiQueryModel.objects.filter(field1__in=list(range(100))).count(),%0A 100%0A )%0A%0A self.assertItemsEqual(%0A MultiQueryModel.objects.filter(%0A field1__in=list(range(100))%0A ).values_list(%22field1%22, flat=True),%0A list(range(100))%0A )%0A%0A self.assertItemsEqual(%0A MultiQueryModel.objects.filter(%0A field1__in=list(range(100))%0A ).order_by(%22-field1%22).values_list(%22field1%22, flat=True),%0A list(range(100))%5B::-1%5D%0A )%0A%0A @override_settings(DJANGAE_MAX_QUERY_BRANCHES=10)%0A def test_max_limit_enforced(self):%0A for i in range(11):%0A MultiQueryModel.objects.create(field1=i)%0A%0A self.assertRaises(NotSupportedError,%0A lambda: list(MultiQueryModel.objects.filter(%0A field1__in=range(11)%0A ))%0A )%0A
|
|
51e04ff17bccb4b71b8d5db4057a782fd2f8520c
|
Add script to synthesize all uploaded files. Patch by Dan Callahan.
|
tools/touch_all_files.py
|
tools/touch_all_files.py
|
Python
| 0 |
@@ -0,0 +1,1039 @@
+#!/usr/bin/python%0A%22%22%22%0AThis script touches all files known to the database, creating a skeletal%0Amirror for local development.%0A%22%22%22%0A%0Aimport sys, os%0Aimport store%0A%0Adef get_paths(cursor, prefix=None):%0A store.safe_execute(cursor, %22SELECT python_version, name, filename FROM release_files%22)%0A%0A for type, name, filename in cursor.fetchall():%0A yield os.path.join(prefix, type, name%5B0%5D, name, filename)%0A%0Aif __name__ == '__main__':%0A import config%0A try:%0A config = config.Config(sys.argv%5B1%5D)%0A except IndexError:%0A print %22Usage: touch_all_files.py config.ini%22%0A raise SystemExit%0A%0A datastore = store.Store(config)%0A datastore.open()%0A cursor = datastore.get_cursor()%0A prefix = config.database_files_dir%0A%0A for path in get_paths(cursor, prefix):%0A dir = os.path.dirname(path)%0A if not os.path.exists(dir):%0A print %22Creating directory %25s%22 %25 dir%0A os.makedirs(dir)%0A if not os.path.exists(path):%0A print %22Creating file %25s%22 %25 path%0A open(path, %22a%22)%0A
|
|
e8b6c596a7627d1c4f3f6915236317b0730210a2
|
Rename ds_tree_max_min_depth.py to ds_tree_balanced_bt.py
|
leetcode/ds_tree_balanced_bt.py
|
leetcode/ds_tree_balanced_bt.py
|
Python
| 0.998882 |
@@ -0,0 +1,1020 @@
+# @file Balanced Binary Tree%0A# @brief Given a binary tree, determine if it is height-balanced.%0A%0A# https://leetcode.com/problems/balanced-binary-tree/%0A%0A'''%0AGiven a binary tree, determine if it is height-balanced.%0A%0AFor this problem, a height-balanced binary tree is defined%0Aas a binary tree in which the depth of the two subtrees of every node never differ by more than 1.%0A'''%0A%0A#Given a BT node, find the depth (1-based depth)%0Adef maxDepth(root):%0A if (root == None): return 0%0A elif (root.left == None and root.right == None): return 1%0A else: return 1 + max(maxDepth(root.left), maxDepth(root.right))%0A%0Adef isBalanced(self, root):%0A if(root == None): return True%0A elif abs(maxDepth(root.left) - maxDepth(root.right)) %3E 1: return False%0A elif self.isBalanced(root.left) == False: return False%0A else: return self.isBalanced(root.right)%0A %0A
|
|
484e50b34c06785f1b1b48da5502f79ee5a2357b
|
add factories.py
|
tx_salaries/factories.py
|
tx_salaries/factories.py
|
Python
| 0.000001 |
@@ -0,0 +1,1310 @@
+import factory%0Afrom tx_people.models import Organization, Membership, Person, Post%0Afrom tx_salaries.models import Employee, EmployeeTitle, CompensationType, OrganizationStats%0A%0A%0A# tx_people factories%0Aclass OrganizationFactory(factory.DjangoModelFactory):%0A FACTORY_FOR = Organization%0A%0A%0Aclass PersonFactory(factory.DjangoModelFactory):%0A FACTORY_FOR = Person%0A%0A%0Aclass PostFactory(factory.DjangoModelFactory):%0A FACTORY_FOR = Post%0A organization = factory.SubFactory(OrganizationFactory)%0A%0A%0Aclass MembershipFactory(factory.DjangoModelFactory):%0A FACTORY_FOR = Membership%0A person = factory.SubFactory(PersonFactory)%0A organization = factory.SubFactory(OrganizationFactory)%0A post = factory.SubFactory(PostFactory)%0A%0A%0A# tx_salaries factories%0Aclass CompensationTypeFactory(factory.DjangoModelFactory):%0A FACTORY_FOR = CompensationType%0A%0A%0Aclass EmployeeTitleFactory(factory.DjangoModelFactory):%0A FACTORY_FOR = EmployeeTitle%0A%0A%0Aclass EmployeeFactory(factory.DjangoModelFactory):%0A FACTORY_FOR = Employee%0A position = factory.SubFactory(MembershipFactory)%0A compensation_type = factory.SubFactory(CompensationTypeFactory)%0A title = factory.SubFactory(EmployeeTitleFactory)%0A compensation = 1337%0A%0A%0Aclass OrganizationStatsFactory(factory.DjangoModelFactory):%0A FACTORY_FOR = OrganizationStats
|
|
92af518e15af3d70da98302d94eefb9e25c0a771
|
Raise NotImplemented on Metric base class functions
|
metricsapp/models/base.py
|
metricsapp/models/base.py
|
import json

from django.db import models
from django.utils import timezone
from ..data import result_data
from jsonfield import JSONField
from model_utils.managers import InheritanceManager
import requests

from ..settings import conf


class Category(models.Model):
    name = models.CharField(max_length=50)

    def rate(self, sprint, team):
        return Metric.rate(self.metric_set.filter(active=True).select_subclasses(), sprint, team)

    def __str__(self):
        return self.name

    class Meta:
        ordering = ('name',)
        verbose_name_plural = "categories"


class Metric(models.Model):
    @classmethod
    def rate(cls, list_of_metrics, sprint, team):
        rating = 0
        max_rating = 0
        for metric in list_of_metrics:
            metric_results = metric.get_results(sprint, team)
            rating += metric_results['score'] * metric.severity
            max_rating += 100 * metric.severity
        result = (rating/max_rating)*100
        return round(result, 2)

    # Enables returning subclasses via select_subclasses()
    objects = InheritanceManager()

    categories = models.ManyToManyField(Category)
    name = models.CharField(max_length=50)
    description = models.CharField(max_length=2000)
    explanation = models.TextField(blank=True)
    query = models.TextField()
    endpoint = models.CharField(max_length=200)
    results = JSONField(null=True)
    last_query = models.DateTimeField(null=True, blank=True)
    active = models.BooleanField(default=True)

    HIGH = 1.5
    NORMAL = 1.0
    LOW = 0.5
    SEVERITY_CHOICES = (
        (HIGH, 'High (1.5x)'),
        (NORMAL, 'Normal (1.0x)'),
        (LOW, 'Low (0.5x)'),
    )
    severity = models.FloatField(choices=SEVERITY_CHOICES,
                                 default=NORMAL)

    def __str__(self):
        return self.name

    def _run_query(self):
        return result_data[self.name]

    def _calculate_score(self, *args, **kwargs):
        return 50*self.severity

    def run(self):
        self.results = self._process(self._run_query())
        self.last_query = timezone.now()
        self.save()

    def score_rating(self, score):
        if score <= 25:
            return 'bad'
        if score >= 80:
            return 'good'
        if score <= 60:
            return 'improvement'
        return 'ok'

    def get_results(self, *args, **kwargs):
        score = self._calculate_score()
        rating = self.score_rating(score)
        return {'data':self.results, 'score':score, 'rating':rating}


class SprintMetric(Metric):
    def _run_query(self, sprint, team):
        url = 'http://192.168.30.196:7478/db/data/transaction/commit'
        payload = {
            "statements" : [ {
                "statement" : self.query.format(
                    sprint=sprint,
                    team=team['team_name'],
                    label=team['label'],
                    sprint_list=', '.join(["'"+s+"'" for s in conf.sprints])
                )
            } ]
        }
        headers = {'Accept': 'application/json; charset=UTF-8', 'Content-Type': 'application/json'}
        r = requests.post(url, data=json.dumps(payload), headers=headers)
        print(sprint, team['name'], r.text[:100])
        data = r.json()
        errors = data.get('errors')
        if errors:
            print('ERROR:', errors)
        return data

    def _process(self, query_data):
        data = query_data['results'][0]
        result = {}
        result['rows'] = []
        for row in data['data']:
            result['rows'].append(row['row'])
        result['columns'] = data['columns']
        return result

    def run(self):
        results = {}
        for sprint in conf.sprints:
            results[sprint] = {}
            for team in conf.teams:
                results[sprint][team['name']] = self._process(self._run_query(sprint, team))
        self.results = results
        self.last_query = timezone.now()
        self.save()

    def get_results(self, sprint, team, *args, **kwargs):
        if isinstance(self.results, str):
            results = json.loads(self.results)
            print('GOT A STRING, WANTED A DICT')
        else:
            results = self.results
        score = self._calculate_score(sprint, team)
        rating = self.score_rating(score)
        return {'data':results[sprint][team['name']], 'score':score, 'rating':rating}

    def get_value(self, sprint, team, column):
        if isinstance(self.results, str):
            results = json.loads(self.results)
            print('GOT A STRING, WANTED A DICT')
        else:
            results = self.results
        results = results[sprint][team['name']]
        try:
            score_index = results['columns'].index(column)
        except ValueError:
            return None
        rows = results['rows']
        if rows:
            value = rows[0][score_index]
        else:
            value = None
        return value
|
Python
| 0 |
@@ -1666,36 +1666,27 @@
%0A%09%09r
-eturn result_data%5Bself.name%5D
+aise NotImplemented
%0A%0A%09d
@@ -1736,30 +1736,27 @@
%0A%09%09r
-eturn 50*self.severity
+aise NotImplemented
%0A%0A%09d
|
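Decoded, the patch swaps the placeholder bodies of the two base-class hooks for a raise:

```python
def _run_query(self):
    raise NotImplemented

def _calculate_score(self, *args, **kwargs):
    raise NotImplemented
```

Worth noting: `NotImplemented` is a sentinel value, not an exception class, so `raise NotImplemented` itself fails with a `TypeError` at call time; the conventional spelling is `raise NotImplementedError`. The recorded commit uses the former, matching its subject line.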
02d6dd700af4fad74592df5576c2ca5ea8bed5fe
|
Update hinton_diagram.py (#342)
|
scripts/hinton_diagram.py
|
scripts/hinton_diagram.py
|
#https://github.com/tonysyu/mpltools/blob/master/mpltools/special/hinton.py
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import collections
from matplotlib import transforms
from matplotlib import ticker


# TODO: Add yutils.mpl._coll to mpltools and use that for square collection.
class SquareCollection(collections.RegularPolyCollection):
    """Return a collection of squares."""

    def __init__(self, **kwargs):
        super(SquareCollection, self).__init__(4, rotation=np.pi/4., **kwargs)

    def get_transform(self):
        """Return transform scaling circle areas to data space."""
        ax = self.axes
        pts2pixels = 72.0 / ax.figure.dpi
        scale_x = pts2pixels * ax.bbox.width / ax.viewLim.width
        scale_y = pts2pixels * ax.bbox.height / ax.viewLim.height
        return transforms.Affine2D().scale(scale_x, scale_y)


def hinton(inarray, max_value=None, use_default_ticks=True):
    """Plot Hinton diagram for visualizing the values of a 2D array.

    Plot representation of an array with positive and negative values
    represented by white and black squares, respectively. The size of each
    square represents the magnitude of each value.

    Unlike the hinton demo in the matplotlib gallery [1]_, this implementation
    uses a RegularPolyCollection to draw squares, which is much more efficient
    than drawing individual Rectangles.

    .. note::
        This function inverts the y-axis to match the origin for arrays.

    .. [1] http://matplotlib.sourceforge.net/examples/api/hinton_demo.html

    Parameters
    ----------
    inarray : array
        Array to plot.
    max_value : float
        Any *absolute* value larger than `max_value` will be represented by a
        unit square.
    use_default_ticks: boolean
        Disable tick-generation and generate them outside this function.
    """
    ax = plt.gca()
    ax.set_axis_bgcolor('gray')
    # make sure we're working with a numpy array, not a numpy matrix
    inarray = np.asarray(inarray)
    height, width = inarray.shape
    if max_value is None:
        max_value = 2**np.ceil(np.log(np.max(np.abs(inarray)))/np.log(2))
    values = np.clip(inarray/max_value, -1, 1)
    rows, cols = np.mgrid[:height, :width]

    pos = np.where(values > 0)
    neg = np.where(values < 0)
    for idx, color in zip([pos, neg], ['white', 'black']):
        if len(idx[0]) > 0:
            xy = list(zip(cols[idx], rows[idx]))
            circle_areas = np.pi / 2 * np.abs(values[idx])
            squares = SquareCollection(sizes=circle_areas,
                                       offsets=xy, transOffset=ax.transData,
                                       facecolor=color, edgecolor=color)
            ax.add_collection(squares, autolim=True)

    ax.axis('scaled')
    # set data limits instead of using xlim, ylim.
    ax.set_xlim(-0.5, width-0.5)
    ax.set_ylim(height-0.5, -0.5)

    if use_default_ticks:
        ax.xaxis.set_major_locator(IndexLocator())
        ax.yaxis.set_major_locator(IndexLocator())


class IndexLocator(ticker.Locator):

    def __init__(self, max_ticks=10):
        self.max_ticks = max_ticks

    def __call__(self):
        """Return the locations of the ticks."""
        dmin, dmax = self.axis.get_data_interval()
        if dmax < self.max_ticks:
            step = 1
        else:
            step = np.ceil(dmax / self.max_ticks)
        return self.raise_if_exceeds(np.arange(0, dmax, step))


plt.figure()
A = np.random.uniform(-1, 1, size=(20, 20))
hinton(A)
#special.hinton(A)
plt.show()
|
Python
| 0 |
@@ -1895,15 +1895,12 @@
set_
-axis_bg
+face
colo
|
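The one-line change decoded: Matplotlib 2.0 renamed `Axes.set_axis_bgcolor` to `Axes.set_facecolor` (the old name was later removed), so the script updates the call:

```python
ax.set_axis_bgcolor('gray')  # before: deprecated since Matplotlib 2.0
ax.set_facecolor('gray')     # after: current name for the same setter
```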
2633daf169d8fc3fbe3aa0d93b5126c1e835a2a0
|
Fix the form line restriction
|
accounting/apps/books/forms.py
|
accounting/apps/books/forms.py
|
from django.forms import ModelForm, BaseInlineFormSet
from django.forms.models import inlineformset_factory

from .models import (
    Organization,
    TaxRate,
    TaxComponent,
    Invoice,
    InvoiceLine,
    Bill,
    BillLine,
    Payment)


class RequiredFirstInlineFormSet(BaseInlineFormSet):
    """
    Used to make empty formset forms required
    See http://stackoverflow.com/questions/2406537/django-formsets-\
        make-first-required/4951032#4951032
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if len(self.forms) > 0:
            first_form = self.forms[0]
            first_form.empty_permitted = False


class OrganizationForm(ModelForm):
    class Meta:
        model = Organization
        fields = (
            "display_name",
            "legal_name",
        )


class TaxRateForm(ModelForm):
    class Meta:
        model = TaxRate
        fields = (
            "name",
            "organization",
        )


class TaxComponentForm(ModelForm):
    class Meta:
        model = TaxComponent
        fields = (
            "name",
            "percentage",
        )

TaxComponentFormSet = inlineformset_factory(TaxRate,
                                            TaxComponent,
                                            form=TaxComponentForm,
                                            formset=RequiredFirstInlineFormSet,
                                            min_num=1,
                                            extra=0)


class RestrictLineFormToOrganization(object):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        instance = kwargs.get('instance', None)
        if instance:
            self.fields['tax_rate'].queryset = (instance
                                                .invoice
                                                .organization
                                                .tax_rates.all())


class InvoiceForm(ModelForm):
    class Meta:
        model = Invoice
        fields = (
            "number",
            "organization",
            "client",
            "draft",
            "sent",
            "paid",
            "date_issued",
            "date_dued",
        )


class InvoiceLineForm(RestrictLineFormToOrganization, ModelForm):
    class Meta:
        model = InvoiceLine
        fields = (
            "label",
            "description",
            "unit_price_excl_tax",
            "quantity",
            "tax_rate",
        )

InvoiceLineFormSet = inlineformset_factory(Invoice,
                                           InvoiceLine,
                                           form=InvoiceLineForm,
                                           formset=RequiredFirstInlineFormSet,
                                           extra=1)


class BillForm(ModelForm):
    class Meta:
        model = Bill
        fields = (
            "number",
            "client",
            "organization",
            "draft",
            "sent",
            "paid",
            "date_issued",
            "date_dued",
        )


class BillLineForm(RestrictLineFormToOrganization, ModelForm):
    class Meta:
        model = BillLine
        fields = (
            "label",
            "description",
            "unit_price_excl_tax",
            "quantity",
            "tax_rate",
        )

BillLineFormSet = inlineformset_factory(Bill,
                                        BillLine,
                                        form=BillLineForm,
                                        formset=RequiredFirstInlineFormSet,
                                        extra=1)


class PaymentForm(ModelForm):
    class Meta:
        model = Payment
        fields = (
            "amount",
            "reference",
            "detail",
            "date_paid",
        )
|
Python
| 0.998355 |
@@ -1548,16 +1548,21 @@
nization
+Mixin
(object)
@@ -1563,16 +1563,16 @@
bject):%0A
-
%0A def
@@ -1732,124 +1732,446 @@
-self.fields%5B'tax_rate'%5D.queryset = (instance%0A .invoice%0A .organization%0A
+if isinstance(instance, InvoiceLine):%0A organization = instance.invoice.organization%0A elif isinstance(instance, BillLine):%0A organization = instance.bill.organization%0A else:%0A raise NotImplementedError(%22The mixin has been applied to a %22%0A %22form model that is not supported%22)%0A self.fields%5B'tax_rate'%5D.queryset = organization
.tax
@@ -2182,17 +2182,16 @@
es.all()
-)
%0A%0A%0Aclass
@@ -2516,33 +2516,60 @@
rmToOrganization
-,
+Mixin,%0A
ModelForm):%0A
@@ -3015,32 +3015,86 @@
tInlineFormSet,%0A
+ min_num=1,%0A
@@ -3118,33 +3118,33 @@
extra=
-1
+0
)%0A%0A%0Aclass BillFo
@@ -3461,17 +3461,41 @@
nization
-,
+Mixin,%0A
ModelFo
@@ -3925,32 +3925,83 @@
tInlineFormSet,%0A
+ min_num=1,%0A
@@ -4030,17 +4030,17 @@
extra=
-1
+0
)%0A%0A%0Aclas
|
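Assembled from the word-level hunks above, the restriction class becomes a mixin that resolves the organization from either line type; a decoded sketch, assuming the hunks apply as recorded:

```python
class RestrictLineFormToOrganizationMixin(object):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        instance = kwargs.get('instance', None)
        if instance:
            if isinstance(instance, InvoiceLine):
                organization = instance.invoice.organization
            elif isinstance(instance, BillLine):
                organization = instance.bill.organization
            else:
                raise NotImplementedError("The mixin has been applied to a "
                                          "form model that is not supported")
            self.fields['tax_rate'].queryset = organization.tax_rates.all()
```

Both line formsets also switch from `extra=1` to `min_num=1` with `extra=0`, so the "first line required" behaviour comes from the formset configuration rather than a spare blank form.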
533559e20e377ce042591709e53d7dc7031d6205
|
Add test for timer automatically inserted due to directive
|
tests/test_directives.py
|
tests/test_directives.py
|
"""tests/test_directives.py.
Tests to ensure that directives interact in the etimerpected mannor
Copyright (C) 2015 Timothy Edmund Crosley
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import hug
def test_timer():
timer = hug.directives.timer()
assert isinstance(timer.taken(), float)
assert isinstance(timer.start, float)
timer = hug.directives.timer('Time: {0}')
assert isinstance(timer.taken(), str)
assert isinstance(timer.start, float)
|
Python
| 0 |
@@ -1170,19 +1170,59 @@
%0Aimport
-hug
+sys%0Aimport hug%0A%0Aapi = sys.modules%5B__name__%5D
%0A%0A%0Adef t
@@ -1466,28 +1466,172 @@
nstance(timer.start, float)%0A
+%0A @hug.get()%0A def timer_tester(timer):%0A return timer.taken()%0A%0A assert isinstance(hug.test.get(api, 'timer_tester').data, float)%0A
|
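Decoded, the diff registers a real endpoint inside the test so the `timer` directive is exercised through hug's routing layer rather than instantiated directly:

```python
import sys
import hug

api = sys.modules[__name__]


def test_timer():
    # ... existing direct assertions elided ...

    @hug.get()
    def timer_tester(timer):
        return timer.taken()

    assert isinstance(hug.test.get(api, 'timer_tester').data, float)
```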
a3939b572c51b7a721b758cb5b93364e4b156c13
|
Add script that dumps the python path
|
dev_tools/syspath.py
|
dev_tools/syspath.py
|
Python
| 0.000002 |
@@ -0,0 +1,173 @@
+#!/usr/bin/env python%0A%0Aimport sys%0A%0A# path%5B0%5D, is the directory containing the script that was used to invoke the Python interpreter%0Afor s in sorted(sys.path%5B1:%5D):%0A print s%0A
|
|
f06a71a87daaaf0bc4b1f5701ce4c59805b70f6b
|
Format all local .json files for human readability
|
usr/bin/json_readable.py
|
usr/bin/json_readable.py
|
Python
| 0 |
@@ -0,0 +1,374 @@
+#!/usr/bin/env python%0A%0Aimport json, os%0A%0Afor filename in os.listdir('.'):%0A if os.path.isfile(filename) and os.path.splitext(filename)%5B1%5D.lower() == '.json':%0A with open(filename) as in_file:%0A data = json.load(in_file)%0A with open(filename, 'w') as out_file:%0A json.dump(data, out_file, indent=4) # indent=4 makes the files human readable%0A
|
|
986e830986b32eb879f05423279c96cbfe27eb14
|
Fix formatting of module import in subsample unit tests
|
skbio/stats/tests/test_subsample.py
|
skbio/stats/tests/test_subsample.py
|
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------

from __future__ import absolute_import, division, print_function

try:
    # future >= 0.12
    from future.backports.test.support import import_fresh_module
except ImportError:
    from future.standard_library.test.support import import_fresh_module
import unittest
import warnings

import numpy as np

cy_subsample = import_fresh_module('skbio.stats._subsample',
                                   fresh=['skbio.stats.__subsample'])
py_subsample = import_fresh_module('skbio.stats._subsample',
                                   blocked=[
                                       'skbio.stats.__subsample'
                                   ])


def setup():
    """Ignore warnings during tests."""
    warnings.simplefilter("ignore")


def teardown():
    """Clear the list of warning filters, so that no filters are active."""
    warnings.resetwarnings()


class SubsampleTests(object):
    def test_subsample_nonrandom(self):
        """Should function correctly for nonrandom cases."""
        a = np.array([0, 5, 0])

        # Subsample same number of items that are in input (without
        # replacement).
        np.testing.assert_equal(self.module.subsample(a, 5), a)

        # Can only choose from one bin.
        exp = np.array([0, 2, 0])
        np.testing.assert_equal(self.module.subsample(a, 2), exp)
        np.testing.assert_equal(self.module.subsample(a, 2, replace=True), exp)

        # Subsample zero items.
        a = [3, 0, 1]
        exp = np.array([0, 0, 0])
        np.testing.assert_equal(self.module.subsample(a, 0), exp)
        np.testing.assert_equal(self.module.subsample(a, 0, replace=True), exp)

    def test_subsample_without_replacement(self):
        """Should return a random subsample (without replacement)."""
        # Selecting 2 counts from the vector 1000 times yields each of the two
        # possible results at least once each.
        a = np.array([2, 0, 1])
        actual = set()
        for i in range(1000):
            obs = self.module.subsample(a, 2)
            actual.add(tuple(obs))
        self.assertEqual(actual, {(1, 0, 1), (2, 0, 0)})

        obs = self.module.subsample(a, 2)
        self.assertTrue(np.array_equal(obs, np.array([1, 0, 1])) or
                        np.array_equal(obs, np.array([2, 0, 0])))

    def test_subsample_with_replacement(self):
        """Should return a random subsample (with replacement)."""
        # Can choose from all in first bin, all in last bin (since we're
        # sampling with replacement), or split across bins.
        a = np.array([2, 0, 1])
        actual = set()
        for i in range(1000):
            obs = self.module.subsample(a, 2, replace=True)
            actual.add(tuple(obs))
        self.assertEqual(actual, {(1, 0, 1), (2, 0, 0), (0, 0, 2)})

        # Test that selecting 35 counts from a 36-count vector 1000 times
        # yields more than 10 different subsamples. If we were subsampling
        # *without* replacement, there would be only 10 possible subsamples
        # because there are 10 nonzero bins in array a. However, there are more
        # than 10 possibilities when sampling *with* replacement.
        a = np.array([2, 0, 1, 2, 1, 8, 6, 0, 3, 3, 5, 0, 0, 0, 5])
        actual = set()
        for i in range(1000):
            obs = self.module.subsample(a, 35, replace=True)
            self.assertEqual(obs.sum(), 35)
            actual.add(tuple(obs))
        self.assertTrue(len(actual) > 10)

    def test_subsample_with_replacement_equal_n(self):
        """Returns random subsample (w/ replacement) when n == counts.sum()."""
        a = np.array([0, 0, 3, 4, 2, 1])
        actual = set()
        for i in range(1000):
            obs = self.module.subsample(a, 10, replace=True)
            self.assertEqual(obs.sum(), 10)
            actual.add(tuple(obs))
        self.assertTrue(len(actual) > 1)

    def test_subsample_invalid_input(self):
        """Should raise an error on invalid input."""
        # Negative n.
        with self.assertRaises(ValueError):
            self.module.subsample([1, 2, 3], -1)

        # Floats.
        with self.assertRaises(TypeError):
            self.module.subsample([1, 2.3, 3], 2)

        # Wrong number of dimensions.
        with self.assertRaises(ValueError):
            self.module.subsample([[1, 2, 3], [4, 5, 6]], 2)

        # Input has too few counts.
        with self.assertRaises(ValueError):
            self.module.subsample([0, 5, 0], 6)


class PySubsampleTests(SubsampleTests, unittest.TestCase):
    module = py_subsample


@unittest.skipIf(cy_subsample is None,
                 "Accelerated subsample module unavailable.")
class CySubsampleTests(SubsampleTests, unittest.TestCase):
    module = cy_subsample


if __name__ == '__main__':
    import nose
    nose.runmodule()
|
Python
| 0.000001 |
@@ -892,109 +892,33 @@
ed=%5B
-%0A 'skbio.stats.__subsample'%0A
+'skbio.stats.__subsample'
%5D)%0A%0A
|
e736772d21aea0995a1948220e2dc2c6fa413fca
|
Add python zookeeper example
|
snippet/example/python/zookeeper.py
|
snippet/example/python/zookeeper.py
|
Python
| 0.000235 |
@@ -0,0 +1,2588 @@
+#!/usr/bin/env python%0D%0A# encoding: utf8%0D%0Afrom __future__ import absolute_import, print_function, unicode_literals, division%0D%0A%0D%0Aimport zookeeper%0D%0A%0D%0Adef refactor_path(f):%0D%0A def wrapper(*args, **kwargs):%0D%0A _refactor = kwargs.pop(%22refactor%22, True)%0D%0A if _refactor:%0D%0A path = kwargs.get(%22path%22, None)%0D%0A if path is not None:%0D%0A kwargs%5B%22path%22%5D = args%5B0%5D._path(path) # args%5B0%5D is an instance of ZooKeeper%0D%0A return f(*args, **kwargs)%0D%0A%0D%0A return wrapper%0D%0A%0D%0A%0D%0Aclass ZooKeeper(object):%0D%0A DEFAULT_ACL = %5B%7B%22perms%22: 0x1f, %22scheme%22: %22world%22, %22id%22: %22anyone%22%7D%5D%0D%0A%0D%0A def __init__(self, connector, root=%22/%22, acl=None, flags=0):%0D%0A self.root = root.rstrip(%22/%22)%0D%0A if not self.root:%0D%0A self.root = %22/%22%0D%0A self.zk = zookeeper.init(connector)%0D%0A self.acl = acl if acl else self.DEFAULT_ACL%0D%0A self.flags = flags%0D%0A%0D%0A def _path(self, path):%0D%0A path = path.strip(%22/%22)%0D%0A if path:%0D%0A path = %22/%22.join((self.root, path))%0D%0A else:%0D%0A path = self.root%0D%0A return path%0D%0A%0D%0A @refactor_path%0D%0A def create(self, path=%22%22, value=%22%22):%0D%0A try:%0D%0A zookeeper.create(self.zk, path, value, self.acl, self.flags)%0D%0A except zookeeper.NodeExistsException:%0D%0A pass%0D%0A except zookeeper.NoNodeException:%0D%0A self.create(path=path.rsplit(%22/%22, 1)%5B0%5D, refactor=False)%0D%0A self.create(path=path, value=value, refactor=False)%0D%0A%0D%0A @refactor_path%0D%0A def delete(self, path=%22%22, recursion=True):%0D%0A try:%0D%0A zookeeper.delete(self.zk, path)%0D%0A except zookeeper.NoNodeException:%0D%0A pass%0D%0A except zookeeper.NotEmptyException:%0D%0A if recursion:%0D%0A for subpath in self.ls(path=path, refactor=False):%0D%0A self.delete(path=%22/%22.join((path, subpath)), recursion=recursion, refactor=False)%0D%0A self.delete(path=path, recursion=recursion, refactor=False)%0D%0A else:%0D%0A raise%0D%0A%0D%0A @refactor_path%0D%0A def set(self, path=%22%22, value=%22%22):%0D%0A try:%0D%0A zookeeper.set(self.zk, path, value)%0D%0A except zookeeper.NoNodeException:%0D%0A self.create(path=path, value=value, refactor=False)%0D%0A self.set(path=path, value=value, refactor=False)%0D%0A%0D%0A @refactor_path%0D%0A def get(self, path=%22%22):%0D%0A return zookeeper.get(self.zk, path)%0D%0A%0D%0A @refactor_path%0D%0A def ls(self, path=%22%22):%0D%0A return zookeeper.get_children(self.zk, path)%0D%0A%0D%0A def close(self):%0D%0A zookeeper.close(self.zk)%0D%0A
|
|
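The encoded file wraps the low-level `zookeeper` C bindings with root-relative paths, recursive delete, and create-on-missing-parent behaviour. A hypothetical usage sketch of the decoded class; the connection string and node names are illustrative only:

```python
zk = ZooKeeper('127.0.0.1:2181', root='/myapp')
zk.create(path='config/feature', value='on')  # creates missing parents on NoNodeException
print(zk.get(path='config/feature'))          # (value, stat) tuple from zookeeper.get
print(zk.ls(path='config'))                   # child node names
zk.delete(path='config', recursion=True)      # recursively empties non-empty nodes
zk.close()
```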
798d9f39e9440f4f09cf83816c294f8ad9c06c4b
|
input api-method dummy
|
wa/api/input.py
|
wa/api/input.py
|
Python
| 0.999994 |
@@ -0,0 +1,228 @@
+import sys%0A%0Adef main(vars):%0A import os%0A import pickle%0A for var in vars:%0A variables = %7B%7D%0A var_value = input(var + %22: %22)%0A variables%5Bvar%5D = var_value%0A%0Aif __name__ == %22builtins%22:%0A main(sys.argv%5B1:%5D)%0A
|
|
101d334b80872c36adb5645ba0b3cda9b7c36a61
|
Add compliance with rule E261 to camo.py.
|
zerver/lib/camo.py
|
zerver/lib/camo.py
|
from django.conf import settings

import codecs
import hashlib
import hmac

from typing import Text

# Encodes the provided URL using the same algorithm used by the camo
# caching https image proxy
def get_camo_url(url):
    # type: (Text) -> Text
    # Only encode the url if Camo is enabled
    if settings.CAMO_URI == '':
        return url
    encoded_url = url.encode("utf-8")
    encoded_camo_key = settings.CAMO_KEY.encode("utf-8")
    digest = hmac.new(encoded_camo_key, encoded_url, hashlib.sha1).hexdigest()
    hex_encoded_url = codecs.encode(encoded_url, "hex") # type: ignore # https://github.com/python/typeshed/issues/300
    return "%s%s/%s" % (settings.CAMO_URI, digest, hex_encoded_url.decode("utf-8"))
|
Python
| 0 |
@@ -565,16 +565,17 @@
, %22hex%22)
+
# type:
|
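The one-character diff decoded: flake8 rule E261 requires at least two spaces before an inline comment, and the original line had only one:

```python
# before (E261: one space before the inline '#')
hex_encoded_url = codecs.encode(encoded_url, "hex") # type: ignore
# after
hex_encoded_url = codecs.encode(encoded_url, "hex")  # type: ignore
```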
c78dffb9b23e38fc980c06ff519e750f5d1e3678
|
add day1_short_palindrome.py - might work :)
|
10-days-of-stats/day1_short_palindrome.py
|
10-days-of-stats/day1_short_palindrome.py
|
Python
| 0.000008 |
@@ -0,0 +1,517 @@
+#!/usr/bin/python3%0A%0A# let's try to not do string comparisons and maybe list indexing%0A# is faster than string indexing%0As = list(map(ord, list(input())))%0A%0Aslen = len(s)%0Afound = 0%0A%0A# baseline optimization only (don't know if there is more possible)%0Afor a in range(0, slen-3):%0A for d in range(a+3, slen):%0A if not s%5Bd%5D == s%5Ba%5D: %0A continue%0A for b in range(a+1, d-1):%0A for c in range(b+1, d):%0A if s%5Bb%5D == s%5Bc%5D:%0A found += 1%0A%0Aprint(found %25 (10**9 + 7))%0A
|
|
c0220578f4cd9b4c26879548751586615fe070e8
|
Add some freesurfer tools
|
cortex/freesurfer.py
|
cortex/freesurfer.py
|
Python
| 0 |
@@ -0,0 +1,1932 @@
+import os%0Aimport struct%0Aimport tempfile%0Aimport shlex%0Aimport subprocess as sp%0A%0Aimport numpy as np%0A%0Aimport vtkutils_new as vtk%0A%0Adef parse_curv(filename):%0A with open(filename) as fp:%0A fp.seek(15)%0A return np.fromstring(fp.read(), dtype='%3Ef4').byteswap()%0A%0Adef show_surf(subject, hemi, type):%0A from mayavi import mlab%0A from tvtk.api import tvtk%0A%0A tf = tempfile.NamedTemporaryFile(suffix='.vtk')%0A path = os.path.join(os.environ%5B'SUBJECTS_DIR'%5D, subject)%0A surf_file = os.path.join(path, %22surf%22, hemi+'.'+type)%0A curv_file = os.path.join(path, %22surf%22, hemi+'.curv')%0A proc = sp.call(shlex.split('mris_convert %7Bpath%7D %7Btf%7D'.format(path=surf_file, tf=tf.name)))%0A pts, polys, norms = vtk.read(tf.name)%0A curv = parse_curv(curv_file)%0A %0A fig = mlab.figure()%0A src = mlab.pipeline.triangular_mesh_source(pts%5B:,0%5D, pts%5B:,1%5D, pts%5B:,2%5D, polys, scalars=curv, figure=fig)%0A norms = mlab.pipeline.poly_data_normals(src, figure=fig)%0A norms.filter.splitting = False%0A surf = mlab.pipeline.surface(norms, figure=fig)%0A surf.parent.scalar_lut_manager.set(lut_mode='RdBu', data_range=%5B-1,1%5D, use_default_range=False)%0A%0A cursors = mlab.pipeline.scalar_scatter(%5B0%5D, %5B0%5D, %5B0%5D)%0A glyphs = mlab.pipeline.glyph(cursors, figure=fig)%0A glyphs.glyph.glyph_source.glyph_source = glyphs.glyph.glyph_source.glyph_dict%5B'axes'%5D%0A%0A fig.scene.background = (0,0,0)%0A fig.scene.interactor.interactor_style = tvtk.InteractorStyleTerrain()%0A%0A def picker_callback(picker):%0A if picker.actor in surf.actor.actors:%0A npts = np.append(cursors.data.points.to_array(), %5Bpts%5Bpicker.point_id%5D%5D, axis=0)%0A cursors.data.points = npts%0A%0A x, y, z = pts%5Bpicker.point_id%5D%0A with open(os.path.join(path, 'tmp', 'edit.dat'), 'w') as fp:%0A fp.write('%25f %25f %25f%5Cn'%25(x, y, z))%0A%0A picker = fig.on_mouse_pick(picker_callback)%0A picker.tolerance = 0.01%0A%0A return surf
|
|
81806c7c66f501075937832f546765fecbd312fc
|
Fix HOPE tests
|
custom/hope/tests.py
|
custom/hope/tests.py
|
# Built-in imports
from datetime import datetime
# Django imports
from django.test import TestCase
# External libraries
from casexml.apps.case.models import CommCareCase
# CommCare HQ imports
from corehq.apps.domain.models import Domain
from corehq.apps.users.models import WebUser
from custom.hope.models import HOPECase
class TestHOPECaseResource(TestCase):
"""
Smoke test for the HOPECase wrapper on CommCareCase to make sure that the
derived properties do not just immediately crash.
"""
def setUp(self):
self.domain = Domain.get_or_create_with_name('qwerty', is_active=True)
self.username = '[email protected]'
self.password = '***'
self.user = WebUser.create(self.domain.name, self.username, self.password)
self.user.set_role(self.domain.name, 'admin')
self.user.save()
def hit_every_HOPE_property(self, hope_case):
"""
Helper method that can be applied to a variety of HOPECase objects
to make sure none of the _HOPE properties crash.
"""
hope_case._HOPE_admission_date
hope_case._HOPE_age_of_beneficiary
hope_case._HOPE_all_anc_doses_given
hope_case._HOPE_all_dpt1_opv1_hb1_doses_given
hope_case._HOPE_all_dpt2_opv2_hb2_doses_given
hope_case._HOPE_all_dpt3_opv3_hb3_doses_given
hope_case._HOPE_all_ifa_doses_given
hope_case._HOPE_all_tt_doses_given
hope_case._HOPE_bpl_indicator
hope_case._HOPE_child_age
hope_case._HOPE_delivery_type
hope_case._HOPE_discharge_date
hope_case._HOPE_education
hope_case._HOPE_existing_child_count
hope_case._HOPE_ifa1_date
hope_case._HOPE_ifa2_date
hope_case._HOPE_ifa3_date
hope_case._HOPE_measles_dose_given
hope_case._HOPE_num_visits
hope_case._HOPE_patient_reg_num
hope_case._HOPE_registration_date
hope_case._HOPE_time_of_birth
hope_case._HOPE_tubal_ligation
def test_derived_properties(self):
"""
Smoke test that the HOPE properties do not crash on a pretty empty CommCareCase
"""
modify_date = datetime.utcnow()
backend_case = CommCareCase(server_modified_on=modify_date, domain=self.domain.name)
backend_case.save()
# Rather than a re-fetch, this simulates the common case where it is pulled from ES
hope_case = HOPECase.wrap(backend_case.to_json())
self.hit_every_HOPE_property(hope_case)
backend_case.delete()
|
Python
| 0.000005 |
@@ -1066,90 +1066,8 @@
%22%22%0A%0A
- hope_case._HOPE_admission_date%0A hope_case._HOPE_age_of_beneficiary%0A
@@ -1098,32 +1098,32 @@
anc_doses_given%0A
+
hope_cas
@@ -1347,32 +1347,70 @@
_tt_doses_given%0A
+ hope_case._HOPE_bcg_indicator%0A
hope_cas
@@ -1435,42 +1435,8 @@
tor%0A
- hope_case._HOPE_child_age%0A
@@ -1473,81 +1473,8 @@
ype%0A
- hope_case._HOPE_discharge_date%0A hope_case._HOPE_education%0A
@@ -1655,24 +1655,24 @@
_dose_given%0A
-
hope
@@ -1694,16 +1694,56 @@
_visits%0A
+ hope_case._HOPE_opv_1_indicator%0A
|
ed4d07fb2a7fa8f1dd30a2b7982940a5fe78275b
|
Add the analysis driver for the run step of the study
|
dakota_run_driver.py
|
dakota_run_driver.py
|
Python
| 0.000003 |
@@ -0,0 +1,1446 @@
+#! /usr/bin/env python%0A# Brokers communication between Dakota and SWASH through files.%0A#%0A# Arguments:%0A# $1 is 'params.in' from Dakota%0A# $2 is 'results.out' returned to Dakota%0A%0Aimport sys%0Aimport os%0Aimport shutil%0Afrom subprocess import call%0A%0A%0Adef driver():%0A %22%22%22Broker communication between Dakota and SWASH through files.%22%22%22%0A%0A # Files and directories.%0A start_dir = os.path.dirname(os.path.realpath(__file__))%0A input_file = 'INPUT'%0A input_template = input_file + '.template' %0A output_file = 'bot07.mat'%0A output_file_var = 'Botlev'%0A data_file = 'sand.bot'%0A run_script = 'run_swash.sh'%0A%0A # Use %60dprepro%60 (from $DAKOTA_DIR/bin) to substitute parameter%0A # values from Dakota into the SWASH input template, creating a new%0A # SWASH input file.%0A shutil.copy(os.path.join(start_dir, input_template), os.curdir)%0A call(%5B'dprepro', sys.argv%5B1%5D, input_template, input_file%5D)%0A%0A # Copy the data file into the active directory.%0A shutil.copy(os.path.join(start_dir, data_file), os.curdir)%0A%0A # Call SWASH through a PBS submission script. Note that %60qsub%60%0A # returns immediately, so jobs do not block.%0A job_name = 'SWASH-Dakota' + os.path.splitext(os.getcwd())%5B-1%5D%0A call(%5B'qsub', '-N', job_name, os.path.join(start_dir, run_script)%5D)%0A%0A # Provide a dummy results file to advance Dakota.%0A with open(sys.argv%5B2%5D, 'w') as fp:%0A fp.write('0.0%5Cn1.0%5Cn')%0A%0Aif __name__ == '__main__':%0A driver()%0A
|
|
ac5f30f9d58a25476c935d5266e9948b03efebf8
|
Add simple fetch tests
|
observatory/dashboard/tests/test_fetch.py
|
observatory/dashboard/tests/test_fetch.py
|
Python
| 0 |
@@ -0,0 +1,1114 @@
+import pytest%0Afrom dashboard.models import Project, Blog, Repository%0Afrom emaillist.models import EmailAddress%0Afrom django.contrib.auth.models import User%0A%[email protected]_db%0Adef test_fetch_warning(client):%0A%0A user = User.objects.create_user('a', '[email protected]', 'bob')%0A user.first_name = %22testf%22%0A user.last_name = %22testf%22%0A user.save()%0A email = EmailAddress(address='[email protected]', user=user)%0A email.save()%0A%0A ease_blog = Blog(from_feed = False)%0A ease_blog.save()%0A ease_repo = Repository(web_url = %22http://git.gnome.org/browse/ease%22,%0A clone_url = %22git://git.gnome.org/ease%22,%0A from_feed = False)%0A ease_repo.save()%0A ease = Project(title = %22Ease%22,%0A description = %22A presentation application for the Gnome Desktop.%22,%0A website = %22http://www.ease-project.org%22,%0A wiki = %22http://live.gnome.org/Ease%22,%0A blog_id = ease_blog.id,%0A repository_id = ease_repo.id)%0A ease.save()%0A ease.authors.add(user)%0A ease.save()%0A%0A ease.do_warnings()%0A assert ease.blog_warn_level %3E 0%0A assert ease.repo_warn_level %3E 0%0A
|
|
7af99b98a9985aa1274c56ef8333b0c57a4679c9
|
add simple redis queue processor.
|
src/pyramid_weblayer/queue.py
|
src/pyramid_weblayer/queue.py
|
Python
| 0 |
@@ -0,0 +1,3209 @@
+# -*- coding: utf-8 -*-%0A%0A%22%22%22Provides a %60%60QueueProcessor%60%60 utility that consumes and processes data%0A from one or more redis channels.%0A %0A %3E%3E%3E redis_client = '%3Credis.Redis%3E instance'%0A %3E%3E%3E input_channels = %5B'channel1', 'channeln'%5D%0A %3E%3E%3E handle_data = lambda data_str: print data_str%0A %3E%3E%3E processor = QueueProcessor(redis_client, input_channels, handle_data)%0A %0A Run in the main / current thread::%0A %0A %3E%3E%3E # processor.start()%0A %0A Run in a background thread::%0A %0A %3E%3E%3E # processor.start(async=True)%0A %0A If running in a background thread, call %60%60stop()%60%60 to exit::%0A %0A %3E%3E%3E # processor.stop()%0A %0A If you want jobs to be requeued (at the back of the queue)%0A This provides a very simple inter-process messaging and / or background%0A task processing mechanism. Queued messages / jobs are explictly passed%0A as string messages. %0A %0A Pro: you're always in control of your code execution environment.%0A Con: you have to deal with potentially tedious message parsing.%0A%22%22%22%0A%0Aimport logging%0Alogger = logging.getLogger(__name__)%0A%0Aimport json%0Aimport threading%0Aimport time%0A%0Aclass QueueProcessor(object):%0A %22%22%22Consume data from a redis queue. When it arrives, pass it to%0A %60%60self.handler_function%60%60.%0A %22%22%22%0A %0A running = False%0A %0A def stop(self):%0A %22%22%22Call %60%60stop()%60%60 to stop processing the queue the next time a job is%0A processed or the input queue timeout is reached.%0A %22%22%22%0A %0A logger.info('QueueProcessor.stop()')%0A %0A self.running = False%0A %0A def _start(self):%0A %22%22%22Actually start processing the input queue(s) ad-infinitum.%22%22%22%0A %0A logger.debug('QueueProcessor.start()')%0A logger.debug(self.channels)%0A %0A self.running = True%0A while self.running:%0A try:%0A return_value = self.redis.blpop(self.channels, timeout=self.timeout)%0A except Exception as err:%0A logger.warn(err, exc_info=True)%0A time.sleep(self.timeout)%0A else:%0A if return_value is not None:%0A channel, body = return_value%0A try:%0A self.handle_function(body)%0A except Exception as err:%0A logger.warn(err, exc_info=True)%0A logger.warn(return_value)%0A if self.should_requeue:%0A self.redis.rpush(channel, body)%0A %0A def start(self, async=False):%0A %22%22%22Either start running or start running in a thread.%22%22%22%0A %0A if self.running:%0A return%0A %0A if async:%0A threading.Thread(target=self._start).start()%0A else:%0A self._start()%0A %0A def __init__(self, redis_client, channels, handle_function, timeout=5,%0A should_requeue=False):%0A %22%22%22Instantiate a queue processor::%0A %0A %3E%3E%3E processor = QueueProcessor(None, None, None)%0A %0A %22%22%22%0A %0A self.redis = redis_client%0A self.channels = channels%0A self.handle_function = handle_function%0A self.timeout = timeout%0A self.should_requeue = should_requeue%0A %0A%0A
|
|
1975e6ebd57aac379ad19f5d4675f8f598c03c66
|
add utilNetworkIP
|
utilNetworkIP.py
|
utilNetworkIP.py
|
Python
| 0.000024 |
@@ -0,0 +1,432 @@
+import socket%0Aimport fcntl%0Aimport struct%0A%0A'''%0Av0.1 2015/11/28%0A%09- add NetworkIP_get_ipAddress_eth0()%0A%09- add get_ip_address()%0A'''%0A%0Adef get_ip_address(ifname):%0A s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)%0A return socket.inet_ntoa(fcntl.ioctl(%0A s.fileno(),%0A 0x8915, # SIOCGIFADDR%0A struct.pack('256s', ifname%5B:15%5D)%0A )%5B20:24%5D)%0A%0Adef NetworkIP_get_ipAddress_eth0():%0A%09return get_ip_address('eth0')%0A%0A%0A
|
|
d8f5b31fab57cc009e87a8d62c8d03075f66e9bd
|
Add solution for Project Euler problem 72 (#3122)
|
project_euler/problem_072/sol2.py
|
project_euler/problem_072/sol2.py
|
Python
| 0 |
@@ -0,0 +1,1187 @@
+%22%22%22%0AProject Euler Problem 72: https://projecteuler.net/problem=72%0A%0AConsider the fraction, n/d, where n and d are positive integers. If n%3Cd and HCF(n,d)=1,%0Ait is called a reduced proper fraction.%0A%0AIf we list the set of reduced proper fractions for d %E2%89%A4 8 in ascending order of size,%0Awe get:%0A%0A1/8, 1/7, 1/6, 1/5, 1/4, 2/7, 1/3, 3/8, 2/5, 3/7, 1/2,%0A4/7, 3/5, 5/8, 2/3, 5/7, 3/4, 4/5, 5/6, 6/7, 7/8%0A%0AIt can be seen that there are 21 elements in this set.%0A%0AHow many elements would be contained in the set of reduced proper fractions%0Afor d %E2%89%A4 1,000,000?%0A%22%22%22%0A%0A%0Adef solution(limit: int = 1000000) -%3E int:%0A %22%22%22%0A Return the number of reduced proper fractions with denominator less than limit.%0A %3E%3E%3E solution(8)%0A 21%0A %3E%3E%3E solution(1000)%0A 304191%0A %22%22%22%0A primes = set(range(3, limit, 2))%0A primes.add(2)%0A for p in range(3, limit, 2):%0A if p not in primes:%0A continue%0A primes.difference_update(set(range(p * p, limit, p)))%0A%0A phi = %5Bfloat(n) for n in range(limit + 1)%5D%0A%0A for p in primes:%0A for n in range(p, limit + 1, p):%0A phi%5Bn%5D *= 1 - 1 / p%0A%0A return int(sum(phi%5B2:%5D))%0A%0A%0Aif __name__ == %22__main__%22:%0A print(f%22%7Bsolution() = %7D%22)%0A
|
|
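The sieve in the diff above works because the number of reduced proper fractions with denominator at most N equals the sum of Euler's totient phi(d) for d from 2 to N; a brute-force cross-check for small limits:

from math import gcd

def brute_force(limit):
    # Count pairs (n, d) with n < d <= limit and gcd(n, d) == 1.
    return sum(1 for d in range(2, limit + 1)
               for n in range(1, d) if gcd(n, d) == 1)

print(brute_force(8))  # 21, matching the problem statement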
2245bebaa87a346fe52f1cada48a817ba7af08a2
|
Update message.py
|
tendrl/commons/message.py
|
tendrl/commons/message.py
|
import datetime
from dateutil import parser
from inspect import getframeinfo
from inspect import stack
import json
import sys
is_collectd_imported = False
if '/usr/lib64/collectd' in sys.path:
is_collectd_imported = True
sys.path.remove('/usr/lib64/collectd')
import uuid
if is_collectd_imported:
sys.path.append('/usr/lib64/collectd')
# TODO(anmol, collectd) This is required due to
# https://github.com/collectd/collectd/issues/2179
# An appropriate solution needs to be carved out
from tendrl.commons.utils.time_utils import now # flake8:noqa
import traceback
from ruamel import yaml
class Message(object):
"""At the time of message object intialization
message_id, timestamp and caller are always None
because it assinged by Message class but job_id,
flwo_id, parent_id, cluster_id may come, when from_json
function call message old message_id, time_stamp and
caller is populated
"""
def __init__(self, priority, publisher, payload, job_id=None,
flow_id=None, parent_id=None, cluster_id=None,
message_id=None, timestamp=None, node_id=None,
caller=None, *args, **kwargs):
super(Message, self).__init__(*args, **kwargs)
if message_id is None:
self.message_id = str(uuid.uuid4())
self.timestamp = now()
else:
self.message_id = message_id
self.timestamp = timestamp
if caller is None:
# Record the file, line and function from which the error was raised
caller = getframeinfo(stack()[1][0])
self.caller = {"filename": caller.filename,
"line_no": caller.lineno,
"function": caller.function}
else:
self.caller = caller
self.priority = priority
self.publisher = publisher
self.node_id = node_id
if self.node_id is None:
self.node_id = NS.node_context.node_id
self.job_id = job_id
self.flow_id = flow_id
self.parent_id = parent_id
self.cluster_id = cluster_id
self.payload = payload
@staticmethod
def from_json(json_str):
message_json = json.loads(json_str)
timestamp = parser.parse(message_json["timestamp"])
message_json["timestamp"] = timestamp
message = Message(**message_json)
if not message.validate():
# Invalid message logged as debug
message_new = Message("debug",
"node-agent",
{"message": message})
return message_new
else:
return message
@staticmethod
def to_json(message):
return json.dumps(message.__dict__, default=serialize_message)
def validate(self):
priorities = ["debug", "info", "notice",
"warning", "error", "critical"]
"""Validation for the object
check all the mandatory fields are present,
check payload is in dict format,
if payload contains job id then it is considered
as job_updates,
Check priorities and publishers are correct
"""
# Check payload type is dict
if type(self.payload) != dict:
return False
# Check mandatory fields
if (self.priority not in priorities or
self.node_id is None or
"message" not in self.payload):
return False
return True
class ExceptionMessage(Message):
# A normal traceback only covers the calls made inside the try
# block; this class combines the stack from program start with
# the traceback from inside the try block, so the output reads
# like a default Python traceback.
def __init__(self, priority, publisher, payload):
# skip last function call
# This gives the traceback up to just before the try block
formatted_stack = traceback.extract_stack()[:-2]
_, _, exc_traceback = sys.exc_info()
# This will give traceback inside try block
recent_call = traceback.extract_tb(exc_traceback)
caller = getframeinfo(stack()[1][0])
caller = {"filename": caller.filename,
"line_no": caller.lineno,
"function": caller.function}
if "exception" in payload:
if isinstance(payload["exception"], Exception):
exception_traceback = self.format_exception(
formatted_stack)
exception_traceback.extend(self.format_exception(
recent_call))
payload["exception_traceback"] = exception_traceback
payload["exception_type"] = type(payload["exception"]).__name__
super(ExceptionMessage, self).__init__(
priority=priority, publisher=publisher, payload=payload,
caller=caller)
else:
err = "Exception field is not found in payload"
sys.stderr.write(err)
else:
err = "Given exception %s is not a subclass of " \
"Exception class \n" % (str(payload["exception"]))
sys.stderr.write(err)
def format_exception(self, formatted_stack):
tb = []
for item in formatted_stack:
file, line, function, statement = item
tb.append({"file" : file,
"line" : line,
"function" : function,
"statement" : statement
})
return tb
# Serializer used when the JSON contains an old Message object
def serialize_message(obj):
if isinstance(obj, Message):
serial = obj.__dict__
return serial
elif isinstance(obj, datetime.datetime):
serial = obj.isoformat()
return serial
elif isinstance(obj, Exception):
return yaml.dump(obj)
else:
raise TypeError(
"Message object is not serializable")
|
Python
| 0.000001 |
@@ -3123,17 +3123,50 @@
updates,
+ job updates should have flow id,
%0A
-
@@ -3315,32 +3315,41 @@
return False%0A
+ %0A
# Check
@@ -3513,32 +3513,32 @@
return False%0A
-
%0A
@@ -3522,32 +3522,135 @@
False%0A %0A
+ if self.job_id is not None:%0A if self.flow_id is None:%0A return False%0A%0A
return T
|
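A round-trip sketch for the Message class in the record above; node_id is passed explicitly so the global NS context is never consulted (all values invented for illustration):

msg = Message("info", "node-agent",
              {"message": "volume created"},
              node_id="node-1")
assert msg.validate()
wire = Message.to_json(msg)           # datetimes serialized via isoformat
roundtripped = Message.from_json(wire)
assert roundtripped.message_id == msg.message_id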
dab192db863fdd694bb0adbce10fa2dd6c05353b
|
Make the cli work again.
|
jrnl/__init__.py
|
jrnl/__init__.py
|
#!/usr/bin/env python
# encoding: utf-8
"""
jrnl is a simple journal application for your command line.
"""
__title__ = 'jrnl'
__version__ = '1.0.3'
__author__ = 'Manuel Ebert'
__license__ = 'MIT License'
__copyright__ = 'Copyright 2013 Manuel Ebert'
from . import Journal
from . import jrnl
|
Python
| 0 |
@@ -289,8 +289,30 @@
rt jrnl%0A
+from .jrnl import cli%0A
|
959c34eeaa2d16726e5c3b4c1b13a0fbad186395
|
Update errno.py (caused an error with eval())
|
src/Lib/errno.py
|
src/Lib/errno.py
|
"""
This module makes available standard errno system symbols.
The value of each symbol is the corresponding integer value,
e.g., on most systems, errno.ENOENT equals the integer 2.
The dictionary errno.errorcode maps numeric codes to symbol names,
e.g., errno.errorcode[2] could be the string 'ENOENT'.
Symbols that are not relevant to the underlying system are not defined.
To map error codes to error messages, use the function os.strerror(),
e.g. os.strerror(2) could return 'No such file or directory'.
"""
errorcode= {1: 'EPERM', 2: 'ENOENT', 3: 'ESRCH', 4: 'EINTR', 5: 'EIO',
6: 'ENXIO', 7: 'E2BIG', 8: 'ENOEXEC', 9: 'EBADF', 10: 'ECHILD', 11: 'EAGAIN',
12: 'ENOMEM', 13: 'EACCES', 14: 'EFAULT', 15: 'ENOTBLK', 16: 'EBUSY',
17: 'EEXIST', 18: 'EXDEV', 19: 'ENODEV', 20: 'ENOTDIR', 21: 'EISDIR',
22: 'EINVAL', 23: 'ENFILE', 24: 'EMFILE', 25: 'ENOTTY', 26: 'ETXTBSY',
27: 'EFBIG', 28: 'ENOSPC', 29: 'ESPIPE', 30: 'EROFS', 31: 'EMLINK',
32: 'EPIPE', 33: 'EDOM', 34: 'ERANGE', 35: 'EDEADLOCK', 36: 'ENAMETOOLONG',
37: 'ENOLCK', 38: 'ENOSYS', 39: 'ENOTEMPTY', 40: 'ELOOP', 42: 'ENOMSG',
43: 'EIDRM', 44: 'ECHRNG', 45: 'EL2NSYNC', 46: 'EL3HLT', 47: 'EL3RST',
48: 'ELNRNG', 49: 'EUNATCH', 50: 'ENOCSI', 51: 'EL2HLT', 52: 'EBADE',
53: 'EBADR', 54: 'EXFULL', 55: 'ENOANO', 56: 'EBADRQC', 57: 'EBADSLT',
59: 'EBFONT', 60: 'ENOSTR', 61: 'ENODATA', 62: 'ETIME', 63: 'ENOSR',
64: 'ENONET', 65: 'ENOPKG', 66: 'EREMOTE', 67: 'ENOLINK', 68: 'EADV',
69: 'ESRMNT', 70: 'ECOMM', 71: 'EPROTO', 72: 'EMULTIHOP', 73: 'EDOTDOT',
74: 'EBADMSG', 75: 'EOVERFLOW', 76: 'ENOTUNIQ', 77: 'EBADFD', 78: 'EREMCHG',
79: 'ELIBACC', 80: 'ELIBBAD', 81: 'ELIBSCN', 82: 'ELIBMAX', 83: 'ELIBEXEC',
84: 'EILSEQ', 85: 'ERESTART', 86: 'ESTRPIPE', 87: 'EUSERS', 88: 'ENOTSOCK',
89: 'EDESTADDRREQ', 90: 'EMSGSIZE', 91: 'EPROTOTYPE', 92: 'ENOPROTOOPT',
93: 'EPROTONOSUPPORT', 94: 'ESOCKTNOSUPPORT', 95: 'ENOTSUP',
96: 'EPFNOSUPPORT', 97: 'EAFNOSUPPORT', 98: 'EADDRINUSE',
99: 'EADDRNOTAVAIL', 100: 'ENETDOWN', 101: 'ENETUNREACH', 102: 'ENETRESET',
103: 'ECONNABORTED', 104: 'ECONNRESET', 105: 'ENOBUFS', 106: 'EISCONN',
107: 'ENOTCONN', 108: 'ESHUTDOWN', 109: 'ETOOMANYREFS', 110: 'ETIMEDOUT',
111: 'ECONNREFUSED', 112: 'EHOSTDOWN', 113: 'EHOSTUNREACH', 114: 'EALREADY',
115: 'EINPROGRESS', 116: 'ESTALE', 117: 'EUCLEAN', 118: 'ENOTNAM',
119: 'ENAVAIL', 120: 'EISNAM', 121: 'EREMOTEIO', 122: 'EDQUOT',
123: 'ENOMEDIUM', 124: 'EMEDIUMTYPE', 125: 'ECANCELED', 126: 'ENOKEY',
127: 'EKEYEXPIRED', 128: 'EKEYREVOKED', 129: 'EKEYREJECTED',
130: 'EOWNERDEAD', 131: 'ENOTRECOVERABLE', 132: 'ERFKILL'}
# now put the attributes of the errorcode dict into this modules namespace
_codes=[]
for _num, _code in errorcode.items():
_codes.append('%s=%s' % (_code, _num))
eval(';'.join(_codes))
|
Python
| 0.000001 |
@@ -2747,19 +2747,19 @@
num))%0A%0Ae
-val
+xec
(';'.joi
|
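The one-character diff above matters because eval() only evaluates expressions, while an assignment such as ENOENT=2 is a statement; a quick illustration (at module scope, where exec defines globals):

try:
    eval("ENOENT=2")          # assignments are statements, not expressions
except SyntaxError as e:
    print("eval failed:", e)

exec("ENOENT=2")              # exec runs statements
print(ENOENT)                 # 2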
b7ea4cde920a69add4cbd4cfb76c651ec77910ce
|
Create __init__.py
|
bse/data/__init__.py
|
bse/data/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
ed36937ff6ccb2e676236b2bd128a2bb8fa9a760
|
add format_html util
|
dimagi/utils/html.py
|
dimagi/utils/html.py
|
Python
| 0.000002 |
@@ -0,0 +1,396 @@
+from __future__ import absolute_import%0Afrom django.utils.html import conditional_escape%0Afrom django.utils.safestring import mark_safe%0A%0Adef format_html(format_string, *args, **kwargs):%0A%0A escaped_args = map(conditional_escape, args)%0A escaped_kwargs = dict(%5B(key, conditional_escape(value)) for key, value in kwargs%5D)%0A return mark_safe(format_string.format(*escaped_args, **escaped_kwargs))
|
|
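A usage sketch for format_html above, positional arguments only (as written, the kwargs branch would need kwargs.items() to unpack key/value pairs):

# Arguments are escaped; the format string itself is trusted markup.
link = format_html('<a href="{0}">{1}</a>',
                   'http://example.com/?a=1&b=2',
                   '<script>alert(1)</script>')
# -> <a href="http://example.com/?a=1&amp;b=2">&lt;script&gt;alert(1)&lt;/script&gt;</a>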
8a668efbc266802a4f4e23c936d3589b230d9528
|
Add blink example on two different boards
|
nanpy/examples/blink_2boards.py
|
nanpy/examples/blink_2boards.py
|
Python
| 0 |
@@ -0,0 +1,639 @@
+#!/usr/bin/env python%0A%0A# Author: Andrea Stagi %[email protected]%3E%0A# Description: keeps your led blinking on 2 boards%0A# Dependencies: None%0A%0Afrom nanpy import (ArduinoApi, SerialManager)%0Afrom time import sleep%0A%0A%0Adevice_1 = '/dev/tty.usbmodem1411'%0Adevice_2 = '/dev/tty.usbmodem1431'%0A%0Aconnection_1 = SerialManager(device=device_1)%0Aconnection_2 = SerialManager(device=device_2)%0A%0Aa1 = ArduinoApi(connection=connection_1)%0Aa1.pinMode(13, a1.OUTPUT)%0A%0Aa2 = ArduinoApi(connection=connection_2)%0Aa2.pinMode(13, a2.OUTPUT)%0A%0Afor i in range(10000):%0A a1.digitalWrite(13, (i + 1) %25 2)%0A sleep(1)%0A a2.digitalWrite(13, (i + 1) %25 2)%0A sleep(1)%0A%0A
|
|
61a005ffbc988b6a20441841112890bb397f8ca3
|
Create stub for 2016 NFLPool player picks
|
2016_player_picks.py
|
2016_player_picks.py
|
Python
| 0 |
@@ -0,0 +1,464 @@
+%0A%0Astone = %7B%22firstname%22: %22chris%22, %22lastname%22: %22stone%22, %22timestamp%22: %229/6/2016%22, %22email%22: %[email protected]%22,%0A %22afc_east_1%22: %22Patriots%22, %22afc_east_2%22: %22Jets%22, %22afc_east_last%22: %22Bills%22, %22afc_north_1%22: %22Steelers%22,%0A %22afc_north_2%22: %22Bengals%22, %22afc_north_last%22: %22Browns%22, %22afc_south_1%22: %22Colts%22, %22afc_south_2%22: %22Colts%22,%0A %22afc_south_last%22: %22Titans%22%7D%0A%0Athaden = %5B%5D%0A%0Agarber = %5B%5D%0A%0Afronczak = %5B%5D%0A%0Athomas = %5B%5D%0A%0Acutler = %5B%5D%0A%0Anorred = %5B%5D%0A%0Aoakland = %5B%5D%0A%0A%0A
|
|
893e4292f6b1799bf5f1888fcbad41ec8b5a5951
|
Use Q-learning to learn all state-action values via self-play
|
examples/tic_ql_tabular_selfplay_all.py
|
examples/tic_ql_tabular_selfplay_all.py
|
Python
| 0 |
@@ -0,0 +1,641 @@
+'''%0AIn this example we use Q-learning via self-play to learn%0Athe value function of all Tic-Tac-Toe positions.%0A'''%0Afrom capstone.environment import Environment%0Afrom capstone.game import TicTacToe%0Afrom capstone.mdp import GameMDP%0Afrom capstone.rl import QLearningSelfPlay%0Afrom capstone.rl.tabularf import TabularF%0Afrom capstone.util import tic2pdf%0A%0Agame = TicTacToe()%0Aenv = Environment(GameMDP(game))%0Aqlearning = QLearningSelfPlay(env, n_episodes=1000)%0Aqlearning.learn()%0A%0Afor move in game.legal_moves():%0A print('-' * 80)%0A value = qlearning.qf%5B(game, move)%5D%0A new_game = game.copy().make_move(move)%0A print(value)%0A print(new_game)%0A
|
|
af9b64bcf99d0e2c13b9b6b05a6b4029a0bb7d28
|
Add theimdbapi provider; it's faster than myapifilms.
|
providers/moviedata/theimdbapi.py
|
providers/moviedata/theimdbapi.py
|
Python
| 0 |
@@ -0,0 +1,1633 @@
+import re%0Afrom providers.moviedata.provider import MoviedataProvider%0Afrom application import ACCESS_KEYS, APPLICATION as APP%0Atry:%0A from urllib import urlencode # Python 2.X%0Aexcept ImportError:%0A from urllib.parse import urlencode # Python 3+%0A%0AIDENTIFIER = %22theimdbapi%22%0A%0Aclass Provider(MoviedataProvider):%0A def get_url(self, movie):%0A parameters = %7B%0A %22title%22: movie%5B%22name%22%5D.encode(%22utf-8%22),%0A %7D%0A if %22year%22 in movie and movie%5B%22year%22%5D:%0A parameters%5B%22year%22%5D = movie%5B%22year%22%5D%0A%0A return %22http://www.theimdbapi.org/api/find/movie?%22 + urlencode(parameters)%0A%0A def fetch_movie_data(self, movie):%0A url = self.get_url(movie)%0A APP.debug(%22Fetching url: %25s%22 %25 url)%0A data = self.parse_json(url)%0A if not data:%0A return %7B%7D%0A%0A for hit in data:%0A # Return the first hit with a release date%0A if hit and %22release_date%22 in hit and hit%5B%22release_date%22%5D:%0A return self.transform_data(hit)%0A%0A return %7B%7D%0A%0A def get_data_mapping(self):%0A return %7B%0A %22id%22: %22imdb_id%22,%0A %22title%22: %22title%22,%0A %22plot%22: %22storyline%22,%0A %22genre%22: %22genre%22,%0A %22director%22: %22director%22,%0A %22country%22: %22metadata.countries%22,%0A %22language%22: %22metadata.languages%22,%0A %22runtime%22: %22length%22,%0A %22released%22: %22release_date%22,%0A %22age_rating%22: %22content_rating%22,%0A %22year%22: %22year%22,%0A %22imdb_url%22: %22url%22,%0A %22imdb_poster%22: %22poster.large%22,%0A %22imdb_rating%22: %22rating%22,%0A %22imdb_rating_count%22: %22rating_count%22,%0A %7D%0A
|
|
a8419c46ceed655a276dad00a24e21f300fda543
|
Add py solution for 513. Find Bottom Left Tree Value
|
py/find-bottom-left-tree-value.py
|
py/find-bottom-left-tree-value.py
|
Python
| 0.000235 |
@@ -0,0 +1,483 @@
+# Definition for a binary tree node.%0A# class TreeNode(object):%0A# def __init__(self, x):%0A# self.val = x%0A# self.left = None%0A# self.right = None%0A%0Aclass Solution(object):%0A def findBottomLeftValue(self, root):%0A %22%22%22%0A :type root: TreeNode%0A :rtype: int%0A %22%22%22%0A q = %5Broot%5D%0A for v in q:%0A if v.right:%0A q.append(v.right)%0A if v.left:%0A q.append(v.left)%0A return v.val%0A
|
|
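Assuming the commented-out TreeNode definition at the top of the record is available, the right-before-left BFS can be exercised on a small hypothetical tree; the last node visited is the leftmost node of the last row:

#     1
#    / \
#   2   3
#  /
# 4
root = TreeNode(1)
root.left, root.right = TreeNode(2), TreeNode(3)
root.left.left = TreeNode(4)
print(Solution().findBottomLeftValue(root))  # 4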
1e45505a94f23198d0ec464107c12d29d4d9aa16
|
Add tests for libdft
|
pyscf/lib/dft/test/test_libdft.py
|
pyscf/lib/dft/test/test_libdft.py
|
Python
| 0 |
@@ -0,0 +1,1578 @@
+#!/usr/bin/env python%0A%0Aimport unittest%0Aimport ctypes%0Aimport itertools%0Aimport numpy%0Afrom pyscf.dft.numint import libdft%0A%0Aclass KnownValues(unittest.TestCase):%0A def test_empty_blocks(self):%0A ao_loc = numpy.array(%5B0,51,60,100,112,165,172%5D, dtype=numpy.int32)%0A%0A def get_empty_mask(non0tab_mask):%0A non0tab_mask = numpy.asarray(non0tab_mask, dtype=numpy.uint8)%0A shls_slice = (0, non0tab_mask.size)%0A empty_mask = numpy.empty(4, dtype=numpy.int8)%0A empty_mask%5B:%5D = -9%0A libdft.VXCao_empty_blocks(%0A empty_mask.ctypes.data_as(ctypes.c_void_p),%0A non0tab_mask.ctypes.data_as(ctypes.c_void_p),%0A (ctypes.c_int*2)(*shls_slice),%0A ao_loc.ctypes.data_as(ctypes.c_void_p))%0A return empty_mask.tolist()%0A%0A def naive_emtpy_mask(non0tab_mask):%0A blksize = 56%0A ao_mask = numpy.zeros(ao_loc%5B-1%5D, dtype=bool)%0A for k, (i0, i1) in enumerate(zip(ao_loc%5B:-1%5D, ao_loc%5B1:%5D)):%0A ao_mask%5Bi0:i1%5D = non0tab_mask%5Bk%5D == 1%0A valued = %5Bm.any() for m in numpy.split(ao_mask, %5B56, 112, 168%5D)%5D%0A empty_mask = ~numpy.array(valued)%0A return empty_mask.astype(numpy.int).tolist()%0A%0A def check(non0tab_mask):%0A if get_empty_mask(non0tab_mask) != naive_emtpy_mask(non0tab_mask):%0A raise ValueError(non0tab_mask)%0A%0A for mask in list(itertools.product(%5B0, 1%5D, repeat=6)):%0A check(mask)%0A%0Aif __name__ == %22__main__%22:%0A print(%22Test libdft%22)%0A unittest.main()%0A
|
|
d2aca979f2c8a711bbc139675cf699b6ce5ce53d
|
Update keys-and-rooms.py
|
Python/keys-and-rooms.py
|
Python/keys-and-rooms.py
|
# There are N rooms and you start in room 0.
# Each room has a distinct number in 0, 1, 2, ..., N-1,
# and each room may have some keys to access the next room.
#
# Formally, each room i has a list of keys rooms[i],
# and each key rooms[i][j] is an integer in [0, 1, ..., N-1]
# where N = rooms.length.
# A key rooms[i][j] = v opens the room with number v.
#
# Initially, all the rooms start locked (except for room 0).
# You can walk back and forth between rooms freely.
# Return true if and only if you can enter every room.
#
# Example 1:
#
# Input: [[1],[2],[3],[]]
# Output: true
# Explanation:
# We start in room 0, and pick up key 1.
# We then go to room 1, and pick up key 2.
# We then go to room 2, and pick up key 3.
# We then go to room 3. Since we were able to go to every room,
# we return true.
# Example 2:
#
# Input: [[1,3],[3,0,1],[2],[0]]
# Output: false
# Explanation: We can't enter the room with number 2.
#
# Note:
# - 1 <= rooms.length <= 1000
# - 0 <= rooms[i].length <= 1000
# - The number of keys in all rooms combined is at most 3000.
class Solution(object):
def canVisitAllRooms(self, rooms):
"""
:type rooms: List[List[int]]
:rtype: bool
"""
lookup = set([0])
stack = [0]
while stack:
node = stack.pop()
for nei in rooms[node]:
if nei not in lookup:
lookup.add(nei)
if len(lookup) == len(rooms):
return True
stack.append(nei)
return len(lookup) == len(rooms)
|
Python
| 0.000001 |
@@ -1,8 +1,38 @@
+# Time: O(n!)%0A# Space: O(n)%0A%0A
# There
|
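The stack-based DFS above can be checked directly against the two examples from the problem statement:

s = Solution()
print(s.canVisitAllRooms([[1], [2], [3], []]))            # True
print(s.canVisitAllRooms([[1, 3], [3, 0, 1], [2], [0]]))  # False, room 2 locked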
29a2dcf4ab6684187d95e0faab171b5e071e1eee
|
Create main.py
|
python/using_sqlalchemy01/main.py
|
python/using_sqlalchemy01/main.py
|
Python
| 0.000001 |
@@ -0,0 +1,368 @@
+from .models import User%0Afrom .database import session_scope%0A%0Aif __name__ == '__main__':%0A with session_scope() as session:%0A users = session.query( User ).order_by( User.id )%0A %0A # Remove all object instances from this Session to make them available to accessed by outside%0A users.expunge_all()%0A %0A for u in users:%0A print u%0A
|
|
95ff080685f01cbb368d9467f67076ce9f3eae08
|
add generic resource creator
|
stacker_blueprints/generic.py
|
stacker_blueprints/generic.py
|
Python
| 0 |
@@ -0,0 +1,2244 @@
+%22%22%22 Load dependencies %22%22%22%0Afrom troposphere import (%0A Ref, Output%0A)%0A%0Afrom stacker.blueprints.base import Blueprint%0Afrom stacker.blueprints.variables.types import (%0A CFNString,%0A CFNCommaDelimitedList,%0A)%0A%0Aclass generic_resource_creator(Blueprint):%0A %22%22%22 Generic Blueprint for creating a resource %22%22%22%0A def add_cfn_description(self):%0A %22%22%22 Boilerplate for CFN Template %22%22%22%0A template = self.template%0A template.add_version('2010-09-09')%0A template.add_description('Generic Resource Creator - 1.0.0')%0A%0A %22%22%22%0A%0A *** NOTE *** Template Version Reminder%0A%0A Make Sure you bump up the template version number above if submitting%0A updates to the repo. This is the only way we can tell which version of%0A a template is in place on a running resouce.%0A%0A %22%22%22%0A%0A VARIABLES = %7B%0A 'Class':%0A %7B'type': str,%0A 'description': 'The troposphere class to create'%7D,%0A 'Output':%0A %7B'type': str,%0A 'description': 'The output to create'%7D,%0A 'Properties':%0A %7B'type': dict,%0A 'description': 'The list of propertie to use for the troposphere class'%7D,%0A %7D%0A%0A def setup_resource(self):%0A template = self.template%0A variables = self.get_variables()%0A%0A tclass = variables%5B'Class'%5D%0A tprops = variables%5B'Properties'%5D%0A output = variables%5B'Output'%5D%0A%0A klass = self.get_class('troposphere.' + tclass)%0A%0A # we need to do the following because of type conversion issues%0A tprops_string = %7B%7D%0A for variable, value in tprops.items():%0A tprops_string%5Bvariable%5D = str(value)%0A%0A instance = klass.from_dict('ResourceRefName', tprops_string)%0A%0A template.add_resource(instance)%0A template.add_output(Output(%0A output,%0A Description=%22The output%22,%0A Value=Ref(instance)%0A ))%0A%0A def create_template(self):%0A %22%22%22 Create the CFN template %22%22%22%0A self.add_cfn_description()%0A self.setup_resource()%0A%0A def get_class(self, kls):%0A parts = kls.split('.')%0A module = %22.%22.join(parts%5B:-1%5D)%0A m = __import__( module )%0A for comp in parts%5B1:%5D:%0A m = getattr(m, comp) %0A return m%0A
|
|
3dcf737fa6a6467e1c96d31325e26ecf20c50320
|
Add test cases for the logger
|
test/test_logger.py
|
test/test_logger.py
|
Python
| 0.000002 |
@@ -0,0 +1,1039 @@
+# encoding: utf-8%0A%0A%22%22%22%0A.. codeauthor:: Tsuyoshi Hombashi %[email protected]%3E%0A%22%22%22%0A%0Afrom __future__ import print_function%0Afrom __future__ import unicode_literals%0A%0Aimport logbook%0Aimport pytest%0Afrom sqliteschema import (%0A set_logger,%0A set_log_level,%0A)%0A%0A%0Aclass Test_set_logger(object):%0A%0A @pytest.mark.parametrize(%5B%22value%22%5D, %5B%0A %5BTrue%5D,%0A %5BFalse%5D,%0A %5D)%0A def test_smoke(self, value):%0A set_logger(value)%0A%0A%0Aclass Test_set_log_level(object):%0A%0A @pytest.mark.parametrize(%5B%22value%22%5D, %5B%0A %5Blogbook.CRITICAL%5D,%0A %5Blogbook.ERROR%5D,%0A %5Blogbook.WARNING%5D,%0A %5Blogbook.NOTICE%5D,%0A %5Blogbook.INFO%5D,%0A %5Blogbook.DEBUG%5D,%0A %5Blogbook.TRACE%5D,%0A %5Blogbook.NOTSET%5D,%0A %5D)%0A def test_smoke(self, value):%0A set_log_level(value)%0A%0A @pytest.mark.parametrize(%5B%22value%22, %22expected%22%5D, %5B%0A %5BNone, LookupError%5D,%0A %5B%22unexpected%22, LookupError%5D,%0A %5D)%0A def test_exception(self, value, expected):%0A with pytest.raises(expected):%0A set_log_level(value)%0A
|
|
b410facc9e7882ecec1bc1029caa3f35a3a28d03
|
Test for bundle API
|
tests/bundle_api.py
|
tests/bundle_api.py
|
Python
| 0 |
@@ -0,0 +1,2667 @@
+#!/usr/bin/env python%0A%0A# pylint: disable-msg=C0103%0A%0A%22%22%22Implements an Execution Manager for the AIMES demo.%0A%22%22%22%0A%0A__author__ = %22Matteo Turilli, Andre Merzky%22%0A__copyright__ = %22Copyright 2014, RADICAL%22%0A__license__ = %22MIT%22%0A%0Aimport os%0A%0Aimport radical.utils as ru%0A%0Aimport aimes.bundle%0Aimport aimes.emanager.interface%0A%0A# Set environment directories to test the bundle API.%0ACONF = os.getenv(%22BUNDLE_CONF%22)%0AORIGIN = os.getenv(%22BUNDLE_ORIGIN%22)%0A%0A# Create a reporter for the demo. Takes care of colors and font attributes.%0Areport = ru.Reporter(title='Bundle API test')%0A%0Abundle = aimes.emanager.interface.Bundle(CONF, ORIGIN)%0A%0A# Collect information about the resources to plan the execution strategy.%0Abandwidth_in = dict()%0Abandwidth_out = dict()%0A%0A# Get network bandwidth for each resource.%0Afor resource_name in bundle.resources:%0A resource = bundle.resources%5Bresource_name%5D%0A bandwidth_in%5Bresource.name%5D = resource.get_bandwidth(ORIGIN, 'in')%0A bandwidth_out%5Bresource.name%5D = resource.get_bandwidth(ORIGIN, 'out')%0A%0A# Report back to the demo about the available resource bundle.%0Areport.info(%22Target Resources%22)%0Aprint %22IDs: %25s%22 %25 %5C%0A %5Bbundle.resources%5Bresource%5D.name for resource in bundle.resources%5D%0A%0A# Print all the information available via the bundle API.%0Afor resource_name in bundle.resources:%0A resource = bundle.resources%5Bresource_name%5D%0A%0A report.info(%22resource.name : %25s%22 %25 resource.name)%0A print %22resource.num_nodes: %25s%22 %25 resource.num_nodes%0A print %22resource.container: %25s%22 %25 resource.container%0A print %22resource.get_bandwidth(IP, 'in') : %25s%22 %25 %5C%0A resource.get_bandwidth(ORIGIN, 'in')%0A%0A print %22resource.get_bandwidth(IP, 'out'): %25s%22 %25 %5C%0A resource.get_bandwidth(ORIGIN, 'out')%0A%0A print %22resource.queues : %25s%22 %25 resource.queues.keys()%0A%0A for queue_name in resource.queues:%0A queue = resource.queues%5Bqueue_name%5D%0A print%0A print %22 queue.name : %25s%22 %25 queue.name%0A print %22 queue.resource_name : %25s%22 %25 queue.resource_name%0A print %22 queue.max_walltime : %25s%22 %25 queue.max_walltime%0A print %22 queue.num_procs_limit : %25s%22 %25 queue.num_procs_limit%0A print %22 queue.alive_nodes : %25s%22 %25 queue.alive_nodes%0A print %22 queue.alive_procs : %25s%22 %25 queue.alive_procs%0A print %22 queue.busy_nodes : %25s%22 %25 queue.busy_nodes%0A print %22 queue.busy_procs : %25s%22 %25 queue.busy_procs%0A print %22 queue.free_nodes : %25s%22 %25 queue.free_nodes%0A print %22 queue.free_procs : %25s%22 %25 queue.free_procs%0A print %22 queue.num_queueing_jobs: %25s%22 %25 queue.num_queueing_jobs%0A print %22 queue.num_running_jobs : %25s%22 %25 queue.num_running_jobs%0A
|
|
8b0b7c19d2e2c015fd8ba7d5408b23334ee8874f
|
Add test case for configure failure.
|
test/Configure/VariantDir2.py
|
test/Configure/VariantDir2.py
|
Python
| 0 |
@@ -0,0 +1,1596 @@
+#!/usr/bin/env python%0A#%0A# __COPYRIGHT__%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining%0A# a copy of this software and associated documentation files (the%0A# %22Software%22), to deal in the Software without restriction, including%0A# without limitation the rights to use, copy, modify, merge, publish,%0A# distribute, sublicense, and/or sell copies of the Software, and to%0A# permit persons to whom the Software is furnished to do so, subject to%0A# the following conditions:%0A#%0A# The above copyright notice and this permission notice shall be included%0A# in all copies or substantial portions of the Software.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY%0A# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE%0A# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND%0A# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE%0A# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION%0A# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION%0A# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.%0A#%0A%0A__revision__ = %22__FILE__ __REVISION__ __DATE__ __DEVELOPER__%22%0A%0A%22%22%22%0AVerify that Configure contexts work with SConstruct/SConscript structure%0A%22%22%22%0A%0Aimport os%0A%0Aimport TestSCons%0A%0Atest = TestSCons.TestSCons()%0A%0Atest.write('SConstruct', %22%22%22%5C%0ASConscript('SConscript', build_dir='build', src='.')%0A%22%22%22)%0A%0Atest.write('SConscript', %22%22%22%5C%0Aenv = Environment()%0Aconfig = env.Configure(conf_dir='sconf', log_file='config.log')%0Aconfig.TryRun(%22int main() %7B%7D%22, %22.c%22)%0Aconfig.Finish()%0A%22%22%22)%0A%0Atest.run()%0Atest.pass_test()%0A
|
|
1fd09e7328b1ebf41bc0790f2a96c18207b10077
|
Add sine wave sweep test
|
tests/test-sweep.py
|
tests/test-sweep.py
|
Python
| 0 |
@@ -0,0 +1,1834 @@
+#!/usr/bin/env python%0Atry:%0A from cStringIO import StringIO%0Aexcept ImportError:%0A from io import StringIO%0A%0Aclass Makefile(object):%0A def __init__(self):%0A self._fp = StringIO()%0A self._all = set()%0A self._targets = set()%0A def add_default(self, x):%0A self._all.add(x)%0A def build(self, target, deps, *cmds):%0A if target in self._targets:%0A return%0A self._targets.add(target)%0A fp = self._fp%0A fp.write(target + ':')%0A for dep in deps:%0A fp.write(' ' + dep)%0A fp.write('%5Cn')%0A for cmd in cmds:%0A fp.write('%5Ct' + cmd + '%5Cn')%0A def write(self, *line):%0A for line in line:%0A self._fp.write(line + '%5Cn')%0A def save(self):%0A f = open('Makefile', 'w')%0A f.write('all:')%0A for t in sorted(self._all):%0A f.write(' ' + t)%0A f.write('%5Cn')%0A f.write(self._fp.getvalue())%0A%0Amake = Makefile()%0Amake.write(%0A 'FR := ../build/product/fresample',%0A 'SOX := sox')%0A%0Adef test_sweep(depth, rate1, rate2):%0A inpath = 'in_%25dk%25d.wav' %25 (rate1 // 1000, depth)%0A make.build(%0A inpath, %5B'Makefile'%5D,%0A '$(SOX) -b %25d -r %25d -n $@ synth 8 sine 0+%25d vol 0.999' %25%0A (depth, rate1, rate1//2))%0A for q in range(4):%0A outpath = 'out_%25dk%25d_%25dk%25dq' %25 %5C%0A (rate1 // 1000, depth, rate2/1000, q)%0A make.build(%0A outpath + '.wav', %5Binpath, '$(FR)', 'Makefile'%5D,%0A '$(FR) -q %25d -r %25d $%3C $@' %25 (q, rate2))%0A make.build(%0A outpath + '.png', %5Boutpath + '.wav', 'Makefile'%5D,%0A 'sox $%3C -n spectrogram -w kaiser -o $@')%0A make.add_default(outpath + '.png')%0A%0Atest_sweep(16, 96000, 44100)%0Atest_sweep(16, 96000, 48000)%0Atest_sweep(16, 48000, 44100)%0Amake.write(%0A 'clean:',%0A '%5Ctrm -f *.wav *.png')%0Amake.save()%0A
|
|
93b65dd6707093487dc702fd94cdb3c6017d873b
|
add unit tests for translate_ix_member_name in ixapi.py
|
tests/test_ixapi.py
|
tests/test_ixapi.py
|
Python
| 0 |
@@ -0,0 +1,1714 @@
+from pyixia.ixapi import translate_ix_member_name%0Afrom nose.tools import eq_%0A%0Adef test_translate_ix_member_name():%0A eq_(translate_ix_member_name('A'), 'a')%0A eq_(translate_ix_member_name('b'), 'b')%0A eq_(translate_ix_member_name('AA'), 'aa')%0A eq_(translate_ix_member_name('bb'), 'bb')%0A eq_(translate_ix_member_name('Ab'), 'ab')%0A eq_(translate_ix_member_name('bA'), 'b_a')%0A eq_(translate_ix_member_name('bbb'), 'bbb')%0A eq_(translate_ix_member_name('AAA'), 'aaa')%0A eq_(translate_ix_member_name('bAA'), 'b_aa')%0A eq_(translate_ix_member_name('Abb'), 'abb')%0A eq_(translate_ix_member_name('bbA'), 'bb_a')%0A eq_(translate_ix_member_name('AAb'), 'a_ab')%0A eq_(translate_ix_member_name('AbA'), 'ab_a')%0A eq_(translate_ix_member_name('bAb'), 'b_ab')%0A eq_(translate_ix_member_name('AAAA'), 'aaaa')%0A eq_(translate_ix_member_name('bbbb'), 'bbbb')%0A eq_(translate_ix_member_name('Abbb'), 'abbb')%0A eq_(translate_ix_member_name('bAAA'), 'b_aaa')%0A eq_(translate_ix_member_name('AAbb'), 'a_abb')%0A eq_(translate_ix_member_name('bbAA'), 'bb_aa')%0A eq_(translate_ix_member_name('AAAb'), 'aa_ab')%0A eq_(translate_ix_member_name('bbbA'), 'bbb_a')%0A eq_(translate_ix_member_name('AbAb'), 'ab_ab')%0A eq_(translate_ix_member_name('bAbA'), 'b_ab_a')%0A eq_(translate_ix_member_name('AbAA'), 'ab_aa')%0A eq_(translate_ix_member_name('AAbA'), 'a_ab_a')%0A eq_(translate_ix_member_name('bbAb'), 'bb_ab')%0A eq_(translate_ix_member_name('bAbb'), 'b_abb')%0A eq_(translate_ix_member_name('AbbA'), 'abb_a')%0A eq_(translate_ix_member_name('bAAb'), 'b_a_ab')%0A eq_(translate_ix_member_name('framerFCSErrors'), 'framer_fcs_errors')%0A eq_(translate_ix_member_name('ID'), 'id')%0A%0A
|
|
2e4111dda23e6d686c188cf832f7b6c7c19ea14b
|
Test ReadLengthStatistics
|
tests/test_stats.py
|
tests/test_stats.py
|
Python
| 0 |
@@ -0,0 +1,1517 @@
+from cutadapt.statistics import ReadLengthStatistics%0A%0A%0Aclass TestReadLengthStatistics:%0A def test_empty_on_init(self):%0A rls = ReadLengthStatistics()%0A assert rls.written_reads() == 0%0A assert rls.written_bp() == (0, 0)%0A lengths = rls.written_lengths()%0A assert not lengths%5B0%5D and not lengths%5B1%5D%0A%0A def test_some_reads(self):%0A rls = ReadLengthStatistics()%0A rls.update(%22THEREAD%22) # length: 7%0A rls.update(%22YETANOTHER%22) # length: 10%0A rls.update2(%22FIRST%22, %22SECOND%22) # lengths: 5, 6%0A rls.update(%2212345%22)%0A%0A assert rls.written_reads() == 4%0A assert rls.written_bp() == (7 + 10 + 5 + 5, 6)%0A lengths = rls.written_lengths()%0A assert sorted(lengths%5B0%5D.items()) == %5B(5, 2), (7, 1), (10, 1)%5D%0A assert sorted(lengths%5B1%5D.items()) == %5B(6, 1)%5D%0A%0A def test_iadd(self):%0A rls = ReadLengthStatistics()%0A rls.update(%22THEREAD%22) # length: 7%0A rls.update(%22YETANOTHER%22) # length: 10%0A rls.update2(%22FIRST%22, %22SECOND%22) # lengths: 5, 6%0A rls.update(%2212345%22)%0A%0A rls2 = ReadLengthStatistics()%0A rls2.update(%22TESTING%22) # length: 7%0A rls2.update2(%22LEFT%22, %22RIGHT%22) # lengths: 4, 5%0A rls += rls2%0A%0A assert rls.written_reads() == 6%0A assert rls.written_bp() == (7 + 10 + 5 + 5 + 7 + 4, 6 + 5)%0A lengths = rls.written_lengths()%0A assert sorted(lengths%5B0%5D.items()) == %5B(4, 1), (5, 2), (7, 2), (10, 1)%5D%0A assert sorted(lengths%5B1%5D.items()) == %5B(5, 1), (6, 1)%5D%0A
|
|
1165673d784eab36edcdc4ed4caf22dbd222874a
|
Add some preliminary code and function to enlarge image
|
whois-scraper.py
|
whois-scraper.py
|
Python
| 0 |
@@ -0,0 +1,515 @@
+from lxml import html%0Afrom PIL import Image%0Aimport requests%0A%0Adef enlarge_image(image_file):%0A%09image = Image.open(image_file)%0A%09enlarged_size = map(lambda x: x*2, image.size)%0A%09enlarged_image = image.resize(enlarged_size)%0A%0A%09return enlarged_image%0A%0Adef extract_text(image_file):%0A%09image = enlarge_image(image_file)%0A%0A%09# Use Tesseract to extract text from the enlarged image. Then Return it.%0A%0Adomain = 'speedtest.net'%0A%0Apage = requests.get('http://www.whois.com/whois/%7B%7D'.format(domain))%0Atree = html.fromstring(page.content)%0A
|
|
65b611d3cc67d0f12007ba0eb87e2b3d2a074ff3
|
Fix sanity. Adding future imports.
|
tensorflow/contrib/tpu/python/tpu/error_handling.py
|
tensorflow/contrib/tpu/python/tpu/error_handling.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===================================================================
"""ErrorRendezvous handler for collecting errors from multiple threads."""
import contextlib
import threading
import time
import traceback
from tensorflow.python.framework import errors
from tensorflow.python.platform import tf_logging as logging
_UNINTERESTING_ERRORS = (errors.CancelledError,)
class ErrorRendezvous(object):
"""Resolve errors from multiple threads during TPU execution.
TPU errors can occur on the infeed or outfeed threads as well as the main
training thread.
Depending on which thread "wins" and receives the session error first, we may
end up showing users a confusing and non-actionable error message (session
cancelled) instead of a root cause (e.g. a bad filename).
The rendezvous object provides a location to capture these errors until all
threads terminate. At that point we can choose the most informative error
to report.
"""
def __init__(self, num_sources):
# string -> (exception, traceback)
self._errors = {}
self._num_sources = num_sources
self._session_cancel_timer = None
def record_error(self, source, exception, session=None):
"""Report an exception from the given source.
If a session is passed, a timer will be registered to close it after a few
seconds. This is necessary to ensure the main training loop does not hang
if an infeed/outfeed error occurs. We sleep a few seconds to allow a more
interesting error from another thread to propagate.
Args:
source: string, source of the error
exception: Exception being thrown
session: Session to close after delay.
"""
logging.info('Error recorded from %s: %s', source, exception)
stack_trace = traceback.format_exc()
self._errors[source] = (exception, stack_trace)
if session is not None and self._session_cancel_timer is None:
def _cancel_session():
time.sleep(5)
try:
session.close()
except: # pylint: disable=bare-except
pass
self._session_cancel_timer = threading.Thread(target=_cancel_session,)
self._session_cancel_timer.daemon = True
self._session_cancel_timer.start()
def record_done(self, source):
"""Mark execution source `source` as done.
If an error was originally reported from `source` it is left intact.
Args:
source: `str`, source being recorded
"""
logging.info('%s marked as finished', source)
if source not in self._errors:
self._errors[source] = None
@contextlib.contextmanager
def catch_errors(self, source, session=None):
"""Context manager to report any errors within a block."""
try:
yield
except Exception as e: # pylint: disable=broad-except
self.record_error(source, e, session)
def raise_errors(self, timeout_sec=5):
"""Wait for up to `timeout` seconds for all error sources to finish.
Preferentially raise "interesting" errors (errors not in the
_UNINTERESTING_ERRORS) set.
Args:
timeout_sec: Seconds to wait for other error sources.
"""
for _ in range(timeout_sec):
if len(self._errors) == self._num_sources:
break
time.sleep(1)
kept_errors = [(k, v) for (k, v) in self._errors.items() if v is not None]
if not kept_errors:
return
# First check for any interesting errors, then fall back on the session
# cancelled errors etc.
for k, (exc, _) in kept_errors:
if isinstance(exc, _UNINTERESTING_ERRORS):
continue
else:
raise exc
for k, (exc, _) in kept_errors:
raise exc
|
Python
| 0.000001 |
@@ -747,16 +747,126 @@
ds.%22%22%22%0A%0A
+from __future__ import absolute_import%0Afrom __future__ import division%0Afrom __future__ import print_function%0A%0A
import c
|
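A minimal sketch of how the rendezvous above is meant to be driven, with two invented sources and no session (run in the module's context so the TensorFlow logging import resolves):

rendezvous = ErrorRendezvous(num_sources=2)

with rendezvous.catch_errors(source='infeed'):
    raise ValueError('bad filename')   # captured, not raised here

rendezvous.record_done('training_loop')
rendezvous.raise_errors(timeout_sec=0)   # re-raises the ValueError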
d0cb5c633b5447c22b1d27da93e82151620a30e4
|
Update DOI metadata for a node if it has a DOI
|
website/project/tasks.py
|
website/project/tasks.py
|
from django.apps import apps
import logging
import urlparse
import random
import requests
from framework.celery_tasks import app as celery_app
from website import settings, mails
from website.util.share import GraphNode, format_contributor
logger = logging.getLogger(__name__)
@celery_app.task(ignore_results=True)
def on_node_updated(node_id, user_id, first_save, saved_fields, request_headers=None):
# WARNING: Only perform Read-Only operations in an asynchronous task, until Repeatable Read/Serializable
# transactions are implemented in View and Task application layers.
AbstractNode = apps.get_model('osf.AbstractNode')
node = AbstractNode.load(node_id)
if node.is_collection or node.archiving or node.is_quickfiles:
return
need_update = bool(node.SEARCH_UPDATE_FIELDS.intersection(saved_fields))
# Due to the async nature of the call, this can issue a search update for a new record (acceptable trade-off).
if bool({'spam_status', 'is_deleted'}.intersection(saved_fields)):
need_update = True
elif not node.is_public and 'is_public' not in saved_fields:
need_update = False
if need_update:
node.update_search()
update_node_share(node)
def update_node_share(node):
# Wrapper that ensures share_url and token exist
if settings.SHARE_URL:
if not settings.SHARE_API_TOKEN:
return logger.warning('SHARE_API_TOKEN not set. Could not send "{}" to SHARE.'.format(node._id))
_update_node_share(node)
def _update_node_share(node):
# Any modifications to this function may need to change _async_update_node_share
data = serialize_share_node_data(node)
resp = send_share_node_data(data)
try:
resp.raise_for_status()
except Exception:
if resp.status_code >= 500:
_async_update_node_share.delay(node._id)
else:
send_desk_share_error(node, resp, 0)
@celery_app.task(bind=True, max_retries=4, acks_late=True)
def _async_update_node_share(self, node_id):
# Any modifications to this function may need to change _update_node_share
# Takes node_id to ensure async retries push fresh data
AbstractNode = apps.get_model('osf.AbstractNode')
node = AbstractNode.load(node_id)
data = serialize_share_node_data(node)
resp = send_share_node_data(data)
try:
resp.raise_for_status()
except Exception as e:
if resp.status_code >= 500:
if self.request.retries == self.max_retries:
send_desk_share_error(node, resp, self.request.retries)
raise self.retry(
exc=e,
countdown=(random.random() + 1) * min(60 + settings.CELERY_RETRY_BACKOFF_BASE ** self.request.retries, 60 * 10)
)
else:
send_desk_share_error(node, resp, self.request.retries)
def send_share_node_data(data):
resp = requests.post('{}api/normalizeddata/'.format(settings.SHARE_URL), json=data, headers={'Authorization': 'Bearer {}'.format(settings.SHARE_API_TOKEN), 'Content-Type': 'application/vnd.api+json'})
logger.debug(resp.content)
return resp
def serialize_share_node_data(node):
return {
'data': {
'type': 'NormalizedData',
'attributes': {
'tasks': [],
'raw': None,
'data': {'@graph': format_registration(node) if node.is_registration else format_node(node)}
}
}
}
def format_node(node):
is_qa_node = bool(set(settings.DO_NOT_INDEX_LIST['tags']).intersection(node.tags.all().values_list('name', flat=True))) \
or any(substring in node.title for substring in settings.DO_NOT_INDEX_LIST['titles'])
return [
{
'@id': '_:123',
'@type': 'workidentifier',
'creative_work': {'@id': '_:789', '@type': 'project'},
'uri': '{}{}/'.format(settings.DOMAIN, node._id),
}, {
'@id': '_:789',
'@type': 'project',
'is_deleted': not node.is_public or node.is_deleted or node.is_spammy or is_qa_node
}
]
def format_registration(node):
is_qa_node = bool(set(settings.DO_NOT_INDEX_LIST['tags']).intersection(node.tags.all().values_list('name', flat=True))) \
or any(substring in node.title for substring in settings.DO_NOT_INDEX_LIST['titles'])
registration_graph = GraphNode('registration', **{
'title': node.title,
'description': node.description or '',
'is_deleted': not node.is_public or node.is_deleted or is_qa_node,
'date_published': node.registered_date.isoformat() if node.registered_date else None,
'registration_type': node.registered_schema.first().name if node.registered_schema else None,
'withdrawn': node.is_retracted,
'justification': node.retraction.justification if node.retraction else None,
})
to_visit = [
registration_graph,
GraphNode('workidentifier', creative_work=registration_graph, uri=urlparse.urljoin(settings.DOMAIN, node.url))
]
registration_graph.attrs['tags'] = [
GraphNode('throughtags', creative_work=registration_graph, tag=GraphNode('tag', name=tag._id))
for tag in node.tags.all() or [] if tag._id
]
to_visit.extend(format_contributor(registration_graph, user, bool(user._id in node.visible_contributor_ids), i) for i, user in enumerate(node.contributors))
to_visit.extend(GraphNode('AgentWorkRelation', creative_work=registration_graph, agent=GraphNode('institution', name=institution.name)) for institution in node.affiliated_institutions.all())
visited = set()
to_visit.extend(registration_graph.get_related())
while True:
if not to_visit:
break
n = to_visit.pop(0)
if n in visited:
continue
visited.add(n)
to_visit.extend(list(n.get_related()))
return [node_.serialize() for node_ in visited]
def send_desk_share_error(node, resp, retries):
mails.send_mail(
to_addr=settings.OSF_SUPPORT_EMAIL,
mail=mails.SHARE_ERROR_DESK,
node=node,
resp=resp,
retries=retries,
can_change_preferences=False,
)
|
Python
| 0.000002 |
@@ -1210,24 +1210,121 @@
hare(node)%0A%0A
+ if node.get_identifier_value('doi'):%0A node.request_identifier_update(category='doi')%0A%0A
def update_n
|
8f4d557023a84f1b532fe0843615179ebf3194ec
|
add setup.py
|
DateObjects/setup.py
|
DateObjects/setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,945 @@
+# coding: utf-8%0Afrom setuptools import setup%0Aimport os%0A%0A%0AREADME = os.path.join(os.path.dirname(__file__), 'README.md')%0A%0Asetup(name='date-objects',%0A version='1.0',%0A description='helper for manipulating dates.',%0A long_description=open(README).read(),%0A author=%22Marcelo Fonseca Tambalo%22, author_email=%[email protected]%22,%0A py_modules=%5B'DateObjects'%5D,%0A zip_safe=False,%0A platforms='any',%0A include_package_data=True,%0A classifiers=%5B%0A 'Development Status :: 5 - Production/Stable',%0A 'Framework :: Django',%0A 'Framework :: Flask',%0A 'Intended Audience :: Developers',%0A 'License :: OSI Approved :: MIT License',%0A 'Natural Language :: English',%0A 'Operating System :: OS Independent',%0A 'Programming Language :: Python',%0A 'Topic :: Software Development :: Libraries',%0A %5D,%0A url='https://github.com/zokis/DateObjects/',)%0A
|
|
7d1f6125d1f56b871e8d6515e7dd1844e36968b1
|
Add exception support, most code transferred from driver's code
|
zvmsdk/exception.py
|
zvmsdk/exception.py
|
Python
| 0.000002 |
@@ -0,0 +1,2511 @@
+import config%0Aimport log%0Aimport six%0A%0ACONF = config.CONF%0ALOG = log.LOG%0A%0A%0Aclass BaseException(Exception):%0A %22%22%22%0A Inherit from this class and define a 'msg_fmt' property.%0A That msg_fmt will get printf'd with the keyword arguments%0A provided to the constructor.%0A %22%22%22%0A msg_fmt = %22An unknown exception occurred.%22%0A code = 500%0A headers = %7B%7D%0A safe = False%0A%0A def __init__(self, message=None, **kwargs):%0A self.kw = kwargs%0A if 'code' in self.kw:%0A try:%0A self.kw%5B'code'%5D = self.code%0A except AttributeError:%0A pass%0A%0A if not message:%0A try:%0A message = self.msg_fmt %25 kwargs%0A except Exception:%0A LOG.exception('Exception in string format operation')%0A for name, value in six.iteritems(kwargs):%0A LOG.error(%22%25s: %25s%22 %25 (name, value))%0A%0A message = self.msg_fmt%0A%0A self.message = message%0A super(BaseException, self).__init__(message)%0A%0A def format_message(self):%0A return self.args%5B0%5D%0A%0A%0Aclass ZVMDriverError(BaseException):%0A msg_fmt = 'z/VM driver error: %25(msg)s'%0A%0A%0Aclass ZVMXCATRequestFailed(BaseException):%0A msg_fmt = 'Request to xCAT server %25(xcatserver)s failed: %25(msg)s'%0A%0A%0Aclass ZVMInvalidXCATResponseDataError(BaseException):%0A msg_fmt = 'Invalid data returned from xCAT: %25(msg)s'%0A%0A%0Aclass ZVMXCATInternalError(BaseException):%0A msg_fmt = 'Error returned from xCAT: %25(msg)s'%0A%0A%0Aclass ZVMVolumeError(BaseException):%0A msg_fmt = 'Volume error: %25(msg)s'%0A%0A%0Aclass ZVMImageError(BaseException):%0A msg_fmt = %22Image error: %25(msg)s%22%0A%0A%0Aclass ZVMGetImageFromXCATFailed(BaseException):%0A msg_fmt = 'Get image from xCAT failed: %25(msg)s'%0A%0A%0Aclass ZVMNetworkError(BaseException):%0A msg_fmt = %22z/VM network error: %25(msg)s%22%0A%0A%0Aclass ZVMXCATXdshFailed(BaseException):%0A msg_fmt = 'Execute xCAT xdsh command failed: %25(msg)s'%0A%0A%0Aclass ZVMXCATCreateNodeFailed(BaseException):%0A msg_fmt = 'Create xCAT node %25(node)s failed: %25(msg)s'%0A%0A%0Aclass ZVMXCATCreateUserIdFailed(BaseException):%0A msg_fmt = 'Create xCAT user id %25(instance)s failed: %25(msg)s'%0A%0A%0Aclass ZVMXCATUpdateNodeFailed(BaseException):%0A msg_fmt = 'Update node %25(node)s info failed: %25(msg)s'%0A%0A%0Aclass ZVMXCATDeployNodeFailed(BaseException):%0A msg_fmt = 'Deploy image on node %25(node)s failed: %25(msg)s'%0A%0A%0Aclass ZVMConfigDriveError(BaseException):%0A msg_fmt = 'Create configure drive failed: %25(msg)s'%0A%0A%0Aclass ZVMRetryException(BaseException):%0A pass%0A
|
|
1d1f9d5d8f4873d6a23c430a5629eaeddfd50d2a
|
Add network set default route view
|
subiquity/ui/views/network_default_route.py
|
subiquity/ui/views/network_default_route.py
|
Python
| 0 |
@@ -0,0 +1,2562 @@
+# Copyright 2015 Canonical, Ltd.%0A#%0A# This program is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as%0A# published by the Free Software Foundation, either version 3 of the%0A# License, or (at your option) any later version.%0A#%0A# This program is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the GNU Affero General Public License%0A# along with this program. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0Afrom urwid import Text, Pile, ListBox%0Afrom subiquity.view import ViewPolicy%0Afrom subiquity.ui.buttons import cancel_btn, done_btn%0Afrom subiquity.ui.utils import Color, Padding%0Aimport logging%0A%0Alog = logging.getLogger('subiquity.network.set_default_route')%0A%0A%0Aclass NetworkSetDefaultRouteView(ViewPolicy):%0A def __init__(self, model, signal):%0A self.model = model%0A self.signal = signal%0A self.is_manual = False%0A body = %5B%0A Padding.center_50(self._build_disk_selection()),%0A Padding.line_break(%22%22),%0A Padding.center_50(self._build_raid_configuration()),%0A Padding.line_break(%22%22),%0A Padding.center_20(self._build_buttons())%0A %5D%0A super().__init__(ListBox(body))%0A%0A def _build_default_routes(self):%0A items = %5B%0A Text(%22Please set the default gateway:%22),%0A Color.menu_button(done_btn(label=%22192.168.9.1 (em1, em2)%22,%0A on_press=self.done),%0A focus_map=%22menu_button focus%22),%0A Color.menu_button(%0A done_btn(label=%22Specify the default route manually%22,%0A on_press=self.set_manually),%0A focus_map=%22menu_button focus%22)%0A %5D%0A return Pile(items)%0A%0A def _build_buttons(self):%0A cancel = cancel_btn(on_press=self.cancel)%0A done = done_btn(on_press=self.done)%0A%0A buttons = %5B%0A Color.button(done, focus_map='button focus'),%0A Color.button(cancel, focus_map='button focus')%0A %5D%0A return Pile(buttons)%0A%0A def set_manually(self, result):%0A self.is_manual = True%0A self.signal.emit_signal('refresh')%0A%0A def done(self, result):%0A self.signal.emit_signal('network:show')%0A%0A def cancel(self, button):%0A self.signal.emit_signal(self.model.get_previous_signal)%0A
|
|
8b8db4f78610b6c8b72270275a621d529091a74f
|
Set account_credit_control_dunning_fees to installable
|
account_credit_control_dunning_fees/__openerp__.py
|
account_credit_control_dunning_fees/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Credit control dunning fees',
'version': '0.1.0',
'author': 'Camptocamp',
'maintainer': 'Camptocamp',
'category': 'Accounting',
'complexity': 'normal',
'depends': ['account_credit_control'],
'description': """
Dunning Fees for Credit Control
===============================
This extension of credit control adds the notion of dunning fees
on credit control lines.
Configuration
-------------
For release 0.1 only fixed fees are supported.
You can specify a fixed fees amount, a product and a currency
on the credit control level form.
The amount will be used as the fees value; the currency will determine
the currency of the fee. If the credit control line does not have the
same currency as the fees currency, fees will be converted to
the credit control line currency.
The product is used to compute taxes in the reconciliation process.
Run
---
Fees are automatically computed on credit run and saved
on the generated credit lines.
Fees can be manually edited as long as the credit line is draft.
The credit control summary report includes a new fees column.
Roadmap
-------
Support of fees price list
""",
'website': 'http://www.camptocamp.com',
'data': ['view/policy_view.xml',
'view/line_view.xml',
'report/report.xml',
'security/ir.model.access.csv'],
'demo': [],
'test': [],
'installable': False,
'auto_install': False,
'license': 'AGPL-3',
'application': False}
|
Python
| 0.000001 |
@@ -2253,20 +2253,19 @@
lable':
-Fals
+Tru
e,%0A 'aut
|
d7c46e9bc205d8f5a3cf7e1871547eff8ae7164c
|
Implement performance testing script
|
tools/test-performance.py
|
tools/test-performance.py
|
Python
| 0.000002 |
@@ -0,0 +1,1090 @@
+#!/usr/bin/python3%0A%0Aimport asyncio%0Aimport random%0A%0Ahost = 'localhost'%0Aport = 5027%0A%0AmessageLogin = bytearray.fromhex('000F313233343536373839303132333435')%0AmessageLocation = bytearray.fromhex('000000000000002b080100000140d4e3ec6e000cc661d01674a5e0fffc00000900000004020100f0000242322318000000000100007a04')%0A%0Adevices = 100%0Aperiod = 1%0A%0A%0Aclass AsyncClient(asyncio.Protocol):%0A%0A def __init__(self, loop):%0A self.loop = loop%0A self.buffer = memoryview(messageLogin)%0A%0A def connection_made(self, transport):%0A self.send_message(transport)%0A%0A def send_message(self, transport):%0A transport.write(self.buffer)%0A self.buffer = memoryview(messageLocation)%0A delay = period * (0.9 + 0.2 * random.random())%0A self.loop.call_later(delay, self.send_message, transport)%0A%0A def data_received(self, data):%0A pass%0A%0A def connection_lost(self, exc):%0A self.loop.stop()%0A%0A%0Aloop = asyncio.get_event_loop()%0A%0Afor i in range(0, devices):%0A loop.create_task(loop.create_connection(lambda: AsyncClient(loop), host, port))%0A%0Aloop.run_forever()%0Aloop.close()%0A
|
|
2c57f2143e21fa3d006d4e4e2737429fb60b4797
|
Call pip install before running server.
|
tornado/setup_pg.py
|
tornado/setup_pg.py
|
from os.path import expanduser
from os import kill
import subprocess
import sys
import time
python = expanduser('~/FrameworkBenchmarks/installs/py2/bin/python')
cwd = expanduser('~/FrameworkBenchmarks/tornado')
def start(args, logfile, errfile):
subprocess.Popen(
python + " server.py --port=8080 --postgres=%s --logging=error" % (args.database_host,),
shell=True, cwd=cwd, stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
for line in subprocess.check_output(["ps", "aux"]).splitlines():
if 'server.py --port=8080' in line:
pid = int(line.split(None,2)[1])
kill(pid, 9)
return 0
if __name__ == '__main__':
class DummyArg:
database_host = 'localhost'
start(DummyArg(), sys.stderr, sys.stderr)
time.sleep(1)
stop(sys.stderr, sys.stderr)
|
Python
| 0 |
@@ -1,54 +1,13 @@
-from os.path import expanduser%0Afrom os import kill
+import os
%0Aimp
@@ -50,17 +50,26 @@
e%0A%0A%0A
-python =
+bin_dir = os.path.
expa
@@ -118,104 +118,333 @@
/bin
-/
+')%0A
python
-')%0Acwd = expanduser('~/FrameworkBenchmarks/tornado')%0A%0A%0Adef start(args, logfile, errfile):
+ = os.path.expanduser(os.path.join(bin_dir, 'python'))%0Apip = os.path.expanduser(os.path.join(bin_dir, 'pip'))%0Acwd = os.path.expanduser('~/FrameworkBenchmarks/tornado')%0A%0A%0Adef start(args, logfile, errfile):%0A subprocess.call(pip + ' install -r requirements.txt', cwd=cwd, shell=True, stderr=errfile, stdout=logfile)%0A
%0A
@@ -479,17 +479,17 @@
ython +
-%22
+'
server.
@@ -532,17 +532,17 @@
ng=error
-%22
+'
%25 (args
@@ -634,16 +634,17 @@
turn 0%0A%0A
+%0A
def stop
@@ -708,19 +708,19 @@
ut(%5B
-%22ps%22, %22aux%22
+'ps', 'aux'
%5D).s
@@ -814,16 +814,17 @@
it(None,
+
2)%5B1%5D)%0A
@@ -834,16 +834,19 @@
+os.
kill(pid
|
046920217192081d628370898857581fb8d5ec28
|
Fix wrong teleport message (#3935)
|
pokemongo_bot/cell_workers/follow_path.py
|
pokemongo_bot/cell_workers/follow_path.py
|
# -*- coding: utf-8 -*-
import gpxpy
import gpxpy.gpx
import json
from pokemongo_bot.base_task import BaseTask
from pokemongo_bot.cell_workers.utils import distance, i2f, format_dist
from pokemongo_bot.human_behaviour import sleep
from pokemongo_bot.step_walker import StepWalker
from pgoapi.utilities import f2i
class FollowPath(BaseTask):
SUPPORTED_TASK_API_VERSION = 1
def initialize(self):
self._process_config()
self.points = self.load_path()
if self.path_start_mode == 'closest':
self.ptr = self.find_closest_point_idx(self.points)
else:
self.ptr = 0
def _process_config(self):
self.path_file = self.config.get("path_file", None)
self.path_mode = self.config.get("path_mode", "linear")
self.path_start_mode = self.config.get("path_start_mode", "first")
def load_path(self):
if self.path_file is None:
raise RuntimeError('You need to specify a path file (json or gpx)')
if self.path_file.endswith('.json'):
return self.load_json()
elif self.path_file.endswith('.gpx'):
return self.load_gpx()
def load_json(self):
with open(self.path_file) as data_file:
points=json.load(data_file)
# Replace Verbal Location with lat&lng.
for index, point in enumerate(points):
point_tuple = self.bot.get_pos_by_name(point['location'])
self.emit_event(
'location_found',
level='debug',
formatted="Location found: {location} {position}",
data={
'location': point,
'position': point_tuple
}
)
points[index] = self.lat_lng_tuple_to_dict(point_tuple)
return points
def lat_lng_tuple_to_dict(self, tpl):
return {'lat': tpl[0], 'lng': tpl[1]}
def load_gpx(self):
gpx_file = open(self.path_file, 'r')
gpx = gpxpy.parse(gpx_file)
if len(gpx.tracks) == 0:
raise RuntimeError('GPX file does not contain a track')
points = []
track = gpx.tracks[0]
for segment in track.segments:
for point in segment.points:
points.append({"lat": point.latitude, "lng": point.longitude})
return points
def find_closest_point_idx(self, points):
return_idx = 0
min_distance = float("inf");
for index in range(len(points)):
point = points[index]
botlat = self.bot.api._position_lat
botlng = self.bot.api._position_lng
lat = float(point['lat'])
lng = float(point['lng'])
dist = distance(
botlat,
botlng,
lat,
lng
)
if dist < min_distance:
min_distance = dist
return_idx = index
return return_idx
def work(self):
last_lat = self.bot.api._position_lat
last_lng = self.bot.api._position_lng
point = self.points[self.ptr]
lat = float(point['lat'])
lng = float(point['lng'])
if self.bot.config.walk > 0:
step_walker = StepWalker(
self.bot,
self.bot.config.walk,
lat,
lng
)
is_at_destination = False
if step_walker.step():
is_at_destination = True
else:
self.bot.api.set_position(lat, lng, 0)
dist = distance(
last_lat,
last_lng,
lat,
lng
)
if dist <= 1 or (self.bot.config.walk > 0 and is_at_destination):
if (self.ptr + 1) == len(self.points):
self.ptr = 0
if self.path_mode == 'linear':
self.points = list(reversed(self.points))
else:
self.ptr += 1
self.emit_event(
'position_update',
formatted="Teleported from {last_position} to {current_position} ({distance} {distance_unit})",
data={
'last_position': (last_lat, last_lng, 0),
'current_position': (lat, lng, 0),
'distance': dist,
'distance_unit': 'm'
}
)
return [lat, lng]
|
Python
| 0 |
@@ -580,24 +580,16 @@
points)%0A
-
%0A
@@ -2707,28 +2707,16 @@
'lng'%5D)%0A
-
%0A
@@ -4080,18 +4080,15 @@
ed=%22
-Teleported
+Walking
fro
|
640c86e17b10d8f892a4036ade4ce7b8dca30347
|
Implement dragon-blood-clan module.
|
gygax/modules/dbc.py
|
gygax/modules/dbc.py
|
Python
| 0 |
@@ -0,0 +1,1025 @@
+# -*- coding: utf-8 -*-%0A%0A%22%22%22%0A:mod:%60gygax.modules.dbc%60 --- Module for playing Dragon-Blood-Clan.%0A==================================================================%0A%22%22%22%0A%0Aimport gygax.modules.roll as roll%0A%0Adef dbc(bot, sender, text):%0A need = 6 # The next die needed.%0A dice_count = 5 # The number of dice to roll.%0A rolls_left = 3 # The number of rolls left.%0A reply = %5B%5D # The reply to the sender.%0A%0A # Roll until we have 6, 5 and 4 or we run out of rolls.%0A while need %3E 3 and rolls_left %3E 0:%0A results = roll.roll_dice(dice_count, 6)%0A reply.append(%22 + %22.join(map(str, results)))%0A rolls_left -= 1%0A%0A # Check for needed dice%0A while need %3E 3 and need in results:%0A results.remove(need)%0A need -= 1%0A dice_count -= 1%0A%0A if need %3E 3:%0A reply.append(%22no luck%22)%0A else:%0A reply.append(%22score: %7B%7D%22.format(sum(results)))%0A reply.append(%22rolls left: %7B%7D%22.format(rolls_left))%0A bot.reply(%22, %22.join(reply))%0Adbc.command = %22.dbc%22%0A
|
|
2d81274953629e34cc4b0232782cb910d1d459c9
|
Add process_watcher mod (triggers Process.Creation event)
|
modder/mods/process_watcher.py
|
modder/mods/process_watcher.py
|
Python
| 0 |
@@ -0,0 +1,1332 @@
+# coding: utf-8%0Aimport atexit%0Aimport platform%0A%0Afrom modder import on, trigger%0A%0Aif platform.system() == 'Windows':%0A import pythoncom%0A import wmi%0A%0A @on('Modder.Started')%0A def watch_process_creation(event):%0A pythoncom.CoInitialize()%0A atexit.register(pythoncom.CoUninitialize)%0A%0A wmi_root = wmi.WMI()%0A process_watcher = wmi_root.Win32_Process.watch_for(%0A notification_type='Creation',%0A delay_secs=2%0A )%0A try:%0A while 1:%0A try:%0A new_process = process_watcher()%0A trigger(%0A 'Process.Created',%0A data=%7B%0A 'caption': new_process.wmi_property('Caption').value,%0A 'process_name': new_process.wmi_property('Name').value,%0A 'executable_path': new_process.wmi_property('ExecutablePath').value,%0A 'pid': new_process.wmi_property('ProcessId').value,%0A %7D%0A )%0A except Exception as e:%0A print 'innter error:', e%0A pass%0A except Exception as e:%0A print 'outter error:', e%0A pass%0A finally:%0A pythoncom.CoUninitialize()%0Aelse:%0A pass%0A
|
|
03618b710146cdfacb7a8913a65809227e71546c
|
add test
|
tests/transforms_tests/image_tests/test_ten_crop.py
|
tests/transforms_tests/image_tests/test_ten_crop.py
|
Python
| 0.000002 |
@@ -0,0 +1,616 @@
+import unittest%0A%0Aimport numpy as np%0A%0Afrom chainer import testing%0Afrom chainercv.transforms import ten_crop%0A%0A%0Aclass TestTenCrop(unittest.TestCase):%0A%0A def test_ten_crop(self):%0A img = np.random.uniform(size=(3, 48, 32))%0A%0A out = ten_crop(img, (48, 32))%0A self.assertEqual(out.shape, (10, 3, 48, 32))%0A for crop in out%5B:5%5D:%0A np.testing.assert_equal(crop, img)%0A for crop in out%5B5:%5D:%0A np.testing.assert_equal(crop%5B:, :, ::-1%5D, img)%0A%0A out = ten_crop(img, (24, 12))%0A self.assertEqual(out.shape, (10, 3, 24, 12))%0A%0A%0Atesting.run_module(__name__, __file__)%0A
|
|
4835cac3b5ea15671f3da25cbc6e6db4bad725c9
|
Create crawl-twse.py
|
crawl/crawl-twse.py
|
crawl/crawl-twse.py
|
Python
| 0.000001 |
@@ -0,0 +1,330 @@
+req=requests.get(%22http://www.twse.com.tw/ch/trading/fund/BFI82U/BFI82U.php?report1=day&input_date=105%252F05%252F31&mSubmit=%25ACd%25B8%25DF&yr=2016&w_date=20160530&m_date=20160501%22)%0Areq.encoding='utf-8'%0Ahtml=req.text.encode('utf-8')%0Asoup=BeautifulSoup(html,%22html.parser%22)%0Afor td in soup.findAll(%22td%22,%7B%22class%22:%22basic2%22%7D):%0A print td.text%0A
|
|
bd597a8f34d6f95bc445550bcc239ff67d0321f4
|
Add missing file.
|
tests/tests/utils.py
|
tests/tests/utils.py
|
Python
| 0.000001 |
@@ -0,0 +1,346 @@
+from django.db import connection%0Afrom django.utils import six%0A%0A%0Adef get_table_list():%0A with connection.cursor() as cursor:%0A table_list = connection.introspection.get_table_list(cursor)%0A if table_list and not isinstance(table_list%5B0%5D, six.string_types):%0A table_list = %5Btable.name for table in table_list%5D%0A return table_list%0A
|
|
4592684c869780d81cb3521ee57d4efab8138c74
|
Fix event
|
cupy/cuda/stream.py
|
cupy/cuda/stream.py
|
from cupy.cuda import runtime
class Event(object):
"""CUDA event, a synchronization point of CUDA streams.
This class handles the CUDA event handle in an RAII way, i.e., when an Event
instance is destroyed by the GC, its handle is also destroyed.
Args:
block (bool): If True, the event blocks on the
:meth:`~cupy.cuda.Event.synchronize` method.
disable_timing (bool): If True, the event does not prepare the timing
data.
interprocess (bool): If True, the event can be passed to other
processes.
Attributes:
ptr (cupy.cuda.runtime.Stream): Raw stream handle. It can be passed to
the CUDA Runtime API via ctypes.
"""
def __init__(self, block=False, disable_timing=False, interprocess=False):
if interprocess and not disable_timing:
raise ValueError('Timing must be disabled for interprocess events')
flag = ((block or runtime.EVENT_BLOCKING_SYNC) |
(disable_timing or runtime.EVENT_DISABLE_TIMING) |
(interprocess or runtime.EVENT_INTERPROCESS))
self.ptr = runtime.eventCreate(flag)
def __del__(self):
runtime.eventDestroy(self.ptr)
@property
def done(self):
"""True if the event is done."""
return bool(runtime.eventQuery(self.ptr))
def record(self, stream):
"""Records the event to a stream.
Args:
stream (cupy.cuda.Stream): CUDA stream to record event.
.. seealso:: :meth:`cupy.cuda.Stream.record`
"""
runtime.eventRecord(self.ptr, stream.ptr)
def synchronize(self):
"""Synchronizes all device work to the event.
If the event is created as a blocking event, it also blocks the CPU
thread until the event is done.
"""
runtime.eventSynchronize(self.ptr)
def get_elapsed_time(start_event, end_event):
"""Gets the elapsed time between two events.
Args:
start_event (Event): Earlier event.
end_event (Event): Later event.
Returns:
float: Elapsed time in milliseconds.
"""
return runtime.eventElapsedTime(start_event.ptr, end_event.ptr)
class Stream(object):
"""CUDA stream.
This class handles the CUDA stream handle in an RAII way, i.e., when a Stream
instance is destroyed by the GC, its handle is also destroyed.
Args:
null (bool): If True, the stream is a null stream (i.e. the default
stream that synchronizes with all streams). Otherwise, a plain new
stream is created.
non_blocking (bool): If True, the stream does not synchronize with the
NULL stream.
Attributes:
ptr (cupy.cuda.runtime.Stream): Raw stream handle. It can be passed to
the CUDA Runtime API via ctypes.
"""
def __init__(self, null=False, non_blocking=False):
if null:
self.ptr = runtime.Stream()
elif non_blocking:
self.ptr = runtime.streamCreateWithFlags(runtime.streamNonBlocking)
else:
self.ptr = runtime.streamCreate()
def __del__(self):
if self.ptr:
runtime.streamDestroy(self.ptr)
self.ptr = runtime.Stream()
@property
def done(self):
"""True if all work on this stream has been done."""
return bool(runtime.streamQuery(self.ptr))
def synchronize(self):
"""Waits for the stream completing all queued work."""
runtime.streamSynchronize(self.ptr)
def add_callback(self, callback, arg):
"""Adds a callback that is called when all queued work is done.
Args:
callback (function): Callback function. It must take three
arguments (Stream object, int error status, and user data of
type ctypes.c_void_p), and returns nothing.
arg (ctypes.c_void_p): Argument to the callback.
"""
runtime.streamAddCallback(self.ptr, callback, arg)
def record(self, event=None):
"""Records an event on the stream.
Args:
event (None or cupy.cuda.Event): CUDA event. If None, then a new
plain event is created and used.
Returns:
cupy.cuda.Event: The recorded event.
.. seealso:: :meth:`cupy.cuda.Event.record`
"""
if event is None:
event = Event()
runtime.eventRecord(event.ptr, self.ptr)
return event
def wait_event(self, event):
"""Makes the stream wait for an event.
The future work on this stream will be done after the event.
Args:
event (cupy.cuda.Event): CUDA event.
"""
runtime.streamWaitEvent(self.ptr, event)
|
Python
| 0.998788 |
@@ -1,8 +1,23 @@
+import ctypes%0A%0A
from cup
@@ -802,32 +802,68 @@
process=False):%0A
+ self.ptr = runtime.Event()%0A%0A
if inter
@@ -997,18 +997,19 @@
((block
-or
+and
runtime
@@ -1013,27 +1013,25 @@
ime.
-EVENT_BLOCKING_SYNC
+eventBlockingSync
) %7C%0A
@@ -1062,18 +1062,19 @@
_timing
-or
+and
runtime
@@ -1078,28 +1078,26 @@
ime.
-EVENT_DISABLE_TIMING
+eventDisableTiming
) %7C%0A
@@ -1126,18 +1126,19 @@
process
-or
+and
runtime
@@ -1142,26 +1142,25 @@
ime.
-EVENT_INTERPROCESS
+eventInterprocess
))%0A
@@ -1196,16 +1196,25 @@
ntCreate
+WithFlags
(flag)%0A%0A
@@ -1228,32 +1228,57 @@
__del__(self):%0A
+ if self.ptr:%0A
runtime.
@@ -1295,24 +1295,63 @@
oy(self.ptr)
+%0A self.ptr = runtime.Event()
%0A%0A @prope
@@ -1493,16 +1493,21 @@
, stream
+=None
):%0A
@@ -1617,32 +1617,84 @@
to record event.
+ The null%0A stream is used by default.
%0A%0A .. see
@@ -1737,32 +1737,98 @@
d%60%0A%0A %22%22%22%0A
+ if stream is None:%0A stream = Stream(null=True)%0A
runtime.
|
be904e21db2012ac8f72a141afd9b93da2bfb262
|
Create http responses
|
monarch/base/http/responses.py
|
monarch/base/http/responses.py
|
Python
| 0 |
@@ -0,0 +1,3201 @@
+# Copyright (C) 2015 David Barrag%C3%A1n %[email protected]%3E%0A#%0A# This program is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as%0A# published by the Free Software Foundation, either version 3 of the%0A# License, or (at your option) any later version.%0A#%0A# This program is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the GNU Affero General Public License%0A# along with this program. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0A%0Afrom django.http.response import HttpResponseBase%0Afrom django.http import HttpResponse%0Afrom django.http import HttpResponseRedirect%0Afrom django.http import HttpResponsePermanentRedirect%0Afrom . import status%0A%0A%0Aclass HttpResponse(HttpResponse):%0A def __init__(self, content=%22%22, *args, **kwarg):%0A self.content_data = content%0A super(HttpResponse, self).__init__(content, *args, **kwarg)%0A%0A @property%0A def content_data(self):%0A return self.__content_data%0A%0A @content_data.setter%0A def content_data(self, value):%0A self.__content_data = value%0A%0A%0Aclass Ok(HttpResponse):%0A status_code = status.HTTP_200_OK%0A%0Aclass Created(HttpResponse):%0A status_code = status.HTTP_201_CREATED%0A%0Aclass Accepted(HttpResponse):%0A status_code = status.HTTP_202_ACCEPTED%0A%0Aclass NoContent(HttpResponse):%0A status_code = status.HTTP_204_NO_CONTENT%0A%0Aclass MultipleChoices(HttpResponse):%0A status_code = status.HTTP_300_MULTIPLE_CHOICES%0A%0Aclass MovedPermanently(HttpResponsePermanentRedirect):%0A status_code = status.HTTP_301_MOVED_PERMANENTLY%0A%0Aclass Redirect(HttpResponseRedirect):%0A status_code = status.HTTP_302_FOUND%0A%0Aclass SeeOther(HttpResponse):%0A status_code = status.HTTP_303_SEE_OTHER%0A%0Aclass NotModified(HttpResponse):%0A status_code = status.HTTP_304_NOT_MODIFIED%0A%0Aclass TemporaryRedirect(HttpResponse):%0A status_code = status.HTTP_307_TEMPORARY_REDIRECT%0A%0Aclass BadRequest(HttpResponse):%0A status_code = status.HTTP_400_BAD_REQUEST%0A%0Aclass Unauthorized(HttpResponse):%0A status_code = status.HTTP_401_UNAUTHORIZED%0A%0Aclass Forbidden(HttpResponse):%0A status_code = status.HTTP_403_FORBIDDEN%0A%0Aclass NotFound(HttpResponse):%0A status_code = status.HTTP_404_NOT_FOUND%0A%0Aclass MethodNotAllowed(HttpResponse):%0A status_code = status.HTTP_405_METHOD_NOT_ALLOWED%0A%0Aclass NotAcceptable(HttpResponse):%0A status_code = status.HTTP_406_NOT_ACCEPTABLE%0A%0Aclass Conflict(HttpResponse):%0A status_code = status.HTTP_409_CONFLICT%0A%0Aclass Gone(HttpResponse):%0A status_code = status.HTTP_410_GONE%0A%0Aclass PreconditionFailed(HttpResponse):%0A status_code = status.HTTP_412_PRECONDITION_FAILED%0A%0Aclass UnsupportedMediaType(HttpResponse):%0A status_code = status.HTTP_415_UNSUPPORTED_MEDIA_TYPE%0A%0Aclass TooManyRequests(HttpResponse):%0A status_code = status.HTTP_429_TOO_MANY_REQUESTS%0A%0Aclass InternalServerError(HttpResponse):%0A status_code = status.HTTP_500_INTERNAL_SERVER_ERROR%0A%0Aclass NotImplemented(HttpResponse):%0A status_code = status.HTTP_501_NOT_IMPLEMENTED%0A%0A
|
|
72fcb82d33c4a4317630b6f2c7985e69ff9d3ce3
|
add some simple tests for lru_cache
|
src/unittest/python/backport_tests.py
|
src/unittest/python/backport_tests.py
|
Python
| 0 |
@@ -0,0 +1,1158 @@
+#!/usr/bin/env python%0Afrom __future__ import print_function%0A%0Aimport sys%0Aimport unittest%0A%0Afrom backports import functools_lru_cache%0A%0Aclass Test(unittest.TestCase):%0A def test_with_bound_cache(self):%0A @functools_lru_cache.lru_cache()%0A def cachy(*args):%0A return True%0A self.assertTrue(cachy(%22foo%22))%0A self.assertTrue(cachy(%22bar%22))%0A self.assertTrue(cachy(%22foo%22))%0A print(cachy.cache_info())%0A cachy.cache_clear()%0A%0A def test_without_cache(self):%0A @functools_lru_cache.lru_cache(maxsize=None)%0A def cachy(*args):%0A return True%0A self.assertTrue(cachy(%22foo%22))%0A self.assertTrue(cachy(%22bar%22))%0A self.assertTrue(cachy(%22foo%22))%0A print(cachy.cache_info())%0A cachy.cache_clear()%0A%0A def test_with_boundless_cache(self):%0A @functools_lru_cache.lru_cache(maxsize=0)%0A def cachy(*args):%0A return True%0A self.assertTrue(cachy(%22foo%22))%0A self.assertTrue(cachy(%22bar%22))%0A self.assertTrue(cachy(%22foo%22))%0A print(cachy.cache_info())%0A cachy.cache_clear()%0A%0A%0Aif __name__ == %22__main__%22:%0A sys.exit(unittest.main())%0A
|
|
75a2c6fb7074e316908d12cfd6f1e03d9e0a1ba6
|
add new tool to generate new pipeline easily (outside of sequana repository)
|
sequana/scripts/start_pipeline.py
|
sequana/scripts/start_pipeline.py
|
Python
| 0 |
@@ -0,0 +1,1750 @@
+# -*- coding: utf-8 -*-%0A#%0A# This file is part of Sequana software%0A#%0A# Copyright (c) 2016 - Sequana Development Team%0A#%0A# File author(s):%0A# Thomas Cokelaer %[email protected]%3E%0A# Dimitri Desvillechabrol %[email protected]%3E, %0A# %[email protected]%3E%0A#%0A# Distributed under the terms of the 3-clause BSD license.%0A# The full license is in the LICENSE file, distributed with this software.%0A#%0A# website: https://github.com/sequana/sequana%0A# documentation: http://sequana.readthedocs.io%0A#%0A##############################################################################%0Afrom snakemake import shell as shellcmd%0Aimport shutil%0Aimport glob%0Aimport sys%0Afrom optparse import OptionParser%0Aimport argparse%0A%0A%0Aclass Options(argparse.ArgumentParser):%0A def __init__(self, prog=%22sequana_mapping%22):%0A usage = %22%22%22Welcome to SEQUANA - create a new pipeline from scratch%0A%0A sequana_start_pipeline %0A%0A %22%22%22%0A description = %22%22%22DESCRIPTION:%0A%0A%0A %22%22%22%0A%0A super(Options, self).__init__(usage=usage, prog=prog,%0A description=description)%0A%0A #self.add_argument(%22--use-sambamba%22, dest=%22sambamba%22, action=%22store_true%22,%0A # default=False,%0A # help=%22%22%22use sambamba instead of samtools for the sorting %22%22%22)%0A%0A%0Adef main(args=None):%0A%0A if args is None:%0A args = sys.argv%5B:%5D%0A%0A user_options = Options(prog=%22sequana%22)%0A%0A # If --help or no options provided, show the help%0A if %22--help%22 in args:%0A user_options.parse_args(%5B%22prog%22, %22--help%22%5D)%0A else:%0A options = user_options.parse_args(args%5B1:%5D)%0A%0A cmd = %22cookiecutter https://github.com/sequana/sequana_pipeline_template%22%0A import subprocess%0A subprocess.call(cmd.split())%0A%0A%0A
|
|
48c139172e2eab43919ac9589ee58e3ff2009887
|
Work in progress
|
lexicon/providers/azure.py
|
lexicon/providers/azure.py
|
Python
| 0.000003 |
@@ -0,0 +1,2176 @@
+import json%0Aimport requests%0A%0Afrom lexicon.providers.base import Provider as BaseProvider%0A%0AMANAGEMENT_URL = 'https://management.azure.com'%0AAPI_VERSION = '2018-03-01-preview'%0ANAMESERVER_DOMAINS = %5B'azure.com'%5D%0A%0A%0Adef provider_parser(subparser):%0A subparser.add_argument('--auth-credentials')%0A%0A%0Aclass Provider(BaseProvider):%0A def __init__(self, config):%0A super(Provider, self).__init__(config)%0A self.domain_id = None%0A self._access_token = None%0A self._subscription_id = None%0A%0A if self._get_provider_option('auth_credentials').startswith('file::'):%0A with open(self._get_provider_option('auth_credentials')%0A .replace('file::', '')) as file_h:%0A data = file_h.read()%0A%0A self._credentials = json.loads(data)%0A%0A def _authenticate(self):%0A ad_endpoint = self._credentials%5B'activeDirectoryEndpointUrl'%5D%0A tenant_id = self._credentials%5B'tenantId'%5D%0A client_id = self._credentials%5B'clientId'%5D%0A client_secret = self._credentials%5B'clientSecret'%5D%0A self._subscription_id = self._credentials%5B'subscriptionId'%5D%0A%0A assert ad_endpoint%0A assert tenant_id%0A assert client_id%0A assert client_secret%0A assert self._subscription_id%0A%0A url = '%7B0%7D/%7B1%7D/oauth2/token'.format(ad_endpoint, tenant_id)%0A data = %7B%0A 'grant_type': 'client_credentials',%0A 'client_id': client_id,%0A 'client_secret': client_secret,%0A 'resource': MANAGEMENT_URL%0A %7D%0A%0A result = requests.post(url, data=data)%0A result.raise_for_status()%0A%0A self._access_token = result.json()%5B'access_token'%5D%0A%0A url = ('%7B0%7D/subscriptions/%7B1%7D/providers/Microsoft.Network/dnszones'%0A .format(MANAGEMENT_URL, self._subscription_id))%0A headers = %7B'Authorization': 'Bearer %7B0%7D'.format(self._access_token)%7D%0A params = %7B'api-version': API_VERSION%7D%0A%0A result = requests.get(url, headers=headers, params=params)%0A result.raise_for_status()%0A%0A print(result.json())%0A%0A def _request(self, action='GET', url='/', data=None, query_params=None):%0A url = '%7B0%7D/subscriptions/%7B1%7D'
|
|
4485e7dd4b6d5a6199d99cdc9a852ff551fc384b
|
bump version number
|
client/version.py
|
client/version.py
|
ELECTRUM_VERSION = "0.37"
SEED_VERSION = 4 # bump this every time the seed generation is modified
|
Python
| 0.000004 |
@@ -20,9 +20,9 @@
%220.3
-7
+8
%22%0ASE
|
e4979853bab394902394a35778d970ca2ccb20cb
|
Fix import of File class
|
tvrenamr/frontend.py
|
tvrenamr/frontend.py
|
#!/usr/bin/env python
import logging
import os
import sys
from .config import Config
from .episode import File
from .errors import *
from .logs import start_logging
from .main import TvRenamr
from .options import OptionParser
log = logging.getLogger('FrontEnd')
parser = OptionParser()
options, args = parser.parse_args()
class FrontEnd(object):
def __init__(self):
# start logging
if options.debug:
options.log_level = 10
start_logging(options.log_file, options.log_level, options.quiet)
def build_file_list(self, glob, recursive=False, ignore_filelist=None):
"""
Determines which files need to be processed for renaming.
:param glob: A list of file(s) or directory(s).
:param recursive: Do a recursive search for files if 'glob' is a
directory. Default is False.
:param ignore_filelist: Optional set of files to ignore from renaming.
Often used by filtering methods such as Deluge.
:returns: A list of files to be renamed.
:rtype: A list of tuples
"""
if len(glob) > 1:
# must have used wildcards
self.file_list = [os.path.split(fn) for fn in glob]
return
glob = glob[0] # only one item, add some convenience
if os.path.isdir(glob):
self.file_list = []
for root, dirs, files in os.walk(glob):
for fname in files:
# If we have a file we should be ignoring and skipping.
full_path = os.path.join(root, fname)
if ignore_filelist is not None and (full_path in ignore_filelist):
continue
self.file_list.append((root, fname))
# Don't want a recursive walk?
if not recursive:
break
elif os.path.isfile(glob):
self.file_list = [os.path.split(glob)]
else:
parser.error("'{0}' is not a file or directory. Ruh Roe!".format(args))
def get_config(self, path=None):
possible_config = (
options.config,
path,
os.path.expanduser('~/.tvrenamr/config.yml'),
os.path.join(sys.path[0], 'config.yml')
)
# get the first viable config from the list of possibles
_config = None
for config in possible_config:
if config is not None and os.path.exists(config):
_config = Config(config)
break
if _config is None:
raise ConfigNotFoundException
self.config = _config
def rename(self, working, filename):
try:
tv = TvRenamr(working, self.config, options.debug, options.dry)
_file = File(**tv.extract_details_from_file(filename, user_regex=options.regex))
# TODO: Warn setting season & episode will override *all* episodes
_file.user_overrides(options.show_name, options.season, options.episode)
for episode in _file.episodes:
episode.title = tv.retrieve_episode_title(episode, library=options.library,
canonical=options.canonical)
_file.show_name = tv.format_show_name(_file.show_name, the=options.the,
override=options.show_override)
_file.set_output_format(options.output_format, self.config)
path = tv.build_path(_file, rename_dir=options.rename_dir,
organise=options.organise)
tv.rename(filename, path)
except KeyboardInterrupt:
sys.exit()
except (ConfigNotFoundException,
NoMoreLibrariesException,
NoNetworkConnectionException):
if options.dry or options.debug:
self._stop_dry_run()
sys.exit(1)
except (AttributeError,
EmptyEpisodeTitleException,
EpisodeAlreadyExistsInDirectoryException,
EpisodeNotFoundException,
IncorrectCustomRegularExpressionSyntaxException,
InvalidXMLException,
OutputFormatMissingSyntaxException,
ShowNotFoundException,
UnexpectedFormatException) as e:
for msg in e.args:
log.critical(e)
pass
except Exception as e:
if options.debug:
# In debug mode, show the full traceback.
raise
for msg in e.args:
log.critical('Error: {0}'.format(msg))
sys.exit(1)
def run(self):
if options.dry or options.debug:
self._start_dry_run()
# kick off a rename for each file in the list
for details in self.file_list:
self.rename(*details)
# if we're not doing a dry run add a blank line for clarity
if not (options.debug and options.dry):
log.info('')
if options.dry or options.debug:
self._stop_dry_run()
def _start_dry_run(self):
log.log(26, 'Dry Run beginning.')
log.log(26, '-' * 70)
log.log(26, '')
def _stop_dry_run(self):
log.log(26, '')
log.log(26, '-' * 70)
log.log(26, 'Dry Run complete. No files were harmed in the process.')
log.log(26, '')
def run():
# use current directory if no args specified
files = args
if not args:
log.debug('No file or directory specified, using current directory')
files = [os.getcwd()]
frontend = FrontEnd()
frontend.get_config()
frontend.build_file_list(files, options.recursive, options.ignore_filelist)
frontend.run()
if __name__ == "__main__":
run()
|
Python
| 0.000002 |
@@ -84,34 +84,8 @@
fig%0A
-from .episode import File%0A
from
@@ -151,16 +151,22 @@
n import
+ File,
TvRenam
|
6486487dc1fc4972dcd18bc0e92bcae602f4d900
|
Create blacklist.py
|
cogs/blacklist.py
|
cogs/blacklist.py
|
Python
| 0.000006 |
@@ -0,0 +1 @@
+%0A
|
|
b02e308dfc2993123486a5660b6d14c98f19b389
|
Create Hamel_ZipCode_API.py
|
Hamel_ZipCode_API.py
|
Hamel_ZipCode_API.py
|
Python
| 0.000005 |
@@ -0,0 +1,3001 @@
+def back_out_unicode(stringval):%0A return str(stringval.encode('utf-8').decode('ascii', 'ignore'))%0A%0Adef zip_info(zipcode):%0A %22%22%22%0A Takes a zip code and goes to www.uszip.com/zip/*zipcode and%0A screen scrapes relevant information down. *zipcode is the 5-digit zipcode parameter%0A %0A input value zipcode must be a string value%0A returns a list of tuples, which are (key, value) pairs%0A %0A Written by Hamel Husain%0A [email protected]%0A %22%22%22%0A #Type Safety%0A%0A if type(zipcode) %3C%3E str or len(zipcode) %3E 5:%0A raise Exception('zipcode passed to this function must be a 5-digit string')%0A %0A from bs4 import BeautifulSoup%0A import urllib%0A%0A data = %5B('zipcode', str(zipcode))%5D #Initializes zipcode list%0A %0A %0A webaddress = 'http://www.uszip.com/zip/'+str(zipcode) #build web address%0A try:%0A html_collector = urllib.urlopen(webaddress).read() #read contents of HTML into variable%0A except:%0A print str(zipcode) #+ ' was an invalid zipcode, please try again - must be a 5 digit string value'%0A raise%0A %0A %0A soup = BeautifulSoup(html_collector) #make a Beautiful Soup object from HTML string so that we can parse%0A raw_html = soup.prettify() #this is so you can inspect html, will dump this into a file called sample_html.txt%0A %0A with open('sample_html.txt', 'w') as html: #so you can dump a copy of the HTML somewhere%0A html.write(back_out_unicode(raw_html))%0A %0A ##############%0A #Checks to see if zipcode returned by website is the one you input!##%0A #############%0A zipcode_returned = back_out_unicode(soup.find('strong').text.strip())%0A if zipcode %3C%3E zipcode_returned:%0A print '%25s was not found as a zipcode! Will Skip This' %25 (zipcode)%0A zip_valid = False%0A else:%0A zip_valid = True%0A city = back_out_unicode(soup.find('title').text.strip().replace(' zip code', ''))%0A %0A %0A ##Mark Zip Code as Retrieved Or Not##%0A data.append(('Zip Found', zip_valid)) %0A %0A if zip_valid:%0A data.append(('City', city))%0A %0A #return an iterable that has all of the results for 'dt', or the fieldnames%0A search_results_titles = soup.findAll('dt') #for this websites, titles are tagged 'dt', numbers are tagged 'dd'%0A%0A for label in search_results_titles:%0A current_name = label.name #tag name%0A current_string = back_out_unicode(label.text.strip()) #tag text%0A %0A next_name = label.find_next_sibling().name #next tag's name%0A next_string = back_out_unicode(label.find_next_sibling().text.strip()) #next tag's text%0A %0A #Want a 'dt' tag to be followed by a 'dd' tag, otherwise don't need it to be part of the result%0A if (current_name %3C%3E next_name) and current_name == 'dt' and next_name == 'dd' and zip_valid:%0A data.append((current_string, next_string))%0A %0A %0A return data%0A%0A%0Aif __name__ == '__main__':%0A print 'you have run the main file!'%0A hamel = zip_info('75019')%0A %0A
|
|
9ec957af0c3d57dff4c05c1b7ed3e66e1c033f6b
|
Add nagios check for idot snowplow ingest
|
nagios/check_idot_snowplows.py
|
nagios/check_idot_snowplows.py
|
Python
| 0 |
@@ -0,0 +1,656 @@
+%22%22%22%0A Nagios check to see how much snowplow data we are currently ingesting%0A%22%22%22%0Aimport sys%0Aimport os%0Aimport psycopg2%0A%0APOSTGIS = psycopg2.connect(database='postgis', host='iemdb', user='nobody')%0Apcursor = POSTGIS.cursor()%0A%0Apcursor.execute(%22%22%22%0A select count(*) from idot_snowplow_current WHERE %0A valid %3E now() - '30 minutes'::interval%0A%22%22%22)%0Arow = pcursor.fetchone()%0Acount = row%5B0%5D%0A%0Aif count %3E 2:%0A print 'OK - snowplows %25s %7Ccount=%25s;2;1;0' %25 (count, count)%0A sys.exit(0)%0Aelif count %3E 1:%0A print 'OK - snowplows %25s %7Ccount=%25s;2;1;0' %25 (count, count)%0A sys.exit(1)%0Aelse:%0A print 'CRITICAL - snowplows %25s %7Ccount=%25s;2;1;0' %25 (count, count)%0A sys.exit(2)
|
|
661e69ece73a609d230384874da9722de385d854
|
Change links to a dictionary, iterator instead of lambda
|
uoftscrapers/scrapers/libraries/__init__.py
|
uoftscrapers/scrapers/libraries/__init__.py
|
from ..utils import Scraper
from bs4 import BeautifulSoup, NavigableString
from datetime import datetime, date
from collections import OrderedDict
import urllib.parse as urlparse
from urllib.parse import urlencode
import re
class Libraries:
"""A scraper for the Libraries at the University of Toronto."""
host = 'https://onesearch.library.utoronto.ca/visit'
campuses_tags = {'St. George': 'UTSG', 'U of T Mississauga': 'UTM', 'U of T Scarborough': 'UTSC'}
@staticmethod
def scrape(location='.'):
Scraper.logger.info('Libraries initialized.')
Scraper.ensure_location(location)
# ['content'] -> 'Teaser text', ['data]
library_data_links = Libraries.get_library_link()
raise NotImplementedError('This scraper has not been implemented yet.')
Scraper.logger.info('Libraries completed.')
@staticmethod
def get_library_link():
html = Scraper.get(Libraries.host)
soup = BeautifulSoup(html, 'html.parser')
content_links = []
library_info_links = []
list_obj_arr = soup.select('.view-list-of-libraries')[1].select(
'.view-content')[0].select('.views-row')
content_links[:] = [l.select('a')[0]['href'] for l in list_obj_arr]
library_info_links = [l.select('a')[1]['href'] for l in list_obj_arr]
return {'content' : content_links , 'info': library_info_links}
|
Python
| 0 |
@@ -382,16 +382,22 @@
tags = %7B
+%0A %09
'St. Geo
@@ -410,16 +410,22 @@
'UTSG',
+%0A %09
'U of T
@@ -445,16 +445,22 @@
'UTM',
+%0A %09
'U of T
@@ -479,16 +479,22 @@
: 'UTSC'
+%0A %09
%7D%0A%0A @
@@ -626,16 +626,57 @@
cation)%0A
+ %09return Libraries.get_library_link()%0A
%09# %5B
@@ -683,16 +683,21 @@
'content
+_link
'%5D -%3E 'T
@@ -706,22 +706,55 @@
ser
-t
+T
ext'
-, %5B'data%5D
+%0A %09# %5B'info_link'%5D -%3E 'Everything Else'
%0A
@@ -1069,61 +1069,8 @@
r')%0A
- %09content_links = %5B%5D%0A %09library_info_links = %5B%5D%0A
@@ -1188,53 +1188,36 @@
%09
-content_links%5B:%5D = %5Bl.select('a')%5B0%5D%5B'href'%5D
+library_links = dict()%0A %09
for
@@ -1237,23 +1237,48 @@
_arr
-%5D
+:
%0A %09
+%09title = l.h2.text%0A %09%09
library_
info
@@ -1277,22 +1277,48 @@
ary_
+l
in
-fo_links = %5B
+ks%5Btitle%5D = %7B%0A %09%09%09'content_link':
l.se
@@ -1327,17 +1327,17 @@
ct('a')%5B
-1
+0
%5D%5B'href'
@@ -1341,80 +1341,74 @@
ef'%5D
- for l in list_obj_arr%5D%0A %09return %7B'content' : content_links , 'info':
+,%0A %09%09%09'info_link': l.select('a')%5B1%5D%5B'href'%5D%0A %09%09%7D%0A %09return
lib
@@ -1416,15 +1416,9 @@
ary_
-info_
links
-%7D
|
717b20e298547685ed0685bd09a4fac541034910
|
Add an example map flow
|
example/map_flows.py
|
example/map_flows.py
|
Python
| 0.000001 |
@@ -0,0 +1,435 @@
+from taskin import task%0A%0A%0Adef get_servers(data):%0A return %5B%0A 'foo.example.com',%0A 'bar.example.com',%0A %5D%0A%0A%0Adef create_something(data):%0A servers, name = data%0A for server in servers:%0A print('Creating: https://%25s/%25s' %25 (server, name))%0A%0A%0Adef main():%0A flow = %5B%0A get_servers,%0A task.MapTask(create_something, args=xrange(10))%0A %5D%0A task.do_flow(flow)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
cd2df0032a3978444d6bd15e3b49a20bef495b75
|
add blastp
|
Modules/f10_blast.py
|
Modules/f10_blast.py
|
Python
| 0.000433 |
@@ -0,0 +1,1759 @@
+import subprocess,os%0A%0Adef makeblastdb(fastaFile,datatype,outputname):%0A %22%22%22%0A this function build database given a fasta file%0A %0A * fastaFile: can be gzipped or not%0A %22%22%22%0A if fastaFile.endswith('.gz'):%0A cmd = ('gunzip -c %7Binput%7D %7C makeblastdb -in - -dbtype %7Btype%7D -title %7Btitle%7D '%0A '-out %7Boutputname%7D').format(input=fastaFile,%0A type=datatype,outputname=outputname,title=outputname)%0A else:%0A cmd = ('makeblastdb -in %7Binput%7D -dbtype %7Btype%7D -title %7Btitle%7D '%0A '-out %7Boutputname%7D').format(input=fastaFile,%0A type=datatype,outputname=outputname,title=outputname)%0A subprocess.call(cmd,shell=True)%0A %0A%0Adef blastp(query,database,outputFile,threads,evalue,fmt,mapnum):%0A %22%22%22%0A This function run blastp%0A %0A * query: fasta file which you want to map%0A %0A * database: database path/name%0A %0A * outputFile: tabular blast result%0A %22%22%22%0A if query.endswith('.gz'):%0A cmd = ('gunzip -c %7Binput%7D %7C blastp -query - -db %7Bdatabase%7D '%0A '-out %7BoutputFile%7D -evalue %7Bevalue%7D -outfmt %7Bformat%7D '%0A '-seg yes -num_threads %7Bthread%7D -num_alignments %7Bmapnum%7D').format(input=query,%0A database=database,outputFile=outputFile,evalue=evalue,%0A format=str(fmt),thread=str(threads),mapnum=mapnum)%0A else:%0A cmd = ('blastp -query %7Binput%7D -db %7Bdatabase%7D -out %7BoutputFile%7D '%0A '-evalue %7Bevalue%7D -outfmt %7Bformat%7D -seg yes '%0A '-num_threads %7Bthread%7D -num_alignments %7Bmapnum%7D').format(input=query,%0A database=database,outputFile=outputFile,evalue=evalue,%0A format=str(fmt),thread=str(threads),mapnum=mapnum)%0A subprocess.call(cmd,shell=True)%0A
|
|
7dac3075874a79d51d1b9d0c1551eec9a988f526
|
Create Roman_to_Integer.py
|
Array/Roman_to_Integer.py
|
Array/Roman_to_Integer.py
|
Python
| 0.000617 |
@@ -0,0 +1,671 @@
+Given a roman numeral, convert it to an integer.%0AInput is guaranteed to be within the range from 1 to 3999.%0A%0Aclass Solution:%0A # @return an integer%0A def romanToInt(self, s):%0A numerals = %7B %22M%22: 1000, %0A %22D%22: 500, %0A %22C%22: 100, %0A %22L%22: 50, %0A %22X%22: 10, %0A %22V%22: 5, %0A %22I%22: 1 %7D%0A result = 0%0A pre = s%5B0%5D%0A for char in s:%0A if numerals%5Bchar%5D %3C= numerals%5Bpre%5D:%0A result += numerals%5Bchar%5D%0A else:%0A result += numerals%5Bchar%5D - 2*numerals%5Bpre%5D%0A pre = char%0A return result %0A
|
|
c810882385e034ca0e888ce093b227198dbb5f76
|
Create GPIOTutorialtempLogger.py
|
GPIOTutorialtempLogger.py
|
GPIOTutorialtempLogger.py
|
Python
| 0.000004 |
@@ -0,0 +1,748 @@
+import RPi.GPIO as GPIO%0Aimport time as time%0AGPIO.setmode (GPIO.BCM)%0AGPIO.setup (22, GPIO.IN )%0AGPIO.setup (17,GPIO.OUT )%0A%0Awhile True:%0A if GPIO.input(22):%0A break%0A%0Aprint %22start%22%0A%0Adatafile = open (%22tempreading.log%22,%22w%22)%0A%0Awhile True:%0A GPIO.output (17, GPIO.HIGH)%0A tfile = open (%22/sys/bus/w1/devices/28-000005658920/w1_slave%22)%0A text = tfile.read()%0A tfile.close()%0A secondline = text.split (%22%5Cn%22)%5B1%5D%0A tempData = secondline.split(%22 %22)%5B9%5D%0A temprature = float (tempData%5B2:%5D)%0A temprature = temprature / 1000%0A print temprature%0A datafile.write(str(temprature)+ %22%5Cn%22)%0A time.sleep (1)%0A GPIO.output (17, GPIO.LOW)%0A time. sleep (1)%0A if GPIO.input (22)==1:%0A break%0Adatafile.close()%0AGPIO.output (17, GPIO.LOW)%0A
|
|
7b54ac1d1bf8cf6e9869e716940814d2d56cb1de
|
Create Watchers.py
|
examples/Watchers.py
|
examples/Watchers.py
|
Python
| 0.000001 |
@@ -0,0 +1,703 @@
+%0Alos = %5B%5D%0Aurl = 'https://stocktwits.com/symbol/'%0Aworkbook = openpyxl.load_workbook('Spreadsheet.xlsx')%0Aworksheet = workbook.get_sheet_by_name(name = 'Sheet1') %0Afor col in worksheet%5B'A'%5D:%0A los.append(col.value)%0Alos2 = %5B%5D%0Aprint(los)%0A%0A%0Afor i in los:%0A stocksite = url +i + '?q=' +i %0A print(stocksite)%0A with contextlib.closing(webdriver.PhantomJS(Phantom_Path)) as driver:%0A #with contextlib.closing(webdriver.Phantom_Path)) as driver: %0A driver.get(stocksite) %0A driver.find_element_by_id('sentiment-tab').click()%0A Bullish = driver.find_elements_by_css_selector('span.bullish:nth-child(1)')%0A Sentiment = %5Bx.text for x in Bullish%5D%0A los2.append(Sentiment%5B0%5D)%0A %0A
|
|
b44977653e57077118cb0eb0d549758f52beed35
|
Add basic example
|
examples/examples.py
|
examples/examples.py
|
Python
| 0.000159 |
@@ -0,0 +1,158 @@
+from pyrho import *%0A%0ARhO = models%5B'6'%5D()%0AProt = protocols%5B'step'%5D()%0AProt.phis = %5B1e16, 1e15, 1e14%5D%0ASim = simulators%5B'Python'%5D(Prot, RhO)%0ASim.run()%0ASim.plot()%0A
|
|
61139332ce1bcfd145f16b8f3c411e178db4054c
|
Add some unit tests for the hashing protocol of dtype (fail currently).
|
numpy/core/tests/test_dtype.py
|
numpy/core/tests/test_dtype.py
|
Python
| 0.000115 |
@@ -0,0 +1,2447 @@
+import numpy as np%0Afrom numpy.testing import *%0A%0Aclass TestBuiltin(TestCase):%0A def test_run(self):%0A %22%22%22Only test hash runs at all.%22%22%22%0A for t in %5Bnp.int, np.float, np.complex, np.int32, np.str, np.object,%0A np.unicode%5D:%0A dt = np.dtype(t)%0A hash(dt)%0A%0Aclass TestRecord(TestCase):%0A def test_equivalent_record(self):%0A %22%22%22Test whether equivalent record dtypes hash the same.%22%22%22%0A a = np.dtype(%5B('yo', np.int)%5D)%0A b = np.dtype(%5B('yo', np.int)%5D)%0A self.failUnless(hash(a) == hash(b), %0A %22two equivalent types do not hash to the same value !%22)%0A%0A def test_different_names(self):%0A # In theory, they may hash the same (collision) ?%0A a = np.dtype(%5B('yo', np.int)%5D)%0A b = np.dtype(%5B('ye', np.int)%5D)%0A self.failUnless(hash(a) != hash(b),%0A %22%25s and %25s hash the same !%22 %25 (a, b))%0A%0A def test_different_titles(self):%0A # In theory, they may hash the same (collision) ?%0A a = np.dtype(%7B'names': %5B'r','b'%5D, 'formats': %5B'u1', 'u1'%5D,%0A 'titles': %5B'Red pixel', 'Blue pixel'%5D%7D)%0A b = np.dtype(%7B'names': %5B'r','b'%5D, 'formats': %5B'u1', 'u1'%5D,%0A 'titles': %5B'RRed pixel', 'Blue pixel'%5D%7D)%0A self.failUnless(hash(a) != hash(b),%0A %22%25s and %25s hash the same !%22 %25 (a, b))%0A%0Aclass TestSubarray(TestCase):%0A def test_equivalent_record(self):%0A %22%22%22Test whether equivalent subarray dtypes hash the same.%22%22%22%0A a = np.dtype((np.int, (2, 3)))%0A b = np.dtype((np.int, (2, 3)))%0A self.failUnless(hash(a) == hash(b), %0A %22two equivalent types do not hash to the same value !%22)%0A%0A def test_nonequivalent_record(self):%0A %22%22%22Test whether different subarray dtypes hash differently.%22%22%22%0A a = np.dtype((np.int, (2, 3)))%0A b = np.dtype((np.int, (3, 2)))%0A self.failUnless(hash(a) != hash(b), %0A %22%25s and %25s hash the same !%22 %25 (a, b))%0A%0A a = np.dtype((np.int, (2, 3)))%0A b = np.dtype((np.int, (2, 2)))%0A self.failUnless(hash(a) != hash(b), %0A %22%25s and %25s hash the same !%22 %25 (a, b))%0A%0A a = np.dtype((np.int, (1, 2, 3)))%0A b = np.dtype((np.int, (1, 2)))%0A self.failUnless(hash(a) != hash(b), %0A %22%25s and %25s hash the same !%22 %25 (a, b))%0A%0Aclass TestMonsterType(TestCase):%0A %22%22%22Test deeply nested subtypes.%22%22%22%0A pass%0A%0Aif __name__ == %22__main__%22:%0A run_module_suite()%0A
|
|
3d11000488ca20e7e34a9f7030a16e69a6b4052f
|
add examples for training 3
|
3-python-intermediate/examples/list_comprehension.py
|
3-python-intermediate/examples/list_comprehension.py
|
Python
| 0 |
@@ -0,0 +1,386 @@
+%0Aodd = %5Bi for i in range(10) if i %25 2%5D%0Aprint(odd) # %5B1, 3, 5, 7, 9%5D%0A%0Aodd_squares = %5Bi ** 2 for i in odd%5D%0Aprint(odd_squares) # %5B1, 9, 25, 49, 81%5D%0A%0A%0Afirst_names = %5B'Bruce', 'James', 'Alfred'%5D%0Alast_names = %5B'Wayne', 'Gordon', 'Pennyworth'%5D%0A%0Aheroes = %5B'%7B%7D %7B%7D'.format(f, l) for f, l in zip(first_names, last_names)%5D%0Aprint(heroes) # %5B'Bruce Wayne', 'James Gordon', 'Alfred Pennyworth'%5D%0A%0A%0A%0A
|
|
0886a4efd7b7703d72be4319d7b0295d3bc64151
|
Create Tensor_Case.py
|
Tensor_Case.py
|
Tensor_Case.py
|
Python
| 0.000039 |
@@ -0,0 +1,553 @@
+import tensorflow as tf%0A%0Asess = tf.InteractiveSession()%0Ax = tf.random_uniform(%5B%5D)%0Ay = tf.random_uniform(%5B%5D)%0Aout1 = tf.cond(tf.greater(x,y), lambda:tf.add(x,y), lambda:(tf.subtract(x,y)))%0Aprint(x.eval(), y.eval(), out1.eval())%0A%0Ax = tf.random_uniform(%5B%5D,-1,1)%0Ay = tf.random_uniform(%5B%5D,-1,1)%0Adef f1(): return tf.cast(tf.add(x,y), tf.float32)%0Adef f2(): return tf.cast(tf.subtract(x,y), tf.float32)%0Adef f3(): return tf.cast(tf.constant(0), tf.float32)%0Aout2 = tf.case(%7Btf.less(x, y):f2, tf.greater(x,y):f1%7D, default=f3)%0Aprint(x.eval(), y.eval(), out2.eval())%0A
|
|
dfa492ffc2148d8ffa5c14145e0092be60ef44eb
|
add an example for pipeline
|
examples/pipeline.py
|
examples/pipeline.py
|
Python
| 0.000001 |
@@ -0,0 +1,879 @@
+import tornado%0Aimport tornadis%0A%0A%[email protected]%0Adef pipeline_coroutine():%0A # Let's get a connected client%0A client = tornadis.Client()%0A yield client.connect()%0A%0A # Let's make a pipeline object to stack commands inside%0A pipeline = tornadis.Pipeline()%0A pipeline.stack_call(%22SET%22, %22foo%22, %22bar%22)%0A pipeline.stack_call(%22GET%22, %22foo%22)%0A%0A # At this point, nothing is sent to redis%0A%0A # Let's submit the pipeline to redis and wait for replies%0A results = yield client.call(pipeline)%0A%0A # The two replies are in the results array%0A print results%0A # %3E%3E%3E %5B'OK', 'bar'%5D%0A%0A # Let's disconnect%0A client.disconnect()%0A%0A%0Adef stop_loop(future):%0A exception = future.exception()%0A if exception is not None:%0A raise(exception)%0A loop.stop()%0A%0A%0Aloop = tornado.ioloop.IOLoop.instance()%0Aloop.add_future(pipeline_coroutine(), stop_loop)%0Aloop.start()%0A
|
|
b75601e0c6bbb83dba4544f9d80b6f71c75fcdec
|
add missing ordered field to startup program interest
|
web/impact/impact/migrations/0003_add_ordered_field_to_startup_program_interest.py
|
web/impact/impact/migrations/0003_add_ordered_field_to_startup_program_interest.py
|
Python
| 0 |
@@ -0,0 +1,495 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.3 on 2018-01-30 10:37%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('impact', '0002_set_models_to_managed'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='startupprograminterest',%0A name='order',%0A field=models.PositiveIntegerField(db_index=True, editable=False),%0A ),%0A %5D%0A
|
|
105e5adbb7831442c9925aa812f711a9f020dfa4
|
Fix login tests for proper handling of LDAP exceptions.
|
okupy/tests/unit/test_login.py
|
okupy/tests/unit/test_login.py
|
# vim:fileencoding=utf8:et:ts=4:sts=4:sw=4:ft=python
from django.conf import settings
from django.contrib.auth.models import User
from django.core import mail
from django.core.urlresolvers import resolve
from django.template import RequestContext
from django.test.utils import override_settings
from base64 import b64encode
from Crypto import Random
from passlib.hash import ldap_md5_crypt
from mockldap import MockLdap
from okupy.accounts.views import login, logout
from okupy.accounts.forms import LoginForm
from okupy.common.test_helpers import (OkupyTestCase, set_request, no_database,
                                       ldap_users)
from okupy.crypto.ciphers import cipher
from okupy.tests import vars


class LoginUnitTests(OkupyTestCase):
    @classmethod
    def setUpClass(cls):
        cls.mockldap = MockLdap(vars.DIRECTORY)

    @classmethod
    def tearDownClass(cls):
        del cls.mockldap

    def setUp(self):
        self.mockldap.start()
        self.ldapobj = self.mockldap[settings.AUTH_LDAP_SERVER_URI]

    def tearDown(self):
        self.mockldap.stop()
        del self.ldapobj

    def test_incorrect_user_raises_login_failed(self):
        request = set_request(uri='/login', post=vars.LOGIN_WRONG,
                              messages=True)
        response = login(request)
        response.context = RequestContext(request)
        self.assertMessage(response, 'Login failed', 40)

    def test_incorrect_user_does_not_get_transferred_in_db(self):
        request = set_request(uri='/login', post=vars.LOGIN_WRONG,
                              messages=True)
        login(request)
        self.assertEqual(User.objects.count(), 0)

    @no_database()
    @override_settings(AUTHENTICATION_BACKENDS=(
        'django_auth_ldap.backend.LDAPBackend',
        'django.contrib.auth.backends.ModelBackend'))
    def test_no_database_raises_critical(self):
        request = set_request(uri='/login', post=vars.LOGIN_ALICE,
                              messages=True)
        response = login(request)
        response.context = RequestContext(request)
        self.assertMessage(response,
                           "Can't contact the LDAP server or the database", 40)

    @no_database()
    @override_settings(AUTHENTICATION_BACKENDS=(
        'django_auth_ldap.backend.LDAPBackend',
        'django.contrib.auth.backends.ModelBackend'))
    def test_no_database_sends_notification_mail(self):
        request = set_request(uri='/login', post=vars.LOGIN_ALICE,
                              messages=True)
        response = login(request)
        response.context = RequestContext(request)
        self.assertEqual(len(mail.outbox), 1)
        self.assertTrue(mail.outbox[0].subject.startswith('%sERROR:' %
                        settings.EMAIL_SUBJECT_PREFIX))

    def test_correct_user_gets_transferred_in_db(self):
        request = set_request(uri='/login', post=vars.LOGIN_ALICE)
        login(request)
        self.assertEqual(User.objects.count(), 1)

    def test_authenticate_account_that_is_already_in_db(self):
        vars.USER_ALICE.save()
        request = set_request(uri='/login', post=vars.LOGIN_ALICE)
        login(request)
        self.assertEqual(User.objects.count(), 1)

    def test_secondary_password_is_added_in_login(self):
        request = set_request(uri='/login', post=vars.LOGIN_ALICE)
        login(request)
        self.assertEqual(len(ldap_users(
            'alice',
            directory=self.ldapobj.directory)[1]['userPassword']), 2)
        self.assertEqual(len(request.session['secondary_password']), 48)

    def test_secondary_password_is_removed_in_logout(self):
        secondary_password = Random.get_random_bytes(48)
        secondary_password_crypt = ldap_md5_crypt.encrypt(b64encode(
            secondary_password))
        self.ldapobj.directory[ldap_users('alice')[0]][
            'userPassword'].append(secondary_password_crypt)
        request = set_request(uri='/login', post=vars.LOGIN_ALICE,
                              user=vars.USER_ALICE)
        request.session['secondary_password'] = cipher.encrypt(
            secondary_password)
        logout(request)
        self.assertEqual(len(ldap_users(
            'alice',
            directory=self.ldapobj.directory)[1]['userPassword']), 1)


class LoginUnitTestsNoLDAP(OkupyTestCase):
    def test_login_url_resolves_to_login_view(self):
        found = resolve('/login/')
        self.assertEqual(found.func, login)

    def test_login_page_returns_200(self):
        request = set_request(uri='/login')
        response = login(request)
        self.assertEqual(response.status_code, 200)

    def test_rendered_login_form(self):
        request = set_request(uri='/login')
        response = login(request)
        # Join the two attribute fragments in one expression (implicit
        # concatenation); as separate statements the second was discarded.
        login_form_part = ('<input id="id_username" maxlength="100" '
                           'name="username" type="text" />')
        self.assertIn(login_form_part, response.content)

    def test_empty_user_raises_form_error_messages(self):
        request = set_request(uri='/login')
        response = login(request)
        response.context = RequestContext(request, {
            'login_form': LoginForm(request.POST)})
        self.assertFormError(response, 'login_form', 'username',
                             'This field is required.')
        self.assertFormError(response, 'login_form', 'password',
                             'This field is required.')

    def test_empty_user_raises_login_failed(self):
        request = set_request(uri='/login', post=True, messages=True)
        response = login(request)
        response.context = RequestContext(request)
        self.assertMessage(response, 'Login failed', 40)

    def test_dont_authenticate_from_db_when_ldap_is_down(self):
        request = set_request(uri='/login', post=vars.LOGIN_BOB, messages=True)
        response = login(request)
        response.context = RequestContext(request)
        self.assertMessage(response, 'Login failed', 40)

    def test_no_ldap_connection_raises_login_failed_in_login(self):
        request = set_request(uri='/login', post=vars.LOGIN_WRONG,
                              messages=True)
        response = login(request)
        response.context = RequestContext(request)
        self.assertMessage(response, 'Login failed', 40)

    def test_no_ldap_connection_in_logout_sends_notification_mail(self):
        request = set_request(uri='/login', post=vars.LOGIN_ALICE,
                              user=vars.USER_ALICE)
        request.session['secondary_password'] = 'test'
        logout(request)
        self.assertEqual(len(mail.outbox), 1)
        self.assertTrue(mail.outbox[0].subject.startswith('%sERROR:' %
                        settings.EMAIL_SUBJECT_PREFIX))
|
Python
| 0 |
@@ -5803,16 +5803,46 @@
GIN_BOB,
+%0A
message
@@ -5966,39 +5966,84 @@
ge(response,
- 'Login failed'
+%0A %22Can't contact the LDAP server or the database%22
, 40)%0A%0A d
@@ -6077,27 +6077,24 @@
raises_l
-ogin_failed
+daperror
_in_logi
@@ -6331,39 +6331,84 @@
ge(response,
- 'Login failed'
+%0A %22Can't contact the LDAP server or the database%22
, 40)%0A%0A d
|
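Decoded for readability (the %0A escapes in the stored diff encode newlines), the patch renames test_no_ldap_connection_raises_login_failed_in_login to test_no_ldap_connection_raises_ldaperror_in_login and changes the message both LDAP-outage tests expect from 'Login failed' to the more specific connection error. A sketch of the two affected methods after the patch, reconstructed from the hunks — the surrounding lines and indentation are inferred from the old_contents field above, not guaranteed by the diff:

    def test_dont_authenticate_from_db_when_ldap_is_down(self):
        # The patch also wraps this call across two lines.
        request = set_request(uri='/login', post=vars.LOGIN_BOB,
                              messages=True)
        response = login(request)
        response.context = RequestContext(request)
        self.assertMessage(response,
                           "Can't contact the LDAP server or the database", 40)

    def test_no_ldap_connection_raises_ldaperror_in_login(self):
        request = set_request(uri='/login', post=vars.LOGIN_WRONG,
                              messages=True)
        response = login(request)
        response.context = RequestContext(request)
        self.assertMessage(response,
                           "Can't contact the LDAP server or the database", 40)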
c3e7b563c3eeb24aa269f23672b8f469470908b7
|
Add an option to redirect the user to a page if the key is already expired.
|
onetime/views.py
|
onetime/views.py
|
from datetime import datetime

from django.http import HttpResponseRedirect, HttpResponseGone
from django.shortcuts import get_object_or_404
# Aliased: the view below is itself named `login`, so calling the unaliased
# import from inside it would hit the view recursively instead of
# authenticating the user.
from django.contrib.auth import login as auth_login
from django.conf import settings

from onetime import utils
from onetime.models import Key


def cleanup(request):
    utils.cleanup()


def login(request, key):
    data = get_object_or_404(Key, key=key)

    if data.usage_left is not None and data.usage_left == 0:
        return HttpResponseGone()
    if data.expires is not None and data.expires < datetime.now():
        return HttpResponseGone()

    if data.usage_left is not None:
        data.usage_left -= 1
        data.save()

    auth_login(request, data.user)

    next = request.GET.get('next', None)
    if data.next is not None:
        next = data.next
    if next is None:
        next = settings.LOGIN_REDIRECT_URL

    return HttpResponseRedirect(next)
|
Python
| 0 |
@@ -329,19 +329,45 @@
est, key
+, redirect_expired_to=None
):%0A
-
data
@@ -402,16 +402,36 @@
y=key)%0A%0A
+ expired = False%0A
if d
@@ -495,33 +495,22 @@
-return HttpResponseGone()
+expired = True
%0A
@@ -573,16 +573,179 @@
.now():%0A
+ expired = True%0A%0A if expired:%0A if redirect_expired_to is not None:%0A return HttpResponseRedirect(redirect_expired_to)%0A else:%0A
|
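Applied to the old_contents above, this diff threads a new redirect_expired_to keyword through the view and folds the two early HttpResponseGone returns into a single expired flag. A sketch of the resulting view, reconstructed from the hunks (whitespace inferred; the auth_login alias follows the import fix above):

def login(request, key, redirect_expired_to=None):
    data = get_object_or_404(Key, key=key)

    expired = False
    if data.usage_left is not None and data.usage_left == 0:
        expired = True
    if data.expires is not None and data.expires < datetime.now():
        expired = True

    if expired:
        # New behavior: send expired keys to a caller-chosen page
        # instead of always answering 410 Gone.
        if redirect_expired_to is not None:
            return HttpResponseRedirect(redirect_expired_to)
        else:
            return HttpResponseGone()

    if data.usage_left is not None:
        data.usage_left -= 1
        data.save()

    auth_login(request, data.user)

    next = request.GET.get('next', None)
    if data.next is not None:
        next = data.next
    if next is None:
        next = settings.LOGIN_REDIRECT_URL

    return HttpResponseRedirect(next)

A caller would typically supply the new option through the extra-kwargs dict that Django's url() accepts, e.g. url(r'^login/(?P<key>\w+)/$', 'onetime.views.login', {'redirect_expired_to': '/expired/'}) — a hypothetical wiring for illustration, not part of this commit.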
92077ecd268a6ca04f2b413fd3535d4cc358c97b
|
Create serializers.py
|
app/grandchallenge/algorithms/serializers.py
|
app/grandchallenge/algorithms/serializers.py
|
Python
| 0.000002 |
@@ -0,0 +1,510 @@
+from rest_framework import serializers%0Afrom grandchallenge.algorithms.models import Algorithm, Job, Result%0A%0A%0Aclass AlgorithmSerializer(serializers.ModelSerializer):%0A class Meta:%0A model = Algorithm%0A fields = %5B'pk'%5D%0A%0A%0Aclass ResultSerializer(serializers.ModelSerializer):%0A class Meta:%0A model = Result%0A fields = %5B'pk', 'job', 'images', 'output'%5D%0A%0A%0Aclass JobSerializer(serializers.ModelSerializer):%0A class Meta:%0A model = Job%0A fields = %5B'pk', 'algorithm', 'image'%5D%0A
|
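Decoded from the %0A-escaped body of the diff, the newly created file reads as follows (indentation inferred; the escapes encode newlines and %5B/%5D encode square brackets):

from rest_framework import serializers
from grandchallenge.algorithms.models import Algorithm, Job, Result


class AlgorithmSerializer(serializers.ModelSerializer):
    class Meta:
        model = Algorithm
        fields = ['pk']


class ResultSerializer(serializers.ModelSerializer):
    class Meta:
        model = Result
        fields = ['pk', 'job', 'images', 'output']


class JobSerializer(serializers.ModelSerializer):
    class Meta:
        model = Job
        fields = ['pk', 'algorithm', 'image']

With Django REST Framework, serializing an instance is then just ResultSerializer(result).data; the fields lists confine the API to the primary key plus the named relations.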