commit (string, 40 chars) | subject (string, 1–3.25k chars) | old_file (string, 4–311 chars) | new_file (string, 4–311 chars) | old_contents (string, 0–26.3k chars) | lang (string, 3 classes) | proba (float64, 0–1) | diff (string, 0–7.82k chars)
---|---|---|---|---|---|---|---|
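In the raw data, each `diff` cell is percent-encoded (`%0A` = newline, `%22` = `"`, `%5B`/`%5D` = `[`/`]`); the multi-hunk diffs for modified files further down keep that raw form. A minimal sketch of loading a row and decoding a new-file diff, assuming the table is published as a Hugging Face dataset (`user/commit-diffs` is a placeholder identifier, not the real name):

```python
from urllib.parse import unquote

from datasets import load_dataset  # pip install datasets

# Placeholder Hub identifier -- substitute the dataset's real name.
ds = load_dataset("user/commit-diffs", split="train")

row = ds[0]
print(row["commit"], row["subject"], row["lang"], row["proba"])

# For newly added files the diff is a single hunk: an "@@ -0,0 +1,N @@"
# header followed by one "+"-prefixed, percent-encoded chunk holding the
# entire new file.
header, _, body = row["diff"].partition("\n")
print(unquote(body.lstrip("+")))  # decoded source of the new file
```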
3b8d2cc0279e4da1ab758251f00fd065d951df53
|
Add base for `help` command
|
foxybot/commands/help.py
|
foxybot/commands/help.py
|
Python
| 0.000004 |
@@ -0,0 +1,761 @@
"""Command to retrieve help for other commands and topics"""


from command import AbstractCommand, bot_command
from bot_help import HelpManager

@bot_command
class Help(AbstractCommand):
    _aliases = ('help', 'h')

    async def execute(self, shards, client, msg):

        try:
            args, extra = self._parser.parse_known_args(msg.content.split()[1:])
        except SystemExit as ex:
            await client.send_message(msg.channel, 'Something very very bad happened')
            return

        # await client.send_message(msg.channel, (args, extra))
        await client.send_message(msg.channel, "Hello, World!")


    @property
    def name(self):
        return self._name

    @property
    def aliases(self):
        return self._aliases
|
|
453df6abe7741fe0f24c03754b26c197fa282656
|
Create ValidateBST_002_iter.py
|
leetcode/098-Validate-Binary-Search-Tree/ValidateBST_002_iter.py
|
leetcode/098-Validate-Binary-Search-Tree/ValidateBST_002_iter.py
|
Python
| 0.000001 |
@@ -0,0 +1,685 @@
# Definition for a binary tree node.
# class TreeNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None

class Solution(object):
    def isValidBST(self, root):
        """
        :type root: TreeNode
        :rtype: bool
        """
        stack = [root]
        pre = None

        while stack != [] and stack[0]:
            p = stack.pop()
            while p:
                stack.append(p)
                p = p.left
            p = stack.pop()
            if pre and pre.val >= p.val:
                return False
            pre = p
            stack.append(p.right)

        return True
|
|
0e5bbc4df461c17ff7d1297ee4236afaa9e52a96
|
Create solution.py
|
leetcode/easy/remove_duplicates_from_sorted_array/py/solution.py
|
leetcode/easy/remove_duplicates_from_sorted_array/py/solution.py
|
Python
| 0.000018 |
@@ -0,0 +1,578 @@
class Solution(object):
    def removeDuplicates(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """

        # Without this check, the function
        # will return slow + 1 when called
        # with an empty array. This would
        # be an error.
        if len(nums) == 0:
            return 0

        slow = 0

        for fast in range(len(nums)):
            if nums[slow] != nums[fast]:
                slow += 1
                nums[slow] = nums[fast]

        return slow + 1
|
|
e8fa15603b275a690d96e37ab9dc560e68dedbb1
|
Add tests
|
test/test_02.py
|
test/test_02.py
|
Python
| 0.000001 |
@@ -0,0 +1,1635 @@
import unittest

import os
import sys
import lrmq
import timeout_decorator
import tempfile

import pickle
import struct

import asyncio

TEST_TIMEOUT = 5  # it can fail in slow environment

def read_log(fn):
    logs = []
    with open(fn, "rb") as f:
        while True:
            slen = f.read(4)
            if not slen:
                break
            slen = struct.unpack(">L", slen)[0]
            data = pickle.loads(f.read(slen))
            logs.append(data)
    assert len(logs) > 0
    return logs

class TestRPC(unittest.TestCase):

    def setUp(self):
        # reinitialize loop
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        # prepare test folder
        self.logdir = tempfile.TemporaryDirectory()

    def tearDown(self):
        self.logdir.cleanup()

    @timeout_decorator.timeout(TEST_TIMEOUT)
    def test_single_master(self):
        logname = os.path.join(self.logdir.name, "single_master")
        code = lrmq.main({
            "debuglogger": logname + ".pkl",
            "loglevel": "DEBUG",
            "log": logname + "_hub.log",
            "agents": [{
                "type": "stdio",
                "cmd": "test/msc1.py",
                "id": "test02_master",
                "name": "test02_master",
                "log": logname + "_master.log",
                "loglevel": "DEBUG",
                "args": ["master"]}
            ]
        })
        assert code == 0
        for log in read_log(logname + ".pkl"):
            log_id = None
            if "log_id" in log:
                print(log)

if __name__ == '__main__':
    unittest.main()
|
|
74550ef0c76a941c473c8d024ccc0a0403631c49
|
Add basic structure for "/glossary" routes test
|
wqflask/tests/integration/test_markdown_routes.py
|
wqflask/tests/integration/test_markdown_routes.py
|
Python
| 0.000021 |
@@ -0,0 +1,457 @@
"Integration tests for markdown routes"
import unittest

from bs4 import BeautifulSoup

from wqflask import app


class TestGenMenu(unittest.TestCase):
    """Tests for glossary"""

    def setUp(self):
        self.app = app.test_client()

    def tearDown(self):
        pass

    def test_glossary_page(self):
        """Test that the glossary page is rendered properly"""
        response = self.app.get('/glossary', follow_redirects=True)
        pass
|
|
5e1c48f9d00266290a8739f88085f050b1baa805
|
Add test_backend.py in preparation for migrating backend to rigor's database layer
|
test_backend.py
|
test_backend.py
|
Python
| 0 |
@@ -0,0 +1,1830 @@
#!/usr/bin/env python

import types
import pprint

import backend
import config
from utils import *

DBNAME = config.CROWD_DB


debugMain('dbQueryDict')
sql = 'SELECT COUNT(*) FROM image;'
conn = backend.getDbConnection(DBNAME)
gen = backend.dbQueryDict(conn, sql)
assert isinstance(gen, types.GeneratorType)
rows = list(gen)
assert len(rows) == 1
assert isinstance(rows[0], dict)
assert 'count' in rows[0]


debugMain('getDatabaseNames')
names = backend.getDatabaseNames()
assert DBNAME in names
debugDetail(names)


debugMain('getTags')
tags = backend.getTags(DBNAME)
assert len(tags) > 0
assert isinstance(tags[0], basestring)
assert sorted(tags)[0] == 'align=center'


debugMain('getImage by id')
ID = 1
imgDict = backend.getImage(DBNAME, id=ID)
assert isinstance(imgDict, dict)
assert 'id' in imgDict
assert imgDict['id'] == ID
assert 'tags' in imgDict
assert len(imgDict['tags']) > 0
assert isinstance(imgDict['tags'][0], basestring)


debugMain('searchImages')
queryDict = dict(
    database_name = DBNAME,
    has_tags = ['align=left'],
    page = 1,
    max_count = 4,
)
count, results = backend.searchImages(queryDict)
assert count > 1
assert isinstance(results, list)
assert isinstance(results[0], dict)
assert 'tags' in results[0]


debugMain('getImage by locator')
LOCATOR = '4075c8de-fb2e-41e8-831b-ea4bdcb5a6a3'
imgDict = backend.getImage(DBNAME, locator=LOCATOR)
assert isinstance(imgDict, dict)
assert 'locator' in imgDict
assert imgDict['locator'] == LOCATOR
assert 'tags' in imgDict
assert len(imgDict['tags']) > 0
assert isinstance(imgDict['tags'][0], basestring)


debugMain('getImageAnnotations')
ID = 1
annotations = backend.getImageAnnotations(DBNAME, ID)
assert isinstance(annotations, list)
assert isinstance(annotations[0], dict)
assert 'domain' in annotations[0]


print green('===== success =====')
|
|
a5d93698ad5ae1e6488b536abb8501cd6ec70551
|
Add memory usage report to cost analyzer tool; run all default optimizations.
|
tensorflow/python/grappler/cost_analyzer_tool.py
|
tensorflow/python/grappler/cost_analyzer_tool.py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""A tool for cost analysis."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from google.protobuf import text_format
from tensorflow.contrib.fused_conv.ops import gen_fused_conv2d_bias_activation_op # pylint: disable=unused-import
from tensorflow.core.framework import graph_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.grappler import cost_analyzer
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.platform import app
from tensorflow.python.platform import gfile
from tensorflow.python.training import saver
def main(_):
  if FLAGS.metagraphdef:
    with gfile.GFile(FLAGS.metagraphdef) as meta_file:
      metagraph = meta_graph_pb2.MetaGraphDef()
      metagraph.ParseFromString(meta_file.read())
  else:
    with gfile.GFile(FLAGS.graphdef) as graph_file:
      graph_def = graph_pb2.GraphDef()
      if FLAGS.graphdef.endswith(".pbtxt"):
        text_format.Merge(graph_file.read(), graph_def)
      else:
        graph_def.ParseFromString(graph_file.read())
      importer.import_graph_def(graph_def, name="")
      graph = ops.get_default_graph()
      fetch = graph.get_operation_by_name(FLAGS.fetch)
      graph.add_to_collection("train_op", fetch)
      metagraph = saver.export_meta_graph(
          graph_def=graph.as_graph_def(), graph=graph)

  if FLAGS.rewriter_config is not None:
    rewriter_config = rewriter_config_pb2.RewriterConfig()
    text_format.Merge(FLAGS.rewriter_config, rewriter_config)
    optimized_graph = tf_optimizer.OptimizeGraph(rewriter_config, metagraph)
    metagraph.graph_def.CopyFrom(optimized_graph)

  report = cost_analyzer.GenerateCostReport(metagraph, FLAGS.per_node_report)
  print(report)

if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  parser.add_argument(
      "--metagraphdef",
      type=str,
      default=None,
      help="Input .meta MetaGraphDef file path.")
  parser.add_argument(
      "--graphdef",
      type=str,
      default=None,
      help="Input .pb GraphDef file path.")
  # Consider making flag fetch work together with flag metagraphdef. As some
  # MetaGraphDef files don't have collection train_op.
  parser.add_argument(
      "--fetch",
      type=str,
      default=None,
      help=
      "The name of the fetch node. This flag is ignored if flag "
      "metagraphdef is used."
  )
  parser.add_argument(
      "--rewriter_config",
      type=str,
      default=None,
      help="Configuration for the grappler optimizers, described as a "
           "RewriterConfig protocol buffer. Usage example 1: "
           "--rewriter_config='optimize_tensor_layout: true "
           "disable_model_pruning: true'. Usage example 2: "
           "--rewriter_config='optimizers: \"constfold\" optimizers: \"layout\"'")
  parser.add_argument(
      "--per_node_report",
      action="store_true",
      help="Generate per-node report. By default the report contains stats "
           "aggregated on a per op type basis, per_node_report adds results "
           "for each individual node to the report.")
  FLAGS, unparsed = parser.parse_known_args()
  app.run(main=main, argv=[sys.argv[0]] + unparsed)
|
Python
| 0 |
@@ -2247,25 +2247,16 @@
aph)%0A%0A
-if FLAGS.
rewriter
@@ -2267,24 +2267,9 @@
fig
-is not None:%0A
+=
rew
@@ -2284,47 +2284,69 @@
nfig
- = r
+_pb2.R
ewriter
-_c
+C
onfig
-_pb2.R
+()%0A if FLAGS.r
ewriter
-C
+_c
onfig
-()
+ is not None:
%0A
@@ -2406,18 +2406,16 @@
nfig)%0A
-
-
optimize
@@ -2479,18 +2479,16 @@
agraph)%0A
-
metagr
@@ -2622,16 +2622,89 @@
report)%0A
+ report = cost_analyzer.GenerateMemoryReport(metagraph)%0A print(report)%0A
%0A%0Aif __n
|
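For rows with non-empty `old_contents`, such as the cost-analyzer change above, the `diff` column holds several hunks whose `@@ -a,b +c,d @@` headers count *characters* (not lines) into the old and new contents, with percent-encoded context, `-` (deletion) and `+` (insertion) chunks. Below is a minimal sketch of applying such hunks to `old_contents`; the 1-based offset convention and the chunk prefixes are assumptions inferred from these rows, not a documented spec:

```python
import re
from urllib.parse import unquote

def apply_char_diff(old: str, diff: str) -> str:
    """Apply character-offset hunks (assumed format, see note above)."""
    out, pos = [], 0  # pos: cursor into `old`, 0-based
    for line in diff.splitlines():
        m = re.match(r"@@ -(\d+)(?:,\d+)? \+\d+(?:,\d+)? @@", line)
        if m:
            start = int(m.group(1)) - 1    # assumed 1-based char offset
            out.append(old[pos:start])     # copy the unchanged gap
            pos = start
        elif line.startswith("+"):
            out.append(unquote(line[1:]))  # inserted characters
        elif line.startswith("-"):
            pos += len(unquote(line[1:]))  # deleted characters: skip in old
        elif line:
            chunk = unquote(line[1:] if line.startswith(" ") else line)
            out.append(chunk)              # context: present on both sides
            pos += len(chunk)
    out.append(old[pos:])                  # tail after the last hunk
    return "".join(out)
```

Under these assumptions, `apply_char_diff(row["old_contents"], row["diff"])` would reconstruct the post-commit file; the offset base should be verified against a row whose result is known.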
961040f13f1d2b2d8aea019a6649f29f858d2a09
|
Correct cleanup code for shutting down firefox in python bindings
|
py/selenium/webdriver/firefox/webdriver.py
|
py/selenium/webdriver/firefox/webdriver.py
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
try:
    import http.client as http_client
except ImportError:
    import httplib as http_client
import shutil
import socket
import sys

from .firefox_binary import FirefoxBinary
from .service import Service
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.extension_connection import ExtensionConnection
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver

class WebDriver(RemoteWebDriver):

    # There is no native event support on Mac
    NATIVE_EVENTS_ALLOWED = sys.platform != "darwin"

    def __init__(self, firefox_profile=None, firefox_binary=None, timeout=30,
                 capabilities=None, proxy=None, executable_path='wires'):

        self.binary = firefox_binary
        self.profile = firefox_profile

        if self.profile is None:
            self.profile = FirefoxProfile()

        self.profile.native_events_enabled = (
            self.NATIVE_EVENTS_ALLOWED and self.profile.native_events_enabled)

        if capabilities is None:
            capabilities = DesiredCapabilities.FIREFOX

        if "marionette" in capabilities and capabilities['marionette'] is True:
            # Let's use Marionette! WOOOOHOOOOO!
            if "binary" in capabilities:
                self.binary = capabilities["binary"]
            self.service = Service(executable_path, firefox_binary=self.binary)
            self.service.start()
            RemoteWebDriver.__init__(self,
                command_executor=self.service.service_url,
                desired_capabilities=capabilities,
                keep_alive=True)
        else:
            # Oh well... sometimes the old way is the best way.
            if self.binary is None:
                self.binary = FirefoxBinary()
            if proxy is not None:
                proxy.add_to_capabilities(capabilities)
            RemoteWebDriver.__init__(self,
                command_executor=ExtensionConnection("127.0.0.1", self.profile,
                                                     self.binary, timeout),
                desired_capabilities=capabilities,
                keep_alive=True)
        self._is_remote = False

    def quit(self):
        """Quits the driver and close every associated window."""
        try:
            RemoteWebDriver.quit(self)
        except (http_client.BadStatusLine, socket.error):
            # Happens if Firefox shutsdown before we've read the response from
            # the socket.
            pass
        self.service.stop()
        '''try:
            shutil.rmtree(self.profile.path)
            if self.profile.tempfolder is not None:
                shutil.rmtree(self.profile.tempfolder)
        except Exception as e:
            print(str(e))'''

    @property
    def firefox_profile(self):
        return self.profile
|
Python
| 0.000014 |
@@ -3316,16 +3316,110 @@
pass%0A
+ if %22marionette%22 in self.capabilities and self.capabilities%5B'marionette'%5D is True:%0A
@@ -3450,16 +3450,35 @@
-'''
+else:%0A
try:%0A
+
@@ -3522,32 +3522,36 @@
th)%0A
+
+
if self.profile.
@@ -3582,32 +3582,36 @@
+
shutil.rmtree(se
@@ -3637,24 +3637,28 @@
er)%0A
+
+
except Excep
@@ -3680,16 +3680,20 @@
+
print(st
@@ -3701,11 +3701,8 @@
(e))
-'''
%0A%0A
|
986ff101ce224494a5cdb047a1aefd99c8a6d840
|
Add an aioredis example
|
examples/sanic_aioredis_example.py
|
examples/sanic_aioredis_example.py
|
Python
| 0.000038 |
@@ -0,0 +1,809 @@
""" To run this example you need additional aioredis package
"""
from sanic import Sanic, response
import aioredis

app = Sanic(__name__)


@app.route("/")
async def handle(request):
    async with request.app.redis_pool.get() as redis:
        await redis.set('test-my-key', 'value')
        val = await redis.get('test-my-key')
    return response.text(val.decode('utf-8'))


@app.listener('before_server_start')
async def before_server_start(app, loop):
    app.redis_pool = await aioredis.create_pool(
        ('localhost', 6379),
        minsize=5,
        maxsize=10,
        loop=loop
    )


@app.listener('after_server_stop')
async def after_server_stop(app, loop):
    app.redis_pool.close()
    await app.redis_pool.wait_closed()


if __name__ == '__main__':
    app.run(host="0.0.0.0", port=8000)
|
|
e355a926155355ccc5d8b545534f331bdb683f02
|
Add management
|
podcastsync.py
|
podcastsync.py
|
Python
| 0.000001 |
@@ -0,0 +1,564 @@
import click
from getpass import getpass
from gposerver import create_app, db, User, Device, EpisodeAction

app = create_app()

@app.shell_context_processor
def make_shell_context():
    return dict(app=app, db=db, User=User, Device=Device, EpisodeAction=EpisodeAction)

@app.cli.command()
def adduser():
    """Add new user."""
    username = input("Username: ")
    password = getpass("Password: ")
    u = User(username, password)
    db.session.add(u)
    db.session.commit()

@app.cli.command()
def init():
    """Initialise database."""
    db.create_all()
|
|
1786ebacb85b2ddce816fb21b80285d991761695
|
Implement classes to be used by the deserializer
|
poyo/_nodes.py
|
poyo/_nodes.py
|
Python
| 0 |
@@ -0,0 +1,2242 @@
# -*- coding: utf-8 -*-


class TreeElement(object):
    """Helper class to identify internal classes."""
    def __init__(self, **kwargs):
        pass


class ContainerMixin(object):
    """Mixin that can hold TreeElement instances.

    Containers can be called to return a dict representation.
    """
    def __init__(self, **kwargs):
        self._children = []
        super(ContainerMixin, self).__init__(**kwargs)

    def __iter__(self):
        for c in self._children:
            yield c

    def __call__(self):
        return {c.name: c() for c in self}

    def add_child(self, child):
        """If the given object is an instance of Child add it to self and
        register self as a parent.
        """
        if not isinstance(child, ChildMixin):
            raise TypeError(
                'Requires instance of TreeElement. '
                'Got {}'.format(type(child))
            )
        child.parent = self
        self._children.append(child)


class ChildMixin(object):
    """Mixin that can be attached to Container object."""
    def __init__(self, **kwargs):
        parent = kwargs['parent']

        if not isinstance(parent, ContainerMixin):
            raise ValueError(
                'Parent of ChildMixin instance needs to be a Container.'
            )
        parent.add_child(self)
        super(ChildMixin, self).__init__(**kwargs)


class Root(ContainerMixin, TreeElement):
    """Pure Container class to represent the root of a YAML config."""
    def __init__(self, **kwargs):
        super(Root, self).__init__(**kwargs)
        self.level = -1


class Section(ContainerMixin, ChildMixin, TreeElement):
    """Class that can act as a Child, but also as a Container."""
    def __init__(self, name, level, **kwargs):
        super(Section, self).__init__(**kwargs)
        self.name = name
        self.level = level


class Simple(ChildMixin, TreeElement):
    """Class that can solely be used as a Child, f.i. simple key value pairs
    in a config.
    """
    def __init__(self, name, level, value, **kwargs):
        super(Simple, self).__init__(**kwargs)
        self.name = name
        self.level = level
        self.value = value

    def __call__(self):
        return self.value
|
|
9f276fba97318431d85c08fc0718b30bf39ed1bf
|
Create add-one-row-to-tree.py
|
Python/add-one-row-to-tree.py
|
Python/add-one-row-to-tree.py
|
Python
| 0.000017 |
@@ -0,0 +1,2104 @@
# Time:  O(n)
# Space: O(h)

# Given the root of a binary tree, then value v and depth d,
# you need to add a row of nodes with value v at the given depth d. The root node is at depth 1.
#
# The adding rule is: given a positive integer depth d,
# for each NOT null tree nodes N in depth d-1, create two tree nodes
# with value v as N's left subtree root and right subtree root.
# And N's original left subtree should be the left subtree of the new left subtree root,
# its original right subtree should be the right subtree of the new right subtree root.
# If depth d is 1 that means there is no depth d-1 at all,
# then create a tree node with value v as the new root of the whole original tree,
# and the original tree is the new root's left subtree.
#
# Example 1:
# Input:
# A binary tree as following:
#       4
#     /   \
#    2     6
#   / \   /
#  3   1 5
#
# v = 1
#
# d = 2
#
# Output:
#        4
#       / \
#      1   1
#     /     \
#    2       6
#   / \     /
#  3   1   5
#
# Example 2:
# Input:
# A binary tree as following:
#      4
#     /
#    2
#   / \
#  3   1
#
# v = 1
#
# d = 3
#
# Output:
#      4
#     /
#    2
#   / \
#  1   1
# /     \
# 3      1
# Note:
# 1. The given d is in range [1, maximum depth of the given tree + 1].
# 2. The given binary tree has at least one tree node.

# Definition for a binary tree node.
# class TreeNode(object):
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None

class Solution(object):
    def addOneRow(self, root, v, d):
        """
        :type root: TreeNode
        :type v: int
        :type d: int
        :rtype: TreeNode
        """
        if d in (0, 1):
            node = TreeNode(v)
            if d == 1:
                node.left = root
            else:
                node.right = root
            return node
        if root and d >= 2:
            root.left = self.addOneRow(root.left, v, d-1 if d > 2 else 1)
            root.right = self.addOneRow(root.right, v, d-1 if d > 2 else 0)
        return root
|
|
2e7e83a0c3b789a0d0ba89134b64a0f6b723c3af
|
add forgotten path-building test
|
bids/layout/tests/test_path_building.py
|
bids/layout/tests/test_path_building.py
|
Python
| 0.000002 |
@@ -0,0 +1,583 @@
import pytest
from bids.layout import BIDSLayout
from os.path import join, abspath, sep
from bids.tests import get_test_data_path


@pytest.fixture(scope='module')
def layout():
    data_dir = join(get_test_data_path(), '7t_trt')
    return BIDSLayout(data_dir)

def test_bold_construction(layout):
    ents = dict(subject='01', run=1, task='rest', suffix='bold')
    assert layout.build_path(ents) == "sub-01/func/sub-01_task-rest_run-1_bold.nii.gz"
    ents['acquisition'] = 'random'
    assert layout.build_path(ents) == "sub-01/func/sub-01_task-rest_acq-random_run-1_bold.nii.gz"
|
|
4582edfe2b138fd63645caddde198dc7fee9bd0a
|
Fix adb_install_apk, broken by 9b3e716.
|
build/android/adb_install_apk.py
|
build/android/adb_install_apk.py
|
#!/usr/bin/env python
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility script to install APKs from the command line quickly."""
import argparse
import logging
import os
import sys
from pylib import constants
from pylib.device import device_blacklist
from pylib.device import device_errors
from pylib.device import device_utils
from pylib.utils import run_tests_helper
def main():
  parser = argparse.ArgumentParser()

  apk_group = parser.add_mutually_exclusive_group(required=True)
  apk_group.add_argument('--apk', dest='apk_name',
                         help='DEPRECATED The name of the apk containing the'
                              ' application (with the .apk extension).')
  apk_group.add_argument('apk_path', nargs='?',
                         help='The path to the APK to install.')

  # TODO(jbudorick): Remove once no clients pass --apk_package
  parser.add_argument('--apk_package', help='DEPRECATED unused')
  parser.add_argument('--keep_data',
                      action='store_true',
                      default=False,
                      help='Keep the package data when installing '
                           'the application.')
  parser.add_argument('--debug', action='store_const', const='Debug',
                      dest='build_type',
                      default=os.environ.get('BUILDTYPE', 'Debug'),
                      help='If set, run test suites under out/Debug. '
                           'Default is env var BUILDTYPE or Debug')
  parser.add_argument('--release', action='store_const', const='Release',
                      dest='build_type',
                      help='If set, run test suites under out/Release. '
                           'Default is env var BUILDTYPE or Debug.')
  parser.add_argument('-d', '--device', dest='device',
                      help='Target device for apk to install on.')
  parser.add_argument('-v', '--verbose', action='count',
                      help='Enable verbose logging.')

  args = parser.parse_args()

  run_tests_helper.SetLogLevel(args.verbose)
  constants.SetBuildType(args.build_type)

  apk = args.apk_path or args.apk_name
  if not apk.endswith('.apk'):
    apk += '.apk'
  if not os.path.exists(apk):
    apk = os.path.join(constants.GetOutDirectory(), 'apks', apk)
    if not os.path.exists(apk):
      parser.error('%s not found.' % apk)

  devices = device_utils.DeviceUtils.HealthyDevices()
  if args.device:
    devices = [d for d in devices if d == args.device]
    if not devices:
      raise device_errors.DeviceUnreachableError(args.device)
  elif not devices:
    raise device_errors.NoDevicesError()

  def blacklisting_install(device):
    try:
      device.Install(apk, reinstall=args.keep_data)
    except device_errors.CommandFailedError:
      logging.exception('Failed to install %s', args.apk)
      device_blacklist.ExtendBlacklist([str(device)])
      logging.warning('Blacklisting %s', str(device))
    except device_errors.CommandTimeoutError:
      logging.exception('Timed out while installing %s', args.apk)
      device_blacklist.ExtendBlacklist([str(device)])
      logging.warning('Blacklisting %s', str(device))

  device_utils.DeviceUtils.parallel(devices).pMap(blacklisting_install)

if __name__ == '__main__':
  sys.exit(main())
|
Python
| 0.000003 |
@@ -2934,32 +2934,37 @@
ll %25s', args.apk
+_name
)%0A device_b
@@ -3168,16 +3168,21 @@
args.apk
+_name
)%0A
|
b447711c4396c36bc845184961d28660735c6f3d
|
Create window.py
|
src/new/window.py
|
src/new/window.py
|
Python
| 0.000001 |
@@ -0,0 +1,380 @@
# window draws

# editor window
class EditorWindow(Fl_Double_Window) :
	search = ""
	def __init__(self, w, h, label) :
		Fl_Double_Window.__init__(self, w, h, label)

# set/update title
def set_title(win):
	global filename, title
	if len(filename) == 0:
		title = "Untitled"
	else:
		title = os.path.basename(filename)
	if changed:
		title = title+" (modified)"
	win.label(title)
|
|
7ef6c8c3ea0e2481a424bcca91496ce14c0aec4a
|
add basic file verifier, just checks dimensions, need to add header and vlr checks.
|
misc/file_verify.py
|
misc/file_verify.py
|
Python
| 0 |
@@ -0,0 +1,651 @@
#!/usr/bin/env python
import sys
sys.path.append("../")

from laspy import file as File
inFile1 = File.File(sys.argv[1],mode= "r")
inFile2 = File.File(sys.argv[2],mode= "r")

spec = inFile1.reader.point_format.lookup.keys()

def f(x):
    return(list(inFile1.reader.get_dimension(x)) == list(inFile2.reader.get_dimension(x)))

passed = 0
failed = 0
for dim in spec:
    if f(dim):
        passed += 1
        print("Dimension: " + dim + " is identical.")
    else:
        failed += 1
        print("Dimension: " + dim + " is not identical")

print(str(passed) + " identical dimensions out of " + str(passed + failed))
inFile1.close()
inFile2.close()
|
|
895571ec359e7571f8581f3635ae1c452ed911a5
|
add a nova command
|
cloudmesh_cmd3/plugins/cm_shell_nova.py
|
cloudmesh_cmd3/plugins/cm_shell_nova.py
|
Python
| 0.000013 |
@@ -0,0 +1,2287 @@
from cmd3.shell import command
from cloudmesh_common.logger import LOGGER
import os
from cloudmesh_common.tables import row_table

log = LOGGER(__file__)


class cm_shell_nova:

    """opt_example class"""

    def activate_cm_shell_nova(self):
        self.register_command_topic('cloud','nova')
        pass

    @command
    def do_nova(self, args, arguments):
        """
        Usage:
            nova login
            nova info
            nova help
            nova ARGUMENTS

        A simple wrapper for the openstack nova command

        Arguments:

          ARGUMENTS      The arguments passed to nova
          help           Prints the nova manual
          login          reads the information from the current cloud
                         and updates the environment variables if
                         the cloud is an openstack cloud
          info           the environment values for OS

        Options:

           -v       verbose mode

        """
        # log.info(arguments)

        if arguments["help"]:
            os.system("nova help")
            return
        elif arguments["info"]:
            #
            # prints the current os env variables for nova
            #
            d = {}

            for attribute in ['OS_USER_ID',
                              'OS_USERNAME',
                              'OS_TENANT_NAME',
                              'OS_AUTH_URL',
                              'OS_CACERT',
                              'OS_PASSWORD',
                              'OS_REGION']:
                try:
                    d[attribute] = os.environ[attribute]
                except:
                    d[attribute] = None
            print row_table(d, order=None, labels=["Variable", "Value"])
            return
        elif arguments["login"]:
            print "Not yet implemented"
            #
            # TODO: implemet
            #
            # cloud = get current default
            # if cloud type is openstack:
            #    credentials = get credentials
            #    set the credentials in the current os system env variables
            #
        else:
            os.system("nova {0}".format(arguments["ARGUMENTS"]))
            return
|
|
c6a3db1c3fcd99e9dcbf4d6052a8882363c4bc21
|
minor change. rename pyunit test name
|
h2o-py/tests/testdir_misc/pyunit_as_date.py
|
h2o-py/tests/testdir_misc/pyunit_as_date.py
|
import sys
sys.path.insert(1, "../../")
import h2o, tests
def download_pojo():
    hdf = h2o.import_file(path=h2o.locate("smalldata/jira/v-11.csv"))
    print hdf.head()
    # NB: columns 1,5 are currently unsupported as date types
    # that is, h2o cannot understand:
    # 1 integer days since epoch (or since any other date);
    # 2 dates formatted as %d/%m/%y (in strptime format strings)
    print hdf.summary()

    print 'adding date columns'
    # NB: h2o automagically recognizes and if it doesn't recognize, you're out of luck
    hdf["ds5"] = hdf["ds5"].as_date("%d/%m/%y %H:%M")
    hdf["ds6"] = hdf["ds6"].as_date("%d/%m/%Y %H:%M:%S")
    hdf["ds7"] = hdf["ds7"].as_date("%m/%d/%y")
    hdf["ds8"] = hdf["ds8"].as_date("%m/%d/%Y")
    hdf["ds9"] = hdf["ds9"].asfactor().as_date("%Y%m%d")
    hdf["ds10"] = hdf["ds10"].as_date("%Y_%m_%d")

    print 'extracting year and month from posix date objects'
    hdf["year2"] = hdf["ds2"].year()
    hdf["year3"] = hdf["ds3"].year()
    hdf["year4"] = hdf["ds4"].year()
    hdf["year5"] = hdf["ds5"].year()
    hdf["year6"] = hdf["ds6"].year()
    hdf["year7"] = hdf["ds7"].year()
    hdf["year8"] = hdf["ds8"].year()
    hdf["year9"] = hdf["ds9"].year()
    hdf["year10"] = hdf["ds10"].year()
    hdf["mon2"] = hdf["ds2"].month()
    hdf["mon3"] = hdf["ds3"].month()
    hdf["mon4"] = hdf["ds4"].month()
    hdf["mon5"] = hdf["ds5"].month()
    hdf["mon6"] = hdf["ds6"].month()
    hdf["mon7"] = hdf["ds7"].month()
    hdf["mon8"] = hdf["ds8"].month()
    hdf["mon9"] = hdf["ds9"].month()
    hdf["mon10"] = hdf["ds10"].month()
    hdf["idx2"] = hdf["ds2"].year() * 12 + hdf["ds2"].month()
    hdf["idx3"] = hdf["ds3"].year() * 12 + hdf["ds3"].month()
    hdf["idx4"] = hdf["ds4"].year() * 12 + hdf["ds4"].month()
    hdf["idx5"] = hdf["ds5"].year() * 12 + hdf["ds5"].month()
    hdf["idx6"] = hdf["ds6"].year() * 12 + hdf["ds6"].month()
    hdf["idx7"] = hdf["ds7"].year() * 12 + hdf["ds7"].month()
    hdf["idx8"] = hdf["ds8"].year() * 12 + hdf["ds8"].month()
    hdf["idx9"] = hdf["ds9"].year() * 12 + hdf["ds9"].month()
    hdf["idx10"] = hdf["ds10"].year() * 12 + hdf["ds10"].month()

    # frames
    hdf = h2o.import_file(path=h2o.locate("smalldata/jira/v-11.csv"))
    hdf["ds9"] = hdf["ds9"].asfactor()
    hdf5 = hdf["ds5"]
    hdf6 = hdf["ds6"]
    hdf7 = hdf["ds7"]
    hdf8 = hdf["ds8"]
    hdf9 = hdf["ds9"]
    hdf10 = hdf["ds10"]
    hdf5 = hdf5.as_date("%d/%m/%y %H:%M")
    hdf6 = hdf6.as_date("%d/%m/%Y %H:%M:%S")
    hdf7 = hdf7.as_date("%m/%d/%y")
    hdf8 = hdf8.as_date("%m/%d/%Y")
    hdf9 = hdf9.as_date("%Y%m%d")
    hdf10 = hdf10.as_date("%Y_%m_%d")

if __name__ == "__main__":
    tests.run_test(sys.argv, download_pojo)
|
Python
| 0.999963 |
@@ -56,29 +56,28 @@
ts%0A%0Adef
-download_pojo
+test_as_data
():%0A hd
@@ -2574,19 +2574,18 @@
gv,
-download_pojo
+test_as_data
)%0A
|
2bf2a0849c1524f3ac56533d9f36eb907213f819
|
Add WebAPI plugin
|
proxy/plugins/WebAPI.py
|
proxy/plugins/WebAPI.py
|
Python
| 0 |
@@ -0,0 +1,456 @@
from ..data import clients, blocks, players
from twisted.web.server import Site
from twisted.web.resource import Resource
import json, time

upStart = time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime())

class WebAPI(Resource):
	def render_GET(self, request):
		currData = {'count' : len(clients.connectedClients), 'blocksCached' : len(blocks.blockList), 'playersCached' : len(players.playerList), 'upSince' : upStart}
		return json.dumps(currData)
|
|
19348f5d8e2832fbf378578d38516df66dc849b6
|
Implement IRCv3.1 StartTLS
|
heufybot/modules/ircv3/starttls.py
|
heufybot/modules/ircv3/starttls.py
|
Python
| 0.999316 |
@@ -0,0 +1,1823 @@
from twisted.internet.interfaces import ISSLTransport
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements

try:
    from twisted.internet import ssl
except ImportError:
    ssl = None


class IRCv3StartTLS(BotModule):
    implements(IPlugin, IBotModule)

    name = "StartTLS"

    def actions(self):
        return [ ("listcaps", 1, self.addToCapList),
                 ("caps-acknowledged", 1, self.requestNegotiation),
                 ("pre-handlenumeric-670", 1, self.startNegotiation),
                 ("pre-handlenumeric-691", 1, self.negotiationFailed) ]

    def addToCapList(self, server, caps):
        if not self.bot.servers[server].secureConnection and ssl is not None:
            caps.append("tls")

    def requestNegotiation(self, server, caps):
        if "tls" in caps:
            self.bot.log.info("[{server}] Trying to initiate StartTLS...", server=server)
            self.bot.servers[server].sendMessage("STARTTLS")

    def startNegotiation(self, server, prefix, params):
        self.bot.log.info("[{server}] Server replied: \"{reply}\"", server=server, reply=params[1])
        self.bot.log.info("[{server}] Proceeding with TLS handshake...", server=server)
        self.bot.servers[server].transport.startTLS(ssl.CertificateOptions())
        if ISSLTransport.providedBy(self.bot.servers[server].transport):
            self.bot.servers[server].secureConnection = True
            self.bot.log.info("[{server}] TLS handshake successful. Connection is now secure.", server=server)
        return True

    def negotiationFailed(self, server, prefix, params):
        self.bot.log.warn("[{server}] StartTLS failed, reason: \"{reply}\".", server=server, reply=params[1])
        return True


startTLS = IRCv3StartTLS()
|
|
63e14ae4485bcca682b952e5ab7f125f58c3d960
|
Add pwnypack ipython extension.
|
pwnypack/ipython_ext.py
|
pwnypack/ipython_ext.py
|
Python
| 0 |
@@ -0,0 +1,454 @@
import functools
import shlex
import pwny
import pwnypack.main


__all__ = []


def call_main_func(func_name, ipython, line):
    pwnypack.main.main([func_name] + shlex.split(line))


def load_ipython_extension(ipython):
    ipython.push(vars(pwny))
    for f_name in pwnypack.main.MAIN_FUNCTIONS:
        ipython.define_magic(f_name, functools.partial(call_main_func, f_name))


def unload_ipython_extension(ipython):
    ipython.drop_by_id(vars(pwny))
|
|
7fbfca47b2b435a0aa4df8d39699831f752f351d
|
Add initial code for scraping standings data
|
pybaseball/standings.py
|
pybaseball/standings.py
|
Python
| 0 |
@@ -0,0 +1,1248 @@
from bs4 import BeautifulSoup
import requests
import datetime

def get_soup(date):
	#year, month, day = [today.strftime("%Y"), today.strftime("%m"), today.strftime("%d")]
	#url = "http://www.baseball-reference.com/boxes?year={}&month={}&day={}".format(year, month, day)
	year = date.strftime("%Y")
	url = 'http://www.baseball-reference.com/leagues/MLB/{}-standings.shtml'.format(year)
	s=requests.get(url).content
	return BeautifulSoup(s)

def get_tables(soup):
	tables = soup.find_all('table')
	datasets = []
	for table in tables:
		data = []
		headings = [th.get_text() for th in table.find("tr").find_all("th")]
		data.append(headings)
		table_body = table.find('tbody')
		rows = table_body.find_all('tr')
		for row in rows:
			#data.append(row.find_all('a')[0]['title']) # team name
			cols = row.find_all('td')
			cols = [ele.text.strip() for ele in cols]
			cols.insert(0,row.find_all('a')[0]['title'])
			data.append([ele for ele in cols if ele])
		datasets.append(data)
	return datasets


def standings(date=None):
	# get most recent standings if date not specified
	if(date is None):
		date = datetime.datetime.today()
	# retrieve html from baseball reference
	soup = get_soup(date)
	tables = get_tables(soup)
	return tables
|
|
d187c51ccd9dc1676b6f16eddecee6dce752d668
|
Make class test-class name more specific
|
distarray/tests/test_client.py
|
distarray/tests/test_client.py
|
import unittest
from IPython.parallel import Client
from distarray.client import DistArrayContext
class TestClient(unittest.TestCase):

    def setUp(self):
        self.client = Client()
        self.dv = self.client[:]

    def testCreateDAC(self):
        '''Can we create a plain vanilla context?'''
        dac = DistArrayContext(self.dv)
        self.assertIs(dac.view, self.dv)

    def testCreateDACwithTargets(self):
        '''Can we create a context with a subset of engines?'''
        dac = DistArrayContext(self.dv, targets=[0, 1])
        self.assertIs(dac.view, self.dv)

if __name__ == '__main__':
    unittest.main(verbosity=2)
|
Python
| 0 |
@@ -103,21 +103,31 @@
ass Test
-Clien
+DistArrayContex
t(unitte
@@ -239,22 +239,24 @@
def test
-C
+_c
reate
+_
DAC(self
@@ -409,22 +409,26 @@
test
-C
+_c
reate
+_
DAC
+_
with
-T
+_t
arge
|
b5d8b29a34a4675ad5de33511bfca486f648a134
|
Create _source.py
|
static/_source.py
|
static/_source.py
|
Python
| 0.000002 |
@@ -0,0 +1,1264 @@
# coding: utf-8

# BlackSmith general configuration file

# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

# Jabber server to connect
SERVER = 'example.com'

# Connecting Port
PORT = 5222

# Jabber server`s connecting Host
HOST = 'example.com'

# Using TLS (True - to enable, False - to disable)
SECURE = True

# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

# User`s account
USERNAME = 'username'

# Jabber ID`s Password
PASSWORD = 'password'

# Resourse (please don`t touch it)
RESOURCE = u'simpleApps' # You can write unicode symbols here

# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

# Default chatroom nick
DEFAULT_NICK = u'BlackSmith-m.1' # You can write unicode symbols here

# Groupchat message size limit
CHAT_MSG_LIMIT = 1024

# Private/Roster message size limit
PRIV_MSG_LIMIT = 2024

# Incoming message size limit
INC_MSG_LIMIT = 8960

# Working without rights of moder (True - to enable, False - to disable)
MSERVE = False

# Jabber account of bot`s owner
BOSS = '[email protected]'

# Memory usage limit (size in kilobytes, 0 - not limited)
MEMORY_LIMIT = 49152

# Admin password, used as a key to command "login"
BOSS_PASS = ''

# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
|
|
647f0c1409dcd22d69a79d21571d2c03f794a2a8
|
Test iter and yield
|
99_misc/iterator.py
|
99_misc/iterator.py
|
Python
| 0 |
@@ -0,0 +1,302 @@
#/usr/bin/env python
# Test yield generator
def my_double(arr):
    for i in arr:
        yield i * 2

for i in my_double(range(1, 10)):
    print("{0} ".format(i)),

print("\n"),

# Text iteration
i = iter(my_double(range(10, 21)))
print i
for j in range (1, 10):
    print("{0} ".format(i.next())),
|
|
28944376472130d53a05f7473e7213c917207cd4
|
Add model representing a listing
|
apartments/models.py
|
apartments/models.py
|
Python
| 0 |
@@ -0,0 +1,811 @@
from sqlalchemy import create_engine, Column, DateTime, Float, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Listing(Base):
    __tablename__ = 'listings'

    id = Column(Integer, primary_key=True)
    craigslist_id = Column(Integer, unique=True)

    name = Column(String)
    price = Column(Float)
    link = Column(String, unique=True)
    created = Column(DateTime)

    area = Column(String)
    geotag = Column(String)
    lat = Column(Float)
    lon = Column(Float)
    location = Column(String)
    nearest_stop = Column(String)

    def __repr__(self):
        return f'<Listing(name={self.name}, price={self.price}, craigslist_id={self.craigslist_id})>'


engine = create_engine('sqlite:///apartments.db')
Base.metadata.create_all(engine)
|
|
38cbc73f70a9ca896a29d7fa2e000388bbf40d88
|
Add script to generate data from an experiment
|
DilipadTopicModelling/experiment_get_results.py
|
DilipadTopicModelling/experiment_get_results.py
|
Python
| 0 |
@@ -0,0 +1,1516 @@
import logging
import os
import pandas as pd

from CPTCorpus import CPTCorpus
from CPT_Gibbs import GibbsSampler

logger = logging.getLogger(__name__)
logging.basicConfig(format='%(levelname)s : %(message)s', level=logging.INFO)

# select experiment to get parameters from
nTopics = 100
start = 80
end = 199

alpha = 50.0/nTopics
beta = 0.02
nIter = 200

# load corpus
data_dir = '/home/jvdzwaan/data/tmp/generated/test_exp/'
corpus = CPTCorpus.load('{}corpus.json'.format(data_dir),
                        topicDict='{}/topicDict.dict'.format(data_dir),
                        opinionDict='{}/opinionDict.dict'.format(data_dir))

out_dir = '/home/jvdzwaan/data/tmp/generated/test_exp/{}'.format(nTopics)

sampler = GibbsSampler(corpus, nTopics=nTopics, nIter=nIter, alpha=alpha,
                       beta=beta, beta_o=beta, out_dir=out_dir)
sampler._initialize()
sampler.estimate_parameters(start=start, end=end)

pd.DataFrame(sampler.theta).to_csv(os.path.join(out_dir, 'theta_{}.csv'.
                                                format(nTopics)))
topics = sampler.topics_to_df(phi=sampler.topics, words=corpus.topic_words())
topics.to_csv(os.path.join(out_dir, 'topics_{}.csv'.format(nTopics)))
for i, p in enumerate(sampler.corpus.perspectives):
    opinions = sampler.topics_to_df(phi=sampler.opinions[i],
                                    words=corpus.opinion_words())
    opinions.to_csv(os.path.join(out_dir,
                                 'opinions_{}_{}.csv'.format(p.name, nTopics)))
|
|
656d94c0375f6a96cc3a9d4b3227d8f19afe3dea
|
Add lemon drop elevator model
|
control/systems/main.py
|
control/systems/main.py
|
Python
| 0 |
@@ -0,0 +1,245 @@
import numpy as np

Kt = 1.41/89.0
Kv = 5840.0/3.0
G = 10.0
J = 4.0*(2.54**2.0)/2.0  # 4 kg on a 1 inch pully
R = 12.0/89.0

A = np.asarray([[0, 1],
                [0, -(Kt*Kv)/((G**2)*J*R)]])
B = np.asarray([[0],
                [Kt/(G*J*R)]])
|
|
2ca0d97649529dfc66486dc1d3e7fa1e37d8ee91
|
add integration test for api analytics
|
integrations/test_api_analytics.py
|
integrations/test_api_analytics.py
|
Python
| 0 |
@@ -0,0 +1,1632 @@
"""Integration tests for internal analytics."""

# standard library
import unittest

# third party
import mysql.connector
import requests


# use the local instance of the Epidata API
BASE_URL = 'http://delphi_web_epidata/epidata/api.php'


class ApiAnalyticsTests(unittest.TestCase):
  """Tests internal analytics not specific to any particular endpoint."""

  def setUp(self):
    """Perform per-test setup."""

    # connect to the `epidata` database and clear the `api_analytics` table
    cnx = mysql.connector.connect(
        user='user',
        password='pass',
        host='delphi_database_epidata',
        database='epidata')
    cur = cnx.cursor()
    cur.execute('truncate table api_analytics')
    cnx.commit()
    cur.close()

    # make connection and cursor available to test cases
    self.cnx = cnx
    self.cur = cnx.cursor()

  def tearDown(self):
    """Perform per-test teardown."""
    self.cur.close()
    self.cnx.close()

  def test_analytics_update(self):
    """Update internal analytics for requests to the API."""

    make_request = lambda src: requests.get(BASE_URL, params={'source': src})

    # make some requests
    for _ in range(1):
      make_request('source1')
    for _ in range(5):
      make_request('source2')
    for _ in range(19):
      make_request('source3')

    # verify that analytics are available
    self.cur.execute('''
      select source, count(1)
      from api_analytics
      group by source
      order by source
    ''')
    values = [row for row in self.cur]
    self.assertEqual(values, [
      ('source1', 1),
      ('source2', 5),
      ('source3', 19),
    ])
|
|
8cc622db293816fc96bb7df0139b57a2b5a2eaef
|
add scanning of live IP addresses with ping sweep, multi threading
|
Scan_IpAdds_ping.py
|
Scan_IpAdds_ping.py
|
Python
| 0 |
@@ -0,0 +1,2547 @@
import os, platform, collections
import socket, subprocess,sys
import threading
from datetime import datetime

class myThread (threading.Thread):
    def __init__(self,startLastOctet,endLastOctet):
        threading.Thread.__init__(self)
        self.startLastOctet = startLastOctet
        self.endLastOctet = endLastOctet
    def run(self):
        runThread(self.startLastOctet,self.endLastOctet)

def getNetwork():
    net = raw_input("Enter the Network Address:\t\t ")
    netSplit= net.split('.')
    a = '.'
    firstThreeOctet = netSplit[0]+a+netSplit[1]+a+netSplit[2]+a
    startLastOctet = int(raw_input("Enter the beginning of last Octet:\t "))
    endLastOctet = int(raw_input("Enter the end od last Octet:\t\t "))
    endLastOctet =endLastOctet+1
    dic = collections.OrderedDict()
    oper = platform.system()
    if (oper=="Windows"):
        pingCmd = "ping -n 1 "
    elif (oper== "Linux"):
        pingCmd = "ping -c 1 "
    else :
        pingCmd = "ping -c 1 "
    return firstThreeOctet, startLastOctet, endLastOctet, dic, pingCmd

def runThread(startLastOctet,endLastOctet):
    #print "Scanning in Progess"
    for ip in xrange(startLastOctet,endLastOctet):
        addr = firstThreeOctet+str(ip)
        pingAddress = pingCmd+addr
        response = os.popen(pingAddress)
        for line in response.readlines():
            #if(line.count("TTL")):
            #    break
            if (line.count("ttl")):
                #print addr, "--> Live"
                dic[ip]= addr
                break

if __name__ == '__main__':
    subprocess.call('clear',shell=True)
    print "-" * 75
    print "This program search for life IPs in last octet, with multiple threads "
    print "\tFor example: 192.168.11.xxx - 192.168.11.yyy"
    print "-" * 75
    firstThreeOctet, startLastOctet, endLastOctet, dic, pingCmd = getNetwork()
    t1= datetime.now()

    total_ip =endLastOctet-startLastOctet
    tn =3 # number of ip handled by one thread
    total_thread = total_ip/tn
    total_thread=total_thread+1
    threads= []
    try:
        for i in xrange(total_thread):
            en = startLastOctet+tn
            if(en >endLastOctet):
                en =endLastOctet
            thread = myThread(startLastOctet,en)
            thread.start()
            threads.append(thread)
            startLastOctet =en
    except:
        print "Error: unable to start thread"

    print "\t Number of Threads active:", threading.activeCount()
    for t in threads:
        t.join()
    print "\tExiting Main Thread"

    sortedIPs = collections.OrderedDict(sorted(dic.items()))
    for key in sortedIPs:
        print "IP address: {} \t --> Live".format(sortedIPs[key])
    t2= datetime.now()
    total =t2-t1
    print "Scanning complete in " , total
|
|
e670de6ecb7be3da56acf2976148574165cb69aa
|
Add missing test module
|
h5py/tests/test_utils.py
|
h5py/tests/test_utils.py
|
Python
| 0.000002 |
@@ -0,0 +1,2340 @@
#+
#
# This file is part of h5py, a low-level Python interface to the HDF5 library.
#
# Copyright (C) 2008 Andrew Collette
# http://h5py.alfven.org
# License: BSD  (See LICENSE.txt for full license)
#
# $Date$
#
#-
import sys
import numpy

from common import HDF5TestCase, api_18

from h5py import *
from h5py import utils
from h5py.h5 import H5Error

class TestUtils(HDF5TestCase):

    def test_check_read(self):
        """ Check if it's possible to read from the NumPy array """

        carr = numpy.ones((10,10), order='C')
        farr = numpy.ones((10,10), order='F')
        oarr = numpy.ones((10,10), order='C')
        oarr.strides = (0,1)

        utils.check_numpy_read(carr)
        self.assertRaises(TypeError, utils.check_numpy_read, farr)
        self.assertRaises(TypeError, utils.check_numpy_read, oarr)

        s_space = h5s.create_simple((5,5))
        m_space = h5s.create_simple((10,10))
        l_space = h5s.create_simple((12,12))

        utils.check_numpy_read(carr, m_space.id)
        utils.check_numpy_read(carr, l_space.id)
        self.assertRaises(TypeError, utils.check_numpy_read, carr, s_space.id)

        # This should not matter for read
        carr.flags['WRITEABLE'] = False
        utils.check_numpy_read(carr)

    def test_check_write(self):
        """ Check if it's possible to write to the NumPy array """

        carr = numpy.ones((10,10), order='C')
        farr = numpy.ones((10,10), order='F')
        oarr = numpy.ones((10,10), order='C')
        oarr.strides = (0,1)

        utils.check_numpy_write(carr)
        self.assertRaises(TypeError, utils.check_numpy_write, farr)
        self.assertRaises(TypeError, utils.check_numpy_write, oarr)

        s_space = h5s.create_simple((5,5))
        m_space = h5s.create_simple((10,10))
        l_space = h5s.create_simple((12,12))

        utils.check_numpy_write(carr, s_space.id)
        utils.check_numpy_write(carr, m_space.id)
        self.assertRaises(TypeError, utils.check_numpy_write, carr, l_space.id)

        # This should matter now
        carr.flags['WRITEABLE'] = False
        self.assertRaises(TypeError, utils.check_numpy_write, carr)

    def test_emalloc(self):

        utils._test_emalloc(1024)
        utils._test_emalloc(0)
        self.assertRaises(MemoryError, utils._test_emalloc, sys.maxint)
|
|
3fd4244dbfd33bbf2fa369d81756e82b1cf1c467
|
Clear out unaligned NLCD19 GWLF-E results
|
src/mmw/apps/modeling/migrations/0041_clear_nlcd2019_gwlfe_results.py
|
src/mmw/apps/modeling/migrations/0041_clear_nlcd2019_gwlfe_results.py
|
Python
| 0 |
@@ -0,0 +1,1245 @@
# Generated by Django 3.2.13 on 2022-10-17 13:47

from django.db import migrations


def clear_nlcd2019_gwlfe_results(apps, schema_editor):
    """
    Clear the results for all scenarios belonging to GWLF-E projects made after
    the release of 1.33.0, which had incorrectly aligned NLCD19 2019 on
    2022-01-17:
    https://github.com/WikiWatershed/model-my-watershed/releases/tag/1.33.0

    These results will be recalculated with the correclty aligned NLCD19 values
    when these projects are accessed again.
    """
    Project = apps.get_model('modeling', 'Project')
    Scenario = apps.get_model('modeling', 'Scenario')

    Project.objects.filter(
        model_package='gwlfe',
        created_at__gte='2022-01-17',
    ).update(
        gis_data=None,
        mapshed_job_uuid=None,
        subbasin_mapshed_job_uuid=None,
    )

    Scenario.objects.filter(
        project__model_package='gwlfe',
        project__created_at__gte='2022-01-17',
    ).update(
        results='[]',
        modification_hash='',
    )


class Migration(migrations.Migration):

    dependencies = [
        ('modeling', '0040_clear_nlcd2019_tr55_results'),
    ]

    operations = [
        migrations.RunPython(clear_nlcd2019_gwlfe_results),
    ]
|
|
34046e290842108212d71f6cf2445d7015bf2423
|
Create text.py
|
dasem/text.py
|
dasem/text.py
|
Python
| 0.000103 |
@@ -0,0 +1,718 @@
"""text."""


from nltk import sent_tokenize, word_tokenize


def sentence_tokenize(text):
    """Tokenize a Danish text into sentence.

    The model from NTLK trained on Danish is used.

    Parameters
    ----------
    text : str
        The text to be tokenized.

    Returns
    -------
    sentences : list of str
        Sentences as list of strings.

    Examples
    --------
    >>> text = 'Hvad!? Hvor har du f.eks. siddet?'
    >>> sentences = sentence_tokenize(text)
    >>> sentences
    ['Hvad!?', 'Hvor har du f.eks. siddet?']

    """
    return sent_tokenize(text, language='danish')


def word_tokenize(sentence):
    """Tokenize a Danish sentence into words."""
    return word_tokenize(sentence)
|
|
477de06a99fc4998ec15442e5fae9b919be53392
|
Initialize P2_scheduledComicDownloader
|
books/AutomateTheBoringStuffWithPython/Chapter15/PracticeProjects/P2_scheduledComicDownloader.py
|
books/AutomateTheBoringStuffWithPython/Chapter15/PracticeProjects/P2_scheduledComicDownloader.py
|
Python
| 0 |
@@ -0,0 +1,518 @@
# Write a program that checks the websites of several web comics and automatically
# downloads the images if the comic was updated since the program’s last visit.
#
# Your operating system’s scheduler (Scheduled Tasks on Windows, launchd on OS X,
# and cron on Linux) can run your Python program once a day.
#
# The Python program itself can download the comic and then copy it to your desktop
# so that it is easy to find. This will free you from having to check the website
# yourself to see whether it has updated.
|
|
5dad4f0e2d9732d7ff4a0feebff332f005cabf01
|
Remove foreign keys from deprecated `progress-edx-platform-extensions` (#1874)
|
common/djangoapps/database_fixups/migrations/0002_remove_foreign_keys_from_progress_extensions.py
|
common/djangoapps/database_fixups/migrations/0002_remove_foreign_keys_from_progress_extensions.py
|
Python
| 0 |
@@ -0,0 +1,3219 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations

"""
The `progress-edx-platform-extensions` has been deprecated in favor of `edx-completion`.
The requirement was removed in the commit linked as (1) below. However its migration (2) had not been reverted.
That migration used `auth_user.id` as the foreign key in its models (3), but Django does not resolve this constraint
between existing tables anymore, because the model has been removed.
Therefore we need to drop the tables related to deprecated application in order to be able to remove users properly.

Because of some performance concerns, deletion is implemented in (4).
This migration drops only foreign keys from deprecated tables.
If ran twice (for any reason), it will raise a custom error for better visibility that these keys do not exist.

(1) https://github.com/edx-solutions/edx-platform/commit/59bf3efe71533de53b60bd979517e889d18a96bb
(2) https://github.com/edx-solutions/progress-edx-platform-extensions/blob/master/progress/migrations/0001_initial.py
(3) https://github.com/edx-solutions/progress-edx-platform-extensions/blob/master/progress/models.py
(4) https://github.com/edx-solutions/edx-platform/pull/1862
"""


class Migration(migrations.Migration):

    dependencies = [
        ('database_fixups', '0001_initial'),
    ]

    operations = [
        migrations.RunSQL("""
            -- Drop a procedure if it already exists - safety check.
            DROP PROCEDURE IF EXISTS drop_foreign_key_from_table;

            -- We are dropping constraints from 3 tables, so we create a temporary procedure to avoid code repetition.
            CREATE PROCEDURE drop_foreign_key_from_table(given_table VARCHAR(64))
            BEGIN
                -- Find the ID of the foreign key (there is only one per table, otherwise it would fail).
                SET @foreign_key = (
                    SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS
                    WHERE TABLE_NAME = given_table AND CONSTRAINT_TYPE = 'FOREIGN KEY'
                );
                IF @foreign_key IS NOT NULL THEN
                    -- Prepare query (MySQL does not allow embedding queries in a standard way here).
                    SET @statement = CONCAT('ALTER TABLE ', given_table, ' DROP FOREIGN KEY ', @foreign_key);
                    PREPARE stmt FROM @statement;
                    EXECUTE stmt;
                    DEALLOCATE PREPARE stmt;
                ELSE
                    -- Raise custom error for having clearer logs in case of a failure.
                    SET @error_message = CONCAT('Cannot find foreign key in ', given_table, ' table.');
                    SIGNAL SQLSTATE '45000' SET MESSAGE_TEXT = @error_message;
                END IF;
            END;

            -- Call temporary procedure on relevant tables.
            CALL drop_foreign_key_from_table('progress_coursemodulecompletion');
            CALL drop_foreign_key_from_table('progress_studentprogress');
            CALL drop_foreign_key_from_table('progress_studentprogresshistory');

            -- Clean up.
            DROP PROCEDURE IF EXISTS drop_foreign_key_from_table;
        """)
    ]
|
|
9eb35140a1790625c32773af6b8a2d76699e86c6
|
Move MapEntityForm to mapentity (ref #129)
|
mapentity/forms.py
|
mapentity/forms.py
|
Python
| 0 |
@@ -0,0 +1,1744 @@
+from django.utils.translation import ugettext_lazy as _%0A%0Aimport floppyforms as forms%0Afrom crispy_forms.helper import FormHelper%0Afrom crispy_forms.layout import Layout, Submit, Div, Button%0Afrom crispy_forms.bootstrap import FormActions%0A%0A%0Aclass MapEntityForm(forms.ModelForm):%0A pk = forms.Field(required=False, widget=forms.Field.hidden_widget)%0A model = forms.Field(required=False, widget=forms.Field.hidden_widget)%0A%0A helper = FormHelper()%0A helper.form_class = 'form-horizontal'%0A %0A modelfields = tuple()%0A geomfields = tuple()%0A actions = FormActions(%0A Button('cancel', _('Cancel'), ),%0A Submit('save_changes', _('Save changes'), css_class=%22btn-primary offset1%22),%0A css_class=%22form-actions span11%22,%0A )%0A%0A def __init__(self, *args, **kwargs):%0A super(MapEntityForm, self).__init__(*args, **kwargs)%0A # Generic behaviour%0A if self.instance.pk:%0A self.helper.form_action = self.instance.get_update_url()%0A else:%0A self.helper.form_action = self.instance.get_add_url()%0A self.fields%5B'pk'%5D.initial = self.instance.pk%0A self.fields%5B'model'%5D.initial = self.instance._meta.module_name%0A %0A # Hide label for geom :%0A for geomfield in self.geomfields:%0A self.fields%5Bgeomfield%5D.label = False%0A %0A # Get fields from subclasses%0A fields = ('pk','model') + self.modelfields%0A leftpanel = Div(%0A *fields,%0A css_class=%22span3%22%0A )%0A%0A rightpanel = Div(%0A *self.geomfields,%0A css_class=%22span8%22%0A )%0A %0A # Main form layout%0A self.helper.layout = Layout(%0A leftpanel,%0A rightpanel,%0A self.actions%0A )%0A
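Subclasses supply the field lists the base class lays out; a hypothetical concrete form might look like this (the Path model and its field names are invented for illustration — the only real contract is modelfields, geomfields, and the get_add_url()/get_update_url() methods used above):

class PathForm(MapEntityForm):
    modelfields = ('name', 'comment')  # rendered in the left panel
    geomfields = ('geom',)             # right panel; label hidden by the base class

    class Meta:
        model = Path  # hypothetical model providing get_add_url()/get_update_url()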
|
|
2a5012f0b74fa025bbc909fd8bfb10aec272d148
|
Create pawn-brotherhood.py
|
home/pawn-brotherhood.py
|
home/pawn-brotherhood.py
|
Python
| 0.99898 |
@@ -0,0 +1,799 @@
+def safe_pawns ( pawns ) :%0A %0A n = 0%0A %0A for file , rank in pawns :%0A %0A if rank %3C %222%22 : continue%0A %0A if file %3E %22a%22 :%0A %0A first = chr( ord(file) - 1) + str( int(rank) - 1 )%0A %0A if first in pawns :%0A n += 1%0A continue%0A %0A if file %3C %22h%22 :%0A %0A second = chr( ord(file) + 1) + str( int(rank) - 1 )%0A %0A if second in pawns :%0A n += 1%0A continue%0A %0A return n%0A%0Aif __name__ == '__main__':%0A #These %22asserts%22 using only for self-checking and not necessary for auto-testing%0A assert safe_pawns(%7B%22b4%22, %22d4%22, %22f4%22, %22c3%22, %22e3%22, %22g5%22, %22d2%22%7D) == 6%0A assert safe_pawns(%7B%22b4%22, %22c4%22, %22d4%22, %22e4%22, %22f4%22, %22g4%22, %22e5%22%7D) == 1%0A
|
|
632be2720287425387454fbe2bd9ce1c7eb2cdfb
|
basic tests
|
Orange/widgets/data/tests/test_owconcatenate.py
|
Orange/widgets/data/tests/test_owconcatenate.py
|
Python
| 0.999568 |
@@ -0,0 +1,2447 @@
+# Test methods with long descriptive names can omit docstrings%0A# pylint: disable=missing-docstring%0A%0Aimport numpy as np%0A%0Afrom Orange.data import Table%0Afrom Orange.widgets.data.owconcatenate import OWConcatenate%0Afrom Orange.widgets.tests.base import WidgetTest%0A%0A%0Aclass TestOWConcatenate(WidgetTest):%0A%0A def setUp(self):%0A self.widget = self.create_widget(OWConcatenate)%0A self.iris = Table(%22iris%22)%0A self.titanic = Table(%22titanic%22)%0A%0A def test_single_input(self):%0A self.assertIsNone(self.get_output(%22Data%22))%0A self.send_signal(%22Primary Data%22, self.iris)%0A output = self.get_output(%22Data%22)%0A self.assertEqual(list(output), list(self.iris))%0A self.send_signal(%22Primary Data%22, None)%0A self.assertIsNone(self.get_output(%22Data%22))%0A self.send_signal(%22Additional Data%22, self.iris)%0A output = self.get_output(%22Data%22)%0A self.assertEqual(list(output), list(self.iris))%0A self.send_signal(%22Additional Data%22, None)%0A self.assertIsNone(self.get_output(%22Data%22))%0A%0A def test_two_inputs_union(self):%0A self.send_signal(%22Additional Data%22, self.iris, 0)%0A self.send_signal(%22Additional Data%22, self.titanic, 1)%0A output = self.get_output(%22Data%22)%0A # needs to contain all instances%0A self.assertEqual(len(output), len(self.iris) + len(self.titanic))%0A # needs to contain all variables%0A outvars = output.domain.variables%0A self.assertLess(set(self.iris.domain.variables), set(outvars))%0A self.assertLess(set(self.titanic.domain.variables), set(outvars))%0A # the first part of the data set is iris, the second part is titanic%0A np.testing.assert_equal(self.iris.X, output.X%5B:len(self.iris), :-3%5D)%0A self.assertTrue(np.isnan(output.X%5B:len(self.iris), -3:%5D).all())%0A np.testing.assert_equal(self.titanic.X, output.X%5Blen(self.iris):, -3:%5D)%0A self.assertTrue(np.isnan(output.X%5Blen(self.iris):, :-3%5D).all())%0A%0A def test_two_inputs_intersection(self):%0A self.send_signal(%22Additional Data%22, self.iris, 0)%0A self.send_signal(%22Additional Data%22, self.titanic, 1)%0A self.widget.controls.merge_type.buttons%5B1%5D.click()%0A output = self.get_output(%22Data%22)%0A # needs to contain all instances%0A self.assertEqual(len(output), len(self.iris) + len(self.titanic))%0A # no common variables%0A outvars = output.domain.variables%0A self.assertEqual(0, len(outvars))%0A
|
|
eda9d7d6f80ddc1733c395e8f78dab4103f8aaad
|
version bump for 0.26.3.
|
oneflow/__init__.py
|
oneflow/__init__.py
|
VERSION = '0.26.2.4'
|
Python
| 0 |
@@ -14,10 +14,8 @@
.26.
-2.4
+3
'%0A%0A
|
7267966b9718b5a57c397447aad075cacb212f39
|
version bump for 0.26.1.12.
|
oneflow/__init__.py
|
oneflow/__init__.py
|
VERSION = '0.26.1.11'
|
Python
| 0 |
@@ -13,12 +13,12 @@
0.26.1.1
-1
+2
'%0A%0A
|
ef026ce3b4bf7fc50499ce5ecb688c02bbc77544
|
Add outline for orbital maneuver class
|
orbital/maneuver.py
|
orbital/maneuver.py
|
Python
| 0 |
@@ -0,0 +1,728 @@
+class Maneuver:%0A    def __init__(self):%0A        pass%0A%0A    @classmethod%0A    def raise_apocenter_by(cls, delta, orbit):%0A        pass%0A%0A    @classmethod%0A    def change_apocenter_to(cls, apocenter, orbit):%0A        pass%0A%0A    @classmethod%0A    def lower_apocenter_by(cls, delta, orbit):%0A        pass%0A%0A    @classmethod%0A    def raise_pericenter_by(cls, delta, orbit):%0A        pass%0A%0A    @classmethod%0A    def change_pericenter_to(cls, pericenter, orbit):%0A        pass%0A%0A    @classmethod%0A    def lower_pericenter_by(cls, delta, orbit):%0A        pass%0A%0A    @classmethod%0A    def hohmann_transfer(cls):%0A        # how to specify new orbit?%0A        # - new semimajor axis/radius/altitude%0A        pass%0A%0A    @classmethod%0A    def bielliptic_transfer(cls):%0A        pass%0A
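For the Hohmann case, the two burns follow from the vis-viva equation; a sketch of what the body could compute for circular radii r1 and r2 (mu is the central body's gravitational parameter — the signature below is an assumption, since the outline above leaves the target-orbit question open):

from math import sqrt

def hohmann_delta_vs(mu, r1, r2):
    # First burn enters the transfer ellipse, second circularises at r2.
    dv1 = sqrt(mu / r1) * (sqrt(2.0 * r2 / (r1 + r2)) - 1)
    dv2 = sqrt(mu / r2) * (1 - sqrt(2.0 * r1 / (r1 + r2)))
    return dv1, dv2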
|
|
c191959db6b1a14d527ec41f910682fd017421ee
|
fix for handling spaces in sys.executable and in sut_path (issue 166)
|
doc/quickstart/testlibs/LoginLibrary.py
|
doc/quickstart/testlibs/LoginLibrary.py
|
import os
import sys
class LoginLibrary:
def __init__(self):
sut_path = os.path.join(os.path.dirname(__file__),
'..', 'sut', 'login.py')
self._command_prefix = '%s %s ' % (sys.executable, sut_path)
self._status = ''
def create_user(self, username, password):
self._run_command('create', username, password)
def change_password(self, username, old_pwd, new_pwd):
self._run_command('change-password', username, old_pwd, new_pwd)
def attempt_to_login_with_credentials(self, username, password):
self._run_command('login', username, password)
def status_should_be(self, expected_status):
if expected_status != self._status:
raise AssertionError("Expected status to be '%s' but was '%s'"
% (expected_status, self._status))
def _run_command(self, command, *args):
command = '%s %s %s' % (self._command_prefix, command, ' '.join(args))
process = os.popen(command)
self._status = process.read().strip()
process.close()
|
Python
| 0 |
@@ -109,16 +109,32 @@
dirname(
+os.path.abspath(
__file__
@@ -134,16 +134,17 @@
_file__)
+)
,%0A
@@ -226,21 +226,25 @@
efix = '
+%22
%25s
-
+%22 %22
%25s
+%22
' %25 (sy
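Read together, the hunks quote both the interpreter and the script path, so the patched lines come out as (reconstructed from the diff above):

sut_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                        '..', 'sut', 'login.py')
self._command_prefix = '"%s" "%s" ' % (sys.executable, sut_path)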
|
ede05f2196dc7e96df01176f20b39772ac26e1ae
|
add python/logviewer.py
|
python/logviewer.py
|
python/logviewer.py
|
Python
| 0.000003 |
@@ -0,0 +1,2017 @@
+#!/usr/bin/python3%0A%0Aimport io, os, re, sys%0Afrom http import HTTPStatus, server%0A%0AFILE = None%0A%0AINDEX = %22%22%22%3C!DOCTYPE html%3E%0A%3Cmeta charset=%22utf-8%22%3E%0A%3Ctitle%3ELog Viewer%3C/title%3E%0A%3Cscript%3E%0Avar logBox = null;%0Avar lastOffset = 0;%0Afunction initialize() %7B%0A logBox = document.getElementById('log');%0A lastOffset = 0;%0A update();%0A%7D%0A%0Afunction update() %7B%0A fetch('/get?offset=' + lastOffset).then(function(response) %7B%0A if (response.ok) %7B%0A return response.text();%0A %7D%0A %7D).then(function(text) %7B%0A lastOffset += text.length;%0A logBox.value += text; // FIXME: escape%0A logBox.scrollTop = logBox.scrollHeight; // Scroll to bottom%0A setTimeout(update, 3000);%0A %7D);%0A%7D%0A%3C/script%3E%0A%3Cbody onLoad=%22initialize();%22%3E%0A%3Ctextarea id=%22log%22 wrap=%22off%22 cols=%22120%22 rows=%2250%22 readonly=%22readonly%22%3E%0A%3C/textarea%3E%0A%22%22%22%0A%0A# INDEX = None # Dev mode%0A%0Aclass HTTPRequestHandler(server.BaseHTTPRequestHandler):%0A%0A def do_GET(self):%0A if self.path == '/':%0A self.send_OK(%22text/html%22, INDEX.encode())%0A elif self.path.startswith('/get?'):%0A # TODO: convert query string to a dict%0A m = re.search('offset=(%5C%5Cd+)', self.path)%0A offset = int(m.group(1)) if m else 0%0A m = re.search('length=(%5C%5Cd+)', self.path)%0A length = int(m.group(1)) if m else -1%0A FILE.seek(offset)%0A body = FILE.read(length)%0A self.send_OK(%22text/plain%22, body)%0A else:%0A self.send_error(HTTPStatus.NOT_FOUND, %22File not found%22)%0A%0A def send_OK(self, content_type, body):%0A self.send_response(HTTPStatus.OK)%0A self.send_header(%22Content-Type%22, content_type)%0A self.send_header('Content-Length', int(len(body)))%0A self.end_headers()%0A self.wfile.write(body)%0A%0A%0Adef main(argv):%0A global FILE, INDEX%0A FILE = open(argv%5B1%5D, 'rb')%0A if not INDEX:%0A INDEX = open(os.path.splitext(argv%5B0%5D)%5B0%5D + '.html').read()%0A server.test(HandlerClass=HTTPRequestHandler)%0A %0A%0Aif __name__ == '__main__':%0A main(sys.argv)%0A
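Usage is a single argument, e.g. python3 logviewer.py app.log (the filename is illustrative). server.test() is the same stdlib helper behind python -m http.server, so with no extra arguments it binds port 8000; the page then polls /get every three seconds with the byte offset it has already received.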
|
|
3cbb02ebb1ba195f373c4b9238a49c30039f821e
|
revert changes
|
python/lottosend.py
|
python/lottosend.py
|
Python
| 0.000001 |
@@ -0,0 +1,1988 @@
+import json%0Aimport urllib2%0Aclass LottosendSDK:%0A%09#= Imports%09%0A%09%0A%09#= Contrusctor%0A%09def __init__(self):%09%09%09%09%0A%09%09self.token = ''%0A%09%09self.lottosend_api = ''%0A%09%09self.results_api = ''%0A%09%09self.auto_login_url = ''%09%09%0A%0A%09# signup user in lottosend system%0A%09def signupViaApi(self,first_name, last_name, prefix, phone, email, address, country, passwd, a_aid):%09%09%0A%09%09params = dict()%0A%09%09params = %7B%0A%09%09%09'web_user': %7B%0A%09%09%09%09'email': email,%0A%09%09%09%09'first_name': first_name,%0A%09%09%09%09'last_name': last_name,%0A%09%09%09%09'phone': phone,%0A%09%09%09%09'password': passwd,%0A%09%09%09%09'country': country,%0A%09%09%09%09'address': address,%0A%09%09%09%09'aid': a_aid%0A%09%09%09%7D%0A%09%09%7D%09%09%0A%09%09req = urllib2.Request(self.lottosend_api,%0A headers = %7B%0A %22Authorization%22: 'Token token=%25s' %25 self.token,%0A %22Content-Type%22: %22application/json%22,%0A %22Accept%22: %22*/*%22%09 %0A %09%7D, data = json.dumps(params))%09%09%0A%09%09return urllib2.urlopen(req).read()%0A%0A%09# obtain user token to resign-in%0A%09def obtainToken(self,id):%0A%09%09req = urllib2.Request('%25s/%25s/token'%25(self.lottosend_api,id),%0A headers = %7B%0A %22Authorization%22: 'Token token=%25s' %25 self.token,%0A %22Content-Type%22: %22application/json%22,%0A %22Accept%22: %22*/*%22%09 %0A %09%7D)%0A%0A%09%09return urllib2.urlopen(req).read()%0A%0A%09# get all user info%0A%09def getUsersInfo(self):%0A%09%09req = urllib2.Request('%25s/?last_synced_timestamp=1'%25self.lottosend_api,%0A headers = %7B%0A %22Authorization%22: 'Token token=%25s' %25 self.token,%0A %22Content-Type%22: %22application/json%22,%0A %22Accept%22: %22*/*%22%09 %0A %09%7D)%0A%09%09return urllib2.urlopen(req).read()%0A%0A%09# get user transactions%0A%09def getUsersTransactions(self):%0A%09%09req = urllib2.Request('%25s/transactions/?last_synced_timestamp=1'%25self.lottosend_api,%0A headers = %7B%0A %22Authorization%22: 'Token token=%25s' %25 self.token,%0A %22Content-Type%22: %22application/json%22,%0A %22Accept%22: %22*/*%22%09 %0A %09%7D)%0A%09%09return urllib2.urlopen(req).read()%0A
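The constructor deliberately leaves the token and endpoints blank, so a caller has to fill them in first; a sketch with invented values (every credential and URL here is a placeholder):

sdk = LottosendSDK()
sdk.token = 'secret-api-token'
sdk.lottosend_api = 'https://api.example.com/web_users'
response = sdk.signupViaApi('Jane', 'Doe', '+1', '5551234567',
                            '[email protected]', '1 Main St', 'US',
                            'passw0rd', 'affiliate-1')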
|
|
490af74a5b52d8014a8c3e13cfa6f015a4927cf4
|
add a merge migration to ensure the two leaf nodes don't cause a crash during a deploy
|
accelerator/migrations/0021_merge_20181011_1153.py
|
accelerator/migrations/0021_merge_20181011_1153.py
|
Python
| 0 |
@@ -0,0 +1,367 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.8 on 2018-10-11 15:53%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('accelerator', '0020_criterion_verbose_names'),%0A ('accelerator', '0020_remove_is_open_from_program_family'),%0A %5D%0A%0A operations = %5B%0A %5D%0A
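For context: Django emits exactly this kind of empty merge migration when the migration graph grows two leaves, and it is normally generated rather than hand-written:

python manage.py makemigrations --merge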
|
|
46cef615f9d10279ea4907a542a87e4af22b37cd
|
Add A* pathfinding algorithm to utilities.
|
enactiveagents/utilities/pathfinding.py
|
enactiveagents/utilities/pathfinding.py
|
Python
| 0 |
@@ -0,0 +1,2978 @@
+%22%22%22%0AModule containing pathfinding utilities.%0A%22%22%22%0A%0Aimport model%0Aimport Queue%0A%0Aclass Pathfinding(object):%0A%0A @staticmethod%0A def get_neighbours(world, position):%0A %22%22%22%0A Get all neighbours of a given position (cell).%0A%0A :param world: The world%0A :param position: The given position (cell)%0A %22%22%22 %0A neighbours = %5B%5D%0A for dx in %5B-1, 0, 1%5D:%0A for dy in %5B-1, 0, 1%5D:%0A if dx == 0 and dy == 0:%0A continue%0A%0A if (position.get_x() + dx %3C 0 %0A or position.get_y() + dy %3C 0 %0A or position.get_x() + dx %3E= world.get_width() %0A or position.get_y() + dy %3E= world.get_height()):%0A continue%0A%0A new_position = model.world.Position(position)%0A new_position.add((dx, dy))%0A%0A add = True%0A entities = world.get_entities_at(new_position)%0A for entity in entities:%0A if entity.collidable():%0A add = False%0A break%0A%0A if add:%0A neighbours.append(new_position)%0A%0A return neighbours%0A%0A @staticmethod%0A def heuristic(start, goal):%0A %22%22%22 %0A Calculate the heuristic cost to get from start to the goal.%0A%0A :param start: The starting position%0A :param goal: The goal position%0A %22%22%22%0A return abs(start.get_x() - goal.get_x()) + abs(start.get_y() - goal.get_y())%0A%0A @staticmethod%0A def reconstruct_path(backtrack, goal):%0A path = %5B%5D%0A%0A current = goal%0A while backtrack%5Bcurrent%5D != None:%0A path.append(current)%0A current = backtrack%5Bcurrent%5D%0A%0A return path%0A%0A @staticmethod%0A def find_path(world, start, goal):%0A %22%22%22%0A Implements the A* algorithm to find a path from the start to the goal.%0A%0A :param world: The world%0A :param start: The starting position%0A :param goal: The goal position%0A %22%22%22%0A priority_queue = Queue.PriorityQueue()%0A priority_queue.put(start, 0)%0A%0A backtrack = %7B%7D%0A cost_to = %7B%7D%0A%0A backtrack%5Bstart%5D = None%0A cost_to%5Bstart%5D = 0%0A%0A while not priority_queue.empty():%0A current = priority_queue.get()%0A%0A if current == goal:%0A # The goal has been found, so stop searching%0A break%0A%0A for neighbour in Pathfinding.get_neighbours(world, current):%0A cost_to_neighbour = cost_to%5Bcurrent%5D + 1%0A%0A if neighbour not in cost_to or cost_to_neighbour %3C cost_to%5Bneighbour%5D:%0A cost_to%5Bneighbour%5D = cost_to_neighbour%0A backtrack%5Bneighbour%5D = current%0A priority = cost_to_neighbour + Pathfinding.heuristic(neighbour, goal)%0A priority_queue.put(neighbour, priority)%0A%0A return (Pathfinding.reconstruct_path(backtrack, goal), cost_to%5Bgoal%5D)
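One caveat: Queue.PriorityQueue.put(item, priority) passes the second argument as block, not as a priority, so the queue above degrades to insertion order. The usual pattern is to enqueue (priority, item) tuples, as in this small demo (items must themselves be comparable, or carry a tiebreaker, when priorities are equal):

import Queue  # Python 2, matching the module above

pq = Queue.PriorityQueue()
pq.put((3, 'low'))
pq.put((1, 'high'))
priority, item = pq.get()  # -> (1, 'high')

Applied to find_path, the calls become priority_queue.put((0, start)), priority_queue.put((priority, neighbour)) and _, current = priority_queue.get().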
|
|
0ba701bd4459273df726e33709ae0e441bd4a767
|
migrate username field to 150 chars
|
email_auth/migrations/0003_django110.py
|
email_auth/migrations/0003_django110.py
|
Python
| 0.000002 |
@@ -0,0 +1,833 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.4 on 2016-12-27 09:08%0Afrom __future__ import unicode_literals%0A%0Afrom django import VERSION%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('email_auth', '0002_auto_20160327_1119'),%0A %5D%0A%0A operations = %5B%5D%0A if VERSION %3E= (1, 10):%0A import django.contrib.auth.validators%0A%0A operations.append(migrations.AlterField(%0A model_name='user',%0A name='username',%0A field=models.CharField(error_messages=%7B'unique': 'A user with that username already exists.'%7D, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=%5Bdjango.contrib.auth.validators.ASCIIUsernameValidator()%5D, verbose_name='username'),%0A ))%0A
|
|
650dab63187c6355ce735687d8c9bb06909602bd
|
remove [contribute] prefix on emails (bug 731369)
|
apps/mozorg/views.py
|
apps/mozorg/views.py
|
from django.core.mail import EmailMessage
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
import jingo
from product_details import product_details
import basket
import l10n_utils
from forms import ContributeForm, NewsletterCountryForm
@csrf_exempt
def contribute(request):
def has_contribute_form():
return (request.method == 'POST' and
'contribute-form' in request.POST)
def has_newsletter_form():
return (request.method == 'POST' and
'newsletter-form' in request.POST)
locale = getattr(request, 'locale', 'en-US')
success = False
newsletter_success = False
print request.POST
# This is ugly, but we need to handle two forms. I would love if
# these forms could post to separate pages and get redirected
# back, but we're forced to keep the error/success workflow on the
# same page. Please change this.
if has_contribute_form():
form = ContributeForm(request.POST)
if form.is_valid():
data = form.cleaned_data
contribute_send(data)
contribute_autorespond(request, data)
if data['newsletter']:
try:
basket.subscribe(data['email'], 'about-mozilla')
except basket.BasketException, e: pass
success = True
else:
form = ContributeForm()
if has_newsletter_form():
newsletter_form = NewsletterCountryForm(locale,
request.POST,
prefix='newsletter')
if newsletter_form.is_valid():
data = newsletter_form.cleaned_data
try:
basket.subscribe(data['email'],
'about-mozilla',
format=data['fmt'],
country=data['country'])
newsletter_success = True
except basket.BasketException, e:
msg = newsletter_form.error_class(
['We apologize, but an error occurred in our system.'
'Please try again later.']
)
newsletter_form.errors['__all__'] = msg
else:
newsletter_form = NewsletterCountryForm(locale, prefix='newsletter')
return l10n_utils.render(request,
'mozorg/contribute.html',
{'form': form,
'success': success,
'newsletter_form': newsletter_form,
'newsletter_success': newsletter_success})
def contribute_send(data):
ccs = {
'QA': '[email protected]',
'Thunderbird': '[email protected]',
'Students': '[email protected]',
'Research': '[email protected]',
'Design': '[email protected]',
'Security': '[email protected]',
'Docs': '[email protected]',
'Drumbeat': '[email protected]',
'Browser Choice': '[email protected]',
'IT': '[email protected]',
'Marketing': '[email protected]',
'Add-ons': '[email protected]'
}
from_ = '[email protected]'
subject = '[Contribute] Inquiry about Mozilla %s' % data['interest']
msg = ("Email: %s\r\nArea of Interest: %s\r\nComment: %s\r\n"
% (data['email'], data['interest'], data['comments']))
headers = {'Reply-To': data['email']}
to = ['[email protected]']
if settings.DEV:
to = [data['email']]
cc = None
if data['interest'] in ccs:
cc = [ccs[data['interest']]]
if settings.DEV:
cc = [data['email']]
email = EmailMessage(subject, msg, from_, to, cc=cc, headers=headers)
email.send()
def contribute_autorespond(request, data):
replies = {
'Support': '[email protected]',
'Localization': '[email protected]',
'QA': '[email protected]',
'Add-ons': '[email protected]',
'Marketing': '[email protected]',
'Students': '[email protected]',
'Documentation': '[email protected]',
'Research': '[email protected]',
'Thunderbird': '[email protected]',
'Accessibility': '[email protected]',
'Firefox Suggestions': '[email protected]',
'Webdev': '[email protected]',
' ': '[email protected]'
}
msgs = {
'Support': 'emails/support.txt',
'QA': 'emails/qa.txt',
'Add-ons': 'emails/addons.txt',
'Marketing': 'emails/marketing.txt',
'Students': 'emails/students.txt',
'Documentation': 'emails/documentation.txt',
'Firefox Suggestions': 'emails/suggestions.txt',
'Webdev': 'emails/webdev.txt',
' ': 'emails/other.txt'
}
subject = 'Inquiry about Mozilla %s' % data['interest']
to = [data['email']]
from_ = '[email protected]'
headers = {}
msg = ''
if data['interest'] in msgs:
msg = jingo.render_to_string(request, msgs[data['interest']], data)
else:
return False
msg = msg.replace('\n', '\r\n')
if data['interest'] in replies:
headers = {'Reply-To': replies[data['interest']]}
email = EmailMessage(subject, msg, from_, to, headers=headers)
email.send()
|
Python
| 0 |
@@ -3334,21 +3334,8 @@
= '
-%5BContribute%5D
Inqu
|
659e9ef52aa7fe47bfab7c731c8e028d1f564904
|
Allow another process to process submission info if the assigned one dies. #183
|
judge/rabbitmq/daemon.py
|
judge/rabbitmq/daemon.py
|
import json
import logging
from os import getpid
from . import connection
logger = logging.getLogger('judge.handler')
class AMQPResponseDaemon(object):
def __init__(self):
self.chan = connection.connect().channel()
self._judge_response_handlers = {
'acknowledged': self.on_acknowledged,
'grading-begin': self.on_grading_begin,
'grading-end': self.on_grading_end,
'compile-error': self.on_compile_error,
'compile-message': self.on_compile_message,
'internal-error': self.on_internal_error,
'aborted': self.on_aborted,
'test-case': self.on_test_case,
}
self._ping_handlers = {
'ping': self.on_ping,
'problem-update': self.on_problem_update,
'executor-update': self.on_executor_update,
}
self._submission_tags = {}
def run(self):
self.chan.basic_consume(self._take_new_submission, queue='submission-id')
self.chan.basic_consume(self._handle_ping, queue='judge-ping')
self.chan.start_consuming()
def stop(self):
self.chan.stop_consuming()
def _take_new_submission(self, chan, method, properties, body):
chan.basic_ack(delivery_tag=method.delivery_tag)
if not body.isdigit():
return
tag = self.chan.basic_consume(self._handle_judge_response, queue='sub-' + body, no_ack=True)
self._submission_tags[int(body)] = tag
logger.info('Declare responsibility for: %d: pid %d', int(body), getpid())
def _finish_submission(self, id):
self.chan.basic_cancel(self._submission_tags[id])
self.chan.queue_delete('sub-%d' % id)
del self._submission_tags[id]
logger.info('Finished responsibility for: %d: pid %d', id, getpid())
def _handle_judge_response(self, chan, method, properties, body):
try:
packet = json.loads(body.decode('zlib'))
self._judge_response_handlers.get(packet['name'], self.on_malformed)(packet)
except Exception:
logger.exception('Error in AMQP judge response handling')
def _handle_ping(self, chan, method, properties, body):
try:
packet = json.loads(body.decode('zlib'))
self._ping_handlers.get(packet['name'], self.on_malformed)(packet)
chan.basic_ack(delivery_tag=method.delivery_tag)
except Exception:
logger.exception('Error in AMQP judge ping handling')
chan.basic_nack(delivery_tag=method.delivery_tag)
def on_acknowledged(self, packet):
logger.info('Submission acknowledged: %d', packet['id'])
def on_grading_begin(self, packet):
logger.info('Grading has begun on: %s', packet['id'])
def on_grading_end(self, packet):
logger.info('Grading has ended on: %s', packet['id'])
self._finish_submission(packet['id'])
def on_compile_error(self, packet):
logger.info('Submission failed to compile: %s', packet['id'])
self._finish_submission(packet['id'])
def on_compile_message(self, packet):
logger.info('Submission generated compiler messages: %s', packet['id'])
def on_internal_error(self, packet):
try:
raise ValueError('\n\n' + packet['message'])
except ValueError:
logger.exception('Judge %s failed while handling submission %s', packet['judge'], packet['id'])
def on_aborted(self, packet):
logger.info('Submission aborted: %s', packet['id'])
self._finish_submission(packet['id'])
def on_test_case(self, packet):
if packet['batch']:
logger.info('Test case completed on: %s, batch #%d, case #%d', packet['id'], packet['batch'], packet['position'])
else:
logger.info('Test case completed on: %s, case #%d', packet['id'], packet['position'])
def on_malformed(self, packet):
logger.error('Malformed packet: %s', packet)
def on_ping(self, packet):
pass
def on_problem_update(self, packet):
logger.info('Judge %s updated problem list', packet['judge'])
def on_executor_update(self, packet):
logger.info('Judge %s updated executor list', packet['judge'])
|
Python
| 0 |
@@ -893,16 +893,50 @@
ags = %7B%7D
+%0A self._submission_ack = %7B%7D
%0A%0A de
@@ -1254,32 +1254,103 @@
perties, body):%0A
+ try:%0A id = int(body)%0A except ValueError:%0A
chan.bas
@@ -1394,39 +1394,8 @@
ag)%0A
- if not body.isdigit():%0A
@@ -1491,30 +1491,72 @@
sub-
-' + body, no_ack=True)
+%25d' %25 id)%0A self._submission_ack%5Bid%5D = method.delivery_tag
%0A
@@ -1583,24 +1583,17 @@
n_tags%5Bi
-nt(body)
+d
%5D = tag%0A
@@ -1655,24 +1655,17 @@
d %25d', i
-nt(body)
+d
, getpid
@@ -1707,16 +1707,83 @@
f, id):%0A
+ self.chan.basic_ack(delivery_tag=self._submission_ack%5Bid%5D)%0A
@@ -1916,16 +1916,53 @@
ags%5Bid%5D%0A
+ del self._submission_ack%5Bid%5D%0A
@@ -2248,32 +2248,93 @@
formed)(packet)%0A
+ chan.basic_ack(delivery_tag=method.delivery_tag)%0A
except E
|
ed20a93e917cfdddc5cd49cc6446b6e80fb4573d
|
Migrate symbtr uuid field to django type
|
makam/migrations/0007_auto_20150812_1615.py
|
makam/migrations/0007_auto_20150812_1615.py
|
Python
| 0.000001 |
@@ -0,0 +1,741 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0Aimport django_extensions.db.fields%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('makam', '0006_auto_20150727_1631'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='symbtr',%0A name='uuid',%0A field=django_extensions.db.fields.UUIDField(max_length=36, editable=False, blank=True),%0A ),%0A migrations.RunSQL('alter table makam_symbtr alter COLUMN uuid type uuid USING (%22uuid%22::uuid)'),%0A migrations.AlterField(%0A model_name='symbtr',%0A name='uuid',%0A field=models.UUIDField(db_index=True),%0A ),%0A %5D%0A
|
|
e142530eef5754d4314d97f0d9e144f348d3909a
|
add docs_create_missing_stubs
|
maintenance/docs_create_missing_stubs.py
|
maintenance/docs_create_missing_stubs.py
|
Python
| 0 |
@@ -0,0 +1,1620 @@
+import os%0Aimport subprocess%0A%0A# hardcoded paths%0AHUNTER_DIR='..'%0APACKAGES_DIR=os.path.join(HUNTER_DIR, 'cmake/projects')%0ADOCS_PKG_DIR=os.path.join(HUNTER_DIR, 'docs', 'packages', 'pkg')%0A%0A# get all wiki entries%0Adocs_filenames = %5Bx for x in os.listdir(DOCS_PKG_DIR) if x.endswith('.rst')%5D%0Adocs_entries = %5Bx%5B:-4%5D for x in docs_filenames%5D%0A%0A# get all hunter package entries %0Apkg_entries = %5Bx for x in os.listdir(PACKAGES_DIR) if os.path.isdir(os.path.join(PACKAGES_DIR, x))%5D%0Apkg_entries_lower = %5Bx.lower() for x in pkg_entries%5D%0A%0A# packages both in hunter and wiki%0Apkg_match = %5Bx for x in pkg_entries if x in docs_entries%5D%0A# packages only in hunter%0Apkg_only_hunter = %5Bx for x in pkg_entries if x not in pkg_match%5D%0A%0A# output directories%0Apackages_dir = 'packages'%0Atmp_dir = 'packages/tmp'%0Aonly_hunter_dir = 'packages/only_hunter'%0A%0A# create if not exist%0Afor d in %5Bpackages_dir, tmp_dir, only_hunter_dir%5D:%0A if not os.path.exists(d):%0A os.mkdir(d)%0A%0A# header for rst files%0Aheader_format_string = %22%22%22.. spelling::%0A%0A %7B%7D%0A%0A.. _pkg.%7B%7D:%0A%0A%7B%7D%0A%7B%7D%0A%0A%22%22%22%0A%0A# create dummy entries for packages only in hunter%0Afor entry in pkg_only_hunter:%0A source_md = os.path.join(WIKI_DIR, 'pkg.' + entry.lower() + '.md')%0A tmp_rst = os.path.join(tmp_dir, entry + '.rst')%0A target_rst = os.path.join(only_hunter_dir, entry + '.rst')%0A %0A underscores = %22=%22 * len(entry)%0A header = header_format_string.format(entry, entry, entry, underscores)%0A #print(header)%0A %0A with open(target_rst, 'w') as f:%0A f.write(header)%0A%0Aprint(%22pkg_match entries: %22, len(pkg_match))%0Aprint(%22pkg_only_hunter entries: %22, len(pkg_only_hunter))
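Note that the loop above references WIKI_DIR, which is never defined (source_md and tmp_rst are also unused), so as written it raises NameError on the first iteration; dropping those lines, the intended body appears to be:

for entry in pkg_only_hunter:
    target_rst = os.path.join(only_hunter_dir, entry + '.rst')
    underscores = "=" * len(entry)
    header = header_format_string.format(entry, entry, entry, underscores)
    with open(target_rst, 'w') as f:
        f.write(header)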
|
|
52c50ca6e4c5d2ee75300617c5da118fb1136e76
|
Add custom plot style contour_image.
|
mplstyles/plots.py
|
mplstyles/plots.py
|
Python
| 0 |
@@ -0,0 +1,1200 @@
+from matplotlib import cm%0Aimport matplotlib.pyplot as plt%0Afrom mplstyles import cmap as colormap%0Aimport numpy as np%0A%0Adef contour_image(x,y,Z,cmap=None,vmax=None,vmin=None,interpolation='nearest',contour_labelsize=9,contour_opts=%7B%7D,imshow_opts=%7B%7D,clegendlabels=%5B%5D,label=False):%0A%09ax = plt.gca()%0A%09%0A%09x_delta = float((x%5B-1%5D-x%5B0%5D))/(len(x)-1)/2.%0A%09y_delta = float((y%5B-1%5D-y%5B0%5D))/(len(y)-1)/2.%0A%09%0A%09extent=(x%5B0%5D,x%5B-1%5D,y%5B0%5D,y%5B-1%5D)%0A%09%0A%09extent_delta = (x%5B0%5D-x_delta,x%5B-1%5D+x_delta,y%5B0%5D-y_delta,y%5B-1%5D+y_delta)%0A%09%0A%09ax.set_xlim(x%5B0%5D,x%5B-1%5D)%0A%09ax.set_ylim(y%5B0%5D,y%5B-1%5D)%0A%09%0A%09if cmap is None:%0A%09%09cmap = colormap.reverse(cm.Blues)%0A%09%0A%09Z = Z.transpose()%0A%0A%09#plt.contourf(X,Y,self.pdata,interpolation=interpolation)%0A%09cs = ax.imshow(Z,interpolation=interpolation,origin='lower',aspect='auto',extent=extent_delta,cmap=cmap,vmax=vmax,vmin=vmin, **imshow_opts)%0A%0A%09# Draw contours%0A%09X, Y = np.meshgrid(x, y)%0A%09CS = ax.contour(X, Y, Z, extent=extent, origin='lower', **contour_opts )%0A%0A%09# Label contours%0A%09if label:%0A%09%09ax.clabel(CS, fontsize=contour_labelsize)%0A%0A%09# Show contours in legend if desired%0A%09if len(clegendlabels) %3E 0:%0A%09%09for i in range(len(clegendlabels)):%0A%09%09%09CS.collections%5Bi%5D.set_label(clegendlabels%5Bi%5D)%0A%09%09#ax.legend()%0A%09%0A%09return cs, CS
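A hedged usage sketch — the Gaussian surface is made up, and only the contour_image signature comes from the module above; Z is indexed [x, y], matching the transpose the function applies internally:

import numpy as np
import matplotlib.pyplot as plt
from mplstyles.plots import contour_image  # assumes the file is importable as mplstyles.plots

x = np.linspace(-2, 2, 100)
y = np.linspace(-2, 2, 100)
Z = np.exp(-(x[:, None]**2 + y[None, :]**2))  # shape (len(x), len(y))

cs, CS = contour_image(x, y, Z, label=True)
plt.colorbar(cs)
plt.show()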
|
|
e2a0a27e853e1e8c8913e9851d2a7aa0fb18b3ee
|
add exception test
|
tests/exceptions_test.py
|
tests/exceptions_test.py
|
Python
| 0.000001 |
@@ -0,0 +1,1466 @@
+# -*- coding: utf-8 -%0A#%0A# Copyright (c) 2008 (c) Benoit Chesneau %[email protected]%3E %0A#%0A# Permission to use, copy, modify, and distribute this software for any%0A# purpose with or without fee is hereby granted, provided that the above%0A# copyright notice and this permission notice appear in all copies.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22 AND THE AUTHOR DISCLAIMS ALL WARRANTIES%0A# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF%0A# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR%0A# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES%0A# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN%0A# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF%0A# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.%0A#%0A%0Aimport unittest%0A%0Aclass ResourceTestCase(unittest.TestCase):%0A%0A def testForceException(self):%0A %0A import webob.exc%0A from restkit.errors import use_simple_exception, use_wsgi_exception%0A use_simple_exception()%0A from restkit.errors import ResourceError%0A self.assert_(issubclass(ResourceError, webob.exc.WSGIHTTPException) == False)%0A use_wsgi_exception()%0A %0A def testWebobException(self):%0A import webob.exc%0A from restkit.errors import ResourceError%0A self.assert_(issubclass(ResourceError, webob.exc.WSGIHTTPException) == True)%0A %0Aif __name__ == '__main__':%0A unittest.main()
|
|
9231511307631ad92b896941607c4e5f3c7704ce
|
Create new script for attaching and releasing the gripper's compressor glove.
|
cs473_baxter/scripts/glove.py
|
cs473_baxter/scripts/glove.py
|
Python
| 0 |
@@ -0,0 +1,1281 @@
+#!/usr/bin/env python%0A%0Aimport argparse%0A%0Aimport rospy%0A%0Aimport baxter_interface%0A%0Aclass Glove():%0A%09def __init__(self, gripper):%0A%09%09self.gripper = Gripper(gripper)%0A%0A%09%09# Verify robot is enabled%0A%09%09print %22Getting robot state...%22%0A%09%09self._rs = baxter_interface.RobotEnable()%0A%09%09self._init_state = self._rs.state().enabled%0A%09%09print %22Enabling robot...%22%0A%09%09self._rs.enable()%0A%09%09print %22Running. Ctrl-c to quit%22%0A%0A%09def grip_glove(self):%0A%09%09self.gripper.calibrate()%0A%09%09self.gripper.open()%0A%09%09# set moving force%0A%09%09# set holding force%0A%09%09# prompt for glove%0A%09%09# grip glove%0A%0A%09def release_glove(self):%0A%09%09self.gripper.open()%0A%0A%09def clean_shutdown(self):%0A%09%09print %22%5CnExiting glove routine...%22%0A%09%09if not self._init_state and self._rs.state().enabled:%0A%09%09%09print %22Disabling robot...%22%0A%09%09%09self._rs.disable()%0A%0A%0Adef main():%0A%09arg_fmt = argparse.RawDescriptionHelpFormatter%0A%09parser = argparse.ArgumentParser(formatter_class=arg_fmt,%0A%09%09%09%09%09%09%09%09%09description=main.__doc__)%0A%09parser.add_argument(%0A%09%09'-g', '--grip', choices=%5B'grip', 'release'%5D, required=True,%0A%09%09help=%22grip or release glove%22%0A%09)%0A%09args = parser.parse_args(rospy.myargv()%5B1:%5D)%0A%0A%09g = Glove('right')%0A%0A%09# register shutdown callback%0A%09rospy.on_shutdown(g.clean_shutdown)%0A%0A%09if args.grip is 'grip':%0A%09%09g.grip_glove()%0A%09else:%0A%09%09g.release_glove()%0A%0Aif __name__ == '__main__':%0A%09main()%0A
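Two lines worth flagging in the script above: Gripper is used unqualified in __init__ (presumably baxter_interface.Gripper was meant), and args.grip is 'grip' compares object identity, which is not guaranteed for equal strings; the dispatch is reliable only as:

if args.grip == 'grip':
    g.grip_glove()
else:
    g.release_glove()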
|
|
bc2a707ea12716612422959b107b72c84d9dc946
|
add test for dump_table_to_json()
|
tests/test_dump_table.py
|
tests/test_dump_table.py
|
Python
| 0.000001 |
@@ -0,0 +1,1416 @@
+import scraperwiki%0Aimport unittest%0Afrom dc_base_scrapers.common import dump_table_to_json%0A%0A%0Aclass DumpTableTests(unittest.TestCase):%0A%0A def test_dump_table(self):%0A # create tables with same columns in different order%0A scraperwiki.sqlite.execute(%22%22%22CREATE TABLE foo (%0A b TEXT,%0A a INT,%0A c TEXT%0A );%22%22%22)%0A scraperwiki.sqlite.execute(%22%22%22CREATE TABLE bar (%0A c TEXT,%0A b TEXT,%0A a INT%0A );%22%22%22)%0A%0A # insert same content differently ordered%0A foo_records = %5B%0A %7B'a': 2, 'b': 'foo', 'c': 'foo'%7D,%0A %7B'a': 1, 'b': 'foo', 'c': 'foo'%7D,%0A %7B'a': 3, 'b': 'foo', 'c': 'foo'%7D,%0A %5D%0A for rec in foo_records:%0A scraperwiki.sqlite.save(unique_keys='a', table_name='foo', data=rec)%0A scraperwiki.sqlite.commit_transactions()%0A%0A bar_records = %5B%0A %7B'a': 2, 'b': 'foo', 'c': 'foo'%7D,%0A %7B'a': 3, 'b': 'foo', 'c': 'foo'%7D,%0A %7B'a': 1, 'b': 'foo', 'c': 'foo'%7D,%0A %5D%0A for rec in bar_records:%0A scraperwiki.sqlite.save(unique_keys='a', table_name='bar', data=rec)%0A scraperwiki.sqlite.commit_transactions()%0A%0A # check that json representation is consistent%0A foo_json = dump_table_to_json('foo', 'a')%0A bar_json = dump_table_to_json('bar', 'a')%0A%0A self.assertEqual(foo_json, bar_json)%0A
|
|
5207d3c91d64170d783388a064334e495b3b562c
|
Add a new test for the latest RegexLexer change, multiple new states including '#pop'.
|
tests/test_regexlexer.py
|
tests/test_regexlexer.py
|
Python
| 0.000007 |
@@ -0,0 +1,965 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0A Pygments regex lexer tests%0A ~~~~~~~~~~~~~~~~~~~~~~~~~~%0A%0A :copyright: 2007 by Georg Brandl.%0A :license: BSD, see LICENSE for more details.%0A%22%22%22%0A%0Aimport unittest%0A%0Afrom pygments.token import Text%0Afrom pygments.lexer import RegexLexer%0A%0Aclass TestLexer(RegexLexer):%0A %22%22%22Test tuple state transitions including #pop.%22%22%22%0A tokens = %7B%0A 'root': %5B%0A ('a', Text.Root, 'rag'),%0A ('e', Text.Root),%0A %5D,%0A 'beer': %5B%0A ('d', Text.Beer, ('#pop', '#pop')),%0A %5D,%0A 'rag': %5B%0A ('b', Text.Rag, '#push'),%0A ('c', Text.Rag, ('#pop', 'beer')),%0A %5D,%0A %7D%0A%0Aclass TupleTransTest(unittest.TestCase):%0A def test(self):%0A lx = TestLexer()%0A toks = list(lx.get_tokens_unprocessed('abcde'))%0A self.assertEquals(toks,%0A %5B(0, Text.Root, 'a'), (1, Text.Rag, 'b'), (2, Text.Rag, 'c'),%0A (3, Text.Beer, 'd'), (4, Text.Root, 'e')%5D)%0A
|
|
9445433b54fcbd7f56617fff853b761107bc94cc
|
Test add
|
a.py
|
a.py
|
Python
| 0 |
@@ -0,0 +1,29 @@
+%22%22%22%0AComment%0A%22%22%22%0Aprint %22apa%22%0A%0A
|
|
c1b34a71306af1c38f305981dc1d50135b2887d8
|
add the missing new executor.py file
|
asyncio/executor.py
|
asyncio/executor.py
|
Python
| 0.000004 |
@@ -0,0 +1,2378 @@
+from .log import logger%0A%0A__all__ = (%0A 'CancelledError', 'TimeoutError',%0A 'FIRST_COMPLETED', 'FIRST_EXCEPTION', 'ALL_COMPLETED',%0A )%0A%0A# Argument for default thread pool executor creation.%0A_MAX_WORKERS = 5%0A%0Atry:%0A import concurrent.futures%0A import concurrent.futures._base%0Aexcept ImportError:%0A FIRST_COMPLETED = 'FIRST_COMPLETED'%0A FIRST_EXCEPTION = 'FIRST_EXCEPTION'%0A ALL_COMPLETED = 'ALL_COMPLETED'%0A%0A class Future(object):%0A def __init__(self, callback, args):%0A try:%0A self._result = callback(*args)%0A self._exception = None%0A except Exception as err:%0A raise%0A self._result = None%0A self._exception = err%0A self.callbacks = %5B%5D%0A%0A def cancelled(self):%0A return False%0A%0A def done(self):%0A return True%0A%0A def exception(self):%0A return self._exception%0A%0A def result(self):%0A if self._exception is not None:%0A raise self._exception%0A else:%0A return self._result%0A%0A def add_done_callback(self, callback):%0A callback(self)%0A%0A class Error(Exception):%0A %22%22%22Base class for all future-related exceptions.%22%22%22%0A pass%0A%0A class CancelledError(Error):%0A %22%22%22The Future was cancelled.%22%22%22%0A pass%0A%0A class TimeoutError(Error):%0A %22%22%22The operation exceeded the given deadline.%22%22%22%0A pass%0A%0A class SynchronousExecutor:%0A %22%22%22%0A Synchronous executor: submit() blocks until it gets the result.%0A %22%22%22%0A def submit(self, callback, *args):%0A return Future(callback, args)%0A%0A def shutdown(self, wait):%0A pass%0A%0A def get_default_executor():%0A logger.error(%22concurrent.futures module is missing: %22%0A %22use a synchrounous executor as fallback!%22)%0A return SynchronousExecutor()%0Aelse:%0A FIRST_COMPLETED = concurrent.futures.FIRST_COMPLETED%0A FIRST_EXCEPTION = concurrent.futures.FIRST_EXCEPTION%0A ALL_COMPLETED = concurrent.futures.ALL_COMPLETED%0A%0A Future = concurrent.futures.Future%0A Error = concurrent.futures._base.Error%0A CancelledError = concurrent.futures.CancelledError%0A TimeoutError = concurrent.futures.TimeoutError%0A%0A def get_default_executor():%0A return concurrent.futures.ThreadPoolExecutor(_MAX_WORKERS)%0A
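In the fallback Future above, the bare raise makes the two assignments after it unreachable, so an exception escapes __init__ instead of being stored for result()/exception(); the handler presumably intends:

except Exception as err:
    self._result = None
    self._exception = err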
|
|
fad65e68b3fcfa736ba5d6e62fbe0588100dc153
|
Create gdax-myTrades-pagination.py
|
examples/py/gdax-myTrades-pagination.py
|
examples/py/gdax-myTrades-pagination.py
|
Python
| 0 |
@@ -0,0 +1,1045 @@
+# -*- coding: utf-8 -*-%0A%0Aimport os%0Aimport sys%0A%0Aroot = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))%0Asys.path.append(root + '/python')%0A'''%0AExample snippet to traverse GDAX / CoinBase Pro pagination.%0AUseful for reaching back more than 100 myTrades, the same works%0Afor fetchClosedOrders%0A%0A'''%0A%0Aimport ccxt%0A%0Aexchange = ccxt.gdax(%7B%0A %22apiKey%22: %22123456%22,%0A %22secret%22: %22/abcdefghijklmnop/w==%22,%0A %22password%22: %22987654321%22,%0A %22enableRateLimit%22: True%0A%7D)%0A%0A#use sandbox url%0Aexchange.urls%5B'api'%5D = exchange.urls%5B'test'%5D%0A%0Aparam_key=''%0Aparam_value=''%0AallMyTrades: list = %5B%5D%0A%0Awhile True:%0A myTrades = exchange.fetchMyTrades(symbol='BTC/USD', params=%7Bparam_key: param_value%7D)%0A%0A # Handle gdax with pagination ...%0A if exchange.last_response_headers._store.get('cb-after'):%0A param_key = 'after'%0A param_value = exchange.last_response_headers._store%5B'cb-after'%5D%5B1%5D%0A%0A allMyTrades.extend(myTrades)%0A%0A else:%0A allMyTrades.extend(myTrades)%0A break%0A%0Afor trade in allMyTrades:%0A print(trade)%0A
|
|
6cc59b5ad1b70e0b303680d9e58c8d8158bec1e6
|
Create solution.py
|
hackerrank/algorithms/implementation/easy/sock_merchant/py/solution.py
|
hackerrank/algorithms/implementation/easy/sock_merchant/py/solution.py
|
Python
| 0.000018 |
@@ -0,0 +1,205 @@
+#!/bin/python3%0A%0Aimport sys%0Aimport collections%0A%0An = int(input().strip())%0Ac = map(int, input().strip().split(' '))%0A%0ApairCount = sum(count // 2 for count in collections.Counter(c).values())%0A%0Aprint(pairCount)%0A
|
|
bf01ea13c046d711939c1bb0aaedf9fbbc7c638d
|
Add initial systemd module
|
salt/modules/systemd.py
|
salt/modules/systemd.py
|
Python
| 0.000001 |
@@ -0,0 +1,1246 @@
+'''%0AProvide the service module for systemd%0A'''%0A%0Adef __virtual__():%0A '''%0A Only work on systems which default to systemd%0A '''%0A if __grains__%5B'os'%5D == 'Fedora' and __grains__%5B'osrelease'%5D %3E 15:%0A%09 return 'service'%0A return False%0A%0Adef start(name):%0A '''%0A Start the specified service with systemd%0A%0A CLI Example::%0A%0A%09 salt '*' service.start %3Cservice name%3E%0A '''%0A cmd = 'systemctl start %7B0%7D.service'.format(name)%0A return not __salt__%5B'cmd.retcode'%5D(cmd)%0A%0Adef stop(name):%0A '''%0A Stop the specifed service with systemd%0A%0A CLI Example::%0A%0A%09 salt '*' service.stop %3Cservice name%3E%0A '''%0A cmd = 'systemctl stop %7B0%7D.service'.format(name)%0A return not __salt__%5B'cmd.retcode'%5D(cmd)%0A%0Adef restart(name):%0A '''%0A Start the specified service with systemd%0A%0A CLI Example::%0A%0A%09 salt '*' service.start %3Cservice name%3E%0A '''%0A cmd = 'systemctl restart %7B0%7D.service'.format(name)%0A return not __salt__%5B'cmd.retcode'%5D(cmd)%0A%0Adef status(name):%0A '''%0A Return the status for a service via systemd, returns the PID if the service%0A is running or an empty string if the service is not running%0A '''%0A cmd = (%22systemctl restart %7B0%7D.service %7C grep 'Main PID'%22%0A%09%09 %22 %7C awk '%7Bprint $3%7D'%22).format(name)%0A return __salt__%5B'cmd.run'%5D(cmd).strip()%0A%0A
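The status function is worth double-checking before use: it shells out to systemctl restart, which would bounce the service on every status query, and the literal awk braces inside a str.format() template must be doubled or .format() raises a KeyError. A corrected sketch:

cmd = ("systemctl status {0}.service | grep 'Main PID'"
       " | awk '{{print $3}}'").format(name)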
|
|
b1738d70e3a90e7bf27c9eeccb25b09403b74f1a
|
Add transport factory
|
devicehive/transport.py
|
devicehive/transport.py
|
Python
| 0.000004 |
@@ -0,0 +1,574 @@
+def init(name, data_format, data_format_options, handler, handler_options):%0A transport_class_name = '%25sTransport' %25 name.title()%0A transport_module = __import__('devicehive.transports.%25s_transport' %25 name,%0A fromlist=%5Btransport_class_name%5D)%0A return getattr(transport_module, transport_class_name)(data_format,%0A data_format_options,%0A handler,%0A handler_options)%0A
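The factory resolves a class purely by convention — name.title() + 'Transport' inside devicehive.transports.<name>_transport. A hypothetical call (the handler class and option names are assumptions):

transport = init('websocket', 'json', {},
                 MyHandler, {'some_option': True})
# imports devicehive.transports.websocket_transport
# and instantiates its WebsocketTransport class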
|
|
9ba1dd92919fb37862e6e94bf55cc25e7be3b009
|
add co.py
|
co.py
|
co.py
|
Python
| 0.000044 |
@@ -0,0 +1,1021 @@
+#!/bin/env python3%0Aimport functools%0Adef coroutine(f):%0A @functools.wraps(f)%0A def _coroutine(*args, **kwargs):%0A active_coroutine = f(*args, **kwargs)%0A next(active_coroutine)%0A return active_coroutine%0A return _coroutine%0A@coroutine%0Adef simple_coroutine():%0A print('Setting up the coroutine')%0A try:%0A while True:%0A item = yield%0A print('Got item: %25r' %25 item)%0A except GeneratorExit:%0A print('Normal exit')%0A except Exception as e:%0A print('Exception exit: %25r' %25 e)%0A raise%0A finally:%0A print('Any exit')%0Aprint('Creating simple coroutine')%0Aactive_coroutine = simple_coroutine()%0Aprint()%0Aprint('Sending spam')%0Aactive_coroutine.send('spam')%0Aprint()%0Aprint('Close the coroutine')%0Aactive_coroutine.close()%0Aprint()%0Aprint('Creating simple coroutine')%0Aactive_coroutine = simple_coroutine()%0Aprint()%0Aprint('Sending eggs')%0Aactive_coroutine.send('eggs')%0Aprint()%0Aprint('Throwing runtime error')%0Aactive_coroutine.throw(RuntimeError, 'Oops...')%0Aprint()%0A
|
|
2158408476733010b7416d474226bb3522619aeb
|
Add level to Group serializer (refs GH-1475)
|
src/sentry/api/serializers/models/group.py
|
src/sentry/api/serializers/models/group.py
|
from __future__ import absolute_import
from collections import defaultdict
from datetime import timedelta
from django.core.urlresolvers import reverse
from django.utils import timezone
from sentry.api.serializers import Serializer, register
from sentry.app import tsdb
from sentry.constants import TAG_LABELS
from sentry.models import (
Group, GroupBookmark, GroupTagKey, GroupSeen, GroupStatus
)
from sentry.utils.db import attach_foreignkey
from sentry.utils.http import absolute_uri
@register(Group)
class GroupSerializer(Serializer):
def get_attrs(self, item_list, user):
attach_foreignkey(item_list, Group.project, ['team'])
if user.is_authenticated() and item_list:
bookmarks = set(GroupBookmark.objects.filter(
user=user,
group__in=item_list,
).values_list('group_id', flat=True))
seen_groups = dict(GroupSeen.objects.filter(
user=user,
group__in=item_list,
).values_list('group_id', 'last_seen'))
else:
bookmarks = set()
seen_groups = {}
tag_counts = defaultdict(dict)
tag_results = GroupTagKey.objects.filter(
group__in=item_list,
).values_list('key', 'group', 'values_seen')
for key, group_id, values_seen in tag_results:
tag_counts[key][group_id] = values_seen
# we need to compute stats at 1d (1h resolution), and 14d/30d (1 day res)
group_ids = [g.id for g in item_list]
now = timezone.now()
hourly_stats = tsdb.get_range(
model=tsdb.models.group,
keys=group_ids,
end=now,
start=now - timedelta(days=1),
rollup=3600,
)
daily_stats = tsdb.get_range(
model=tsdb.models.group,
keys=group_ids,
end=now,
start=now - timedelta(days=30),
rollup=3600 * 24,
)
result = {}
for item in item_list:
active_date = item.active_at or item.last_seen
tags = {}
for key in tag_counts.iterkeys():
label = TAG_LABELS.get(key, key.replace('_', ' ')).lower()
try:
value = tag_counts[key].get(item.id, 0)
except KeyError:
value = 0
tags[key] = {
'label': label,
'count': value,
}
result[item] = {
'is_bookmarked': item.id in bookmarks,
'has_seen': seen_groups.get(item.id, active_date) > active_date,
'tags': tags,
'hourly_stats': hourly_stats[item.id],
'daily_stats': daily_stats[item.id],
}
return result
def serialize(self, obj, attrs, user):
status = obj.get_status()
if status == GroupStatus.RESOLVED:
status_label = 'resolved'
elif status == GroupStatus.MUTED:
status_label = 'muted'
else:
status_label = 'unresolved'
if obj.team:
permalink = absolute_uri(reverse('sentry-group', args=[
obj.organization.slug, obj.project.slug, obj.id]))
else:
permalink = None
d = {
'id': str(obj.id),
'count': str(obj.times_seen),
'title': obj.message_short,
'culprit': obj.culprit,
'permalink': permalink,
'firstSeen': obj.first_seen,
'lastSeen': obj.last_seen,
'timeSpent': obj.avg_time_spent,
'isResolved': obj.get_status() == GroupStatus.RESOLVED,
'status': status_label,
'isPublic': obj.is_public,
# 'score': getattr(obj, 'sort_value', 0),
'project': {
'name': obj.project.name,
'slug': obj.project.slug,
},
'stats': {
'24h': attrs['hourly_stats'],
'30d': attrs['daily_stats'],
},
'isBookmarked': attrs['is_bookmarked'],
'hasSeen': attrs['has_seen'],
'tags': attrs['tags'],
}
return d
|
Python
| 0 |
@@ -3706,16 +3706,62 @@
SOLVED,%0A
+ 'level': obj.get_level_display(),%0A
|
02844d3a2ed329a02afaaf8dc1ad07407768a68b
|
Create app.py
|
app.py
|
app.py
|
Python
| 0.000003 |
@@ -0,0 +1,843 @@
+# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4%0Afrom flask import Flask%0Afrom flask import request%0Aimport requests%0A%0Aapp = Flask(__name__)%0A%0A%0Adef get_allergies():%0A URL = 'http://saallergy.info/today'%0A HEADERS = %7B'accept': 'application/json'%7D%0A r = requests.get(URL, headers=HEADERS)%0A data = r.json()%0A date = data%5B'results'%5D%5B0%5D%5B'date'%5D%0A text = 'Allergies for %25s: ' %25 date%0A for a in data%5B'results'%5D:%0A text = text + '%25s - %25s (%25s) %7C ' %25 (a%5B'allergen'%5D, a%5B'level'%5D,%0A a%5B'count'%5D)%0A text = text.rstrip(' ')%0A text = text.rstrip('%7C')%0A return text%0A %0A %[email protected](%22/allergies%22)%0Adef allergies():%0A allergies_str = get_allergies()%0A return allergies_str%0A%0Aif __name__ == %22__main__%22:%0A app.run(host='0.0.0.0')%0A
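Run directly, app.run(host='0.0.0.0') serves on Flask's default port 5000, so a quick smoke test is:

curl http://localhost:5000/allergies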
|
|
43d73b7bdc8b38b3e2e583a0321936ab80c0f4e0
|
Add bot.py
|
bot.py
|
bot.py
|
Python
| 0 |
@@ -0,0 +1,135 @@
+import praw%0A%0Ar = praw.Reddit('/u/powderblock Glasses Bot')%0A%0Afor post in r.get_subreddit('all').get_new(limit=5):%0A%09print(str(post.url))%0A
|
|
42389c93d11de00b50b08fcd1eca74fbe3941365
|
Create banner-generator.py
|
banner-generator.py
|
banner-generator.py
|
Python
| 0.000001 |
@@ -0,0 +1,525 @@
+#!/usr/bin/python%0A%0A#####################################################%0A# grabs a banner image from flaming text%0A# and saves it to the project directory as banner.png%0A#####################################################%0A%0Aimport urllib%0Aimport random%0A%0Aword_file = %22words.txt%22%0AWORDS = open(word_file).read().splitlines()%0Amytext = random.choice(WORDS) + '+' + random.choice(WORDS)%0Amyurl = %22http://www.flamingtext.com/net-fu/proxy_form.cgi?imageoutput=true&script=dance-logo&text=%22+mytext%0Aurllib.urlretrieve(myurl, %22banner.png%22)%0A
|
|
45b789010409e4e2e2afc88cb776c8b70e7768ec
|
Add unit test for DakotaBase
|
dakota/tests/test_dakota_base.py
|
dakota/tests/test_dakota_base.py
|
Python
| 0 |
@@ -0,0 +1,690 @@
+#!/usr/bin/env python%0A#%0A# Tests for dakota.dakota_base module.%0A#%0A# Call with:%0A# $ nosetests -sv%0A#%0A# Mark Piper ([email protected])%0A%0Aimport os%0Aimport filecmp%0Afrom nose.tools import *%0Afrom dakota.dakota_base import DakotaBase%0A%0A%0A# Fixtures -------------------------------------------------------------%0A%0Adef setup_module():%0A %22%22%22Called before any tests are performed.%22%22%22%0A print('%5Cn*** DakotaBase tests')%0A%0Adef teardown_module():%0A %22%22%22Called after all tests have completed.%22%22%22%0A pass%0A%0A# Tests ----------------------------------------------------------------%0A%0A@raises(TypeError)%0Adef test_instantiate():%0A %22%22%22Test whether DakotaBase fails to instantiate.%22%22%22%0A d = DakotaBase()%0A
|
|
b0ec56421c65c744c62c98850b69719fa6c351a1
|
Update base.py
|
docs/examples/pyramid/base.py
|
docs/examples/pyramid/base.py
|
__author__ = 'Bhoomit'
import json
from pyramid.response import Response as PyramidResponse
from oauth2.web import Response
from oauth2.error import OAuthInvalidError, \
ClientNotFoundError, OAuthInvalidNoRedirectError, UnsupportedGrantError, ParameterMissingError
from oauth2.client_authenticator import ClientAuthenticator, request_body
from oauth2.tokengenerator import Uuid4
class Request():
"""
Contains data of the current HTTP request.
"""
def __init__(self, env):
self.method = env.method
self.params = env.json_body
self.registry = env.registry
self.headers = env.headers
def post_param(self, name):
return self.params.get(name)
class BaseAuthController(object):
def __init__(self, request, site_adapter):
self.request = Request(request)
self.site_adapter = site_adapter
self.token_generator=Uuid4()
self.client_store = self._get_client_store()
self.access_token_store = self._get_token_store()
self.client_authenticator = ClientAuthenticator(
client_store=self.client_store,
source=request_body
)
self.grant_types = []
@classmethod
def _get_token_store(cls):
raise NotImplementedError
@classmethod
def _get_client_store(cls):
raise NotImplementedError
def add_grant(self, grant):
"""
Adds a Grant that the provider should support.
:param grant: An instance of a class that extends
:class:`oauth2.grant.GrantHandlerFactory`
"""
if hasattr(grant, "expires_in"):
self.token_generator.expires_in[grant.grant_type] = grant.expires_in
if hasattr(grant, "refresh_expires_in"):
self.token_generator.refresh_expires_in = grant.refresh_expires_in
self.grant_types.append(grant)
def _determine_grant_type(self, request):
for grant in self.grant_types:
grant_handler = grant(request, self)
if grant_handler is not None:
return grant_handler
raise UnsupportedGrantError
def authenticate(self):
response = Response()
grant_type = self._determine_grant_type(self.request)
grant_type.read_validate_params(self.request)
grant_type.process(self.request, response, {})
return PyramidResponse(body=response.body, status=response.status_code, content_type="application/json")
|
Python
| 0.000001 |
@@ -1,27 +1,4 @@
-__author__ = 'Bhoomit'%0A
%0Aimp
@@ -2527,8 +2527,9 @@
n/json%22)
+%0A
|
b1aebcccfb3f29bf3bc35ea4e73ba0e86dc0e64d
|
Fix message output method.
|
python/OmniSharp.py
|
python/OmniSharp.py
|
import vim, urllib2, urllib, urlparse, logging, json, os, os.path, cgi
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from SocketServer import ThreadingMixIn
logger = logging.getLogger('omnisharp')
logger.setLevel(logging.WARNING)
log_dir = os.path.join(vim.eval('expand("<sfile>:p:h")'), '..', 'log')
if not os.path.exists(log_dir):
os.makedirs(log_dir)
hdlr = logging.FileHandler(os.path.join(log_dir, 'python.log'))
logger.addHandler(hdlr)
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
def getResponse(endPoint, additionalParameters=None):
parameters = {}
parameters['line'] = vim.eval('line(".")')
parameters['column'] = vim.eval('col(".")')
parameters['buffer'] = '\r\n'.join(vim.eval("getline(1,'$')")[:])
if(vim.eval('exists("+shellslash") && &shellslash')):
parameters['filename'] = vim.current.buffer.name.replace('/', '\\')
else:
parameters['filename'] = vim.current.buffer.name
if(additionalParameters != None):
parameters.update(additionalParameters)
target = urlparse.urljoin(vim.eval('g:OmniSharp_host'), endPoint)
parameters = urllib.urlencode(parameters)
try:
response = urllib2.urlopen(target, parameters)
return response.read()
except:
vim.command("call confirm('Could not connect to " + target + "')")
return ''
#All of these functions take vim variable names as parameters
def getCompletions(ret, column, partialWord):
parameters = {}
parameters['column'] = vim.eval(column)
parameters['wordToComplete'] = vim.eval(partialWord)
parameters['buffer'] = '\r\n'.join(vim.eval('s:textBuffer')[:])
js = getResponse('/autocomplete', parameters)
command_base = ("add(" + ret +
", {'word': '%(CompletionText)s', 'abbr': '%(DisplayText)s', 'info': \"%(Description)s\", 'icase': 1, 'dup':1 })")
if(js != ''):
completions = json.loads(js)
for completion in completions:
try:
command = command_base % completion
vim.eval(command)
except:
logger.error(command)
def findUsages(ret):
js = getResponse('/findusages')
if(js != ''):
usages = json.loads(js)['Usages']
populateQuickFix(ret, usages)
def populateQuickFix(ret, quickfixes):
command_base = ("add(" + ret + ", {'filename': '%(FileName)s', 'text': '%(Text)s', 'lnum': '%(Line)s', 'col': '%(Column)s'})")
if(quickfixes != None):
for quickfix in quickfixes:
quickfix["FileName"] = os.path.relpath(quickfix["FileName"])
try:
command = command_base % quickfix
vim.eval(command)
except:
logger.error(command)
def findImplementations(ret):
js = getResponse('/findimplementations')
if(js != ''):
usages = json.loads(js)['Locations']
command_base = ("add(" + ret +
", {'filename': '%(FileName)s', 'lnum': '%(Line)s', 'col': '%(Column)s'})")
if(len(usages) == 1):
usage = usages[0]
filename = usage['FileName']
if(filename != None):
if(filename != vim.current.buffer.name):
vim.command('e ' + usage['FileName'])
#row is 1 based, column is 0 based
vim.current.window.cursor = (usage['Line'], usage['Column'] - 1 )
else:
for usage in usages:
usage["FileName"] = os.path.relpath(usage["FileName"])
try:
command = command_base % usage
vim.eval(command)
except:
logger.error(command)
def gotoDefinition():
js = getResponse('/gotodefinition');
if(js != ''):
definition = json.loads(js)
filename = definition['FileName']
if(filename != None):
if(filename != vim.current.buffer.name):
vim.command('e ' + definition['FileName'])
#row is 1 based, column is 0 based
vim.current.window.cursor = (definition['Line'], definition['Column'] - 1 )
def getCodeActions():
js = getResponse('/getcodeactions')
if(js != ''):
actions = json.loads(js)['CodeActions']
for index, action in enumerate(actions):
print "%d : %s" % (index, action)
if len(actions) > 0:
return True
return False
def runCodeAction(option):
parameters = {}
parameters['codeaction'] = vim.eval(option)
js = getResponse('/runcodeaction', parameters);
text = json.loads(js)['Text']
if(text == None):
return
lines = text.splitlines()
cursor = vim.current.window.cursor
vim.command('normal ggdG')
lines = [line.encode('utf-8') for line in lines]
vim.current.buffer[:] = lines
vim.current.window.cursor = cursor
def findSyntaxErrors(ret):
js = getResponse('/syntaxerrors')
if(js != ''):
errors = json.loads(js)['Errors']
command_base = ("add(" + ret +
", {'filename': '%(FileName)s', 'text': '%(Message)s', 'lnum': '%(Line)s', 'col': '%(Column)s'})")
for err in errors:
try:
command = command_base % err
vim.eval(command)
except:
logger.error(command)
def typeLookup(ret):
js = getResponse('/typelookup');
if(js != ''):
type = json.loads(js)['Type']
if(type != None):
vim.command("let %s = '%s'" % (ret, type))
def renameTo(renameTo):
parameters = {}
parameters['renameto'] = vim.eval("a:renameto")
js = getResponse('/rename', parameters)
response = json.loads(js)
changes = response['Changes']
currentBuffer = vim.current.buffer.name
cursor = vim.current.window.cursor
for change in changes:
lines = change['Buffer'].splitlines()
lines = [line.encode('utf-8') for line in lines]
filename = change['FileName']
vim.command(':argadd ' + filename)
buffer = filter(lambda b: b.name != None and b.name.upper() == filename.upper(), vim.buffers)[0]
vim.command(':b ' + filename)
buffer[:] = lines
vim.command(':undojoin')
vim.command(':b ' + currentBuffer)
vim.current.window.cursor = cursor
def setBuffer(buffer):
lines = buffer.splitlines()
lines = [line.encode('utf-8') for line in lines]
vim.current.buffer[:] = lines
def build(ret):
response = json.loads(getResponse('/build'))
success = response["Success"]
if success:
print "Build succeeded"
else:
print "Build failed"
quickfixes = response['QuickFixes']
populateQuickFix(ret, quickfixes)
def codeFormat():
response = json.loads(getResponse('/codeformat'))
setBuffer(response["Buffer"])
|
Python
| 0.000038 |
@@ -1338,22 +1338,26 @@
nd(%22
-call confirm('
+echo 'OmniSharp :
Coul
@@ -1388,17 +1388,16 @@
get + %22'
-)
%22)%0A
|
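A hedged aside on the try/except pattern in the record above: if the %-interpolation itself raises, `command` is never bound, so the bare `except:` block's `logger.error(command)` raises a NameError of its own. A minimal sketch of a safer variant (`vim` is the module the record imports; the logger call is standard `logging`):

import logging
logger = logging.getLogger(__name__)

def add_completions(command_base, completions):
    # Log the offending input rather than a possibly-unbound local.
    for completion in completions:
        try:
            vim.eval(command_base % completion)
        except Exception:
            logger.exception("failed to add completion: %r", completion)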
a5bce21ccfb1a0d231a7d5d782c0e572d8bca86a
|
use sesolve instead of mesolve for unitary evolution in propagator
|
qutip/propagator.py
|
qutip/propagator.py
|
# This file is part of QuTiP.
#
# QuTiP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# QuTiP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with QuTiP. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright (C) 2011 and later, Paul D. Nation & Robert J. Johansson
#
###########################################################################
import types
import numpy as np
import scipy.linalg as la
import warnings
from qutip.qobj import Qobj
from qutip.rhs_generate import rhs_clear
from qutip.superoperator import vec2mat, mat2vec
from qutip.mesolve import mesolve
from qutip.essolve import essolve
from qutip.steadystate import steadystate
from qutip.states import basis
from qutip.states import projection
from qutip.odeoptions import Odeoptions
def propagator(H, t, c_op_list, args=None, options=None):
"""
Calculate the propagator U(t) for the density matrix or wave function such
that :math:`\psi(t) = U(t)\psi(0)` or
:math:`\\rho_{\mathrm vec}(t) = U(t) \\rho_{\mathrm vec}(0)`
where :math:`\\rho_{\mathrm vec}` is the vector representation of the
density matrix.
Parameters
----------
H : qobj or list
Hamiltonian as a Qobj instance of a nested list of Qobjs and
coefficients in the list-string or list-function format for
time-dependent Hamiltonians (see description in :func:`qutip.mesolve`).
t : float or array-like
Time or list of times for which to evaluate the propagator.
c_op_list : list
List of qobj collapse operators.
args : list/array/dictionary
Parameters to callback functions for time-dependent Hamiltonians and
collapse operators.
options : :class:`qutip.Odeoptions`
with options for the ODE solver.
Returns
-------
a : qobj
Instance representing the propagator :math:`U(t)`.
"""
if options is None:
options = Odeoptions()
options.rhs_reuse = True
rhs_clear()
elif options.rhs_reuse:
warnings.warn("propagator is using previously defined rhs function (options.rhs_reuse = True)")
tlist = [0, t] if isinstance(t, (int, float, np.int64, np.float64)) else t
if len(c_op_list) == 0:
# calculate propagator for the wave function
if isinstance(H, types.FunctionType):
H0 = H(0.0, args)
N = H0.shape[0]
dims = H0.dims
elif isinstance(H, list):
H0 = H[0][0] if isinstance(H[0], list) else H[0]
N = H0.shape[0]
dims = H0.dims
else:
N = H.shape[0]
dims = H.dims
u = np.zeros([N, N, len(tlist)], dtype=complex)
for n in range(0, N):
psi0 = basis(N, n)
output = mesolve(H, psi0, tlist, [], [], args, options)
for k, t in enumerate(tlist):
u[:, n, k] = output.states[k].full().T
# todo: evolving a batch of wave functions:
#psi_0_list = [basis(N, n) for n in range(N)]
#psi_t_list = mesolve(H, psi_0_list, [0, t], [], [], args, options)
#for n in range(0, N):
# u[:,n] = psi_t_list[n][1].full().T
else:
# calculate the propagator for the vector representation of the
# density matrix (a superoperator propagator)
if isinstance(H, types.FunctionType):
H0 = H(0.0, args)
N = H0.shape[0]
dims = [H0.dims, H0.dims]
elif isinstance(H, list):
H0 = H[0][0] if isinstance(H[0], list) else H[0]
N = H0.shape[0]
dims = [H0.dims, H0.dims]
else:
N = H.shape[0]
dims = [H.dims, H.dims]
u = np.zeros([N * N, N * N, len(tlist)], dtype=complex)
for n in range(0, N * N):
psi0 = basis(N * N, n)
rho0 = Qobj(vec2mat(psi0.full()))
output = mesolve(H, rho0, tlist, c_op_list, [], args, options)
for k, t in enumerate(tlist):
u[:, n, k] = mat2vec(output.states[k].full()).T
if len(tlist) == 2:
return Qobj(u[:, :, 1], dims=dims)
else:
return [Qobj(u[:, :, k], dims=dims) for k in range(len(tlist))]
def _get_min_and_index(lst):
"""
Private function for obtaining the minimum value and its index.
"""
minval, minidx = lst[0], 0
for i, v in enumerate(lst[1:]):
if v < minval:
minval, minidx = v, i + 1
return minval, minidx
def propagator_steadystate(U):
"""Find the steady state for successive applications of the propagator
:math:`U`.
Parameters
----------
U : qobj
Operator representing the propagator.
Returns
-------
a : qobj
Instance representing the steady-state density matrix.
"""
evals, evecs = la.eig(U.full())
ev_min, ev_idx = _get_min_and_index(abs(evals - 1.0))
evecs = evecs.T
rho = Qobj(vec2mat(evecs[ev_idx]))
rho = rho * (1.0 / rho.tr())
rho = 0.5 * (rho + rho.dag()) # make sure rho is herm
return rho
|
Python
| 0 |
@@ -1041,24 +1041,58 @@
ort mesolve%0A
+from qutip.sesolve import sesolve%0A
from qutip.e
@@ -3251,33 +3251,33 @@
output =
-m
+s
esolve(H, psi0,
@@ -3282,28 +3282,24 @@
, tlist, %5B%5D,
- %5B%5D,
args, optio
|
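A hedged sketch of what the diff above changes: with no collapse operators the evolution is unitary, so the Schrodinger solver can be used directly instead of the master-equation solver, skipping the superoperator machinery. Signatures follow the qutip API of that era, as used in the diff; the Hamiltonian and state are toy values:

from qutip import sesolve, basis, sigmax

H = sigmax()            # toy two-level Hamiltonian
psi0 = basis(2, 0)      # start in |0>
tlist = [0.0, 1.0]

# Matches the call the diff introduces: empty e_ops, no c_ops argument.
result = sesolve(H, psi0, tlist, [], args={})
print(result.states[-1])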
661766d003c85ded302052119bf54f0ae972b9fb
|
Fix site ID fallback when testing.
|
mezzanine/utils/sites.py
|
mezzanine/utils/sites.py
|
from __future__ import unicode_literals
import os
import sys
from django.contrib.sites.models import Site
from mezzanine.conf import settings
from mezzanine.core.request import current_request
def current_site_id():
"""
Responsible for determining the current ``Site`` instance to use
when retrieving data for any ``SiteRelated`` models. If a request
is available, and the site can be determined from it, we store the
site against the request for subsequent retrievals. Otherwise the
order of checks is as follows:
- ``site_id`` in session. Used in the admin so that admin users
can switch sites and stay on the same domain for the admin.
- host for the current request matched to the domain of the site
instance.
- ``MEZZANINE_SITE_ID`` environment variable, so management
commands or anything else outside of a request can specify a
site.
- ``SITE_ID`` setting.
"""
from mezzanine.utils.cache import cache_installed, cache_get, cache_set
request = current_request()
site_id = getattr(request, "site_id", None)
if request and not site_id:
site_id = request.session.get("site_id", None)
if not site_id:
domain = request.get_host().lower()
if cache_installed():
# Don't use Mezzanine's cache_key_prefix here, since it
# uses this very function we're in right now to create a
# per-site cache key.
bits = (settings.CACHE_MIDDLEWARE_KEY_PREFIX, domain)
cache_key = "%s.site_id.%s" % bits
site_id = cache_get(cache_key)
if not site_id:
try:
site = Site.objects.get(domain__iexact=domain)
except Site.DoesNotExist:
pass
else:
site_id = site.id
if cache_installed():
cache_set(cache_key, site_id)
if not site_id:
site_id = os.environ.get("MEZZANINE_SITE_ID", settings.SITE_ID)
if request and site_id:
request.site_id = site_id
return site_id
def has_site_permission(user):
"""
Checks if a staff user has staff-level access for the current site.
The actual permission lookup occurs in ``SitePermissionMiddleware``
which then marks the request with the ``has_site_permission`` flag,
so that we only query the db once per request, so this function
serves as the entry point for everything else to check access. We
also fall back to an ``is_staff`` check if the middleware is not
installed, to ease migration.
"""
mw = "mezzanine.core.middleware.SitePermissionMiddleware"
if mw not in settings.MIDDLEWARE_CLASSES:
from warnings import warn
warn(mw + " missing from settings.MIDDLEWARE_CLASSES - per site"
"permissions not applied")
return user.is_staff and user.is_active
return getattr(user, "has_site_permission", False)
def host_theme_path(request):
"""
Returns the directory of the theme associated with the given host.
"""
for (host, theme) in settings.HOST_THEMES:
if host.lower() == request.get_host().split(":")[0].lower():
try:
__import__(theme)
module = sys.modules[theme]
except ImportError:
pass
else:
return os.path.dirname(os.path.abspath(module.__file__))
return ""
def templates_for_host(request, templates):
"""
Given a template name (or list of them), returns the template names
as a list, with each name prefixed with the device directory
inserted into the front of the list.
"""
if not isinstance(templates, (list, tuple)):
templates = [templates]
theme_dir = host_theme_path(request)
host_templates = []
if theme_dir:
for template in templates:
host_templates.append("%s/templates/%s" % (theme_dir, template))
host_templates.append(template)
return host_templates
return templates
|
Python
| 0 |
@@ -1991,16 +1991,94 @@
ite_id)%0A
+ if request and site_id:%0A request.site_id = site_id%0A
if n
@@ -2179,32 +2179,76 @@
uest and site_id
+ and not getattr(settings, %22TESTING%22, False)
:%0A reques
|
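A hedged distillation of the guard the diff above adds: the resolved site id is memoized on the request so later lookups short-circuit, except when a TESTING flag is set (the flag is project-specific, not a stock Django setting):

def remember_site_id(request, site_id, settings):
    # Skip memoization under tests so each lookup re-resolves the site.
    if request and site_id and not getattr(settings, "TESTING", False):
        request.site_id = site_id
    return site_id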
ba50883881d3e652c1175489e16c3c5839807feb
|
add new: GWinstek GDS-840S, RS-232 connection.
|
serial/serialGDS840S.py
|
serial/serialGDS840S.py
|
Python
| 0 |
@@ -0,0 +1,706 @@
+#!/usr/bin/env python%0A%0A# RS-232 serial support for GW Instek GDS-840S Digital Storage Oscilloscope%0A# http://www.gwinstek.com/html/en/DownloadFile.asp?sn=255&uid=&lv=%0A# Filename: 82DS-82000IA.pdf%0A%0Aimport serial%0A%0A# Values set on unit manually (but these are standard settings)%0Aser = serial.Serial('/dev/ttyUSB0',baudrate=38400, bytesize=8, stopbits=1, %5C%0A parity=serial.PARITY_NONE, timeout=3)%0A%0Aser.open()%0A%0Adef sendCmd(handler,command):%0A handler.write(%22%25s%5Cn%22 %25(command))%0A%0Adef recvCmd(handler):%0A return handler.readline().strip()%0A %0A %0AsendCmd(ser, %22*IDN?%22)%0Aid = ser.readline()%0Aprint id%0A%0A#~ sendCmd(ser, %22:AUToset%22)%0A%0AsendCmd(ser, %22:MEASure:FREQuency?%22)%0Afreq = recvCmd(ser)%0Aprint freq%0A%0Aser.close()%0A
|
|
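One hedged caveat about the script this record adds: with pyserial, passing a port name to Serial() opens the device immediately, so the explicit ser.open() is redundant and raises SerialException on pyserial 3.x. A minimal sketch that sidesteps it with a context manager (port name and command are illustrative):

import serial

def query(command, port="/dev/ttyUSB0"):
    # Serial supports the with-statement on pyserial 3.x; closes on exit.
    with serial.Serial(port, baudrate=38400, timeout=3) as ser:
        ser.write(("%s\n" % command).encode("ascii"))
        return ser.readline().strip()

# e.g. query("*IDN?") to fetch the instrument identification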
096087b4fddf9bc2644bcbb71834fcfc5985558c
|
add flash order
|
scripts/flash-order.py
|
scripts/flash-order.py
|
Python
| 0 |
@@ -0,0 +1,3168 @@
+#!/usr/bin/python3%0A%0Afrom pubnub.pnconfiguration import PNConfiguration%0Afrom pubnub.pubnub import PubNub%0A%0Apnconfig = PNConfiguration()%0Apnconfig.subscribe_key = %22my_subkey%22%0Apnconfig.publish_key = %22my_pubkey%22%0Apnconfig.ssl = False%0A%0Apubnub = PubNub(pnconfig)%0A%0Afrom pubnub.callbacks import SubscribeCallback%0Afrom pubnub.enums import PNOperationType, PNStatusCategory%0A%0Aclass MySubscribeCallback(SubscribeCallback):%0A def status(self, pubnub, status):%0A pass%0A # The status object returned is always related to subscribe but could contain%0A # information about subscribe, heartbeat, or errors%0A # use the operationType to switch on different options%0A if status.operation == PNOperationType.PNSubscribeOperation %5C%0A or status.operation == PNOperationType.PNUnsubscribeOperation:%0A if status.category == PNStatusCategory.PNConnectedCategory:%0A pass%0A # This is expected for a subscribe, this means there is no error or issue whatsoever%0A elif status.category == PNStatusCategory.PNReconnectedCategory:%0A pass%0A # This usually occurs if subscribe temporarily fails but reconnects. This means%0A # there was an error but there is no longer any issue%0A elif status.category == PNStatusCategory.PNDisconnectedCategory:%0A pass%0A # This is the expected category for an unsubscribe. This means there%0A # was no error in unsubscribing from everything%0A elif status.category == PNStatusCategory.PNUnexpectedDisconnectCategory:%0A pass%0A # This is usually an issue with the internet connection, this is an error, handle%0A # appropriately retry will be called automatically%0A elif status.category == PNStatusCategory.PNAccessDeniedCategory:%0A pass%0A # This means that PAM does allow this client to subscribe to this%0A # channel and channel group configuration. This is another explicit error%0A else:%0A pass%0A # This is usually an issue with the internet connection, this is an error, handle appropriately%0A # retry will be called automatically%0A elif status.operation == PNOperationType.PNSubscribeOperation:%0A # Heartbeat operations can in fact have errors, so it is important to check first for an error.%0A # For more information on how to configure heartbeat notifications through the status%0A # PNObjectEventListener callback, consult %3Clink to the PNCONFIGURATION heartbeart config%3E%0A if status.is_error():%0A pass%0A # There was an error with the heartbeat operation, handle here%0A else:%0A pass%0A # Heartbeat operation was successful%0A else:%0A pass%0A # Encountered unknown status type%0A %0A def presence(self, pubnub, presence):%0A pass # handle incoming presence data%0A %0A def message(self, pubnub, message):%0A pass # handle incoming messages%0A %0A %0Apubnub.add_listener(MySubscribeCallback())%0A
|
|
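The record above already wires the listener in with add_listener; a hedged sketch of actually subscribing and publishing with the PubNub v4 Python SDK's builder calls (the channel name is illustrative):

pubnub.subscribe().channels("flash_orders").execute()
envelope = pubnub.publish().channel("flash_orders").message({"order": 1}).sync()
print(envelope.status.is_error())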
7dee9be2022bdf481bc5bc6766684058fd9d44e5
|
add script for generating the manifest for a given package
|
scripts/genmanifest.py
|
scripts/genmanifest.py
|
Python
| 0.000002 |
@@ -0,0 +1,1229 @@
+#!/usr/bin/python%0A#%0A# Copyright (c) 2008 rPath, Inc.%0A#%0A# This program is distributed under the terms of the Common Public License,%0A# version 1.0. A copy of this license should have been distributed with this%0A# source file in a file called LICENSE. If it is not present, the license%0A# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# without any warranty; without even the implied warranty of merchantability%0A# or fitness for a particular purpose. See the Common Public License for%0A# full details.%0A#%0A%0Aimport os%0Aimport sys%0A%0Asys.path.insert(0, os.environ%5B'HOME'%5D + '/hg/rpath-xmllib')%0Asys.path.insert(0, os.environ%5B'HOME'%5D + '/hg/conary')%0Asys.path.insert(0, os.environ%5B'HOME'%5D + '/hg/mirrorball')%0A%0Afrom conary.lib import util%0Asys.excepthook = util.genExcepthook()%0A%0Afrom updatebot import bot, config, log%0A%0Alog.addRootLogger()%0Acfg = config.UpdateBotConfig()%0Acfg.read(os.environ%5B'HOME'%5D + '/hg/mirrorball/config/centos/updatebotrc')%0Aobj = bot.Bot(cfg)%0A%0Aobj._populatePkgSource()%0A%0ApkgName = sys.argv%5B1%5D%0A%0AsrcPkg = obj._updater._getPackagesToImport(pkgName)%0Amanifest = obj._updater._getManifestFromPkgSource(srcPkg)%0Aprint '%5Cn'.join(manifest)%0A
|
|
0ca7d4a20c8a65e45ddb7c61ca72c0e6c464a80e
|
Create template_redacted entry for templates created by migration
|
migrations/versions/0296_template_redacted_fix.py
|
migrations/versions/0296_template_redacted_fix.py
|
Python
| 0 |
@@ -0,0 +1,617 @@
+%22%22%22%0A%0ARevision ID: 0296_template_redacted_fix%0ARevises: 0295_api_key_constraint%0ACreate Date: 2019-06-07 17:02:14.350064%0A%0A%22%22%22%0Afrom alembic import op%0A%0A%0Arevision = '0296_template_redacted_fix'%0Adown_revision = '0295_api_key_constraint'%0A%0A%0Adef upgrade():%0A op.execute(%22%22%22%0A INSERT INTO template_redacted (template_id, redact_personalisation, updated_at, updated_by_id)%0A SELECT templates.id, FALSE, now(), templates.created_by_id%0A FROM templates%0A WHERE templates.id NOT IN (SELECT template_id FROM template_redacted WHERE template_id = templates.id)%0A ;%0A %22%22%22)%0A%0A%0Adef downgrade():%0A pass%0A
|
|
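A hedged note on the backfill above: the NOT IN guard re-correlates templates.id against itself inside the subquery, which works but reads oddly; NOT EXISTS is the conventional form of the same condition. A sketch against the same tables, inside an Alembic upgrade():

from alembic import op

def upgrade():
    # Insert a redaction row only for templates that lack one.
    op.execute("""
        INSERT INTO template_redacted
            (template_id, redact_personalisation, updated_at, updated_by_id)
        SELECT t.id, FALSE, now(), t.created_by_id
        FROM templates t
        WHERE NOT EXISTS (
            SELECT 1 FROM template_redacted tr WHERE tr.template_id = t.id
        )
    """)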
a38f18b8c51ad83b5c4b92853fa5640137131ad9
|
prints a sequence of GTINs, computing the check digit
|
script/gera_gtin.py
|
script/gera_gtin.py
|
Python
| 0.000001 |
@@ -0,0 +1,212 @@
+from gtin import GTIN%0A%0A%0Acountry = 789%0Acompany = 96188%0Aproduct = 7251%0Aquant = 127%0A%0Afor incr in range(quant):%0A numero_gtin = '%7B%7D%7B%7D%7B%7D'.format(country, company, product+incr)%0A print(str(GTIN(raw=numero_gtin)))%0A
|
|
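For reference, the check digit the GTIN class computes follows the standard GS1 algorithm: weight the payload digits 3,1,3,1,... from the right and round the sum up to the next multiple of ten. A self-contained sketch:

def gtin_check_digit(payload):
    """Check digit for a GTIN payload (all digits except the last)."""
    total = sum(int(d) * (3 if i % 2 == 0 else 1)
                for i, d in enumerate(reversed(payload)))
    return (10 - total % 10) % 10

assert gtin_check_digit("400638133393") == 1   # EAN-13 4006381333931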
2611476df6f362cd59e4aad38a243fc8f6cbf8a8
|
Purge the talk page when a speaker is saved
|
devincachu/purger.py
|
devincachu/purger.py
|
# -*- coding: utf-8 -*-
import roan
from django.contrib.flatpages import models
from palestras import models as pmodels
def connect():
flatpages = models.FlatPage.objects.all()
for f in flatpages:
roan.purge(f.url).on_save(models.FlatPage)
palestras = pmodels.Palestra.objects.all()
for p in palestras:
roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestra)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestra)
|
Python
| 0 |
@@ -485,28 +485,214 @@
on_delete(pmodels.Palestra)%0A
+ roan.purge(p.get_absolute_url_and_link_title()%5B'url'%5D).on_save(pmodels.Palestrante)%0A roan.purge(p.get_absolute_url_and_link_title()%5B'url'%5D).on_delete(pmodels.Palestrante)%0A
|
a01d471bc8bd171de9301b428466f7ccba93872b
|
Revert "Made the scheduling of Queue.get more fair."
|
diesel/util/queue.py
|
diesel/util/queue.py
|
from time import time
from uuid import uuid4
from collections import deque
from diesel import wait, fire, sleep, first
class QueueEmpty(Exception): pass
class QueueTimeout(Exception): pass
class Queue(object):
def __init__(self):
self.inp = deque()
self.waiters = deque()
def put(self, i=None):
self.inp.append(i)
if self.waiters:
wait_id = self.waiters.popleft()
fire(wait_id)
def get(self, waiting=True, timeout=None):
start = time()
if not self.inp and waiting:
wait_id = uuid4().hex
self.waiters.append(wait_id)
while not self.inp and waiting:
if timeout:
remaining = timeout - (time() - start)
if remaining <= 0:
raise QueueTimeout()
else:
first(waits=[wait_id], sleep=remaining)
else:
wait(wait_id)
if self.inp:
return self.inp.popleft()
elif not waiting:
raise QueueEmpty()
def __iter__(self):
return self
def next(self):
return self.get()
@property
def is_empty(self):
return not bool(self.inp)
if __name__ == '__main__':
from diesel import Application, Loop, sleep
app = Application()
queue = Queue()
def worker():
sleep(0.25)
queue.put(1)
queue.put(2)
def consumer_no_wait():
try:
queue.get(waiting=False)
except QueueEmpty:
pass
else:
assert False
def consumer_timeout():
try:
queue.get(timeout=0.1)
except QueueTimeout:
pass
else:
assert False
def consumer(expected):
val = queue.get()
assert expected == val, '%s != %s' % (expected, val)
if queue.is_empty:
print 'success!'
app.halt()
app.add_loop(Loop(worker))
app.add_loop(Loop(consumer_no_wait))
app.add_loop(Loop(consumer_timeout))
app.add_loop(Loop(lambda: consumer(1)))
app.add_loop(Loop(lambda: consumer(2)))
app.run()
|
Python
| 0 |
@@ -247,21 +247,29 @@
elf.
-inp = deque()
+wait_id = uuid4().hex
%0A
@@ -278,23 +278,19 @@
self.
-waiters
+inp
= deque
@@ -359,277 +359,96 @@
-if self.waiters:%0A wait_id = self.waiters.popleft()%0A fire(wait_id)%0A%0A def get(self, waiting=True, timeout=None):%0A start = time()%0A if not self.inp and waiting:%0A wait_id = uuid4().hex%0A self.waiters.append(wait_id
+fire(self.wait_id)%0A%0A def get(self, waiting=True, timeout=None):%0A start = time(
)%0A
@@ -695,16 +695,21 @@
(waits=%5B
+self.
wait_id%5D
@@ -766,16 +766,21 @@
wait(
+self.
wait_id)
@@ -897,16 +897,17 @@
Empty()%0A
+%0A
def
|
4fe11f89c008909dd21451ac0e23dce86de7c849
|
Add profiling dev script.
|
dev_scripts/profile_structure.py
|
dev_scripts/profile_structure.py
|
Python
| 0.000098 |
@@ -0,0 +1,402 @@
+#!/usr/bin/env python%0A%0Afrom pymatgen.io.vaspio import Poscar%0Aimport cProfile%0Aimport pstats%0Aimport os%0A%0Ap = Poscar.from_file(%22../test_files/POSCAR.LiFePO4%22, check_for_POTCAR=False)%0As = p.structure%0A%0Adef test():%0A nn = s.get_sites_in_sphere(%5B0, 0, 0%5D, 20)%0A print len(nn)%0A%0AcProfile.run('test()', 'testprof')%0Ap = pstats.Stats('testprof')%0Ap.sort_stats('cumulative').print_stats(20)%0Aos.remove(%22testprof%22)%0A
|
|
ea30b49012af2003049f4b1b7deeecb1232c7513
|
Create permutations.py
|
permutations.py
|
permutations.py
|
Python
| 0.000202 |
@@ -0,0 +1,249 @@
+#Kunal Gautam%0A#Codewars : @Kunalpod%0A#Problem name: Permutations%0A#Problem level: 4 kyu%0A%0Afrom itertools import groupby, permutations as perm%0Adef permutations(string):%0A return %5Bk for k,_ in groupby(sorted(%5B''.join(comb) for comb in perm(string)%5D))%5D%0A
|
|
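An equivalent, hedged formulation of the snippet above: groupby over a sorted list only merges adjacent duplicates, which after sorting is all of them, so sorted(set(...)) produces the same output:

from itertools import permutations as perm

def permutations(string):
    return sorted(set(''.join(p) for p in perm(string)))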
eb943bb45695472483352978060a94e0d48b5e4a
|
Add scatterplot
|
plot/scatter.py
|
plot/scatter.py
|
Python
| 0.000006 |
@@ -0,0 +1,693 @@
+import matplotlib.pyplot as plt%0A%0Adef plot_scatter(x, y, ax=None, color=None, alpha=None, size=None, labels=None, title=%22Scatterplot%22, figsize=(10,6)):%0A # TODO: Add x, and y labels%0A # TODO: grid%0A if ax is None:%0A fig, ax = plt.subplots(figsize=figsize)%0A fig.suptitle(title, fontsize=15)%0A else:%0A fig = ax.get_figure()%0A ax.scatter(x=x, y=y, c=color, alpha=alpha, s=size)%0A%0A # LABEL - each of the points%0A if labels is not None:%0A for xx, yy, label in zip(x, y, labels):%0A plt.annotate(label, xy=(xx, yy), xytext=(7, 0),%0A textcoords='offset points',%0A ha='left', va='center')%0A return fig, ax%0A
|
|
02f207269f7d2773919e520e04ab8f9261357d4b
|
Add isup plugin
|
plugins/isup.py
|
plugins/isup.py
|
Python
| 0.000001 |
@@ -0,0 +1,747 @@
+import requests%0Aimport urllib.parse%0A%0Aclass Plugin:%0A def __call__(self, bot):%0A bot.on_respond(r%22is (.*) (up%7Cdown)(%5C?)?%22, self.on_respond)%0A bot.on_respond(r%22isup (.*)$%22, self.on_respond)%0A bot.on_help(%22isup%22, self.on_help)%0A%0A def on_respond(self, bot, msg, reply):%0A url = %22http://isitup.org/%22 + urllib.parse.quote(msg%5B%22match%22%5D.group(1)) + %22.json%22%0A headers = %7B %22User-Agent%22: %22SmartBot%22 %7D%0A%0A res = requests.get(url, headers=headers).json()%0A if res%5B%22status_code%22%5D == 1:%0A reply(%22%7B0%7D looks up for me.%22.format(res%5B%22domain%22%5D))%0A else:%0A reply(%22%7B0%7D looks down for me.%22.format(res%5B%22domain%22%5D))%0A%0A def on_help(self, bot, msg, reply):%0A reply(%22Syntax: is %3Cdomain%3E up%7Cdown%22)%0A
|
|
8c6b412e01e81a7c062ba8234ebafc6fca61651c
|
Add shovel test.quick for sanity check before pushing
|
shovel/test.py
|
shovel/test.py
|
Python
| 0 |
@@ -0,0 +1,837 @@
+# coding: utf-8%0Afrom __future__ import absolute_import, division, print_function%0A%0Aimport subprocess%0Afrom collections import OrderedDict%0A%0Afrom shovel import task%0A%0A%0A@task%0Adef quick():%0A failed = OrderedDict.fromkeys(%0A %5B'test', 'docs', 'spelling', 'doc8', 'flake8'%5D, False)%0A%0A failed%5B'tests'%5D = bool(subprocess.call(%5B'py.test', 'astrodynamics/'%5D))%0A failed%5B'docs'%5D = bool(subprocess.call(%0A %5B'sphinx-build', '-W', '-b', 'html', 'docs', 'docs/_build/html'%5D))%0A failed%5B'spelling'%5D = bool(subprocess.call(%5B%0A 'sphinx-build', '-W', '-b', 'spelling', 'docs', 'docs/_build/html'%5D))%0A failed%5B'doc8'%5D = bool(subprocess.call(%5B'doc8', 'docs'%5D))%0A failed%5B'flake8'%5D = bool(subprocess.call(%5B'flake8'%5D))%0A%0A print('%5CnSummary:')%0A for k, v in failed.items():%0A print('%7B:8s%7D: %7B%7D'.format(k, 'Fail' if v else 'Pass'))%0A
|
|
cdcc45eb6982e68415632a8bcfbc5e7596e0a1cf
|
add resize_logos.py
|
scripts/resize_logos.py
|
scripts/resize_logos.py
|
Python
| 0.000001 |
@@ -0,0 +1,993 @@
+#!/usr/bin/env python%0A%0Aimport os%0Aimport os.path as op%0Afrom PIL import Image%0A%0AdirPath = op.abspath('./logos')%0Aout_dir = op.join(dirPath, 'resize')%0A%0Aif not op.exists(out_dir):%0A os.mkdir(out_dir)%0A%0Asupported_formats = %5B'png', 'gif', 'jpg'%5D%0A%0Afor img_file in os.listdir(dirPath):%0A%0A if img_file%5B-3:%5D not in supported_formats:%0A print('Extension for file %7B%7D not supported, skipped.'.format(img_file))%0A continue%0A%0A print(img_file)%0A img_name = img_file%5B:-4%5D%0A print(img_name)%0A%0A fpath = os.path.join(dirPath, img_file)%0A outPath = os.path.join(out_dir, img_name)%0A%0A img = Image.open(fpath)%0A if img.mode == %22CMYK%22:%0A img = img.convert(%22RGB%22)%0A%0A img.thumbnail((190, 90), Image.ANTIALIAS)%0A img_w, img_h = img.size%0A%0A background = Image.new('RGBA', (190, 90), (255, 255, 255, 255))%0A bg_w, bg_h = background.size%0A%0A offset = int((bg_w - img_w) / 2), int((bg_h - img_h) / 2)%0A%0A background.paste(img, offset)%0A%0A background.save(outPath+%22_thumb.png%22)%0A%0A
|
|
ff994f8bfd7642fc95694d511a1cec81d0ba8f4d
|
fix bugs
|
plstackapi/planetstack/api/sites.py
|
plstackapi/planetstack/api/sites.py
|
from plstackapi.openstack.client import OpenStackClient
from plstackapi.openstack.driver import OpenStackDriver
from plstackapi.planetstack.api.auth import auth_check
from plstackapi.planetstack.models import Site
def add_site(auth, **fields):
driver = OpenStackDriver(client = auth_check(auth))
site = Site(**fields)
nova_fields = {'tenant_name': fields['login_base'],
'description': fields['name',
'enabled': fields['enabled']}
tenant = driver.create_tenant(**nova_fields)
site.tenant_id=tenant.id
site.save()
return role
def update_site(auth, tenant_id, **fields):
driver = OpenStackDriver(client = auth_check(auth))
sites = Site.objects.filter(tenant_id=tenant_id)
if not sites:
return
site = Site[0]
nova_fields = {}
if 'description' in fields:
nova_fields['description'] = fields['name']
if 'enabled' in fields:
nova_fields['enabled'] = fields['enabled']
site.updtae(**fields)
return site
def delete_site(auth, filter={}):
driver = OpenStackDriver(client = auth_check(auth))
sites = Site.objects.filter(**filter)
for site in sites:
driver.delete_tenant({'id': site.tenant_id})
site.delete()
return 1
def get_sites(auth, filter={}):
client = auth_check(auth)
sites = Site.objects.filter(**filter)
return sites
|
Python
| 0.000001 |
@@ -425,16 +425,17 @@
s%5B'name'
+%5D
,%0A
|
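The diff above only restores the bracket missing from fields['name']; other defects visible in the record remain (add_site returns the undefined name role, update_site indexes Site[0] instead of sites[0], and calls the misspelled site.updtae). A hedged sketch of update_site with those corrected; driver.update_tenant is assumed to exist on the driver, mirroring create_tenant and delete_tenant:

def update_site(auth, tenant_id, **fields):
    driver = OpenStackDriver(client=auth_check(auth))
    sites = Site.objects.filter(tenant_id=tenant_id)
    if not sites:
        return None
    nova_fields = {}
    if 'description' in fields:
        nova_fields['description'] = fields['name']
    if 'enabled' in fields:
        nova_fields['enabled'] = fields['enabled']
    driver.update_tenant(tenant_id, **nova_fields)  # assumed driver method
    sites.update(**fields)  # QuerySet.update; model instances have no .update()
    return sites[0]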
1be4972ca39408b8d4770b5722642996908c9a70
|
add 5-for.py
|
python/5-for.py
|
python/5-for.py
|
Python
| 0.003482 |
@@ -0,0 +1,288 @@
+#!/usr/bin/env python%0A%0A%0Afor letter in 'python':%0A print %22Current character is %22, letter%0A%0A%0Afruits = %5B'banana', 'apple', 'mango'%5D%0Afor fruit in fruits:%0A print %22Current fruit is %22, fruit%0A%0Afor index in range(len(fruits)):%0A print %22Current fruit is %22, fruits%5Bindex%5D%0A%0A%0Aprint %22Good bye!%22%0A%0A
|
|
4cef0dc3af25ec4c781ed04b28d425374f793702
|
add socket comm class
|
socket_communication.py
|
socket_communication.py
|
Python
| 0 |
@@ -0,0 +1,523 @@
+#!/usr/bin/env python%0A%0Aimport socket%0A%0Aclass SocketCommunication:%0A def __init__(self):%0A self.RECEIVER_HOST = '192.168.1.4' # The remote host%0A self.PORT = 3000 # The same port as used by the server%0A%0A def open(self):%0A self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) %0A self.sock.connect((self.RECEIVER_HOST, self.PORT))%0A%0A def close(self):%0A self.sock.close()%0A%0A def communicate(self, data):%0A self.sock.send(data) %0A%0Adef main():%0A pass #TODO: add example%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
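Since the class above uses SOCK_DGRAM, connect()/send() merely fix a default destination for UDP datagrams; nothing is received unless a peer binds the port. A hedged sketch of the matching receiver:

import socket

def receive_loop(host="0.0.0.0", port=3000):
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind((host, port))
    while True:
        data, addr = sock.recvfrom(4096)  # one datagram per call
        print(addr, data)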
d0f105e1aa50b74025eed0a1e32561c0f16b9ef0
|
create counter for facilitate_moves
|
source/referee_chair.py
|
source/referee_chair.py
|
from facilitator_credentials import Facilitator
from announcer_chair import Announcer
from time import sleep
class Referee(Facilitator):
def __init__(self, table_top, player1, player2):
self.announcer = Announcer()
self.table_top = table_top
self.player1 = player1
self.player2 = player2
self.whos_turn = self.player1
def start_game(self):
self.announcer.show(self.announcer.start)
self.select_mode()
def select_mode(self):
self.announcer.show(self.announcer.select)
select = self.ask_human()
if select == '1':
self.facilitate_turns()
elif select == '2':
self.prep_next_turn()
self.facilitate_turns()
else:
self.announcer.show(self.announcer.bad_move)
self.game_menu()
def facilitate_turns(self):
self.show_board()
self.whos_turn.move(self.table_top.board)
sleep(1.75)
the_game_is_over = self.check_for_game_over()
if the_game_is_over != False:
self.game_over(the_game_is_over)
else:
self.prep_next_turn()
self.facilitate_turns()
def prep_next_turn(self):
if self.whos_turn == self.player1:
self.whos_turn = self.player2
elif self.whos_turn == self.player2:
self.whos_turn = self.player1
def game_over(self, winner):
self.show_board()
if winner == "tie":
self.announcer.show(self.announcer.tie)
elif winner == "computer":
self.announcer.show(self.announcer.computer)
elif winner == "human":
self.announcer.show(self.announcer.human)
def ask_human(self):
return raw_input('> ')
def show_board(self):
board = self.announcer.render_board(self.table_top.board)
self.announcer.show(board)
def check_for_game_over(self):
tie = self.check_for_tie()
winner = self.check_for_winner()
if winner == True:
return self.whos_turn.id
elif tie == True:
return "tie"
else:
return False
def check_for_tie(self):
is_it_a_tie = True
for number in self.table_top.board:
if number == 0:
return False
return is_it_a_tie
def check_for_winner(self):
board = self.table_top.board
p1_win = self.get_board_size(board)
p2_win = p1_win * 10
win_list = self.get_win_list()
did_they_win = False
for i in range(0, len(win_list)):
win_factor = 0
for j in win_list[i]:
win_factor += board[j]
if win_factor == p1_win or win_factor == p2_win:
did_they_win = True
return did_they_win
def get_win_list(self):
board = self.table_top.board
board_size = self.get_board_size(board)
win_list = []
win_list.extend(self.get_winning_rows(board_size))
win_list.extend(self.get_winning_cols(board_size))
win_list.extend(self.get_winning_diags(board_size))
return win_list
def get_winning_rows(self, board_size):
winning_rows = self.get_empty_list(board_size)
start_coord = 0
for i in range(0, board_size):
stop_coord = start_coord + board_size
for j in range(start_coord, stop_coord):
winning_rows[i].append(j)
start_coord += board_size
return winning_rows
def get_winning_cols(self, board_size):
winning_cols = self.get_empty_list(board_size)
start_coord = 0
stop_coord = board_size * board_size
for i in range(0, board_size):
for j in range(start_coord, stop_coord, board_size):
winning_cols[i].append(j)
start_coord += 1
return winning_cols
def get_empty_list(self, board_size):
empty_list = []
for i in range(0, board_size):
empty_list.append([])
return empty_list
def get_winning_diags(self, board_size):
winning_diags = []
winning_diags.append(self.get_NW_SE_diag(board_size))
winning_diags.append(self.get_SW_NE_diag(board_size))
return winning_diags
def get_NW_SE_diag(self, board_size):
diag = []
coord = 0
for i in range(0, board_size):
diag.append(coord)
coord += board_size + 1
return diag
def get_SW_NE_diag(self, board_size):
diag = []
coord = board_size -1
for i in range(0, board_size):
diag.append(coord)
coord += board_size - 1
return diag
|
Python
| 0 |
@@ -356,16 +356,45 @@
.player1
+%0A self.moves_taken = 0
%0A%0A de
@@ -964,32 +964,62 @@
able_top.board)%0A
+ self.moves_taken += 1%0A
sleep(1.
@@ -1022,9 +1022,9 @@
p(1.
-7
+2
5)%0A
@@ -1998,43 +1998,8 @@
f):%0A
- tie = self.check_for_tie()%0A
@@ -2116,19 +2116,29 @@
lif
-tie == True
+self.moves_taken == 9
:%0A
@@ -2166,32 +2166,32 @@
%22%0A else:%0A
+
retu
@@ -2204,193 +2204,8 @@
se%0A%0A
- def check_for_tie(self):%0A is_it_a_tie = True%0A for number in self.table_top.board:%0A if number == 0:%0A return False%0A return is_it_a_tie%0A%0A
|
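A hedged generalization of the change above: the diff replaces the board scan with a counter but hardcodes 9, which pins the referee to a 3x3 board. Deriving the limit from the board keeps the tie check size-independent:

def board_is_full(self):
    # Tie when every cell has been claimed, whatever the board size.
    return self.moves_taken == len(self.table_top.board)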
50a7b9bc262c98f4e387746f67a638f50f94ba38
|
Add migration merge
|
portal/migrations/versions/e396fb1974ef_.py
|
portal/migrations/versions/e396fb1974ef_.py
|
Python
| 0.000001 |
@@ -0,0 +1,351 @@
+from alembic import op%0Aimport sqlalchemy as sa%0A%0A%0A%22%22%22empty message%0A%0ARevision ID: e396fb1974ef%0ARevises: ('773b1de060dd', '3271a78bbc8b')%0ACreate Date: 2018-04-24 12:19:56.689921%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = 'e396fb1974ef'%0Adown_revision = ('773b1de060dd', '3271a78bbc8b')%0A%0A%0Adef upgrade():%0A pass%0A%0A%0Adef downgrade():%0A pass%0A
|
|
85c67110db1fbb5a25faef36bdfe282952f5a034
|
Create __init__.py
|
src/__init__.py
|
src/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
d686f54aff87d4dea1266ee1fec8c1c320dee5b9
|
add tests for fizz_buzz.
|
test_fizz_buzz.py
|
test_fizz_buzz.py
|
Python
| 0 |
@@ -0,0 +1,662 @@
+from cStringIO import StringIO%0Aimport sys%0Aimport fizz_buzz%0A%0A%0Aclass Capture(list):%0A %22%22%22Context manager for capturing stdout.%22%22%22%0A def __enter__(self):%0A self._stdout = sys.stdout%0A sys.stdout = self._stringio = StringIO()%0A return self%0A%0A def __exit__(self, *args):%0A self.extend(self._stringio.getvalue().splitlines())%0A sys.stdout = self._stdout%0A%0A%0Adef test_fizz_buzz():%0A expected = %5B'1 2 F 4 B F 7 8 F B', '1 F 3 F 5 F B F 9 F 11 F 13 FB 15'%5D%0A with Capture() as output:%0A fizz_buzz.main(%22input_test.txt%22)%0A assert len(expected) == len(output)%0A for ex, act in zip(expected, output):%0A assert ex == act%0A
|
|
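A hedged modern alternative to the hand-rolled Capture class above: contextlib.redirect_stdout (Python 3.4+) swaps sys.stdout for you:

import io
from contextlib import redirect_stdout

def capture_lines(func, *args):
    buf = io.StringIO()
    with redirect_stdout(buf):
        func(*args)
    return buf.getvalue().splitlines()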
7d4148a683d8d1a70e81fd1033c535738bab2533
|
Fix bug of break when press "[/]" when app start
|
NEMbox/player.py
|
NEMbox/player.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: omi
# @Date: 2014-07-15 15:48:27
# @Last Modified by: omi
# @Last Modified time: 2015-01-30 18:05:08
'''
网易云音乐 Player
'''
# Let's make some noise
import subprocess
import threading
import time
import os
import signal
import random
import re
from ui import Ui
# carousel x in [left, right]
carousel = lambda left, right, x: left if (x > right) else (right if x < left else x)
class Player:
def __init__(self):
self.ui = Ui()
self.datatype = 'songs'
self.popen_handler = None
# flag stop, prevent thread start
self.playing_flag = False
self.pause_flag = False
self.songs = []
self.idx = 0
self.volume = 60
self.process_length = 0
self.process_location = 0
self.process_first = False
def popen_recall(self, onExit, popenArgs):
"""
Runs the given args in a subprocess.Popen, and then calls the function
onExit when the subprocess completes.
onExit is a callable object, and popenArgs is a lists/tuple of args that
would give to subprocess.Popen.
"""
def runInThread(onExit, popenArgs):
self.popen_handler = subprocess.Popen(['mpg123', '-R', ], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
#self.popen_handler.stdin.write("SILENCE\n")
self.popen_handler.stdin.write("V " + str(self.volume) + "\n")
self.popen_handler.stdin.write("L " + popenArgs + "\n")
self.process_first = True
# self.popen_handler.wait()
while (True):
if self.playing_flag == False:
break
try:
strout = self.popen_handler.stdout.readline()
except IOError:
break
if re.match("^\@F.*$", strout):
process_data = strout.split(" ")
process_location = float(process_data[4])
if self.process_first:
self.process_length = process_location
self.process_first = False
self.process_location = 0
else:
self.process_location = self.process_length - process_location
continue
if strout == "@P 0\n":
self.popen_handler.stdin.write("Q\n")
self.popen_handler.kill()
break
if self.playing_flag:
self.idx = carousel(0, len(self.songs) - 1, self.idx + 1)
onExit()
return
thread = threading.Thread(target=runInThread, args=(onExit, popenArgs))
thread.start()
# returns immediately after the thread starts
return thread
def recall(self):
self.playing_flag = True
item = self.songs[self.idx]
self.ui.build_playinfo(item['song_name'], item['artist'], item['album_name'], item['quality'], time.time())
self.popen_recall(self.recall, item['mp3_url'])
def play(self, datatype, songs, idx):
# if same playlists && idx --> same song :: pause/resume it
self.datatype = datatype
if datatype == 'songs' or datatype == 'djchannels':
if idx == self.idx and songs == self.songs:
if self.pause_flag:
self.resume()
else:
self.pause()
else:
if datatype == 'songs' or datatype == 'djchannels':
self.songs = songs
self.idx = idx
# if it's playing
if self.playing_flag:
self.switch()
# start new play
else:
self.recall()
# if current menu is not song, pause/resume
else:
if self.playing_flag:
if self.pause_flag:
self.resume()
else:
self.pause()
else:
pass
# play another
def switch(self):
self.stop()
# wait process be killed
time.sleep(0.01)
self.recall()
def stop(self):
if self.playing_flag and self.popen_handler:
self.playing_flag = False
self.popen_handler.stdin.write("Q\n")
self.popen_handler.kill()
def pause(self):
self.pause_flag = True
os.kill(self.popen_handler.pid, signal.SIGSTOP)
item = self.songs[self.idx]
self.ui.build_playinfo(item['song_name'], item['artist'], item['album_name'], item['quality'], time.time(), pause=True)
def resume(self):
self.pause_flag = False
os.kill(self.popen_handler.pid, signal.SIGCONT)
item = self.songs[self.idx]
self.ui.build_playinfo(item['song_name'], item['artist'], item['album_name'], item['quality'], time.time())
def next(self):
self.stop()
time.sleep(0.01)
self.idx = carousel(0, len(self.songs) - 1, self.idx + 1)
self.recall()
def prev(self):
self.stop()
time.sleep(0.01)
self.idx = carousel(0, len(self.songs) - 1, self.idx - 1)
self.recall()
def shuffle(self):
self.stop()
time.sleep(0.01)
num = random.randint(0, 12)
self.idx = carousel(0, len(self.songs) - 1, self.idx + num)
self.recall()
def volume_up(self):
self.volume = self.volume + 7
if (self.volume > 100):
self.volume = 100
self.popen_handler.stdin.write("V " + str(self.volume) + "\n")
def volume_down(self):
self.volume = self.volume - 7
if (self.volume < 0):
self.volume = 0
self.popen_handler.stdin.write("V " + str(self.volume) + "\n")
def update_size(self):
try:
self.ui.update_size()
item = self.songs[self.idx]
if self.playing_flag:
self.ui.build_playinfo(item['song_name'], item['artist'], item['album_name'], item['quality'], time.time())
if self.pause_flag:
self.ui.build_playinfo(item['song_name'], item['artist'], item['album_name'], item['quality'], time.time(), pause=True)
except IndexError:
pass
|
Python
| 0 |
@@ -2896,32 +2896,107 @@
f recall(self):%0A
+ if self.idx %3C 0 or self.idx %3E= len(self.songs):%0A return%0A
self.pla
|
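A worked example of the carousel clamp that the new bounds guard protects: indexes past either bound wrap around to the opposite end rather than being clipped:

carousel = lambda left, right, x: left if (x > right) else (right if x < left else x)

assert carousel(0, 4, 5) == 0    # past the right edge -> wrap to left
assert carousel(0, 4, -1) == 4   # past the left edge  -> wrap to right
assert carousel(0, 4, 2) == 2    # in range -> unchanged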
4fa2ca578b7015bee68f9f2f7bc26df2f7ab01b4
|
add test_cli.py module and initial test
|
tests/test_cli.py
|
tests/test_cli.py
|
Python
| 0 |
@@ -0,0 +1,557 @@
+#!/usr/bin/env python%0A# coding: utf8%0A%22%22%22%0AUnit tests for cli functionality%0A%22%22%22%0A%0A# --- Imports%0A%0Aimport subprocess%0A%0Aimport geocoder%0A%0A# --- Constants%0A%0A_CLI_EX = './geocoder/cli.py' # CLI executable path%0A%0A%0Aus_address = '595 Market St'%0Aus_city = 'San Francisco'%0Aus_state = 'CA'%0Aus_zipcode = '94105'%0A%0Alocation = ' '.join(%5Bus_address, us_city, us_state, us_zipcode%5D)%0A%0A%0A# --- CLI tests. Each shell call should have return code 0 if successfull.%0A%0Adef test_cli_default():%0A # default provider cli test%0A assert not subprocess.call(%5B'python', _CLI_EX, location%5D)%0A
|
|
8f4ac0b12c0f83ff892e16e312cc5edbfb089850
|
add tests for no config startup
|
tests/test_cli.py
|
tests/test_cli.py
|
Python
| 0 |
@@ -0,0 +1,305 @@
+%0Afrom click.testing import CliRunner%0Afrom vaping import cli%0Aimport pytest%0A%0A%0Adef test_start_no_home():%0A runner = CliRunner()%0A with pytest.raises(ValueError) as excinfo:%0A runner.invoke(cli.cli, %5B'start'%5D, catch_exceptions=False)%0A%0A assert str(excinfo.value).startswith('no config specified')%0A
|
|
1f2f4837e823ff6cc5c9cb961f4852753926f0d7
|
Create __init__.py
|
tumor/__init__.py
|
tumor/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
f146583961733feb90567fdf03a6a5ee122c550f
|
Create r34.py
|
r34.py
|
r34.py
|
Python
| 0.000001 |
@@ -0,0 +1,1109 @@
+# Aradiabot function for searching rule34.xxx%0A# As they don't have an API, this was easier to put in it's own file so I could organize everything.%0A%0Aimport requests%0Afrom html.parser import HTMLParser%0Aimport random%0Aimport sys%0Acounter = %5B10,9,8,7,6,5,4,3,2,1%5D%0Aimages = %5B%5D%0A%0Aclass booruparser(HTMLParser):%0A%09def handle_starttag(self, tag, attrs):%0A%09%09if tag == 'a':%0A%09%09%09if any('id' in pairs for pairs in attrs):%0A%09%09%09%09try:%0A%09%09%09%09%09images.append(str(attrs%5B1%5D%5B1%5D))%0A%09%09%09%09except:%0A%09%09%09%09%09pass%0A%09%09%09%09%0Aclass imageparser(HTMLParser):%0A%09def handle_starttag(self, tag, attrs):%0A%09%09if ('id', 'image') in attrs:%0A%09%09%09print(%22http:%22 + attrs%5B2%5D%5B1%5D)%0A%09%09%09%0A%09%09%09%0Aparser = booruparser()%0Aimgparser = imageparser()%0A%0Atags = %22%22%0A%0Afor arg in sys.argv:%0A%09if arg == sys.argv%5B0%5D:%0A%09%09pass%0A%09else:%0A%09%09tags = tags + arg + %22+%22%09%09%0A%0Acount = 0%0Awhile len(images) %3C 1:%0A%09if count %3C 10:%0A%09%09parser.feed(requests.get('http://rule34.xxx/index.php?page=post&s=list&tags=' + tags + '&pid=' + str(counter%5Bcount%5D)).text)%0A%09%09count = count + 1%0A%09else:%0A%09%09break%0A%09%0Aif count != 10:%0A%09image = requests.get('http://rule34.xxx/' + random.choice(images)).text%0A%09imgparser.feed(image)%0Aelse:%0A%09print(%220%22)%0A
|
|
d0287d9deaa3eb03076cdd199414b772a291e2c5
|
Add command for moving zips
|
calaccess_website/management/commands/mvzips.py
|
calaccess_website/management/commands/mvzips.py
|
Python
| 0.000001 |
@@ -0,0 +1,3803 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%22%22%22%0AMove downloaded and cleaned zips to their proper place in the raw data archived dir.%0A%22%22%22%0Aimport boto3%0Afrom django.conf import settings%0Afrom calaccess_raw.management.commands import CalAccessCommand%0Afrom calaccess_raw.models.tracking import RawDataVersion%0Aimport logging%0Alogger = logging.getLogger(__name__)%0A%0A%0Aclass Command(CalAccessCommand):%0A %22%22%22%0A Move downloaded and cleaned zips to their proper place in the raw data archived dir.%0A %22%22%22%0A help = 'Move downloaded and cleaned zips to their proper place in the raw data archived dir'%0A%0A def handle(self, *args, **options):%0A %22%22%22%0A Make it happen.%0A %22%22%22%0A super(Command, self).handle(*args, **options)%0A%0A # set up boto session%0A self.session = boto3.Session(%0A aws_access_key_id=settings.AWS_ACCESS_KEY_ID,%0A aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,%0A region_name=settings.AWS_S3_REGION_NAME%0A )%0A # and client%0A self.client = self.session.client('s3')%0A%0A # loop over all the versions%0A for v in RawDataVersion.objects.exclude(id=34).exclude(id=33):%0A # if there's a download zip%0A if v.download_zip_archive:%0A # set the initial path%0A initial_download_path = v.download_zip_archive.name%0A # split datetime from file name and ext%0A download_datetime, download_fullname = initial_download_path.split('/')%0A # split file name and ext%0A download_filename, download_ext = download_fullname.split('.')%0A # set new path%0A new_download_path = '%7Bfn%7D_%7Bdt%7D.%7Bfx%7D'.format(%0A fn=download_filename,%0A dt=download_datetime,%0A fx=download_ext%0A )%0A # move%0A logger.debug('Move %7B0%7D to %7B1%7D'.format(%0A initial_download_path,%0A new_download_path%0A )%0A )%0A self.client.copy_object(%0A Bucket=settings.AWS_STORAGE_BUCKET_NAME,%0A Key=new_download_path,%0A CopySource=%7B%0A 'Bucket': settings.AWS_STORAGE_BUCKET_NAME,%0A 'Key': initial_download_path,%0A %7D,%0A )%0A # reset file name%0A v.download_zip_archive.name = new_download_path%0A %0A # repeat for clean zips%0A if v.clean_zip_archive:%0A # set the initial path%0A initial_clean_path = v.clean_zip_archive.name%0A # split datetime from file name and ext%0A clean_datetime, clean_fullname = initial_clean_path.split('/')%0A # split file name and ext%0A clean_filename, clean_ext = clean_fullname.split('.')%0A # set new path%0A new_clean_path = 'clean_%7Bdt%7D.%7Bfx%7D'.format(%0A dt=clean_datetime,%0A fx=clean_ext%0A )%0A # move%0A logger.debug('Move %7B0%7D to %7B1%7D'.format(%0A initial_clean_path,%0A new_clean_path%0A )%0A )%0A self.client.copy_object(%0A Bucket=settings.AWS_STORAGE_BUCKET_NAME,%0A Key=new_clean_path,%0A CopySource=%7B%0A 'Bucket': settings.AWS_STORAGE_BUCKET_NAME,%0A 'Key': initial_clean_path,%0A %7D,%0A )%0A # reset file name%0A v.clean_zip_archive.name = new_clean_path%0A%0A # save the version%0A v.save()%0A
|
|
cdd1f3410b8ae304485f7992ac6048e1277cffe1
|
Add local locale from file
|
parsedatetime/pdt_locales/__init__.py
|
parsedatetime/pdt_locales/__init__.py
|
# -*- encoding: utf-8 -*-
"""
pdt_locales
All of the included locale classes shipped with pdt.
"""
try:
import PyICU as pyicu
except:
pyicu = None
def lcase(x):
return x.lower()
from .base import pdtLocale_base, pdtLocale_icu
from .de_DE import *
from .en_AU import *
from .en_US import *
from .es import *
from .nl_NL import *
from .pt_BR import *
from .ru_RU import *
|
Python
| 0.000001 |
@@ -93,16 +93,26 @@
pdt.%0A%22%22%22
+%0Aimport os
%0A%0Atry:%0A
@@ -162,16 +162,29 @@
= None%0A%0A
+import yaml%0A%0A
%0Adef lca
@@ -381,24 +381,24 @@
BR import *%0A
-
from .ru_RU
@@ -398,16 +398,797 @@
.ru_RU import *%0A
+%0ApdtLocales = %5B%0A 'icu',%0A 'en_US',%0A 'en_AU',%0A 'es_ES',%0A 'de_DE',%0A 'nl_NL',%0A 'ru_RU',%0A%5D%0A%0A%0Adef load_yaml(path):%0A %22%22%22%0A Read yaml data from filepath%0A :param path:%0A :return:%0A %22%22%22%0A with open(path, 'r') as fio:%0A return yaml.load(fio.read())%0A%0A%0Adef _get_yaml_path(locale):%0A %22%22%22%0A Return filepath of locale file%0A :param locale:%0A :return:%0A %22%22%22%0A return os.path.join(os.path.dirname(__file__), '%25s.yaml' %25 locale)%0A%0A%0Adef load_locale(locale):%0A %22%22%22%0A Return data of locale%0A :param locale:%0A :return:%0A %22%22%22%0A assert locale in pdtLocales, %22The locale '%25s' is not supported%22 %25 locale%0A _data_base = load_yaml(_get_yaml_path('base'))%0A return _data_base.update(**load_yaml(_get_yaml_path(locale)))%0A%0A%0Aload_locale('ru_RU')%0A
|
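One hedged observation on the diff above: dict.update() returns None, so its load_locale() always returns None rather than the merged mapping. A sketch that returns the data (helper names follow the diff):

def load_locale_fixed(locale):
    assert locale in pdtLocales, "The locale '%s' is not supported" % locale
    data = load_yaml(_get_yaml_path('base'))
    data.update(load_yaml(_get_yaml_path(locale)))
    return data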
c98039a25638db0c124efeaa394f89f2a84a2ede
|
Create aekjdfh.py
|
aekjdfh.py
|
aekjdfh.py
|
Python
| 0.000037 |
@@ -0,0 +1,18 @@
+sdl;jfhlkjsdhfhdf%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.