commit (string, length 40) | subject (string, 1–3.25k chars) | old_file (string, 4–311 chars) | new_file (string, 4–311 chars) | old_contents (string, 0–26.3k chars) | lang (string, 3 classes) | proba (float64, 0–1) | diff (string, 0–7.82k chars)
---|---|---|---|---|---|---|---
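Each record below follows this schema. In the raw dataset, a diff cell percent-encodes newlines and punctuation (%0A for a newline, %22 for a double quote, %25 for a percent sign, and so on). A minimal sketch for decoding such a cell, assuming standard URL percent-encoding; the row dict here is illustrative, not an actual record:

    from urllib.parse import unquote

    def decode_diff_cell(cell: str) -> str:
        # unquote turns %0A into "\n", %22 into '"', %25 into "%", etc.,
        # and decodes UTF-8 percent-escape sequences as well.
        return unquote(cell)

    # Illustrative row shaped like the schema above
    row = {"commit": "b6a5599...", "diff": "@@ -0,0 +1,12 @@\n+line one%0Aline two%0A"}
    print(decode_diff_cell(row["diff"]))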
b6a55999cd0f6ff6a7d69b7eb59e859d415b275f
|
Add test.py with old-formatting test
|
test.py
|
test.py
|
Python
| 0.000003 |
@@ -0,0 +1,213 @@
+#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 Martine Lenders <[email protected]>
#
# Distributed under terms of the MIT license.

"%s" % "test"
"%d" % 2
"%.4f" % 2.0
|
|
f4d26567afc9185e0f9370eda43d30084437ade5
|
Solve Code Fights make array consecutive 2 problem
|
CodeFights/makeArrayConsecutive2.py
|
CodeFights/makeArrayConsecutive2.py
|
Python
| 0.998889 |
@@ -0,0 +1,708 @@
+#!/usr/local/bin/python
# Code Fights Make Array Consecutive 2 Problem


def makeArrayConsecutive2(statues):
    return (len(range(min(statues), max(statues) + 1)) - len(statues))


def main():
    tests = [
        [[6, 2, 3, 8], 3],
        [[0, 3], 2],
        [[5, 4, 6], 0],
        [[6, 3], 2],
        [[1], 0]
    ]

    for t in tests:
        res = makeArrayConsecutive2(t[0])
        ans = t[1]
        if ans == res:
            print("PASSED: makeArrayConsecutive2({}) returned {}"
                  .format(t[0], res))
        else:
            print("FAILED: makeArrayConsecutive2({}) returned {}, answer: {}"
                  .format(t[0], res, ans))


if __name__ == '__main__':
    main()
|
|
06d8f4290cf433a538cef4851acefd6e42c8341d
|
Add simple example
|
examples/client.py
|
examples/client.py
|
Python
| 0.000375 |
@@ -0,0 +1,404 @@
+#!/usr/bin/env python

import os
import sys

sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))

from accepton import Client

API_KEY = 'skey_be064297e7b2db4b6ce5928e8dcad582'

accepton = Client(api_key=API_KEY, environment='development')
token = accepton.create_token(amount=1099, application_fee=99, currency='cad',
                              description='Test charge')

print(token)
|
|
96dd9b2968039be3fa87a30e8a16ed1c77be10bb
|
solve 94
|
94_BinaryTreeInorderTraversal.py
|
94_BinaryTreeInorderTraversal.py
|
Python
| 0.999999 |
@@ -0,0 +1,746 @@
+# Definition for a binary tree node.
# class TreeNode:
#     def __init__(self, x):
#         self.val = x
#         self.left = None
#         self.right = None

class Solution:
    # @param {TreeNode} root
    # @return {integer[]}
    def inorderTraversal(self, root):
        if not root:
            return []
        result = []
        stack = [(False, root)]
        while stack:
            read, node = stack.pop()
            if read:
                result.append(node.val)
            else:
                if node.right:
                    stack.append((False, node.right))
                stack.append((True, node))
                if node.left:
                    stack.append((False, node.left))
        return result
|
|
e8c0b17bb28f1212b302959144086d72c205bf4c
|
store toc list in optional file to make merging easier
|
publisher/conf.py
|
publisher/conf.py
|
import glob
import os
work_dir = os.path.dirname(__file__)
papers_dir = os.path.join(work_dir,'../papers')
output_dir = os.path.join(work_dir,'../output')
template_dir = os.path.join(work_dir,'_templates')
static_dir = os.path.join(work_dir,'_static')
css_file = os.path.join(static_dir,'scipy-proc.css')
build_dir = os.path.join(work_dir,'_build')
pdf_dir = os.path.join(build_dir, 'pdfs')
html_dir = os.path.join(build_dir, 'html')
bib_dir = os.path.join(html_dir, 'bib')
toc_conf = os.path.join(build_dir, 'toc.json')
proc_conf = os.path.join(work_dir,'../scipy_proc.json')
dirs = sorted([os.path.basename(d)
               for d in glob.glob('%s/*' % papers_dir)
               if os.path.isdir(d)])
|
Python
| 0 |
@@ -319,16 +319,67 @@
 c.css')
+toc_list = os.path.join(static_dir,'toc.txt')
 build_di
@@ -676,16 +676,119 @@
 n')
+if os.path.isfile(toc_list):
+    with open(toc_list) as f:
+        dirs = f.read().splitlines()
+else:
 dirs
-
|
f046bd8982f08a31448bb5e4e10ded2a14ea95b0
|
Create __init__.py
|
iotqatools/__init__.py
|
iotqatools/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+
|
|
e4a33badd98c4c927c4128e22fd839f54711cfd6
|
Create PedidoCadastrar.py
|
backend/Models/Predio/PedidoCadastrar.py
|
backend/Models/Predio/PedidoCadastrar.py
|
Python
| 0 |
@@ -0,0 +1,448 @@
+from Framework.Pedido import Pedido
from Framework.ErroNoHTTP import ErroNoHTTP

class PedidoCadastrar(Pedido):

    def __init__(self,variaveis_do_ambiente):
        super(PedidoCadastrar, self).__init__(variaveis_do_ambiente)
        try:
            self.id = self.corpo['id']
            self.nome = self.corpo['nome']
        except:
            raise ErroNoHTTP(400)

    def getId(self):
        return self.id

    def setNome(self,nome):
        self.nome = nome

    def getNome(self):
        return self.nome
|
|
8a911b877c5ae196ce6e4cc7e6c284b742645bc8
|
Update headers strategy to mimic internal bytes representation.
|
test/test_invalid_headers.py
|
test/test_invalid_headers.py
|
# -*- coding: utf-8 -*-
"""
test_invalid_headers.py
~~~~~~~~~~~~~~~~~~~~~~~

This module contains tests that use invalid header blocks, and validates that
they fail appropriately.
"""
import pytest

import h2.connection
import h2.errors
import h2.events
import h2.exceptions
import h2.utilities

from hypothesis import given
from hypothesis.strategies import text, lists, tuples

HEADERS_STRATEGY = lists(tuples(text(), text()))


class TestInvalidFrameSequences(object):
    """
    Invalid header sequences cause ProtocolErrors to be thrown when received.
    """
    base_request_headers = [
        (':authority', 'example.com'),
        (':path', '/'),
        (':scheme', 'https'),
        (':method', 'GET'),
        ('user-agent', 'someua/0.0.1'),
    ]
    invalid_header_blocks = [
        base_request_headers + [('Uppercase', 'name')],
        base_request_headers + [(':late', 'pseudo-header')],
        [(':path', 'duplicate-pseudo-header')] + base_request_headers,
        base_request_headers + [('connection', 'close')],
        base_request_headers + [('proxy-connection', 'close')],
        base_request_headers + [('keep-alive', 'close')],
        base_request_headers + [('transfer-encoding', 'gzip')],
        base_request_headers + [('upgrade', 'super-protocol/1.1')],
        base_request_headers + [('te', 'chunked')],
    ]

    @pytest.mark.parametrize('headers', invalid_header_blocks)
    def test_headers_event(self, frame_factory, headers):
        """
        Test invalid headers are rejected with PROTOCOL_ERROR.
        """
        c = h2.connection.H2Connection(client_side=False)
        c.receive_data(frame_factory.preamble())
        c.clear_outbound_data_buffer()

        f = frame_factory.build_headers_frame(headers)
        data = f.serialize()

        with pytest.raises(h2.exceptions.ProtocolError):
            c.receive_data(data)

        expected_frame = frame_factory.build_goaway_frame(
            last_stream_id=0, error_code=h2.errors.PROTOCOL_ERROR
        )
        assert c.data_to_send() == expected_frame.serialize()

    def test_transfer_encoding_trailers_is_valid(self, frame_factory):
        """
        Transfer-Encoding trailers is allowed by the filter.
        """
        headers = (
            self.base_request_headers + [('te', 'trailers')]
        )

        c = h2.connection.H2Connection(client_side=False)
        c.receive_data(frame_factory.preamble())

        f = frame_factory.build_headers_frame(headers)
        data = f.serialize()

        events = c.receive_data(data)
        assert len(events) == 1
        request_event = events[0]
        assert request_event.headers == headers


class TestFilter(object):
    """
    Test the filter function directly.

    These tests exists to confirm the behaviour of the filter function in a
    wide range of scenarios. Many of these scenarios may not be legal for
    HTTP/2 and so may never hit the function, but it's worth validating that it
    behaves as expected anyway.
    """
    @given(HEADERS_STRATEGY)
    def test_range_of_acceptable_outputs(self, headers):
        """
        validate_headers either returns the data unchanged or throws a
        ProtocolError.
        """
        try:
            assert headers == h2.utilities.validate_headers(headers)
        except h2.exceptions.ProtocolError:
            assert True
|
Python
| 0 |
@@ -352,20 +352,22 @@
import
-text
+binary
, lists,
@@ -411,20 +411,24 @@
les(
-text(), text
+binary(), binary
()))
|
82152af00c54ea94a4e8cd90d3cd5f45ef28ee86
|
add missing unit test file
|
test/test_utils.py
|
test/test_utils.py
|
Python
| 0 |
@@ -0,0 +1,1042 @@
+# coding=utf-8
from __future__ import unicode_literals

import os
import codecs


from nose.tools import eq_
from pyecharts.utils import (
    freeze_js,
    write_utf8_html_file,
    get_resource_dir
)


def test_get_resource_dir():
    path = get_resource_dir('templates')
    expected = os.path.join(os.getcwd(), '..', 'pyecharts', 'templates')
    eq_(path, os.path.abspath(expected))


def test_freeze_js():
    html_content = """
    </style>
    <!-- build -->
    <script src="js/echarts/echarts.min.js"></script>
    <script src="js/echarts/echarts-wordcloud.min.js"></script>
    <!-- endbuild -->
    </head><body>"""

    html_content = freeze_js(html_content)
    assert 'exports.echarts' in html_content
    assert 'echarts-wordcloud' in html_content


def test_write_utf8_html_file():
    content = "柱状图数据堆叠示例"
    file_name = 'test.html'
    write_utf8_html_file(file_name, content)
    with codecs.open(file_name, 'r', 'utf-8') as f:
        actual_content = f.read()
    eq_(content, actual_content)
|
|
9c52dae7f5de64865fff51a24680c43e041376ea
|
Add random_subtree script
|
random_subtree.py
|
random_subtree.py
|
Python
| 0.000001 |
@@ -0,0 +1,671 @@
+#!/usr/bin/env python2

# Use either ete2 or ete3
try:
    import ete3 as ete
except ImportError:
    import ete2 as ete

import numpy as np

CLI = """
USAGE:
    random_subtree <tree> <n>

Subsamples <n> taxa from the Newick tree in <tree>, preserving the branch
lengths of subsampled taxa.
"""

def main(treefile, n):
    n = int(n)
    tree = ete.Tree(treefile)
    leaves = tree.get_leaf_names()
    subsample = [leaves[i] for i in np.random.choice(n, size=len(tree))]
    tree.prune(subsample, preserve_branch_length=True)
    print(tree.write())

if __name__ == "__main__":
    import docopt
    opts = docopt.docopt(CLI)
    main(opts['<tree>'], int(opts['<n>']))
|
|
3c1e61b4b47ec244e4cadd4bf34e0a21cf1ff7e1
|
Create w3_1.py
|
w3_1.py
|
w3_1.py
|
Python
| 0.000482 |
@@ -0,0 +1,13 @@
+print("第三週")
|
|
8bb9d6cbe161654126bb3aa3adecdb99ee0d9987
|
Create sct4.py
|
sct4.py
|
sct4.py
|
Python
| 0.000003 |
@@ -0,0 +1,137 @@
+from mpi4py import MPI
comm = MPI.COMM_WORLD
rank=comm.rank
size=comm.size
print 'Rank:',rank
print 'Node Count:',size
print 9**(rank+3)
|
|
aafd823069176075b4810496ee98cea3203b5652
|
Make a command to make subsets. Subsets are useful for testing during development.
|
build_time/src/make_subset.py
|
build_time/src/make_subset.py
|
Python
| 0 |
@@ -0,0 +1,2617 @@
+"""
  Copyright 2014 Google Inc. All rights reserved.

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
"""

import argparse
import os
import sys
from fontTools.subset import Options, load_font, Subsetter, save_font

def main(args):
  """Subset a font (useful for making small test fonts).

  Arguments:
    font-file
    --hinting=(False|True) ,default is false
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('fontfile', help='Input font file')
  parser.add_argument('--text', default='',
                      help='Text to include in the subset')
  parser.add_argument('--unicodes', default='',
                      help='Comma separated list of Unicode codepoints (hex) '
                      'to include in the subset; eg, "e7,0xe8,U+00e9"')
  parser.add_argument('--glyphs', default='',
                      help='Comma separated list of glyph IDs (decimal) to '
                      'include in the subset; eg, "1,27"')
  parser.add_argument('--hinting',default=False, action='store_true',
                      help='Enable hinting if specified, no hinting if not '
                      'present')

  cmd_args = parser.parse_args(args)

  options = Options()
  # Definitely want the .notdef glyph and outlines.
  options.notdef_glyph = True
  options.notdef_outline = True
  # Get the item. to keep in the subset.
  text = cmd_args.text
  unicodes_str = cmd_args.unicodes.lower().replace('0x', '').replace('u+', '')
  unicodes = [ int(c,16) for c in unicodes_str.split(',') if c ]
  glyphs = [ int(c) for c in cmd_args.glyphs.split(',') if c ]
  fontfile = cmd_args.fontfile
  options.hinting = cmd_args.hinting  # False => no hinting

  dir = os.path.dirname(fontfile)
  basename = os.path.basename(fontfile)
  filename, extension = os.path.splitext(basename)
  output_file = dir + '/' + filename + '_subset' + extension
  font = load_font(fontfile, options, lazy=False)

  subsetter = Subsetter(options)
  subsetter.populate(text=text, unicodes=unicodes, glyphs=glyphs)
  subsetter.subset(font)
  save_font(font, output_file, options)


if __name__ == '__main__':
  main(sys.argv[1:])
|
|
01f4aedac1df6f2e55c76d60c52d1e0c5ccfd9f2
|
Revert "Delete test file"
|
tests/mock_vws/test_query.py
|
tests/mock_vws/test_query.py
|
Python
| 0 |
@@ -0,0 +1,2170 @@
+"""
Tests for the mock of the query endpoint.

https://library.vuforia.com/articles/Solution/How-To-Perform-an-Image-Recognition-Query.
"""

import io
from typing import Any, Dict
from urllib.parse import urljoin

import pytest
import requests
from requests import codes
from requests_mock import POST

from tests.utils import VuforiaDatabaseKeys
from vws._request_utils import authorization_header, rfc_1123_date


@pytest.mark.usefixtures('verify_mock_vuforia')
class TestQuery:
    """
    Tests for the query endpoint.
    """

    def test_no_results(
        self,
        vuforia_database_keys: VuforiaDatabaseKeys,
        high_quality_image: io.BytesIO,
    ) -> None:
        """
        With no results
        """
        image_content = high_quality_image.read()
        content_type = 'multipart/form-data'
        query: Dict[str, Any] = {}
        date = rfc_1123_date()
        request_path = '/v1/query'
        url = urljoin('https://cloudreco.vuforia.com', request_path)
        files = {'image': ('image.jpeg', image_content, 'image/jpeg')}

        request = requests.Request(
            method=POST,
            url=url,
            headers={},
            data=query,
            files=files,
        )

        prepared_request = request.prepare()  # type: ignore

        authorization_string = authorization_header(
            access_key=vuforia_database_keys.client_access_key,
            secret_key=vuforia_database_keys.client_secret_key,
            method=POST,
            content=prepared_request.body,
            content_type=content_type,
            date=date,
            request_path=request_path,
        )

        headers = {
            **prepared_request.headers,
            'Authorization': authorization_string,
            'Date': date,
        }

        prepared_request.prepare_headers(headers=headers)

        session = requests.Session()
        response = session.send(request=prepared_request)  # type: ignore
        assert response.status_code == codes.OK
        assert response.json()['result_code'] == 'Success'
        assert response.json()['results'] == []
        assert 'query_id' in response.json()
|
|
2e330d5cd2ad033c675d5888a2f43e0f846a4df1
|
Add CodeDeploy
|
troposphere/codedeploy.py
|
troposphere/codedeploy.py
|
Python
| 0.000001 |
@@ -0,0 +1,2293 @@
+# Copyright (c) 2015, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.

from . import AWSObject, AWSProperty
from .validators import positive_integer


KEY_ONLY = "KEY_ONLY"
VALUE_ONLY = "VALUE_ONLY"
KEY_AND_VALUE = "KEY_AND_VALUE"


class GitHubLocation(AWSProperty):
    props = {
        'CommitId': (basestring, True),
        'Repository': (basestring, True),
    }


class S3Location(AWSProperty):
    props = {
        'Bucket': (basestring, True),
        'BundleType': (basestring, True),
        'ETag': (basestring, False),
        'Key': (basestring, True),
        'Version': (basestring, True),
    }


class Revision(AWSProperty):
    props = {
        'GitHubLocation': (GitHubLocation, False),
        'RevisionType': (basestring, False),
        'S3Location': (S3Location, False),
    }


class Deployment(AWSProperty):
    props = {
        'Description': (basestring, False),
        'IgnoreApplicationStopFailures': (bool, False),
        'Revision': (Revision, True),
    }


class Ec2TagFilters(AWSProperty):
    props = {
        'Key': (basestring, False),
        'Type': (basestring, False),
        'Value': (basestring, False),
    }


class OnPremisesInstanceTagFilters(AWSProperty):
    props = {
        'Key': (basestring, False),
        'Type': (basestring, False),
        'Value': (basestring, False),
    }


class MinimumHealthyHosts(AWSProperty):
    props = {
        'Type': (basestring, False),
        'Value': (positive_integer, False),
    }


class Application(AWSObject):
    resource_type = "AWS::CodeDeploy::Application"

    props = {
    }


class DeploymentConfig(AWSObject):
    resource_type = "AWS::CodeDeploy::DeploymentConfig"

    props = {
        'MinimumHealthyHosts': (MinimumHealthyHosts, False),
    }


class DeploymentGroup(AWSObject):
    resource_type = "AWS::DirectoryService::DeploymentGroup"

    props = {
        'ApplicationName': (basestring, True),
        'AutoScalingGroups': ([basestring], False),
        'Deployment': (Deployment, False),
        'DeploymentConfigName': (basestring, False),
        'Ec2TagFilters': (Ec2TagFilters, False),
        'OnPremisesInstanceTagFilters': (OnPremisesInstanceTagFilters, False),
        'ServiceRoleArn': (basestring, True),
    }
|
|
0091af78bd191e34ecb621b20e79d6dd3d32ebb6
|
Add unit tests for VocabularySet
|
tests/test_core.py
|
tests/test_core.py
|
Python
| 0 |
@@ -0,0 +1,2146 @@
+#!/usr/bin/env python
from __future__ import division

from unittest import TestCase, main
from metasane.core import VocabularySet

class VocabularySetTests(TestCase):
    def setUp(self):
        """Initialize data used in the tests."""
        self.single_vocab = {'vocab_1': VOCAB_1.split('\n')}
        self.multi_vocab = {
            'vocab_1': VOCAB_1.split('\n'),
            'vocab_2': VOCAB_2.split('\n')
        }
        self.multi_vocab_inst = VocabularySet(self.multi_vocab)

    def test_init_empty(self):
        """Test constructing an instance with no vocabs."""
        obs = VocabularySet({})
        self.assertEqual(len(obs), 0)

    def test_init_single(self):
        """Test constructing an instance with a single vocab."""
        obs = VocabularySet(self.single_vocab)
        self.assertEqual(len(obs), 1)
        self.assertTrue('vocab_1' in obs)

    def test_init_multi(self):
        """Test constructing an instance with multiple vocabs."""
        self.assertEqual(len(self.multi_vocab_inst), 2)
        self.assertTrue('vocab_1' in self.multi_vocab_inst)
        self.assertTrue('vocab_2' in self.multi_vocab_inst)

    def test_contains(self):
        """Test membership based on ID."""
        self.assertTrue('vocab_1' in self.multi_vocab_inst)
        self.assertTrue('vocab_2' in self.multi_vocab_inst)
        self.assertFalse('vocab_3' in self.multi_vocab_inst)

    def test_getitem(self):
        """Test retrieving vocab based on ID."""
        obs = self.multi_vocab_inst['vocab_1']
        self.assertEqual(obs, set(['foo', 'bar', 'baz']))

        obs = self.multi_vocab_inst['vocab_2']
        self.assertEqual(obs, set(['xyz', '123', 'abc']))

    def test_getitem_nonexistent(self):
        """Test retrieving vocab based on nonexistent ID."""
        with self.assertRaises(KeyError):
            _ = self.multi_vocab_inst['vocab_3']

    def test_len(self):
        """Test retrieving the number of vocabs."""
        self.assertEqual(len(self.multi_vocab_inst), 2)

VOCAB_1 = """foo
 \t \t
baR\t\t

\t\tBAZ

"""

VOCAB_2 = """abc
123
xyz"""

if __name__ == '__main__':
    main()
|
|
1f9240f0b954afa9f587f468872c3e1e215f2eaa
|
Implement channel mode +s (or what's left of it)
|
txircd/modules/cmode_s.py
|
txircd/modules/cmode_s.py
|
Python
| 0 |
@@ -0,0 +1,678 @@
+from txircd.modbase import Mode

class SecretMode(Mode):
    def listOutput(self, command, data):
        if command != "LIST":
            return data
        cdata = data["cdata"]
        if "s" in cdata["modes"] and cdata["name"] not in data["user"].channels:
            data["cdata"] = {}
    # other +s stuff is hiding in other modules.

class Spawner(object):
    def __init__(self, ircd):
        self.ircd = ircd
        self.mode_s = None

    def spawn(self):
        self.mode_s = SecretMode()
        return {
            "modes": {
                "cns": self.mode_s
            },
            "actions": {
                "commandextra": [self.mode_s.listOutput]
        }

    def cleanup(self):
        self.ircd.removeMode("cns")
        self.ircd.actions["commandextra"].remove(self.mode_s.listOutput)
|
|
9f59cf074c4f64616bf3a31fd5c6fc649e99e4ae
|
Checks whether all the case-based letters of the strings are uppercase
|
techgig_isupper.py
|
techgig_isupper.py
|
Python
| 0.999999 |
@@ -0,0 +1,117 @@
+def main():
    s=raw_input()
    if s.isupper():
        print "True"
    else:
        print "False"


main()
|
|
2a26fc7f0ac6223ebcb20eb1de550e899e5728db
|
add beginnings of script for ball identification
|
scripts/hist.py
|
scripts/hist.py
|
Python
| 0 |
@@ -0,0 +1,2214 @@
+import cv2
import numpy as np

frame = cv2.imread('/mnt/c/Users/T-HUNTEL/Desktop/hackathon/table3.jpg')
h,w,c = frame.shape
print frame.shape


# Convert BGR to HSV
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)

BORDER_COLOR = 0
def flood_fill(image, x, y, value):
    count = 1
    points = [(x, y)]
    "Flood fill on a region of non-BORDER_COLOR pixels."
    if x >= image.shape[1] or y >= image.shape[0] or image[x,y] == BORDER_COLOR:
        return None, None
    edge = [(x, y)]
    image[x, y] = value

    while edge:
        newedge = []
        for (x, y) in edge:
            for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)):
                if s <= image.shape[1] and y <= image.shape[0] and \
                    image[s, t] not in (BORDER_COLOR, value):
                    image[s, t] = value
                    points.append((s, t))
                    count += 1
                    newedge.append((s, t))
        edge = newedge

    return count, points


# thresholds for different balls / background
low_bkg = np.array([15, 40, 50], dtype=np.uint8)
high_bkg = np.array([40, 190, 200], dtype=np.uint8)

lower_blue = np.array([110,50,50], dtype=np.uint8)
upper_blue = np.array([130,255,255], dtype=np.uint8)

low_yellow = np.array([20, 30, 30], dtype=np.uint8)
high_yellow = np.array([30, 255, 255], dtype=np.uint8)


# mask out the background
mask = cv2.inRange(hsv, low_bkg, high_bkg)
mask = np.invert(mask)


# Bitwise-AND mask and original image
objects = cv2.bitwise_and(frame,frame, mask= mask)

hsv = cv2.cvtColor(objects, cv2.COLOR_BGR2HSV)

# mask the yellow balls
mask = cv2.inRange(hsv, low_yellow, high_yellow)

yellows = cv2.bitwise_and(objects, objects, mask=mask)

# find the biggest cloud of 1's in the yellow mask
biggest_cloud = []
biggest_count = 0

image = mask / 255.

while len(np.where(image == 1)[0]) > 0:
    loc = np.where(image == 1)
    y = loc[0][0]
    x = loc[1][0]
    count, cloud = flood_fill(image, y, x, 2)
    if count > biggest_count:
        print count
        biggest_count = count
        biggest_cloud = cloud

print biggest_cloud
print biggest_count

cv2.imwrite('mask.jpg', mask)
cv2.imwrite('yellows.jpg', yellows)
cv2.imwrite('frame.jpg', frame)
|
|
6a686a800a3579970a15fa9552b2eb4e1b6b3ed9
|
add some tools for ml scoring
|
corgi/ml.py
|
corgi/ml.py
|
Python
| 0 |
@@ -0,0 +1,1620 @@
+import numpy as np

import pandas as pd
from scipy.stats import kendalltau, spearmanr
from sklearn.metrics import (accuracy_score, f1_score, log_loss,
                             mean_squared_error, precision_score, recall_score)
from sklearn.model_selection import StratifiedKFold
from tqdm import tqdm


classifier_scoring = {
    'accuracy': accuracy_score,
    'log_loss': log_loss,
    'f1_score': lambda x, y: f1_score(x, y, average='weighted'),
    'precision': lambda x, y: precision_score(x, y, average='weighted'),
    'recall': lambda x, y: recall_score(x, y, average='weighted'),
}

regression_scoring = {
    'mean_squared_error': mean_squared_error,
    'kendalltau': lambda x, y: kendalltau(x, y).correlation,
    'spearmanr': lambda x, y: spearmanr(x, y)[0],
}

def scores(y, y_pred, scoring=None):
    if scoring is None:
        raise Exception("cross_val_scores requires a dict of measures.")

    scores = {}
    for k, metric in scoring.items():
        scores[k] = metric(y, y_pred)
    return scores


def cross_val_scores(clf, X, y, cv=3, scoring=None):
    if scoring is None:
        raise Exception("cross_val_scores requires a dict of measures.")

    X, y = np.array(X), np.array(y)
    skf = StratifiedKFold(n_splits=cv)
    scores = []
    for train, test in tqdm(skf.split(X, y)):
        clf.fit(X[train], y[train])
        y_pred = clf.predict(X[test])
        score = {}
        for k, metric in scoring.items():
            try:
                score[k] = metric(y[test], y_pred)
            except:
                pass
        scores.append(score)

    return pd.DataFrame(scores)
|
|
2d7d4987eb06372496ce4a5b7b961a12deba9574
|
add windows-specific tests for shell_{quote,split}
|
tests/util_test.py
|
tests/util_test.py
|
Python
| 0 |
@@ -0,0 +1,1888 @@
+# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import sys

from nanoemoji.util import shell_quote, shell_split

import pytest


# Source:
# https://github.com/python/cpython/blob/653e563/Lib/test/test_subprocess.py#L1198-L1214
LIST2CMDLINE_TEST_DATA = [
    (["a b c", "d", "e"], '"a b c" d e'),
    (['ab"c', "\\", "d"], 'ab\\"c \\ d'),
    (['ab"c', " \\", "d"], 'ab\\"c " \\\\" d'),
    (["a\\\\\\b", "de fg", "h"], 'a\\\\\\b "de fg" h'),
    (['a\\"b', "c", "d"], 'a\\\\\\"b c d'),
    (["a\\\\b c", "d", "e"], '"a\\\\b c" d e'),
    (["a\\\\b\\ c", "d", "e"], '"a\\\\b\\ c" d e'),
    (["ab", ""], 'ab ""'),
]
CMDLINE2LIST_TEST_DATA = [(cmdline, args) for args, cmdline in LIST2CMDLINE_TEST_DATA]


@pytest.mark.skipif(not sys.platform.startswith("win"), reason="Windows only")
@pytest.mark.parametrize(
    "args, expected_cmdline",
    LIST2CMDLINE_TEST_DATA,
    ids=[s for _, s in LIST2CMDLINE_TEST_DATA],
)
def test_windows_shell_quote(args, expected_cmdline):
    assert " ".join(shell_quote(s) for s in args) == expected_cmdline


@pytest.mark.skipif(not sys.platform.startswith("win"), reason="Windows only")
@pytest.mark.parametrize(
    "cmdline, expected_args",
    CMDLINE2LIST_TEST_DATA,
    ids=[s for s, _ in CMDLINE2LIST_TEST_DATA],
)
def test_windows_shell_split(cmdline, expected_args):
    assert shell_split(cmdline) == expected_args
|
|
a6ff8a5838f82be3d5b0b4196c03fbf7c15aff7a
|
Test dat.info
|
test.py
|
test.py
|
Python
| 0 |
@@ -0,0 +1,346 @@
+import unittest
import requests

port = 'http://localhost:6461'

def info():
    call = port + '/api'
    req = requests.get(call, stream=True)
    print(req.content)
    return req.status_code

class DatTest(unittest.TestCase):

    def test_info(self):
        self.assertEqual(info(), 200)

if __name__ == '__main__':
    unittest.main()
|
|
18df3284fd6dc176b71c41599d02a24dc021f8db
|
add file that is useful for testing; but will be much more useful when I figure out how to turn of debugging output in Flask.
|
test.py
|
test.py
|
Python
| 0 |
@@ -0,0 +1,390 @@
+#!/usr/bin/env python

import os
from doctest import testmod, NORMALIZE_WHITESPACE, ELLIPSIS

import backend, client, frontend, misc, model, session

def tm(module):
    testmod(module, optionflags=NORMALIZE_WHITESPACE | ELLIPSIS)

def run_doctests():
    tm(backend)
    tm(client)
    tm(frontend)
    tm(misc)
    tm(model)
    tm(session)

if __name__ == '__main__':
    run_doctests()
|
|
ae6184a023f9a14c54663270d4a4294b8c3832f4
|
Create test.py
|
test.py
|
test.py
|
Python
| 0.000002 |
@@ -0,0 +1,32 @@
+import os

print("hello there")
|
|
9309f7190314abdd8b56368147862453d17d97b5
|
Create test.py
|
test.py
|
test.py
|
Python
| 0.000005 |
@@ -0,0 +1 @@
+
|
|
d527bc83d44b91bb827c02907faf8cd7e7d49544
|
Add dateutil gist
|
dateutil.py
|
dateutil.py
|
Python
| 0 |
@@ -0,0 +1,542 @@
+#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8:et
"""Date and Time util
"""
__author__ = ["Jianlong Chen <[email protected]>"]
__date__ = "2013-07-17"

 import datetime

def year():
    return datetime.datetime.strftime(datetime.datetime.now(), '%Y')

def date_time():
    return datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d %H:%M:%S')

def date():
    return datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d')

def hour():
    return datetime.datetime.strftime(datetime.datetime.now(), '%H')
|
|
75a61dfe788102d04e1cc3b151e839fa9add724f
|
Fix review requests
|
tools/export/cdt/__init__.py
|
tools/export/cdt/__init__.py
|
import re
from os.path import join, exists, realpath, relpath, basename
from os import makedirs

from tools.export.makefile import Makefile, GccArm, Armc5, IAR


class Eclipse(Makefile):
    """Generic Eclipse project. Intended to be subclassed by classes that
    specify a type of Makefile.
    """
    def generate(self):
        """Generate Makefile, .cproject & .project Eclipse project file,
        py_ocd_settings launch file, and software link .p2f file
        """
        super(Eclipse, self).generate()
        include_paths_replace_re= re.compile(r'(^[.]/|^[.]$)')
        ctx = {
            'name': self.project_name,
            'elf_location': join('BUILD',self.project_name)+'.elf',
            'c_symbols': self.toolchain.get_symbols(),
            'asm_symbols': self.toolchain.get_symbols(True),
            'target': self.target,
            'include_paths': map(lambda s: include_paths_replace_re.sub('%s/' % self.project_name, s), self.resources.inc_dirs),
            'load_exe': str(self.LOAD_EXE).lower()
        }

        if not exists(join(self.export_dir,'eclipse-extras')):
            makedirs(join(self.export_dir,'eclipse-extras'))

        self.gen_file('cdt/pyocd_settings.tmpl', ctx,
                      join('eclipse-extras',self.target+'_pyocd_settings.launch'))
        self.gen_file('cdt/necessary_software.tmpl', ctx,
                      join('eclipse-extras','necessary_software.p2f'))

        self.gen_file('cdt/.cproject.tmpl', ctx, '.cproject')
        self.gen_file('cdt/.project.tmpl', ctx, '.project')


class EclipseGcc(Eclipse, GccArm):
    LOAD_EXE = True
    NAME = "Eclipse-GCC-ARM"


class EclipseArmc5(Eclipse, Armc5):
    LOAD_EXE = False
    NAME = "Eclipse-Armc5"


class EclipseIAR(Eclipse, IAR):
    LOAD_EXE = True
    NAME = "Eclipse-IAR"
|
Python
| 0 |
@@ -520,32 +520,21 @@
-include_paths_replace_re
+starting_dot
= re
@@ -867,46 +867,21 @@
s':
-map(lambda s: include_paths_replace_re
+[starting_dot
.sub
@@ -908,19 +908,31 @@
t_name,
-s),
+inc) for inc in
self.re
@@ -947,17 +947,17 @@
inc_dirs
-)
+]
 ,
|
39fa13cf9b12f3828d4776d10532405c0ea43603
|
Add an example
|
examples/example.py
|
examples/example.py
|
Python
| 0 |
@@ -0,0 +1,748 @@
+"""
Flow as follows:
Create Service -> Create User -> Initiate Authentication -> Verify Pin
"""

from messente.verigator.api import Api

api = Api("username", "password")
service = api.services.create("http://example.com", "service_name")

user = api.users.create(service.id, "+xxxxxxxxxxx", "username")

auth_id = api.auth.initiate(service.id, user.id, api.auth.METHOD_SMS)

while True:
    try:
        input = raw_input  # Python 2 compatibility
    except NameError:
        pass

    token = input("Enter Sms Pin: ")
    auth_res, error = api.auth.verify(service.id, user.id, api.auth.METHOD_SMS, token, auth_id)

    if auth_res:
        break

    print("Not Verified... Reason: {}".format(error['result']))

print("Verified Successfully!")
|
|
265f8c48f4b257287dd004ba783a8aa6f94bb870
|
Add Latin params file
|
cltk/tokenize/latin/params.py
|
cltk/tokenize/latin/params.py
|
Python
| 0.000001 |
@@ -0,0 +1,820 @@
+""" Params: Latin
"""

__author__ = ['Patrick J. Burns <[email protected]>']
__license__ = 'MIT License.'

PRAENOMINA = ['a', 'agr', 'ap', 'c', 'cn', 'd', 'f', 'k', 'l', "m'", 'm', 'mam', 'n', 'oct', 'opet', 'p', 'post', 'pro', 'q', 's', 'ser', 'sert', 'sex', 'st', 't', 'ti', 'v', 'vol', 'vop', 'a', 'ap', 'c', 'cn', 'd', 'f', 'k', 'l', 'm', "m'", 'mam', 'n', 'oct', 'opet', 'p', 'paul', 'post', 'pro', 'q', 'ser', 'sert', 'sex', 'sp', 'st', 'sta', 't', 'ti', 'v', 'vol', 'vop']

CALENDAR = ['ian', 'febr', 'mart', 'apr', 'mai', 'iun', 'iul', 'aug', 'sept', 'oct', 'nov', 'dec'] \
    + ['kal', 'non', 'id', 'a.d']

MISC = ['coll', 'cos', 'ord', 'pl.', 's.c', 'suff', 'trib']

ABBREVIATIONS = set(
    PRAENOMINA +
    CALENDAR +
    MISC
    )
|
|
0a9efede94c64d114cf536533b94a47210a90604
|
Add viper.common.constants.py
|
viper/common/constants.py
|
viper/common/constants.py
|
Python
| 0 |
@@ -0,0 +1,256 @@
+# This file is part of Viper - https://github.com/botherder/viper
# See the file 'LICENSE' for copying permission.

import os

_current_dir = os.path.abspath(os.path.dirname(__file__))
VIPER_ROOT = os.path.normpath(os.path.join(_current_dir, "..", ".."))
|
|
2d7ea21c2d9171a79298866bf02abf64b849be0e
|
add a simple info cog
|
dog/ext/info.py
|
dog/ext/info.py
|
Python
| 0 |
@@ -0,0 +1,1657 @@
+from textwrap import dedent

import discord
from discord.ext.commands import guild_only
from lifesaver.bot import Cog, group, Context
from lifesaver.utils import human_delta


class Info(Cog):
    """A cog that provides information about various entities like guilds or members."""

    @group(aliases=['guild', 'guild_info', 'server_info'], invoke_without_command=True)
    @guild_only()
    async def server(self, ctx: Context):
        """Views information about this server."""
        embed = discord.Embed(title=ctx.guild.name)
        embed.set_thumbnail(url=ctx.guild.icon_url)
        embed.set_footer(text=f'Owned by {ctx.guild.owner}', icon_url=ctx.guild.owner.avatar_url)

        g: discord.Guild = ctx.guild
        n_humans = sum(1 for m in g.members if not m.bot)
        n_bots = len(g.members) - n_humans
        embed.description = dedent(f"""\
            {n_humans} humans, {n_bots} bots ({n_humans + n_bots} members)

            Created {g.created_at}
            {human_delta(g.created_at)} ago
        """)

        embed.add_field(name='Entities', value=dedent(f"""\
            {len(g.text_channels)} text channels, {len(g.voice_channels)} voice channels, {len(g.categories)} categories
            {len(g.roles)} roles
        """))

        await ctx.send(embed=embed)

    @server.command(aliases=['icon_url'])
    @guild_only()
    async def icon(self, ctx: Context):
        """Sends this server's icon."""
        if not ctx.guild.icon_url:
            await ctx.send('No server icon.')
            return

        await ctx.send(ctx.guild.icon_url_as(format='png'))


def setup(bot):
    bot.add_cog(Info(bot))
|
|
eff85f039674ca9fe69294ca2e81644dc4ff4cb6
|
add celery for all notification mail
|
gnowsys-ndf/gnowsys_ndf/ndf/views/tasks.py
|
gnowsys-ndf/gnowsys_ndf/ndf/views/tasks.py
|
Python
| 0 |
@@ -0,0 +1,1493 @@
+from celery import task
from django.contrib.auth.models import User
from django.template.loader import render_to_string
from django.contrib.sites.models import Site

from gnowsys_ndf.notification import models as notification
from gnowsys_ndf.ndf.models import Node
from gnowsys_ndf.ndf.models import node_collection, triple_collection
import json

try:
    from bson import ObjectId
except ImportError:  # old pymongo
    from pymongo.objectid import ObjectId

sitename = Site.objects.all()[0]

@task
def task_set_notify_val(request_user_id, group_id, msg, activ, to_user):
    '''
    Attach notification mail to celery task
    '''
    request_user = User.objects.get(id=request_user_id)
    to_send_user = User.objects.get(id=to_user)
    try:
        group_obj = node_collection.one({'_id': ObjectId(group_id)})
        site = sitename.name.__str__()
        objurl = "http://test"
        render = render_to_string(
            "notification/label.html",
            {
                'sender': request_user.username,
                'activity': activ,
                'conjunction': '-',
                'object': group_obj,
                'site': site,
                'link': objurl
            }
        )
        notification.create_notice_type(render, msg, "notification")
        notification.send([to_send_user], render, {"from_user": request_user})
        return True
    except Exception as e:
        print "Error in sending notification- "+str(e)
        return False
|
|
f956af85b27d104e84754b4d93a761b82ae39831
|
add external_iterate.py
|
external_iterate.py
|
external_iterate.py
|
Python
| 0 |
@@ -0,0 +1,2654 @@
+#!/usr/bin/env python

"""Compile a Myrial program into logical relational algebra."""

import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
import raco.scheme
from raco import algebra
from raco import myrialang
from raco.compile import optimize
from raco.language import MyriaAlgebra

import argparse
import json
import os
import sys

def evaluate(plan):
    if isinstance(plan, algebra.DoWhile):
        evaluate(plan.left)
        evaluate(plan.right)
    elif isinstance(plan, algebra.Sequence):
        for child in plan.children():
            evaluate(child)
    else:
        logical = str(plan)
        physical = optimize([('', plan)], target=MyriaAlgebra, source=algebra.LogicalAlgebra)
        phys = myrialang.compile_to_json(logical, logical, physical)
        print phys
        json.dumps(phys)

def print_pretty_plan(plan, indent=0):
    if isinstance(plan, algebra.DoWhile):
        print '%s%s' % (' ' * indent, plan.shortStr())
        print_pretty_plan(plan.left, indent + 4)
        print_pretty_plan(plan.right, indent + 4)
    elif isinstance(plan, algebra.Sequence):
        print '%s%s' % (' ' * indent, plan.shortStr())
        for child in plan.children():
            print_pretty_plan(child, indent + 4)
    else:
        print '%s%s' % (' ' * indent, plan)

def parse_options(args):
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', dest='parse_only',
                        help="Parse only", action='store_true')
    parser.add_argument('file', help='File containing Myrial source program')

    ns = parser.parse_args(args)
    return ns

class FakeCatalog(object):
    def __init__(self, catalog):
        self.catalog = catalog

    def get_scheme(self, relation_key):
        return raco.Scheme(self.catalog[relation_key])

    @classmethod
    def load_from_file(cls, path):
        with open(path) as fh:
            return cls(eval(fh.read()))

def main(args):
    opt = parse_options(args)

    # Search for a catalog definition file
    catalog_path = os.path.join(os.path.dirname(opt.file), 'catalog.py')
    catalog = None
    if os.path.exists(catalog_path):
        catalog = FakeCatalog.load_from_file(catalog_path)

    _parser = parser.Parser()
    processor = interpreter.StatementProcessor(catalog)

    with open(opt.file) as fh:
        statement_list = _parser.parse(fh.read())

    if opt.parse_only:
        print statement_list
    else:
        processor.evaluate(statement_list)
        plan = processor.get_physical_plan()
        evaluate(plan)

    return 0

if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
|
|
9f3d384cfcab3cbfd89d231f2a59c270e743dd33
|
Add Raises in the docstring of tf.histogram_fixed_width_bins
|
tensorflow/python/ops/histogram_ops.py
|
tensorflow/python/ops/histogram_ops.py
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-short-docstring-punctuation
"""Histograms.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.util.tf_export import tf_export


@tf_export('histogram_fixed_width_bins')
def histogram_fixed_width_bins(values,
                               value_range,
                               nbins=100,
                               dtype=dtypes.int32,
                               name=None):
  """Bins the given values for use in a histogram.

  Given the tensor `values`, this operation returns a rank 1 `Tensor`
  representing the indices of a histogram into which each element
  of `values` would be binned. The bins are equal width and
  determined by the arguments `value_range` and `nbins`.

  Args:
    values: Numeric `Tensor`.
    value_range: Shape [2] `Tensor` of same `dtype` as `values`.
      values <= value_range[0] will be mapped to hist[0],
      values >= value_range[1] will be mapped to hist[-1].
    nbins: Scalar `int32 Tensor`. Number of histogram bins.
    dtype: dtype for returned histogram.
    name: A name for this operation (defaults to 'histogram_fixed_width').

  Returns:
    A `Tensor` holding the indices of the binned values whose shape matches
    `values`.

  Examples:

  ```python
  # Bins will be: (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
  nbins = 5
  value_range = [0.0, 5.0]
  new_values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]

  with tf.compat.v1.get_default_session() as sess:
    indices = tf.histogram_fixed_width_bins(new_values, value_range, nbins=5)
    variables.global_variables_initializer().run()
    sess.run(indices) => [0, 0, 1, 2, 4]
  ```
  """
  with ops.name_scope(name, 'histogram_fixed_width_bins',
                      [values, value_range, nbins]):
    values = ops.convert_to_tensor(values, name='values')
    shape = array_ops.shape(values)

    values = array_ops.reshape(values, [-1])
    value_range = ops.convert_to_tensor(value_range, name='value_range')
    nbins = ops.convert_to_tensor(nbins, dtype=dtypes.int32, name='nbins')
    nbins_float = math_ops.cast(nbins, values.dtype)

    # Map tensor values that fall within value_range to [0, 1].
    scaled_values = math_ops.truediv(
        values - value_range[0],
        value_range[1] - value_range[0],
        name='scaled_values')

    # map tensor values within the open interval value_range to {0,.., nbins-1},
    # values outside the open interval will be zero or less, or nbins or more.
    indices = math_ops.floor(nbins_float * scaled_values, name='indices')

    # Clip edge cases (e.g. value = value_range[1]) or "outliers."
    indices = math_ops.cast(
        clip_ops.clip_by_value(indices, 0, nbins_float - 1), dtypes.int32)
    return array_ops.reshape(indices, shape)


@tf_export('histogram_fixed_width')
def histogram_fixed_width(values,
                          value_range,
                          nbins=100,
                          dtype=dtypes.int32,
                          name=None):
  """Return histogram of values.

  Given the tensor `values`, this operation returns a rank 1 histogram counting
  the number of entries in `values` that fell into every bin. The bins are
  equal width and determined by the arguments `value_range` and `nbins`.

  Args:
    values: Numeric `Tensor`.
    value_range: Shape [2] `Tensor` of same `dtype` as `values`.
      values <= value_range[0] will be mapped to hist[0],
      values >= value_range[1] will be mapped to hist[-1].
    nbins: Scalar `int32 Tensor`. Number of histogram bins.
    dtype: dtype for returned histogram.
    name: A name for this operation (defaults to 'histogram_fixed_width').

  Returns:
    A 1-D `Tensor` holding histogram of values.

  Examples:

  ```python
  # Bins will be: (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
  nbins = 5
  value_range = [0.0, 5.0]
  new_values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]

  with tf.compat.v1.get_default_session() as sess:
    hist = tf.histogram_fixed_width(new_values, value_range, nbins=5)
    variables.global_variables_initializer().run()
    sess.run(hist) => [2, 1, 1, 0, 2]
  ```
  """
  with ops.name_scope(name, 'histogram_fixed_width',
                      [values, value_range, nbins]) as name:
    # pylint: disable=protected-access
    return gen_math_ops._histogram_fixed_width(
        values, value_range, nbins, dtype=dtype, name=name)
    # pylint: enable=protected-access
|
Python
| 0 |
@@ -2256,16 +2256,189 @@
 lues`.

+  Raises:
+    TypeError: If any unsupported dtype is provided.
+    tf.errors.InvalidArgumentError: If value_range does not
+      satisfy value_range[0] < value_range[1].
+
Exampl
|
9184d4cebf95ee31836970bedffaddc3bfaa2c2d
|
Prepare v2.20.8.dev
|
flexget/_version.py
|
flexget/_version.py
|
"""
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '2.20.7'
|
Python
| 0.000004 |
@@ -442,7 +442,11 @@
.20.
-7
+8.dev
 '
|
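Note that the hunk headers in this dataset count characters, not lines: in the record above, @@ -442,7 +442,11 @@ replaces a 7-character span (".20.7'" plus the trailing newline) with an 11-character one (".20.8.dev'" plus newline) inside old_contents. A minimal sketch of applying such a hunk under that char-offset assumption; the function name is illustrative:

    def apply_char_hunk(old: str, start: int, before: str, after: str) -> str:
        # start is taken as the 1-based character offset from the @@ header;
        # before/after are the full old and new hunk bodies (context + change).
        i = start - 1
        assert old[i:i + len(before)] == before, "context mismatch"
        return old[:i] + after + old[i + len(before):]

    # For the FlexGet record above:
    # new_contents = apply_char_hunk(old_contents, 442, ".20.7'\n", ".20.8.dev'\n")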
1bf65c4b18b1d803b9515f80056c4be5790e3bde
|
Prepare v1.2.276.dev
|
flexget/_version.py
|
flexget/_version.py
|
"""
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.275'
|
Python
| 0.000002 |
@@ -443,7 +443,11 @@
2.27
-5
+6.dev
 '
|
f4a4b733445abba45a0a168dde9b7c10248688a6
|
Prepare v1.2.318.dev
|
flexget/_version.py
|
flexget/_version.py
|
"""
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.317'
|
Python
| 0.000002 |
@@ -443,7 +443,11 @@
2.31
-7
+8.dev
 '
|
860cf7b9743744c9d21796b227cf21d684fb5519
|
Add test_modulepickling_change_cache_dir
|
test/test_cache.py
|
test/test_cache.py
|
Python
| 0.000003 |
@@ -0,0 +1,990 @@
+from jedi import settings%0Afrom jedi.cache import ParserCacheItem, _ModulePickling%0A%0A%0AModulePickling = _ModulePickling()%0A%0A%0Adef test_modulepickling_change_cache_dir(monkeypatch, tmpdir):%0A %22%22%22%0A ModulePickling should not save old cache when cache_directory is changed.%0A%0A See: %60#168 %3Chttps://github.com/davidhalter/jedi/pull/168%3E%60_%0A %22%22%22%0A dir_1 = str(tmpdir.mkdir('first'))%0A dir_2 = str(tmpdir.mkdir('second'))%0A%0A item_1 = ParserCacheItem('fake parser 1')%0A item_2 = ParserCacheItem('fake parser 2')%0A path_1 = 'fake path 1'%0A path_2 = 'fake path 2'%0A%0A monkeypatch.setattr(settings, 'cache_directory', dir_1)%0A ModulePickling.save_module(path_1, item_1)%0A cached = ModulePickling.load_module(path_1, item_1.change_time - 1)%0A assert cached == item_1.parser%0A%0A monkeypatch.setattr(settings, 'cache_directory', dir_2)%0A ModulePickling.save_module(path_2, item_2)%0A cached = ModulePickling.load_module(path_1, item_1.change_time - 1)%0A assert cached is None%0A
|
|
8a511662948bff2f878d5af31fd45d02eee6dd4b
|
MigrationHistory.applied should be NOT NULL
|
south/models.py
|
south/models.py
|
from django.db import models


class MigrationHistory(models.Model):
    app_name = models.CharField(max_length=255)
    migration = models.CharField(max_length=255)
    applied = models.DateTimeField(blank=True, null=True)

    class Meta:
        unique_together = (('app_name', 'migration'),)

    @classmethod
    def for_migration(cls, migration):
        try:
            return cls.objects.get(app_name=migration.app_name(),
                                   migration=migration.name())
        except cls.DoesNotExist:
            return cls(app_name=migration.app_name(),
                       migration=migration.name())

    def get_migrations(self):
        from south.migration import Migrations
        return Migrations(self.app_name)

    def get_migration(self):
        return self.get_migrations().migration(self.migration)
|
Python
| 1 |
@@ -207,19 +207,8 @@
True
-, null=True
 )
|
d082eb41c2ccef7178d228896a7658fe52bcbdec
|
Create directory for useless symbols remove
|
tests/UselessSymbolsRemove/__init__.py
|
tests/UselessSymbolsRemove/__init__.py
|
Python
| 0 |
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 17.08.2017 14:38
:Licence GNUv3
Part of grammpy-transforms

"""
|
|
3c82f0228095b2616b35a2881f51c93999fdd79b
|
Test models/FieldMapper
|
tests/test_models/test_field_mapper.py
|
tests/test_models/test_field_mapper.py
|
Python
| 0 |
@@ -0,0 +1,1859 @@
+import json
import jsonschema
from django.test import TestCase
from core.models import FieldMapper
from tests.utils import json_string

class FieldMapperTestCase(TestCase):
    @classmethod
    def setUpTestData(cls):
        cls.attributes = {
            'name': 'Test Field Mapper',
            'config_json': json_string({"add_literals":{"foo":"bar"}}),
            'field_mapper_type': 'xml2kvp'
        }
        cls.field_mapper = FieldMapper(**cls.attributes)

    def test_str(self):
        self.assertEqual('Test Field Mapper, FieldMapper: #{}'.format(FieldMapperTestCase.field_mapper.id),
                         format(FieldMapperTestCase.field_mapper))

    def test_as_dict(self):
        as_dict = FieldMapperTestCase.field_mapper.as_dict()
        for k, v in FieldMapperTestCase.attributes.items():
            self.assertEqual(as_dict[k], v)

    def test_config(self):
        self.assertEqual(json.loads(FieldMapperTestCase.attributes['config_json']),
                         FieldMapperTestCase.field_mapper.config)

    def test_config_none(self):
        no_config_mapper = FieldMapper(name='new field mapper')
        self.assertIsNone(no_config_mapper.config)

    def test_validate_config_json(self):
        self.assertIsNone(FieldMapperTestCase.field_mapper.validate_config_json())

    def test_validate_config_json_invalid(self):
        invalid_config_mapper = FieldMapper(config_json=json_string({"add_literals": "invalid value"}))
        self.assertRaises(jsonschema.exceptions.ValidationError,
                          invalid_config_mapper.validate_config_json)

    def test_validate_config_json_provided(self):
        invalid_config_mapper = FieldMapper(config_json=json_string({"add_literals": "invalid value"}))
        self.assertIsNone(invalid_config_mapper.validate_config_json(json_string({"add_literals":{"foo":"bar"}})))
|
|
ebe10d39064410fc49ac90e38339a54d0ed47c80
|
update hooks for sqlalchemy
|
setup/hooks/hook-sqlalchemy.py
|
setup/hooks/hook-sqlalchemy.py
|
Python
| 0 |
@@ -0,0 +1,2096 @@
+__author__ = 'stephanie'
# Copyright (C) 2009, Giovanni Bajo
# Based on previous work under copyright (c) 2001, 2002 McMillan Enterprises, Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA

# Contributed by Greg Copeland

from PyInstaller.hooks.hookutils import exec_statement

# include most common database bindings
# some database bindings are detected and include some
# are not. We should explicitly include database backends.
hiddenimports = ['pysqlite2', 'MySQLdb', 'psycopg2', 'pyodbc', 'pymysql']

print "in custom sql alchemy hook "

# sqlalchemy.databases package from pre 0.6 sqlachemy versions
databases = exec_statement("import sqlalchemy.databases;print sqlalchemy.databases.__all__")
databases = eval(databases.strip())

for n in databases:
    hiddenimports.append("sqlalchemy.databases." + n)

# sqlalchemy.orm package from pre 0.6 sqlachemy versions
orm = exec_statement("import sqlalchemy.ormprint sqlalchemy.orm.__all__")
orm = eval(orm.strip())

for n in orm:
    hiddenimports.append("sqlalchemy.orm." + n)


# sqlalchemy.dialects package from 0.6 and newer sqlachemy versions
version = exec_statement('import sqlalchemy; print sqlalchemy.__version__')
is_alch06 = version >= '0.6'

if is_alch06:
    dialects = exec_statement("import sqlalchemy.dialects;print sqlalchemy.dialects.__all__")
    dialects = eval(dialects.strip())

    for n in databases:
        hiddenimports.append("sqlalchemy.dialects." + n)
|
|
7f8f5e14f88304b272423ab12728d5329a2ba808
|
use raw strings for urls
|
shop/urls/cart.py
|
shop/urls/cart.py
|
from django.conf.urls.defaults import url, patterns
from shop.views.cart import CartDetails, CartItemDetail

urlpatterns = patterns('',
    url(r'^delete/$', CartDetails.as_view(action='delete'),  # DELETE
        name='cart_delete'),
    url('^item/$', CartDetails.as_view(action='post'),  # POST
        name='cart_item_add'),
    url(r'^$', CartDetails.as_view(), name='cart'),  # GET
    url(r'^update/$', CartDetails.as_view(action='put'),
        name='cart_update'),
    # CartItems
    url('^item/(?P<id>[0-9]+)$', CartItemDetail.as_view(),
        name='cart_item'),
    url('^item/(?P<id>[0-9]+)/delete$',
        CartItemDetail.as_view(action='delete'),
        name='cart_item_delete'),
)
|
Python
| 0.000009 |
@@ -234,24 +234,25 @@
 '),
     url(
+r
 '^item/$', C
@@ -487,32 +487,33 @@
 rtItems
     url(
+r
 '^item/(?P<id>[0
@@ -582,16 +582,17 @@
     url(
+r
 '^item/(
|
bb38258e4d23d5b2c39eed4e0be5b00580bada34
|
Remove useless imports resulting from the merge of different branches
|
memopol2/main/views.py
|
memopol2/main/views.py
|
import time
from datetime import datetime

from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.utils import simplejson
from django.core import serializers
from django.conf import settings
from django.contrib.admin.views.decorators import staff_member_required

from couchdbkit import Server

from memopol2.util import get_couch_doc_or_404
from memopol2.main.models import Position, Database
from memopol2 import settings # TODO check this if neccessary and not obsoleted by django.conf import settings


def index_names(request):
    return render_to_response('index.html', {'meps_list': Database().get_meps_by_names()}, context_instance=RequestContext(request))

def index_groups(request):
    return render_to_response('index.html', {'groups': Database().get_groups()}, context_instance=RequestContext(request))

def index_countries(request):
    return render_to_response('index.html', {'countries': Database().get_countries()}, context_instance=RequestContext(request))

def index_by_country(request, country_code):
    return render_to_response('index.html', {'meps_list': Database().get_meps_by_country(country_code)}, context_instance=RequestContext(request))

def index_by_group(request, group):
    return render_to_response('index.html', {'meps_list': Database().get_meps_by_group(group)}, context_instance=RequestContext(request))

def mep(request, mep_id):
    data = Database().get_mep(mep_id)
    ctx = {'mep_id': mep_id, 'mep': mep, 'd': data }
    ctx['positions'] = Position.objects.filter(mep_id=mep_id)
    ctx['visible_count'] = len([ x for x in ctx['positions'] if x.visible ])
    return render_to_response('mep.html', ctx, context_instance=RequestContext(request))

def mep_raw(request, mep_id):
    mep_ = Database().get_mep(mep_id)
    jsonstr = simplejson.dumps(mep_, indent=4)
    ctx = {'mep_id': mep_id, 'mep': mep_, 'jsonstr': jsonstr}
    return render_to_response('mep_raw.html', ctx, context_instance=RequestContext(request))

def mep_addposition(request, mep_id):
    if not request.is_ajax():
        return HttpResponseServerError()
    results = {'success':False}
    # make sure the mep exists
    mep_ = Database().get_mep(mep_id)
    try:
        text = request.GET[u'text']
        if settings.DEBUG:
            if 'slow' in text:
                time.sleep(10)
            if 'fail' in text:
                raise Exception("Simulated failure ! (input contains 'fail' and DEBUG is on)")
        pos = Position(mep_id=mep_id, content=text)
        pos.submitter_username = request.user.username
        pos.submitter_ip = request.META["REMOTE_ADDR"]
        pos.submit_datetime = datetime.today()
        pos.moderated = False
        pos.visible = False
        pos.save()
        results = {'success':True}
    except:
        pass
    return HttpResponse(simplejson.dumps(results), mimetype='application/json')

@staff_member_required
def moderation(request):
    ctx = {}
    ctx['positions'] = Position.objects.filter(moderated=False)
    return render_to_response('moderation.html', ctx, context_instance=RequestContext(request))

@staff_member_required
def moderation_get_unmoderated_positions(request):
    if not request.is_ajax():
        return HttpResponseServerError()
    last_id = request.GET[u'last_id']
    positions = Position.objects.filter(moderated=False, id__gt=last_id)
    return HttpResponse(serializers.serialize('json', positions), mimetype='application/json')

@staff_member_required
def moderation_moderate_positions(request):
    if not request.is_ajax():
        return HttpResponseServerError()
    results = {'success':False}
    position = get_object_or_404(Position, pk=int(request.GET[u'pos_id']))
    try:
        position.moderated = True
        position.visible = (request.GET[u'decision'] == "1")
        position.save()
        results = {'success':True}
    except:
        pass
    return HttpResponse(simplejson.dumps(results), mimetype='application/json')
|
Python
| 0 |
@@ -436,203 +436,45 @@
ol2.
-util import get_couch_doc_or_404%0Afrom memopol2.main.models import Position, Database%0Afrom memopol2 import settings # TODO check this if neccessary and not obsoleted by django.conf import settings
+main.models import Position, Database
%0A%0Ade
|
45869cdf6087cd625db385ef52475d98c9842efa
|
add migen_local_install script
|
migen_local_install.py
|
migen_local_install.py
|
Python
| 0.000001 |
@@ -0,0 +1,163 @@
+import os%0Aos.system(%22git clone http://github.com/m-labs/migen%22)%0Aos.system(%22mv migen migen_tmp%22)%0Aos.system(%22mv migen_tmp/migen migen%22)%0Aos.system(%22rm -rf migen_tmp%22)
|
|
5f31e729ce6752c2f0a6b7f19f76c2a7e95636b9
|
Create friends-of-appropriate-ages.py
|
Python/friends-of-appropriate-ages.py
|
Python/friends-of-appropriate-ages.py
|
Python
| 0.000029 |
@@ -0,0 +1,1479 @@
+# Time:  O(a%5E2 + n), a is the number of ages,%0A#                     n is the number of people%0A# Space: O(a)%0A%0A# Some people will make friend requests.%0A# The list of their ages is given and ages%5Bi%5D is the age of the ith person.%0A#%0A# Person A will NOT friend request person B (B != A)%0A# if any of the following conditions are true:%0A#%0A# age%5BB%5D %3C= 0.5 * age%5BA%5D + 7%0A# age%5BB%5D %3E age%5BA%5D%0A# age%5BB%5D %3E 100 && age%5BA%5D %3C 100%0A# Otherwise, A will friend request B.%0A#%0A# Note that if A requests B, B does not necessarily request A.%0A# Also, people will not friend request themselves.%0A#%0A# How many total friend requests are made?%0A#%0A# Example 1:%0A#%0A# Input: %5B16,16%5D%0A# Output: 2%0A# Explanation: 2 people friend request each other.%0A# Example 2:%0A#%0A# Input: %5B16,17,18%5D%0A# Output: 2%0A# Explanation: Friend requests are made 17 -%3E 16, 18 -%3E 17.%0A# Example 3:%0A#%0A# Input: %5B20,30,100,110,120%5D%0A# Output: 3%0A# Explanation: Friend requests are made 110 -%3E 100, 120 -%3E 110, 120 -%3E 100.%0A#%0A# Notes:%0A# - 1 %3C= ages.length %3C= 20000.%0A# - 1 %3C= ages%5Bi%5D %3C= 120.%0A%0Atry:%0A    xrange          # Python 2%0Aexcept NameError:%0A    xrange = range  # Python 3%0A%0Aimport collections%0A%0A%0Aclass Solution(object):%0A    def numFriendRequests(self, ages):%0A        %22%22%22%0A        :type ages: List%5Bint%5D%0A        :rtype: int%0A        %22%22%22%0A        def request(a, b):%0A            return 0.5*a+7 %3C b %3C= a%0A%0A        c = collections.Counter(ages)%0A        return sum(int(request(a, b)) * c%5Ba%5D*(c%5Bb%5D-int(a == b))%0A                   for a in c%0A                   for b in c)%0A
|
|
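A quick sanity check of the Solution above against the three examples in its docstring:

sol = Solution()
print(sol.numFriendRequests([16, 16]))                 # 2
print(sol.numFriendRequests([16, 17, 18]))             # 2
print(sol.numFriendRequests([20, 30, 100, 110, 120]))  # 3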
9caf9d3bfaaff9d7721f611d9c351dd14f67daa6
|
add log progress
|
log_progress.py
|
log_progress.py
|
Python
| 0.000001 |
@@ -0,0 +1,1459 @@
+def log_progress(sequence, every=None, size=None):%0A from ipywidgets import IntProgress, HTML, VBox%0A from IPython.display import display%0A%0A is_iterator = False%0A if size is None:%0A try:%0A size = len(sequence)%0A except TypeError:%0A is_iterator = True%0A if size is not None:%0A if every is None:%0A if size %3C= 200:%0A every = 1%0A else:%0A every = size / 200 # every 0.5%25%0A else:%0A assert every is not None, 'sequence is iterator, set every'%0A%0A if is_iterator:%0A progress = IntProgress(min=0, max=1, value=1)%0A progress.bar_style = 'info'%0A else:%0A progress = IntProgress(min=0, max=size, value=0)%0A label = HTML()%0A box = VBox(children=%5Blabel, progress%5D)%0A display(box)%0A%0A index = 0%0A try:%0A for index, record in enumerate(sequence, 1):%0A if index == 1 or index %25 every == 0:%0A if is_iterator:%0A label.value = '%7Bindex%7D / ?'.format(index=index)%0A else:%0A progress.value = index%0A label.value = u'%7Bindex%7D / %7Bsize%7D'.format(%0A index=index,%0A size=size%0A )%0A yield record%0A except:%0A progress.bar_style = 'danger'%0A raise%0A else:%0A progress.bar_style = 'success'%0A progress.value = index%0A label.value = unicode(index or '?')
|
|
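A hypothetical notebook cell using the generator above (ipywidgets must be available; the workload is made up):

squares = []
for n in log_progress(range(1000), every=10):
    squares.append(n * n)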
d9c7ce7f2b47bee3b2e657157fe4df8f9a00973a
|
Create smiles_preview.py
|
smiles_preview.py
|
smiles_preview.py
|
Python
| 0.000658 |
@@ -0,0 +1,1393 @@
+import sublime%0Aimport sublime_plugin%0Aimport base64%0Aimport os%0Aimport re%0A%0Aclass SmilesPreview(sublime_plugin.EventListener):%0A    def on_hover(self, view, point, hover_zone):%0A        if (hover_zone == sublime.HOVER_TEXT):%0A            # locate smiles in the string. smiles string should be at the beginning and followed by tab (cxsmiles)%0A            hovered_line_text = view.substr(view.line(point)).strip()%0A            smiles_regex = re.compile(r'%5E(%5B%5EJ%5D%5BA-Za-z0-9@+%5C-%5C%5B%5C%5D%5C(%5C)%5C%5C%5C/%25=#$%5D+)%5Ct', re.IGNORECASE)%0A            if (smiles_regex.match(hovered_line_text)):%0A                smiles_string = smiles_regex.match(hovered_line_text).group(1)  # group(1) is the SMILES without the trailing tab%0A                file_name = %221.png%22%0A                os.system(%22obabel -ismi -:%22 + smiles_string + %22 -opng -O %22 + file_name)%0A                # Check that file exists%0A                if (file_name and os.path.isfile(file_name)):%0A                    encoded = str(base64.b64encode(%0A                        open(file_name, %22rb%22).read()%0A                        ), %22utf-8%22)%0A                    view.show_popup('%3Cimg src=%22data:image/png;base64,' + %0A                                    encoded + %0A                                    '%22%3E', %0A                                    flags=sublime.HIDE_ON_MOUSE_MOVE_AWAY, %0A                                    location=point)%0A                    return%0A            return%0A        return%0A    %0A
|
|
fbeb3d04b16afa0b2daf49597a07c32b0d72630c
|
Add missing mica.report __init__ to project
|
mica/report/__init__.py
|
mica/report/__init__.py
|
Python
| 0.000019 |
@@ -0,0 +1,33 @@
+from .report import main, update%0A
|
|
dca9931e894c1e5cae9f5229b04cc72c31eef5f5
|
Create a.py
|
a.py
|
a.py
|
Python
| 0.000489 |
@@ -0,0 +1,47 @@
+# this code was written in Python 2%0A%0Aa = 3%0A%0Aprint a%0A
|
|
2c0a06a8e460de06dd9a929baa02e2d369fbe0a6
|
Prepare v2.17.4.dev
|
flexget/_version.py
|
flexget/_version.py
|
"""
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '2.17.3'
|
Python
| 0.000003 |
@@ -442,7 +442,11 @@
.17.
-3
+4.dev
'%0A
|
b815b2e94814e86ba2e4713d15aa2143594344bc
|
Prepare v2.13.13.dev
|
flexget/_version.py
|
flexget/_version.py
|
"""
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '2.13.12'
|
Python
| 0.000004 |
@@ -439,11 +439,15 @@
'2.13.1
-2
+3.dev
'%0A
|
a30a6104554fb39d068fd8aadbb128dff1d482fb
|
Create dl.py
|
dl.py
|
dl.py
|
Python
| 0 |
@@ -0,0 +1,2161 @@
+#!/usr/bin/env python%0Aimport requests, urllib2, os, shutil, sys, futures%0Afrom time import sleep%0A%0Adownload_board = sys.argv%5B1%5D%0A%0Adef download(**kwargs):%0A with open('./'+download_board+'/'+kwargs%5B'filename'%5D, 'wb') as handle:%0A request = requests.get(kwargs%5B'url'%5D, stream=True)%0A%0A for block in request.iter_content(1024):%0A if not block:%0A break%0A handle.write(block)%0A%0Aif os.path.exists(%22stopcron.txt%22):%0A print %22stopcron.txt exists, downloader is aborting%22%0A exit()%0A%0Aif not os.path.exists(download_board+%22-modified.txt%22):%0A shutil.copy(%22.backup_modified.txt%22, download_board+%22-modified.txt%22)%0A%0Aif os.path.getsize(download_board+%22-modified.txt%22) == 0:%0A shutil.copy(%22.backup_modified.txt%22, download_board+%22-modified.txt%22)%0A%0Apages = %5B%5D%0Awith open(download_board+%22-modified.txt%22, 'r') as f:%0A modified = %5Bs.strip(%22%5Cn%22) for s in f.readlines()%5D%0A%0Arealch = 0%0Afor a in xrange(15):%0A p = requests.get(%22http://a.4cdn.org/%22+download_board+%22/%25s.json%22 %25 str(a), headers=%7B'If-Modified-Since': str(modified%5Ba%5D)%7D)%0A if p.status_code == 200 or len(modified%5Ba%5D) == 0:%0A pages.append(p.json())%0A modified%5Ba%5D = p.headers%5B'Last-Modified'%5D %0A sleep(1.0)%0A a = a + 1%0A%0Awith open(download_board+%22-modified.txt%22, 'w') as f:%0A for a in modified:%0A f.write(a+%22%5Cn%22)%0Alinks = %5B%5D%0A%0Aalready = 0%0Alinks = %5B%5D%0Afilenames = %5B%5D%0Afor page in pages:%0A for thread in page%5B'threads'%5D:%0A for post in thread%5B'posts'%5D:%0A if u'filename' in post:%0A filename_clean = post%5Bu'filename'%5D%0A ext_clean = post%5Bu'ext'%5D%0A if 'filename' in post and not os.path.exists(%22./%22+download_board+%22/%22+filename_clean+ext_clean):%0A links.append(%22http://i.4cdn.org/%22+download_board+%22/%22+filename_clean+ext_clean)%0A filenames.append(filename_clean+ext_clean)%0A%0Aif not os.path.exists(%22./%22+download_board+%22/%22):%0A os.makedirs(%22./%22+download_board+%22/%22)%0A%0Awith futures.ThreadPoolExecutor(max_workers=10) as e:%0A for i in xrange(len(links)):%0A e.submit(download, url=links%5Bi%5D, filename=filenames%5Bi%5D)%0Aprint %22%5Bchanrip%5D %25s downloaded%22 %25 (str(len(links)))%0A
|
|
fe0d5bb1533723bc83ce37b0f10e283ef37168c4
|
make sure cplot function args are mpc
|
mpmath/visualization.py
|
mpmath/visualization.py
|
"""
Plotting (requires matplotlib)
"""
from mptypes import inf, isnan, arange, complex_types
from functions import sqrt, arg
from colorsys import hsv_to_rgb, hls_to_rgb
plot_ignore = (ValueError, ArithmeticError, ZeroDivisionError)
def plot(f, xlim=[-5,5], ylim=None, points=200, file=None):
"""
Shows a simple 2D plot of a function or list of functions over
a given interval. Some examples:
plot(lambda x: exp(x)*li(x), [1, 4])
plot([cos, sin], [-4, 4])
plot([fresnels, fresnelc], [-4, 4])
plot([sqrt, cbrt], [-4, 4])
plot(lambda t: zeta(0.5+t*j), [-20, 20])
plot([floor, ceil, abs, sign], [-5, 5])
Points where the function raises a numerical exception or
returns an infinite value are removed from the graph.
For parts where the function assumes complex values, the
real part is plotted with dashes and the imaginary part
is plotted with dots.
NOTE: This function requires matplotlib (pylab).
"""
import pylab
if not isinstance(f, (tuple, list)):
f = [f]
a, b = xlim
colors = ['b', 'r', 'g', 'm', 'k']
for n, func in enumerate(f):
x = arange(a, b, (b-a)/float(points))
segments = []
segment = []
in_complex = False
for i in xrange(len(x)):
try:
v = func(x[i])
if isnan(v) or abs(v) == inf:
raise ValueError
if isinstance(v, complex_types):
re = float(v.real)
im = float(v.imag)
if not in_complex:
in_complex = True
segments.append(segment)
segment = []
segment.append((float(x[i]), re, im))
else:
if in_complex:
in_complex = False
segments.append(segment)
segment = []
segment.append((float(x[i]), v))
except plot_ignore:
if segment:
segments.append(segment)
segment = []
if segment:
segments.append(segment)
for segment in segments:
x = [s[0] for s in segment]
y = [s[1] for s in segment]
if not x:
continue
c = colors[n % len(colors)]
if len(segment[0]) == 3:
z = [s[2] for s in segment]
pylab.plot(x, y, '--'+c, linewidth=1.5)
pylab.plot(x, z, ':'+c, linewidth=1.5)
else:
pylab.plot(x, y, c, linewidth=1.5)
pylab.xlim(xlim)
if ylim:
pylab.ylim(ylim)
pylab.grid(True)
if file:
pylab.savefig(file)
else:
pylab.show()
def default_color_function(z):
pi = 3.1415926535898
a = (float(arg(z)) + pi) / (2*pi)
a = (a + 0.5) % 1.0
b = 1.0 - float(1/(1.0+abs(z)**0.3))
return hls_to_rgb(a, b, 1.0)
def cplot(f, re=[-5,5], im=[-5,5], points=2000, color=default_color_function,
verbose=False, file=None):
"""
Plots the given complex-valued function over a rectangular part
of the complex plane given by the pairs of intervals re and im.
For example:
cplot(lambda z: z, [-2, 2], [-10, 10])
cplot(exp)
cplot(zeta, [0, 1], [0, 50])
By default, the complex argument (phase) is shown as color and
the magnitude is show as brightness. You can also supply a
custom color function ('color'). This function should take a
complex number as input and return an RGB 3-tuple containing
floats in the range 0.0-1.0.
To obtain a sharp image, the number of points may need to be
increased to 100,000 or thereabout. Since evaluating the
function that many times is likely to be slow, the 'verbose'
option is useful to display progress.
"""
import pylab
rea, reb = re
ima, imb = im
dre = reb - rea
dim = imb - ima
M = int(sqrt(points*dre/dim)+1)
N = int(sqrt(points*dim/dre)+1)
x = pylab.linspace(rea, reb, M)
y = pylab.linspace(ima, imb, N)
# Note: we have to be careful to get the right rotation.
# Test with these plots:
# cplot(lambda z: z if z.real < 0 else 0)
# cplot(lambda z: z if z.imag < 0 else 0)
w = pylab.zeros((N, M, 3))
for n in xrange(N):
for m in xrange(M):
z = x[m] + y[n]*1j
try:
v = color(f(z))
except plot_ignore:
v = (0.0, 0.0, 0.0)
w[n,m] = v
if verbose:
print n, "of", N
pylab.imshow(w, extent=(rea, reb, ima, imb), origin='lower')
if file:
pylab.savefig(file)
else:
pylab.show()
|
Python
| 0.000806 |
@@ -56,16 +56,21 @@
s import
+ mpc,
inf, is
@@ -4594,22 +4594,23 @@
z =
+mpc(
x%5Bm%5D
- +
+,
y%5Bn%5D
-*1j
+)
%0D%0A
|
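The point of the fix: x[m] + y[n]*1j built a double-precision Python complex, whereas mpc(x[m], y[n]) keeps the grid point in mpmath's arbitrary-precision type, so the plotted function never sees a pre-rounded argument. A small illustration (assumes mpmath is installed):

from mpmath import mp, mpc, sqrt

mp.dps = 30
z_float = 0.1 + 0.2j      # rounded to double before mpmath ever sees it
z_mp = mpc('0.1', '0.2')  # exact decimal inputs, full working precision
print(sqrt(z_float))
print(sqrt(z_mp))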
0b90446471805276ed141800337e6044ce130b93
|
Test for the bugfix of Project.last_update
|
akvo/rsr/tests/models/test_project.py
|
akvo/rsr/tests/models/test_project.py
|
Python
| 0 |
@@ -0,0 +1,1965 @@
+# -*- coding: utf-8 -*-%0A%0A# Akvo Reporting is covered by the GNU Affero General Public License.%0A# See more details in the license.txt file located at the root folder of the Akvo RSR module.%0A# For additional details on the GNU license please see %3C http://www.gnu.org/licenses/agpl.html %3E.%0A%0A%0Afrom unittest import TestCase%0A%0Afrom django.contrib.auth import get_user_model%0A%0Afrom akvo.rsr.models import Project%0Afrom akvo.rsr.models import ProjectUpdate, PayPalGateway, MollieGateway, PaymentGatewaySelector%0A%0A%0Aclass ProjectModelTestCase(TestCase):%0A %22%22%22Tests for the project model%22%22%22%0A%0A def test_project_last_update(self):%0A %22%22%22 Test Project.last_update. The field is a denormalization keeping track of the latest%0A update for a project, if any. When deletion of updates was introduced, a bug occurs when%0A deleting the latest update, as the Project.last_update field was set to None in that case.%0A The tests check that the fix for this bug works correctly%0A %22%22%22%0A # setup needed model instances%0A paypal = PayPalGateway.objects.create(name='paypal')%0A mollie = MollieGateway.objects.create(name='mollie')%0A project_1 = Project.objects.create(title=%22Test project 1%22)%0A user_1 = get_user_model().objects.create(email='[email protected]')%0A update_1 = ProjectUpdate.objects.create(title=%22Test update 1%22, project=project_1, user=user_1)%0A update_2 = ProjectUpdate.objects.create(title=%22Test update 2%22, project=project_1, user=user_1)%0A%0A # check that update_2 is the latest%0A self.assertTrue(update_1.created_at %3C update_2.created_at)%0A%0A # check that update_2 is Project.last_update%0A self.assertEqual(project_1.last_update, update_2)%0A%0A update_2.delete()%0A # now update_1 should be last_update%0A self.assertEqual(project_1.last_update, update_1)%0A%0A update_1.delete()%0A # now last_update is None%0A self.assertEqual(project_1.last_update, None)%0A
|
|
30381ced0d7535428398b3df5f1caffd684b20d5
|
Implement K means network.
|
KMeansnet.py
|
KMeansnet.py
|
Python
| 0 |
@@ -0,0 +1,1041 @@
+import numpy as np%0A%0Aclass Kmeansnet(object):%0A%09def __init__(self, data, clusters, eta):%0A%09%09self.data = data%0A%09%09self.n_dim = data.shape%5B1%5D%0A%09%09self.num_clusters = clusters%0A%09%09self.weights = np.random.rand(self.num_clusters, self.n_dim)%0A%09%09self.eta = eta%0A%0A%09def calc_dist(self, inp, weights):%0A%09%09return np.sum((weights * inp), axis=1)%0A%0A%09def normalise_data(self, data):%0A%09%09# normalise each row of the given data, not of self.data%0A%09%09normalisers = np.sqrt(np.sum(data ** 2, axis=1)).reshape(data.shape%5B0%5D, 1)%0A%09%09return data / normalisers%0A%0A%09def train(self, epochs):%0A%09%09self.data = self.normalise_data(self.data)%0A%0A%09%09for i in range(epochs):%0A%09%09%09for d in range(self.data.shape%5B0%5D):%0A%09%09%09%09dist = self.calc_dist(self.data%5Bd, :%5D, self.weights)%0A%09%09%09%09cluster = np.argmax(dist)%0A%09%09%09%09# competitive-learning update: move the winner towards the sample%0A%09%09%09%09self.weights%5Bcluster, :%5D += self.eta * (self.data%5Bd, :%5D - self.weights%5Bcluster, :%5D)%0A%0A%09def predict(self, inp):%0A%09%09dist = self.calc_dist(inp, self.weights)%0A%09%09best = np.argmax(dist)%0A%09%09return best%0A%0A%09def predict_all(self, data):%0A%09%09best = np.zeros((data.shape%5B0%5D, 1))%0A%09%09for i in range(data.shape%5B0%5D):%0A%09%09%09best%5Bi%5D = self.predict(data%5Bi, :%5D)%0A%09%09return best%0A
|
|
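A hypothetical driver for the class above, on random data with small settings:

import numpy as np

data = np.random.rand(100, 4)
net = Kmeansnet(data, clusters=3, eta=0.1)
net.train(epochs=20)
# after train(), net.data holds the normalised samples
assignments = net.predict_all(net.data)
print(assignments[:10].ravel())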
e514c4e4dc295b8e7e101fc7b5ed7dfc4ad0014a
|
Fix layer sort key generation
|
nose2/plugins/layers.py
|
nose2/plugins/layers.py
|
import logging
import re
import six
from nose2 import events
from nose2.suite import LayerSuite
from nose2.compat import unittest, OrderedDict
BRIGHT = r'\033[1m'
RESET = r'\033[0m'
__unittest = True
log = logging.getLogger(__name__)
class Layers(events.Plugin):
alwaysOn = True
def startTestRun(self, event):
event.suite = self._makeLayerSuite(event)
def _makeLayerSuite(self, event):
return self._sortByLayers(
event.suite, self.session.testLoader.suiteClass)
def _sortByLayers(self, suite, suiteClass):
top = suiteClass()
# first find all of the layers mentioned
layers = OrderedDict()
for test in self._flatten(suite):
# split tests up into buckets by layer
layer = getattr(test, 'layer', None)
if layer:
layers.setdefault(layer, LayerSuite(layer=layer)).addTest(test)
else:
top.addTest(test)
# then organize layers into a tree
remaining = list(layers.keys())
seen = set()
tree = {}
while remaining:
ly = remaining.pop()
if ly in seen:
continue
seen.add(ly)
# superclasses of this layer
if ly is None:
deps = []
else:
deps = [cls for cls in bases_and_mixins(ly)
if cls is not object]
deps.reverse()
if not deps:
# layer is top-level
self._addToTree(tree, ly, None)
else:
outer = ly
while deps:
inner, outer = outer, deps.pop()
self._addToTree(tree, inner, outer)
if outer not in layers:
remaining.append(outer)
layers[outer] = LayerSuite(layer=outer)
# finally build the top-level suite
self._treeToSuite(tree, None, top, layers)
# printtree(top)
return top
def _addToTree(self, tree, inner, outer):
found = False
for k, v in tree.items():
if inner in v:
found = True
if outer is not None:
v.remove(inner)
break
if outer is not None or not found:
tree.setdefault(outer, []).append(inner)
def _treeToSuite(self, tree, key, suite, layers):
mysuite = layers.get(key, None)
if mysuite:
suite.addTest(mysuite)
suite = mysuite
sublayers = tree.get(key, [])
# ensure that layers with a set order are in order
sublayers.sort(key=self._sortKey)
log.debug('sorted sublayers of %s (%s): %s', mysuite,
getattr(mysuite, 'layer', 'no layer'), sublayers)
for layer in sublayers:
self._treeToSuite(tree, layer, suite, layers)
def _flatten(self, suite):
out = []
for test in suite:
try:
out.extend(self._flatten(test))
except TypeError:
out.append(test)
return out
def _sortKey(self, layer):
pos = getattr(layer, 'position', None)
# ... lame
if pos:
key = six.u("%04f") % pos
else:
key = layer.__name__
return key
class LayerReporter(events.Plugin):
commandLineSwitch = (
None, 'layer-reporter', 'Add layer information to test reports')
configSection = 'layer-reporter'
def __init__(self):
self.indent = self.config.as_str('indent', ' ')
self.colors = self.config.as_bool('colors', False)
self.highlight_words = self.config.as_list('highlight-words',
['A', 'having', 'should'])
self.highlight_re = re.compile(
r'\b(%s)\b' % '|'.join(self.highlight_words))
self.layersReported = set()
def reportStartTest(self, event):
if self.session.verbosity < 2:
return
test = event.testEvent.test
layer = getattr(test, 'layer', None)
if not layer:
return
for ix, lys in enumerate(self.ancestry(layer)):
for layer in lys:
if layer not in self.layersReported:
desc = self.describeLayer(layer)
event.stream.writeln('%s%s' % (self.indent * ix, desc))
self.layersReported.add(layer)
event.stream.write(self.indent * (ix+1))
def describeLayer(self, layer):
return self.format(getattr(layer, 'description', layer.__name__))
def format(self, st):
if self.colors:
return self.highlight_re.sub(r'%s\1%s' % (BRIGHT, RESET), st)
return st
def describeTest(self, event):
if hasattr(event.test, 'methodDescription'):
event.description = self.format(event.test.methodDescription())
if event.errorList and hasattr(event.test, 'layer'):
# walk back layers to build full description
self.describeLayers(event)
def describeLayers(self, event):
desc = [event.description]
base = event.test.layer
for layer in (base.__mro__ + getattr(base, 'mixins', ())):
if layer is object:
continue
desc.append(self.describeLayer(layer))
desc.reverse()
event.description = ' '.join(desc)
def ancestry(self, layer):
layers = [[layer]]
bases = [base for base in bases_and_mixins(layer)
if base is not object]
while bases:
layers.append(bases)
seen = set() # ???
newbases = []
for b in bases:
for bb in bases_and_mixins(b):
if bb is not object and bb not in seen:
newbases.append(bb)
bases = newbases
layers.reverse()
return layers
def bases_and_mixins(layer):
return (layer.__bases__ + getattr(layer, 'mixins', ()))
# for debugging
def printtree(suite, indent=''):
six.print_('%s%s ->' % (indent, getattr(suite, 'layer', 'no layer')))
for test in suite:
if isinstance(test, unittest.BaseTestSuite):
printtree(test, indent + ' ')
else:
six.print_('%s %s' % (indent, test))
six.print_('%s<- %s' % (indent, getattr(suite, 'layer', 'no layer')))
|
Python
| 0.000001 |
@@ -3275,16 +3275,28 @@
if pos
+ is not None
:%0A
@@ -3317,17 +3317,17 @@
x.u(%22%2504
-f
+d
%22) %25 pos
|
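Both halves of the fix are easy to demonstrate: a position of 0 is falsy, and %04f never actually zero-pads because the six-digit default precision already exceeds the width:

pos = 0
print(bool(pos))   # False: 'if pos:' would wrongly ignore position 0
print("%04f" % 3)  # '3.000000' - wider than 4 chars, so no padding
print("%04d" % 3)  # '0003' - fixed-width keys that sort correctly as strings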
b876332debd21edb3e3b84f01bb8aec5196bd8d8
|
add enumerating partition
|
resource-4/combinatorics/integer-partitions/enumerating/partition.py
|
resource-4/combinatorics/integer-partitions/enumerating/partition.py
|
Python
| 0.000044 |
@@ -0,0 +1,157 @@
+# base case: the only partition of zero is the empty partition%0Aif n == 0:%0A%09yield %5B%5D%0A%09return%0A%0A# extend each ascending partition of n-1%0Afor ig in partitions(n-1):%0A%09yield %5B1%5D + ig%0A%09if ig and (len(ig) %3C 2 or ig%5B1%5D %3E ig%5B0%5D):%0A%09%09yield %5Big%5B0%5D + 1%5D + ig%5B1:%5D%0A
|
|
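The excerpt above is only the body of a recursive generator; a complete, runnable version under that assumption:

def partitions(n):
    # ascending-order integer partitions of n
    if n == 0:
        yield []
        return
    for p in partitions(n - 1):
        yield [1] + p                         # add a new smallest part
        if p and (len(p) < 2 or p[1] > p[0]):
            yield [p[0] + 1] + p[1:]          # grow the smallest part instead

print(list(partitions(4)))
# [[1, 1, 1, 1], [1, 1, 2], [2, 2], [1, 3], [4]]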
d9c95fcf89f0e72c3504a4988e6d4fb6ef2ae6cd
|
Add the timeseries neural network
|
src/backend/timeseries_nnet.py
|
src/backend/timeseries_nnet.py
|
Python
| 0.999953 |
@@ -0,0 +1,3141 @@
+# Modified code from https://github.com/hawk31/nnet-ts%0Aimport logging%0Aimport numpy as np%0Afrom keras.optimizers import SGD%0Afrom keras.models import Sequential%0Afrom keras.layers.core import Dense, Activation%0Afrom sklearn.preprocessing import StandardScaler%0A%0Alogging.basicConfig(format='%25(levelname)s:%25(message)s', level=logging.DEBUG)%0A%0A%0Aclass TimeSeriesNnet(object):%0A def __init__(self, timeseries, hidden_layers=%5B20, 15, 5%5D,%0A activation_functions=%5B'relu', 'relu', 'relu'%5D,%0A optimizer=SGD(), loss='mean_absolute_error',%0A lag=11):%0A self._hidden_layers = hidden_layers%0A self._activation_functions = activation_functions%0A self._optimizer = optimizer%0A self._loss = loss%0A%0A self._lag = lag%0A self._timeseries = self._prepare_data(timeseries)%0A self._scaler = StandardScaler()%0A self._nn = Sequential()%0A%0A if len(self._hidden_layers) != len(self._activation_functions):%0A raise Exception('hidden_layers size must match'%0A 'activation_functions size')%0A%0A def _prepare_data(self, timeseries):%0A return np.array(timeseries, dtype='float64')%0A%0A def fit(self, epochs=10000, verbose=0):%0A timeseries_len = len(self._timeseries)%0A if self._lag %3E= timeseries_len:%0A raise ValueError('Lag is higher than length of the timeseries')%0A%0A X = np.zeros((timeseries_len - self._lag, self._lag), dtype='float64')%0A y = np.log(self._timeseries%5Bself._lag:%5D)%0A%0A # Building X matrixs%0A logging.info('Building regressor matrix')%0A for i in range(0, timeseries_len - self._lag):%0A X%5Bi, :%5D = self._timeseries%5Brange(i, i + self._lag)%5D%0A%0A logging.info('Scaling data')%0A self._scaler.fit(X)%0A X = self._scaler.transform(X)%0A%0A # Neural net architecture%0A logging.info('Checking network consistency')%0A self._nn.add(Dense(self._hidden_layers%5B0%5D, input_shape=(X.shape%5B1%5D,)))%0A self._nn.add(Activation(self._activation_functions%5B0%5D))%0A%0A for layer_size, activation_function in zip(%0A self._hidden_layers%5B1:%5D, self._activation_functions%5B1:%5D):%0A self._nn.add(Dense(layer_size))%0A self._nn.add(Activation(activation_function))%0A%0A # Add final node%0A self._nn.add(Dense(1))%0A self._nn.add(Activation('linear'))%0A self._nn.compile(loss=self._loss, optimizer=self._optimizer)%0A%0A # Train neural net%0A logging.info('Training neural net')%0A self._nn.fit(X, y, nb_epoch=epochs, verbose=verbose)%0A%0A def predict_ahead(self, n_ahead=1):%0A # Store predictions and predict iteratively%0A predictions = np.zeros(n_ahead)%0A%0A timeseries = self._timeseries%0A%0A for i in range(n_ahead):%0A current_x = self._scaler.transform(%0A timeseries%5B-self._lag:%5D.reshape((1, self._lag)))%0A next_pred = self._nn.predict(current_x)%0A predictions%5Bi%5D = np.exp(next_pred%5B0, 0%5D)%0A timeseries = np.concatenate((%0A timeseries, np.exp(next_pred%5B0, :%5D)), axis=0)%0A%0A return predictions%0A
|
|
fefb9a9fa5a7c6080bc52896e2d1517828b01a3d
|
Add all provincial legislatures (PLs) to db
|
migrations/versions/299e1d15a55f_populate_provincial_legislatures.py
|
migrations/versions/299e1d15a55f_populate_provincial_legislatures.py
|
Python
| 0 |
@@ -0,0 +1,1803 @@
+%22%22%22populate-provincial-legislatures%0A%0ARevision ID: 299e1d15a55f%0ARevises: 1f97f799a477%0ACreate Date: 2018-08-20 16:17:28.919476%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '299e1d15a55f'%0Adown_revision = '1f97f799a477'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A %22%22%22%0A Ensure all provinces exist as Provincial Legislatures%0A %22%22%22%0A from pmg.models import House, db%0A from pmg.utils import get_provincial_legislatures%0A%0A pls = %5B%0A %7B%0A 'name': 'Eastern Cape Legislature',%0A 'name_short': 'EC'%0A %7D,%0A %7B%0A 'name': 'Free State Legislature',%0A 'name_short': 'FS'%0A %7D,%0A %7B%0A 'name': 'Gauteng Legislature',%0A 'name_short': 'GT'%0A %7D,%0A %7B%0A 'name': 'KwaZulu-Natal Legislature',%0A 'name_short': 'KZN'%0A %7D,%0A %7B%0A 'name': 'Limpopo Legislature',%0A 'name_short': 'LIM'%0A %7D,%0A %7B%0A 'name': 'Mpumalanga Legislature',%0A 'name_short': 'MP'%0A %7D,%0A %7B%0A 'name': 'Northern Cape Legislature',%0A 'name_short': 'NC'%0A %7D,%0A %7B%0A 'name': 'North West Legislature',%0A 'name_short': 'NW'%0A %7D,%0A %7B%0A 'name': 'Western Cape Parliament',%0A 'name_short': 'WC'%0A %7D%0A %5D%0A existing_pls = House.query.filter(House.sphere=='provincial').all()%0A pl_codes = %5Bp.name_short for p in existing_pls%5D%0A%0A for pl in pls:%0A if pl%5B'name_short'%5D not in pl_codes:%0A%0A new_pl = House()%0A new_pl.name = pl%5B'name'%5D%0A new_pl.name_short = pl%5B'name_short'%5D%0A new_pl.sphere = 'provincial'%0A%0A db.session.add(new_pl)%0A %0A db.session.commit()%0A%0A%0Adef downgrade():%0A pass%0A
|
|
253ff8bc8f848effea6ad7602b6424cf997c926c
|
rename celeba_multitask_acc to celeba_multilabel_acc
|
caffe/result/celeba_multilabel_acc.py
|
caffe/result/celeba_multilabel_acc.py
|
Python
| 0.999996 |
@@ -0,0 +1,856 @@
+import os%0Aimport numpy as np%0Aimport sys%0A%0Alabel_file = open('/home/hypan/data/celebA/test.txt', 'r')%0Alines = label_file.readlines()%0Alabel_file.close()%0A%0Aacc = np.zeros(40)%0Acou = 0%0A%0Afor line in lines:%0A    info = line.strip('%5Cr%5Cn').split()%0A    name = info%5B0%5D.split('.')%5B0%5D%0A    gt_labels = info%5B1: %5D%0A    feat_path = '/home/hypan/data/celebA/result/' + sys.argv%5B1%5D + '/test_feature/' + name + '.npy'%0A    if not os.path.exists(feat_path):%0A        print '%7B%7D has no predicted feature.'.format(name)%0A        continue%0A    pd_labels = np.load(feat_path)%0A    cnt = len(pd_labels)%0A    for i in range(cnt):%0A        gt_label = int(gt_labels%5Bi%5D)%0A        pd_label = pd_labels%5Bi%5D%0A        if pd_label %3E 0:%0A            pd_label = 1%0A        else:%0A            pd_label = -1%0A        if gt_label == pd_label:%0A            acc%5Bi%5D += 1%0A    cou += 1%0A%0Afor i in range(40):%0A    print i, acc%5Bi%5D * 1.0 / cou%0A
|
|
d2e165ace4fc26b51e18494c4878f95ebcefa20a
|
add api blueprint with /v1/repolist endpoint
|
web/routers/api.py
|
web/routers/api.py
|
Python
| 0 |
@@ -0,0 +1,2313 @@
+# coding: utf-8%0Aimport os%0Aimport json%0Aimport time%0Aimport datetime%0A%0Aimport humanize%0Aimport flask%0Afrom flask import request, flash, redirect, url_for, render_template%0A%0Aimport models%0Aimport gcfg%0A%0Abp = flask.Blueprint('api', __name__)%0A%[email protected]('/')%0Adef home():%0A return flask.render_template('api.html')%0A%[email protected]('/v1/repolist')%[email protected]_session%0Adef repolist():%0A goos='windows'%0A goarch='amd64'%0A data = %5B%5D%0A for r in models.select(r for r in models.Recommend)%5B:%5D:%0A item = dict(%0A reponame=r.repo.name,%0A alias=r.name,%0A author=r.repo.author,%0A description=r.repo.description,%0A offical=r.repo.offcial,%0A category=r.category.name if r.category else None,%0A stars=r.repo.stars,%0A osarch=goos+'-'+goarch,%0A )%0A files = %5B%5D%0A for b in r.repo.builds:%0A if not b.downloadable:%0A continue%0A%0A # actually only one loop%0A file = %7B'label':b.tag, 'updated':b.updated%7D%0A for f in models.select(f for f in models.File %5C%0A if f.build==b and f.os == goos and f.arch == goarch)%5B:1%5D:%0A file.update(%7B'binfiles': %5Bos.path.basename(f.reponame)%5D, # FIXME: need to parse from gobuildrc%0A 'size': f.size, 'url': f.outlink, 'sha1': f.sha%7D)%0A files.append(file)%0A if files:%0A item%5B'files'%5D = files%0A data.append(item)%0A%0A data.append(dict(%0A reponame = 'github.com/codeskyblue/cgotest',%0A description='this is is just a test program',%0A alias='cgotest', # this could be null%0A author='unknown,lunny',%0A offical=True,%0A category='music',%0A stars=18,%0A files=%5B%0A %7B'label': 'branch:master', 'url': 'http://gobuild3.qiniudn.com/github.com/gogits/gogs/branch-v-master/gogs-linux-386.tar.gz', 'binfiles': %5B'gogs'%5D, 'sha1': '408eebced1c2cdbd363df2fe843831bf337d4273', 'size': 7000000%7D,%0A %7B'label': 'tag:v0.5.2', 'url': 'http://gobuild3.qiniudn.com/github.com/gogits/gogs/tag-v-v0.5.2/gogs-linux-386.tar.gz', 'binfiles': %5B'gogs'%5D, 'sha1': '960e329d46ec7a79745cf3438eaf3c3151d38d97', 'size': 7100000%7D%5D,%0A ))%0A return flask.jsonify(%7B'status': 0, 'message': 'success', 'osarch': 'linux-386', 'data': data%7D)%0A
|
|
6c9760b328716d6b2e099698293c93cba9361932
|
Add script for testing error reporting.
|
checkserver/testchecks/check_error.py
|
checkserver/testchecks/check_error.py
|
Python
| 0 |
@@ -0,0 +1,1008 @@
+#!/usr/bin/env python%0A# Copyright 2012 The greplin-nagios-utils Authors.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22Status%0A%0Anagios config:%0Ause regular-service%0Aparams $HOSTNAME$%0A%22%22%22%0A%0A%0Afrom greplin.nagios import parseArgs, Maximum, ResponseBuilder%0A%0A%0Adef check(argv):%0A %22%22%22Runs the check.%22%22%22%0A _ = parseArgs('check_fast.py', ('NAME', str), argv=argv) / 0 # Badness!%0A%0A%0A (ResponseBuilder().addRule('seven', Maximum(8, 11), 7)).finish()%0A%0A%0Aif __name__ == '__main__':%0A import sys%0A check(sys.argv)%0A
|
|
bc651b5ca15cf41eece321b77142c2973bd41ede
|
Add a sqlite config
|
zinnia/tests/implementations/sqlite.py
|
zinnia/tests/implementations/sqlite.py
|
Python
| 0.000002 |
@@ -0,0 +1,220 @@
+%22%22%22Settings for testing zinnia on SQLite%22%22%22%0Afrom zinnia.tests.implementations.settings import * # noqa%0A%0ADATABASES = %7B%0A 'default': %7B%0A 'NAME': 'zinnia.db',%0A 'ENGINE': 'django.db.backends.sqlite3'%0A %7D%0A%7D%0A
|
|
b546ac87cd3e3821619a5ac7ed7806c1f569a3cd
|
Create PySMS.py
|
PySMS.py
|
PySMS.py
|
Python
| 0.000001 |
@@ -0,0 +1,518 @@
+# -*- coding: utf-8 -*-%0Aimport smtplib%0Afrom time import strftime%0A%0A%0A%0A# User account credentials -- (gmail username and password)%0AUSERNAME = ''%0APASSWORD = ''%0A%0A# Routing -- (FROMADDR can be null iirc)%0AFROMADDR = ''%0ATOADDRS = ''%0A%0A# Message Body%0AMESSAGE = ''%0A%0A%0Adef SendMessage(MESSAGE):%0A server = smtplib.SMTP('smtp.gmail.com:587')%0A server.starttls()%0A server.login(USERNAME, PASSWORD)%0A server.sendmail(FROMADDR, TOADDRS, MESSAGE)%0A server.quit()%0A%0A%0Adef TimeStamp():%0A return strftime('%25-I:%25M %25p - %25b %25d %25Y')%0A
|
|
36a8a2f52f1b85d70cda0bf399a371a4c04d0ccd
|
add utility script to easily launch the bottle development server
|
util/dev_runner.py
|
util/dev_runner.py
|
Python
| 0 |
@@ -0,0 +1,139 @@
+import os%0Aimport dmon%0Aimport bottle%0A%0Aos.chdir(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))%0Abottle.run(host='localhost', port=8001)
|
|
2fdace2e358ede8da1a6f569b063548f8969d825
|
Add supervisor config generator
|
util/supervisor.py
|
util/supervisor.py
|
Python
| 0 |
@@ -0,0 +1,2329 @@
+from util.config import Configuration%0Afrom configparser import ConfigParser%0Aimport platform%0Aimport os%0A%0A%0Aclass Supervisor:%0A __config = Configuration()%0A%0A def __init__(self):%0A self.__config_file = self.__config.get_config_dir() + '/supervisor.conf'%0A%0A def generate_config(self, servers):%0A parser = ConfigParser()%0A%0A config_dir = self.__config.get_config_dir()%0A%0A parser.add_section('unix_http_server')%0A parser.set('unix_http_server', 'file', config_dir + '/supervisor.sock')%0A parser.set('unix_http_server', 'chmod', '0700')%0A%0A parser.add_section('supervisord')%0A parser.set('supervisord', 'logfile', config_dir + '/supervisor_error.log')%0A parser.set('supervisord', 'pidfile', config_dir + '/supervisor.pid')%0A%0A parser.add_section('rpcinterface:supervisor')%0A parser.set('rpcinterface:supervisor', 'supervisor.rpcinterface_factory', 'supervisor.rpcinterface:make_main_rpcinterface')%0A%0A parser.add_section('supervisorctl')%0A parser.set('supervisorctl', 'serverurl', 'unix://' + config_dir + '/supervisor.sock')%0A%0A ql_executable = self.get_ql_executable()%0A%0A for sid,data in servers.items():%0A name = 'qlds_' + sid%0A section = 'program:' + name%0A parser.add_section(section)%0A parser.set(section, 'command', self.build_command_line(data, ql_executable))%0A parser.set(section, 'process_name', name)%0A parser.set(section, 'autorestart', 'true')%0A%0A if os.path.isfile(self.__config_file) and not os.access(self.__config_file, os.W_OK):%0A raise IOError('Cannot write to file ' + self.__config_file)%0A%0A with (open(self.__config_file, 'w+')) as config_fp:%0A parser.write(config_fp)%0A%0A def build_command_line(self, server, executable):%0A command_line = %5Bexecutable%5D%0A%0A for k,v in server.items():%0A command_line.append('+set %25s %25s' %25 (k, v))%0A%0A return ' '.join(command_line)%0A%0A def get_ql_executable(self):%0A if platform.architecture()%5B0%5D == '64bit':%0A executable = 'run_server_x64.sh'%0A else:%0A executable = 'run_server_x86.sh'%0A%0A return os.path.expanduser(self.__config.get('dir', 'ql')) + '/' + executable%0A%0A def get_config_location(self):%0A return self.__config_file%0A
|
|
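A hypothetical driver for the generator above; the server ids and Quake Live cvars are invented for illustration:

servers = {
    'duel1': {'net_port': 27960, 'sv_hostname': 'Duel #1'},
    'ca1': {'net_port': 27961, 'sv_hostname': 'Clan Arena #1'},
}
sup = Supervisor()
sup.generate_config(servers)
print('wrote', sup.get_config_location())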
9c731cd17ccc853207b715b778622274b28e9efd
|
Create clientsocket.py
|
clientsocket.py
|
clientsocket.py
|
Python
| 0.000002 |
@@ -0,0 +1,349 @@
+#!/usr/bin/env python%0A%0Aimport socket%0A%0AclientSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)%0AclientSocket.connect((%22www.google.com%22, 80))%0Arequest = %22GET / HTTP/1.0%5Cr%5Cn%5Cr%5Cn%22  # HTTP requires CRLF line endings%0AclientSocket.sendall(request)%0A%0Aresponse = bytearray()%0Awhile True:%0A%09part = clientSocket.recv(1024)%0A%09if (part):%0A%09%09response.extend(part)%0A%09else:%0A%09%09break%0A%0AclientSocket.close()%0Aprint response%0A
|
|
885ff9c8886abd30518d2cd149f37f0ba507bb71
|
add 6
|
006.py
|
006.py
|
Python
| 0.999998 |
@@ -0,0 +1,250 @@
+def sum_squares(l):%0A    return reduce(lambda x, y: x + y**2, l, 0)  # start at 0 so the first element is squared too%0A%0A%0Adef square_sums(l):%0A    return reduce(lambda x, y: x + y, l) ** 2%0A%0Ar = range(1, 101)%0A%0Assum = sum_squares(r)%0Assquare = square_sums(r)%0Adelta = ssquare - ssum%0A%0Aprint ssum, ssquare, delta%0A
|
|
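The same difference has a closed form, which makes a handy cross-check of the reduce version:

n = 100
sum_sq = n * (n + 1) * (2 * n + 1) // 6  # 1^2 + ... + n^2
sq_sum = (n * (n + 1) // 2) ** 2         # (1 + ... + n)^2
print(sq_sum - sum_sq)                   # 25164150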
7ce57e27265d4ea7639aaf6f806b9312d17c5c5a
|
Create HR_pythonSwapCase.py
|
HR_pythonSwapCase.py
|
HR_pythonSwapCase.py
|
Python
| 0.000165 |
@@ -0,0 +1,61 @@
+#pythonSwapCase.py%0Adef swap_case(s):%0A return s.swapcase()%0A
|
|
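Usage, with HackerRank's sample input (the sample string is quoted from memory):

print(swap_case("HackerRank.com presents 'Pythonist 2'."))
# hACKERrANK.COM PRESENTS 'pYTHONIST 2'.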
104ce4eb41a8d1d8307618f619dbf5336af1056d
|
Add CVE plugin.
|
plumeria/plugins/cve.py
|
plumeria/plugins/cve.py
|
Python
| 0 |
@@ -0,0 +1,1873 @@
+import re%0Aimport urllib.parse%0A%0Afrom plumeria.command import commands, CommandError%0Afrom plumeria.util import http%0Afrom plumeria.util.ratelimit import rate_limit%0A%0ACVE_PATTERN = re.compile(%22%5E(CVE-%5C%5Cd%7B4,5%7D-%5Cd+)$%22, re.IGNORECASE)%0A%0A%[email protected](%22cve%22, category=%22Search%22)%0A@rate_limit()%0Aasync def cve(message):%0A %22%22%22%0A Look up information about a CVE.%0A%0A Example::%0A%0A /cve CVE-2010-3213%0A%0A Response::%0A%0A CVE-2010-3213 - Cross-site request forgery (CSRF) vulner%5B...%5D%0A Auth: NONE / Complexity: MEDIUM / Vector: NETWORK%0A https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2010-3213%0A %E2%80%A2 (462) Cross-Domain Search Timing%0A %E2%80%A2 (467) Cross Site Identification%0A %E2%80%A2 (62) Cross Site Request Forgery (aka Session Riding)%0A %E2%80%A2 (111) JSON Hijacking (aka JavaScript Hijacking)%0A%0A %22%22%22%0A q = message.content.strip()%0A if not q:%0A raise CommandError(%22Search term required!%22)%0A m = CVE_PATTERN.search(q)%0A if not m:%0A raise CommandError(%22No CVE found in the given input%22)%0A r = await http.get(%22https://cve.circl.lu/api/cve/%7B%7D%22.format(m.group(1).upper()))%0A data = r.json()%0A if len(data.keys()):%0A capecs = %22%5Cn%22.join(%0A map(lambda e: %22%5Cu2022 (%7Bid%7D) %7Bname%7D%22.format(id=e%5B'id'%5D, name=e%5B'name'%5D), data.get(%22capec%22, %5B%5D)))%0A return %22**%7Bcve%7D** %5B%7Bcvss%7D%5D - %7Bsummary%7D%5Cn*Auth: %7Bauth%7D / Complexity: %7Bcomplexity%7D / Vector: %7Bvector%7D*%5Cn%3C%7Burl%7D%3E%5Cn%7Bcapecs%7D%22.format(%0A cve=data%5B'id'%5D,%0A cvss=data%5B'cvss'%5D,%0A summary=data%5B'summary'%5D,%0A auth=data%5B'access'%5D%5B'authentication'%5D,%0A complexity=data%5B'access'%5D%5B'complexity'%5D,%0A vector=data%5B'access'%5D%5B'vector'%5D,%0A capecs=capecs,%0A url=%22https://cve.mitre.org/cgi-bin/cvename.cgi?name=%7B%7D%22.format(urllib.parse.quote(data%5B'id'%5D)))%0A else:%0A raise CommandError(%22no results found%22)%0A
|
|
ac7c5f51e270e48d3be9363a7c65b4b2f019c90c
|
Add tests for xkcd bot.
|
contrib_bots/bots/xkcd/test_xkcd.py
|
contrib_bots/bots/xkcd/test_xkcd.py
|
Python
| 0 |
@@ -0,0 +1,1855 @@
+#!/usr/bin/env python%0A%0Afrom __future__ import absolute_import%0Afrom __future__ import print_function%0A%0Aimport mock%0Aimport os%0Aimport sys%0A%0Aour_dir = os.path.dirname(os.path.abspath(__file__))%0A# For dev setups, we can find the API in the repo itself.%0Aif os.path.exists(os.path.join(our_dir, '..')):%0A sys.path.insert(0, '..')%0Afrom bots_test_lib import BotTestCase%0A%0Aclass TestXkcdBot(BotTestCase):%0A bot_name = %22xkcd%22%0A%0A @mock.patch('logging.exception')%0A def test_bot(self, mock_logging_exception):%0A help_txt = %22xkcd bot supports these commands:%22%0A err_txt = %22xkcd bot only supports these commands:%22%0A commands = '''%0A* %60@xkcd help%60 to show this help message.%0A* %60@xkcd latest%60 to fetch the latest comic strip from xkcd.%0A* %60@xkcd random%60 to fetch a random comic strip from xkcd.%0A* %60@xkcd %3Ccomic id%3E%60 to fetch a comic strip based on %60%3Ccomic id%3E%60 e.g %60@xkcd 1234%60.'''%0A invalid_id_txt = %22Sorry, there is likely no xkcd comic strip with id: #%22%0A expected = %7B%0A %22%22: err_txt+commands,%0A %22help%22: help_txt+commands,%0A %22x%22: err_txt+commands,%0A %220%22: invalid_id_txt + %220%22,%0A %221%22: (%22#1: **Barrel - Part 1**%5Cn%5BDon't we all.%5D%22%0A %22(https://imgs.xkcd.com/comics/barrel_cropped_(1).jpg)%22),%0A %221800%22: (%22#1800: **Chess Notation**%5Cn%22%0A %22%5BI've decided to score all my conversations %22%0A %22using chess win-loss notation. (??)%5D%22%0A %22(https://imgs.xkcd.com/comics/chess_notation.png)%22),%0A %22999999999%22: invalid_id_txt + %22999999999%22,%0A %7D%0A for m, r in expected.items():%0A self.assert_bot_output(%0A %7B'content': m, 'type': %22private%22, 'sender_email': %22foo%22%7D, r)%0A self.assert_bot_output(%0A %7B'content': m, 'type': %22stream%22, 'sender_email': %22foo%22%7D, r)%0A
|
|
552e2381b25c9d3591e7b4bf4a4c5796744b15ba
|
Add demo configuration
|
.salt/files/demo.py
|
.salt/files/demo.py
|
Python
| 0 |
@@ -0,0 +1,417 @@
+from .prod import *%0A%0A%0ALEAFLET_CONFIG%5B'TILES'%5D = %5B%0A (gettext_noop('Scan'), 'http://%7Bs%7D.livembtiles.makina-corpus.net/makina/OSMTopo/%7Bz%7D/%7Bx%7D/%7By%7D.png', 'OSM Topo'),%0A (gettext_noop('Ortho'), 'https://%7Bs%7D.tiles.mapbox.com/v3/makina-corpus.i3p1001l/%7Bz%7D/%7Bx%7D/%7By%7D.png', '© MapBox Satellite'),%0A%5D%0ALEAFLET_CONFIG%5B'SRID'%5D = 3857%0A%0AALTIMETRIC_PROFILE_COLOR = '#F77E00'%0A%0AMAPENTITY_CONFIG%5B'MAP_BACKGROUND_FOGGED'%5D = False%0A%0A
|
|
4b4bfd8d1bfb5e6db7ac5d24be526f188ceb6e68
|
add payout exceptions
|
bluebottle/payouts_dorado/exceptions.py
|
bluebottle/payouts_dorado/exceptions.py
|
Python
| 0 |
@@ -0,0 +1,275 @@
+class PayoutException(Exception):%0A%0A def __init__(self, message, error_list=None):%0A self.message = message%0A self.error_list = error_list%0A%0A def __str__(self):%0A return str(self.message)%0A%0A def __unicode__(self):%0A return unicode(self.message)%0A
|
|
5908d941fc113ee02b7d5962f0209a528ab9ecb1
|
Add cross-site css module
|
core/modules/uses_stylesheet_naver.py
|
core/modules/uses_stylesheet_naver.py
|
Python
| 0.000001 |
@@ -0,0 +1,471 @@
+from bs4 import BeautifulSoup%0A%0A%22%22%22%0ASites that are in the Naver domain are already checked by is_masquerading. So no need to check url again%0A%22%22%22%0Adef uses_stylesheet_naver(resp):%0A print('uses_stylesheet_naver')%0A answer = %22U%22%0A%0A current_page = BeautifulSoup(resp.text, 'lxml')%0A stylesheets = current_page.find_all('link', rel=%22stylesheet%22)%0A%0A for stylesheet in stylesheets:%0A if %22naver.com%22 in stylesheet%5B'href'%5D:%0A return %22P%22%0A return answer%0A
|
|
d217ee9c830a6cccb70155ceff44746b4e5215d6
|
Add missing csv migration
|
saleor/csv/migrations/0004_auto_20200604_0633.py
|
saleor/csv/migrations/0004_auto_20200604_0633.py
|
Python
| 0.000011 |
@@ -0,0 +1,1104 @@
+# Generated by Django 3.0.6 on 2020-06-04 11:33%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A (%22csv%22, %220003_auto_20200520_0247%22),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name=%22exportevent%22,%0A name=%22type%22,%0A field=models.CharField(%0A choices=%5B%0A (%22export_pending%22, %22Data export was started.%22),%0A (%22export_success%22, %22Data export was completed successfully.%22),%0A (%22export_failed%22, %22Data export failed.%22),%0A (%22export_deleted%22, %22Export file was started.%22),%0A (%0A %22exported_file_sent%22,%0A %22Email with link to download file was sent to the customer.%22,%0A ),%0A (%0A %22Export_failed_info_sent%22,%0A %22Email with info that export failed was sent to the customer.%22,%0A ),%0A %5D,%0A max_length=255,%0A ),%0A ),%0A %5D%0A
|
|
2b86b727cd701464969de5679d30f9bea38a08f3
|
Create TheDescent.py
|
Easy/TheDescent/TheDescent.py
|
Easy/TheDescent/TheDescent.py
|
Python
| 0.000001 |
@@ -0,0 +1,455 @@
+import sys%0Aimport math%0A%0Awhile True:%0A tallest_index = -1%0A tallest_height = -1%0A %0A for i in range(8):%0A mountain_h = int(input()) # represents the height of one mountain.%0A%0A if(tallest_height != -1):%0A if(mountain_h %3E tallest_height):%0A tallest_index = i%0A tallest_height = mountain_h%0A else:%0A tallest_index = i%0A tallest_height = mountain_h%0A%0A print(tallest_index)%0A
|
|
280aa4c8db7b5580b73ab6980f10d21a6ef2d761
|
Add an audio output using the pygame mixer. This abuses pygame to a fair extent, but works reasonably with large-ish buffer sizes.
|
Sketches/JT/Jam/library/trunk/Kamaelia/Apps/Jam/Audio/PyGameOutput.py
|
Sketches/JT/Jam/library/trunk/Kamaelia/Apps/Jam/Audio/PyGameOutput.py
|
Python
| 0 |
@@ -0,0 +1,1160 @@
+import numpy%0Aimport Numeric%0Aimport pygame%0Aimport Axon%0Aimport time%0A%0Aclass PyGameOutput(Axon.ThreadedComponent.threadedcomponent):%0A bufferSize = 1024%0A sampleRate = 44100%0A def __init__(self, **argd):%0A super(PyGameOutput, self).__init__(**argd)%0A pygame.mixer.init(self.sampleRate, -16, 1, self.bufferSize)%0A %0A def main(self):%0A while 1:%0A if not pygame.mixer.get_init():%0A pygame.mixer.init(self.sampleRate, -16, 1, self.bufferSize)%0A else:%0A if self.dataReady(%22inbox%22):%0A numpyArray = self.recv(%22inbox%22)%0A # Scale to 16 bit int%0A numpyArray *= 2**15-1%0A numpyArray = numpyArray.astype(%22int16%22)%0A numericArray = Numeric.asarray(numpyArray)%0A sound = pygame.sndarray.make_sound(numericArray)%0A sound.play()%0A%0A if not self.anyReady():%0A self.pause()%0A%0Aif __name__ == %22__main__%22:%0A from Kamaelia.Chassis.Pipeline import Pipeline%0A from Kamaelia.Apps.Jam.Audio.SineSynth import SineOsc%0A%0A Pipeline(SineOsc(), PyGameOutput()).run()%0A
|
|
4433cadaa39dd84b922329c84a7e791d81cac7c6
|
Add a very simple test that *must* always pass. Useful for testing the new-style API
|
nettests/simpletest.py
|
nettests/simpletest.py
|
Python
| 0.000001 |
@@ -0,0 +1,792 @@
+from ooni import nettest%0Aclass SimpleTest(nettest.TestCase):%0A inputs = range(1,100)%0A optParameters = %5B%5B'asset', 'a', None, 'Asset file'%5D,%0A %5B'controlserver', 'c', 'google.com', 'Specify the control server'%5D,%0A %5B'resume', 'r', 0, 'Resume at this index'%5D,%0A %5B'other', 'o', None, 'Other arguments'%5D%5D%0A%0A def test_foo(self, *arg, **kw):%0A print %22Running %25s with %25s%22 %25 (%22test_foo%22, self.input)%0A self.report%5B'test_foo'%5D = 'Antani'%0A self.report%5B'shared'%5D = %22sblinda%22%0A self.assertEqual(1,1)%0A%0A def test_f4oo(self, *arg, **kw):%0A print %22Running %25s with %25s%22 %25 (%22test_f4oo%22, self.input)%0A self.report%5B'test_f4oo'%5D = 'Antani'%0A self.report%5B'shared'%5D = %22sblinda2%22%0A self.assertEqual(1,1)%0A
|
|
124c4f30455d0892608622ddd09a0e7d83c3e8da
|
Create xmltodict_implementation.py
|
Useful-Libs/xmltodict_implementation.py
|
Useful-Libs/xmltodict_implementation.py
|
Python
| 0.000006 |
@@ -0,0 +1,334 @@
+import xmltodict%0A%0Awith open('path/to/file.xml') as fd:%0A    doc = xmltodict.parse(fd.read())%0A%0Adoc%5B'mydocument'%5D%5B'@has'%5D  # == u'an attribute'%0Adoc%5B'mydocument'%5D%5B'and'%5D%5B'many'%5D  # == %5Bu'elements', u'more elements'%5D%0Adoc%5B'mydocument'%5D%5B'plus'%5D%5B'@a'%5D  # == u'complex'%0Adoc%5B'mydocument'%5D%5B'plus'%5D%5B'#text'%5D  # == u'element as well'%0A
|
|
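The attribute and text conventions are easier to try against an inline document; a sketch assuming xmltodict is installed:

import xmltodict

xml = '<mydocument has="an attribute"><plus a="complex">element as well</plus></mydocument>'
doc = xmltodict.parse(xml)
print(doc['mydocument']['@has'])           # u'an attribute'
print(doc['mydocument']['plus']['@a'])     # u'complex'
print(doc['mydocument']['plus']['#text'])  # u'element as well'
print(xmltodict.unparse(doc))              # serialises back to XML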
bf1b53ea1b4aa93805a35ad51dd63c55c4cc31a8
|
Update deprecated cgi package to urlparse and handle key=value bodies
|
endpoints/api_request.py
|
endpoints/api_request.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Endpoints API request-related data and functions."""
from __future__ import with_statement
# pylint: disable=g-bad-name
import cgi
import copy
import json
import logging
import urllib
import zlib
import util
class ApiRequest(object):
"""Simple data object representing an API request.
Parses the request from environment variables into convenient pieces
and stores them as members.
"""
_API_PREFIX = '/_ah/api/'
def __init__(self, environ):
"""Constructor.
Args:
environ: An environ dict for the request as defined in PEP-333.
Raises:
ValueError: If the path for the request is invalid.
"""
self.headers = util.get_headers_from_environ(environ)
self.http_method = environ['REQUEST_METHOD']
self.url_scheme = environ['wsgi.url_scheme']
self.server = environ['SERVER_NAME']
self.port = environ['SERVER_PORT']
self.path = environ['PATH_INFO']
self.query = environ.get('QUERY_STRING')
self.body = environ['wsgi.input'].read()
if self.body and self.headers.get('CONTENT-ENCODING') == 'gzip':
# Increasing wbits to 16 + MAX_WBITS is necessary to be able to decode
# gzipped content (as opposed to zlib-encoded content).
# If there's an error in the decompression, it could be due to another
# part of the serving chain that already decompressed it without clearing
# the header. If so, just ignore it and continue.
try:
self.body = zlib.decompress(self.body, 16 + zlib.MAX_WBITS)
except zlib.error:
pass
self.source_ip = environ.get('REMOTE_ADDR')
self.relative_url = self._reconstruct_relative_url(environ)
if not self.path.startswith(self._API_PREFIX):
raise ValueError('Invalid request path: %s' % self.path)
self.path = self.path[len(self._API_PREFIX):]
if self.query:
self.parameters = cgi.parse_qs(self.query, keep_blank_values=True)
else:
self.parameters = {}
self.body_json = json.loads(self.body) if self.body else {}
self.request_id = None
# Check if it's a batch request. We'll only handle single-element batch
# requests on the dev server (and we need to handle them because that's
# what RPC and JS calls typically show up as). Pull the request out of the
# list and record the fact that we're processing a batch.
if isinstance(self.body_json, list):
if len(self.body_json) != 1:
logging.warning('Batch requests with more than 1 element aren\'t '
'supported in devappserver2. Only the first element '
'will be handled. Found %d elements.',
len(self.body_json))
else:
logging.info('Converting batch request to single request.')
self.body_json = self.body_json[0]
self.body = json.dumps(self.body_json)
self._is_batch = True
else:
self._is_batch = False
def _reconstruct_relative_url(self, environ):
"""Reconstruct the relative URL of this request.
This is based on the URL reconstruction code in Python PEP 333:
http://www.python.org/dev/peps/pep-0333/#url-reconstruction. Rebuild the
URL from the pieces available in the environment.
Args:
environ: An environ dict for the request as defined in PEP-333.
Returns:
The portion of the URL from the request after the server and port.
"""
url = urllib.quote(environ.get('SCRIPT_NAME', ''))
url += urllib.quote(environ.get('PATH_INFO', ''))
if environ.get('QUERY_STRING'):
url += '?' + environ['QUERY_STRING']
return url
def copy(self):
return copy.deepcopy(self)
def is_rpc(self):
# Google's JsonRPC protocol creates a handler at /rpc for any Cloud
# Endpoints API, with api name, version, and method name being in the
# body of the request.
# If the request is sent to /rpc, we will treat it as JsonRPC.
# The client libraries for iOS's Objective C use RPC and not the REST
# versions of the API.
return self.path == 'rpc'
def is_batch(self):
return self._is_batch
|
Python
| 0 |
@@ -725,19 +725,8 @@
ame%0A
-import cgi%0A
impo
@@ -774,16 +774,32 @@
urllib%0A
+import urlparse%0A
import z
@@ -2469,11 +2469,16 @@
s =
-cgi
+urlparse
.par
@@ -2581,18 +2581,30 @@
n =
-json.loads
+self._process_req_body
(sel
@@ -3528,16 +3528,408 @@
False%0A%0A
+ def _process_req_body(self, body):%0A %22%22%22Process the body of the HTTP request.%0A%0A If the body is valid JSON, return the JSON as a dict.%0A Else, convert the key=value format to a dict and return that.%0A%0A Args:%0A body: The body of the HTTP request.%0A %22%22%22%0A try:%0A return json.loads(body)%0A except ValueError:%0A return urlparse.parse_qs(body, keep_blank_values=True)%0A%0A
def _r
|
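The fallback added in _process_req_body leans on parse_qs accepting blank and repeated keys. A sketch of the two paths, in the file's own Python 2 (on Python 3 the import would be urllib.parse):

import json
import urlparse  # Python 2 module, as used by api_request.py

def process_req_body(body):
    # try JSON first; anything that fails to parse is treated as form data
    try:
        return json.loads(body)
    except ValueError:
        return urlparse.parse_qs(body, keep_blank_values=True)

print(process_req_body('{"a": 1}'))    # {u'a': 1}
print(process_req_body('a=1&b=&b=2'))  # {'a': ['1'], 'b': ['', '2']}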
1e6956fb793e12b720b521feb4c0eeabaf490cea
|
add cache.py to cleanup cache
|
cache.py
|
cache.py
|
Python
| 0.000001 |
@@ -0,0 +1,891 @@
+#!/usr/bin/python%0A%0Aimport os%0Aimport time%0Afrom workflow import Workflow%0A%0A%0AAGE = 3600 * 24%0ALOG = None%0A%0Aclass Cache(object):%0A%0A def __init__(self):%0A global LOG%0A self.wf = Workflow()%0A LOG = self.wf.logger%0A self.cachedir = self.wf.cachedir%0A self.wf.cached_data_age = self.cached_data_age%0A%0A def cached_data_age(self, name):%0A cache_path = self.wf.cachefile(name)%0A if not os.path.exists(cache_path):%0A return 0%0A return time.time() - os.stat(cache_path).st_mtime%0A%0A def clean(self):%0A for file in os.listdir(self.wf.cachedir):%0A if file.endswith(%22.log%22):%0A continue%0A if not self.wf.cached_data_fresh(file, AGE):%0A LOG.debug(%22deleting cache file: %22 + file)%0A os.remove(self.wf.cachefile(file))%0A%0A%0Aif __name__==%22__main__%22:%0A cache = Cache()%0A cache.clean()%0A
|
|
25e09e4dbbc6dbc87c3b1cc2833021a9ae022a0e
|
Create compat/struct.py for python2/3 compatibility
|
pyvisa/compat/struct.py
|
pyvisa/compat/struct.py
|
Python
| 0 |
@@ -0,0 +1,1101 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0A pyvisa.compat.struct%0A ~~~~~~~~~~~~~~~~~~~~~~~~~%0A%0A Python 2/3 compatibility for struct module%0A%0A :copyright: 2015, PSF%0A :license: PSF License%0A%22%22%22%0A%0Afrom __future__ import division, unicode_literals, print_function, absolute_import%0A%0Aimport sys%0Aimport struct%0A%0A# we always want the exception to be able to catch it%0Aerror = struct.error%0A%0A# compatibility for unicode literals was introduced in 2.7.8%0A# if we're above that there is nothing to do except aliasing%0Aif sys.hexversion %3E= 0x02070800:%0A pack = struct.pack%0A pack_into = struct.pack_into%0A unpack = struct.unpack%0A unpack_from = struct.unpack_from%0A calcsize = struct.calcsize%0Aelse:%0A def pack(fmt, *args):%0A return struct.pack(str(fmt), *args)%0A%0A def pack_into(fmt, *args, **argk):%0A return struct.pack_into(str(fmt), *args, **argk)%0A%0A def unpack(fmt, string):%0A return struct.unpack(str(fmt), string)%0A%0A def unpack_from(fmt, *args, **kwargs):%0A return struct.unpack_from(str(fmt), *args, **kwargs)%0A%0A def calcsize(fmt):%0A return struct.calcsize(str(fmt))%0A
|
|
ef8bc0ddffa142e8580606377bff1d2737365711
|
add various utilities in dog.util
|
dog/util.py
|
dog/util.py
|
Python
| 0.000251 |
@@ -0,0 +1,1064 @@
+import discord%0A%0A%0Adef make_profile_embed(member):%0A embed = discord.Embed()%0A embed.set_author(name=f'%7Bmember.name%7D#%7Bmember.discriminator%7D',%0A icon_url=member.avatar_url)%0A return embed%0A%0A%0Adef american_datetime(datetime):%0A return datetime.strftime('%25m/%25d/%25Y %25I:%25M:%25S %25p')%0A%0A%0Adef pretty_timedelta(delta):%0A big = ''%0A%0A if delta.days %3E= 7 and delta.days %3C 21:%0A weeks = round(delta.days / 7, 2)%0A plural = 's' if weeks == 0 or weeks %3E 1 else ''%0A big = f'%7Bweeks%7D week%7Bplural%7D'%0A%0A # assume that a month is 31 days long, i am not trying%0A # to be aware%0A if delta.days %3E= 21 and delta.days %3C 365:%0A days = round(delta.days / 31, 2)%0A plural = 's' if days == 0 or days %3E 1 else ''%0A big = f'%7Bdays%7D month%7Bplural%7D'%0A%0A if delta.days %3E= 365:%0A years = round(delta.days / 365)%0A plural = 's' if years == 0 or years %3E 1 else ''%0A big = f'%7Byears%7D year%7Bplural%7D'%0A%0A m, s = divmod(delta.seconds, 60)%0A h, m = divmod(m, 60)%0A%0A return '%7B%7D, %7B:02d%7Dh%7B:02d%7Dm%7B:02d%7Ds'.format(big, h, m, s)%0A
|
|
595356b13c68dbd3ecd50fe4eede1b479e918056
|
This is junk
|
django-hq/apps/transformers/junk.py
|
django-hq/apps/transformers/junk.py
|
Python
| 0.999993 |
@@ -0,0 +1,33 @@
+# This is a test%0D%0A# for linefeeds
|
|
a7f97bbb5019b073d211c999d05b0500434d3c75
|
Use six.moves.http_client instead of httplib.
|
oscar/test/testcases.py
|
oscar/test/testcases.py
|
from six.moves import http_client

from django.core.urlresolvers import reverse
from django.contrib.auth.models import Permission
from django_webtest import WebTest
from purl import URL

from oscar.core.compat import get_user_model

User = get_user_model()


def add_permissions(user, permissions):
    """
    :param permissions: e.g. ['partner.dashboard_access']
    """
    for permission in permissions:
        app_label, _, codename = permission.partition('.')
        perm = Permission.objects.get(content_type__app_label=app_label,
                                      codename=codename)
        user.user_permissions.add(perm)


class WebTestCase(WebTest):
    is_staff = False
    is_anonymous = False
    username = 'testuser'
    email = '[email protected]'
    password = 'somefancypassword'
    is_superuser = False
    permissions = []

    def setUp(self):
        self.user = None
        if not self.is_anonymous or self.is_staff:
            self.user = User.objects.create_user(self.username, self.email,
                                                 self.password)
            self.user.is_staff = self.is_staff
            perms = self.permissions
            add_permissions(self.user, perms)
            self.user.save()
            self.login()

    def get(self, url, **kwargs):
        kwargs.setdefault('user', self.user)
        return self.app.get(url, **kwargs)

    def post(self, url, **kwargs):
        kwargs.setdefault('user', self.user)
        return self.app.post(url, **kwargs)

    def login(self, username=None, password=None):
        username = username or self.username
        password = password or self.password
        self.client.login(username=username, password=password)

    # Custom assertions

    def assertIsRedirect(self, response, expected_url=None):
        self.assertTrue(response.status_code in (
            http_client.FOUND, http_client.MOVED_PERMANENTLY))
        if expected_url:
            location = URL.from_string(response['Location'])
            self.assertEqual(expected_url, location.path())

    def assertRedirectsTo(self, response, url_name):
        self.assertTrue(str(response.status_code).startswith('3'))
        location = response.headers['Location']
        redirect_path = location.replace('http://localhost:80', '')
        self.assertEqual(reverse(url_name), redirect_path)

    def assertNoAccess(self, response):
        self.assertContext(response)
        self.assertTrue(response.status_code in (httplib.NOT_FOUND,
                                                 httplib.FORBIDDEN))

    def assertRedirectUrlName(self, response, name, kwargs=None):
        self.assertIsRedirect(response)
        location = response['Location'].replace('http://testserver', '')
        self.assertEqual(location, reverse(name, kwargs=kwargs))

    def assertIsOk(self, response):
        self.assertEqual(httplib.OK, response.status_code)

    def assertContext(self, response):
        self.assertTrue(response.context is not None,
                        'No context was returned')

    def assertInContext(self, response, key):
        self.assertContext(response)
        self.assertTrue(key in response.context,
                        "Context should contain a variable '%s'" % key)
Python
| 0 |
@@ -2498,19 +2498,23 @@
in (http
-lib
+_client
.NOT_FOU
@@ -2570,19 +2570,23 @@
http
-lib
+_client
.FORBIDD
@@ -2905,11 +2905,15 @@
http
-lib
+_client
.OK,
|
d9c197840282c6bdedf5e001a1092aa707ae139c
|
update email field length
|
corehq/apps/data_analytics/migrations/0008_auto_20161114_1903.py
|
corehq/apps/data_analytics/migrations/0008_auto_20161114_1903.py
|
Python
| 0.000001 |
@@ -0,0 +1,460 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.11 on 2016-11-14 19:03%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('data_analytics', '0007_auto_20160819_1423'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='maltrow',%0A name='email',%0A field=models.EmailField(max_length=254),%0A ),%0A %5D%0A
|
|
8ccf3d937d25ec93d1ce22d60735ffbcaf776fe3
|
Add a script for plotting distance to target.
|
analysis/plot-target-distance.py
|
analysis/plot-target-distance.py
|
Python
| 0 |
@@ -0,0 +1,985 @@
+import climate%0Aimport itertools%0Aimport lmj.plot%0Aimport numpy as np%0A%0Aimport source as experiment%0Aimport plots%0A%0A%[email protected](%0A root='plot data from this experiment subjects',%0A pattern=('plot data from files matching this pattern', 'option'),%0A markers=('plot data for these mocap markers', 'option'),%0A target_num=('plot data for this target', 'option', None, int),%0A approach_sec=('plot variance for N sec prior to target acquisition', 'option', None, float),%0A)%0Adef main(root, pattern='*/*block*/*circuit*.csv.gz', markers='r-fing-index l-fing-index r-heel r-knee', target_num=5, approach_sec=2):%0A with plots.plot() as ax:%0A for i, trial in enumerate(experiment.Experiment(root).trials_matching(pattern)):%0A for t in range(11):%0A s = trial.movement_to(t).distance_to_target().interpolate().reset_index(drop=True)%0A ax.plot(s.index, s.values, color=lmj.plot.COLOR11%5Bt%5D)%0A%0A%0Aif __name__ == '__main__':%0A climate.call(main)%0A
|
|
d8c18d9244ca09e942af57d74a407498c25d05ce
|
Add Linear Discriminant Analysis.
|
LDA.py
|
LDA.py
|
Python
| 0.000016 |
@@ -0,0 +1,1184 @@
+import numpy as np%0Afrom scipy import linalg as LA%0A%0Aclass LDA(object):%0A%09def __init__(self, data_inputs, data_labels):%0A%09%09self.data_inputs = np.array(data_inputs)%0A%09%09self.data_labels = data_labels%0A%09%09self.test_cases = self.data_inputs.shape%5B0%5D%0A%09%09self.labels = np.unique(data_labels)%0A%09%09self.Sw = np.zeros((self.data_inputs.shape%5B1%5D, self.data_inputs.shape%5B1%5D))%0A%09%09self.Sb = self.Sw.copy()%0A%0A%09def analyse(self):%0A%09%09C = np.cov(self.data_inputs.T)%0A%0A%09%09for label in self.labels:%0A%09%09%09indices = np.where(self.data_labels == label)%0A%09%09%09points = self.data_inputs%5Bindices%5B0%5D%5D%0A%09%09%09classcov = np.cov(points.T)%0A%09%09%09self.Sw += (np.float(points.shape%5B0%5D)/self.test_cases) * classcov%0A%0A%09%09self.Sb = C - self.Sw%0A%09%09evals, evecs = LA.eig(self.Sw, self.Sb)%0A%09%09indices = np.argsort(evals)%0A%09%09indices = indices%5B::-1%5D%0A%09%09evals = evals%5Bindices%5D%0A%09%09evecs = evecs%5Bindices%5D%0A%09%09self.eigen_vals = evals%0A%09%09self.eigen_vecs = evecs%0A%0A%09def reduce_dim(self, red_n, data_inputs=None):%0A%09%09w = self.eigen_vecs%5B:,:red_n%5D%0A%09%09if data_inputs is None:%0A%09%09%09data_inputs = self.data_inputs%0A%09%09return np.dot(data_inputs, w)%0A%0A%09def expand_dim(self, red_data):%0A%09%09red_n = red_data.shape%5B1%5D%0A%09%09return np.transpose(np.dot(self.eigen_vecs%5B:,:red_n%5D, red_data.T))%0A
|
|
3cc1bceaca2fe74d3d9f9fa846f976ba99cc7dee
|
Create RDF.py
|
RDF.py
|
RDF.py
|
Python
| 0.000004 |
@@ -0,0 +1,1991 @@
+from sys import argv%0Aimport pandas as pd%0Aimport numpy as np%0Afrom functions import crdf%0Aimport time%0Aimport accelerate as acc%0Aimport matplotlib%0Afrom matplotlib import pyplot as plt%0Afn = argv%5B1%5D%0Aprint('Box origin must be at the center!')%0Apos = pd.read_csv(fn, delim_whitespace=True, squeeze=1, header=None).values%0Aimport time%0Aimport numpy as np%0A%0ANdim = 500 # Finess of delta function%0AV = box%5B0%5D*box%5B1%5D*box%5B2%5D%0Arho_bins = Ndim**3/V # Number density of bins%0Arho = pos.shape%5B0%5D/V%0As = time.time()%0Ap, e = np.histogramdd(pos, bins=(Ndim, Ndim, Ndim), range=((-box%5B0%5D/2, box%5B0%5D/2), (-box%5B1%5D/2, box%5B1%5D/2),(-box%5B2%5D/2, box%5B2%5D/2)))%0Aprint('Binning particles: %25s' %25 (time.time()-s))%0Ap = np.fft.fftshift(p) # POS is of center-origin, here move origin to cornor.%0As = time.time()%0Afp = acc.mkl.fftpack.fftn(p) # Accelerate package%0Aprint('FFT time: %25s' %25 (time.time()-s))%0AFP = fp*fp.conj()%0As = time.time()%0ARDF = np.fft.ifftn(FP).real # IFFT%7B%3Crho(K)rho(-K)%3E%7D, 1/N%5Csum_i......(see numpy.fft, so rho_bins is needed)%0Aprint('IFFT time: %25s' %25 (time.time()-s))%0ARDF%5B0,0,0%5D -= pos.shape%5B0%5D%0ARDF = np.fft.fftshift(RDF)%0Arbin = 0.2 # %3E= box / Ndiv%0Arx = e%5B0%5D%5B:Ndim%5D + 0.5*(e%5B0%5D%5B-1%5D-e%5B0%5D%5B-2%5D)%0Ary = e%5B1%5D%5B:Ndim%5D + 0.5*(e%5B1%5D%5B-1%5D-e%5B1%5D%5B-2%5D)%0Arz = e%5B2%5D%5B:Ndim%5D + 0.5*(e%5B2%5D%5B-1%5D-e%5B2%5D%5B-2%5D)%0Afrom numba import jit%0A%0A@jit # normalize g(R) to g(r)%0Adef norm_r(RDF, rbin, rx, ry, rz):%0A rdf = np.zeros(int(box.max()/2*3**0.5/rbin)+1, dtype=np.float)%0A cter = np.zeros(rdf.shape, dtype=np.float)%0A for i in range(Ndim):%0A for j in range(Ndim):%0A for k in range(Ndim):%0A rr = rx%5Bi%5D**2+ry%5Bj%5D**2+rz%5Bk%5D**2%0A r = int(rr**0.5/rbin)%0A rdf%5Br%5D += RDF%5Bi,j,k%5D%0A cter%5Br%5D += 1%0A return np.nan_to_num(rdf/cter)%0A%0A%0Ardf = norm_r(RDF, rbin, rx,ry,rz)%0Ardf /= pos.shape%5B0%5D * rho # NA*NB/V for gAB(r)%0Ardf *= rho_bins # NORMED BY BIN DENSITY%0Arr = np.arange(rdf.shape%5B0%5D)*rbin%0A%0Ao = open('rdf.txt', 'w')%0Afor i, y in enumerate(rdf):%0A o.write('%25.8f %25.8f%5Cn' %25 ((i+0.5) * rbin, y))%0Ao.close()%0A
|
|
bce02c436479adf3bf3deae22704be5cf1cace89
|
set addr_list and contact_list attr #5635
|
erpnext/utilities/address_and_contact.py
|
erpnext/utilities/address_and_contact.py
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt

from __future__ import unicode_literals
import frappe

def load_address_and_contact(doc, key):
	"""Loads address list and contact list in `__onload`"""
	from erpnext.utilities.doctype.address.address import get_address_display

	doc.get("__onload").addr_list = [a.update({"display": get_address_display(a)}) \
		for a in frappe.get_all("Address",
			fields="*", filters={key: doc.name},
			order_by="is_primary_address desc, modified desc")]

	if doc.doctype != "Lead":
		doc.get("__onload").contact_list = frappe.get_all("Contact",
			fields="*", filters={key: doc.name},
			order_by="is_primary_contact desc, modified desc")

def has_permission(doc, ptype, user):
	links = get_permitted_and_not_permitted_links(doc.doctype)
	if not links.get("not_permitted_links"):
		# optimization: don't determine permissions based on link fields
		return True

	# True if any one is True or all are empty
	names = []
	for df in (links.get("permitted_links") + links.get("not_permitted_links")):
		doctype = df.options
		name = doc.get(df.fieldname)
		names.append(name)

		if name and frappe.has_permission(doctype, ptype, doc=name):
			return True

	if not any(names):
		return True
	else:
		return False

def get_permission_query_conditions_for_contact(user):
	return get_permission_query_conditions("Contact")

def get_permission_query_conditions_for_address(user):
	return get_permission_query_conditions("Address")

def get_permission_query_conditions(doctype):
	links = get_permitted_and_not_permitted_links(doctype)

	if not links.get("not_permitted_links"):
		# when everything is permitted, don't add additional condition
		return ""

	elif not links.get("permitted_links"):
		conditions = []

		# when everything is not permitted
		for df in links.get("not_permitted_links"):
			# like ifnull(customer, '')='' and ifnull(supplier, '')=''
			conditions.append("ifnull(`tab{doctype}`.`{fieldname}`, '')=''".format(doctype=doctype, fieldname=df.fieldname))

		return "( " + " and ".join(conditions) + " )"

	else:
		conditions = []

		for df in links.get("permitted_links"):
			# like ifnull(customer, '')!='' or ifnull(supplier, '')!=''
			conditions.append("ifnull(`tab{doctype}`.`{fieldname}`, '')!=''".format(doctype=doctype, fieldname=df.fieldname))

		return "( " + " or ".join(conditions) + " )"

def get_permitted_and_not_permitted_links(doctype):
	permitted_links = []
	not_permitted_links = []

	meta = frappe.get_meta(doctype)

	for df in meta.get_link_fields():
		if df.options not in ("Customer", "Supplier", "Company", "Sales Partner"):
			continue

		if frappe.has_permission(df.options):
			permitted_links.append(df)
		else:
			not_permitted_links.append(df)

	return {
		"permitted_links": permitted_links,
		"not_permitted_links": not_permitted_links
	}
|
Python
| 0 |
@@ -369,17 +369,18 @@
onload%22)
-.
+%5B%22
addr_lis
@@ -380,16 +380,18 @@
ddr_list
+%22%5D
= %5Ba.up
@@ -615,17 +615,18 @@
onload%22)
-.
+%5B%22
contact_
@@ -629,16 +629,18 @@
act_list
+%22%5D
= frapp
|
10dbbe5b10abf954ab912fc3a2cdfe1532bf71cf
|
test file added
|
cortex-py/test/test_cortex.py
|
cortex-py/test/test_cortex.py
|
Python
| 0 |
@@ -0,0 +1,2704 @@
+import time%0Aimport cortex%0A%0A%0Aclass MyDataHandler:%0A def __init__(self):%0A self.alldata = %5B%5D%0A %0A def MyErrorHandler(self, iLevel, msg):%0A print(%22ERROR: %22)%0A print(iLevel, msg.contents)%0A return 0%0A %0A def MyDataHandler(self, Frame):%0A print(%22got called%22)%0A try:%0A print(%22Received multi-cast frame no %25d%5Cn%22%25(Frame.contents.iFrame))%0A print %22Bodies: %22, Frame.contents.nBodies%0A print %22BodyData: %22, Frame.contents.BodyData%5B0%5D.szName%0A print %22Number of Markers of Body%5B0%5D: %22, Frame.contents.BodyData%5B0%5D.nMarkers%0A for i in range(Frame.contents.BodyData%5B0%5D.nMarkers):%0A print %22MarkerX %22, Frame.contents.BodyData%5B0%5D.Markers%5Bi%5D%5B0%5D%0A print %22MarkerY %22, Frame.contents.BodyData%5B0%5D.Markers%5Bi%5D%5B1%5D%0A print %22MarkerZ %22, Frame.contents.BodyData%5B0%5D.Markers%5Bi%5D%5B2%5D%0A print %22BodyMarker%5B2%5D.x: %22, Frame.contents.BodyData%5B0%5D.Markers%5B3%5D%5B0%5D%0A print %22Unidentified markers: %22, Frame.contents.nUnidentifiedMarkers%0A print %22Delay: %22, Frame.contents.fDelay%0A print %22%22, Frame.contents.UnidentifiedMarkers%5B0%5D%5B0%5D%0A self.alldata.append(Frame.contents.UnidentifiedMarkers%5B0%5D%5B0%5D)%0A except:%0A print(%22Frame empty%22)%0A return 0%0A%0Aif __name__ == %22__main__%22:%0A my_obj = MyDataHandler()%0A %0A Cortex_SetErrorMsgHandlerFunc(my_obj.MyErrorHandler)%0A Cortex_SetDataHandlerFunc(my_obj.MyDataHandler)%0A%0A if Cortex_Initialize() != 0:%0A print(%22ERROR: unable to initialize%22)%0A Cortex_Exit()%0A exit(0)%0A%0A pBodyDefs = Cortex_GetBodyDefs()%0A if pBodyDefs == None:%0A print(%22Failed to get body defs%22)%0A else:%0A print(%22Got body defs%22)%0A print(%22bodydefs: %22, pBodyDefs.contents.nBodyDefs)%0A print %22Marker names: %22%0A print %22%22, pBodyDefs.contents.BodyDefs%5B0%5D.szName%0A for i in range(pBodyDefs.contents.BodyDefs%5B0%5D.nMarkers):%0A print %22Marker: %22, pBodyDefs.contents.BodyDefs%5B0%5D.szMarkerNames%5Bi%5D%0A Cortex_FreeBodyDefs(pBodyDefs)%0A pBodyDefs = None%0A %0A pResponse = c_void_p%0A nBytes = c_int%0A retval = Cortex_Request(%22GetContextFrameRate%22, pResponse, nBytes)%0A if retval != 0:%0A print(%22ERROR, GetContextFrameRate%22)%0A%0A #contextFrameRate = cast(pResponse, POINTER(c_float))%0A%0A #print(%22ContextFrameRate = %253.1f Hz%22, contextFrameRate)%0A%0A print(%22*** Starting live mode ***%22)%0A retval = Cortex_Request(%22LiveMode%22, pResponse, nBytes)%0A time.sleep(1.0)%0A retval = Cortex_Request(%22Pause%22, pResponse, nBytes)%0A print(%22*** Paused live mode ***%22)%0A%0A print(%22****** Cortex_Exit ******%22)%0A retval = Cortex_Exit();%0A %0A print my_obj.alldata%0A
|
|
8b257c2a4b8f949f81965b7ffaa80d18c48974a4
|
add app framework
|
app.py
|
app.py
|
Python
| 0.000003 |
@@ -0,0 +1,314 @@
+import tornado.ioloop%0Aimport tornado.web%0A%0Aclass MainHandler(tornado.web.RequestHandler):%0A def get(self):%0A self.write(%22Hello, world%22)%0A%0Aapplication = tornado.web.Application(%5B%0A (r%22/%22, MainHandler),%0A%5D)%0A%0Aif __name__ == %22__main__%22:%0A application.listen(8888)%0A tornado.ioloop.IOLoop.instance().start()%0A
|
|
e258b608c40b2abca30fbc85601e05c48558fff9
|
add weird migration
|
webapp/calendars/migrations/0023_auto_20160109_1307.py
|
webapp/calendars/migrations/0023_auto_20160109_1307.py
|
Python
| 0.000002 |
@@ -0,0 +1,696 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('calendars', '0022_auto_20151121_1628'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelOptions(%0A name='category',%0A options=%7B'verbose_name_plural': 'Categories'%7D,%0A ),%0A migrations.AlterField(%0A model_name='category',%0A name='color',%0A field=models.CharField(max_length=16, default='primary', choices=%5B('primary', 'Niebieski'), ('success', 'Zielony'), ('info', 'Jasno niebieski'), ('warning', '%C5%BB%C3%B3%C5%82ty'), ('danger', 'Czerwony')%5D),%0A ),%0A %5D%0A
|
|
7b15ad790631926030f8b0b6c32f214f2c8001b1
|
Create __init__.py
|
cno/boolean/__init__.py
|
cno/boolean/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
edf6e9ceacab9aa2d8795340089182ead07c30b3
|
Add ipopt v3.12.4 package.
|
var/spack/repos/builtin/packages/ipopt/package.py
|
var/spack/repos/builtin/packages/ipopt/package.py
|
Python
| 0 |
@@ -0,0 +1,1983 @@
+from spack import *%0A%0Aclass Ipopt(Package):%0A %22%22%22Ipopt (Interior Point OPTimizer, pronounced eye-pea-Opt) is a%0A software package for large-scale nonlinear optimization.%22%22%22%0A homepage = %22https://projects.coin-or.org/Ipopt%22%0A url = %22http://www.coin-or.org/download/source/Ipopt/Ipopt-3.12.4.tgz%22%0A%0A version('3.12.4', '12a8ecaff8dd90025ddea6c65b49cb03')%0A version('3.12.3', 'c560cbfa9cbf62acf8b485823c255a1b')%0A version('3.12.2', 'ec1e855257d7de09e122c446506fb00d')%0A version('3.12.1', 'ceaf895ce80c77778f2cab68ba9f17f3')%0A version('3.12.0', 'f7dfc3aa106a6711a85214de7595e827')%0A%0A depends_on(%22blas%22)%0A depends_on(%22lapack%22)%0A depends_on(%22pkg-config%22)%0A depends_on(%22mumps+double~mpi%22) %0A %0A def install(self, spec, prefix):%0A # Dependency directories%0A blas_dir = spec%5B'blas'%5D.prefix%0A lapack_dir = spec%5B'lapack'%5D.prefix%0A mumps_dir = spec%5B'mumps'%5D.prefix%0A%0A # Add directory with fake MPI headers in sequential MUMPS%0A # install to header search path%0A mumps_flags = %22-ldmumps -lmumps_common -lpord -lmpiseq%22%0A mumps_libcmd = %22-L%25s %22 %25 mumps_dir.lib + mumps_flags%0A%0A # By convention, spack links blas & lapack libs to libblas & liblapack%0A blas_lib = %22-L%25s%22 %25 blas_dir.lib + %22 -lblas%22%0A lapack_lib = %22-L%25s%22 %25 lapack_dir.lib + %22 -llapack%22%0A %0A configure_args = %5B%0A %22--prefix=%25s%22 %25 prefix,%0A %22--with-mumps-incdir=%25s%22 %25 mumps_dir.include,%0A %22--with-mumps-lib=%25s%22 %25 mumps_libcmd,%0A %22--enable-shared%22,%0A %22--with-blas-incdir=%25s%22 %25 blas_dir.include,%0A %22--with-blas-lib=%25s%22 %25 blas_lib,%0A %22--with-lapack-incdir=%25s%22 %25 lapack_dir.include,%0A %22--with-lapack-lib=%25s%22 %25 lapack_lib%0A %5D%0A %0A configure(*configure_args)%0A%0A # IPOPT does not build correctly in parallel on OS X%0A make(parallel=False)%0A make(%22test%22, parallel=False)%0A make(%22install%22, parallel=False)%0A
|
|
fe32ab94bbf36621fa926d565a7720b52f1d5f11
|
268. Missing Number. In-place
|
p268_inplace.py
|
p268_inplace.py
|
Python
| 0.999999 |
@@ -0,0 +1,803 @@
+import unittest%0A%0A%0Aclass Solution(object):%0A def missingNumber(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: int%0A %22%22%22%0A n = len(nums)%0A%0A for num in nums:%0A if num == -1:%0A continue%0A%0A while num != n and nums%5Bnum%5D != -1:%0A next_num = nums%5Bnum%5D%0A nums%5Bnum%5D = -1%0A num = next_num%0A%0A for i, num in enumerate(nums):%0A if num != -1:%0A return i%0A return n%0A%0A%0Aclass Test(unittest.TestCase):%0A def test(self):%0A self._test(%5B0, 1, 3%5D, 2)%0A self._test(%5B0, 1, 2%5D, 3)%0A%0A def _test(self, nums, expected):%0A actual = Solution().missingNumber(nums)%0A self.assertEqual(actual, expected)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
533aeb6cdc045f7d4cbfc4bc20dd89da4179ab35
|
Add application class to hold all the resources pertaining to an end-user application, including RPC servers, HTTP servers, etc.
|
app/core/services/application.py
|
app/core/services/application.py
|
Python
| 0 |
@@ -0,0 +1,1309 @@
+from threading import RLock%0Afrom uuid import uuid4%0A%0Afrom app.core.messaging import Sender%0A%0A%0Aclass Application(object):%0A APPLICATION_INFO_QUEUE = %22/_system/applications%22%0A%0A def __init__(self):%0A self.unique_id = str(uuid4())%0A self.rpc_servers = %7B%7D%0A self.app_servers = %7B%7D%0A self.info_lock = RLock()%0A%0A def register_rpc_server(self, rpc_server):%0A with self.info_lock:%0A names = %7Bx.name for x in self.rpc_servers.values()%7D%0A if rpc_server.name in names:%0A raise ValueError(%22Name already exists: %22 + rpc_server.name)%0A self.rpc_servers%5Brpc_server.queue_name%5D = rpc_server%0A self.push_update()%0A%0A def register_application_server(self, server):%0A with self.info_lock:%0A self.app_servers%5Bserver.id%5D = server%0A self.push_update()%0A%0A def push_update(self):%0A sender = Sender(self.APPLICATION_INFO_QUEUE)%0A sender.start()%0A sender.send(self.info_message, headers=%7B%22KEY%22: self.unique_id%7D)%0A sender.close()%0A%0A @property%0A def info_message(self):%0A with self.info_lock:%0A return %7B%0A %22apps%22: %7Bx.unique_id: x.info_message for x in self.app_servers%7D,%0A %22rpc%22: %7Bx.unique_id: x.info_message for x in self.rpc_servers%7D%0A %7D%0A
|
|
1df5619347b8f3e2a9fd49c95455e8b3aba07cf9
|
Add example of desired new quick server usage
|
examples/quick_server.py
|
examples/quick_server.py
|
Python
| 0 |
@@ -0,0 +1,108 @@
+import hug%0A%0A%[email protected]()%0Adef quick():%0A return %22Serving!%22%0A%0A%0Aif __name__ == '__main__':%0A __hug__.serve()%0A
|
|
ce1a080c01a5f792d128278fbb035f50e106e959
|
set up general logging and twitter stream log
|
geotweet/log.py
|
geotweet/log.py
|
Python
| 0 |
@@ -0,0 +1,818 @@
+import logging%0Afrom logging.handlers import TimedRotatingFileHandler%0Aimport os%0A%0A%0ALOG_NAME = 'geotweet'%0ALOG_FILE = os.getenv('GEOTWEET_LOG', '/tmp/geotweet.log')%0ALOG_LEVEL = logging.DEBUG%0ATWITTER_LOG_NAME = 'twitter-stream'%0A%0A%0Adef get_logger():%0A logger = logging.getLogger(LOG_NAME)%0A logger.setLevel(LOG_LEVEL)%0A fh = logging.FileHandler(LOG_FILE)%0A logformat = '%25(levelname)s %25(asctime)s: %25(message)s'%0A formatter = logging.Formatter(logformat)%0A fh.setFormatter(formatter)%0A logger.addHandler(fh)%0A return logger%0A%0A%0Adef get_rotating_logger(logfile, interval, when=%22M%22):%0A logger = logging.getLogger(TWITTER_LOG_NAME)%0A logger.setLevel(LOG_LEVEL)%0A handler = TimedRotatingFileHandler(logfile, when=when, interval=interval)%0A logger.addHandler(handler)%0A return logger%0A%0A%0Alogger = get_logger()%0A
|