Dataset columns (one record per commit):

column        type           length / values
commit        stringlengths  40–40
subject       stringlengths  1–3.25k
old_file      stringlengths  4–311
new_file      stringlengths  4–311
old_contents  stringlengths  0–26.3k
lang          stringclasses  3 values
proba         float64        0–1
diff          stringlengths  0–7.82k
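Each record's diff field is stored on a single line with newlines and reserved characters percent-escaped (%0A, %22, %5B, and so on), while old_contents is stored unescaped. A minimal sketch of recovering a readable multi-line diff from one record, assuming the escapes are standard URL percent-encoding; the field values below are copied from the first record, with the diff body abbreviated:

```python
from urllib.parse import unquote

# One row of the dump, keyed by the schema above. Values are taken from the
# first record; the diff string is truncated here for brevity.
row = {
    "commit": "1c2292dcd47865a3dbd3f7b9adf53433f6f34770",
    "subject": "Create new package. (#6215)",
    "old_file": "var/spack/repos/builtin/packages/r-timedate/package.py",
    "new_file": "var/spack/repos/builtin/packages/r-timedate/package.py",
    "lang": "Python",
    "proba": 0.000002,
    "diff": "@@ -0,0 +1,1696 @@ +from spack import *%0A%0A%0Aclass RTimedate(RPackage):%0A ...",
}

# Percent-decoding restores the multi-line body (%0A -> newline,
# %22 -> double quote, %25 -> literal percent, etc.).
print(unquote(row["diff"]))
```

The same decoding applies to the diff column of every record below.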
1c2292dcd47865a3dbd3f7b9adf53433f6f34770
Create new package. (#6215)
var/spack/repos/builtin/packages/r-timedate/package.py
var/spack/repos/builtin/packages/r-timedate/package.py
Python
0.000002
@@ -0,0 +1,1696 @@ +##############################################################################%0A# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/spack/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass RTimedate(RPackage):%0A %22%22%22Environment for teaching %22Financial Engineering and Computational%0A Finance%22. Managing chronological and calendar objects.%22%22%22%0A%0A homepage = %22https://cran.r-project.org/package=timeDate%22%0A url = %22https://cran.r-project.org/src/contrib/timeDate_3012.100.tar.gz%22%0A list_url = %22https://cran.r-project.org/src/contrib/Archive/timeDate%22%0A%0A version('3012.100', '9f69d3724efbf0e125e6b8e6d3475fe4')%0A
5ba4fce42892634213bede09759bbca1cd56e346
add package py-brian2 (#3617)
var/spack/repos/builtin/packages/py-brian2/package.py
var/spack/repos/builtin/packages/py-brian2/package.py
Python
0
@@ -0,0 +1,2297 @@ +##############################################################################%0A# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/llnl/spack%0A# Please also see the LICENSE file for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass PyBrian2(PythonPackage):%0A %22%22%22A clock-driven simulator for spiking neural networks%22%22%22%0A%0A homepage = %22http://www.briansimulator.org%22%0A url = %22https://pypi.io/packages/source/B/Brian2/Brian2-2.0.1.tar.gz%22%0A%0A version('2.0.1', 'df5990e9a71f7344887bc02f54dfd0f0')%0A version('2.0rc3', '3100c5e4eb9eb83a06ff0413a7d43152')%0A%0A variant('docs', default=False)%0A%0A # depends on py-setuptools@6: for windows, if spack targets windows,%0A # this will need to be added here%0A depends_on('py-setuptools', type='build')%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('py-pyparsing', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A%0A # depends_on('[email protected]:', type=('build', 'run')) # extra test%0A depends_on('[email protected]:', type=('build', 'run'), when='+docs')%0A depends_on('[email protected]:', type=('build', 'run'), when='+docs')%0A
26525354e7bf2465a561a5172a0d9fef4205e77d
move column defs to singleton object
chart06columns.py
chart06columns.py
Python
0.000048
@@ -0,0 +1,2278 @@ +'''define columns in all reports produced by chart06'''%0Aimport numpy as np%0A%0A%0A# all possible column definition%0A_defs = %7B%0A 'median_absolute_error': %5B6, '%256d', (' ', 'MAE'), 'median absolute error'%5D,%0A 'model': %5B5, '%255s', (' ', 'model'),%0A 'model name (en = elastic net, gd = gradient boosting, rf = random forests)'%5D,%0A 'n_months_back': %5B2, '%252d', (' ', 'bk'), 'number of mnths back for training'%5D,%0A 'max_depth': %5B4, '%254d', (' ', 'mxd'), 'max depth of any individual decision tree'%5D,%0A 'n_estimators': %5B4, '%254d', (' ', 'next'), 'number of estimators (= number of trees)'%5D,%0A 'max_features': %5B4, '%254s', (' ', 'mxft'), 'maximum number of features examined to split a node'%5D,%0A 'learning_rate': %5B4, '%254.1f', (' ', 'lr'), 'learning rate for gradient boosting'%5D,%0A 'alpha': %5B5, '%255.2f', (' ', 'alpha'), 'constant multiplying penalty term for elastic net'%5D,%0A 'l1_ratio': %5B4, '%254.2f', (' ', 'l1'), 'l1_ratio mixing L1 and L2 penalties for elastic net'%5D,%0A 'units_X': %5B6, '%256s', (' ', 'unitsX'), 'units for the x value; either natural (nat) or log'%5D,%0A 'units_y': %5B6, '%256s', (' ', 'unitsY'), 'units for the y value; either natural (nat) or log'%5D,%0A 'validation_month': %5B6, '%256d', ('vald', 'month'), 'month used for validation'%5D,%0A 'rank': %5B4, '%254d', (' ', 'rank'), 'rank within validation month; 1 == lowest MAE'%5D,%0A 'median_price': %5B6, '%256d', ('median', 'price'), 'median price in the validation month'%5D,%0A 'mae_validation': %5B6, '%256d', ('vald ', 'MAE'), 'median absolute error in validation month'%5D,%0A 'mae_next': %5B6, '%256d', ('next ', 'MAE'),%0A 'median absolute error in test month (which follows the validation month)'%5D,%0A 'note': %5B15, '%2515s', (' ', 'note'),%0A 'when provided, the next MAE column contains the specified value'%5D,%0A 'rank_index': %5B5, '%255d', ('rank', 'index'), 'ranking of model performance in the validation month; 0 == best'%5D,%0A 'weight': %5B6, '%256.4f', (' ', 'weight'), 'weight of the model in the ensemble method'%5D,%0A%7D%0A%0A%0Adef defs_for_columns(*key_list):%0A return %5B%5Bkey%5D + _defs%5Bkey%5D%0A for key in key_list%0A %5D%0A%0A%0Adef replace_by_spaces(k, v):%0A 'define values that are replaced by spaces'%0A if isinstance(v, float) and np.isnan(v):%0A return True%0A return False%0A
32740172d4258a95145a5bb68be315fe1640db23
Add alpha version of bootstraps script
bootstraps.py
bootstraps.py
Python
0
@@ -0,0 +1,1389 @@ +''' %0ADoes N times random stacks of X maps of large L in pixels.%0A%0AAt each stacks it gets the central temperature, makes a histogram for all %0Astacks, then fits a normal distribution for the histogram. %0A'''%0A%0AN = 100000%0AX = 10%0AL = 16%0A%0Aimport stacklib as sl%0Aimport numpy as np%0Afrom scipy.stats import norm%0Aimport matplotlib.pyplot as plt%0Aimport matplotlib.mlab as mlab%0Aimport os%0Apath = os.environ%5B%22HOME%22%5D + '/FILES/' %0A%0Am = path + 'ACT_148_equ_season_3_1way_v3_src_free.fits'%0Aw = path + 'ACT_148_equ_season_3_1way_calgc_strictcuts2_weights.fits'%0Ab = path + 'profile_AR1_2009_pixwin_130224.txt'%0As = path + 'Equa_mask_15mJy.fits'%0A%0ARA0 = 55.%0ARA1 = 324.%0ADEC0 = -1.5%0ADEC1 = 1.5%0A%0AM = sl.StackMap(m,w,b,s,RA0,RA1,DEC0,DEC1)%0AM.squeezefullmap()%0AM.filterfullmap() %0AM.unsqueezefullmap()%0A%0ADeltaTs = %5B%5D%0A%0Adef onestack(X,L):%0A cat = sl.fakecatalog(X)%0A M.setsubmapL(L)%0A M.setstackmap()%0A for item in cat:%0A M.setsubmap(item%5B0%5D,item%5B1%5D)%0A M.stacksubmap()%0A M.finishstack()%0A return DeltaTs.append(M.stackmap%5BL/2,L/2%5D)%0A%0Afor i in range(N):%0A onestack(X,L)%0A%0A# histogram%0An, bins, patches = plt.hist(DeltaTs,bins=50,normed = 1, facecolor = 'blue')%0A%0A# best fit of data%0A(mu, sigma) = norm.fit(DeltaTs)%0A%0A# add a 'best fit' line%0Ay = mlab.normpdf( bins, mu, sigma)%0Al = plt.plot(bins, y, 'r--', linewidth=2)%0A%0A%0Aplt.xlabel('Temperature (microKelvin)')%0Aplt.ylabel('Probability Density')%0A%0Aplt.show()
89b10996cfe6e60870e55b7c759aa73448bfa4d8
remove off curve pen
pens/removeOffcurvesPen.py
pens/removeOffcurvesPen.py
Python
0.000001
@@ -0,0 +1,1916 @@ +## use BasePen as base class%0Afrom fontTools.pens.basePen import BasePen%0A%0Aclass RemoveOffcurvesPen(BasePen):%0A %22%22%22%0A A simple pen drawing a contour without any offcurves.%0A %22%22%22%0A def __init__(self, glyphSet):%0A BasePen.__init__(self, glyphSet)%0A %0A self._contours = %5B%5D%0A self._components = %5B%5D%0A %0A def _moveTo(self, pt):%0A self._contours.append(%5B%5D)%0A self._contours%5B-1%5D.append((%22moveTo%22, pt)) %0A %0A def _lineTo(self, pt):%0A self._contours%5B-1%5D.append((%22lineTo%22, pt))%0A %0A def _curveToOne(self, pt1, pt2, pt3):%0A self._contours%5B-1%5D.append((%22lineTo%22, pt3)) %0A %0A def qCurveTo(self, *points):%0A pt = points%5B-1%5D%0A self._contours%5B-1%5D.append((%22lineTo%22, pt))%0A%0A def _closePath(self):%0A self._contours%5B-1%5D.append((%22closePath%22, None))%0A %0A def _endpath(self):%0A self._contours%5B-1%5D.append((%22endPath%22, None))%0A %0A def addComponent(self, baseName, transformation):%0A self._components.append((baseName, transformation))%0A %0A def draw(self, outPen):%0A %22%22%22%0A Draw the stored instructions in an other pen.%0A %22%22%22%0A for contour in self._contours:%0A for penAttr, pt in contour:%0A func = getattr(outPen, penAttr)%0A if pt is None:%0A func()%0A else:%0A func(pt)%0A %0A for baseGlyph, transformation in self._components:%0A outPen.addComponent(baseGlyph, transformation)%0A %0A %0A## get the current glyph%0Ag = CurrentGlyph()%0A%0A## prepare the glyph for undo%0Ag.prepareUndo(%22Remove All Offcurves%22)%0A%0A## create a pen%0Apen = RemoveOffcurvesPen(g.getParent())%0A%0A## draw the glyph in the pen%0Ag.draw(pen)%0A%0A## clear the glyph%0Ag.clear()%0A%0A## draw the stored contour from the pen into the emtpy glyph%0Apen.draw(g.getPen())%0A%0A## tell the glyph undo watching is over%0Ag.performUndo()
10952213496e8a1cbf80ba1eee7a0e968bdea14a
add missing test
corehq/ex-submodules/couchforms/tests/test_errors.py
corehq/ex-submodules/couchforms/tests/test_errors.py
Python
0.000288
@@ -0,0 +1,1810 @@ +from django.test import TestCase%0Afrom casexml.apps.case.exceptions import IllegalCaseId%0Afrom corehq.apps.receiverwrapper import submit_form_locally%0Afrom couchforms.models import XFormError%0A%0A%0Aclass CaseProcessingErrorsTest(TestCase):%0A def test_no_case_id(self):%0A %22%22%22%0A submit form with a case block that has no case_id%0A%0A check that%0A - it errors%0A - the form is not saved under its original id%0A - an XFormError is saved with the original id as orig_id%0A - the error was logged (%3C-- is this hard to test?)%0A%0A %3Cdata xmlns=%22example.com/foo%22%3E%0A %3Ccase case_id=%22%22%3E%0A %3Cupdate%3E%3Cfoo%3Ebar%3C/foo%3E%3C/update%3E%0A %3C/case%3E%0A %3C/data%3E%0A %22%22%22%0A with self.assertRaises(IllegalCaseId):%0A submit_form_locally(%0A %22%22%22%3Cdata xmlns=%22example.com/foo%22%3E%0A %3Cmeta%3E%0A %3CinstanceID%3Eabc-easy-as-123%3C/instanceID%3E%0A %3C/meta%3E%0A %3Ccase case_id=%22%22 xmlns=%22http://commcarehq.org/case/transaction/v2%22%3E%0A %3Cupdate%3E%3Cfoo%3Ebar%3C/foo%3E%3C/update%3E%0A %3C/case%3E%0A %3C/data%3E%22%22%22,%0A 'my_very_special_domain',%0A )%0A xform_errors = XFormError.view(%0A 'domain/docs',%0A startkey=%5B'my_very_special_domain', 'XFormError'%5D,%0A endkey=%5B'my_very_special_domain', 'XFormError', %7B%7D%5D,%0A )%0A related_errors = %5Bxform_error for xform_error in xform_errors%0A if xform_error.orig_id == 'abc-easy-as-123'%5D%0A self.assertEqual(len(related_errors), 1)%0A related_error = related_errors%5B0%5D%0A self.assertEqual(related_error.problem,%0A 'IllegalCaseId: case_id must not be empty')%0A
b7d31c8c3fc71a8c65937ab073341d56f2841b85
Update PkgDistributionCreator.py
MTM_Installer/PkgDistributionCreator.py
MTM_Installer/PkgDistributionCreator.py
#!/usr/bin/env python # # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # *** Drew Coobs <[email protected]> *** # Modified version of Chris Gerke's PkgDistributionCreator script # https://github.com/autopkg/cgerke-recipes/blob/master/SharedProcessors/PkgDistributionCreator.py # import os.path import subprocess import shutil import xml.etree.ElementTree as ET from glob import glob from autopkglib import Processor, ProcessorError __all__ = ["PkgDistributionCreator"] class PkgDistributionCreator(Processor): description = ("Bundles together munki pkg installers with MTM onboarding pkg. ") input_variables = { "source_file1": { "required": True, "description": ("Path to a source file (MyCoolPkg1.pkg) "), }, "source_file2": { "required": True, "description": ("Path to a source file (MyCoolPkg2.pkg) "), }, "source_file3": { "required": True, "description": ("Path to a source file (MyCoolPkg3.pkg) "), }, "source_file4": { "required": True, "description": ("Path to a source file (MyCoolPkg4.pkg) "), }, "source_file5": { "required": True, "description": ("Path to a source file (MyCoolPkg5.pkg) "), }, "source_file6": { "required": True, "description": ("Path to a source file (MyCoolPkg6.pkg) "), }, "distribution_file": { "required": True, "description": ("Destination path of distribution file. "), }, "package_dir": { "required": True, "description": ("Directory containing source pkgs. "), }, "output_file": { "required": True, "description": ("Name of output file. 
"), }, } output_variables = { } __doc__ = description source_path = None def pkgConvert(self): if os.path.exists('/usr/bin/productbuild'): try: self.output("Found binary %s" % '/usr/bin/productbuild') except OSError as e: raise ProcessorError( "Can't find binary %s: %s" % ('/usr/bin/productbuild', e.strerror)) try: pbcmd = ["/usr/bin/productbuild", "--synthesize", "--package", self.env['source_file1'], "--package", self.env['source_file2'], "--package", self.env['source_file3'], "--package", self.env['source_file4'], "--package", self.env['source_file5'], "--package", self.env['source_file6'], self.env['distribution_file']] p = subprocess.Popen(pbcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out, err) = p.communicate() except OSError as e: raise ProcessorError("Creation of distribution file failed with error code %d: %s" % (e.errno, e.strerror)) if p.returncode != 0: raise ProcessorError("Creation of distribution file %s failed: %s" % (self.env['output_file'], err)) try: pbcmd = ["/usr/bin/productbuild", "--distribution", self.env['distribution_file'], "--package-path", self.env['package_dir'], self.env['output_file']] p = subprocess.Popen(pbcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out, err) = p.communicate() except OSError as e: raise ProcessorError("cmmac execution failed with error code %d: %s" % (e.errno, e.strerror)) if p.returncode != 0: raise ProcessorError("cmmac conversion of %s failed: %s" % (self.env['output_file'], err)) tree = ET.parse('/Users/Shared/AutoPkg/Cache/com.github.Gibbun.pkg.UofI_MTM_Installer/distribution.xml') root = tree.getroot() child = ET.SubElement(root, 'title') child.text = 'My Awesome App' root.write('/Users/Shared/AutoPkg/Cache/com.github.Gibbun.pkg.UofI_MTM_Installer/distribution.xml') def main(self): if os.path.exists(self.env['source_file1']): try: self.output("Found %s" % self.env['source_file1']) except OSError as e: raise ProcessorError( "Can't find %s" % (self.env['source_file1'], e.strerror)) if os.path.exists(self.env['source_file2']): try: self.output("Found %s" % self.env['source_file2']) except OSError as e: raise ProcessorError( "Can't find %s" % (self.env['source_file2'], e.strerror)) if os.path.exists(self.env['source_file3']): try: self.output("Found %s" % self.env['source_file3']) except OSError as e: raise ProcessorError( "Can't find %s" % (self.env['source_file3'], e.strerror)) if os.path.exists(self.env['source_file4']): try: self.output("Found %s" % self.env['source_file4']) except OSError as e: raise ProcessorError( "Can't find %s" % (self.env['source_file4'], e.strerror)) if os.path.exists(self.env['source_file5']): try: self.output("Found %s" % self.env['source_file5']) except OSError as e: raise ProcessorError( "Can't find %s" % (self.env['source_file5'], e.strerror)) if os.path.exists(self.env['source_file6']): try: self.output("Found %s" % self.env['source_file6']) self.pkgConvert() except OSError as e: raise ProcessorError( "Can't find %s" % (self.env['source_file6'], e.strerror)) if __name__ == '__main__': processor = PkgDistributionCreator() processor.execute_shell()
Python
0
@@ -4776,20 +4776,89 @@ -root +newtree = root.getroottree()%0A print(type(newtree))%0A newtree .write(' @@ -4940,24 +4940,25 @@ bution.xml') + %0A %0A de
f120e2524f09ed462bca52dbc83863ba74291dd5
Fix backend import.
tests/test_extension.py
tests/test_extension.py
import unittest from mopidy_gmusic import GMusicExtension, backend as backend_lib class ExtensionTest(unittest.TestCase): def test_get_default_config(self): ext = GMusicExtension() config = ext.get_default_config() self.assertIn('[gmusic]', config) self.assertIn('enabled = true', config) def test_get_config_schema(self): ext = GMusicExtension() schema = ext.get_config_schema() self.assertIn('username', schema) self.assertIn('password', schema) self.assertIn('deviceid', schema) def test_get_backend_classes(self): ext = GMusicExtension() backends = ext.get_backend_classes() self.assertIn(backend_lib.GMusicBackend, backends)
Python
0
@@ -53,23 +53,21 @@ ension, -b ac -kend +tor as back
7e15f973f0ee898a0c06e50151ada675be46263d
add basic data, query method and method scaffolds
notifo/notifo.py
notifo/notifo.py
Python
0.000004
@@ -0,0 +1,2055 @@ +# encoding: utf-8%0A%0A%22%22%22 notifo.py - python wrapper for notifo.com %22%22%22%0A%0Aimport json%0Aimport urllib%0Aimport urllib2%0A%0Aclass Notifo:%0A %22%22%22 Class for wrapping notifo.com %22%22%22%0A def __init__(self, user, api_secret):%0A self.user = user%0A self.api_secret = api_secret%0A self.root_url = %22https://api.notifo.com/v1/%22%0A # status codes (Request successful)%0A self.status_codes = %7B%0A 2201 : %22OK.%22,%0A 2202 : %22User is already subscribed.%22%0A %7D%0A # error codes (Something went wrong)%0A self.error_codes = %7B%0A 1100 : %22An error occurred.%22,%0A 1101 : %22Invalid credentials.%22,%0A 1102 : %22Not allowed to sent to user.%22,%0A 1105 : %22No such user.%22,%0A 1106 : %22Not allowed to subscribe user.%22,%0A 1107 : %22Missing required parameters.%22,%0A %7D%0A%0A def subsribe_user(self, user):%0A %22%22%22 method to subscribe a user to a service%0A %22%22%22%0A pass%0A%0A def send_notification(self):%0A %22%22%22 method to send a message to a user%0A %22%22%22%0A pass%0A%0A%0Adef _query(self, url, data = None):%0A %22%22%22 query method to do HTTP POST/GET%0A%0A Parameters:%0A url -%3E the url to POST/GET%0A data -%3E header_data as a dict (only for POST)%0A%0A Returns:%0A Parsed JSON data as dict%0A or%0A None on error%0A %22%22%22%0A if data is not None: # we have POST data if there is data%0A values = urllib.urlencode(data)%0A request = urllib2.Request(url, values)%0A else: # do a GET otherwise%0A request = urllib2.Request(url)%0A try:%0A response = urllib2.urlopen(request)%0A except IOError: # no connection%0A return None%0A json_data = response.read()%0A data = json.loads(json_data)%0A return data%0A
d40fb122d7083b9735728df15120ed682431be79
Create script for generating analysis seeds.
scripts/make_fhes_seeds.py
scripts/make_fhes_seeds.py
Python
0
@@ -0,0 +1,1814 @@ +import yaml%0Aimport sys%0Aimport numpy as np%0Afrom astropy.table import Table%0Afrom astropy.coordinates import SkyCoord%0Afrom fermipy.catalog import *%0Afrom fermipy.utils import *%0A%0A%0Adef get_coord(name,tab):%0A%0A row = tab%5Btab%5B'Source_Name'%5D == name%5D%0A return SkyCoord(float(row%5B'RAJ2000'%5D), float(row%5B'DEJ2000'%5D),unit='deg')%0A%0Adef avg_coords(coords):%0A xyz = np.zeros(3)%0A for t in coords:%0A xyz += t.cartesian.xyz%0A%0A xyz /= np.sum(xyz**2)**0.5%0A %0A c = SkyCoord(xyz%5B0%5D, xyz%5B1%5D, xyz%5B2%5D,representation='cartesian')%0A c.representation='spherical'%0A return c%0A%0A%0A%0Atab = Table.read(sys.argv%5B1%5D)%0Asrc_names = %5B%5D%0A %0Am = np.abs(tab%5B'glat'%5D) %3C 0.%0A#m %7C= (tab%5B'fit_ext_gauss_ts_ext'%5D %3E 9.0)%0A#m %7C= (tab%5B'fit_ext_disk_ts_ext'%5D %3E 9.0)%0Am %7C= (tab%5B'fit_halo_ts'%5D %3E 16.0)%0A#m %7C= (tab%5B'ts'%5D %3E 20000.0)%0A%0A%0Afor row in tab%5Bm%5D:%0A src_names += %5Brow%5B'codename'%5D%5D%0A%0Asrc_names = sorted(list(set(src_names)))%0A %0Ao = %7B%7D%0A%0Afor name in src_names:%0A #coords = %5Bget_coord(t,cat.table) for t in names%5D%0A #c0 = avg_coords(coords)%0A%0A print(name)%0A #print(create_source_name(c0))%0A %0A names = %5Bname%5D%0A %0A row = tab%5Btab%5B'codename'%5D == names%5B0%5D.lower().replace(' ','_')%5D %0A c0 = SkyCoord(row%5B'ra'%5D,row%5B'dec'%5D,unit='deg') %0A name = create_source_name(c0).replace('PS','FHES') + 'e'%0A #print(c0.ra.deg,c0.dec.deg)%0A %0A #print(names%5B0%5D)%0A #print(row%5B'codename'%5D)%0A %0A src = %7B'name' : name,%0A 'ra' : float(c0.ra.deg), 'dec' : float(c0.dec.deg),%0A 'SpectrumType' : 'PowerLaw', 'SpatialModel' : 'RadialGaussian',%0A 'SpatialWidth' : float(row%5B'fit_halo_r68'%5D),%0A 'Index' : float(row%5B'fit_halo_index'%5D)%7D%0A %0A o%5Bname.lower().replace(' ','_')%5D = %7B'selection' : %7B'target' : name%7D,%0A 'model' : %7B'sources' : %5Bsrc%5D%7D %7D%0A%0A%0Ayaml.dump(o,open('out.yaml','w'))%0A
9696acf13a6b25b1935b7fcaae5763db8e16e83a
Create MyoRemote.py
home/Alessandruino/MyoRemote.py
home/Alessandruino/MyoRemote.py
Python
0
@@ -0,0 +1,994 @@ +from com.thalmic.myo.enums import PoseType%0A%0Aremote = Runtime.start(%22remote%22,%22RemoteAdapter%22)%0Aremote.setDefaultPrefix(%22raspi%22)%0Aremote.connect(%22tcp://192.168.0.5:6767%22)%0A%0Aroll = 0.0%0A%0Asleep(2)%0A%0Apython.send(%22raspiarduino%22, %22connect%22,%22/dev/ttyUSB0%22)%0A%0Asleep(1)%0Apython.send(%22raspiarduino%22, %22digitalWrite%22,2,1)%0Apython.send(%22raspiarduino%22, %22digitalWrite%22,3,1)%0Apython.send(%22raspiarduino%22, %22servoAttach%22,%22raspiservo%22,6)%0Apython.send(%22raspiservo%22, %22map%22,5.0,12.0,50.0,110.0)%0A%0Amyo = Runtime.start(%22myo%22,%22MyoThalmic%22)%0A%0Amyo.connect()%0Amyo.addMyoDataListener(python)%0A%0Adef onMyoData(data):%0A if (data.getPose() == PoseType.FIST):%0A global roll%0A roll = data.getRoll()%0A python.send(%22raspiarduino%22, %22analogWrite%22,5,50)%0A python.send(%22raspiservo%22, %22moveTo%22,roll)%0A elif (data.getPose() == PoseType.WAVE_OUT):%0A python.send(%22raspiarduino%22, %22analogWrite%22,11,50)%0A elif (data.getPose() == PoseType.REST):%0A python.send(%22raspiarduino%22, %22analogWrite%22,5,0)%0A python.send(%22raspiarduino%22, %22analogWrite%22,11,0)%0A
6bd3869a2c2a6041e47da01ddaaa15b309bf90d7
Add example checkerscript
checker/examples/dummyrunner.py
checker/examples/dummyrunner.py
Python
0
@@ -0,0 +1,865 @@ +#!/usr/bin/python3%0A%0Aimport sys%0Aimport time%0Aimport os%0Aimport codecs%0A%0Adef generate_flag(tick, payload=None):%0A if payload is None:%0A sys.stdout.write(%22FLAG %25d%5Cn%22 %25 (tick,))%0A else:%0A sys.stdout.write(%22FLAG %25d %25s%5Cn%22 %25 (tick, codecs.encode(os.urandom(8), 'hex').decode('latin-1')))%0A sys.stdout.flush()%0A return sys.stdin.readline().strip()%0A%0Adef place_flag(flag, ip):%0A return 0%0A%0Adef check_for_flag(flag, ip): %0A return 0%0A%0Adef main(tick, ip):%0A result = place_flag(generate_flag(tick), ip)%0A if 0 != result:%0A sys.exit(result)%0A%0A oldesttick = max(tick-7, -1)%0A for ctick in range(tick-1, oldesttick, -1):%0A result = check_for_flag(generate_flag(ctick), ip)%0A if 0 != result:%0A sys.exit(result)%0A%0A sys.exit(0)%0A %0Aif __name__ == '__main__':%0A _, tick, ip = sys.argv%0A main(tick=int(tick), ip=ip)%0A
3026c007a4f9cbb6befa1599c8a8390a96d8396b
test import checks
pychecker2/utest/import.py
pychecker2/utest/import.py
Python
0
@@ -0,0 +1,1506 @@ +from pychecker2.TestSupport import WarningTester%0Afrom pychecker2 import ImportChecks%0A%0Aclass ImportTestCase(WarningTester):%0A def testImportChecks(self):%0A self.silent('import sys; print sys.argv')%0A self.silent('import pychecker2; print pychecker2')%0A self.silent('import pychecker2.utest; print pychecker2.utest')%0A%0A def testImportChecks(self):%0A self.warning('import sys%5Cn'%0A 'print sys.argv%5Cn'%0A 'import sys%5Cn',%0A 3, ImportChecks.ImportCheck.duplicateImport,%0A 'sys', ' in current scope')%0A self.warning('from sys import *%5Cn'%0A 'def f():%5Cn'%0A ' def g():%5Cn'%0A ' from sys import argv%5Cn'%0A ' return argv%5Cn'%0A ' return g() + g()%5Cn'%0A 'print argv%5Cn',%0A 4, ImportChecks.ImportCheck.duplicateImport, 'argv',%0A ' of import in parent scope %3CModuleScope: global%3E')%0A self.warning('import no_such_module%5Cn',%0A 1, ImportChecks.ImportCheck.importError, 'no_such_module',%0A 'No module named no_such_module')%0A self.warning('from pychecker2.utest.data import *%5Cn'%0A 'import exceptions%5Cn'%0A 'print exceptions%5Cn',%0A 2, ImportChecks.ImportCheck.shadowImport,%0A 'exceptions', 'pychecker2.utest.data', 1)%0A %0A
5219e970f1b09d8f2d41bf61a3b9f9803a8aed1d
Add database.py with working db find function
python-backend/database.py
python-backend/database.py
Python
0.000001
@@ -0,0 +1,530 @@ +from pymongo import MongoClient%0A%0Aclient = MongoClient()%0A%0Aclient = MongoClient('localhost', 27017)%0A%0A# %60community%60 database%0Adb = client.community;%0A%0A# Database find wrapper%0A%0Adef db_find( db_collection, db_query, find_one = False ):%0A%0A # Get collection%0A collection = db%5Bdb_collection%5D%0A%0A if (find_one):%0A result = collection.find(db_query)%0A else:%0A result = collection.find_one(db_query)%0A%0A return result;%0A%0A# Database insert wrapper%0A%0A# Database update wrapper%0A%0A# Database remove wrapper%0A%0Aprint db_find('test', %7B'name': 'test'%7D)%0A
2de5e121164eee707772fb1dfa1e8bef1ca3e80c
fix for built-in ssl (easier to build 32 bits export)
platform/x11/detect.py
platform/x11/detect.py
import os import sys def is_active(): return True def get_name(): return "X11" def can_build(): if (os.name!="posix"): return False if sys.platform == "darwin": return False # no x11 on mac for now errorval=os.system("pkg-config --version > /dev/null") if (errorval): print("pkg-config not found.. x11 disabled.") return False x11_error=os.system("pkg-config x11 --modversion > /dev/null ") if (x11_error): print("X11 not found.. x11 disabled.") return False ssl_error=os.system("pkg-config openssl --modversion > /dev/null ") if (ssl_error): print("OpenSSL not found.. x11 disabled.") return False x11_error=os.system("pkg-config xcursor --modversion > /dev/null ") if (x11_error): print("xcursor not found.. x11 disabled.") return False x11_error=os.system("pkg-config xinerama --modversion > /dev/null ") if (x11_error): print("xinerama not found.. x11 disabled.") return False return True # X11 enabled def get_opts(): return [ ('use_llvm','Use llvm compiler','no'), ('use_sanitizer','Use llvm compiler sanitize address','no'), ('use_leak_sanitizer','Use llvm compiler sanitize memory leaks','no'), ('pulseaudio','Detect & Use pulseaudio','yes'), ('new_wm_api', 'Use experimental window management API','no'), ('debug_release', 'Add debug symbols to release version','no'), ] def get_flags(): return [ ('builtin_zlib', 'no'), ("openssl", "yes"), ("theora","no"), ] def configure(env): is64=sys.maxsize > 2**32 if (env["bits"]=="default"): if (is64): env["bits"]="64" else: env["bits"]="32" env.Append(CPPPATH=['#platform/x11']) if (env["use_llvm"]=="yes"): if 'clang++' not in env['CXX']: env["CC"]="clang" env["CXX"]="clang++" env["LD"]="clang++" env.Append(CPPFLAGS=['-DTYPED_METHOD_BIND']) env.extra_suffix=".llvm" if (env["colored"]=="yes"): if sys.stdout.isatty(): env.Append(CXXFLAGS=["-fcolor-diagnostics"]) if (env["use_sanitizer"]=="yes"): env.Append(CXXFLAGS=['-fsanitize=address','-fno-omit-frame-pointer']) env.Append(LINKFLAGS=['-fsanitize=address']) env.extra_suffix+="s" if (env["use_leak_sanitizer"]=="yes"): env.Append(CXXFLAGS=['-fsanitize=address','-fno-omit-frame-pointer']) env.Append(LINKFLAGS=['-fsanitize=address']) env.extra_suffix+="s" #if (env["tools"]=="no"): # #no tools suffix # env['OBJSUFFIX'] = ".nt"+env['OBJSUFFIX'] # env['LIBSUFFIX'] = ".nt"+env['LIBSUFFIX'] if (env["target"]=="release"): if (env["debug_release"]=="yes"): env.Append(CCFLAGS=['-g2']) else: env.Append(CCFLAGS=['-O3','-ffast-math']) elif (env["target"]=="release_debug"): env.Append(CCFLAGS=['-O2','-ffast-math','-DDEBUG_ENABLED']) elif (env["target"]=="debug"): env.Append(CCFLAGS=['-g2', '-Wall','-DDEBUG_ENABLED','-DDEBUG_MEMORY_ENABLED']) env.ParseConfig('pkg-config x11 --cflags --libs') env.ParseConfig('pkg-config xinerama --cflags --libs') env.ParseConfig('pkg-config xcursor --cflags --libs') env.ParseConfig('pkg-config openssl --cflags --libs') env.ParseConfig('pkg-config freetype2 --cflags --libs') env.Append(CCFLAGS=['-DFREETYPE_ENABLED']) env.Append(CPPFLAGS=['-DOPENGL_ENABLED','-DGLEW_ENABLED']) env.Append(CPPFLAGS=["-DALSA_ENABLED"]) if (env["pulseaudio"]=="yes"): if not os.system("pkg-config --exists libpulse-simple"): print("Enabling PulseAudio") env.Append(CPPFLAGS=["-DPULSEAUDIO_ENABLED"]) env.ParseConfig('pkg-config --cflags --libs libpulse-simple') else: print("PulseAudio development libraries not found, disabling driver") env.Append(CPPFLAGS=['-DX11_ENABLED','-DUNIX_ENABLED','-DGLES2_ENABLED','-DGLES_OVER_GL']) env.Append(LIBS=['GL', 'GLU', 'pthread','asound','z']) #TODO detect linux/BSD! 
#env.Append(CPPFLAGS=['-DMPC_FIXED_POINT']) #host compiler is default.. if (is64 and env["bits"]=="32"): env.Append(CPPFLAGS=['-m32']) env.Append(LINKFLAGS=['-m32','-L/usr/lib/i386-linux-gnu']) elif (not is64 and env["bits"]=="64"): env.Append(CPPFLAGS=['-m64']) env.Append(LINKFLAGS=['-m64','-L/usr/lib/i686-linux-gnu']) import methods env.Append( BUILDERS = { 'GLSL120' : env.Builder(action = methods.build_legacygl_headers, suffix = 'glsl.h',src_suffix = '.glsl') } ) env.Append( BUILDERS = { 'GLSL' : env.Builder(action = methods.build_glsl_headers, suffix = 'glsl.h',src_suffix = '.glsl') } ) env.Append( BUILDERS = { 'GLSL120GLES' : env.Builder(action = methods.build_gles2_headers, suffix = 'glsl.h',src_suffix = '.glsl') } ) #env.Append( BUILDERS = { 'HLSL9' : env.Builder(action = methods.build_hlsl_dx9_headers, suffix = 'hlsl.h',src_suffix = '.hlsl') } ) if(env["new_wm_api"]=="yes"): env.Append(CPPFLAGS=['-DNEW_WM_API']) env.ParseConfig('pkg-config xinerama --cflags --libs')
Python
0
@@ -3037,32 +3037,62 @@ cflags --libs')%0A +%09if (env(%22openssl%22%5D==%22yes%22):%0A%09 %09env.ParseConfig
271dca123ff9bb3004cbd2cfa366f606dd250f94
Add test for configmap
tests/k8s/test_configmap.py
tests/k8s/test_configmap.py
Python
0.000001
@@ -0,0 +1,2359 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8%0A%0Aimport mock%0Aimport pytest%0A%0Afrom k8s.client import NotFound%0Afrom k8s.models.common import ObjectMeta%0Afrom k8s.models.configmap import ConfigMap%0A%0ANAME = %22my-name%22%0ANAMESPACE = %22my-namespace%22%0A%[email protected](%22k8s_config%22)%0Aclass TestIngress(object):%0A def test_created_if_not_exists(self, post, api_get):%0A api_get.side_effect = NotFound()%0A configmap = _create_default_configmap()%0A call_params = configmap.as_dict()%0A%0A assert configmap._new%0A configmap.save()%0A assert not configmap._new%0A%0A pytest.helpers.assert_any_call(post, _uri(NAMESPACE), call_params)%0A%0A%0A def test_updated_if_exists(self, get, put):%0A mock_response = _create_mock_response()%0A get.return_value = mock_response%0A configmap = _create_default_configmap()%0A%0A from_api = ConfigMap.get_or_create(metadata=configmap.metadata, data=configmap.data)%0A assert not from_api._new%0A assert from_api.data == %7B%22foo%22: %22bar%22%7D%0A%0A from_api.data = %7B%22baz%22: %22quux%22%7D%0A call_params = from_api.as_dict()%0A%0A from_api.save()%0A pytest.helpers.assert_any_call(put, _uri(NAMESPACE, NAME), call_params)%0A%0A%0A def test_deleted(self, delete):%0A ConfigMap.delete(NAME, namespace=NAMESPACE)%0A pytest.helpers.assert_any_call(delete, _uri(NAMESPACE, NAME))%0A%0A%0Adef _create_mock_response():%0A mock_response = mock.Mock()%0A mock_response.json.return_value = %7B%0A %22apiVersion%22: %22v1%22,%0A %22kind%22: %22ConfigMap%22,%0A %22metadata%22: %7B%0A %22creationTimestamp%22: %222017-09-08T13:37:00Z%22,%0A %22generation%22: 1,%0A %22labels%22: %7B%0A %22test%22: %22true%22%0A %7D,%0A %22name%22: NAME,%0A %22namespace%22: NAMESPACE,%0A %22resourceVersion%22: %2242%22,%0A %22selfLink%22: _uri(NAMESPACE, NAME),%0A %22uid%22: %22d8f1ba26-b182-11e6-a364-fa163ea2a9c4%22%0A %7D,%0A %22data%22: %7B%0A %22foo%22: %22bar%22,%0A %7D,%0A %7D%0A return mock_response%0A%0A%0Adef _create_default_configmap():%0A object_meta = ObjectMeta(name=NAME, namespace=NAMESPACE, labels=%7B%22test%22: %22true%22%7D)%0A data = %7B%22foo%22: %22bar%22%7D%0A configmap = ConfigMap(metadata=object_meta, data=data)%0A return configmap%0A%0A%0Adef _uri(namespace, name=%22%22):%0A return %22/api/v1/namespaces/%7Bnamespace%7D/configmaps/%7Bname%7D%22.format(name=name, namespace=namespace)%0A
db912d4097da45c2b14cce4f8f852cbc1e720750
add test framework
tests/test_clientManager.py
tests/test_clientManager.py
Python
0.000001
@@ -0,0 +1,387 @@ +from unittest import TestCase%0A%0A%0Aclass TestClientManager(TestCase):%0A def test_add_http_client(self):%0A self.fail()%0A%0A def test_add_local_client(self):%0A self.fail()%0A%0A def test_restrictClient(self):%0A self.fail()%0A%0A def test_load_clients_from_config(self):%0A self.fail()%0A%0A def test_federated_featurephenotypeassociaton_query(self):%0A self.fail()%0A
401a1aabde600336bd129cce8fb3884ed8945272
Create HCS_interpreter.py
HCS_interpreter.py
HCS_interpreter.py
Python
0.000001
@@ -0,0 +1,490 @@ +#!/usr/bin/env python3%0A%0Afrom HCS import HCS%0A%0Adef interpret_loop():%0A hcs = HCS()%0A while True:%0A print(%22%3E%3E %22, end=%22%22)%0A try:%0A command = input()%0A except EOFError as e:%0A print()%0A return %0A if command in %5B'quit', 'exit'%5D:%0A return%0A try:%0A print(hcs.eval(command))%0A except Exception as e:%0A print(%22Error: %5Cn%22 + repr(e)) %0A%0Aif __name__ == '__main__':%0A interpret_loop()%0A
d4a04d4a0fffd8dbb006d86504fc3593ae800cc6
add bitly shorten function in proper directory
will/plugins/productivity/bitly.py
will/plugins/productivity/bitly.py
Python
0
@@ -0,0 +1,891 @@ +# coding: utf-8%0A%0A%0Aimport bitly_api # pip install bitly_api%0A%0Afrom will.plugin import WillPlugin%0Afrom will.decorators import (respond_to, periodic, hear, randomly, route,%0A rendered_template, require_settings)%0A%0Afrom will import settings%0A%0A%0A# BITLY_ACCESS_TOKEN = ' %3Cget_access_token_from_bitly.com%3E '%0A%0A%0Aclass BitlyPlugin(WillPlugin):%0A %22%22%22Class for creating Bitly shorten URL's.%22%22%22%0A%0A @respond_to(%22%5Ebitly (?P%3Clong_url%3E.*)$%22)%0A @require_settings(%22BITLY_ACCESS_TOKEN%22,)%0A def get_bitly_shorten_url(self, message, long_url, short_url=None):%0A %22%22%22Function to get shorten_url from bit.ly through API.%22%22%22%0A # use oauth2 endpoints%0A c = bitly_api.Connection(access_token=settings.BITLY_ACCESS_TOKEN)%0A response = c.shorten(uri=long_url)%0A short_url = response%5B'url'%5D%0A self.reply(%22Shorten URL: %25s%22 %25 short_url, message=message)%0A
6922ad3922d187a3e05d339a49449292a1d7efd6
add Prototype pattern
prototype/Prototype.py
prototype/Prototype.py
Python
0
@@ -0,0 +1,1181 @@ +#%0A# Python Design Patterns: Prototype%0A# Author: Jakub Vojvoda %5Bgithub.com/JakubVojvoda%5D%0A# 2016%0A#%0A# Source code is licensed under MIT License%0A# (for more details see LICENSE)%0A# %0A%0Aimport sys%0Aimport copy%0A%0A#%0A# Prototype%0A# declares an interface for cloning itself%0A#%0Aclass Prototype:%0A def clone(self):%0A pass%0A %0A def getType(self):%0A pass%0A%0A#%0A# Concrete Prototypes%0A# implement an operation for cloning itself%0A#%0Aclass ConcretePrototypeA(Prototype):%0A def clone(self):%0A return copy.deepcopy(self)%0A %0A def getType(self):%0A return %22type A%22 %0A%0Aclass ConcretePrototypeB(Prototype):%0A def clone(self):%0A return copy.deepcopy(self)%0A %0A def getType(self):%0A return %22type B%22 %0A%0A#%0A# Client%0A# creates a new object by asking a prototype to clone itself%0A#%0Aclass Client:%0A def __init__(self):%0A self._types = %5BConcretePrototypeA(), ConcretePrototypeB()%5D%0A %0A def make(self, index):%0A return self._types%5Bindex%5D.clone() %0A%0A%0Aif __name__ == %22__main__%22:%0A client = Client()%0A %0A prototype = client.make(0)%0A print(prototype.getType())%0A %0A prototype = client.make(1)%0A print(prototype.getType())
841487ab4d0e05fa6f0780cf39973072417ec701
Complete cherry-pick of PR#95
service/management/commands/start_celery.py
service/management/commands/start_celery.py
Python
0
@@ -0,0 +1,461 @@ +import os%0Afrom django.core.management.base import BaseCommand%0Afrom subprocess import call%0A%0Aclass Command(BaseCommand):%0A help = 'Custom manage.py command to start celery.'%0A%0A def handle(self, *args, **options):%0A logfile = %22celery_node.log%22%0A if not os.path.isfile(logfile):%0A with open(logfile, 'w+') as f:%0A f.close()%0A call((%22celery worker --app=atmosphere --loglevel=INFO -c 5 --logfile=%25s%22 %25 logfile).split())%0A
896b7661b06b35ebcdae5cf53620fb1eb69ed07b
remove useless log
cobra/rule.py
cobra/rule.py
# -*- coding: utf-8 -*- """ rule ~~~~ Implements rule(languages/frameworks/vulnerabilities/rules) :author: Feei <[email protected]> :homepage: https://github.com/wufeifei/cobra :license: MIT, see LICENSE for more details. :copyright: Copyright (c) 2017 Feei. All rights reserved """ import os from . import const from .config import rules_path from .log import logger from .utils import to_bool from xml.etree import ElementTree def block(index): default_index_reverse = 'in-function' default_index = 0 blocks = { 'in-function-up': 0, 'in-function-down': 1, 'in-current-line': 2, 'in-function': 3, 'in-class': 4, 'in-class-up': 5, 'in-class-down': 6, 'in-file': 7, 'in-file-up': 8, 'in-file-down': 9 } if isinstance(index, int): blocks_reverse = dict((v, k) for k, v in blocks.items()) if index in blocks_reverse: return blocks_reverse[index] else: return default_index_reverse else: if index in blocks: return blocks[index] else: return default_index class Rule(object): def __init__(self): self.rules_path = rules_path @property def languages(self): """ Get all languages :return: { 'php':{ 'chiefly': 'true', 'extensions':[ '.php', '.php3', '.php4', '.php5' ] } } """ language_extensions = {} xml_languages = self._read_xml('languages.xml') if xml_languages is None: logger.critical('languages read failed!!!') return None for language in xml_languages: l_name = language.get('name').lower() l_chiefly = 'false' if language.get('chiefly') is not None: l_chiefly = language.get('chiefly') language_extensions[l_name] = { 'chiefly': l_chiefly, 'extensions': [] } for lang in language: l_ext = lang.get('value').lower() language_extensions[l_name]['extensions'].append(l_ext) return language_extensions @property def frameworks(self): """ Read all framework rules :return: dict """ frameworks_rules = {} xml_frameworks = self._read_xml('frameworks.xml') if xml_frameworks is None: logger.critical('frameworks read failed!!!') return None for framework in xml_frameworks: f_name = framework.get('name').lower() f_lang = framework.get('language').lower() f_code = framework.get('code') framework_info = { f_name: { 'code': f_code, 'rules': [] } } frameworks_rules[f_lang] = framework_info for rule in framework: rule_info = {rule.tag: rule.get('value')} frameworks_rules[f_lang][f_name]['rules'].append(rule_info) return frameworks_rules @property def vulnerabilities(self): """ Read all vulnerabilities information :return: """ vulnerabilities_info = {} xml_vulnerabilities = self._read_xml('vulnerabilities.xml') if xml_vulnerabilities is None: logger.critical('vulnerabilities read failed!!!') return None for vulnerability in xml_vulnerabilities: v_id = int(vulnerability.get('vid')) v_name = vulnerability.get('name').upper() vulnerabilities_info[str(v_id)] = v_name return vulnerabilities_info def rules(self, rules=None): """ Get all rules :return: dict """ logger.critical(rules) vulnerabilities = [] if rules is not None and len(rules) > 0: files = rules else: files = os.listdir(self.rules_path) for vulnerability_name in files: # VN: CVI-190001.xml v_path = os.path.join(self.rules_path, vulnerability_name.upper()) if os.path.isfile(v_path) is not True or 'cvi-template' in v_path.lower() or vulnerability_name.lower()[0:7] == 'cvi-999' or 'cvi' not in v_path.lower() or '.xml' not in v_path.lower(): logger.warning('Not regular rule file {f}'.format(f=v_path)) continue # rule information rule_info = { 'id': None, 'file': v_path, 'name': None, 'language': None, 'match': None, 'match-mode': 'regex-only-match', 'match2': None, 'match2-block': 
None, 'repair': None, 'repair-block': None, 'level': None, 'solution': None, 'test': { 'true': [], 'false': [] }, 'status': False, 'author': None } xml_rule = self._read_xml(v_path) if xml_rule is None: logger.critical('rule read failed!!! ({file})'.format(file=v_path)) continue cvi = v_path.lower().split('cvi-')[1][:6] rule_info['id'] = cvi for x in xml_rule: if x.tag == 'name': rule_info['name'] = x.get('value') if x.tag == 'language': rule_info['language'] = x.get('value') if x.tag == 'status': rule_info['status'] = to_bool(x.get('value')) if x.tag == 'author': name = x.get('name').encode('utf-8') email = x.get('email') rule_info['author'] = '{name}<{email}>'.format(name=name, email=email) if x.tag in ['match', 'match2', 'repair']: if x.text is not None: rule_info[x.tag] = x.text.strip() if x.tag == 'match': if x.get('mode') is None: logger.warning('unset match mode attr (CVI-{cvi})'.format(cvi=cvi)) if x.get('mode') not in const.match_modes: logger.warning('mode exception (CVI-{cvi})'.format(cvi=cvi)) rule_info['match-mode'] = x.get('mode') elif x.tag == 'repair': rule_info['repair-block'] = block(x.get('block')) elif x.tag == 'match2': rule_info['match2-block'] = block(x.get('block')) if x.tag == 'level': rule_info['level'] = x.get('value') if x.tag == 'solution': rule_info['solution'] = x.text.strip() if x.tag == 'test': for case in x: case_ret = case.get('assert').lower() case_test = '' if case.text is not None: case_test = case.text.strip() if case_ret in ['true', 'false']: rule_info['test'][case_ret].append(case_test) vulnerabilities.append(rule_info) return vulnerabilities def _read_xml(self, filename): """ Read XML :param filename: :return: """ path = os.path.join(self.rules_path, filename) try: tree = ElementTree.parse(path) return tree.getroot() except Exception as e: logger.warning('parse xml failed ({file})'.format(file=path)) return None
Python
0.000001
@@ -3996,39 +3996,8 @@ %22%22%22%0A - logger.critical(rules)%0A
d1ee86414d45c571571d75434b8c2256b0120732
Add py solution for 563. Binary Tree Tilt
py/binary-tree-tilt.py
py/binary-tree-tilt.py
Python
0.005643
@@ -0,0 +1,624 @@ +# Definition for a binary tree node.%0A# class TreeNode(object):%0A# def __init__(self, x):%0A# self.val = x%0A# self.left = None%0A# self.right = None%0A%0Aclass Solution(object):%0A def findTilt(self, root):%0A %22%22%22%0A :type root: TreeNode%0A :rtype: int%0A %22%22%22%0A return self.do_findTilt(root)%5B1%5D%0A%0A def do_findTilt(self, cur):%0A if cur is None:%0A return (0, 0)%0A lsum, ltiltsum = self.do_findTilt(cur.left)%0A rsum, rtiltsum = self.do_findTilt(cur.right)%0A tilt = abs(lsum - rsum)%0A return lsum + rsum + cur.val, ltiltsum + rtiltsum + tilt%0A
34bfea59b600f9dac457e2a16a812ce2fb768d15
Add graph.py to collect runtime data on workers and tasks (#8)
chtc/graph.py
chtc/graph.py
Python
0
@@ -0,0 +1,1195 @@ +#!/usr/bin/env python%0Afrom __future__ import print_function%0A%0Aimport csv%0Aimport itertools%0Aimport time%0A%0Afrom distributed import Client%0A%0A%0ASTART_TIMEOUT = 900 # 15 min%0AMAX_COLLECT_TIME = 86400 # 1 day%0A%0A%0Adef running_task_list(cli):%0A return list(itertools.chain.from_iterable(cli.processing().values()))%0A%0A%0Acli = Client('127.0.0.1:8786')%0A%0Aprint(%22Waiting for tasks to start running%22)%0A%0Atimeout = time.time() + START_TIMEOUT%0A%0Awhile not cli.ncores():%0A time.sleep(5)%0A if time.time() %3E timeout:%0A raise Exception(%22workers never started%22)%0A%0Aprint(%22First worker connected. Starting data collection.%22)%0A%0Astart_time = time.time()%0Aend_time = time.time() + MAX_COLLECT_TIME%0A%0Awith open('graph.csv', 'wb') as outfile:%0A writer = csv.writer(outfile)%0A%0A while cli.ncores() and time.time() %3C end_time:%0A n_running_tasks = len(running_task_list(cli))%0A n_cores = sum(cli.ncores().values())%0A n_futures = len(cli.who_has().keys())%0A%0A row = %5Btime.time() - start_time, n_cores, n_running_tasks, n_futures%5D%0A print(%22%7B0:%3E6.0f%7Ds %7B1:%3E5d%7D cores %7B2:%3E5d%7D tasks %7B3:%3E5d%7D futures%22.format(*row))%0A writer.writerow(row)%0A%0A time.sleep(5)%0A%0Aprint(%22Done with data collection.%22)%0A
925ff38344b5058ce196877e1fdcf79a1d1f6719
Add basic test for checking messages are received correctly
ue4/tests/test_messaging.py
ue4/tests/test_messaging.py
Python
0
@@ -0,0 +1,378 @@ +import pytest%0A%0Afrom m2u.ue4 import connection%0A%0A%0Adef test_send_message_size():%0A %22%22%22Send a big message, larger than buffer size, so the server has to%0A read multiple chunks.%0A%0A %22%22%22%0A message = %22TestMessageSize %22 + (%22abcdefg%22 * 5000)%0A connection.connect()%0A result = connection.send_message(message)%0A assert result == str(len(message))%0A connection.disconnect()%0A
ff700e5d6fc5e0c5062f687110563d7f0312a3f0
Set up test suite to ensure server admin routes are added.
server/tests/test_admin.py
server/tests/test_admin.py
Python
0
@@ -0,0 +1,2368 @@ +%22%22%22General functional tests for the API endpoints.%22%22%22%0A%0A%0Afrom django.test import TestCase, Client%0A# from django.urls import reverse%0A%0Afrom rest_framework import status%0A%0Afrom server.models import ApiKey, User%0A# from api.v2.tests.tools import SalAPITestCase%0A%0A%0Aclass AdminTest(TestCase):%0A %22%22%22Test the admin site is configured to have all expected views.%22%22%22%0A admin_endpoints = %7B%0A 'apikey', 'businessunit', 'condition', 'fact', 'historicalfact',%0A 'installedupdate', 'machinedetailplugin', 'machinegroup', 'machine',%0A 'pendingappleupdate', 'pendingupdate', 'pluginscriptrow',%0A 'pluginscriptsubmission', 'plugin', 'report', 'salsetting', 'updatehistoryitem',%0A 'updatehistory', 'userprofile'%7D%0A%0A def setUp(self):%0A self.client = Client()%0A self.user = User.objects.create(username='test')%0A%0A def test_no_access(self):%0A %22%22%22Test that unauthenticated requests redirected to login.%22%22%22%0A for path in self.admin_endpoints:%0A response = self.client.get('/admin/server/%7B%7D'.format(path))%0A # Redirect to login page.%0A self.assertEqual(response.status_code, status.HTTP_301_MOVED_PERMANENTLY)%0A%0A def test_ro_access(self):%0A %22%22%22Test that ro requests are rejected.%0A%0A RO users should not have access to the admin site (unless they have%0A %60is_staff = True%60.%0A %22%22%22%0A self.user.user_profile = 'RO'%0A self.user.save()%0A self.client.force_login(self.user)%0A%0A for path in self.admin_endpoints:%0A url = '/admin/server/%7B%7D/'.format(path)%0A response = self.client.get(url)%0A msg = 'Failed for path: %22%7B%7D%22'.format(path)%0A self.assertEqual(response.status_code, status.HTTP_302_FOUND, msg=msg)%0A self.assertEqual(response.url, '/admin/login/?next=/admin/server/%7B%7D/'.format(path),%0A msg=msg)%0A%0A def test_ga_access(self):%0A %22%22%22Ensure GA userprofile grants admin page access.%22%22%22%0A self.user.user_profile = 'GA'%0A self.user.save()%0A self.client.force_login(self.user)%0A%0A for path in self.admin_endpoints:%0A url = '/admin/server/%7B%7D/'.format(path)%0A response = self.client.get(url, follow=True)%0A msg = 'Failed for path: %22%7B%7D%22'.format(path)%0A self.assertEqual(response.status_code, status.HTTP_200_OK, msg=msg)%0A
1a06292cc1abe86c1d97dd948c0ba2744585ee8b
fix a test failure by adding @prevent_zombie decorator for create_time function on BSD
psutil/_psbsd.py
psutil/_psbsd.py
#!/usr/bin/env python # # $Id$ # import os import signal import errno import pwd import grp import _psutil_bsd # import psutil exceptions we can override with our own from error import * # module level constants (gets pushed up to psutil module) NoSuchProcess = _psutil_bsd.NoSuchProcess NUM_CPUS = _psutil_bsd.get_num_cpus() TOTAL_PHYMEM = _psutil_bsd.get_total_phymem() def avail_phymem(): "Return the amount of physical memory available on the system, in bytes." return _psutil_bsd.get_avail_phymem() def used_phymem(): "Return the amount of physical memory currently in use on the system, in bytes." return TOTAL_PHYMEM - _psutil_bsd.get_avail_phymem() def total_virtmem(): "Return the amount of total virtual memory available on the system, in bytes." return _psutil_bsd.get_total_virtmem() def avail_virtmem(): "Return the amount of virtual memory currently in use on the system, in bytes." return _psutil_bsd.get_avail_virtmem() def used_virtmem(): """Return the amount of used memory currently in use on the system, in bytes.""" return _psutil_bsd.get_total_virtmem() - _psutil_bsd.get_avail_virtmem() def get_system_cpu_times(): """Return a dict representing the following CPU times: user, nice, system, idle, interrupt.""" values = _psutil_bsd.get_system_cpu_times() return dict(user=values[0], nice=values[1], system=values[2], idle=values[3], irq=values[4]) def wrap_privileges(callable): """Call callable into a try/except clause so that if an OSError EPERM exception is raised we translate it into psutil.AccessDenied. """ def wrapper(*args, **kwargs): try: return callable(*args, **kwargs) except OSError, err: if err.errno == errno.EPERM: raise AccessDenied raise return wrapper def prevent_zombie(method): """Call method(self, pid) into a try/except clause so that if an OSError "No such process" exception is raised we assume the process has died and raise psutil.NoSuchProcess instead. """ def wrapper(self, pid, *args, **kwargs): try: return method(self, pid, *args, **kwargs) except OSError, err: if err.errno == errno.ESRCH: raise NoSuchProcess(pid) raise return wrapper class Impl(object): @wrap_privileges def get_process_info(self, pid): """Returns a tuple that can be passed to the psutil.ProcessInfo class constructor. """ infoTuple = _psutil_bsd.get_process_info(pid) return infoTuple @wrap_privileges def kill_process(self, pid, sig=signal.SIGKILL): """Terminates the process with the given PID.""" if sig is None: sig = signal.SIGKILL try: os.kill(pid, sig) except OSError, err: if err.errno == errno.ESRCH: raise NoSuchProcess(pid) raise @wrap_privileges def get_cpu_times(self, pid): """return a tuple containing process user/kernel time.""" return _psutil_bsd.get_cpu_times(pid) @wrap_privileges @prevent_zombie def get_memory_info(self, pid): """Return a tuple with the process' RSS and VMS size.""" return _psutil_bsd.get_memory_info(pid) def get_process_create_time(self, pid): return _psutil_bsd.get_process_create_time(pid) def get_pid_list(self): """Returns a list of PIDs currently running on the system.""" return _psutil_bsd.get_pid_list() def pid_exists(self, pid): """Check For the existence of a unix pid.""" if pid < 0: return False try: os.kill(pid, 0) except OSError, e: return e.errno == errno.EPERM else: return True
Python
0.000004
@@ -3148,37 +3148,16 @@ s(pid)%0A%0A - @wrap_privileges%0A @pre @@ -3314,24 +3314,44 @@ _info(pid)%0A%0A + @prevent_zombie%0A def get_
2913d840b63746669ac5695bd244abd6db24fe5a
Create script that prepares LaGeR strings for use with a machine learning training algorithms
lager_ml/lager_training_prep.py
lager_ml/lager_training_prep.py
Python
0
@@ -0,0 +1,1336 @@ +#!/usr/bin/env python3%0A%0A# This program prepares LaGeR strings for use with a machine learning training%0A# algorithm.%0A#%0A# It expands the string or set of strings to specific length (number of%0A# features), then generates variants for each of those. Finally, it converts%0A# the variants into numbers and adds the result to a dataset file.%0A%0Aimport sys%0Afrom subprocess import call%0A%0Aif (len(sys.argv) %3C 5):%0A%09print(%22lager_training_prep %5BGESTURE_NAME%5D %5BGESTURE_LABEL%5D %5BNUM_FEATURES%5D %5BNUM_VARIANTS%5D%22)%0A%09exit()%0A%0Agesture_name = sys.argv%5B1%5D%0Agesture_label = sys.argv%5B2%5D%0Anum_features = sys.argv%5B3%5D%0Anum_variants = sys.argv%5B4%5D%0A%0Aprint(%22Gesture name: %22, gesture_name)%0Aprint(%22Gesture label: %22, gesture_label)%0Aprint(%22Number of features: %22, num_features)%0Aprint(%22Number of variants: %22, num_variants)%0A%0Aorig_gesture_filename = gesture_name + %22.dat%22%0Agesture_expanded_filename = gesture_name + %22_expanded.dat%22%0Agesture_variants_filename = gesture_name + %22_expanded_variants.dat%22%0Agesture_numbers_filename = gesture_name + %22_expanded_variants_numbers.csv%22%0A%0Acall(%5B'./lager_expander.py', orig_gesture_filename, num_features%5D)%0Acall(%5B'../lager_generator/lager_generator.py', gesture_expanded_filename, num_variants%5D)%0Acall(%5B'./lager_file_to_numbers.py', gesture_variants_filename, gesture_label%5D)%0Acall('cat ' + gesture_numbers_filename + ' %3E%3E'+ ' dataset.csv', shell=True)%0A
352379690275e970693a06ed6981f530b6704354
Add index to Task.status
migrations/versions/181adec926e2_add_status_index_to_task.py
migrations/versions/181adec926e2_add_status_index_to_task.py
Python
0.000003
@@ -0,0 +1,409 @@ +%22%22%22Add status index to task%0A%0ARevision ID: 181adec926e2%0ARevises: 43397e521791%0ACreate Date: 2016-10-03 17:41:44.038137%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '181adec926e2'%0Adown_revision = '43397e521791'%0A%0Afrom alembic import op%0A%0A%0Adef upgrade():%0A op.create_index('idx_task_status', 'task', %5B'status'%5D, unique=False)%0A%0A%0Adef downgrade():%0A op.drop_index('id_task_status', table_name='task')%0A
6ccf99966461bd8545654084584d58093dac03d5
Add missing version file
pyrle/version.py
pyrle/version.py
Python
0.000002
@@ -0,0 +1,23 @@ +__version__ = %220.0.17%22%0A
f4e08d41d53cf74f8a53efeb7e238de6a98946cc
add script to find allreferenced hashes
add-ons/tools/get_referenced_hashes.py
add-ons/tools/get_referenced_hashes.py
Python
0.000001
@@ -0,0 +1,1805 @@ +#!/usr/bin/env python%0A%0Aimport sys%0Aimport cvmfs%0A%0Adef usage():%0A print sys.argv%5B0%5D + %22 %3Clocal repo name %7C remote repo url%3E %5Broot catalog%5D%22%0A print %22This script walks the catalogs and generates a list of all referenced content hashes.%22%0A%0A# get referenced hashes from a single catalog (files, chunks, nested catalogs)%0Adef get_hashes_for_catalog(catalog):%0A print %3E%3E sys.stderr, %22Processing%22 , catalog.hash , catalog%0A query = %22 SELECT DISTINCT %5C%0A lower(hex(hash)) %5C%0A FROM catalog %5C%0A WHERE hash != 0 %5C%0A UNION %5C%0A SELECT DISTINCT %5C%0A lower(hex(hash)) %7C%7C 'P' %5C%0A FROM chunks %5C%0A WHERE hash != 0 %5C%0A UNION %5C%0A SELECT DISTINCT %5C%0A sha1 %7C%7C 'C' %5C%0A FROM nested_catalogs;%22%0A return %7B res%5B0%5D for res in catalog.run_sql(query) %7D%0A%0Adef get_hashes_for_catalog_tree(repo, root_catalog):%0A hashes = %7B root_catalog.hash + %22C%22 %7D%0A for catalog in repo.catalogs(root_catalog):%0A hashes = hashes %7C get_hashes_for_catalog(catalog)%0A return hashes%0A%0Adef get_hashes_for_revision(repo, root_hash = None):%0A root_catalog = repo.retrieve_catalog(root_hash) if root_hash else repo.retrieve_root_catalog()%0A return get_hashes_for_catalog_tree(repo, root_catalog)%0A%0A%0A# check input values%0Aif len(sys.argv) != 2 and len(sys.argv) != 3:%0A usage()%0A sys.exit(1)%0A%0A# get input parameters%0Arepo_identifier = sys.argv%5B1%5D%0Aroot_catalog_hash = sys.argv%5B2%5D if len(sys.argv) == 3 else None%0A%0Arepo = cvmfs.open_repository(repo_identifier)%0Ahashes = get_hashes_for_revision(repo, root_catalog_hash)%0A%0Aprint '%5Cn'.join(hashes)%0A
df852b2ee81756fa62a98e425e156530333bf5a1
add migration to change order of participation choices
meinberlin/apps/plans/migrations/0033_change_order_participation_choices.py
meinberlin/apps/plans/migrations/0033_change_order_participation_choices.py
Python
0
@@ -0,0 +1,546 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.18 on 2019-01-28 13:27%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('meinberlin_plans', '0032_rename_topic_field'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='plan',%0A name='participation',%0A field=models.SmallIntegerField(choices=%5B(0, 'Yes'), (1, 'No'), (2, 'Still undecided')%5D, verbose_name='Participation'),%0A ),%0A %5D%0A
c6f060a12a6d4952fef85ec58294c0014b10b6d1
Remove the extra Networks url in ports details page's breadcrumb
openstack_dashboard/dashboards/admin/networks/ports/views.py
openstack_dashboard/dashboards/admin/networks/ports/views.py
# Copyright 2012 NEC Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.core.urlresolvers import reverse from django.utils.translation import ugettext_lazy as _ from horizon import exceptions from horizon import forms from horizon.utils import memoized from openstack_dashboard import api from openstack_dashboard.dashboards.admin.networks.ports \ import forms as ports_forms from openstack_dashboard.dashboards.admin.networks.ports \ import tables as ports_tables from openstack_dashboard.dashboards.admin.networks.ports \ import tabs as ports_tabs from openstack_dashboard.dashboards.project.networks.ports \ import views as project_views class CreateView(forms.ModalFormView): form_class = ports_forms.CreatePort form_id = "create_port_form" submit_label = _("Create Port") submit_url = "horizon:admin:networks:addport" page_title = _("Create Port") template_name = 'admin/networks/ports/create.html' url = 'horizon:admin:networks:detail' def get_success_url(self): return reverse(self.url, args=(self.kwargs['network_id'],)) @memoized.memoized_method def get_object(self): try: network_id = self.kwargs["network_id"] return api.neutron.network_get(self.request, network_id) except Exception: redirect = reverse(self.url, args=(self.kwargs['network_id'],)) msg = _("Unable to retrieve network.") exceptions.handle(self.request, msg, redirect=redirect) def get_context_data(self, **kwargs): context = super(CreateView, self).get_context_data(**kwargs) context['network'] = self.get_object() args = (self.kwargs['network_id'],) context['submit_url'] = reverse(self.submit_url, args=args) context['cancel_url'] = reverse(self.url, args=args) return context def get_initial(self): network = self.get_object() return {"network_id": self.kwargs['network_id'], "network_name": network.name} class DetailView(project_views.DetailView): tab_group_class = ports_tabs.PortDetailTabs def get_context_data(self, **kwargs): context = super(DetailView, self).get_context_data(**kwargs) port = context["port"] network_url = "horizon:admin:networks:detail" subnet_url = "horizon:admin:networks:subnets:detail" port.network_url = reverse(network_url, args=[port.network_id]) for ip in port.fixed_ips: ip['subnet_url'] = reverse(subnet_url, args=[ip['subnet_id']]) table = ports_tables.PortsTable(self.request, network_id=port.network_id) # TODO(robcresswell) Add URL for "Ports" crumb after bug/1416838 breadcrumb = [ (_("Networks"), self.get_redirect_url()), ((port.network_name or port.network_id), port.network_url), (_("Ports"), None) ] context["custom_breadcrumb"] = breadcrumb context["url"] = \ reverse('horizon:admin:networks:ports_tab', args=[port.network_id]) context["actions"] = table.render_row_actions(port) return context @staticmethod def get_redirect_url(): return reverse('horizon:admin:networks:index') class UpdateView(project_views.UpdateView): form_class = ports_forms.UpdatePort template_name = 'admin/networks/ports/update.html' context_object_name = 'port' submit_url = "horizon:admin:networks:editport" success_url = 'horizon:admin:networks:detail' def get_initial(self): initial = super(UpdateView, self).get_initial() port = self._get_object() initial['binding__host_id'] = port['binding__host_id'] initial['device_id'] = port['device_id'] initial['device_owner'] = port['device_owner'] return initial
Python
0.001216
@@ -3374,62 +3374,8 @@ = %5B%0A - (_(%22Networks%22), self.get_redirect_url()),%0A
8b71de6988b65665d60c696daffb12ab78c35472
allow passing of 'profile' argument to constructor
crosscat/IPClusterEngine.py
crosscat/IPClusterEngine.py
# # Copyright (c) 2010-2013, MIT Probabilistic Computing Project # # Lead Developers: Dan Lovell and Jay Baxter # Authors: Dan Lovell, Baxter Eaves, Jay Baxter, Vikash Mansinghka # Research Leads: Vikash Mansinghka, Patrick Shafto # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import functools # from IPython.parallel import Client # import crosscat.LocalEngine as LE import crosscat.utils.sample_utils as su # these imports are necessary to make ipcluster to work import crosscat import crosscat.LocalEngine def partialize(func, args_dict, dview): # why is this push necessary? dview.push(args_dict, block=True) helper = functools.partial(func, **args_dict) return helper class IPClusterEngine(LE.LocalEngine): """A simple interface to the Cython-wrapped C++ engine IPClusterEngine """ def __init__(self, config_filename, seed=0, sshkey=None, packer='json'): """Initialize a IPClusterEngine Do IPython.parallel operations to set up cluster and generate mapper. """ super(IPClusterEngine, self).__init__(seed=seed) rc = Client(config_filename, sshkey=sshkey, packer=packer) dview = rc.direct_view() lview = rc.load_balanced_view() with dview.sync_imports(local=True): import crosscat import crosscat.LocalEngine mapper = lambda f, tuples: self.lview.map(f, *tuples) # if you're trying to debug issues, consider clearning to start fresh # rc.clear(block=True) # self.rc = rc self.dview = dview self.lview = lview self.mapper = mapper self.do_initialize = None self.do_analyze = None return def get_initialize_arg_tuples(self, M_c, M_r, T, initialization, n_chains): args_dict = dict(M_c=M_c, M_r=M_r, T=T, initialization=initialization) do_initialize = partialize(crosscat.LocalEngine._do_initialize2, args_dict, self.dview) seeds = [self.get_next_seed() for seed_idx in range(n_chains)] arg_tuples = [seeds] # self.do_initialize = do_initialize return arg_tuples def get_analyze_arg_tuples(self, M_c, T, X_L, X_D, kernel_list=(), n_steps=1, c=(), r=(), max_iterations=-1, max_time=-1): n_chains = len(X_L) args_dict = dict(M_c=M_c, T=T, kernel_list=kernel_list, n_steps=n_steps, c=c, r=r, max_iterations=max_iterations, max_time=max_time) do_analyze = partialize(crosscat.LocalEngine._do_analyze2, args_dict, self.dview) seeds = [self.get_next_seed() for seed_idx in range(n_chains)] arg_tuples = [seeds, X_L, X_D] # self.do_analyze = do_analyze return arg_tuples
Python
0.000001
@@ -1382,16 +1382,30 @@ ilename, + profile=None, seed=0, @@ -1659,16 +1659,33 @@ ilename, + profile=profile, sshkey=
92debba4bb0b0064b865a53b40476effa4d09c78
Undo Framework example
pyside/demos/framework/undo/document.py
pyside/demos/framework/undo/document.py
Python
0.000001
@@ -0,0 +1,1839 @@ +from collections import namedtuple%0A%0Afrom PySide.QtGui import QWidget, QPalette, QPainter%0Afrom PySide.QtCore import Qt, QRect%0A%0A%0AShapeType = namedtuple('ShapeType', 'Rectangle Circle Triangle')(*range(3))%0A%0A%0Aclass Shape(object):%0A def __init__(self, type=ShapeType.Rectangle, color=Qt.red, rect=QRect()):%0A self._type = type%0A self._color = color%0A self._rect = rect%0A%0A @property%0A def type(self):%0A return self._type%0A %0A @property%0A def color(self):%0A return self._color%0A %0A @property%0A def rect(self):%0A return self._rect%0A %0A @property%0A def name(self):%0A return self._name%0A %0A%0Aclass Document(QWidget):%0A def __init__(self, parent=None):%0A super(Document, self).__init__(parent)%0A self._shapeList = %5B%5D%0A %0A self.setAutoFillBackground(True)%0A self.setBackgroundRole(QPalette.Base)%0A %0A pal = QPalette()%0A pal.setColor(QPalette.HighlightedText, Qt.red)%0A self.setPalette(pal)%0A%0A def paintEvent(self, event):%0A paintRegion = event.region()%0A painter = QPainter(self)%0A pal = self.palette()%0A%0A for shape in self._shapeList:%0A rect = shape.rect%0A %0A if not paintRegion.contains(rect):%0A continue%0A %0A shapeType = shape.type%0A %0A painter.setBrush(shape.color)%0A %0A if shapeType == ShapeType.Rectangle:%0A print %22rectangle%22%0A painter.drawRect(rect)%0A elif shapeType == ShapeType.Circle:%0A print %22circle%22%0A painter.drawEllipse(rect)%0A%0As1 = Shape(ShapeType.Rectangle, color=Qt.green, rect=QRect(0, 0, 100, 100))%0As2 = Shape(ShapeType.Circle, rect=QRect(200, 200, 100, 100))%0A%0Ad = Document()%0A%0Ad._shapeList = %5Bs1, s2%5D%0A%0Ad.show()%0A
fe08242647962af0fdfab0ce34417b6a6079ed65
add another now-missing import
sympy/strategies/tests/test_traverse.py
sympy/strategies/tests/test_traverse.py
from sympy.strategies.traverse import (top_down, bottom_up, sall, top_down_once, bottom_up_once, expr_fns, basic_fns) from sympy import Basic, symbols, Symbol, S zero_symbols = lambda x: S.Zero if isinstance(x, Symbol) else x x,y,z = symbols('x,y,z') def test_sall(): zero_onelevel = sall(zero_symbols) assert zero_onelevel(Basic(x, y, Basic(x, z))) == \ Basic(0, 0, Basic(x, z)) def test_bottom_up(): _test_global_traversal(bottom_up) _test_stop_on_non_basics(bottom_up) def test_top_down(): _test_global_traversal(top_down) _test_stop_on_non_basics(top_down) def _test_global_traversal(trav): x,y,z = symbols('x,y,z') zero_all_symbols = trav(zero_symbols) assert zero_all_symbols(Basic(x, y, Basic(x, z))) == \ Basic(0, 0, Basic(0, 0)) def _test_stop_on_non_basics(trav): def add_one_if_can(expr): try: return expr + 1 except: return expr expr = Basic(1, 'a', Basic(2, 'b')) expected = Basic(2, 'a', Basic(3, 'b')) rl = trav(add_one_if_can) assert rl(expr) == expected class Basic2(Basic): pass rl = lambda x: Basic2(*x.args) if isinstance(x, Basic) else x def test_top_down_once(): top_rl = top_down_once(rl) assert top_rl(Basic(1, 2, Basic(3, 4))) == \ Basic2(1, 2, Basic(3, 4)) def test_bottom_up_once(): bottom_rl = bottom_up_once(rl) assert bottom_rl(Basic(1, 2, Basic(3, 4))) == \ Basic(1, 2, Basic2(3, 4)) def test_expr_fns(): from sympy.strategies.rl import rebuild from sympy import Add x, y = map(Symbol, 'xy') expr = x + y**3 e = bottom_up(lambda x: x + 1, expr_fns)(expr) b = bottom_up(lambda x: Basic.__new__(Add, x, 1), basic_fns)(expr) assert rebuild(b) == e
Python
0
@@ -102,28 +102,61 @@ ce, -expr_fns, basic +basic_fns)%0Afrom sympy.strategies.util import expr _fns -) %0Afro
0c29b431a0f5ce9115d7acdcaaabbd27546949c6
Add test for contact success view.
chmvh_website/contact/tests/views/test_success_view.py
chmvh_website/contact/tests/views/test_success_view.py
Python
0
@@ -0,0 +1,606 @@ +from django.test import RequestFactory%0Afrom django.urls import reverse%0A%0Afrom contact.views import SuccessView%0A%0A%0Aclass TestSuccessView(object):%0A %22%22%22Test cases for the success view%22%22%22%0A url = reverse('contact:success')%0A%0A def test_get(self, rf: RequestFactory):%0A %22%22%22Test sending a GET request to the view.%0A%0A Sending a GET request to the view should render the success%0A page.%0A %22%22%22%0A request = rf.get(self.url)%0A response = SuccessView.as_view()(request)%0A%0A assert response.status_code == 200%0A assert 'contact/success.html' in response.template_name%0A
f66a60411b4e1cb30ac1fde78735ba38e99289cf
Create cfprefs.py
cfprefs.py
cfprefs.py
Python
0
@@ -0,0 +1,326 @@ +#!/usr/bin/python%0A%0Aimport CoreFoundation%0A%0Adomain = 'com.apple.appstore'%0Akey = 'restrict-store-require-admin-to-install'%0A%0Akey_value = CoreFoundation.CFPreferencesCopyAppValue(key, domain)%0Aprint 'Key Value = ', key_value%0A%0Akey_forced = CoreFoundation.CFPreferencesAppValueIsForced(key, domain) %0Aprint 'Key Forced = ', key_forced%0A
e5fecce2693056ac53f7d34d00801829ea1094c3
add JPEG decoder CPU perf bench
tools/jpegdec_perf/reader_perf_multi.py
tools/jpegdec_perf/reader_perf_multi.py
Python
0
@@ -0,0 +1,1301 @@ +import cv2%0Aimport os%0Afrom turbojpeg import TurboJPEG, TJPF_GRAY, TJSAMP_GRAY, TJFLAG_PROGRESSIVE%0Aimport time%0Aimport threading%0A%0A# specifying library path explicitly%0A# jpeg = TurboJPEG(r'D:%5Cturbojpeg.dll')%0A# jpeg = TurboJPEG('/usr/lib64/libturbojpeg.so')%0A# jpeg = TurboJPEG('/usr/local/lib/libturbojpeg.dylib')%0A%0A# using default library installation%0A%0Adef decode():%0A jpeg = TurboJPEG()%0A image_folder = '/home/matrix/data/val/'%0A cnt = 0%0A time_sum = 0.0%0A for fname in sorted(os.listdir(image_folder)):%0A fpath = os.path.join(image_folder, fname)%0A # print(fpath)%0A in_file = open(fpath, 'rb')%0A jpg = in_file.read()%0A cnt += 1%0A # (width, height, jpeg_subsample, jpeg_colorspace) = jpeg.decode_header(jpg)%0A # print(width, height, jpeg_subsample, jpeg_colorspace)%0A begin = time.time() * 1000%0A raw = jpeg.decode(jpg)%0A end = time.time() * 1000%0A time_sum += end - begin%0A in_file.close() %0A print(%22image cnt: %22, cnt)%0A print(%22time per image is(ms):%22, time_sum / cnt)%0A%0A%0A%0Afor i in range(52):%0A print('thread %25s is running...' %25 threading.current_thread().name)%0A t = threading.Thread(target=decode, name='DecodeThread')%0A t.start()%0A # t.join()%0A print('thread %25s ended.' %25 threading.current_thread().name)%0A
c650d64247d63d2af7a8168795e7edae5c9ef6ef
Add realtime chart plotting example
realtime-plot.py
realtime-plot.py
Python
0
@@ -0,0 +1,1495 @@ +import time, random%0Aimport math%0Afrom collections import deque%0A%0Astart = time.time()%0A%0Aclass RealtimePlot:%0A def __init__(self, axes, max_entries = 100):%0A self.axis_x = deque(maxlen=max_entries)%0A self.axis_y = deque(maxlen=max_entries)%0A self.axes = axes%0A self.max_entries = max_entries%0A %0A self.lineplot, = axes.plot(%5B%5D, %5B%5D, %22ro-%22)%0A self.axes.set_autoscaley_on(True)%0A%0A def add(self, x, y):%0A self.axis_x.append(x)%0A self.axis_y.append(y)%0A self.lineplot.set_data(self.axis_x, self.axis_y)%0A self.axes.set_xlim(self.axis_x%5B0%5D, self.axis_x%5B-1%5D + 1e-15)%0A self.axes.relim(); self.axes.autoscale_view() # rescale the y-axis%0A%0A def animate(self, figure, callback, interval = 50):%0A import matplotlib.animation as animation%0A def wrapper(frame_index):%0A self.add(*callback(frame_index))%0A self.axes.relim(); self.axes.autoscale_view() # rescale the y-axis%0A return self.lineplot%0A animation.FuncAnimation(figure, wrapper, interval=interval)%0A%0Adef main():%0A from matplotlib import pyplot as plt%0A%0A fig, axes = plt.subplots()%0A display = RealtimePlot(axes)%0A display.animate(fig, lambda frame_index: (time.time() - start, random.random() * 100))%0A plt.show()%0A%0A fig, axes = plt.subplots()%0A display = RealtimePlot(axes)%0A while True:%0A display.add(time.time() - start, random.random() * 100)%0A plt.pause(0.001)%0A%0Aif __name__ == %22__main__%22: main()%0A
8ae82037dde45019cae8912f45a36cf3a362c444
Revert "HAProxy uses milliseconds ..."
openstack/network/v2/health_monitor.py
openstack/network/v2/health_monitor.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack.network import network_service from openstack import resource2 as resource class HealthMonitor(resource.Resource): resource_key = 'healthmonitor' resources_key = 'healthmonitors' base_path = '/lbaas/healthmonitors' service = network_service.NetworkService() # capabilities allow_create = True allow_get = True allow_update = True allow_delete = True allow_list = True _query_mapping = resource.QueryParameters( 'delay', 'expected_codes', 'http_method', 'max_retries', 'timeout', 'type', 'url_path', is_admin_state_up='adminstate_up', project_id='tenant_id', ) # Properties #: The time, in milliseconds, between sending probes to members. delay = resource.Body('delay') #: Expected HTTP codes for a passing HTTP(S) monitor. expected_codes = resource.Body('expected_codes') #: The HTTP method that the monitor uses for requests. http_method = resource.Body('http_method') #: The administrative state of the health monitor, which is up #: ``True`` or down ``False``. *Type: bool* is_admin_state_up = resource.Body('admin_state_up', type=bool) #: Maximum consecutive health probe tries. max_retries = resource.Body('max_retries') #: Name of the health monitor. name = resource.Body('name') #: List of pools associated with this health monitor #: *Type: list of dicts which contain the pool IDs* pool_ids = resource.Body('pools', type=list) #: The ID of the project this health monitor is associated with. project_id = resource.Body('tenant_id') #: The maximum number of milliseconds for a monitor to wait for a #: connection to be established before it times out. This value must #: be less than the delay value. timeout = resource.Body('timeout') #: The type of probe sent by the load balancer to verify the member #: state, which is PING, TCP, HTTP, or HTTPS. type = resource.Body('type') #: Path portion of URI that will be probed if type is HTTP(S). url_path = resource.Body('url_path')
Python
0.000006
@@ -1236,21 +1236,16 @@ ime, in -milli seconds, @@ -2184,13 +2184,8 @@ of -milli seco
c3add04f098e81b20946abaa99e6f2d81055b168
Lie algebras: Type A
sympy/liealgebras/type_A.py
sympy/liealgebras/type_A.py
Python
0.998793
@@ -0,0 +1,2362 @@ +from sympy.core import(Set, Dict, Tuple)%0Afrom cartan_type import CartanType_standard%0Afrom sympy.matrices import eye%0A%0A%0Aclass CartanType(Standard_Cartan):%0A%0A def __init__(self,n):%0A assert n %3E= 1%0A Standard_Cartan.__init__(self, %22A%22, n)%0A%0A%0A def dimension(self, n):%0A %22%22%22%0A Return the dimension of the vector space%0A V underlying the Lie algebra%0A Example%0A ========%0A %3E%3E%3E c = CartanType%5B%22A4%22%5D%0A %3E%3E%3E c.dimension%0A 4%0A %22%22%22%0A return n+1%0A%0A%0A def basic_root(self, i, j):%0A %22%22%22%0A This is a method just to generate roots%0A with a 1 iin the ith position and a -1%0A in the jth postion.%0A%0A %22%22%22%0A%0A n = self.n%0A root = %5B0%5D*(n+1)%0A root%5Bi%5D = 1%0A root%5Bj%5D = -1%0A return root%0A%0A def simple_root(self, i):%0A %22%22%22%0A Returns the ith simple root for the A series.%0A%0A Examples%0A ========%0A %3E%3E%3E c = CartanType%5B%22A4%22%5D%0A %3E%3E%3E c.simple_root(1)%0A %5B1,-1,0,0,0%5D%0A%0A %22%22%22%0A%0A return self.basic_root(i-1,i)%0A%0A def highest_root(self):%0A return self.basic_root(0, self.n - 1)%0A%0A def roots(self):%0A %22%22%22%0A Returns the total number of roots for A_n%0A %22%22%22%0A n = self.n%0A return n(n+1)%0A%0A def cartan_matrix(self):%0A %22%22%22%0A Returns the Cartan matrix for A_n.%0A The Cartan matrix matrix for a Lie algebra is%0A generated by assigning an ordering to the simple%0A roots, (alpha%5B1%5D, ...., alpha%5Bl%5D). Then the ijth%0A entry of the Cartan matrix is (%3Calpha%5Bi%5D,alpha%5Bj%5D%3E.%0A%0A Example%0A =======%0A %3E%3E%3E c = CartanType%5B'A4'%5D%0A %3E%3E%3E c.cartan_matrix%0A %5B2 -1 0 0 %5D%0A %5B %5D%0A %5B-1 2 -1 0%5D%0A %5B %5D%0A %5B0 -1 2 -1%5D%0A %5B %5D%0A %5B0 0 -1 2%5D%0A%0A%0A %22%22%22%0A%0A n = self.n%0A m = 2 * eye(n)%0A i = 1%0A for i %3C n-1:%0A m%5Bi,i+1%5D = -1%0A m%5Bi-1,i%5D = -1%0A i += 1%0A m%5B0,1%5D = -1%0A m%5Bn-1, n-2%5D = -1%0A return m%0A%0A def basis(self):%0A %22%22%22%0A Returns the number of independent generators of A_n%0A %22%22%22%0A n = self.n%0A return n**2 - 1%0A%0A def LieAlgebra(self):%0A %22%22%22%0A Returns the Lie algebra associated with A_n%0A %22%22%22%0A n = self.n%0A return %22su(%22 + str(n + 1) + %22)%22%0A
f7e4ca11c7bfc35bf0fd6becd2a5d5fdd2ca5ed5
Add a script to split data with partitions.
src/main/python/partition_data.py
src/main/python/partition_data.py
Python
0
@@ -0,0 +1,1110 @@ +import csv;%0Aimport random;%0Aimport sys;%0A%0Ain_file = str(sys.argv%5B1%5D)%0Aout_file = str(sys.argv%5B2%5D)%0Anum_partitions = int(sys.argv%5B3%5D)%0A%0Aheader = %5B%5D;%0Apartitions = %5B%5D;%0Afor i in range(num_partitions):%0A partitions.append(%5B%5D)%0A%0A# Load all the training rows%0Arow_num = 0;%0Awith open(in_file) as file:%0A reader = csv.reader(file);%0A header = reader.next();%0A for row in reader:%0A partitions%5Brow_num %25 num_partitions%5D.append(row);%0A row_num += 1;%0A%0A# Write test and train files for k partitions%0Afor i in range(num_partitions):%0A train_rows = %5B%5D%0A test_rows = partitions%5Bi%5D;%0A for j in range(num_partitions):%0A if i != j:%0A for row in partitions%5Bj%5D:%0A train_rows.append(row);%0A%0A with open(out_file+'_k'+str(i+1)+'_train.csv', 'wb') as ofile:%0A writer = csv.writer(ofile)%0A writer.writerow(header)%0A for row in train_rows:%0A writer.writerow(row)%0A%0A with open(out_file+'_k'+str(i+1)+'_test.csv', 'wb') as ofile:%0A writer = csv.writer(ofile)%0A writer.writerow(header)%0A for row in test_rows:%0A writer.writerow(row)%0A
dc76e7c085e7462d75567bf3d0228defb6bbbc58
Add tests for converter
tests/test_csv_converter.py
tests/test_csv_converter.py
Python
0
@@ -0,0 +1,1199 @@ +import os%0Aimport sys%0Asys.path.insert(0, os.path.dirname(__file__))%0Asys.path.insert(1, os.path.dirname(%22..%22))%0A%0Aimport unittest%0Afrom csv_converter import CsvConverter%0A%0Aclass TestCsvConverter(unittest.TestCase):%0A%0A def test_parse_csv(self):%0A converter = CsvConverter(%22tests/data/data_1.csv%22)%0A converter.setSourceColumns(%22tuotekoodi%22, %22qty%22)%0A converter.read_file()%0A%0A def test_convert_row(self):%0A converter = CsvConverter(%22%22)%0A row = converter.convertRow(%7B%0A %22product_code%22: %22some_code%22,%0A %22quantity%22: %2250%22%0A %7D)%0A self.assertEqual(%22some_code%22, row%5B%22product_code%22%5D)%0A%0A with self.assertRaises(ValueError):%0A row = converter.convertRow(%7B%0A %22product_code%22: %2223%22,%0A %22quantity%22: %22error%22%0A %7D)%0A%0A with self.assertRaises(ValueError):%0A row = converter.convertRow(%7B%0A %22product_code%22: %22%22,%0A %22quantity%22: %22error%22%0A %7D)%0A%0A with self.assertRaises(ValueError):%0A row = converter.convertRow(%7B%0A %22product_code%22: %22sd%22,%0A %22quantity%22: %22%22%0A %7D)%0A%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
645507ed9ec43b354880673fbc75afe169ef6697
Add test capturing bad implementation of contains handler.
tests/unit/test_handlers.py
tests/unit/test_handlers.py
Python
0
@@ -0,0 +1,244 @@ +from pmxbot import core%0A%0Adef test_contains_always_match():%0A%09%22%22%22%0A%09Contains handler should always match if no rate is specified.%0A%09%22%22%22%0A%09handler = core.ContainsHandler(name='#', func=None)%0A%09assert handler.match('Tell me about #foo', channel='bar')%0A
d61c42221774f36477b1288396f4e7e7337e905c
add data migration
formly/migrations/0012_fix_multi_text_answer_data.py
formly/migrations/0012_fix_multi_text_answer_data.py
Python
0.000001
@@ -0,0 +1,1279 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.6 on 2018-01-23 13:46%0Afrom __future__ import unicode_literals%0A%0Aimport json%0A%0Afrom django.db import migrations%0A%0Adef migrate_data(apps, schema_editor):%0A FieldResult = apps.get_model(%22formly%22, %22FieldResult%22)%0A # alias for Field.MULTIPLE_TEXT%0A MULTIPLE_TEXT_TYPE = 8%0A multiple_text_results = FieldResult.objects.filter(question__field_type=MULTIPLE_TEXT_TYPE)%0A print(%22%5Cn%22)%0A if multiple_text_results.exists() is False:%0A print(%22formly-data-migration: No multiple text results data found. Skipping data migration.%22)%0A return%0A%0A print(%22formly-data-migration: Updating data on %7B%7D FieldResult instances%22.format(multiple_text_results.count()))%0A for result in multiple_text_results:%0A raw_answer = result.answer%5B%22answer%22%5D%0A if isinstance(raw_answer, unicode):%0A try:%0A answer = json.loads(raw_answer)%0A except:%0A answer = %5Braw_answer%5D%0A result.answer%5B%22answer%22%5D = answer%0A result.save()%0A print(%22formly-data-migration: Data update complete!%22)%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A (%22formly%22, %220011_field_mapping%22),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(migrate_data),%0A %5D%0A
904e6757531ddf7b983cb30c0da8496d55eddb23
Reimplement Stoneskin Gargoyle using Attr()
fireplace/cards/naxxramas/collectible.py
fireplace/cards/naxxramas/collectible.py
from ..utils import * ## # Minions # Zombie Chow class FP1_001: deathrattle = Heal(ENEMY_HERO, 5) # Haunted Creeper class FP1_002: deathrattle = Summon(CONTROLLER, "FP1_002t"), Summon(CONTROLLER, "FP1_002t") # Mad Scientist class FP1_004: deathrattle = ForcePlay(CONTROLLER, RANDOM(CONTROLLER_DECK + SECRET)) # Shade of Naxxramas class FP1_005: events = OWN_TURN_BEGIN.on(Buff(SELF, "FP1_005e")) # Nerubian Egg class FP1_007: deathrattle = Summon(CONTROLLER, "FP1_007t") # Deathlord class FP1_009: deathrattle = ForcePlay(OPPONENT, RANDOM(OPPONENT_DECK + MINION)) # Webspinner class FP1_011: deathrattle = Give(CONTROLLER, RandomMinion(race=Race.BEAST)) # Sludge Belcher class FP1_012: deathrattle = Summon(CONTROLLER, "FP1_012t") # Kel'Thuzad class FP1_013: def resurrect_friendly_minions(self, *args): for minion in self.game.minions_killed_this_turn.filter(controller=self.controller): yield Summon(CONTROLLER, minion.id) events = TURN_END.on(resurrect_friendly_minions) # Stalagg class FP1_014: deathrattle = Find(KILLED + ID("FP1_015")) & Summon(CONTROLLER, "FP1_014t") # Feugen class FP1_015: deathrattle = Find(KILLED + ID("FP1_014")) & Summon(CONTROLLER, "FP1_014t") # Wailing Soul class FP1_016: play = Silence(FRIENDLY_MINIONS) # Voidcaller class FP1_022: deathrattle = ForcePlay(CONTROLLER, RANDOM(CONTROLLER_HAND + DEMON)) # Dark Cultist class FP1_023: deathrattle = Buff(RANDOM_FRIENDLY_MINION, "FP1_023e") # Unstable Ghoul class FP1_024: deathrattle = Hit(ALL_MINIONS, 1) # Anub'ar Ambusher class FP1_026: deathrattle = Bounce(RANDOM_FRIENDLY_MINION) # Stoneskin Gargoyle class FP1_027: events = OWN_TURN_BEGIN.on( lambda self, player: Heal(self, self.damage) ) # Undertaker class FP1_028: events = Summon(CONTROLLER, MINION + DEATHRATTLE).on(Buff(SELF, "FP1_028e")) # Dancing Swords class FP1_029: deathrattle = Draw(OPPONENT) # Loatheb class FP1_030: play = Buff(ENEMY_HERO, "FP1_030e") class FP1_030e: events = OWN_TURN_BEGIN.on(Destroy(SELF)) class FP1_030ea: cost = lambda self, i: i + 5 if self.owner.controller.current_player else i ## # Spells # Reincarnate class FP1_025: play = Destroy(TARGET), Summon(CONTROLLER, Copy(TARGET)) ## # Secrets # Duplicate class FP1_018: events = Death(FRIENDLY + MINION).on(Give(CONTROLLER, Copy(Death.Args.ENTITY)) * 2) # Avenge class FP1_020: events = Death(FRIENDLY + MINION).on(Find(FRIENDLY_MINIONS) & ( Buff(RANDOM_FRIENDLY_MINION, "FP1_020e"), Reveal(SELF) )) ## # Weapons # Death's Bite class FP1_021: deathrattle = Hit(ALL_MINIONS, 1)
Python
0
@@ -1684,57 +1684,46 @@ .on( -%0A%09%09lambda self, player: Heal(self, self.damage)%0A%09 +Heal(SELF, Attr(SELF, GameTag.DAMAGE)) )%0A%0A%0A
a1f864de0c5e71f0e9dc0ff4a23dc8101556832b
add new script
icart_mini_navigation/scripts/navigation_strategy.py
icart_mini_navigation/scripts/navigation_strategy.py
Python
0.000001
@@ -0,0 +1,1278 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Atry:%0A import roslib; roslib,load_manifest('rospeex_if')%0Aexcept:%0A pass%0A%0Aimport rospy%0Aimport re%0A%0Afrom rospeex_if import ROSpeexInterface%0Afrom std_msgs.msg import String%0A%0Asyscommand_pub = rospy.Publisher('syscommand', String, queue_size=10)%0Arospy.init_node('navigation_strategy', anonymous=True)%0Ar = rospy.Rate(10)%0A%0Aclass talk_node(object):%0A%0A def __init__(self):%0A%0A self._interface = ROSpeexInterface()%0A%0A def sr_response(self, message):%0A%0A run = re.compile('(?P%3Crun%3E%E8%B5%B0%E8%A1%8C)').search(message)%0A start = re.compile('(?P%3Cstart%3E%E9%96%8B%E5%A7%8B)').search(message)%0A%0A%09print 'you said : %25s' %25message%0A%0A if run is not None and start is not None:%0A%09 text = u'%E3%83%8A%E3%83%93%E3%82%B2%E3%83%BC%E3%82%B7%E3%83%A7%E3%83%B3%E3%82%92%E9%96%8B%E5%A7%8B%E3%81%97%E3%81%BE%E3%81%99%E3%80%82'%0A%09 robot_msg = 'start'%0A%0A rospy.loginfo(robot_msg)%0A syscommand_pub.publish(robot_msg)%0A%0A%0A print 'rospeex reply : %25s' %25text%0A self._interface.say(text, 'ja', 'nict')%0A%0A def run(self):%0A%0A self._interface.init()%0A self._interface.register_sr_response(self.sr_response)%0A self._interface.set_spi_config(language='ja',engine='nict')%0A rospy.spin()%0A%0Aif __name__ == '__main__':%0A try:%0A node = talk_node()%0A node.run()%0A except rospy.ROSInterruptException:%0A pass%0A
b3e9075e819402f93f7dc2e29b61e3e621ab7355
Add unit tests for averaging imputations
impy/imputations/tests/test_averaging_imputations.py
impy/imputations/tests/test_averaging_imputations.py
Python
0
@@ -0,0 +1,1665 @@ +%22%22%22test_averaging_imputations.py%22%22%22%0Aimport unittest%0Aimport numpy as np%0Afrom impy.imputations import mean_imputation%0Afrom impy.imputations import mode_imputation%0Afrom impy.imputations import median_imputation%0Afrom impy.datasets import random_int%0A%0A%0Aclass TestAveraging(unittest.TestCase):%0A %22%22%22 Tests for Averaging %22%22%22%0A def setUp(self):%0A self.data = random_int(missingness=%22complete%22)%0A%0A def test_mean_return_type(self):%0A %22%22%22Mean Imputation Return Type%22%22%22%0A self.assertEqual(str(type(mean_imputation(self.data))),%0A %22%3Cclass 'numpy.ndarray'%3E%22)%0A%0A def test_mode_return_type(self):%0A %22%22%22Mode Imputation Return Type%22%22%22%0A self.assertEqual(str(type(mode_imputation(self.data))),%0A %22%3Cclass 'numpy.ndarray'%3E%22)%0A%0A def test_median_return_type(self):%0A %22%22%22Median Imputation Return Type%22%22%22%0A self.assertEqual(str(type(median_imputation(self.data))),%0A %22%3Cclass 'numpy.ndarray'%3E%22)%0A%0A def test_mean_fill(self):%0A %22%22%22 Mean Imputation Fill Complete Data(nothing should happen)%22%22%22%0A actual = mean_imputation(self.data)%0A self.assertTrue(np.array_equal(actual, self.data))%0A%0A def test_mode_fill(self):%0A %22%22%22 Mode Imputation Fill Complete Data(nothing should happen)%22%22%22%0A actual = mode_imputation(self.data)%0A self.assertTrue(np.array_equal(actual, self.data))%0A%0A def test_median_fill(self):%0A %22%22%22 Median Imputation Fill Complete Data(nothing should happen)%22%22%22%0A actual = median_imputation(self.data)%0A self.assertTrue(np.array_equal(actual, self.data))%0A%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()%0A
dd784f9035f66fd7d4febb8e43a09353821312b9
add solution for Kth Largest Element in an Array
algorithms/KthLargestElementInAnArray/KthLargestElementInAnArray.py
algorithms/KthLargestElementInAnArray/KthLargestElementInAnArray.py
Python
0
@@ -0,0 +1,699 @@ +class Solution:%0A # @param %7Binteger%5B%5D%7D nums%0A # @param %7Binteger%7D k%0A # @return %7Binteger%7D%0A%0A def findKthLargest(self, nums, k):%0A k = len(nums) - k%0A%0A def quickselect(st, ed):%0A pivot = nums%5Bed%5D%0A pos = st%0A for i in xrange(st, ed):%0A if nums%5Bi%5D %3C pivot:%0A nums%5Bi%5D, nums%5Bpos%5D = nums%5Bpos%5D, nums%5Bi%5D%0A pos += 1%0A nums%5Bpos%5D, nums%5Bed%5D = nums%5Bed%5D, nums%5Bpos%5D%0A if pos == k:%0A return nums%5Bpos%5D%0A elif pos %3C k:%0A return quickselect(pos+1, ed)%0A else:%0A return quickselect(st, pos-1)%0A%0A return quickselect(0, len(nums)-1)%0A
aa411ddcd62b824c0bfe8660c795b71e6e6929ea
add reset command.
axes/utils.py
axes/utils.py
Python
0.001104
@@ -0,0 +1,552 @@ +from axes.models import AccessAttempt%0A%0Adef reset(ip=None, silent=False):%0A if not ip:%0A attempts = AccessAttempt.objects.all()%0A if attempts:%0A for attempt in AccessAttempt.objects.all():%0A attempt.delete()%0A else:%0A if not silent:%0A print 'No attempts found.'%0A else:%0A try:%0A attempt = AccessAttempt.objects.get(ip_address=ip)%0A except:%0A if not silent:%0A print 'No matching attempt found.'%0A else:%0A attempt.delete()%0A
ceb8a32637bc0fd9ab0517be7f025755e19ec2c7
add leetcode Excel Sheet Column Number
leetcode/ExcelSheetColumnNumber/solution.py
leetcode/ExcelSheetColumnNumber/solution.py
Python
0.000001
@@ -0,0 +1,227 @@ +# -*- coding:utf-8 -*-%0Aclass Solution:%0A # @param s, a string%0A # @return an integer%0A def titleToNumber(self, s):%0A col = 0%0A for c in s:%0A col = col * 26 + ord(c) - ord('A') + 1%0A return col%0A
cf7e9dfec0c0cdab913f98ff325210b552610219
Add new runner, search!
salt/runners/search.py
salt/runners/search.py
Python
0
@@ -0,0 +1,312 @@ +'''%0ARunner frontend to search system%0A'''%0A%0A# Import salt libs%0Aimport salt.search%0Aimport salt.output%0A%0Adef query(term):%0A '''%0A Query the search system%0A '''%0A search = salt.search.Search(__opts__)%0A result = search.query(term)%0A salt.output.display_output(result, 'pprint', __opts__)%0A return result%0A
da704e95b010330efd350e7ed85e51f252b8a453
add missing migration
aldryn_redirects/migrations/0002_on_delete_and_verbose_names.py
aldryn_redirects/migrations/0002_on_delete_and_verbose_names.py
Python
0.000258
@@ -0,0 +1,983 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.2 on 2016-02-22 08:03%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0Aimport django.db.models.deletion%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('aldryn_redirects', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelOptions(%0A name='redirecttranslation',%0A options=%7B'default_permissions': (), 'managed': True, 'verbose_name': 'redirect Translation'%7D,%0A ),%0A migrations.AlterField(%0A model_name='redirect',%0A name='site',%0A field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='aldryn_redirects_redirect_set', to='sites.Site'),%0A ),%0A migrations.AlterField(%0A model_name='redirecttranslation',%0A name='language_code',%0A field=models.CharField(db_index=True, max_length=15, verbose_name='Language'),%0A ),%0A %5D%0A
17ae9e25663d029af11236584b4c759c895ae830
Improve and consolidate condition scripts of Lithium to support timeouts and regex via optparse. r=Jesse
util/fileIngredients.py
util/fileIngredients.py
Python
0
@@ -0,0 +1,1160 @@ +#!/usr/bin/env python%0A#%0A# This Source Code Form is subject to the terms of the Mozilla Public%0A# License, v. 2.0. If a copy of the MPL was not distributed with this file,%0A# You can obtain one at http://mozilla.org/MPL/2.0/.%0A%0Afrom __future__ import with_statement%0A%0Aimport re%0A%0Adef fileContains(f, s, isRegex):%0A if isRegex:%0A return fileContainsRegex(f, re.compile(s, re.MULTILINE))%0A else:%0A return fileContainsStr(f, s), s%0A%0A%0Adef fileContainsStr(f, s):%0A found = False%0A with open(f, 'rb') as g:%0A for line in g:%0A if line.find(s) != -1:%0A print line.rstrip()%0A found = True%0A return found%0A%0Adef fileContainsRegex(f, regex):%0A # e.g. ~/fuzzing/lithium/lithium.py crashesat --timeout=30%0A # --regex '%5E#0%5Cs*0x.* in%5Cs*.*(?:%5Cn%7C%5Cr%5Cn?)#1%5Cs*' ./js --ion -n 735957.js%0A # Note that putting %22%5E%22 and %22$%22 together is unlikely to work.%0A matchedStr = ''%0A found = False%0A with open(f, 'rb') as g:%0A foundRegex = regex.search(g.read())%0A if foundRegex:%0A matchedStr = foundRegex.group()%0A print matchedStr%0A found = True%0A return found, matchedStr%0A
03baa59cea76ab85f661bfa3e8d910fd6a7ae82a
Remove leading slash in redirections
nikola/plugins/task/redirect.py
nikola/plugins/task/redirect.py
# -*- coding: utf-8 -*- # Copyright © 2012-2016 Roberto Alsina and others. # Permission is hereby granted, free of charge, to any # person obtaining a copy of this software and associated # documentation files (the "Software"), to deal in the # Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the # Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice # shall be included in all copies or substantial portions of # the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS # OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """Generate redirections.""" from __future__ import unicode_literals import os from nikola.plugin_categories import Task from nikola import utils class Redirect(Task): """Generate redirections.""" name = "redirect" def gen_tasks(self): """Generate redirections tasks.""" kw = { 'redirections': self.site.config['REDIRECTIONS'], 'output_folder': self.site.config['OUTPUT_FOLDER'], 'filters': self.site.config['FILTERS'], } yield self.group_task() if kw['redirections']: for src, dst in kw["redirections"]: src_path = os.path.join(kw["output_folder"], src) yield utils.apply_filters({ 'basename': self.name, 'name': src_path, 'targets': [src_path], 'actions': [(utils.create_redirect, (src_path, dst))], 'clean': True, 'uptodate': [utils.config_changed(kw, 'nikola.plugins.task.redirect')], }, kw["filters"])
Python
0.000001
@@ -1811,16 +1811,28 @@ r%22%5D, src +.lstrip('/') )%0A
21ef2114975a315815d960fd1f28c5e4036fb935
Update browsermark to use results.AddValue(..)
tools/perf/benchmarks/browsermark.py
tools/perf/benchmarks/browsermark.py
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Runs Browsermark CSS, DOM, WebGL, JS, resize and page load benchmarks. Browsermark benchmark suite have five test groups: a) CSS group: measures your browsers 2D and 3D performance, and finally executes CSS Crunch test b) DOM group: measures variety of areas, like how well your browser traverse in Document Object Model Tree or how fast your browser can create dynamic content c) General group: measures areas like resize and page load times d) Graphics group: tests browsers Graphics Processing Unit power by measuring WebGL and Canvas performance e) Javascript group: executes number crunching by doing selected Array and String operations Additionally Browsermark will test your browsers conformance, but conformance tests are not included in this suite. """ import os from telemetry import benchmark from telemetry.page import page_measurement from telemetry.page import page_set class _BrowsermarkMeasurement(page_measurement.PageMeasurement): def MeasurePage(self, _, tab, results): # Select nearest server(North America=1) and start test. js_start_test = """ for (var i=0; i < $('#continent a').length; i++) { if (($('#continent a')[i]).getAttribute('data-id') == '1') { $('#continent a')[i].click(); $('.start_test.enabled').click(); } } """ tab.ExecuteJavaScript(js_start_test) tab.WaitForJavaScriptExpression( 'window.location.pathname.indexOf("results") != -1', 600) result = int(tab.EvaluateJavaScript( 'document.getElementsByClassName("score")[0].innerHTML')) results.Add('Score', 'score', result) @benchmark.Disabled class Browsermark(benchmark.Benchmark): """Browsermark suite tests CSS, DOM, resize, page load, WebGL and JS.""" test = _BrowsermarkMeasurement def CreatePageSet(self, options): ps = page_set.PageSet( file_path=os.path.abspath(__file__), archive_data_file='../page_sets/data/browsermark.json', make_javascript_deterministic=False) ps.AddPageWithDefaultRunNavigate('http://browsermark.rightware.com/tests/') return ps
Python
0.000001
@@ -1054,16 +1054,51 @@ page_set +%0Afrom telemetry.value import scalar %0A%0Aclass @@ -1801,17 +1801,72 @@ ults.Add -( +Value(%0A scalar.ScalarValue(results.current_page, 'Score', @@ -1882,16 +1882,17 @@ result) +) %0A%0A%0A@benc
8204a8b84cdcd515ea1dcf7ab67574b6db5baca6
Add WS caller
web_services/ws_test.py
web_services/ws_test.py
Python
0
@@ -0,0 +1,898 @@ +import functools%0Aimport xmlrpclib%0AHOST = 'localhost'%0APORT = 8069%0ADB = 'odoo_curso'%0AUSER = 'admin'%0APASS = 'admin'%0AROOT = 'http://%25s:%25d/xmlrpc/' %25 (HOST,PORT)%0A%0A# 1. Login%0Auid = xmlrpclib.ServerProxy(ROOT + 'common').login(DB,USER,PASS)%0Aprint %22Logged in as %25s (uid:%25d)%22 %25 (USER,uid)%0A%0Acall = functools.partial(%0A xmlrpclib.ServerProxy(ROOT + 'object').execute,%0A DB, uid, PASS)%0A%0A# 2. Read the sessions%0Amodel = 'openacademy.session'%0Adomain = %5B%5D%0Amethod_name = 'search_read'%0Asessions = call(model, method_name, domain, %5B'name','seats','taken_seats'%5D)%0Afor session in sessions:%0A print %22Session %25s (%25s seats), taken seats %25d%22 %25 (session%5B'name'%5D, session%5B'seats'%5D, session%5B'taken_seats'%5D)%0A%0A# 3.create a new session%0Acourse_id = call('openacademy.course', 'search', %5B('name','ilike','Functional')%5D)%5B0%5D%0Asession_id = call(model, 'create', %7B%0A 'name' : 'My session loca',%0A 'course_id' : course_id,%0A%7D)%0A%0A
605fb4c6726d0c66bada870bffe526d493195b33
Create USN.py
USN.py
USN.py
Python
0.000003
@@ -0,0 +1,108 @@ +#Spooky scary skeeletons send shiveers down your spine%0A%0A#You are a gunner in the Navy. Destroy the Commies.%0A
129e548ac0be8ee3a60dd85aca9d095456b7d3a6
Add new py-testresources package (#14031)
var/spack/repos/builtin/packages/py-testresources/package.py
var/spack/repos/builtin/packages/py-testresources/package.py
Python
0
@@ -0,0 +1,636 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PyTestresources(PythonPackage):%0A %22%22%22Testresources, a pyunit extension for managing expensive test resources.%0A %22%22%22%0A%0A homepage = %22https://launchpad.net/testresources%22%0A url = %22https://pypi.io/packages/source/t/testresources/testresources-2.0.1.tar.gz%22%0A%0A version('2.0.1', sha256='ee9d1982154a1e212d4e4bac6b610800bfb558e4fb853572a827bc14a96e4417')%0A%0A depends_on('py-setuptools', type='build')%0A
f8712c62ad069b815ff775bd758bdbf693bdbdb7
Add some constants.
src/pyfuckery/constants.py
src/pyfuckery/constants.py
Python
0.000001
@@ -0,0 +1,401 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A# XXX Update Docstring%0A%22%22%22%0ApyFuckery - constants.py%0ACreated on 2/12/17.%0A%0A%0A%22%22%22%0A# Stdlib%0Aimport logging%0Aimport re%0A# Third Party Code%0A# Custom Code%0Alog = logging.getLogger(__name__)%0A%0A%0A# Brainfuck tokens%0ASYM_PTR_INC = '%3E'%0ASYM_PTR_DEC = '%3C'%0ASYM_DATA_INC = '+'%0ASYM_DATA_DEC = '-'%0ASYM_IO_OUTPUT = '.'%0ASYM_IO_INPUT = ','%0ASYM_JMP_FWD = '%5B'%0ASYM_JMP_BACKWARD = '%5D'%0A%0A
37190d6fba4d2a769ff2dcd154aa8cf1721f7026
yea yea
gmm.py
gmm.py
Python
0.999953
@@ -0,0 +1,1547 @@ +%22%22%22Trains a GMM on formant data (e.g. from the Hillenbrand corpus).%0A%22%22%22%0A%0Aimport numpy as np%0Afrom sklearn.mixture import GMM%0Afrom sklearn import metrics%0Aimport pylab as pl%0Afrom collections import defaultdict%0A%0A%0A%0Adef parse(fname):%0A with open(fname) as f:%0A d = map(lambda l: l.rstrip('%5Cn').split(), f.readlines())%0A header = d%5B0%5D%0A d = filter(lambda x: not 'NaN' in x, d)%0A return header, np.array(d%5B1:%5D)%0A%0A%0Adef eval_clusters(y_pred, y):%0A # maximize the 1-to-1 matching between y_pred and y%0A sety = set(y)%0A counts = defaultdict(lambda: defaultdict(lambda: 0))%0A for i in xrange(y_pred.shape%5B0%5D):%0A counts%5By%5Bi%5D%5D%5By_pred%5Bi%5D%5D += 1%0A maps_to = %7B%7D%0A for y_, yp_c_d in counts.iteritems():%0A max_ = 0%0A ind_max = None%0A for yp, c in yp_c_d.iteritems():%0A if c %3E max_:%0A max_ = c%0A ind_max = yp%0A maps_to%5By_%5D = ind_max%0A y_gold = np.array(map(lambda x: maps_to%5Bx%5D, y))%0A print %22Adjusted rand scores:%22,%0A print metrics.adjusted_rand_score(y_gold, y_pred)%0A print %22Homogeneity:%22,%0A print metrics.homogeneity_score(y_gold, y_pred) %0A print %22Completeness:%22,%0A print metrics.completeness_score(y_gold, y_pred) %0A print %22V-measure:%22,%0A print metrics.v_measure_score(y_gold, y_pred) %0A%0A%0Aif __name__ == %22__main__%22:%0A h, d = parse('formants.dat')%0A X = d%5B:,3:5%5D.astype(np.float)%0A y = d%5B:,2%5D%0A print %22All the%22, len(set(y)), %22vowels:%22, set(y)%0A %0A gmm = GMM(n_components=len(set(y)))%0A gmm.fit(X)%0A eval_clusters(gmm.predict(X), y)%0A%0A%0A%0A
c97e44697444b15686bd0a6b5158c90630958238
Add LRU example
lru.py
lru.py
Python
0
@@ -0,0 +1,1739 @@ +from datetime import datetime%0A%0A%0Aclass LRUCacheItem(object):%0A %22%22%22Data structure of items stored in cache%22%22%22%0A def __init__(self, key, item):%0A self.key = key%0A self.item = item%0A self.timestamp = datetime.now()%0A%0A%0Aclass LRUCache(object):%0A %22%22%22A sample class that implements LRU algorithm%22%22%22%0A%0A def __init__(self, length, delta=None):%0A self.length = length%0A self.delta = delta%0A self.hash = %7B%7D%0A self.item_list = %5B%5D%0A%0A def insertItem(self, item):%0A %22%22%22Insert new items to cache%22%22%22%0A%0A if item.key in self.hash:%0A # Move the existing item to the head of item_list.%0A item_index = self.item_list.index(item)%0A self.item_list%5B:%5D = self.item_list%5B:item_index%5D + self.item_list%5Bitem_index+1:%5D%0A self.item_list.insert(0, item)%0A else:%0A # Remove the last item if the length of cache exceeds the upper bound.%0A if len(self.item_list) %3E self.length:%0A self.removeItem(self.item_list%5B-1%5D)%0A%0A # If this is a new item, just append it to%0A # the front of item_list.%0A self.hash%5Bitem.key%5D = item%0A self.item_list.insert(0, item)%0A%0A def removeItem(self, item):%0A %22%22%22Remove those invalid items%22%22%22%0A%0A del self.hash%5Bitem.key%5D%0A del self.item_list%5Bself.item_list.index(item)%5D%0A%0A def validateItem(self):%0A %22%22%22Check if the items are still valid.%22%22%22%0A%0A def _outdated_items():%0A now = datetime.now()%0A for item in self.item_list:%0A time_delta = now - item.timestamp%0A if time_delta.seconds %3E self.delta:%0A yield item%0A map(lambda x: self.removeItem(x), _outdated_items())%0A
dd7ffbf97f9ae8426d7f60e465941f3f70bccdd6
add file
new.py
new.py
Python
0.000001
@@ -0,0 +1,14 @@ +print(%22test%22)%0A
494c8b88727dc958a7ba37f76d4c470837d26e1d
Define register files
reg.py
reg.py
Python
0.000043
@@ -0,0 +1,122 @@ +EXP = 'EXP'%0AVAL = 'VAL'%0AENV = 'ENV'%0AUNEV = 'UNEV'%0AFUNC = 'FUNC'%0AARGL = 'ARGL'%0ACONT = 'CONT'%0ACURR = 'CURR'%0A%0ASTACK = 'STACK'
1c46aa8a03e577ddb3db55a11df3db70905110d2
Add serial_logger.py
serial_logger.py
serial_logger.py
Python
0.000021
@@ -0,0 +1,846 @@ +#!/usr/bin/env python%0A# encoding: utf-8%0A%0A# Log serial monitor data%0A# TO-DO: add options for serial device, baud rate%0A%0Aimport serial%0Aimport datetime%0Aser = serial.Serial('/dev/cu.usbmodemfa131', 9600)%0A%0Anow = datetime.datetime.now()%0A%0Adef get_date_string():%0A day = now.day%0A month = now.month%0A year = now.year%0A current_day = %22%7B0%7D-%7B1%7D-%7B2%7D%22.format(year, month, day)%0A%0A return current_day%0A%0A%0Awhile True:%0A current_date = get_date_string()%0A filename = current_date + '.temperature.log'%0A with open(filename, 'a') as log:%0A try:%0A temp = ser.readline()%0A #temp = 76%0A now = datetime.datetime.now()%0A iso = now.isoformat()%0A data = %22%7B0%7D %7B1%7D%22.format(iso, temp)%0A print data.strip()%0A log.write(data)%0A #print now, temp%0A except:%0A pass%0A
0df0daf7f52015258c3607bb2822c1c77c5e8207
add tensorflow sample
python/other/flow.py
python/other/flow.py
Python
0
@@ -0,0 +1,213 @@ +import tensorflow as tf%0A%0Aa = tf.constant(1, name=%22a%22)%0Ab = tf.constant(1, name=%22b%22)%0A%0Ac = a + b%0A%0Aprint(c)%0Agraph = tf.get_default_graph()%0Aprint(graph.as_graph_def())%0A%0Awith tf.Session() as sess:%0A print(sess.run(c))
fdc900d5da48ae9aea1c7537e026dc2d46c62bc8
add some reusable aggregation code
shrunk/aggregations.py
shrunk/aggregations.py
Python
0.000001
@@ -0,0 +1,1148 @@ +%0Adef match_short_url(url):%0A return %7B%22$match%22: %7B%22short_url%22:url%7D%7D%0A%0Adef match_id(id):%0A return %7B%22$match%22: %7B%22short_url%22:url%7D%7D%0A%0A#monthly visits aggregations phases%0Agroup_ips=%7B%22$group%22: %7B%0A %22_id%22: %22$source_ip%22,%0A %22times%22: %7B%0A %22$addToSet%22: %22$time%22%0A %7D,%0A %22count%22: %7B %0A %22$sum%22: 1%0A %7D%0A%7D%7D%0Atake_first_visit=%7B%22$project%22: %7B%0A %22time%22: %7B%0A %22$arrayElemAt%22: %5B%22$times%22,0%5D%0A %7D,%0A %22count%22: 1%0A%7D%7D%0A#this monthly sort can probably get abstracted and reused%0Agroup_months=%7B%22$group%22: %7B%0A %22_id%22: %7B%0A %22month%22: %7B%22$month%22: %22$time%22%7D,%0A %22year%22 : %7B%22$year%22 : %22$time%22%7D%0A %7D,%0A %22first_time_visits%22: %7B%0A %22$sum%22: 1%0A %7D,%0A %22all_visits%22: %7B%0A %22$sum%22: %22$count%22%0A %7D%0A%7D%7D%0Amake_sortable=%7B%22$project%22: %7B%0A %22month%22: %22$_id.month%22,%0A %22year%22 : %22$_id.year%22,%0A %22first_time_visits%22: 1,%0A %22all_visits%22: 1%0A%7D%7D%0Achronological_sort=%7B %22$sort%22: %7B%0A %22year%22 : 1,%0A %22month%22: 1%0A%7D%7D%0Aclean_results=%7B%22$project%22: %7B%0A %22first_time_visits%22: 1,%0A %22all_visits%22: 1%0A%7D%7D%0Amonthly_visits_aggregation=%5Bgroup_ips, take_first_visit, group_months, #process data%0A make_sortable, chronological_sort, clean_results%5D #sort%0A
f9da8c4aa061223dac5147f6eaec6ad3419d1d6a
Add cli module to accept a language option
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/cli.py
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/cli.py
Python
0.000001
@@ -0,0 +1,458 @@ +import os%0Aos.environ%5B%22KIVY_NO_ARGS%22%5D = %221%22%0A%0Aimport click%0A%0Afrom %7B%7Bcookiecutter.repo_name%7D%7D.%7B%7Bcookiecutter.repo_name%7D%7D import %7B%7Bcookiecutter.app_class_name%7D%7D%0A%0A%[email protected]()%[email protected](%0A '-l', '--language', help='Default language of the App', default='en',%0A type=click.Choice(%5B'en', 'de'%5D)%0A)%0Adef main(language):%0A %22%22%22Run %7B%7Bcookiecutter.app_class_name%7D%7D with the given language setting.%0A %22%22%22%0A %7B%7Bcookiecutter.app_class_name%7D%7D(language).run()%0A
195b74304fa1c5eab3bc2e16df1346c2f92916f8
Test py
testnet/tests/configs_api_test.py
testnet/tests/configs_api_test.py
Python
0
@@ -0,0 +1,2623 @@ +#!/usr/bin/env python3%0A%0Aimport unittest%0Aimport datetime%0A%0Afrom exonum import ExonumApi, random_hex%0A%0Aclass ConfigsApi(ExonumApi):%0A %0A def new_config_propose(self, config, height, actual_from_height):%0A tx, c = self.send_transaction(%22config/propose%22, %7B%22config%22: config, %22height%22: height, %22actual_from_height%22: actual_from_height%7D)%0A return (self.get_config_propose(tx))%0A%0A def new_config_vote(self):%0A tx, _ = self.send_transaction(%0A %22config/vote%22, %7B%22config_propose_hash%22: hash%7D)%0A%0A def get_config_propose(self, hash):%0A r = self.get(%22config/propose/%22 + hash)%0A return r.json()%0A%0A def get_config_vote(self, pubkey):%0A r = self.get(%22config/vote/%22 + hash)%0A return r.json()%0A%0Aclass ConfigsApiTest(ConfigsApi):%0A %0A def setUp(self):%0A super().setUp()%0A self.host = %22http://127.0.0.1:8400/api/v1%22%0A self.times = 120%0A%0A def create_many_proposes(self, txs): %0A final_tx = None%0A%0A print()%0A print(%22 - Create %7B%7D config_proposes%22.format(txs))%0A start = datetime.datetime.now()%0A for i in range(txs):%0A r, c = self.post_transaction(%0A %22wallets/create%22, %7B%22name%22: %22name_%22 + str(i)%7D) %0A final_tx = r%5B%22tx_hash%22%5D%0A%0A tx = self.wait_for_transaction(final_tx)%0A self.assertNotEqual(tx, None)%0A finish = datetime.datetime.now()%0A%0A delta = finish - start%0A ms = delta.seconds * 1000 + delta.microseconds / 1000%0A print(%22 - Commited, txs=%7B%7D, total time: %7B%7Ds%22.format(txs, ms / 1000))%0A%0A start = datetime.datetime.now()%0A for i in range(txs):%0A info = self.find_user(cookies%5Bi%5D)%0A self.assertEqual(info%5B%22name%22%5D, %22name_%22 + str(i))%0A finish = datetime.datetime.now()%0A%0A delta = finish - start%0A ms = delta.seconds * 1000 + delta.microseconds / 1000%0A print(%22 - All users found, total time: %7B%7Ds%22.format(ms / 1000))%0A%0A def test_create_config_propose(self):%0A r, c = self.create_user(%22My First User%22)%0A self.assertEqual(r%5B%22name%22%5D, %22My First User%22)%0A self.assertEqual(r%5B%22balance%22%5D, 0)%0A%0A def test_create_proposes_1_10(self):%0A self.create_many_proposes(10)%0A%0A def test_create_proposes_2_100(self):%0A self.create_many_proposes(100)%0A%0A def test_create_proposes_3_1000(self):%0A self.create_many_proposes(1000)%0A%0A def test_create_proposes_4_5000(self):%0A self.create_many_proposes(5000)%0A%0A def test_create_proposes_5_10000(self):%0A self.create_many_proposes(10000)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main(verbosity=2, buffer=None)%0A
1ac0c90037923c06a337b7236b678d8ca2b45e5f
Fix unit test
tests/unit/engines/test_acgsou.py
tests/unit/engines/test_acgsou.py
from collections import defaultdict import mock from searx.engines import acgsou from searx.testing import SearxTestCase class TestAcgsouEngine(SearxTestCase): def test_request(self): query = 'test_query' dic = defaultdict(dict) dic['pageno'] = 1 params = acgsou.request(query, dic) self.assertTrue('url' in params) self.assertTrue(query in params['url']) self.assertTrue('acgsou.com' in params['url']) def test_response(self): resp = mock.Mock(text='<html></html>') self.assertEqual(acgsou.response(resp), []) html = """ <table id="listTable" class="list_style table_fixed"> <thead class="tcat"> <tr> tablehead </tr> </thead> <tbody class="tbody" id="data_list"> <tr class="alt1 "> <td nowrap="nowrap">date</td> <td><a href="category.html">testcategory</a></td> <td style="text-align:left;"> <a href="show-torrentid.html" target="_blank">torrentname</a> </td> <td>1MB</td> <td nowrap="nowrap"> <span class="bts_1"> 29 </span> </td> <td nowrap="nowrap"> <span class="btl_1"> 211 </span> </td> <td nowrap="nowrap"> <span class="btc_"> 168 </span> </td> <td><a href="random.html">user</a></td> </tr> </table> """ resp = mock.Mock(text=html) results = acgsou.response(resp) self.assertEqual(type(results), list) self.assertEqual(len(results), 1) r = results[0] self.assertEqual(r['url'], 'https://www.acgsou.com/show-torrentid.html') self.assertEqual(r['content'], 'Category: "testcategory".') self.assertEqual(r['title'], 'torrentname') self.assertEqual(r['filesize'], 1048576)
Python
0.000005
@@ -608,16 +608,31 @@ l = %22%22%22%0A + %3Chtml%3E%0A %3Ctable i @@ -721,17 +721,495 @@ -tablehead + %3Cth axis=%22string%22 class=%22l1 tableHeaderOver%22%3E%E5%8F%91%E5%B8%83%E6%97%B6%E9%97%B4%3C/th%3E%0A %3Cth axis=%22string%22 class=%22l2 tableHeaderOver%22%3E%E5%88%86%E7%B1%BB%3C/th%3E%0A %3Cth axis=%22string%22 class=%22l3 tableHeaderOver%22%3E%E8%B5%84%E6%BA%90%E5%90%8D%E7%A7%B0%3C/th%3E%0A %3Cth axis=%22size%22 class=%22l4 tableHeaderOver%22%3E%E5%A4%A7%E5%B0%8F%3C/th%3E%0A %3Cth axis=%22number%22 class=%22l5 tableHeaderOver%22%3E%E7%A7%8D%E5%AD%90%3C/th%3E%0A %3Cth axis=%22number%22 class=%22l6 tableHeaderOver%22%3E%E4%B8%8B%E8%BD%BD%3C/th%3E%0A %3Cth axis=%22number%22 class=%22l7 tableHeaderOver%22%3E%E5%AE%8C%E6%88%90%3C/th%3E%0A %3Cth axis=%22string%22 class=%22l8 tableHeaderOver%22%3E%E5%8F%91%E5%B8%83%E8%80%85/%E8%81%94%E7%9B%9F%3C/th%3E %0A @@ -1267,29 +1267,16 @@ _list%22%3E%0A - %3Ctr cla @@ -1287,16 +1287,16 @@ alt1 %22%3E%0A + @@ -1916,15 +1916,38 @@ tr%3E%0A -%3C/table + %3C/tbody%3E%0A%3C/table%3E%0A%3C/html %3E%0A
641cf239ae5f4ddb64d81f5c977c1b14381ae805
Add exec permission to download_build_install.py
third_party/instrumented_libraries/download_build_install.py
third_party/instrumented_libraries/download_build_install.py
#!/usr/bin/python # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Downloads, builds (with instrumentation) and installs shared libraries.""" import argparse import os import shutil import subprocess import sys # Should be a dict from 'sanitizer type' to 'compiler flag'. SUPPORTED_SANITIZERS = {'asan': 'address'} class ScopedChangeDirectory(object): """Changes current working directory and restores it back automatically.""" def __init__(self, path): self.path = path self.old_path = '' def __enter__(self): self.old_path = os.getcwd() os.chdir(self.path) def __exit__(self, exc_type, exc_value, traceback): os.chdir(self.old_path) def get_script_absolute_path(): return os.path.dirname(os.path.abspath(__file__)) def get_library_build_dependencies(library): command = 'apt-get -s build-dep %s | grep Inst | cut -d " " -f 2' % library command_result = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True) build_dependencies = [l.strip() for l in command_result.stdout] return build_dependencies def download_build_install(parsed_arguments): sanitizer_flag = SUPPORTED_SANITIZERS[parsed_arguments.sanitizer_type] environment = os.environ.copy() environment['CFLAGS'] = '-fsanitize=%s -g -fPIC -w' % sanitizer_flag environment['CXXFLAGS'] = '-fsanitize=%s -g -fPIC -w' % sanitizer_flag # We use XORIGIN as RPATH and after building library replace it to $ORIGIN # The reason: this flag goes through configure script and makefiles # differently for different libraries. So the dollar sign '$' should be # differently escaped. Instead of having problems with that it just # uses XORIGIN to build library and after that replaces it to $ORIGIN # directly in .so file. environment['LDFLAGS'] = '-Wl,-z,origin -Wl,-R,XORIGIN/.' library_directory = '%s/%s' % (parsed_arguments.intermediate_directory, parsed_arguments.library) install_prefix = '%s/%s/instrumented_libraries/%s' % ( get_script_absolute_path(), parsed_arguments.product_directory, parsed_arguments.sanitizer_type) if not os.path.exists(library_directory): os.makedirs(library_directory) with ScopedChangeDirectory(library_directory), \ open(os.devnull, 'w') as dev_null: if subprocess.call('apt-get source %s' % parsed_arguments.library, stdout=dev_null, stderr=dev_null, shell=True): raise Exception('Failed to download %s' % parsed_arguments.library) # There should be exactly one subdirectory after downloading a package. subdirectories = [d for d in os.listdir('.') if os.path.isdir(d)] if len(subdirectories) != 1: raise Exception('There was not one directory after downloading ' \ 'a package %s' % parsed_arguments.library) with ScopedChangeDirectory(subdirectories[0]): # Now we are in the package directory. configure_command = './configure %s --prefix=%s' % ( parsed_arguments.custom_configure_flags, install_prefix) if subprocess.call(configure_command, stdout=dev_null, stderr=dev_null, env=environment, shell=True): raise Exception("Failed to configure %s" % parsed_arguments.library) if subprocess.call('make -j%s' % parsed_arguments.jobs, stdout=dev_null, stderr=dev_null, shell=True): raise Exception("Failed to make %s" % parsed_arguments.library) if subprocess.call('make -j%s install' % parsed_arguments.jobs, stdout=dev_null, stderr=dev_null, shell=True): raise Exception("Failed to install %s" % parsed_arguments.library) # Touch a txt file to indicate library is installed. 
open('%s/%s.txt' % (install_prefix, parsed_arguments.library), 'w').close() # Remove downloaded package and generated temporary build files. shutil.rmtree(library_directory) def main(): argument_parser = argparse.ArgumentParser( description = 'Download, build and install instrumented library') argument_parser.add_argument('-j', '--jobs', type=int, default=1) argument_parser.add_argument('-l', '--library', required=True) argument_parser.add_argument('-i', '--product-directory', default='.', help='Relative path to the directory with chrome binaries') argument_parser.add_argument('-m', '--intermediate-directory', default='.', help='Relative path to the directory for temporary build files') argument_parser.add_argument('-c', '--custom-configure-flags', default='') argument_parser.add_argument('-s', '--sanitizer-type', required=True, choices=SUPPORTED_SANITIZERS.keys()) parsed_arguments = argument_parser.parse_args() # Ensure current working directory is this script directory os.chdir(get_script_absolute_path()) # Ensure all build dependencies are installed build_dependencies = get_library_build_dependencies(parsed_arguments.library) if len(build_dependencies): print >> sys.stderr, 'Please, install build-dependencies for %s' % \ parsed_arguments.library print >> sys.stderr, 'One-liner for APT:' print >> sys.stderr, 'sudo apt-get -y --no-remove build-dep %s' % \ parsed_arguments.library sys.exit(1) download_build_install(parsed_arguments) if __name__ == '__main__': main()
Python
0.000002
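The XORIGIN comment in download_build_install.py above describes a placeholder-RPATH trick; a minimal sketch of the final substitution step it implies, assuming the library was linked with -Wl,-R,XORIGIN/. (the real pipeline may well use chrpath or sed instead):

def fix_rpath(library_path):
    # 'XORIGIN' and '$ORIGIN' have the same length, so a raw byte
    # substitution leaves the ELF layout untouched.
    with open(library_path, 'rb') as f:
        data = f.read()
    with open(library_path, 'wb') as f:
        f.write(data.replace(b'XORIGIN', b'$ORIGIN'))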
aa720214722ca6ea445cf4ba38aa5f51ef7772b4
add random user for notes
add_random_user.py
add_random_user.py
Python
0
@@ -0,0 +1,1089 @@ +#!/usr/bin/python%0D%0A#coding=utf-8%0D%0A%0D%0Aimport sys%0D%0A%0D%0Aimport MySQLdb%0D%0Afrom DBUtils.PooledDB import PooledDB%0D%0Aimport hashlib%0D%0Aimport time%0D%0Aimport random%0D%0A%0D%0Ag_dbPool = PooledDB(MySQLdb, 5, host='function-hz.com', user='notes', passwd='welc0me', db='db_notes', port=3306, charset = %22utf8%22, use_unicode = True);%0D%0A%0D%0Adef create_random_user(user_name, szPwd):%0D%0A #create user by cell phone number and send dynamic password%0D%0A conn = g_dbPool.connection()%0D%0A cur=conn.cursor()%0D%0A count = cur.execute(%22insert into user(user_name, password) values (%25s, %25s) %22 %5C%0D%0A , (user_name, hashlib.md5(szPwd).hexdigest()))%0D%0A conn.commit()%0D%0A%0D%0A if (1 == count):%0D%0A return True%0D%0A else:%0D%0A return False%0D%0A%0D%0Aif __name__ == '__main__':%0D%0A print (%22start add rendom user%22)%0D%0A for i in range(1, 5000000):%0D%0A szPhone = str(random.randint(11111111111, 99999999999))%0D%0A szPwd = %22123456%22%0D%0A print (%22create user %25d %25s ==%3E %25s%22 %25 (i, szPhone, szPwd))%0D%0A # nPhone = random.randint(11111111111, 99999999999)%0D%0A create_random_user(szPhone, szPwd)%0D%0A%0D%0A
05aa314ac9b5d38bb7a30e30aced9b27b2797888
Add tests for non-async constructs
python/ql/test/experimental/dataflow/tainttracking/defaultAdditionalTaintStep/test_syntax.py
python/ql/test/experimental/dataflow/tainttracking/defaultAdditionalTaintStep/test_syntax.py
Python
0.000006
@@ -0,0 +1,876 @@ +# Add taintlib to PATH so it can be imported during runtime without any hassle%0Aimport sys; import os; sys.path.append(os.path.dirname(os.path.dirname((__file__))))%0Afrom taintlib import *%0A%0A# This has no runtime impact, but allows autocomplete to work%0Afrom typing import TYPE_CHECKING%0Aif TYPE_CHECKING:%0A from ..taintlib import *%0A%0A%0A# Actual tests%0A%0Aclass Context:%0A def __enter__(self):%0A return TAINTED_STRING%0A%0A def __exit__(self, exc_type, exc, tb):%0A pass%0A%0Adef test_with():%0A ctx = Context()%0A taint(ctx)%0A with ctx as tainted:%0A ensure_tainted(tainted) # $ tainted%0A%0A%0Aclass Iter:%0A def __iter__(self):%0A return self%0A%0A def __next__(self):%0A raise StopIteration%0A%0Adef test_for():%0A iter = Iter()%0A taint(iter)%0A for tainted in iter:%0A ensure_tainted(tainted) # $ tainted%0A%0A%0A%0A# Make tests runable%0A%0Atest_with()%0Atest_for()%0A
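A further non-async case in the same style could exercise taint flowing out of __next__ return values; this sketch is hypothetical (not part of the recorded commit) and reuses the taintlib helpers the file imports:

class YieldingIter:
    def __init__(self):
        self.done = False

    def __iter__(self):
        return self

    def __next__(self):
        # First call yields the tainted value, second call stops.
        if self.done:
            raise StopIteration
        self.done = True
        return TAINTED_STRING

def test_for_value():
    for tainted in YieldingIter():
        ensure_tainted(tainted)  # $ tainted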
388c51ea5f83f718b885d784b566bc1873998c3a
add management command used to find all duplicate districts
custom/icds_reports/management/commands/find_duplicate_district_topojsons.py
custom/icds_reports/management/commands/find_duplicate_district_topojsons.py
Python
0.000027
@@ -0,0 +1,1053 @@ +from django.core.management import BaseCommand%0A%0Afrom custom.icds_reports.utils.topojson_util.topojson_util import get_topojson_file_for_level, %5C%0A get_district_topojson_data%0A%0A%0Aclass Command(BaseCommand):%0A help = %22Prints out any districts whose names are duplicated across states.%22%0A%0A def handle(self, *args, **kwargs):%0A district_topojson_data = get_district_topojson_data()%0A districts_to_states = %7B%7D%0A districts_with_duplicates = set()%0A for state, data in district_topojson_data.items():%0A for district_name in data%5B'districts'%5D:%0A if district_name in districts_to_states:%0A districts_with_duplicates.add(district_name)%0A districts_to_states%5Bdistrict_name%5D.append(state)%0A else:%0A districts_to_states%5Bdistrict_name%5D = %5Bstate%5D%0A print('District Name: %5BStates%5D%5Cn')%0A for duplicate_district in districts_with_duplicates:%0A print(f'%7Bduplicate_district%7D: %7B%22, %22.join(districts_to_states%5Bduplicate_district%5D)%7D')%0A
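The duplicate-district accumulation in this command follows a common pattern; a self-contained sketch with toy data standing in for get_district_topojson_data():

from collections import defaultdict

district_topojson_data = {
    'State A': {'districts': ['North', 'Central']},
    'State B': {'districts': ['North', 'South']},
}

districts_to_states = defaultdict(list)
for state, data in district_topojson_data.items():
    for district_name in data['districts']:
        districts_to_states[district_name].append(state)

duplicates = {d: s for d, s in districts_to_states.items() if len(s) > 1}
print(duplicates)  # {'North': ['State A', 'State B']}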
8634db8fe61f819cf24023514d94e4ebfc7e819f
Add Stats() class
auth0/v2/stats.py
auth0/v2/stats.py
Python
0.000001
@@ -0,0 +1,886 @@ +from .rest import RestClient%0A%0A%0Aclass Stats(object):%0A %22%22%22Auth0 stats endpoints%0A%0A Args:%0A domain (str): Your Auth0 domain, e.g: 'username.auth0.com'%0A%0A jwt_token (str): An API token created with your account's global%0A keys. You can create one by using the token generator in the%0A API Explorer: https://auth0.com/docs/api/v2%0A %22%22%22%0A%0A def __init__(self, domain, jwt_token):%0A self.domain = domain%0A self.client = RestClient(jwt=jwt_token)%0A%0A def _url(self, action):%0A return 'https://%25s/api/v2/stats/%25s' %25 (self.domain, action)%0A%0A def active_users(self):%0A return self.client.get(self._url('active-users'))%0A%0A def daily_stats(self, from_date=None, to_date=None):%0A return self.client.get(self._url('daily'), params=%7B'from': from_date,%0A 'to': to_date%7D)%0A
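Hypothetical usage of the new Stats class; the domain, token, and date strings below are placeholders, and each call issues a live HTTP request to the Auth0 API:

from auth0.v2.stats import Stats

stats = Stats('username.auth0.com', jwt_token='A_GLOBAL_API_TOKEN')
print(stats.active_users())
print(stats.daily_stats(from_date='20161201', to_date='20161215'))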
c1fcf54b63de95c85a9505d83062d8b320b1cbdf
Add python cloudfront update_distribution example to replace ACM Certificate
python/example_code/cloudfront/update_distribution_certificate.py
python/example_code/cloudfront/update_distribution_certificate.py
Python
0
@@ -0,0 +1,2573 @@ +# Copyright 2010-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22). You%0A# may not use this file except in compliance with the License. A copy of%0A# the License is located at%0A#%0A# http://aws.amazon.com/apache2.0/%0A#%0A# or in the %22license%22 file accompanying this file. This file is%0A# distributed on an %22AS IS%22 BASIS, WITHOUT WARRANTIES OR CONDITIONS OF%0A# ANY KIND, either express or implied. See the License for the specific%0A# language governing permissions and limitations under the License.%0A%0Aimport boto3%0Aimport sys%0A%0A#support for python 2 and 3 input types%0Adef read(output):%0A if sys.version_info%5B0%5D %3C 3:%0A return(raw_input(output))%0A else:%0A return(input(output))%0A%0A# Create CloudFront client%0Acf = boto3.client('cloudfront')%0A%0A# List distributions with the pagination interface%0Aprint(%22%5CnAvailable CloudFront Distributions:%5Cn%22)%0Apaginator = cf.get_paginator('list_distributions')%0Afor distributionlist in paginator.paginate():%0A for distribution in distributionlist%5B'DistributionList'%5D%5B'Items'%5D:%0A print(%22Domain: %22 + distribution%5B'DomainName'%5D)%0A print(%22Distribution Id: %22 + distribution%5B'Id'%5D)%0A print(%22Certificate Source: %22 + distribution%5B'ViewerCertificate'%5D%5B'CertificateSource'%5D)%0A if (distribution%5B'ViewerCertificate'%5D%5B'CertificateSource'%5D == %22acm%22):%0A print(%22Certificate ARN: %22 + distribution%5B'ViewerCertificate'%5D%5B'Certificate'%5D)%0A print(%22%22)%0A%0Aprint('Enter the Distribution Id of the CloudFront Distribution who%5C's ACM Certificate you would like to replace. ')%0Adistribution_id = read('Note that certificate source must be ACM - DistributionId: ')%0A%0Adistribution_config_response=cf.get_distribution_config(Id=distribution_id)%0Adistribution_config=distribution_config_response%5B'DistributionConfig'%5D%0Adistribution_etag=distribution_config_response%5B'ETag'%5D%0A%0Aif (distribution_config%5B'ViewerCertificate'%5D%5B'CertificateSource'%5D != %22acm%22):%0A%09print(%22%5CnThe DistributionId you have entered is not currently using an ACM Certificate, exiting...%5Cn%22)%0A%09exit()%0A%0Aold_cert_arn=distribution_config%5B'ViewerCertificate'%5D%5B'ACMCertificateArn'%5D%0A%0Anew_cert_arn=read(%22Please enter the ARN of the new ACM Certificate you would like to attach to Distribution %22 + distribution_id + %22: %22)%0A%0Aprint(%22Replacing: %22 + old_cert_arn + %22%5Cnwith: %22 + new_cert_arn + %22%5Cn%22)%0A%0Adistribution_config%5B'ViewerCertificate'%5D%5B'ACMCertificateArn'%5D=new_cert_arn%0Adistribution_config%5B'ViewerCertificate'%5D%5B'Certificate'%5D=new_cert_arn%0A%0Acf.update_distribution(DistributionConfig=distribution_config,Id=distribution_id,IfMatch=distribution_etag)%0A
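The essential API round-trip this script relies on, in minimal form: CloudFront updates must echo back the ETag returned by get_distribution_config as IfMatch (the distribution id here is a placeholder):

import boto3

cf = boto3.client('cloudfront')
resp = cf.get_distribution_config(Id='EDFDVBD6EXAMPLE')
config, etag = resp['DistributionConfig'], resp['ETag']
# ...mutate config here, e.g. swap the ACM certificate ARN...
cf.update_distribution(DistributionConfig=config,
                       Id='EDFDVBD6EXAMPLE',
                       IfMatch=etag)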
151293037b941aba874fb2641c1bf982e2143beb
Create solution.py
hackerrank/algorithms/implementation/medium/the_time_in_words/py/solution.py
hackerrank/algorithms/implementation/medium/the_time_in_words/py/solution.py
Python
0.000018
@@ -0,0 +1,1388 @@ +#!/bin/python3%0A%0Aimport sys%0A%0A%0Adef solution(hrs, min):%0A lookup = %7B%0A 0: 'zero',%0A 1: 'one',%0A 2: 'two',%0A 3: 'three',%0A 4: 'four',%0A 5: 'five',%0A 6: 'six',%0A 7: 'seven',%0A 8: 'eight',%0A 9: 'nine',%0A 10: 'ten',%0A 11: 'eleven',%0A 12: 'twelve',%0A 13: 'thirteen',%0A 14: 'fourteen',%0A 15: 'quarter',%0A 16: 'sixteen',%0A 17: 'seventeen',%0A 18: 'eighteen',%0A 19: 'nineteen',%0A 20: 'twenty',%0A 21: 'twenty one',%0A 22: 'twenty two',%0A 23: 'twenty three',%0A 24: 'twenty four',%0A 25: 'twenty five',%0A 26: 'twenty six',%0A 27: 'twenty seven',%0A 28: 'twenty eight',%0A 29: 'twenty nine',%0A 30: 'half',%0A %7D%0A%0A if min == 0:%0A return %22%7B%7D o' clock%22.format(lookup%5Bhrs%5D)%0A elif min %3C= 30:%0A if min == 15 or min == 30:%0A return %22%7B%7D past %7B%7D%22.format(lookup%5Bmin%5D, lookup%5Bhrs%5D)%0A else:%0A return %22%7B%7D minute%7B%7D past %7B%7D%22.format(lookup%5Bmin%5D, '' if min == 1 else 's', lookup%5Bhrs%5D) %0A %0A rem = 60 - min%0A %0A if rem == 15 or rem == 30:%0A return %22%7B%7D to %7B%7D%22.format(lookup%5Brem%5D, lookup%5Bhrs + 1%5D)%0A %0A return %22%7B%7D minute%7B%7D to %7B%7D%22.format(lookup%5Brem%5D, '' if rem == 1 else 's', lookup%5Bhrs + 1%5D)%0A%0Ah = int(input().strip())%0Am = int(input().strip())%0As = solution(h, m)%0A%0Aprint(s)%0A
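A few expected conversions for the solution function above, assuming the rem-based pluralization in the "to" branch (these match the HackerRank problem statement):

print(solution(5, 0))    # five o' clock
print(solution(5, 1))    # one minute past five
print(solution(5, 15))   # quarter past five
print(solution(5, 47))   # thirteen minutes to six
print(solution(5, 59))   # one minute to six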
8ba0fcfa893e007f1c6cc794a36bd3604498c380
add rapiro.py
rapiroController.kivy/rapiro.py
rapiroController.kivy/rapiro.py
Python
0.999586
@@ -0,0 +1,1435 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0Aimport sys%0Aimport serial%0Aimport os%0Aimport time%0Aimport datetime%0Aimport threading%0A%0A_str = ''%0Aif os.name == 'posix':%0A com = serial.Serial('/dev/ttyAMA0', 57600, timeout = 0.05)%0Aelse:%0A com = sys.stdout%0A%0Adef a2dist(v):%0A d = 26.59*pow(v/1024.0*5.0,-1.209)%0A return(d)%0A%0Adef rxData():%0A global _str%0A while (1):%0A n = com.inWaiting()%0A #print n, _str%0A if n %3E 0:%0A _str += com.read(n)%0A%0Adef command(data):%0A inst = data.split(',')%0A r = ''%0A try:%0A t = inst%5B0%5D%0A s = inst%5B1%5D%0A except:%0A t = 'x'%0A s = 'Not define'%0A if t == 'a':%0A %22%22%22%0A Arduino%0A %22%22%22%0A com.write(s)%0A r = com.readline()%0A elif t == 'p':%0A %22%22%22%0A Raspberry pi%0A %22%22%22%0A os.system(s)%0A else:%0A pass%0A return(t, s, r)%0A%0Adef main():%0A #print(command('a,#M0'))%0A #print(command('a,#Z'))%0A #print(command('a,#PS02A090S05A000T001'))%0A print(command('a,#M0'))%0A print(command('a,#Q'))%0A print(command('a,#A6'))%0A #print(command('a,#A1'))%0A #print(command('a,#A2'))%0A #print(command('a,#A3'))%0A #print(command('a,#A4'))%0A #print(command('a,#A5'))%0A print(command('a,#A6'))%0A #print(command('a,#A7'))%0A print(command('a,#C'))%0A print(command('a,#D'))%0A%0Aif __name__ == '__main__':%0A #t1 = threading.Thread(target=rxData)%0A #t1.setDaemon(True)%0A #t1.start()%0A main()%0A
a2848885e85ad6d9685bb8ae35747300ed4b6b8b
Add a BaseTokenizer
spicedham/tokenizer.py
spicedham/tokenizer.py
Python
0.000006
@@ -0,0 +1,128 @@ +class BaseTokenizer(object):%0A%0A def __init__(self, config):%0A pass%0A%0A def tokenize(self, text):%0A return %5Btext%5D%0A
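A hypothetical subclass showing how a concrete tokenizer would override the base behavior (module path as in the commit; the subclass name is illustrative):

from spicedham.tokenizer import BaseTokenizer

class WhitespaceTokenizer(BaseTokenizer):
    def tokenize(self, text):
        # Split on runs of whitespace instead of returning [text].
        return text.split()

tokenizer = WhitespaceTokenizer(config=None)
print(tokenizer.tokenize('ham or spam'))  # ['ham', 'or', 'spam']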
75a882bf38c88d73e38d13fbb8b1499ff4ae4ea6
Add migration changing the fullnames of users added by OSF for Meetings from emails to their GUIDs
scripts/remove_after_use/set_meetings_users_fullnames_to_guids.py
scripts/remove_after_use/set_meetings_users_fullnames_to_guids.py
Python
0
@@ -0,0 +1,675 @@ +import sys%0Aimport logging%0A%0Aimport django%0Afrom django.db import transaction%0Adjango.setup()%0A%0Afrom osf.models import OSFUser%0A%0Alogger = logging.getLogger(__name__)%0Alogging.basicConfig(level=logging.INFO)%0A%0Adef main():%0A dry_run = '--dry' in sys.argv%0A with transaction.atomic():%0A users = OSFUser.objects.filter(fullname__regex=r'%5E%5BA-Za-z0-9%5C.%5C+_-%5D+@%5BA-Za-z0-9%5C._-%5D+%5C.%5Ba-zA-Z%5D*$', tags__name='osf4m')%0A logger.info('%7B%7D users found added by OSF 4 Meetings with emails for fullnames'.format(users.count()))%0A for user in users:%0A user.fullname = user._id%0A if not dry_run:%0A user.save()%0A%0Aif __name__ == '__main__':%0A main()%0A
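The __regex lookup in this migration selects fullnames that look like email addresses; the same pattern checked with the re module on sample names:

import re

pattern = r'^[A-Za-z0-9\.\+_-]+@[A-Za-z0-9\._-]+\.[a-zA-Z]*$'
for name in ('[email protected]', 'Jane Doe'):
    print(name, bool(re.match(pattern, name)))
# [email protected] True
# Jane Doe False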
a6b35a9a94b2e4b32c2236258812b44e81184515
Add management command for resyncing mobile worker location user data
corehq/apps/users/management/commands/fix_location_user_data.py
corehq/apps/users/management/commands/fix_location_user_data.py
Python
0
@@ -0,0 +1,1102 @@ +from corehq.apps.locations.models import Location%0Afrom corehq.apps.users.models import CommCareUser%0Afrom dimagi.utils.couch.database import iter_docs%0Afrom django.core.management.base import BaseCommand, CommandError%0A%0A%0Aclass Command(BaseCommand):%0A args = %22domain%22%0A help = %22Fix location user data for mobile workers.%22%0A%0A def process_user(self, user):%0A if user.location_id:%0A user.set_location(Location.get(user.location_id))%0A else:%0A user.unset_location()%0A%0A def handle(self, *args, **options):%0A if len(args) == 0:%0A raise CommandError(%22Usage: python manage.py fix_location_user_data %25s%22 %25 self.args)%0A%0A domain = args%5B0%5D%0A ids = (%0A CommCareUser.ids_by_domain(domain, is_active=True) +%0A CommCareUser.ids_by_domain(domain, is_active=False)%0A )%0A for doc in iter_docs(CommCareUser.get_db(), ids):%0A user = CommCareUser.wrap(doc)%0A try:%0A self.process_user(user)%0A except Exception as e:%0A print %22Error processing user %25s: %25s%22 %25 (user._id, e)%0A
a31ef338ef4029be92b0c578bdd12706a0f1c17d
Move zpool grains into salt.grains.zpool
salt/grains/zpool.py
salt/grains/zpool.py
Python
0.001271
@@ -0,0 +1,1923 @@ +# -*- coding: utf-8 -*-%0A'''%0AZFS grain provider%0A%0A:maintainer: Jorge Schrauwen %[email protected]%3E%0A:maturity: new%0A:depends: salt.utils, salt.module.cmdmod%0A:platform: illumos,freebsd,linux%0A%0A.. versionadded:: Oxygen%0A%0A'''%0Afrom __future__ import absolute_import%0A%0A# Import python libs%0Aimport logging%0A%0A# Import salt libs%0Aimport salt.utils.dictupdate%0Aimport salt.utils.path%0Aimport salt.utils.platform%0A%0A# Solve the Chicken and egg problem where grains need to run before any%0A# of the modules are loaded and are generally available for any usage.%0Aimport salt.modules.cmdmod%0A%0A__virtualname__ = 'zfs'%0A__salt__ = %7B%0A 'cmd.run': salt.modules.cmdmod.run,%0A 'cmd.run_all': salt.modules.cmdmod.run_all,%0A%7D%0A%0Alog = logging.getLogger(__name__)%0A%0A%0Adef __virtual__():%0A '''%0A Figure out if we need to be loaded%0A '''%0A # Don't load on windows, NetBSD, or proxy%0A # NOTE: ZFS on Windows is in development%0A # NOTE: ZFS on NetBSD is in development%0A if salt.utils.platform.is_windows() or salt.utils.platform.is_netbsd() or 'proxyminion' in __opts__:%0A return False%0A%0A # Don't load if we do not have the zpool command%0A if not salt.utils.path.which('zpool'):%0A return False%0A%0A return True%0A%0A%0Adef _zpool_data(zpool_cmd):%0A '''%0A Provide grains about zpools%0A '''%0A # collect zpool data%0A grains = %7B%7D%0A for zpool in __salt__%5B'cmd.run'%5D('%7Bzpool%7D list -H -o name,size'.format(zpool=zpool_cmd)).splitlines():%0A if 'zpool' not in grains:%0A grains%5B'zpool'%5D = %7B%7D%0A zpool = zpool.split()%0A grains%5B'zpool'%5D%5Bzpool%5B0%5D%5D = zpool%5B1%5D%0A%0A # return grain data%0A return grains%0A%0A%0Adef zpool():%0A '''%0A Provide grains for zfs/zpool%0A '''%0A grains = %7B%7D%0A zpool_cmd = salt.utils.path.which('zpool')%0A%0A grains = salt.utils.dictupdate.update(grains, _zpool_data(zpool_cmd), merge_lists=True)%0A%0A return grains%0A%0A# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4%0A
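What _zpool_data does to the command output, run here on a canned `zpool list -H -o name,size` sample instead of the live command (pool names and sizes are illustrative):

output = 'rpool\t39.8G\ndata\t1.81T'

grains = {}
for line in output.splitlines():
    name, size = line.split()
    grains.setdefault('zpool', {})[name] = size

print(grains)  # {'zpool': {'rpool': '39.8G', 'data': '1.81T'}}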
bb8a448e5e3f935f5ee4f8da9c78bcb651486c15
Create ZigZagConversion_002.py
leetcode/006-ZigZag-Conversion/ZigZagConversion_002.py
leetcode/006-ZigZag-Conversion/ZigZagConversion_002.py
Python
0.000001
@@ -0,0 +1,545 @@ +class Solution:%0A # @param %7Bstring%7D s%0A # @param %7Binteger%7D numRows%0A # @return %7Bstring%7D%0A def convert(self, s, numRows):%0A if numRows %3C 2:%0A return s%0A halfsize = numRows - 1%0A size = 2 * halfsize%0A res = ''%0A for i in range(numRows):%0A j, cnt = i, 1%0A while j %3C len(s):%0A res += s%5Bj%5D%0A if j %25 halfsize == 0:%0A j += size %0A else:%0A j = cnt * size - j%0A cnt += 1%0A return res%0A
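The canonical LeetCode check for this conversion; with numRows < 2 the input is returned unchanged:

sol = Solution()
print(sol.convert('PAYPALISHIRING', 3))  # PAHNAPLSIIGYIR
print(sol.convert('AB', 1))              # AB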
95fcdd4f2f65a330adcb115b7ce6d6084efc6ae8
Add Metropolis sampling example.
examples/sampling/metropolis.py
examples/sampling/metropolis.py
Python
0
@@ -0,0 +1,580 @@ +#!/usr/bin/env python%0A%0Aimport numpy as np%0Afrom util import MetropolisSampler, log_gaussian%0Afrom matplotlib import pyplot as plt%0A%0A%0Adef __main__():%0A np.random.seed(4)%0A # Generate parameters%0A num_dims = 3%0A mu = np.random.normal(0, 3, num_dims)%0A cov = np.diag(np.random.gamma(.5, size=num_dims))%0A # Create a sampler%0A sampler = MetropolisSampler(lambda x: -log_gaussian(x, mu, cov)%5B0%5D, cov / num_dims)%0A # Draw samples%0A sampler.sample(mu, 1000)%0A # Show the trace%0A sampler.trace_plot(values=mu)%0A plt.show()%0A%0A%0Aif __name__ == '__main__':%0A __main__()%0A
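The MetropolisSampler internals live in the local util module and are not part of this commit; a self-contained sketch of the standard Metropolis step such a sampler presumably wraps (function and argument names are assumptions):

import numpy as np

def metropolis(log_density, x0, proposal_cov, num_samples):
    x = np.asarray(x0, dtype=float)
    chain = [x]
    for _ in range(num_samples):
        candidate = np.random.multivariate_normal(x, proposal_cov)
        # Accept with probability min(1, p(candidate) / p(x)),
        # evaluated in log space for numerical stability.
        if np.log(np.random.uniform()) < log_density(candidate) - log_density(x):
            x = candidate
        chain.append(x)
    return np.array(chain)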
415717bddb00ca650bef61a5c6054a7b47575b56
Implement unit test for break.
jaspyx/tests/visitor/test_break.py
jaspyx/tests/visitor/test_break.py
Python
0.000001
@@ -0,0 +1,704 @@ +import ast%0Afrom jaspyx.ast_util import ast_store, ast_load%0Afrom jaspyx.tests.visitor.v8_helper import V8Helper%0A%0A%0Aclass TestBreak(V8Helper):%0A def test_break(self):%0A assert self.run(%0A %5B%0A ast.Assign(%0A %5Bast_store('i')%5D,%0A ast.Num(0),%0A ),%0A ast.While(%0A ast.Compare(%0A ast_load('i'),%0A %5Bast.Lt()%5D,%0A %5Bast.Num(10)%5D%0A ),%0A %5B%0A ast.Break(),%0A %5D,%0A %5B%5D%0A )%0A %5D,%0A 'i',%0A int%0A ) == 0%0A
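The AST built above corresponds to this Python, which jaspyx compiles to JavaScript and evaluates under V8; the loop body breaks immediately, so the test expects i to stay 0:

i = 0
while i < 10:
    break
# i == 0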
48d493e1b0e85cab7b40ade34bfad29437880bd2
add webdebugger to the documentation
doc/autobuild.py
doc/autobuild.py
''' Script to generate Kivy API from source code. Code is messy, but working. Be careful if you change anything in ! ''' ignore_list = ( 'kivy._event', 'kivy.factory_registers', 'kivy.graphics.buffer', 'kivy.graphics.vbo', 'kivy.graphics.vertex', 'kivy.lib.osc' ) import os import sys from glob import glob import kivy # force loading of kivy modules import kivy.app import kivy.metrics import kivy.atlas import kivy.core.audio import kivy.core.camera import kivy.core.clipboard import kivy.core.gl import kivy.core.image import kivy.core.spelling import kivy.core.text import kivy.core.text.markup import kivy.core.video import kivy.core.window import kivy.ext import kivy.graphics import kivy.graphics.shader import kivy.animation import kivy.modules.keybinding import kivy.modules.monitor import kivy.modules.touchring import kivy.modules.inspector import kivy.modules.recorder import kivy.modules.screen import kivy.modules.webdebugger import kivy.network.urlrequest import kivy.support import kivy.input.recorder import kivy.interactive import kivy.garden from kivy.factory import Factory # force loading of all classes from factory for x in list(Factory.classes.keys())[:]: getattr(Factory, x) # Directory of doc base_dir = os.path.dirname(__file__) dest_dir = os.path.join(base_dir, 'sources') examples_framework_dir = os.path.join(base_dir, '..', 'examples', 'framework') def writefile(filename, data): global dest_dir # avoid to rewrite the file if the content didn't change f = os.path.join(dest_dir, filename) print('write', filename) if os.path.exists(f): with open(f) as fd: if fd.read() == data: return h = open(f, 'w') h.write(data) h.close() # Activate Kivy modules ''' for k in kivy.kivy_modules.list().keys(): kivy.kivy_modules.import_module(k) ''' # Search all kivy module l = [(x, sys.modules[x], os.path.basename(sys.modules[x].__file__).rsplit('.', 1)[0]) for x in sys.modules if x.startswith('kivy') and sys.modules[x]] # Extract packages from modules packages = [] modules = {} api_modules = [] for name, module, filename in l: if name in ignore_list: continue if not any([name.startswith(x) for x in ignore_list]): api_modules.append(name) if filename == '__init__': packages.append(name) else: if hasattr(module, '__all__'): modules[name] = module.__all__ else: modules[name] = [x for x in dir(module) if not x.startswith('__')] packages.sort() # Create index api_index = \ '''API Reference ------------- The API reference is a lexicographic list of all the different classes, methods and features that Kivy offers. .. toctree:: :maxdepth: 1 ''' api_modules.sort() for package in api_modules: api_index += " api-%s.rst\n" % package writefile('api-index.rst', api_index) # Create index for all packages template = \ '''========================================================================================================== $SUMMARY ========================================================================================================== $EXAMPLES_REF .. automodule:: $PACKAGE :members: :show-inheritance: .. toctree:: $EXAMPLES ''' template_examples = \ '''.. _example-reference%d: Examples -------- %s ''' template_examples_ref = \ '''# :ref:`Jump directly to Examples <example-reference%d>`''' def extract_summary_line(doc): if doc is None: return for line in doc.split('\n'): line = line.strip() # don't take empty line if len(line) < 1: continue # ref mark if line.startswith('.. 
_'): continue return line for package in packages: summary = extract_summary_line(sys.modules[package].__doc__) if summary is None: summary = 'NO DOCUMENTATION (package %s)' % package t = template.replace('$SUMMARY', summary) t = t.replace('$PACKAGE', package) t = t.replace('$EXAMPLES_REF', '') t = t.replace('$EXAMPLES', '') # search packages for subpackage in packages: packagemodule = subpackage.rsplit('.', 1)[0] if packagemodule != package or len(subpackage.split('.')) <= 2: continue t += " api-%s.rst\n" % subpackage # search modules m = list(modules.keys()) m.sort(key=lambda x: extract_summary_line(sys.modules[x].__doc__)) for module in m: packagemodule = module.rsplit('.', 1)[0] if packagemodule != package: continue t += " api-%s.rst\n" % module writefile('api-%s.rst' % package, t) # Create index for all module m = list(modules.keys()) m.sort() refid = 0 for module in m: summary = extract_summary_line(sys.modules[module].__doc__) if summary is None: summary = 'NO DOCUMENTATION (module %s)' % package # search examples example_output = [] example_prefix = module if module.startswith('kivy.'): example_prefix = module[5:] example_prefix = example_prefix.replace('.', '_') # try to found any example in framework directory list_examples = glob('%s*.py' % os.path.join(examples_framework_dir, example_prefix)) for x in list_examples: # extract filename without directory xb = os.path.basename(x) # add a section ! example_output.append('File :download:`%s <%s>` ::' % ( xb, os.path.join('..', x))) # put the file in with open(x, 'r') as fd: d = fd.read().strip() d = '\t' + '\n\t'.join(d.split('\n')) example_output.append(d) t = template.replace('$SUMMARY', summary) t = t.replace('$PACKAGE', module) if len(example_output): refid += 1 example_output = template_examples % (refid, '\n\n\n'.join(example_output)) t = t.replace('$EXAMPLES_REF', template_examples_ref % refid) t = t.replace('$EXAMPLES', example_output) else: t = t.replace('$EXAMPLES_REF', '') t = t.replace('$EXAMPLES', '') writefile('api-%s.rst' % module, t) # Generation finished print('Generation finished, do make html')
Python
0
@@ -947,58 +947,58 @@ ivy. -modules.webdebugger%0Aimport kivy.network.urlrequest +network.urlrequest%0Aimport kivy.modules.webdebugger %0Aimp
607c84d56524389a150e940d89f6ecb52420e8f3
plain, AC
leetcode/0162_find-peak-element.py
leetcode/0162_find-peak-element.py
Python
0.998553
@@ -0,0 +1,539 @@ +#%0A# @lc app=leetcode id=162 lang=python3%0A#%0A# %5B162%5D Find Peak Element%0A#%0A%0Afrom typing import List%0A%0A# @lc code=start%0AINT_MIN = - 2 ** 64 + 1%0A%0Aclass Solution:%0A def findPeakElement(self, nums: List%5Bint%5D) -%3E int:%0A if len(nums) == 1:%0A return 0%0A nums.insert(0, INT_MIN)%0A nums.append(INT_MIN)%0A for i in range(len(nums)):%0A if nums%5Bi+1%5D %3E nums%5Bi%5D and nums%5Bi+1%5D %3E nums%5Bi+2%5D: %0A return i%0A return -1%0A%0A# @lc code=end%0As = Solution()%0Aa = %5B1,2,1,3,5,6,4%5D%0Aprint(s.findPeakElement(a))%0A
6db056ed452dfe01e5b6b414d414dd02d8960cef
use new mesolve output format
examples/ex_landau_zener.py
examples/ex_landau_zener.py
# # Textbook example: Landau-Zener transitions in a quantum two-level system. # from qutip import * from pylab import * import time def hamiltonian_t(t, args): """ evaluate the hamiltonian at time t. """ H0 = args[0] H1 = args[1] return H0 + t * H1 def qubit_integrate(delta, eps0, A, gamma1, gamma2, psi0, tlist): # Hamiltonian sx = sigmax() sz = sigmaz() sm = destroy(2) H0 = - delta/2.0 * sx - eps0/2.0 * sz H1 = - A/2.0 * sz # collapse operators c_op_list = [] n_th = 0.0 # zero temperature # relaxation rate = gamma1 * (1 + n_th) if rate > 0.0: c_op_list.append(sqrt(rate) * sm) # excitation rate = gamma1 * n_th if rate > 0.0: c_op_list.append(sqrt(rate) * sm.dag()) # dephasing rate = gamma2 if rate > 0.0: c_op_list.append(sqrt(rate) * sz) # evolve and calculate expectation values # method 1: function callback which returns the time-depdent qobj #H_args = (H0, H1) #expt_list = mesolve(hamiltonian_t, psi0, tlist, c_op_list, [sm.dag() * sm], H_args) # method 2: a function callback that returns the coefficient for a qobj #H = [H0, [H1, lambda x,y: x]] #expt_list = mesolve(H, psi0, tlist, c_op_list, [sm.dag() * sm], {}) # method 3: a string that defines the coefficient. The solver generates # and compiles C code using cython. This method is usually the fastest # for large systems or long time evolutions, but there is fixed-time # overhead that makes it inefficient for small and short-time evolutions. H = [H0, [H1, 't']] expt_list = mesolve(H, psi0, tlist, c_op_list, [sm.dag() * sm], {}) return expt_list[0] # # set up the calculation # delta = 0.5 * 2 * pi # qubit sigma_x coefficient eps0 = 0.0 * 2 * pi # qubit sigma_z coefficient A = 2.0 * 2 * pi # sweep rate gamma1 = 0.0 # relaxation rate gamma2 = 0.0 # dephasing rate psi0 = basis(2,0) # initial state tlist = linspace(-20.0, 20.0, 5000) start_time = time.time() p_ex = qubit_integrate(delta, eps0, A, gamma1, gamma2, psi0, tlist) print 'time elapsed = ' + str(time.time() - start_time) plot(tlist, real(p_ex), 'b', tlist, real(1-p_ex), 'r') plot(tlist, 1 - exp( - pi * delta **2 / (2 * A)) * ones(shape(tlist)), 'k') xlabel('Time') ylabel('Occupation probability') title('Landau-Zener transition') legend(("Excited state", "Ground state", "Landau-Zener formula"), loc=0) show()
Python
0.000126
@@ -1622,24 +1622,21 @@ '%5D%5D%0A -expt_lis +outpu t = meso @@ -1701,24 +1701,28 @@ return -expt_lis +output.expec t%5B0%5D%0A
a1e451ab3525c5a0852782d1990f848b2329cb72
add sinawb token
server/crawler/sinawb/TokenConstant.py
server/crawler/sinawb/TokenConstant.py
Python
0.999782
@@ -0,0 +1,322 @@ +#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0A%22%22%22%0AAuthor: AsherYang%0AEmail: [email protected]%0ADate: 2017/9/22.%0ADesc: sinaWeibo appkey%0A@see: http://open.weibo.com/apps/2489615368/info/basic?action=review%0A%22%22%22%0A%0Adomain=%22https://api.weibo.com/2/%22%0Atoken=%22%22%0Aappkey = %222489615368%22%0Asecret = %22dbb84df92e9a9c8f8e10d9985a8038a8%22
1a9b6c7c58c5960df18335552780c3ca668dea5e
add evaluation script for in-hospital mortality (ihm)
evaluation/evalutate_ihm.py
evaluation/evalutate_ihm.py
Python
0
@@ -0,0 +1,1637 @@ +import sklearn.utils as sk_utils%0Afrom mimic3models import metrics%0Aimport numpy as np%0Aimport pandas as pd%0Aimport argparse%0A%0A%0Adef main():%0A parser = argparse.ArgumentParser()%0A parser.add_argument('prediction', type=str)%0A parser.add_argument('--test_listfile', type=str, default='../data/in-hospital-mortality/test/listfile.csv')%0A parser.add_argument('--n_iters', type=int, default=10000)%0A args = parser.parse_args()%0A%0A pred_df = pd.read_csv(args.prediction, index_col=False)%0A test_df = pd.read_csv(args.test_listfile, index_col=False)%0A%0A df = test_df.merge(pred_df, left_on='stay', right_on='stay', how='left', suffixes=%5B'_l', '_r'%5D)%0A assert (df%5B'prediction'%5D.isnull().sum() == 0)%0A assert (df%5B'y_true_l'%5D.equals(df%5B'y_true_r'%5D))%0A%0A n_samples = df.shape%5B0%5D%0A data = np.zeros((n_samples, 2))%0A data%5B:, 0%5D = np.array(df%5B'prediction'%5D)%0A data%5B:, 1%5D = np.array(df%5B'y_true_l'%5D)%0A auroc_score = metrics.print_metrics_binary(data%5B:, 1%5D, data%5B:, 0%5D, verbose=0)%5B%22auroc%22%5D%0A%0A aucs = %5B%5D%0A for i in range(args.n_iters):%0A cur_data = sk_utils.resample(data, n_samples=len(data))%0A cur_auc = metrics.print_metrics_binary(cur_data%5B:, 1%5D, cur_data%5B:, 0%5D, verbose=0)%5B%22auroc%22%5D%0A aucs += %5Bcur_auc%5D%0A%0A print %22%7B%7D iterations%22.format(args.n_iters)%0A print %22ROC of AUC = %7B%7D%22.format(auroc_score)%0A print %22mean = %7B%7D%22.format(np.mean(aucs))%0A print %22median = %7B%7D%22.format(np.median(aucs))%0A print %22std = %7B%7D%22.format(np.std(aucs))%0A print %222.5%25 percentile = %7B%7D%22.format(np.percentile(aucs, 2.5))%0A print %2297.5%25 percentile = %7B%7D%22.format(np.percentile(aucs, 97.5))%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
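The resampling loop in this script is a plain bootstrap confidence interval; the same pattern, self-contained on toy data:

import numpy as np
from sklearn.utils import resample

rng = np.random.RandomState(0)
data = rng.normal(size=100)

# resample() draws with replacement by default, i.e. a bootstrap sample.
means = [resample(data, n_samples=len(data)).mean() for _ in range(1000)]
print(np.percentile(means, 2.5), np.percentile(means, 97.5))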
e3c17a893ef4e0790af05cc238ac9038923b115a
Create docs directory for sphinx integration
docs/__init__.py
docs/__init__.py
Python
0
@@ -0,0 +1,59 @@ +#TODO:Create DOCS files for later integration using Sphinx%0A
c206cfd940dd8ba58edb86f16691bcf50b6e5e30
Add modgraph.py demo from Michael Hohn <[email protected]>
tclpkg/gv/demo/modgraph.py
tclpkg/gv/demo/modgraph.py
Python
0
@@ -0,0 +1,960 @@ +#!/usr/bin/python%0A%0A# display the kernel module dependencies%0A%0A# author: Michael Hohn %[email protected]%3E%0A# based on: modgraph.tcl by John Ellson %[email protected]%3E%0A%0Aimport sys%0A# sys.path.append('/usr/lib/graphviz/python')%0Asys.path.append('/usr/lib64/graphviz/python')%0Aimport gv%0A%0Amodules = open(%22/proc/modules%22, 'r').readlines()%0A%0AG = gv.digraph(%22G%22)%0Agv.setv(G, 'rankdir', 'LR')%0Agv.setv(G, 'nodesep', '0.05')%0Agv.setv(G, 'node', 'shape', 'box')%0Agv.setv(G, 'node', 'width', '0')%0Agv.setv(G, 'node', 'height', '0')%0Agv.setv(G, 'node', 'margin', '.03')%0Agv.setv(G, 'node', 'fontsize', '8')%0Agv.setv(G, 'node', 'fontname', 'helvetica')%0Agv.setv(G, 'edge', 'arrowsize', '.4')%0A%0Afor rec in modules:%0A fields = rec.split(' ')%0A n = gv.node(G, fields%5B0%5D)%0A for usedby in fields%5B3%5D.split(','):%0A if (usedby != '-') & (usedby != ''):%0A gv.edge(n, gv.node(G, usedby))%0A%0Agv.layout(G, 'dot')%0A# The 'xlib' renderer is provided by graphviz-cairo%0Agv.render(G, 'xlib')%0A
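The field layout the demo relies on, shown on one canned /proc/modules record: column 0 is the module name and column 3 the comma-separated list of modules using it ('-' when empty):

rec = 'lockd 93081 2 nfsd, Live 0xffffffffa0384000'
fields = rec.split(' ')
usedby = [u for u in fields[3].split(',') if u not in ('-', '')]
print(fields[0], usedby)  # lockd ['nfsd']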
5c7a4547558e6f6959ae1878f56efef8716456c4
add script to convert distances into probabilities
scripts/distance2probability.py
scripts/distance2probability.py
Python
0.000006
@@ -0,0 +1,2884 @@ +#!/usr/bin/env python%0A# encoding: utf-8%0A%0A# The MIT License (MIT)%0A%0A# Copyright (c) 2014 Herv%C3%A9 BREDIN%0A%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A%0A# The above copyright notice and this permission notice shall be included in%0A# all copies or substantial portions of the Software.%0A%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE%0A# SOFTWARE.%0A%0A%22%22%22Convert distance to probability%0A%0AUsage:%0A distance2probability.py train %3Cdistance_matrix%3E %3Cgroundtruth_matrix%3E %3Cd2p_model%3E%0A distance2probability.py apply %3Cdistance_matrix%3E %3Cd2p_model%3E %3Cprobability_matrix%3E%0A distance2probability.py (-h %7C --help)%0A distance2probability.py --version%0A%0AOptions:%0A -h --help Show this screen.%0A --version Show version.%0A%0A%22%22%22%0A%0Afrom docopt import docopt%0Afrom pyannote.algorithms.stats.llr import LLRIsotonicRegression%0Aimport numpy as np%0Aimport pickle%0A%0A%0Adef do_train(distance_matrix, groundtruth_matrix, d2p_model):%0A%0A # load distance matrix%0A x = np.load(distance_matrix)%0A%0A # load groundtruth matrix%0A y = np.load(groundtruth_matrix)%0A%0A # train isotonic regression%0A ir = LLRIsotonicRegression()%0A ir.fit(x, y)%0A%0A # save regression%0A pickle.dump(ir, d2p_model)%0A%0A%0Adef do_apply(distance_matrix, d2p_model, probability_matrix):%0A%0A # load distance matrix%0A x = np.load(distance_matrix)%0A%0A # load regression%0A ir = pickle.load(d2p_model)%0A%0A # apply isotonic regression%0A y = ir.apply(x)%0A%0A # save probability matrix%0A np.save(probability_matrix, y)%0A%0A%0Aif __name__ == '__main__':%0A%0A arguments = docopt(__doc__, version='0.1')%0A print arguments%0A%0A if arguments%5B'train'%5D:%0A distance_matrix = arguments%5B'%3Cdistance_matrix%3E'%5D%0A groundtruth_matrix = arguments%5B'%3Cgroundtruth_matrix%3E'%5D%0A d2p_model = arguments%5B'%3Cd2p_model%3E'%5D%0A do_train(distance_matrix, groundtruth_matrix, d2p_model)%0A%0A if arguments%5B'apply'%5D:%0A distance_matrix = arguments%5B'%3Cdistance_matrix%3E'%5D%0A d2p_model = arguments%5B'%3Cd2p_model%3E'%5D%0A probability_matrix = arguments%5B'%3Cprobability_matrix%3E'%5D%0A do_apply(distance_matrix, d2p_model, probability_matrix)%0A
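pickle.dump and pickle.load operate on file objects rather than path strings, so the d2p_model argument above needs an explicit open; a sketch of the save/load steps under that assumption (helper names are illustrative):

import pickle

def save_model(model, path):
    with open(path, 'wb') as fd:
        pickle.dump(model, fd)

def load_model(path):
    with open(path, 'rb') as fd:
        return pickle.load(fd)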
2197e16cf20bba5d373f4b7a250b8f1190be8ede
Add focus attribute example.
examples/focused-windows.py
examples/focused-windows.py
Python
0
@@ -0,0 +1,734 @@ +#!/usr/bin/env python3%0A%0Afrom argparse import ArgumentParser%0Aimport i3ipc%0A%0Ai3 = i3ipc.Connection()%0A%0Adef focused_windows():%0A tree = i3.get_tree()%0A%0A workspaces = tree.workspaces()%0A for workspace in workspaces:%0A container = workspace%0A%0A while container:%0A if not hasattr(container, 'focus') %5C%0A or not container.focus:%0A break%0A%0A container_id = container.focus%5B0%5D%0A container = container.find_by_id(container_id)%0A%0A if container:%0A coname = container.name%0A wsname = workspace.name%0A%0A print('WS', wsname +':', coname)%0A%0A%0Aif __name__ == '__main__':%0A parser = ArgumentParser(description = 'Print the names of the focused window of each workspace.')%0A parser.parse_args()%0A%0A focused_windows()%0A
e7146bbee86ea744d080f18a4f27def9cb26e33e
add corpus_test1.py to see how to parse music21 songs
experiments/corpus_test1.py
experiments/corpus_test1.py
Python
0.000001
@@ -0,0 +1,1585 @@ +#!/usr/bin/env python3%0A# Copyright 2016 Curtis Sand%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22An experiment to help determine the best way to use music21 objects.%0A%0AThe music21 libaries have a lot of purposes beyond what I need so for now I%0Athink all I need is to know how to access the note pitches and their positions%0Aand durations within the work. From those three bits of info I can then%0Aconstruct a waveform representing that music given a tempo to define the length%0Aof a quarter note.%0A%22%22%22%0A%0Aimport numpy%0A%0Afrom music21 import corpus%0A%0Afrom potty_oh.common import get_cmd_line_parser%0Afrom potty_oh.common import call_main%0A%0A%0Adef main():%0A parser = get_cmd_line_parser(description=__doc__)%0A parser.parse_args()%0A%0A work_path = numpy.random.choice(corpus.getComposer('bach'))%0A work = corpus.parse(work_path)%0A for note in work.flat.notes:%0A print('%7B%7D %5B%7B%7D%5D: %7B%7D %7B%7D'.format(note.offset, note.duration.quarterLength,%0A note.pitch, note.frequency))%0A return 0%0A%0A%0Aif __name__ == %22__main__%22:%0A call_main(main)%0A
6ccdf23c67af632a46017d63b5f51d2c207be0ab
Add merge_duplicate_gene_proteins.py script
scheduled_bots/scripts/merge_duplicate_gene_proteins.py
scheduled_bots/scripts/merge_duplicate_gene_proteins.py
Python
0.000002
@@ -0,0 +1,1220 @@ +from tqdm import tqdm%0A%0Afrom wikidataintegrator.wdi_core import WDItemEngine, MergeError%0Afrom wikidataintegrator.wdi_login import WDLogin%0Afrom scheduled_bots.local import WDUSER, WDPASS%0A%0Alogin = WDLogin(WDUSER, WDPASS)%0As_protein = %22%22%22%0ASELECT DISTINCT ?item1 ?item2 ?value %7B%7B%0A%09?item1 wdt:P352 ?value .%0A%09?item2 wdt:P352 ?value .%0A ?item1 wdt:P31%7Cwdt:P279 wd:Q8054 .%0A ?item2 wdt:P31%7Cwdt:P279 wd:Q8054 .%0A FILTER NOT EXISTS %7B%7B?item1 wdt:P703 wd:Q15978631%7D%7D%0A%09FILTER( ?item1 != ?item2 && STR( ?item1 ) %3C STR( ?item2 ) ) .%0A%7D%7D%22%22%22%0A%0As_gene = %22%22%22%0ASELECT DISTINCT ?item1 ?item2 ?value %7B%7B%0A%09?item1 wdt:P351 ?value .%0A%09?item2 wdt:P351 ?value .%0A ?item1 wdt:P703 ?taxon1 .%0A ?item2 wdt:P703 ?taxon2 .%0A%09FILTER( ?item1 != ?item2 && STR( ?item1 ) %3C STR( ?item2 ) && ?taxon1 = ?taxon2) .%0A FILTER NOT EXISTS %7B%7B?item1 wdt:P703 wd:Q15978631%7D%7D%0A%7D%7D%22%22%22%0A%0As = s_gene%0A%0Aitems = %5B%7Bk: v%5B'value'%5D.split(%22/%22)%5B-1%5D for k, v in x.items()%7D for x in%0A WDItemEngine.execute_sparql_query(s)%5B'results'%5D%5B'bindings'%5D%5D%0Afor x in tqdm(items):%0A try:%0A WDItemEngine.merge_items(from_id=x%5B'item2'%5D, to_id=x%5B'item1'%5D, login_obj=login, ignore_conflicts='statement%7Cdescription%7Csitelink')%0A except MergeError as e:%0A print(e)%0A pass%0A
495da73f305a2a0e79a28d251b5b93caea06656d
Add UglifyJS as a filter.
mediagenerator/filters/uglifier.py
mediagenerator/filters/uglifier.py
Python
0
@@ -0,0 +1,1416 @@ +from django.conf import settings%0Afrom django.utils.encoding import smart_str%0Afrom mediagenerator.generators.bundles.base import Filter%0A%0Aclass Uglifier(Filter):%0A def __init__(self, **kwargs):%0A super(Uglifier, self).__init__(**kwargs)%0A assert self.filetype == 'js', (%0A 'Uglifier only supports compilation to js. '%0A 'The parent filter expects %22%25s%22.' %25 self.filetype)%0A%0A def get_output(self, variation):%0A # We import this here, so App Engine Helper users don't get import%0A # errors.%0A from subprocess import Popen, PIPE%0A for input in self.get_input(variation):%0A args = %5B'uglifyjs'%5D%0A try:%0A args = args + settings.UGLIFIER_OPTIONS%0A except AttributeError:%0A pass%0A try:%0A cmd = Popen(args,%0A stdin=PIPE, stdout=PIPE, stderr=PIPE,%0A universal_newlines=True)%0A output, error = cmd.communicate(smart_str(input))%0A assert cmd.wait() == 0, 'Command returned bad result:%5Cn%25s' %25 error%0A yield output.decode('utf-8')%0A except Exception, e:%0A raise ValueError(%22Failed to run UglifyJs. %22%0A %22Please make sure you have Node.js and UglifyJS installed %22%0A %22and that it's in your PATH.%5Cn%22%0A %22Error was: %25s%22 %25 e)%0A
660a3c5f3f8a4c63c21c27ce58c5639d37409ae1
add script to filter San Francisco census tracts
filter_tracts.py
filter_tracts.py
Python
0
@@ -0,0 +1,317 @@ +import json%0A%0Acalif_tracts_data = open('tracts.json')%0A%0Acalif_tracts = json.load(calif_tracts_data)%0A%0Asf_tracts = %5B%5D%0Afor r in calif_tracts%5B%22features%22%5D:%0A if r%5B%22properties%22%5D%5B%22COUNTY%22%5D == %22075%22:%0A sf_tracts.append(r)%0A%0Acalif_tracts_data.close()%0Aprint json.dumps(%7B%22type%22: %22FeatureCollection%22, %22features%22: sf_tracts%7D)
e687dce8c8441728f1af6336497f7a131730db4f
Add untracked campaigns.py
framework/auth/campaigns.py
framework/auth/campaigns.py
Python
0.000001
@@ -0,0 +1,1000 @@ +import httplib as http%0A%0Afrom framework.exceptions import HTTPError%0A%0Afrom website import mails%0A%0AVALID_CAMPAIGNS = (%0A 'prereg',%0A)%0A%0AEMAIL_TEMPLATE_MAP = %7B%0A 'prereg': mails.CONFIRM_EMAIL_PREREG%0A%7D%0A%0Adef email_template_for_campaign(campaign, default=None):%0A if campaign in VALID_CAMPAIGNS:%0A try:%0A return EMAIL_TEMPLATE_MAP%5Bcampaign%5D%0A except KeyError as e:%0A if default:%0A return default%0A else:%0A raise e%0A%0Adef campaign_for_user(user):%0A campaigns = %5Btag for tag in user.system_tags if tag in VALID_CAMPAIGNS%5D%0A if campaigns:%0A return campaigns%5B0%5D%0A%0Adef campaign_url_for(campaign):%0A # Defined inside this function to ensure a request context%0A REDIRECT_MAP = %7B%0A 'prereg': '/prereg/'%0A %7D%0A if campaign not in VALID_CAMPAIGNS:%0A raise HTTPError(http.BAD_REQUEST)%0A else:%0A try:%0A return REDIRECT_MAP%5Bcampaign%5D%0A except KeyError:%0A raise HTTPError(http.NOT_FOUND)%0A
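How the campaign helpers compose, with a stand-in user object (real callers pass an OSFUser; campaign_url_for additionally needs a request context, per the comment in the code):

class StubUser(object):
    # Only the attribute campaign_for_user reads is stubbed out here.
    system_tags = ['prereg', 'unrelated_tag']

campaign = campaign_for_user(StubUser())           # 'prereg'
template = email_template_for_campaign(campaign)   # mails.CONFIRM_EMAIL_PREREG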