field         type           range
commit        stringlengths  40 to 40
subject       stringlengths  1 to 3.25k
old_file      stringlengths  4 to 311
new_file      stringlengths  4 to 311
old_contents  stringlengths  0 to 26.3k
lang          stringclasses  3 values
proba         float64        0 to 1
diff          stringlengths  0 to 7.82k
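Each record below lists these fields in order; empty fields (such as an absent old_contents) are simply omitted, and the diff field is stored percent-encoded (%0A for a newline, %22 for a double quote, %25 for a literal percent sign). A minimal sketch of recovering a readable diff from one record, assuming the dump has already been parsed into one dict per row; the loading step is not shown, and the row literal is abridged from the first record below:

    from urllib.parse import unquote

    # Abridged copy of the first record in this dump; the "..." marks
    # truncation for illustration -- the real diff field is longer.
    row = {
        "commit": "e5d58cc795541b5e4e8f791a441a4369df17ee19",
        "subject": "Add first exercise",
        "old_file": "cuadradoDentroDeRangoDado.py",
        "new_file": "cuadradoDentroDeRangoDado.py",
        "lang": "Python",
        "proba": 0.000142,
        "diff": "@@ -0,0 +1,564 @@ +#!/usr/bin/env python%0A%0Adef main():%0A ...",
    }

    # unquote() reverses the percent-encoding, restoring newlines and
    # special characters so the unified diff can be read or applied as text.
    print(unquote(row["diff"]))

Note that only the diff field is percent-encoded; old_contents, where present, is stored as plain text with its newlines flattened to spaces.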
e5d58cc795541b5e4e8f791a441a4369df17ee19
Add first exercise
cuadradoDentroDeRangoDado.py
cuadradoDentroDeRangoDado.py
Python
0.000142
@@ -0,0 +1,564 @@ +#!/usr/bin/env python%0A%0Adef main():%0A def cuadr(num):%0A return num * num%0A%0A def nom_cuad(num):%0A return (%22%25d -%3E %25d%22) %25 (num, cuadr(num))%0A %0A def promptCuadr():%0A myNum1 = input(%22Enter num1: %22)%0A myNum2 = input(%22Enter num2: %22)%0A minimum = min(myNum1, myNum2)%0A maximum = max(myNum1, myNum2)%0A arr = %5Bnom_cuad(x) for x in range(minimum, maximum) + %5Bmaximum%5D%5D%0A multiline = %22%5Cn%22.join(arr)%0A print multiline%0A %0A print %22==== Mostrar el cuadrado de los numeros dentro del rango introducido ====%22%0A promptCuadr()%0A print %22Operacion finalizada%22%0A%0Amain()%0A
8e1580d8d9a5239fd905b77aa6e6ac16adf7f8b1
Document GlobalStack
cura/Settings/GlobalStack.py
cura/Settings/GlobalStack.py
# Copyright (c) 2017 Ultimaker B.V. # Cura is released under the terms of the AGPLv3 or higher. from typing import Any from PyQt5.QtCore import pyqtProperty, pyqtSlot, pyqtSignal from UM.Decorators import override from UM.MimeTypeDatabase import MimeType, MimeTypeDatabase from UM.Settings.ContainerStack import ContainerStack, InvalidContainerStackError from UM.Settings.InstanceContainer import InstanceContainer from UM.Settings.SettingInstance import InstanceState from UM.Settings.DefinitionContainer import DefinitionContainer from UM.Settings.ContainerRegistry import ContainerRegistry from UM.Settings.Interfaces import ContainerInterface from . import Exceptions from .CuraContainerStack import CuraContainerStack class GlobalStack(CuraContainerStack): def __init__(self, container_id: str, *args, **kwargs): super().__init__(container_id, *args, **kwargs) self.addMetaDataEntry("type", "machine") # For backward compatibility self._extruders = [] # This property is used to track which settings we are calculating the "resolve" for # and if so, to bypass the resolve to prevent an infinite recursion that would occur # if the resolve function tried to access the same property it is a resolve for. self._resolving_settings = set() @pyqtProperty("QVariantList") def extruders(self) -> list: return self._extruders def addExtruder(self, extruder): extruder_count = self.getProperty("machine_extruder_count", "value") if extruder_count and len(self._extruders) + 1 > extruder_count: raise Exceptions.TooManyExtrudersError("Tried to add extruder to {id} but its extruder count is {count}".format(id = self.id, count = extruder_count)) self._extruders.append(extruder) ## Overridden from ContainerStack @override(ContainerStack) def getProperty(self, key: str, property_name: str) -> Any: if not self.definition.findDefinitions(key = key): return None if self._shouldResolve(key, property_name): self._resolving_settings.add(key) resolve = super().getProperty(key, "resolve") self._resolving_settings.remove(key) if resolve is not None: return resolve return super().getProperty(key, property_name) ## Overridden from ContainerStack @override(ContainerStack) def setNextStack(self, next_stack: ContainerStack) -> None: raise Exceptions.InvalidOperationError("Global stack cannot have a next stack!") def _shouldResolve(self, key: str, property_name: str) -> bool: if property_name is not "value": # Do not try to resolve anything but the "value" property return False if key in self._resolving_settings: # To prevent infinite recursion, if getProperty is called with the same key as # we are already trying to resolve, we should not try to resolve again. Since # this can happen multiple times when trying to resolve a value, we need to # track all settings that are being resolved. return False setting_state = super().getProperty(key, "state") if setting_state is not None and setting_state != InstanceState.Default: # When the user has explicitly set a value, we should ignore any resolve and # just return that value. return False return True ## private: global_stack_mime = MimeType( name = "application/x-cura-globalstack", comment = "Cura Global Stack", suffixes = ["global.cfg"] ) MimeTypeDatabase.addMimeType(global_stack_mime) ContainerRegistry.addContainerTypeByName(GlobalStack, "global_stack", global_stack_mime.name)
Python
0
@@ -722,16 +722,89 @@ rStack%0A%0A +## Represents the Global or Machine stack and its related containers.%0A#%0A class Gl @@ -1385,102 +1385,527 @@ -@pyqtProperty(%22QVariantList%22)%0A def extruders(self) -%3E list:%0A return self._extruders%0A +## Get the list of extruders of this stack.%0A #%0A # %5Creturn The extruders registered with this stack.%0A @pyqtProperty(%22QVariantList%22)%0A def extruders(self) -%3E list:%0A return self._extruders%0A%0A ## Add an extruder to the list of extruders of this stack.%0A #%0A # %5Cparam extruder The extruder to add.%0A #%0A # %5Cthrows Exceptions.TooManyExtrudersError Raised when trying to add an extruder while we%0A # already have the maximum number of extruders. %0A @@ -1935,17 +1935,41 @@ extruder -) +: ContainerStack) -%3E None :%0A @@ -2349,32 +2349,577 @@ ContainerStack%0A + #%0A # This will return the value of the specified property for the specified setting,%0A # unless the property is %22value%22 and that setting has a %22resolve%22 function set.%0A # When a resolve is set, it will instead try and execute the resolve first and%0A # then fall back to the normal %22value%22 property.%0A #%0A # %5Cparam key The setting key to get the property of.%0A # %5Cparam property_name The property to get the value of.%0A #%0A # %5Creturn The value of the property for the specified setting, or None if not found.%0A @override(Co @@ -3444,24 +3444,123 @@ tainerStack%0A + #%0A # This will simply raise an exception since the Global stack cannot have a next stack.%0A @overrid @@ -3731,16 +3731,150 @@ ack!%22)%0A%0A + # protected:%0A%0A # Determine whether or not we should try to get the %22resolve%22 property instead of the%0A # requested property.%0A def
aab833a4a267ed46e83a5968e87d357ae3a5a12b
Add new DemoStream example corresponding to the LSL4Unity Project
utils/LSL_Tests/RecieveDemoStream.py
utils/LSL_Tests/RecieveDemoStream.py
Python
0
@@ -0,0 +1,811 @@ +%22%22%22Example program to show how to read a marker time series from LSL.%22%22%22%0Aimport sys%0Asys.path.append('./pylsl') # help python find pylsl relative to this example program%0Afrom pylsl import StreamInlet, resolve_stream%0A%0A# first resolve an EEG stream on the lab network%0AtargetStreamType = 'Unity.Quaternion'%0Aprint 'looking for an stream of type ' + targetStreamType%0Astreams = resolve_stream('type', targetStreamType)%0A%0AstreamsFound = len(streams)%0A%0Aif (streamsFound %3E 0):%0A%09print 'found ' + str(streamsFound)%0Aelse:%0A%09print 'found none',%0A%0A# create a new inlet to read from the stream%0Ainlet = StreamInlet(streams%5B0%5D)%0A%0Awhile True:%0A%09%0A%09sample, timestamp = inlet.pull_sample()%0A%0A%09if(sample):%0A%09%09print %22%5C033%5BK%22, str(timestamp) + ' Quaternion: ' + ' '.join(str(sample%5Bx%5D) for x in range(0,len(sample))), %22%5Cr%22,%0A%09%09sys.stdout.flush()
897371dac52c38b96b6a1a92cd8ce36e9b2d1003
Add django admin page for HQOauthApplication
corehq/apps/hqwebapp/admin.py
corehq/apps/hqwebapp/admin.py
Python
0
@@ -0,0 +1,776 @@ +from django.contrib import admin%0A%0Afrom corehq.apps.hqwebapp.models import HQOauthApplication%0A%0A%[email protected](HQOauthApplication)%0Aclass HQOauthApplicationAdmin(admin.ModelAdmin):%0A list_display = (%0A %22id%22, %22application_id%22, %22application_name%22, %22application_user%22, %22application_client_type%22,%0A %22application_authorization_grant_type%22%0A )%0A%0A def application_id(self, obj):%0A return obj.application.id%0A%0A def application_name(self, obj):%0A return obj.application.name%0A%0A def application_user(self, obj):%0A return obj.application.user.id%0A%0A def application_client_type(self, obj):%0A return obj.application.client_type%0A%0A def application_authorization_grant_type(self, obj):%0A return obj.application.authorization_grant_type%0A
fb884d3453b42b68aa7ecc7b0523bf1460b6b9e0
Add missing EFS patch
scripts/patches/efs.py
scripts/patches/efs.py
Python
0.000001
@@ -0,0 +1,320 @@ +patches = %5B%0A %7B%0A %22op%22: %22replace%22,%0A %22path%22: %22/ResourceTypes/AWS::EFS::AccessPoint/Properties/AccessPointTags/ItemType%22,%0A %22value%22: %22Tag%22,%0A %7D,%0A %7B%0A %22op%22: %22replace%22,%0A %22path%22: %22/ResourceTypes/AWS::EFS::FileSystem/Properties/FileSystemTags/ItemType%22,%0A %22value%22: %22Tag%22,%0A %7D,%0A%5D%0A
1ae811c79b1cbc28b2f71e8f2bb01b44cc3aa2b9
Improve import malware hashes cron
cron/import_malware_hashes.py
cron/import_malware_hashes.py
Python
0.000004
@@ -0,0 +1,3200 @@ +#!/usr/bin/env python%0A%0A# This Source Code Form is subject to the terms of the Mozilla Public%0A# License, v. 2.0. If a copy of the MPL was not distributed with this%0A# file, You can obtain one at http://mozilla.org/MPL/2.0/.%0A# Copyright (c) 2017 Mozilla Corporation%0A#%0A# Contributors:%0A# Brandon Myers [email protected]%0A%0Aimport os%0Aimport sys%0Afrom configlib import getConfig, OptionParser%0Afrom datetime import datetime%0Afrom datetime import timedelta%0A%0Afrom pytx.access_token import access_token%0Afrom pytx import Malware%0A%0Asys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../lib'))%0Afrom utilities.logger import logger, initLogger%0Afrom utilities.toUTC import toUTC%0Afrom elasticsearch_client import ElasticsearchClient%0A%0Afrom state import State%0A%0A%0Adef pull_malware_hashes(since_date, until_date):%0A query_params = %7B%0A 'since': str(since_date),%0A 'until': str(until_date),%0A 'full_response': True,%0A %7D%0A logger.info('Querying threat exchange with params %7B%7D'.format(query_params))%0A%0A results = Malware.objects(**query_params)%0A%0A malware_data = %5B%5D%0A for result in results%5B'data'%5D:%0A created_date = toUTC(datetime.now()).isoformat()%0A es_doc = %7B%0A 'created_on': created_date,%0A 'details': result%0A %7D%0A malware_data.append(es_doc)%0A%0A return malware_data%0A%0A%0Adef main():%0A logger.info('Connecting to Elasticsearch')%0A client = ElasticsearchClient(options.esservers)%0A logger.info('Connecting to threat exchange')%0A access_token(options.appid, options.appsecret)%0A state = State(options.state_file_name)%0A current_timestamp = toUTC(datetime.now()).isoformat()%0A # We're setting a default for the past 2 days of data%0A # if there isnt a state file%0A since_date_obj = toUTC(datetime.now()) - timedelta(days=2)%0A since_date = since_date_obj.isoformat()%0A if 'lastrun' in state.data.keys():%0A since_date = state.data%5B'lastrun'%5D%0A%0A malware_hashes_docs = pull_malware_hashes(since_date=since_date, until_date=current_timestamp)%0A for malware_hash_doc in malware_hashes_docs:%0A client.save_object(index='threat-exchange', doc_type='malware_hashes', body=malware_hash_doc)%0A state.data%5B'lastrun'%5D = current_timestamp%0A state.save()%0A%0A%0Adef initConfig():%0A options.output = getConfig('output', 'stdout', options.configfile)%0A options.sysloghostname = getConfig('sysloghostname', 'localhost', options.configfile)%0A options.syslogport = getConfig('syslogport', 514, options.configfile)%0A options.state_file_name = getConfig('state_file_name', '%7B0%7D.state'.format(sys.argv%5B0%5D), options.configfile)%0A # threat exchange options%0A options.appid = getConfig('appid', '', options.configfile)%0A options.appsecret = getConfig('appsecret', '', options.configfile)%0A # elastic search server settings%0A options.esservers = list(getConfig('esservers', 'http://localhost:9200', options.configfile).split(','))%0A%0A%0Aif __name__ == '__main__':%0A parser = OptionParser()%0A parser.add_option(%22-c%22, dest='configfile', default=sys.argv%5B0%5D.replace('.py', '.conf'), help=%22configuration file to use%22)%0A (options, args) = parser.parse_args()%0A initConfig()%0A initLogger(options)%0A main()%0A
58852970847bab30fee18e6ab824b24bc75d389f
Add the package-cleaning script
clean-packages.py
clean-packages.py
Python
0
@@ -0,0 +1,1219 @@ +# WARNING: HERE BE DRAGONS%0A%0Aimport yaml%0Aimport os.path%0Aimport urllib.parse%0Afrom copy import deepcopy%0A%0Aurllib.parse.uses_relative.append('github')%0Aurllib.parse.uses_netloc.append('github')%0A%0Awith open('packages.yaml') as f:%0A package_db = yaml.load(f)%0A%0Adef strip_prefix(prefix, url):%0A for n in range(len(url) - 1, 0, -1):%0A component = url%5Bn:%5D%0A joined = urllib.parse.urljoin(prefix, component)%0A if joined == url:%0A return component%0A return url%0A%0Adef clean_package(value):%0A backup = deepcopy(value)%0A if 'base' in value:%0A old_base = value%5B'base'%5D%0A del value%5B'base'%5D%0A value%5B'files'%5D = %7Bfn: urllib.parse.urljoin(old_base, val) for fn, val in value%5B'files'%5D.items()%7D%0A prefix = os.path.commonprefix(value%5B'files'%5D.values())%0A if '/' not in prefix:%0A return backup%0A prefix = prefix%5B0:prefix.rindex('/')+1%5D%0A if len(prefix) %3E 12:%0A value%5B'base'%5D = prefix%0A value%5B'files'%5D = %7Bfn: strip_prefix(prefix, url) for fn, url in value%5B'files'%5D.items()%7D%0A return value%0A%0Apackage_db = %7Bkey: clean_package(value) for key, value in package_db.items()%7D%0A%0Awith open('packages.yaml', 'w') as f:%0A yaml.dump(package_db, f, default_flow_style = False)%0A%0A
2e0fbcb3ec1c2f0311d7ee4bbfeac33662f66089
Monitor process using subprocess module
monitor_process.py
monitor_process.py
Python
0.000001
@@ -0,0 +1,735 @@ +import subprocess%0A%0A%22%22%22 If the program is running %22ps -ef %7C grep program%22 will return 2 or more rows %0A(one with the program itself and the second one with %22grep program%22). %0AOtherwise, it will only return one row (%22grep program%22) %0AYou can trigger the alert on this if required.%0A%22%22%22%0A%0Adef monitor_process(name):%0A%09args=%5B'ps','-ef'%5D%0A%09args1=%5B'grep','-c','%25s' %25name%5D%0A%09process_ps = subprocess.Popen(args, stdout=subprocess.PIPE, shell=False)%0A%09process_monitor = subprocess.Popen(args1, stdin=process_ps.stdout, stdout=subprocess.PIPE, shell=False)%0A%09# Allow process_ps to receive a SIGPIPE if process_monitor exits.%0A%09process_ps.stdout.close()%0A%09return process_monitor.communicate()%5B0%5D%0A%0A%0Aif __name__== %22__main__%22:%0A%09print monitor_process('firefox')%0A
0d956a8137f5bd2cc30f5163c717858e4a1172ee
delete a module never used
nova/scheduler/filters/image_props_filter.py
nova/scheduler/filters/image_props_filter.py
# Copyright (c) 2011-2012 OpenStack, LLC # Copyright (c) 2012 Canonical Ltd # Copyright (c) 2012 SUSE LINUX Products GmbH # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova.openstack.common import log as logging from nova.scheduler import filters from nova import utils LOG = logging.getLogger(__name__) class ImagePropertiesFilter(filters.BaseHostFilter): """Filter compute nodes that satisfy instance image properties. The ImagePropertiesFilter filters compute nodes that satisfy any architecture, hpervisor type, or virtual machine mode properties specified on the instance's image properties. Image properties are contained in the image dictionary in the request_spec. """ def _instance_supported(self, capabilities, image_props): img_arch = image_props.get('architecture', None) img_h_type = image_props.get('hypervisor_type', None) img_vm_mode = image_props.get('vm_mode', None) checked_img_props = (img_arch, img_h_type, img_vm_mode) # Supported if no compute-related instance properties are specified if not any(checked_img_props): return True supp_instances = capabilities.get('supported_instances', None) # Not supported if an instance property is requested but nothing # advertised by the host. if not supp_instances: LOG.debug(_("Instance contains properties %(image_props)s, " "but no corresponding capabilities are advertised " "by the compute node"), locals()) return False def _compare_props(props, other_props): for i in props: if i and i not in other_props: return False return True for supp_inst in supp_instances: if _compare_props(checked_img_props, supp_inst): LOG.debug(_("Instance properties %(image_props)s " "are satisfied by compute host capabilities " "%(capabilities)s"), locals()) return True LOG.debug(_("Instance contains properties %(image_props)s " "that are not provided by the compute node " "capabilities %(capabilities)s"), locals()) return False def host_passes(self, host_state, filter_properties): """Check if host passes specified image properties. Returns True for compute nodes that satisfy image properties contained in the request_spec. """ spec = filter_properties.get('request_spec', {}) image_props = spec.get('image', {}).get('properties', {}) capabilities = host_state.capabilities if not self._instance_supported(capabilities, image_props): LOG.debug(_("%(host_state)s does not support requested " "instance_properties"), locals()) return False return True
Python
0.000007
@@ -801,31 +801,8 @@ ers%0A -from nova import utils%0A %0A%0ALO
0ae60d170c3a8fd33fac3b1283e646a7018027df
Add expertise removal migration
qipr_approver/approver/migrations/0007_auto_20170227_1533.py
qipr_approver/approver/migrations/0007_auto_20170227_1533.py
Python
0
@@ -0,0 +1,771 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.5 on 2017-02-27 15:33%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('approver', '0006_auto_20170222_1424'),%0A %5D%0A%0A operations = %5B%0A migrations.RemoveField(%0A model_name='expertise',%0A name='created_by',%0A ),%0A migrations.RemoveField(%0A model_name='expertise',%0A name='last_modified_by',%0A ),%0A migrations.AlterField(%0A model_name='person',%0A name='expertise',%0A field=models.ManyToManyField(to='approver.Descriptor'),%0A ),%0A migrations.DeleteModel(%0A name='Expertise',%0A ),%0A %5D%0A
b629a8e6346359683e637fd8e2f34f1d704ad1bc
Add missing test
test/test_full.py
test/test_full.py
Python
0.000383
@@ -0,0 +1,258 @@ +import numpy as np%0Afrom util.full import matrix%0A%0Adef assert_(this, ref):%0A print this%0A print ref%0A assert np.allclose(this, ref)%0A%0Adef test_diag():%0A ref = %5B%5B1, 0, 0%5D, %5B0, 1, 0%5D, %5B0, 0, 1%5D%5D%0A this = matrix.diag(%5B1,1,1%5D)%0A assert_(this, ref)%0A %0A
5e9b804ef20d71aa84cb4d3cdd8b3bad9863cf11
add validator
sections/validators.py
sections/validators.py
Python
0.000005
@@ -0,0 +1,246 @@ +import re%0Afrom django.core.validators import RegexValidator%0A%0A%0Asection_name_validator = RegexValidator(%0A r'%5E%5Ba-zA-Z%5D%5Ba-zA-Z0-9%5D%7B1,19%7D$',%0A 'This field can contain only characters a-zA-Z0-9 and be max 20 characters long',%0A code='invalid'%0A)%0A
e7db8f3dc4d945185a99b5b62ae0b528959651ac
add python version
versioncheck/python_version.py
versioncheck/python_version.py
Python
0.000002
@@ -0,0 +1,916 @@ +from invoke import task%0Afrom subprocess import call%0Aimport invoke %0A%0Adef check_invoke_version(ctx):%0A minimal_verion = %220.15.0%22%0A if minimal_verion %3E invoke.__version__:%0A print(%22Your python-invoke version is too old (currently %22+invoke.__version__+%22). Please update to version %22+minimal_verion+%22 or higher.%22)%0A print(%22call: pip install invoke --upgrade%22)%0A correct = False%0A response = False%0A print(%22%5CnDo you want to resume with a old version? %5BYES/NO%5D?%22)%0A while response != True:%0A choice = raw_input().lower()%0A if choice in yes:%0A correct = True%0A response = True%0A elif choice in no:%0A correct = False%0A response = True%0A else:%0A sys.stdout.write(%22Please respond with 'yes' or 'no'%22)%0A%0A if correct == False:%0A return False%0A%0A return True%0A
ca9ed2756a12a2587f5b4d021597d2229196da50
Add migration to add china region
api/common/migrations/0007_add_china_region.py
api/common/migrations/0007_add_china_region.py
Python
0
@@ -0,0 +1,596 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.7 on 2017-06-24 21:52%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Adef forwards(apps, schema_editor):%0A Region = apps.get_model('common.Region')%0A region_to_add = 'China'%0A try:%0A Region.objects.get(name=region_to_add)%0A except Region.DoesNotExist:%0A Region.objects.create(name=region_to_add)%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('common', '0006_emailrecord'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(forwards, migrations.RunPython.noop)%0A %5D%0A
680b2cb1488f83aef5b45476e23bd93a90069872
Create Content Loader app to Herd/DM standards - Configure Pyinstaller
herd-code/herd-tools/herd-content-loader/herdcl/hook-otags.py
herd-code/herd-tools/herd-content-loader/herdcl/hook-otags.py
Python
0
@@ -0,0 +1,173 @@ +hiddenimports = %5B%0A 'numpy',%0A 'pandas._libs.tslibs.timedeltas',%0A 'pandas._libs.tslibs.nattype',%0A 'pandas._libs.tslibs.np_datetime',%0A 'pandas._libs.skiplist'%0A%5D%0A
56d14e7b0386588afd39f2413fafe0b9ba41806d
Access checking unit tests for SlotsTransferAdminPage.
tests/app/soc/modules/gsoc/views/test_slot_transfer_admin.py
tests/app/soc/modules/gsoc/views/test_slot_transfer_admin.py
Python
0
@@ -0,0 +1,1808 @@ +# Copyright 2013 the Melange authors.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22Unit tests for slot transfer admin view.%22%22%22%0A%0Afrom tests import profile_utils%0Afrom tests import test_utils%0A%0A%0Aclass SlotsTransferAdminPageTest(test_utils.GSoCDjangoTestCase):%0A %22%22%22Unit tests for SlotsTransferAdminPage class.%22%22%22%0A%0A def setUp(self):%0A self.init()%0A self.url = '/gsoc/admin/slots/transfer/%25s' %25 self.gsoc.key().name()%0A%0A def testLoneUserAccessForbidden(self):%0A response = self.get(self.url)%0A self.assertResponseForbidden(response)%0A self.assertErrorTemplatesUsed(response)%0A%0A def testStudentAccessForbidden(self):%0A self.data.createStudent()%0A response = self.get(self.url)%0A self.assertResponseForbidden(response)%0A self.assertErrorTemplatesUsed(response)%0A%0A def testMentorAccessForbidden(self):%0A self.data.createMentor(self.org)%0A response = self.get(self.url)%0A self.assertResponseForbidden(response)%0A self.assertErrorTemplatesUsed(response)%0A%0A def testOrgAdminAccessForbidden(self):%0A self.data.createOrgAdmin(self.org)%0A response = self.get(self.url)%0A self.assertResponseForbidden(response)%0A self.assertErrorTemplatesUsed(response)%0A%0A def testHostAccessGranted(self):%0A self.data.createHost()%0A response = self.get(self.url)%0A self.assertResponseOK(response)%0A
db446bf6dc4255f556f20235d2bdc28fa056ad46
Add list_owners.py to list shared folder owners
list_owners.py
list_owners.py
Python
0
@@ -0,0 +1,2115 @@ +%22%22%22List all shared folders and their owners.%22%22%22%0A%0Aimport logging%0Aimport os%0Afrom typing import Iterator%0A%0Aimport dropbox%0A%0Afrom backup import File, setup_logging, get_members, get_files%0A%0A%0Adef get_folder_members(team: dropbox.DropboxTeam,%0A folder: File) %5C%0A -%3E Iterator%5Bdropbox.sharing.UserMembershipInfo%5D:%0A %22%22%22Yield UserMembershipInfo objects which contain access level information%0A (whether user is an owner, editor or viewer of a shared folder).%0A %22%22%22%0A user = team.as_user(folder.member.profile.team_member_id)%0A members = user.sharing_list_folder_members(folder.file.shared_folder_id)%0A%0A for member in members.users:%0A yield member%0A%0A while members.cursor:%0A members = user.sharing_list_folder_members_continue(members.cursor)%0A for member in members.users:%0A yield member%0A%0A%0Adef main():%0A setup_logging()%0A logger = logging.getLogger('main')%0A%0A logger.info('Please wait up to tens of minutes...')%0A%0A shared_folders = set()%0A team = dropbox.DropboxTeam(os.environ%5B'DROPBOX_TEAM_TOKEN'%5D)%0A%0A for member in get_members(team):%0A logger.debug(f'Checking %7Bmember.profile.name.display_name%7D')%0A%0A for f in get_files(member, team):%0A%0A path = f.file.path_display%0A logger.debug(f'Checking %7Bpath%7D')%0A%0A # Find out if it is a shared folder%0A try:%0A if not f.file.sharing_info.parent_shared_folder_id:%0A shared_folders.add(f)%0A%0A except AttributeError:%0A logger.debug(f'%7Bpath%7D is not a shared folder')%0A%0A for sf in shared_folders:%0A path = sf.file.path_display%0A%0A for member in get_folder_members(team, sf):%0A name = member.user.display_name%0A logger.debug(f'%7Bpath%7D : %7Bname%7D : %7Bmember.access_type%7D')%0A%0A if member.access_type.is_owner():%0A logger.info(f'%7Bpath%7D is owned by %7Bname%7D')%0A break%0A%0A else:%0A # No owner found for the shared folder%0A logger.warning(f'No owner found for %7Bpath%7D')%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
1c094fe58df0fa57884752be7f64ee9755e433f1
Create __init__.py
tests/__init__.py
tests/__init__.py
Python
0.000429
@@ -0,0 +1,2 @@ + %0A
6edc4700f755380b8b9099ae78619cbd225a2790
add API tests
tests/api_test.py
tests/api_test.py
Python
0.000001
@@ -0,0 +1,1973 @@ +import overview, unittest, mock, json%0Afrom overview.services import Services%0A%0A%0Aclass ApiV1Test(unittest.TestCase):%0A def send_patch_json(self, url, json_data):%0A return self.app.patch(url,%0A data = json.dumps(json_data),%0A headers = %5B('Content-Type', 'application/json')%5D)%0A%0A def setUp(self):%0A self.app = overview.app.test_client()%0A%0A @mock.patch.object(Services, 'docker_state')%0A def test_get_docker_state(self, mock_docker_state):%0A mock_docker_state.return_value = %7B'message':'docker_state_by_services'%7D%0A rv = self.app.get('/api/v1/docker')%0A self.assertEqual(rv.data, '%7B%5Cn %22message%22: %22docker_state_by_services%22%5Cn%7D')%0A%0A @mock.patch.object(Services, 'states')%0A def test_get_services_state(self, mock_services_state):%0A mock_services_state.return_value = %7B'message':'services_state'%7D%0A rv = self.app.get('/api/v1/services')%0A self.assertEqual(rv.data, '%7B%5Cn %22message%22: %22services_state%22%5Cn%7D')%0A%0A @mock.patch.object(Services, 'change')%0A def test_patch_service_state(self, mock_services_change):%0A%0A # When the change is valid (from services.change perspective)%0A mock_services_change.return_value = None%0A rv = self.send_patch_json('/api/v1/services/serviceId',%0A %7B 'state': Services.STATE_RUNNING %7D)%0A%0A self.assertEqual(rv.data,%0A '%7B%5Cn %22message%22: %22Correctly applied. Change in progress.%22%5Cn%7D')%0A%0A # Verify that the change has been given%0A mock_services_change.assert_called_with('serviceId', Services.STATE_RUNNING)%0A%0A # When the change is invalid (from services.change perspective)%0A mock_services_change.return_value = 'error description'%0A rv = self.send_patch_json('/api/v1/services/serviceId',%0A %7B 'state': Services.STATE_RUNNING %7D)%0A%0A self.assertEqual(rv.data,%0A '%7B%5Cn %22error%22: %22error description%22, %5Cn'%0A ' %22message%22: %22This change cannot be made%22%5Cn%7D')%0A
0347d82e55382b9618158c4c5809c360e729c245
Create neworld_client.py
_src/om2py4w/4wex0/neworld_client.py
_src/om2py4w/4wex0/neworld_client.py
Python
0.000001
@@ -0,0 +1,857 @@ +#/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Aimport sys%0A# sys.setdefaultencoding() does not exist, here!%0Areload(sys) # Reload does the trick! %0Asys.setdefaultencoding('UTF8')%0A%0Afrom lxml import html%0Aimport requests %0A%0Adef read_note():%0A page = requests.get('http://localhost:8080/neworld')%0A tree = html.fromstring(page.content)%0A note_content = tree.xpath('//div%5B@class=%22note_content%22%5D/text()')%0A%0A return note_content%0A%0Adef write_note(mynote):%0A wpage = requests.post('http://localhost:8080/neworld', data = %7B'notes': mynote%7D)%0A %0A%0A%0Adef main():%0A while True:%0A mynote = raw_input('%3E%3E%3E ')%0A %0A if mynote == %22q%22:%0A print (%22Thanks for writing.%22)%0A break%0A elif mynote ==%22r%22:%0A print read_note()%0A else:%0A write_note(mynote)%0A%0A %0A%0Aif __name__ == %22__main__%22:%0A main()%0A
faacc6dcef31cb22a87cd3184824b9785b21fdef
Jiffy application to test indexing of reflections from other experiments in an n^2 manner, to see if a matrix from one experiment has good predictive power for another as:
command_line/griddex.py
command_line/griddex.py
Python
0.999997
@@ -0,0 +1,2550 @@ +from __future__ import absolute_import, division, print_function%0A%0Aimport libtbx.phil%0Aimport libtbx.load_env%0A%0Ahelp_message = '''%0A%0ACross reference indexing solutions.%0A%0AExamples::%0A%0A %25s expts0.json refl0.json%0A%0A''' %25 libtbx.env.dispatcher_name%0A%0Aphil_scope = libtbx.phil.parse(%22%22%22%0A d_min = None%0A .type = float(value_min=0.0)%0A%22%22%22)%0A%0Adef test_index(experiment, reflections):%0A from dials.algorithms.indexing import indexer%0A%0A # map reflections to reciprocal space from image space%0A%0A refl = indexer.indexer_base.map_spots_pixel_to_mm_rad(%0A reflections, experiment.detector, experiment.scan)%0A%0A indexer.indexer_base.map_centroids_to_reciprocal_space(%0A refl, experiment.detector, experiment.beam, experiment.goniometer)%0A%0A # now compute fractional indices - in Python rather than trying to push%0A # everything to C++ for the moment%0A%0A from scitbx import matrix%0A ub = matrix.sqr(experiment.crystal.get_A())%0A rub = ub.inverse()%0A%0A from dials.array_family import flex%0A hkl_real = flex.vec3_double(len(reflections))%0A%0A for j, rlp in enumerate(reflections%5B'rlp'%5D):%0A hkl_real%5Bj%5D = rub * rlp%0A%0A hkl = hkl_real.iround()%0A%0A ms = 0.0%0A for (_h, _k, _l), (_hr, _kr, _lr) in zip(hkl, hkl_real):%0A ms += (_hr - _h) ** 2 + (_kr - _k) ** 2 + (_lr - _l) ** 2%0A%0A import math%0A return math.sqrt(ms / len(reflections))%0A%0Adef run(args):%0A%0A from dials.util.options import OptionParser%0A from dials.util.options import flatten_experiments%0A from dials.util.options import flatten_reflections%0A import libtbx.load_env%0A%0A usage = %22%25s %5Boptions%5D datablock.json reflections.pickle%22 %25 (%0A libtbx.env.dispatcher_name)%0A%0A parser = OptionParser(%0A usage=usage,%0A phil=phil_scope,%0A read_experiments=True,%0A read_reflections=True,%0A check_format=False,%0A epilog=help_message)%0A%0A params, options = parser.parse_args(show_diff_phil=True)%0A experiments = flatten_experiments(params.input.experiments)%0A reflections = flatten_reflections(params.input.reflections)%0A%0A assert len(experiments) == len(reflections)%0A%0A nn = len(experiments)%0A%0A # FIXME check that all the crystals are in the primitive setting...%0A%0A # now compute grid of reciprocal RMSD's%0A result = %7B %7D%0A%0A for j, expt in enumerate(experiments):%0A for k, refl in enumerate(reflections):%0A result%5Bj, k%5D = test_index(expt, refl)%0A%0A # print matrix of results%0A print(' ' + ''.join(%5B'%257d' %25 j for j in range(nn)%5D))%0A for k in range(nn):%0A record = ''.join(%5B' %256.3f' %25 result%5Bj, k%5D for j in range(nn)%5D)%0A print('%258d' %25 k + record)%0A%0Aif __name__ == '__main__':%0A import sys%0A run(sys.argv%5B1:%5D)%0A
3a4c922d353df5f5b3f3cabe24b04090b0a3fd08
test the serve command
tests/test_cli.py
tests/test_cli.py
Python
0.0001
@@ -0,0 +1,1362 @@ +# Copyright 2013 Donald Stufft%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0Afrom __future__ import absolute_import, division, print_function%0Afrom __future__ import unicode_literals%0A%0Aimport pretend%0Aimport werkzeug.serving%0A%0Afrom warehouse.cli import ServeCommand%0A%0A%0Adef test_serve(monkeypatch):%0A run_simple = pretend.call_recorder(%0A lambda host, port, app, use_reloader, use_debugger: None,%0A )%0A monkeypatch.setattr(werkzeug.serving, %22run_simple%22, run_simple)%0A%0A host, port, app, use_reloader, use_debugger = (%0A pretend.stub() for x in range(5)%0A )%0A ServeCommand()(%0A app, host, port,%0A reloader=use_reloader,%0A debugger=use_debugger,%0A )%0A%0A assert run_simple.calls == %5B%0A pretend.call(%0A host, port, app,%0A use_reloader=use_reloader,%0A use_debugger=use_debugger,%0A ),%0A %5D%0A
2299343d8b10658cc6682b23dbf9be9d5fd290f6
Add unit test for data integrity.
tests/testdata.py
tests/testdata.py
Python
0
@@ -0,0 +1,1238 @@ +import ConfigParser%0Aimport csv%0Aimport unittest%0A%0Aclass DataTest(unittest.TestCase):%0A %0A def setUp(self):%0A config = ConfigParser.RawConfigParser()%0A config.read('../app.config')%0A # Load the data from the csv into an array%0A self.data = %5B%5D%0A with open('../data/%25s' %25 config.get('data', 'filename'), 'rb') as csvfile:%0A reader = csv.reader(csvfile)%0A # Skip header and parse data%0A reader.next()%0A for row in reader:%0A self.data.append(%5Bs.strip() for s in row%5D)%0A %0A def test_complete(self):%0A '''Ensure there are no day/country pairs missing data'''%0A date_country = dict()%0A dates = set()%0A countries = set()%0A for date, country, video_id in self.data:%0A dates.add(date)%0A countries.add(country)%0A date_country%5Bdate%5D = date_country.get(date, %7B%7D)%0A date_country%5Bdate%5D%5Bcountry%5D = date_country%5Bdate%5D.get(country, 0) + 1%0A for date in dates:%0A for country in countries:%0A count = date_country.get(date,%7B%7D).get(country,0)%0A self.assertNotEqual((date, country, count), (date, country, 0))%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
78aebc48763b15dedc3aee65a18a2a39f46e5c30
add run module
flagon/run.py
flagon/run.py
Python
0.000001
@@ -0,0 +1,230 @@ +%0A%0Adef run_simple(hostname, port, application, use_reloader=False,%0A use_debugger=False):%0A from wsgiref.simple_server import make_server%0A from wsgiref.simple_server import WSGIRequestHandler, WSGIServer%0A pass%0A
b9399dbdfff80fec21cfed926779b67589835047
Create LettCombPhoneNum_002.py
leetcode/017-Letter-Combinations-of-a-Phone-Number/LettCombPhoneNum_002.py
leetcode/017-Letter-Combinations-of-a-Phone-Number/LettCombPhoneNum_002.py
Python
0.000001
@@ -0,0 +1,647 @@ +class Solution(object):%0A def letterCombinations(self, digits):%0A %22%22%22%0A :type digits: str%0A :rtype: List%5Bstr%5D%0A %22%22%22%0A def comb(digits, d2l):%0A if not digits:%0A return %5B%22%22%5D%0A %0A res = %5B%5D%0A for c in d2l%5Bint(digits%5B0%5D)%5D:%0A for suffix in comb(digits%5B1:%5D, d2l):%0A res.append(c + suffix)%0A return res%0A %0A if not digits:%0A return %5B%5D%0A %0A d2l = %7B 2: 'abc', 3: 'def', 4: 'ghi', 5: 'jkl', %0A 6: 'mno', 7: 'pqrs', 8: 'tuv', 9: 'wxyz' %7D%0A return comb(digits, d2l)%0A
d92eff7e89e09167b126f99243986eae5792f705
Add py-debtcollector (#25212)
var/spack/repos/builtin/packages/py-debtcollector/package.py
var/spack/repos/builtin/packages/py-debtcollector/package.py
Python
0
@@ -0,0 +1,938 @@ +# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PyDebtcollector(PythonPackage):%0A %22%22%22%0A A collection of Python deprecation patterns and strategies that help you%0A collect your technical debt in a non-destructive manner.%0A %22%22%22%0A%0A homepage = %22https://docs.openstack.org/debtcollector/latest%22%0A pypi = %22debtcollector/debtcollector-2.2.0.tar.gz%22%0A%0A maintainers = %5B'haampie'%5D%0A%0A version('2.2.0', sha256='787981f4d235841bf6eb0467e23057fb1ac7ee24047c32028a8498b9128b6829')%0A%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('py-setuptools', type='build')%0A depends_on('[email protected]:2.0.999,2.1.1:', type='build')%0A%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('[email protected]:', type=('build', 'run'))%0A
6ed82eb673c2a95f5c349ab45dd8d17280db91c9
Conversion of string to number
BankOCR.py
BankOCR.py
Python
0.999999
@@ -0,0 +1,1666 @@ +numbers = '''%0A _ _ _ _ _ _ _ _%0A%7C %7C %7C _%7C _%7C%7C_%7C%7C_ %7C_ %7C%7C_%7C%7C_%7C%0A%7C_%7C %7C%7C_ _%7C %7C _%7C%7C_%7C %7C%7C_%7C _%7C%0A'''%0A%0Aclass Converter:%0A def __init__(self):%0A digits = self.splitDigits(numbers)%0A self.digitLineDicts = %5B%7B%7D for i in range(3)%5D%0A self.digitIdDict = %7B%7D%0A digitIndices = %5B0%5D*3%0A for d in digits:%0A for (lineIdx, line) in enumerate(d.split('%5Cn')):%0A lDict = self.digitLineDicts%5BlineIdx%5D%0A if not line in lDict:%0A lDict%5Bline%5D = digitIndices%5BlineIdx%5D%0A digitIndices%5BlineIdx%5D += 1%0A for i,d in enumerate(digits):%0A self.digitIdDict%5Bself.generateID(d)%5D = i%0A%0A def generateID(self, digit):%0A id = 0%0A for (lineIdx, line) in enumerate(digit.split('%5Cn')):%0A id *= 10%0A id += self.digitLineDicts%5BlineIdx%5D%5Bline%5D%0A return id%0A%0A def convertDigit(self, digit):%0A return self.digitIdDict%5Bself.generateID(digit)%5D%0A%0A def splitDigits(self, code):%0A lines = %5Bl for l in code.split('%5Cn') if l%5D%0A numChars = max(%5Blen(l) for l in lines%5D)%0A def adjustLine(l):%0A return l + ' ' * max(numChars-len(l), 0);%0A lines = %5BadjustLine(l) for l in lines%5D%0A numDigits = numChars//3%0A digits = %5B''%5D*numDigits%0A for i in range(numDigits):%0A digits%5Bi%5D += lines%5B0%5D%5Bi*3:i*3+3%5D + '%5Cn'%0A digits%5Bi%5D += lines%5B1%5D%5Bi*3:i*3+3%5D + '%5Cn'%0A digits%5Bi%5D += lines%5B2%5D%5Bi*3:i*3+3%5D%0A return digits%0A%0A def convert(self, digits):%0A for d in self.splitDigits(digits):%0A yield self.convertDigit(d)%0A%0Ac = Converter()%0Aprint(list(c.convert(numbers)))%0A
6bd4b7e4c2dac2817250f184114eea8c05fbefb7
Add compat.py to get get_user_model working
cuser/compat.py
cuser/compat.py
Python
0
@@ -0,0 +1,580 @@ +from __future__ import unicode_literals%0Afrom django.conf import settings%0Afrom django.core.exceptions import ImproperlyConfigured%0Aimport django%0Afrom django.utils.functional import lazy%0A%0A__all__ = %5B'User', 'AUTH_USER_MODEL'%5D%0A%0A%0A# Django 1.5+ compatibility%0Aif django.VERSION %3E= (1, 5):%0A AUTH_USER_MODEL = settings.AUTH_USER_MODEL%0A try:%0A from django.contrib.auth import get_user_model%0A User = lazy(get_user_model, AUTH_USER_MODEL)%0A except ImproperlyConfigured:%0A pass%0Aelse:%0A from django.contrib.auth.models import User%0A AUTH_USER_MODEL = 'auth.User'%0A
422390ff7eb4d97eaf0c5c1a1b250010ee766ec7
Add tool for clean pyc files
tools/cleanPYC.py
tools/cleanPYC.py
Python
0
@@ -0,0 +1,312 @@ +%0Aimport re%0Aimport os%0Aimport sys%0A%0Aprint(%22%25s path%5Cn%22 %25 sys.argv%5B0%5D)%0A%0Apath = sys.argv%5B1%5D%0A%0Afor root, dirs, files in os.walk(path):%0A for file_ in files:%0A if re.match(%22.*.pyc$%22, file_):%0A abs_file = os.path.join(root, file_)%0A print(%22Clean %25s%22 %25 abs_file)%0A os.remove(abs_file)%0A
0d0115ef5e088ed54a176e24cc94713b706f3d55
include migration
awx/main/migrations/0015_v300_label_changes.py
awx/main/migrations/0015_v300_label_changes.py
Python
0.000111
@@ -0,0 +1,487 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('main', '0014_v300_invsource_cred'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='label',%0A name='organization',%0A field=models.ForeignKey(related_name='labels', to='main.Organization', help_text='Organization this label belongs to.'),%0A ),%0A %5D%0A
5db256e6ac4ee84e008afa8f94d767330e392709
Increase coverage
test/test_vmcp.py
test/test_vmcp.py
Python
0
@@ -0,0 +1,1262 @@ +# -*- coding: utf8 -*-%0A# This file is part of PyBossa.%0A#%0A# Copyright (C) 2013 SF Isle of Man Limited%0A#%0A# PyBossa is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A#%0A# PyBossa is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the GNU Affero General Public License%0A# along with PyBossa. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0Aimport json%0Afrom mock import patch%0Afrom base import web, model, Fixtures, db, redis_flushall%0Aimport pybossa.vmcp as vmcp%0Afrom nose.tools import assert_equal, assert_raises%0A%0A%0Aclass TestAPI:%0A%0A def test_myquote(self):%0A %22%22%22Test myquote works.%22%22%22%0A # Valid char should be the same%0A err_msg = %22Valid chars should not be quoted%22%0A assert vmcp.myquote('a') == 'a', err_msg%0A # Non-valid%0A err_msg = %22Non-Valid chars should be quoted%22%0A assert vmcp.myquote('%25') == '%2525', err_msg%0A
0a3e00b27606eda26917c3c69b0344dc301502f0
Revert "will this fix tests?"
tests/__init__.py
tests/__init__.py
Python
0
@@ -0,0 +1,148 @@ +# log_support setups the default Logger class%0A# and so we need to ensure that it is also%0A# setup for the tests%0Afrom lbrynet.core import log_support%0A
3dbef22cee9ea83c7e80756037209334da237d4c
Remove unused compat types from compat.py
twython/compat.py
twython/compat.py
import sys _ver = sys.version_info #: Python 2.x? is_py2 = (_ver[0] == 2) #: Python 3.x? is_py3 = (_ver[0] == 3) try: import simplejson as json except ImportError: import json if is_py2: from urllib import urlencode, quote_plus try: from urlparse import parse_qsl except ImportError: from cgi import parse_qsl builtin_str = str bytes = str str = unicode basestring = basestring numeric_types = (int, long, float) elif is_py3: from urllib.parse import urlencode, quote_plus, parse_qsl builtin_str = str str = str bytes = bytes basestring = (str, bytes) numeric_types = (int, float)
Python
0
@@ -354,317 +354,135 @@ b -uiltin_str = str%0A bytes = str%0A str = unicode%0A basestring = basestring%0A numeric_types = (int, long, float)%0A%0A%0Aelif is_py3:%0A from urllib.parse import urlencode, quote_plus, parse_qsl%0A%0A builtin_str = str%0A str = str%0A bytes = bytes%0A basestring = (str, bytes)%0A numeric_types = (int, float +asestring = basestring%0A%0A%0Aelif is_py3:%0A from urllib.parse import urlencode, quote_plus, parse_qsl%0A%0A basestring = (str, bytes )%0A
f10049ae831570b54581c2a089218359febe5c50
add command for exporting to csv
fecfilings/management/commands/fecfilings_to_csv.py
fecfilings/management/commands/fecfilings_to_csv.py
Python
0.000001
@@ -0,0 +1,235 @@ +from django.core.management.base import NoArgsCommand%0A%0Afrom fecfilings.models import Contributor%0A%0A%0Aclass Command(NoArgsCommand):%0A def handle(self, **options):%0A for c in Contributor.objects.all():%0A print c.to_csv()%0A
23165cbd1ac8ba1528649c04b56d598664e1da8b
Enhance mysensors binary sensor device classes (#13367)
homeassistant/components/binary_sensor/mysensors.py
homeassistant/components/binary_sensor/mysensors.py
""" Support for MySensors binary sensors. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/binary_sensor.mysensors/ """ from homeassistant.components import mysensors from homeassistant.components.binary_sensor import ( DEVICE_CLASSES, DOMAIN, BinarySensorDevice) from homeassistant.const import STATE_ON def setup_platform(hass, config, add_devices, discovery_info=None): """Set up the MySensors platform for binary sensors.""" mysensors.setup_mysensors_platform( hass, DOMAIN, discovery_info, MySensorsBinarySensor, add_devices=add_devices) class MySensorsBinarySensor(mysensors.MySensorsEntity, BinarySensorDevice): """Representation of a MySensors Binary Sensor child node.""" @property def is_on(self): """Return True if the binary sensor is on.""" return self._values.get(self.value_type) == STATE_ON @property def device_class(self): """Return the class of this sensor, from DEVICE_CLASSES.""" pres = self.gateway.const.Presentation class_map = { pres.S_DOOR: 'opening', pres.S_MOTION: 'motion', pres.S_SMOKE: 'smoke', } if float(self.gateway.protocol_version) >= 1.5: class_map.update({ pres.S_SPRINKLER: 'sprinkler', pres.S_WATER_LEAK: 'leak', pres.S_SOUND: 'sound', pres.S_VIBRATION: 'vibration', pres.S_MOISTURE: 'moisture', }) if class_map.get(self.child_type) in DEVICE_CLASSES: return class_map.get(self.child_type)
Python
0
@@ -367,16 +367,248 @@ ATE_ON%0A%0A +SENSORS = %7B%0A 'S_DOOR': 'door',%0A 'S_MOTION': 'motion',%0A 'S_SMOKE': 'smoke',%0A 'S_SPRINKLER': 'safety',%0A 'S_WATER_LEAK': 'safety',%0A 'S_SOUND': 'sound',%0A 'S_VIBRATION': 'vibration',%0A 'S_MOISTURE': 'moisture',%0A%7D%0A%0A %0Adef set @@ -1327,504 +1327,86 @@ -class_map = %7B%0A pres.S_DOOR: 'opening',%0A pres.S_MOTION: 'motion',%0A pres.S_SMOKE: 'smoke',%0A %7D%0A if float(self.gateway.protocol_version) %3E= 1.5:%0A class_map.update(%7B%0A pres.S_SPRINKLER: 'sprinkler',%0A pres.S_WATER_LEAK: 'leak',%0A pres.S_SOUND: 'sound',%0A pres.S_VIBRATION: 'vibration',%0A pres.S_MOISTURE: 'moisture',%0A %7D)%0A if class_map.get(self.child_type) +device_class = SENSORS.get(pres(self.child_type).name)%0A if device_class in @@ -1444,35 +1444,37 @@ urn -class_map.get(self.child_type) +device_class%0A return None %0A
c9f70c7a4a24be0cdd9dcf044a06051b0978efff
add exceptions
jsonrpc/exceptions.py
jsonrpc/exceptions.py
Python
0.000013
@@ -0,0 +1,2592 @@ +class JSONRPCError(object):%0A%0A %22%22%22 Error for JSON-RPC communication.%0A%0A When a rpc call encounters an error, the Response Object MUST contain the%0A error member with a value that is a Object with the following members:%0A%0A code: A Number that indicates the error type that occurred.%0A This MUST be an integer.%0A%0A message: A String providing a short description of the error.%0A The message SHOULD be limited to a concise single sentence.%0A%0A data: A Primitive or Structured value that contains additional information%0A about the error.%0A This may be omitted.%0A The value of this member is defined by the Server (e.g. detailed error%0A information, nested errors etc.).%0A%0A The error codes from and including -32768 to -32000 are reserved for%0A pre-defined errors. Any code within this range, but not defined explicitly%0A below is reserved for future use. The error codes are nearly the same as%0A those suggested for XML-RPC at the following%0A url: http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php%0A%0A %22%22%22%0A%0A def __init__(self, code=None, message=None, data=None):%0A self.code = code or self.code%0A self.message = message or self.message%0A self.data = data%0A%0A @property%0A def _dict(self):%0A %22%22%22 Return object dict representation.%0A%0A :return dict:%0A%0A %22%22%22%0A data = dict(code=self.code, message=self.message)%0A%0A if self.data:%0A data%5B%22data%22%5D = self.data%0A%0A return data%0A%0A%0Aclass JSONRPCParseError(JSONRPCError):%0A%0A %22%22%22 Parse Error.%0A%0A Invalid JSON was received by the server.%0A An error occurred on the server while parsing the JSON text.%0A%0A %22%22%22%0A%0A code = -32700%0A message = %22Parse error%22%0A%0A%0Aclass JSONRPCInvalidRequest(JSONRPCError):%0A%0A %22%22%22 Invalid Request.%0A%0A The JSON sent is not a valid Request object.%0A%0A %22%22%22%0A%0A code = -32600%0A message = %22Invalid Request%22%0A%0A%0Aclass JSONRPCMethodNotFound(JSONRPCError):%0A%0A %22%22%22 Method not found.%0A%0A The method does not exist / is not available.%0A%0A %22%22%22%0A%0A code = -32601%0A message = %22Method not found%22%0A%0A%0Aclass JSONRPCInvalidParams(JSONRPCError):%0A%0A %22%22%22 Invalid params.%0A%0A Invalid method parameter(s).%0A%0A %22%22%22%0A%0A code = -32602%0A message = %22Invalid params%22%0A%0A%0Aclass JSONRPCInternalError(JSONRPCError):%0A%0A %22%22%22 Internal error.%0A%0A Internal JSON-RPC error.%0A%0A %22%22%22%0A%0A code = -32603%0A message = %22Internal error%22%0A%0A%0Aclass JSONRPCServerError(JSONRPCError):%0A%0A %22%22%22 Server error.%0A%0A Reserved for implementation-defined server-errors.%0A%0A %22%22%22%0A%0A code = -32000%0A message = %22Server error%22%0A
2947a2c9b6348d248e3ae740722d6a7aa04327c0
add reg d included definitions
regconfig/reg_d.py
regconfig/reg_d.py
Python
0
@@ -0,0 +1,331 @@ +from regparser.default_settings import *%0A%0A#### Regulation D%0A%0AINCLUDE_DEFINITIONS_IN_PART_1004 = %5B%0A ('Alternative mortgage transaction', 'Alternative mortgage transaction'),%0A ('Creditor', 'Creditor'),%0A ('State', 'State'),%0A ('State law', 'State law'),%0A%5D%0A%0AINCLUDE_DEFINITIONS_IN%5B'1004'%5D = INCLUDE_DEFINITIONS_IN_PART_1004%0A
1e32a27b35e25e780e8af6cc76d1eb424328171b
add leetcode Populating Next Right Pointers in Each Node
leetcode/PopulatingNextRightPointersinEachNode/solution.py
leetcode/PopulatingNextRightPointersinEachNode/solution.py
Python
0
@@ -0,0 +1,625 @@ +# -*- coding:utf-8 -*-%0A# Definition for a binary tree node%0A# class TreeNode:%0A# def __init__(self, x):%0A# self.val = x%0A# self.left = None%0A# self.right = None%0A# self.next = None%0A%0A%0Aclass Solution:%0A # @param root, a tree node%0A # @return nothing%0A def connect(self, root):%0A if root is None:%0A return root%0A if root.left is not None:%0A root.left.next = root.right%0A if root.right is not None and root.next is not None:%0A root.right.next = root.next.left or root.next.right%0A self.connect(root.left)%0A self.connect(root.right)%0A
b220410ad51413d52076bec84a3bf1a660f9883b
Add a program that says Hello Shikha
helloShikha.py
helloShikha.py
Python
0.999999
@@ -0,0 +1,54 @@ +#This is my hello world program%0Aprint 'Hello Shikha!'%0A
557652d4b4297dd80d844915c3d57fc3e46ac83a
add graham's solution:
solutions/4_21_grsr.py
solutions/4_21_grsr.py
Python
0.000033
@@ -0,0 +1,184 @@ +import sys%0A%0Afor line in sys.stdin:%0A line = line.rstrip()%0A pop, sample_id, sample_name, sex = line.split(%22,%22)%0A if (sample_id == sys.argv%5B1%5D):%0A print %22Found%22, sample_id%0A%0A
8db04e5d648c9e923f7977f456242d9ea9b80050
Create pig_latin.py
solutions/pig_latin.py
solutions/pig_latin.py
Python
0.001803
@@ -0,0 +1,321 @@ +def pig_latin(input_string):%0A%09new_string = %5B%5D%0A%09for i in input_string.split():%0A%09%09if i%5B0%5D not in %22aeiou%22:%0A%09%09%09i = i%5B1:%5D+i%5B0%5D%0A%09%09i +=%22ay%22%0A%09%09new_string.append(i)%0A%09return ' '.join(new_string)%0A%0Adef main():%0A%09user_input = str(raw_input(%22Please give me a phrase: %22))%0A%09print pig_latin(user_input)%0A%0Aif __name__ == '__main__':%0A%09main()%0A
f5460adbaeb87421a7f193a700d25e5a3c6e4351
Create crypt.py
crypt.py
crypt.py
Python
0.00091
@@ -0,0 +1,292 @@ +from itertools import cycle%0A%0Adef crypt(source,key):%0A result=%22%22%0A a=cycle(key)%0A for ch in source:%0A result+=chr(ord(ch)%5Eord(next(a)))%0A return result%0A%0Aif __name__==%22__main__%22:%0A source=input(%22%E8%BE%93%E5%85%A5%E6%83%B3%E8%A6%81%E5%8A%A0%E5%AF%86/%E8%A7%A3%E5%AF%86%E7%9A%84%E5%AD%97%E4%B8%B2:%22)%0A key=input(%22%E8%BE%93%E5%85%A5%E5%AF%86%E9%92%A5:%22)%0A print(%22%E5%8A%A0%E5%AF%86/%E8%A7%A3%E5%AF%86%E6%88%90%E5%8A%9F!%E5%AF%86%E7%A0%81%E4%B8%BA:%22+crypt(source,key))%0A
5ed7db70874f3ebfe9c946d38ccf12228dacac3a
Test if we tried to commit with an empty message, it should raise a ValueError
tests/test_git.py
tests/test_git.py
Python
0.000001
@@ -0,0 +1,557 @@ +from unittest import TestCase%0A%0Afrom mock import MagicMock, patch%0Afrom nose.tools import raises%0A%0Afrom pyolite.git import Git%0A%0A%0Aclass TestGit(TestCase):%0A%0A @raises(ValueError)%0A def test_commit_with_no_message(self):%0A mock_repo = MagicMock()%0A mock_index = MagicMock()%0A mock_remotes = MagicMock()%0A%0A mock_repo.index = mock_index%0A mock_repo.remotes.origin = mock_remotes%0A%0A with patch.multiple('pyolite.git', Repo=mock_repo):%0A git = Git('~/path/to/repo')%0A objects = %5B'simple_object', 'more_complex_one'%5D%0A%0A git.commit(objects, '')%0A
1a3d9b3da91a5c87316e44498a876f70a49df8ad
add 70
python/p070.py
python/p070.py
Python
0.999266
@@ -0,0 +1,422 @@ +import utils%0A%0A%0Adef is_perm(a, b):%0A return sorted(str(a)) == sorted(str(b))%0A%0Abest = (10000, 1)%0Aprimes = %5B i for i in utils.primes(4000) if i %3E 2000 %5D%0A%0Afor i in primes:%0A for j in primes:%0A n = i * j%0A if n %3E 10**7:%0A break%0A phi = (i - 1) * (j - 1)%0A ratio = (n * 1.0) / phi%0A curr = (ratio, n)%0A if is_perm(n, phi) and curr %3C best:%0A best = curr%0A%0Aprint best%5B1%5D%0A
c4764ef1aa1a1aaa0ae8dd909c3578705c7a2060
add 77
python/p077.py
python/p077.py
Python
0.998768
@@ -0,0 +1,325 @@ +import utils%0A%0Aprimes = utils.primes(100)%0A%0Adef count(target):%0A ways = %5B0%5D * (target + 1)%0A ways%5B0%5D = 1%0A%0A for p in primes:%0A for j in xrange(p, target + 1):%0A ways%5Bj%5D += ways%5Bj - p%5D%0A%0A return ways%5Btarget%5D%0A%0A%0Afor target in xrange(2, 100):%0A if count(target) %3E 5000:%0A print target%0A break%0A
f773b404c10af24477de733a42ed9c06bc93296d
Improve support for v6 thrift hosts.
sparts/tasks/thrift.py
sparts/tasks/thrift.py
# Copyright (c) 2014, Facebook, Inc. All rights reserved. # # This source code is licensed under the BSD-style license found in the # LICENSE file in the root directory of this source tree. An additional grant # of patent rights can be found in the PATENTS file in the same directory. # """thrift-related helper tasks""" from __future__ import absolute_import from ..vtask import VTask from sparts.sparts import option from thrift.server.TNonblockingServer import TNonblockingServer from thrift.transport.TSocket import TServerSocket import time class ThriftHandlerTask(VTask): """A loopless task that handles thrift requests. You will need to subclass this task, set MODULE, and implement the necessary methods in order for requests to be mapped here.""" LOOPLESS = True MODULE = None _processor = None def initTask(self): super(ThriftHandlerTask, self).initTask() assert self.MODULE is not None self._verifyInterface() def _verifyInterface(self): iface = self.MODULE.Iface missing_methods = [] for k in dir(iface): v = getattr(iface, k, None) if not callable(v) or k.startswith('_'): continue v2 = getattr(self, k, None) if v2 is None or not callable(v): missing_methods.append(k) if missing_methods: raise TypeError("%s is missing the following methods: %s" % (self.__class__.__name__, missing_methods)) def _makeProcessor(self): return self.MODULE.Processor(self) @property def processor(self): if self._processor is None: self._processor = self._makeProcessor() return self._processor class ThriftServerTask(VTask): MODULE = None def initTask(self): super(ThriftServerTask, self).initTask() processors = self._findProcessors() assert len(processors) > 0, "No processors found for %s" % (self.MODULE) assert len(processors) == 1, "Too many processors found for %s" % \ (self.MODULE) self.processorTask = processors[0] @property def processor(self): return self.processorTask.processor def _checkTaskModule(self, task): """Returns True if `task` implements the appropriate MODULE Iface""" # Skip non-ThriftHandlerTasks if not isinstance(task, ThriftHandlerTask): return False # If self.MODULE is None, then connect *any* ThriftHandlerTask if self.MODULE is None: return True iface = self.MODULE.Iface # Verify task has all the Iface methods. for method_name in dir(iface): method = getattr(iface, method_name) # Skip field attributes if not callable(method): continue # Check for this method on the handler task handler_method = getattr(task, method_name, None) if handler_method is None: self.logger.debug("Skipping Task %s (missing method %s)", task.name, method_name) return False # And make sure that attribute is actually callable if not callable(handler_method): self.logger.debug("Skipping Task %s (%s not callable)", task.name, method_name) return False # If all the methods are there, the shoe fits. return True def _findProcessors(self): """Returns all processors that match this tasks' MODULE""" processors = [] for task in self.service.tasks: if self._checkTaskModule(task): processors.append(task) return processors class NBServerTask(ThriftServerTask): """Spin up a thrift TNonblockingServer in a sparts worker thread""" DEFAULT_HOST = '0.0.0.0' DEFAULT_PORT = 0 OPT_PREFIX = 'thrift' bound_host = bound_port = None host = option(default=lambda cls: cls.DEFAULT_HOST, metavar='HOST', help='Address to bind server to [%(default)s]') port = option(default=lambda cls: cls.DEFAULT_PORT, type=int, metavar='PORT', help='Port to run server on [%(default)s]') num_threads = option(name='threads', default=10, type=int, metavar='N', help='Server Worker Threads [%(default)s]') def initTask(self): """Overridden to bind sockets, etc""" super(NBServerTask, self).initTask() self._stopped = False # Construct TServerSocket this way for compatibility with fbthrift self.socket = TServerSocket(port=self.port) self.socket.host = self.host self.server = TNonblockingServer(self.processor, self.socket, threads=self.num_threads) self.server.prepare() self.bound_host, self.bound_port = \ self.server.socket.handle.getsockname() self.logger.info("%s Server Started on %s:%s", self.name, self.bound_host, self.bound_port) def stop(self): """Overridden to tell the thrift server to shutdown asynchronously""" self.server.stop() self.server.close() self._stopped = True def _runloop(self): """Overridden to execute TNonblockingServer's main loop""" while not self.server._stop: self.server.serve() while not self._stopped: time.sleep(0.1)
Python
0
@@ -4936,32 +4936,33 @@ r.prepare()%0A +%0A self.bound_h @@ -4953,56 +4953,18 @@ -self.bound_host, self.bound_port = %5C%0A +addrinfo = sel @@ -4996,24 +4996,82 @@ tsockname()%0A + self.bound_host, self.bound_port = addrinfo%5B0:2%5D%0A%0A self @@ -5111,12 +5111,20 @@ n %25s -:%25s%22 +%22, self.name ,%0A @@ -5151,22 +5151,30 @@ self. -name, +_fmt_hostport( self.bou @@ -5198,16 +5198,183 @@ nd_port) +)%0A%0A def _fmt_hostport(self, host, port):%0A if ':' in host:%0A return '%5B%25s%5D:%25d' %25 (host, port)%0A else:%0A return '%25s:%25d' %25 (host, port) %0A%0A de
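Decoded, the patch above replaces a hard-coded '%s:%s' in the startup log line with a _fmt_hostport helper so IPv6 literals come out bracketed. The helper in isolation, runnable as-is:

def _fmt_hostport(host, port):
    # IPv6 addresses contain ':' themselves, so bracket them to keep host:port parseable
    if ':' in host:
        return '[%s]:%d' % (host, port)
    else:
        return '%s:%d' % (host, port)

print(_fmt_hostport('0.0.0.0', 9090))   # 0.0.0.0:9090
print(_fmt_hostport('::1', 9090))       # [::1]:9090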
9a237141c9635d2a1dad6349ad73d24e969d8460
Add runner
hud-runner.py
hud-runner.py
Python
0.000022
@@ -0,0 +1,191 @@ +#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A %0A %0A%22%22%22Convenience wrapper for running hud directly from source tree.%22%22%22%0A %0A %0Afrom hud.hud import main%0A %0A %0Aif __name__ == '__main__':%0A main()%0A
bfb7d8d9356fe66f433556977a333e4256c6fb61
Create series.py
trendpy/series.py
trendpy/series.py
Python
0
@@ -0,0 +1,2304 @@ +# series.py%0A%0A# MIT License%0A%0A# Copyright (c) 2017 Rene Jean Corneille%0A%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A%0A# The above copyright notice and this permission notice shall be included in all%0A# copies or substantial portions of the Software.%0A%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE%0A# SOFTWARE.%0A%0Aimport matplotlib.pyplot as plt%0A%0Afrom trendpy.mcmc import MCMC%0Afrom trendpy.factory import StrategyFactory%0A%0Afrom pandas import DataFrame, read_csv%0A%0Aclass Series(object):%0A%0A%09def __init__(self):%0A%09%09self.data=None%0A%09%09self.is_log_price = False%0A%0A%09def __len__(self):%0A%09%09return self.data.size%0A%0A%09def __str__(self):%0A%09%09return self.data.__str__()%0A%0A%09@staticmethod%0A%09def from_csv(filename, nomalise=True):%0A%09%09ts=Series()%0A%09%09ts.nomalise = nomalise%0A%09%09ts.data=read_csv(filename,index_col=0)%0A%09%09return ts%0A%0A%09def returns(self,period=1):%0A%09%09pass%0A%0A%09def save(self,filename='export.csv',type='csv',separator=','):%0A%09%09if type=='csv':%0A%09%09%09pass%0A%09%09if type=='json':%0A%09%09%09pass%0A%0A%09def plot(self):%0A%09%09self.data.plot()%0A%09%09plt.show()%0A%0A%09def filter(self, method=%22L1Filter%22,number_simulations=100, burns=50,total_variation=2):%0A%09%09mcmc = MCMC(self, StrategyFactory.create(method,self.data.as_matrix()%5B:,0%5D,total_variation_order=total_variation))%0A%0A%09%09mcmc.run(number_simulations)%0A%0A%09%09trend = mcmc.output(burns,%22trend%22)%0A%0A%09%09self.data = self.data.join(DataFrame(trend,index=self.data.index,columns=%5Bmethod%5D))%0A%0A%09def regression(self,method=%22lasso%22, number_simulations=100, burns=50):%0A%09%09pass%0A%0A%09def export(self, filename, as_txt=False):%0A%09%09pass%0A
250d1c20c16b6c0846a9fb94ef4ebc6e780221df
Create solution.py
hackerrank/algorithms/implementation/easy/equalize_the_array/py/solution.py
hackerrank/algorithms/implementation/easy/equalize_the_array/py/solution.py
Python
0.000018
@@ -0,0 +1,284 @@ +def solution(nums):%0A import collections%0A %0A if len(nums) == 0:%0A return 0%0A %0A item, count = collections.Counter(nums).most_common()%5B0%5D%0A %0A return len(nums) - count%0A%0An = int(input())%0Anums = tuple(map(int, input().split()))%0A%0Acnt = solution(nums)%0A%0Aprint(cnt)%0A
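The diff decodes to a HackerRank "Equalize the Array" solution: keep the most frequent value, delete the rest. Stripped of the stdin plumbing, a runnable rendering with a quick check:

import collections

def solution(nums):
    if len(nums) == 0:
        return 0
    item, count = collections.Counter(nums).most_common()[0]
    return len(nums) - count             # delete everything that isn't the modal value

assert solution([3, 3, 2, 1, 3]) == 2    # drop the 2 and the 1, keep the three 3s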
fca6421c53e286549d861c65c114991602f310ea
Add some adaptors.
pykmer/adaptors.py
pykmer/adaptors.py
Python
0
@@ -0,0 +1,1415 @@ +%22%22%22%0AThis module provides some adaptors for converting between%0Adifferent data formats:%0A%0A%60k2kf%60%0A Convert a sequence of k-mers to k-mer frequency pairs%0A%0A%60kf2k%60%0A Convert a sequence of k-mer frequency pairs to k-mers%0A%0A%0A%60keyedKs%60%0A Provide keyed access to a sequence of k-mers%0A%0A%60keyedKFs%60%0A Provide keyed access to a sequence of k-mer frequency pairs%0A%22%22%22%0A%0Adef k2kf(xs, f=1):%0A for x in xs:%0A yield (x, f)%0A%0Adef kf2k(xs):%0A for (x, _) in xs:%0A yield x%0A%0Aclass keyedKs:%0A def __init__(self, itr):%0A self.itr = itr%0A self.more = True%0A self.next()%0A%0A def valid(self):%0A return self.more%0A%0A def kmer(self):%0A assert self.valid()%0A return self.curr%0A%0A def item(self):%0A assert self.valid()%0A return self.curr%0A%0A def next(self):%0A assert self.valid()%0A try:%0A self.curr = self.itr.next()%0A except StopIteration:%0A self.more = False%0A%0Aclass keyedKfs:%0A def __init__(self, itr):%0A self.itr = itr%0A self.more = True%0A self.next()%0A%0A def valid(self):%0A return self.more%0A%0A def kmer(self):%0A assert self.valid()%0A return self.curr%5B0%5D%0A%0A def item(self):%0A assert self.valid()%0A return self.curr%0A%0A def next(self):%0A assert self.valid()%0A try:%0A self.curr = self.itr.next()%0A except StopIteration:%0A self.more = False%0A%0A
b7f3e32827bb9a0f122928d218f4d535febb0829
add command
Command.py
Command.py
Python
0.000292
@@ -0,0 +1,431 @@ +# -*- coding: utf-8 -*-%0A%0A%22%22%22%0ACommand pattern%0A%0A%22%22%22%0A%0Afrom os import listdir, curdir%0A%0A%0Aclass ListCommand(object):%0A%0A def __init__(self, path=None):%0A self.path = path or curdir%0A%0A def execute(self):%0A self._list(self.path)%0A%0A @staticmethod%0A def _list(path=None):%0A print 'list path %7B%7D :'.format(path)%0A print listdir(path)%0A%0Aif __name__ == %22__main__%22:%0A command = ListCommand()%0A command.execute()%0A
bd865a9fdc941b99be40a5ba3dcc02b819b2e9da
add cpm.utils.refstring
cpm/utils/refstring.py
cpm/utils/refstring.py
Python
0.000001
@@ -0,0 +1,2714 @@ +# Copyright (c) 2017 Niklas Rosenstein%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A#%0A# The above copyright notice and this permission notice shall be included in%0A# all copies or substantial portions of the Software.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN%0A# THE SOFTWARE.%0A%0Aimport collections%0Aimport re%0A%0Afrom . import semver%0A%0ARef = collections.namedtuple('Ref', 'package version module function')%0Aspec = '%5B%3Cpackage%3E%5B@%3Cversion%3E%5D%5D%5B/%3Cmodule%3E%5D%5B:%3Cfunction%3E%5D'%0Aregex = re.compile('''%5E%0A (?:%0A (?P%3Cpackage%3E %5BA-z0-9%5C.%5C-_%5D+)%0A (?: @(?P%3Cversion%3E %5B0-9%5C.%5D+%5BA-z0-9%5C.%5C-%5C+%5D*))?%0A )?%0A (?: /(?P%3Cmodule%3E %5BA-z0-9%5C.%5C-_%5D+))?%0A (?: :(?P%3Cfunction%3E %5BA-z0-9%5C.%5C-_%5D+))?%0A $''', re.X)%0A%0A%0Adef parse(s):%0A %22%22%22%0A Parse a reference string and returns a #Ref which is a namedtuple consisting%0A of the members *package*, *version*, *module* and *function*. The parameter%0A *s* must be a string of the format%0A%0A %5B%3Cpackage%3E%5B@%3Cversion%3E%5D%5D%5B/%3Cmodule%3E%5D%5B:%3Cfunction%3E%5D%0A%0A # Raises%0A ValueError: If the string is invalid.%0A %22%22%22%0A%0A m = regex.match(s)%0A if not m:%0A raise ValueError('invalid refstring: %22%7B%7D%22'.format(s))%0A package, version, module, function = m.groups()%0A if version:%0A try:%0A version = semver.Version(version)%0A except ValueError as exc:%0A raise ValueError('invalid refstring: %22%7B%7D%22 (%7B%7D)'.format(s, exc))%0A return Ref(package, version, module, function)%0A%0A%0Adef join(package=None, version=None, module=None, function=None):%0A %22%22%22%0A Concatenes the components of a reference back into a string. To use this%0A function with a #Ref object, simply use argument-unpacking like this:%0A %60join(*ref)%60.%0A %22%22%22%0A%0A if package:%0A result = package%0A if version:%0A result += '@' + str(version)%0A else:%0A if version:%0A raise ValueError('version can not be specified without a package')%0A result = ''%0A%0A if module:%0A result += '/' + module%0A if function:%0A result += ':' + function%0A%0A return result%0A
edb904ca105abfb767f94f366e19ed05374a8014
Create URL Shortner
URLShortner.py
URLShortner.py
Python
0
@@ -0,0 +1,1216 @@ +import uuid%0Aimport json%0Aimport os%0Afrom glob import iglob%0Afrom pprint import pprint%0A%0Amapping=%7B%7D%0Amapping%5B'URL'%5D=%5B%5D%0A#Getting JSON file of initial Tika parsing containing list of file paths categorized by MIME types%0Afile=%22C:/Users/rahul/Documents/GitHub/Scientific-Content-Enrichment-in-the-Text-Retrieval-Conference-TREC-Polar-Dynamic-Domain-Dataset/fulldump-path-all-json/%22%0A%0AoutFile='output-from-url-shortner-all-types'+'.json'%0Aoutput_file=open(outFile,'w')%0A%0Afor filepath in iglob(os.path.join(file, '*.json')):%0A with open(filepath) as data_file: %0A data = json.load(data_file)%0A for i in data%5B'files'%5D:%0A #Getting a unique md5 hash for the file path relative to the current directory%0A d=%7B%7D%0A d%5B'filePath'%5D=i%0A %0A s=%22polar.usc.edu/%22+str(uuid.uuid4())%5B:8%5D%0A d%5B'shortURL'%5D=s%0A mapping%5B'URL'%5D.append(d)%0A %0A print %22%5C'%22+ i+ %22%5C'%22 + %22 : %22 +%22%5C'%22+ s+ %22%5C'%22%0A #print dispString %0A #output_file.write(dispString)%0A %0A data_file.close()%0A%0A#Dumping JSON object with mapped shortened URLs and file path%0Akeys=json.dumps(mapping, sort_keys=True)%0Aoutput_file.write(keys)%0Aoutput_file.close()%0A
62a13341610d476ba8ff9e3fd5a3476cbdb18225
Create convert.py
convert.py
convert.py
Python
0.000002
@@ -0,0 +1,1351 @@ +import gensim%0A%0A#word2vec embeddings start with a line with the number of lines (tokens?) and the number of dimensions of the file. This allows%0A#gensim to allocate memory accordingly for querying the model. Larger dimensions mean larger memory is held captive. Accordingly, this line%0A#has to be inserted into the GloVe embeddings file.%0A%0A#GloVe Model File%0A#More models can be downloaded from http://nlp.stanford.edu/projects/glove/%0Afname=%22glove.6B.50d.txt%22%0A%0A#convert Glove vectors to word2vec format%0Aword2vec_convert_file=%22C:/Users/Manas/Desktop/ML/Topics_Multiclass/Zomato_Reviews/Data/IMDB/word2vec_line.txt%22%0A%0A#to be a first line insert%0Anum_lines = sum(1 for line in open(fname))%0Adims=50%0A%0Aprint '%25d lines with %25d dimensions' %25(num_lines,dims)%0A%0Awith open(word2vec_convert_file,'w') as f:%0A f.write(str(num_lines)+ %22 %22 +str(dims) + '%5Cn')%0Af.close()%0A%0Amodel_file='glove_model.txt'%0A%0Afilenames = %5Bword2vec_convert_file,fname%5D%0A%0Awith open(model_file, 'w') as outfile:%0A for fname in filenames:%0A with open(fname) as infile:%0A for line in infile:%0A outfile.write(line)%0Aoutfile.close() %0A%0A#load converted model file %0Amodel=gensim.models.Word2Vec.load_word2vec_format(model_file,binary=False) #GloVe Model%0A%0Aprint model.most_similar(positive=%5B'australia'%5D, topn=10)%0Aprint model.similarity('woman', 'man')%0A
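The decoded script's essential move is prepending the '<vector count> <dimensions>' header that word2vec text loaders require but GloVe files omit; the rest is file concatenation. A compact sketch of just that step (the function name is mine; the file names and the 50-dimension figure come from the record):

def glove_to_word2vec(glove_path, out_path, dims):
    num_lines = sum(1 for _ in open(glove_path))   # one vector per line
    with open(out_path, 'w') as out:
        out.write('%d %d\n' % (num_lines, dims))   # the header GloVe files lack
        with open(glove_path) as src:
            for line in src:
                out.write(line)

# glove_to_word2vec('glove.6B.50d.txt', 'glove_model.txt', 50)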
5d5ccc84eaaec6b6d749a9054f744a5a44f9dac9
add script for reading from PCF8574
i2c/PCF8574.py
i2c/PCF8574.py
Python
0
@@ -0,0 +1,565 @@ +#!/usr/bin/python%0Aimport sys%0Aimport smbus%0Aimport time%0A%0A# Reads data from PCF8574 and prints the state of each port%0A%0Adef readPCF8574(busnumber,address):%0A address = int(address,16)%0A busnumber = int(1)%0A bus = smbus.SMBus(busnumber)%0A state = bus.read_byte(address);%0A%0A for i in range(0,8):%0A port = %22port %22 + str(i) %0A value = 1&(state%3E%3E7-i)%0A print str(port) + ': ' + str(value) %0A%0A%0A%0Aif len(sys.argv) != 3:%0A print %22Usage: python PCF8574.py bus address%22%0A exit(1)%0A%0Abus = sys.argv%5B1%5D%0Aaddress = sys.argv%5B2%5D%0A%0AreadPCF8574(bus,address)%0A
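In the decoded reader, each PCF8574 port value is bit (7 - i) of the single byte read over I2C. That bit extraction can be exercised without smbus or hardware:

def port_states(state_byte):
    # mirrors the record's loop: port 0 reads bit 7, ..., port 7 reads bit 0
    return {i: 1 & (state_byte >> (7 - i)) for i in range(8)}

print(port_states(0b10100101))
# {0: 1, 1: 0, 2: 1, 3: 0, 4: 0, 5: 1, 6: 0, 7: 1}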
ddbe9de5cfc5b412812096291db6a37d120e03ce
add plotting the distribution of fields and apgoee
py/plot_dustwapogee.py
py/plot_dustwapogee.py
Python
0
@@ -0,0 +1,1882 @@ +###############################################################################%0A# plot_dustwapogee: plot the dust-map at 5 kpc with the APOGEE fields in the %0A# sample overlayed%0A###############################################################################%0Aimport sys%0Aimport numpy%0Aimport healpy%0Afrom galpy.util import bovy_plot%0Aimport apogee.select.apogeeSelect%0Aimport dust%0Aimport define_rcsample%0A# nside to work at, 2048 is the max%0A_NSIDE=2048%0Adef plot_dustwapogee(plotname):%0A # Load the dust map%0A green15map= dust.load_green15(5.,nest=True,nside_out=_NSIDE)%0A green15map%5Bgreen15map == healpy.UNSEEN%5D= -1.%0A # plot it%0A healpy.visufunc.mollview(green15map,%0A nest=True,%0A xsize=4000,min=0.,max=.8,%0A format=r'$%25g$',%0A title='',%0A cmap='gist_yarg',%0A unit='$A_H%5C,(%5Cmathrm%7Bmag%7D)$')%0A # Load the RC data to get the fields%0A data= define_rcsample.get_rcsample()%0A loc_ids= numpy.array(list(set(data%5B'LOCATION_ID'%5D)))%0A # Load the selection function, just to get the field centers%0A apo= apogee.select.apogeeSelect(_justprocessobslog=True)%0A theta= numpy.empty(len(loc_ids))%0A phi= numpy.empty(len(loc_ids))%0A for ii,loc_id in enumerate(loc_ids):%0A tl, tb= apo.glonGlat(loc_id)%0A theta%5Bii%5D= (90.-tb)/180.*numpy.pi%0A phi%5Bii%5D= tl/180.*numpy.pi%0A hib= numpy.fabs((numpy.pi/2.-theta)) %3E (8./180.*numpy.pi)%0A healpy.visufunc.projplot(theta%5Bhib%5D,phi%5Bhib%5D,'o',ms=5.,mfc='none',mew=0.8,%0A mec='k')%0A lowb= True-hib%0A healpy.visufunc.projplot(theta%5Blowb%5D,phi%5Blowb%5D,'o',ms=5.,mfc='none',%0A mec='w',mew=0.8)%0A bovy_plot.bovy_end_print(plotname)%0A%0Aif __name__ == '__main__':%0A plot_dustwapogee(sys.argv%5B1%5D)%0A
139a634515061674d3832320791d35ff512d8a5a
Add a snippet.
python/print_stderr.py
python/print_stderr.py
Python
0.000002
@@ -0,0 +1,100 @@ +#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0Aimport sys%0A%0Aprint(%22Error message%22, file=sys.stderr)%0A
2909b4a7e46fe4a466e0c99abf90222c43f34d93
add tests for Every Election wrapper
polling_stations/apps/data_finder/tests/test_ee_wrapper.py
polling_stations/apps/data_finder/tests/test_ee_wrapper.py
Python
0
@@ -0,0 +1,1449 @@ +import mock%0Afrom django.test import TestCase%0Afrom data_finder.helpers import EveryElectionWrapper%0A%0A%0A# mock get_data() functions%0Adef get_data_exception(self, postcode):%0A raise Exception()%0A%0Adef get_data_no_elections(self, postcode):%0A return %5B%5D%0A%0Adef get_data_with_elections(self, postcode):%0A return %5B%0A %7B%7D, # no explanation key%0A %7B'explanation': None%7D, # null explanation key%0A %7B'explanation': 'some text'%7D, # explanation key contains text%0A %5D%0A%0Aclass EveryElectionWrapperTest(TestCase):%0A%0A @mock.patch(%22data_finder.helpers.EveryElectionWrapper.get_data%22, get_data_exception)%0A def test_exception(self):%0A ee = EveryElectionWrapper('AA11AA')%0A self.assertFalse(ee.request_success)%0A self.assertTrue(ee.has_election())%0A self.assertEqual(%5B%5D, ee.get_explanations())%0A%0A @mock.patch(%22data_finder.helpers.EveryElectionWrapper.get_data%22, get_data_no_elections)%0A def test_no_elections(self):%0A ee = EveryElectionWrapper('AA11AA')%0A self.assertTrue(ee.request_success)%0A self.assertFalse(ee.has_election())%0A self.assertEqual(%5B%5D, ee.get_explanations())%0A%0A @mock.patch(%22data_finder.helpers.EveryElectionWrapper.get_data%22, get_data_with_elections)%0A def test_elections(self):%0A ee = EveryElectionWrapper('AA11AA')%0A self.assertTrue(ee.request_success)%0A self.assertTrue(ee.has_election())%0A self.assertEqual(%5B'some text'%5D, ee.get_explanations())%0A
67d760f0a3ed081d43237e1b2106b86a4e6a56c6
add log handler
Util/LogHandler.py
Util/LogHandler.py
Python
0.000002
@@ -0,0 +1,2126 @@ +# -*- coding: utf-8 -*-%0A%22%22%22%0A-------------------------------------------------%0A File Name%EF%BC%9A LogHandler.py%0A Description :%0A Author : JHao%0A date%EF%BC%9A 2017/3/6%0A-------------------------------------------------%0A Change Activity:%0A 2017/3/6: log handler%0A-------------------------------------------------%0A%22%22%22%0A__author__ = 'JHao'%0A%0Aimport logging%0A%0Afrom logging.handlers import TimedRotatingFileHandler%0A%0A# %E6%97%A5%E5%BF%97%E7%BA%A7%E5%88%AB%0ACRITICAL = 50%0AFATAL = CRITICAL%0AERROR = 40%0AWARNING = 30%0AWARN = WARNING%0AINFO = 20%0ADEBUG = 10%0ANOTSET = 0%0A%0A%0Aclass LogHandler(logging.Logger):%0A %22%22%22%0A LogHandler%0A %22%22%22%0A%0A def __init__(self, name, level=DEBUG):%0A self.name = name%0A self.level = level%0A logging.Logger.__init__(self, self.name, level=level)%0A self.__setFileHandler__()%0A self.__setStreamHandler__()%0A%0A def __setFileHandler__(self, level=None):%0A %22%22%22%0A set file handler%0A :param level:%0A :return:%0A %22%22%22%0A file_name = '../log/%25s' %25 self.name%0A # %E8%AE%BE%E7%BD%AE%E6%97%A5%E5%BF%97%E5%9B%9E%E6%BB%9A, %E4%BF%9D%E5%AD%98%E5%9C%A8log%E7%9B%AE%E5%BD%95, %E4%B8%80%E5%A4%A9%E4%BF%9D%E5%AD%98%E4%B8%80%E4%B8%AA%E6%96%87%E4%BB%B6, %E4%BF%9D%E7%95%9915%E5%A4%A9%0A file_handler = TimedRotatingFileHandler(filename=file_name, when='D', interval=1, backupCount=15)%0A file_handler.suffix = '%25Y%25m%25d.log'%0A if not level:%0A file_handler.setLevel(self.level)%0A else:%0A file_handler.setLevel(level)%0A formatter = logging.Formatter('%25(asctime)s %25(filename)s%5Bline:%25(lineno)d%5D %25(levelname)s %25(message)s')%0A%0A file_handler.setFormatter(formatter)%0A self.addHandler(file_handler)%0A%0A def __setStreamHandler__(self, level=None):%0A %22%22%22%0A set stream handler%0A :param level:%0A :return:%0A %22%22%22%0A stream_handler = logging.StreamHandler()%0A formatter = logging.Formatter('%25(asctime)s %25(filename)s%5Bline:%25(lineno)d%5D %25(levelname)s %25(message)s')%0A stream_handler.setFormatter(formatter)%0A if not level:%0A stream_handler.setLevel(self.level)%0A else:%0A stream_handler.setLevel(level)%0A self.addHandler(stream_handler)%0A%0A%0Aif __name__ == '__main__':%0A # log = get_logger(%22aa%22)%0A # log.error(%22aa%22)%0A pass%0A
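Decoded, the LogHandler subclass wires one logger to a daily-rotating file handler (15 backups, %Y%m%d.log suffix) plus a console handler, both sharing one format string. The same wiring in plain stdlib calls, with the log path moved to the working directory (my simplification):

import logging
from logging.handlers import TimedRotatingFileHandler

fmt = logging.Formatter(
    '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s')

log = logging.getLogger('demo')
log.setLevel(logging.DEBUG)

file_handler = TimedRotatingFileHandler(
    'demo.log', when='D', interval=1, backupCount=15)  # rotate daily, keep 15 days
file_handler.suffix = '%Y%m%d.log'
file_handler.setFormatter(fmt)
log.addHandler(file_handler)

stream_handler = logging.StreamHandler()
stream_handler.setFormatter(fmt)
log.addHandler(stream_handler)

log.info('handlers configured')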
7331e1d1061a7a1ac9abc583d45746facfde9180
Create search-in-a-binary-search-tree.py
Python/search-in-a-binary-search-tree.py
Python/search-in-a-binary-search-tree.py
Python
0.000023
@@ -0,0 +1,1107 @@ +# Time: O(h)%0A# Space: O(1)%0A%0A# Given the root node of a binary search tree (BST) and a value.%0A# You need to find the node in the BST that the node's value equals the given value.%0A# Return the subtree rooted with that node.%0A# If such node doesn't exist, you should return NULL.%0A#%0A# For example, %0A#%0A# Given the tree:%0A# 4%0A# / %5C%0A# 2 7%0A# / %5C%0A# 1 3%0A#%0A# And the value to search: 2%0A# You should return this subtree:%0A#%0A# 2 %0A# / %5C %0A# 1 3%0A# In the example above,%0A# if we want to search the value 5,%0A# since there is no node with value 5, we should return NULL.%0A%0A# Definition for a binary tree node.%0Aclass TreeNode(object):%0A def __init__(self, x):%0A self.val = x%0A self.left = None%0A self.right = None%0A%0A%0Aclass Solution(object):%0A def searchBST(self, root, val):%0A %22%22%22%0A :type root: TreeNode%0A :type val: int%0A :rtype: TreeNode%0A %22%22%22%0A while root and val != root.val:%0A if val %3C root.val:%0A root = root.left%0A else:%0A root = root.right%0A return root%0A
37409639dc1afaa440ca47fe27469bf4c353bcf5
support / on Windoze too
atest/resources/TestCheckerLibrary.py
atest/resources/TestCheckerLibrary.py
import os.path import re from robot import utils from robot.output import readers from robot.common import Statistics from robot.libraries.BuiltIn import BuiltIn class TestCheckerLibrary: def process_output(self, path): try: print "Processing output '%s'" % path suite, errors = readers.process_output(path) except: raise RuntimeError('Processing output failed: %s' % utils.get_error_message()) setter = BuiltIn().set_suite_variable setter('$SUITE', process_suite(suite)) setter('$STATISTICS', Statistics(suite)) setter('$ERRORS', process_errors(errors)) def get_test_from_suite(self, suite, name): tests = self.get_tests_from_suite(suite, name) if len(tests) == 1: return tests[0] elif len(tests) == 0: err = "No test '%s' found from suite '%s'" else: err = "More than one test '%s' found from suite '%s'" raise RuntimeError(err % (name, suite.name)) def get_tests_from_suite(self, suite, name=None): tests = [ test for test in suite.tests if name is None or utils.eq(test.name, name) ] for subsuite in suite.suites: tests.extend(self.get_tests_from_suite(subsuite, name)) return tests def get_suite_from_suite(self, suite, name): suites = self.get_suites_from_suite(suite, name) if len(suites) == 1: return suites[0] elif len(suites) == 0: err = "No suite '%s' found from suite '%s'" else: err = "More than one suite '%s' found from suite '%s'" raise RuntimeError(err % (name, suite.name)) def get_suites_from_suite(self, suite, name): suites = utils.eq(suite.name, name) and [ suite ] or [] for subsuite in suite.suites: suites.extend(self.get_suites_from_suite(subsuite, name)) return suites def check_test_status(self, test, status=None, message=None): """Verifies that test's status and message are as expected. Expected status and message can be given as parameters. If expected status is not given, expected status and message are read from test's documentation. If documentation doesn't contain any of PASS, FAIL or ERROR, test's status is expected to be PASS. If status is given that is used. Expected message is documentation after given status. Expected message can also be regular expression. In that case expected match starts with REGEXP: , which is ignored in the regexp match. """ if status is not None: test.exp_status = status if message is not None: test.exp_message = message if test.exp_status != test.status: if test.exp_status == 'PASS': msg = "Test was expected to PASS but it FAILED. " msg += "Error message:\n" + test.message else: msg = "Test was expected to FAIL but it PASSED. " msg += "Expected message:\n" + test.exp_message raise AssertionError(msg) if test.exp_message == test.message: return if test.exp_message.startswith('REGEXP:'): pattern = test.exp_message.replace('REGEXP:', '', 1).strip() if re.match('^%s$' % pattern, test.message, re.DOTALL): return if test.exp_message.startswith('STARTS:'): start = test.exp_message.replace('STARTS:', '', 1).strip() if start == '': raise RuntimeError("Empty 'STARTS:' is not allowed") if test.message.startswith(start): return raise AssertionError("Wrong error message\n\n" "Expected:\n%s\n\nActual:\n%s\n" % (test.exp_message, test.message)) def check_suite_contains_tests(self, suite, *expected_names): actual_tests = [ test for test in self.get_tests_from_suite(suite) ] tests_msg = """ Expected tests : %s Actual tests : %s""" % (str(list(expected_names)), str(actual_tests)) expected_names = [ utils.normalize(name) for name in expected_names ] if len(actual_tests) != len(expected_names): raise AssertionError("Wrong number of tests." 
+ tests_msg) for test in actual_tests: if utils.eq_any(test.name, expected_names): print "Verifying test '%s'" % test.name self.check_test_status(test) expected_names.remove(utils.normalize(test.name)) else: raise AssertionError("Test '%s' was not expected to be run.%s" % (test.name, tests_msg)) if len(expected_names) != 0: raise Exception("Bug in test library") def get_node(self, path, node=None): dom = utils.DomWrapper(path) if node is None: return dom return dom.get_node(node) def process_suite(suite): for subsuite in suite.suites: process_suite(subsuite) for test in suite.tests: process_test(test) suite.test_count = suite.get_test_count() process_keyword(suite.setup) process_keyword(suite.teardown) return suite def process_test(test): if 'FAIL' in test.doc: test.exp_status = 'FAIL' test.exp_message = test.doc.split('FAIL', 1)[1].lstrip() else: test.exp_status = 'PASS' test.exp_message = '' test.kws = test.keywords test.keyword_count = test.kw_count = len(test.keywords) for kw in test.keywords: process_keyword(kw) process_keyword(test.setup) process_keyword(test.teardown) def process_keyword(kw): if kw is None: return kw.kws = kw.keywords kw.msgs = kw.messages kw.message_count = kw.msg_count = len(kw.messages) kw.keyword_count = kw.kw_count = len(kw.keywords) for subkw in kw.keywords: process_keyword(subkw) def process_errors(errors): errors.msgs = errors.messages errors.message_count = errors.msg_count = len(errors.messages) return errors
Python
0
@@ -6,13 +6,8 @@ t os -.path %0Aimp @@ -228,16 +228,57 @@ path):%0A + path = path.replace('/', os.sep)%0A
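This two-hunk diff decodes to a small portability fix: `import os.path` becomes `import os`, and the incoming path gets '/' normalized to os.sep before processing. The fix in isolation:

import os

def normalize(path):
    # no-op on POSIX; on Windows 'a/b/output.xml' becomes r'a\b\output.xml'
    return path.replace('/', os.sep)

print(normalize('atest/results/output.xml'))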
58311387849f8785fa964eb01e728c92bc0d8b61
Create levenshtein.py
levenshtein.py
levenshtein.py
Python
0.000001
@@ -0,0 +1,1341 @@ +%0A# source: http://en.wikibooks.org/wiki/Algorithm_Implementation/Strings/Levenshtein_distance%0A%0Adef levenshtein(source, target):%0A if len(source) %3C len(target):%0A return levenshtein(target, source)%0A %0A # So now we have len(source) %3E= len(target).%0A if len(target) == 0:%0A return len(source)%0A %0A # We call tuple() to force strings to be used as sequences%0A # ('c', 'a', 't', 's') - numpy uses them as values by default.%0A source = np.array(tuple(source))%0A target = np.array(tuple(target))%0A %0A # We use a dynamic programming algorithm, but with the%0A # added optimization that we only need the last two rows%0A # of the matrix.%0A previous_row = np.arange(target.size + 1)%0A for s in source:%0A # Insertion (target grows longer than source):%0A current_row = previous_row + 1%0A %0A # Substitution or matching:%0A # Target and source items are aligned, and either%0A # are different (cost of 1), or are the same (cost of 0).%0A current_row%5B1:%5D = np.minimum(%0A current_row%5B1:%5D,%0A np.add(previous_row%5B:-1%5D, target != s))%0A %0A # Deletion (target grows shorter than source):%0A current_row%5B1:%5D = np.minimum(%0A current_row%5B1:%5D,%0A current_row%5B0:-1%5D + 1)%0A %0A previous_row = current_row%0A %0A return previous_row%5B-1%5D%0A
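Decoded, this is the two-row NumPy Levenshtein from the wikibooks page cited in the file, but the committed snippet uses np without ever importing NumPy. The same code with the missing import supplied and a sanity check added:

import numpy as np

def levenshtein(source, target):
    if len(source) < len(target):
        return levenshtein(target, source)
    if len(target) == 0:
        return len(source)
    source = np.array(tuple(source))
    target = np.array(tuple(target))
    previous_row = np.arange(target.size + 1)
    for s in source:
        current_row = previous_row + 1                  # insertion
        current_row[1:] = np.minimum(current_row[1:],   # substitution or match
                                     np.add(previous_row[:-1], target != s))
        current_row[1:] = np.minimum(current_row[1:],   # deletion
                                     current_row[0:-1] + 1)
        previous_row = current_row
    return previous_row[-1]

assert levenshtein('kitten', 'sitting') == 3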
d205c9a5a2d92190676a30156e039f8cdd400629
Correct base API
pysis/sis.py
pysis/sis.py
#!/usr/bin/env python # -*- encoding: utf-8 -*- from datetime import datetime import calendar class SIS(object): __BASE_URL__ = 'http://api.sustainableis.com/v1/' __API_DOMAIN__ = 'api.sustainableis.com' #__BASE_URL__ = 'http://localhost:3000/v1/' #__API_DOMAIN__ = 'localhost:3000' """Main SIS object You can configure all services globally using the config dict. See the attributes in `pysis.core.client`. Examples: s = SIS(token='xyz...') s = SIS(token='xyz...', base_url='http://api.sustainableis.com/v2/') s = SIS(token='xyz...', enableParamChecks=False) """ def __init__(self, **config): from pysis.core.client import Client from pysis.resources.base import Resource from pysis.services.organizations import Organizations from pysis.services.facilities import Facilities from pysis.services.utilities import Utilities from pysis.services.outputs import Outputs from pysis.services.buildings import Buildings from pysis.services.feeds import Feeds from pysis.services.users import Users from pysis.services.blastcells import Blastcells from pysis.services.weather import Weather from pysis.services.configurations import Configurations from pysis.services.oauth import Oauth from pysis.services.workers import Workers from pysis.services.alerts import Alerts from pysis.services.emails import Emails from pysis.services.reports import Reports from pysis.services.metrics import Metrics enableParamChecks = True if 'enableParamChecks' in config: enableParamChecks = config['enableParamChecks'] Resource.setParamCheck(enableParamChecks) if 'api_domain' not in config: config['api_domain'] = self.__API_DOMAIN__ if 'base_url' not in config: config['base_url'] = self.__BASE_URL__ self._client = Client(**config) self._organizations = Organizations(self._client) self._facilities = Facilities(self._client) self._utilities = Utilities(self._client) self._outputs = Outputs(self._client) self._buildings = Buildings(self._client) self._feeds = Feeds(self._client) self._users = Users(self._client) self._blastcells = Blastcells(self._client) self._weather = Weather(self._client) self._configurations = Configurations(self._client) self._oauth = Oauth(self._client) self._workers = Workers(self._client) self._alerts = Alerts(self._client) self._emails = Emails(self._client) self._reports = Reports(self._client) self._metrics = Metrics(self._client) @property def organizations(self): return self._organizations @property def facilities(self): return self._facilities @property def outputs(self): return self._outputs @property def buildings(self): return self._buildings @property def users(self): return self._users @property def feeds(self): return self._feeds @property def blastcells(self): return self._blastcells @property def weather(self): return self._weather @property def configurations(self): return self._configurations @property def oauth(self): return self._oauth @property def workers(self): return self._workers @property def alerts(self): return self._alerts @property def emails(self): return self._emails @property def reports(self): return self._reports @property def utilities(self): return self._utilities @property def metrics(self): return self._metrics
Python
0.000003
@@ -138,33 +138,28 @@ p://api. -sustainableis.com +ndustrial.io /v1/'%0A @@ -182,33 +182,28 @@ = 'api. -sustainableis.com +ndustrial.io '%0A #_
a5b012db4cb4cc8a988c0ed37411194639dd1bbd
add tester.py module to pytools
lib/tester.py
lib/tester.py
Python
0.000006
@@ -0,0 +1,1346 @@ +#!/usr/bin/env python%0A%0A%22%22%22%0APackage: pytools%0AAuthor: Christopher Hanley%0A%0APurpose:%0A========%0AProvide driver function for package tests.%0A%0ADependencies:%0A=============%0A%0A- nose 0.10.4 or greater.%0A%0AUsage Example:%0A==============%0AAll packages will need to import jwtools.tester and add the following%0Afunction to the __init__.py of their package:%0A%0Adef test(*args,**kwds):%0A thisdir = os.path.dirname(os.path.abspath(__file__))%0A pytools.tester.test(curdir=thisdir)%0A%0A%0AThis assumes that all software packages are installed with the structure:%0A%0Apackage/%0A __init__.py%0A modules.py%0A /tests%0A%0AWhere the /tests subdirectory containts the nose tests.%0A%0A%0A%0A%22%22%22%0A%0A%0Afrom __future__ import division%0A%0Aimport os,sys%0A%0Adef test(*args,**kwds):%0A %22%22%22%0A Purpose:%0A ========%0A test: Run refcore nosetest suite of tests. The tests are located in the%0A /test directory of the installed modules.%0A%0A %22%22%22%0A%0A try:%0A thisdir = kwds%5B'curdir'%5D%0A except KeyError:%0A thisdir = os.path.dirname(os.path.abspath(__file__))%0A DIRS=%5B'/tests'%5D%0A%0A args=%5B%5D%0A for dirname in DIRS:%0A args.append('-w')%0A args.append(thisdir+dirname)%0A%0A result = False%0A%0A try:%0A import nose, nose.core%0A result = nose.run(argv=args)%0A except ImportError:%0A print %22Nose 0.10.4 or greater is required for running tests.%22%0A return result%0A%0A
6efc045d34f432723b52aa094c1caec3bf102e96
add sparse repeated updates benchmark
benchmarks/sparse_repeated_updates.py
benchmarks/sparse_repeated_updates.py
Python
0
@@ -0,0 +1,253 @@ +import numpy as np%0Aimport theano%0Aimport theano.tensor as T%0AfX = theano.config.floatX%0As = theano.shared(np.ones((10, 1), dtype=fX))%0Aidxs = %5B0, 1, 1%5D%0Afn = theano.function(%5B%5D, updates=%5B(s, T.inc_subtensor(s%5Bidxs%5D, s%5Bidxs%5D ** 2))%5D)%0Afn()%0Aprint s.get_value()%0A
4b43906004f9bfb6164bb2c0b95efaf1dbb881c8
add py
correction_image.py
correction_image.py
Python
0.00008
@@ -0,0 +1,99 @@ +#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0A%0A# Project Apricot%0A# Copyright (c) 2015 Daiki, Takanori.%0A
fc44d4463045e458796d13b3c97b34cf6ba47f61
Add script to create the player pitch weights.
bluechip/player/createpitchweights.py
bluechip/player/createpitchweights.py
Python
0
@@ -0,0 +1,710 @@ +import random%0Afrom player.models import Player, Pitch, PlayerPitchWeight%0A%0A#TODO: Need to centralize this function call.%0Arandom.seed(123456789)%0A%0Apitch_records = Pitch.objects.all().order_by('id')%0Apitches_count = pitch_records.count()%0Afor p in Player.objects.all():%0A%09weights = %5B%5D%0A%09sum_weights = 0%0A%09for _ in xrange(pitches_count):%0A%09%09mu = 1.0 / pitches_count%0A%09%09sigma = (2.0 / 3.0) * mu%0A%09%09w = random.normalvariate(mu, sigma)%0A%09%09w = max(w, 0.0)%0A%09%09weights.append(w)%0A%09%09sum_weights += w%0A%0A%09# Normalize weights before creating records%0A%09for i in xrange(len(weights)):%0A%09%09weights%5Bi%5D /= sum_weights%0A%0A%09j = 0%0A%09for pitch in pitch_records:%0A%09%09ppw = PlayerPitchWeight(player=p, pitch=pitch, weight=weights%5Bj%5D)%0A%09%09ppw.save()%0A%09%09j += 1%0A
d3f68c385da4d2fa864ba748f41785be01c26c34
Add py solution for 551. Student Attendance Record I
py/student-attendance-record-i.py
py/student-attendance-record-i.py
Python
0.998417
@@ -0,0 +1,491 @@ +class Solution(object):%0A def checkRecord(self, s):%0A %22%22%22%0A :type s: str%0A :rtype: bool%0A %22%22%22%0A A = False%0A L = 0%0A for c in s:%0A if c == 'L':%0A L += 1%0A if L %3E 2:%0A return False%0A else:%0A L = 0%0A if c == 'A':%0A if A:%0A return False%0A else:%0A A = True%0A%0A return True%0A
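The decoded check is one pass over the record string: fail on a second 'A' (absent) or on three consecutive 'L's (late). A runnable rendering with the flag names spelled out (the renames are mine):

def check_record(s):
    seen_absent = False
    late_run = 0
    for c in s:
        if c == 'L':
            late_run += 1
            if late_run > 2:
                return False
        else:
            late_run = 0                # any non-'L' breaks the late streak
        if c == 'A':
            if seen_absent:
                return False
            seen_absent = True
    return True

assert check_record('PPALLP') is True
assert check_record('PPALLL') is False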
a1ee4d90e0cf159f27274423b989c98844fbeba1
Create mytask1b.py
ml/mytask1b.py
ml/mytask1b.py
Python
0.999813
@@ -0,0 +1,1747 @@ +%22%22%22 Features%0A%0A%0AThe objective of this task is to explore the corpus, deals.txt. %0A%0A%0AThe deals.txt file is a collection of deal descriptions, separated by a new line, from which %0Awe want to glean the following insights:%0A%0A%0A1. What is the most popular term across all the deals?%0A2. What is the least popular term across all the deals?%0A3. How many types of guitars are mentioned across all the deals?%0A%0A%0A%22%22%22%0A%0A####################################################%0A# Solution 2 of Q1:%0A# Use topia.termextract 1.1.0 for term extraction%0A# %0A####################################################%0A%0A# load term extraction library%0A%0Afrom topia.termextract import extract%0Aextractor = extract.TermExtractor()%0A%0A# define the trivial permissive filter%0Aextractor.filter = extract.permissiveFilter%0A%0A# load data%0Aopenfile = open('..%5Cdata%5Cdeals.txt', 'r')%0A%0Ad = %7B%7D%0Anumberguitars = 0%0A%0Afor line in openfile:%0A%0A terms = extractor(line)%0A%0A # empty%0A if not terms:%0A continue%0A%0A # take each term from terms%0A for term in terms:%0A %0A # aggregate dictionary for each term%0A if not (term%5B0%5D in d):%0A d%5Bterm%5B0%5D%5D = 0%0A d%5Bterm%5B0%5D%5D += term%5B1%5D%0A%0A # count guitar%0A if 'guitar' in term or 'guitars' in term:%0A numberguitars += 1%0A else:%0A if 'Guitar' in term or 'Guitars' in term:%0A numberguitars += 1%0A%0Av = list(d.values())%0A%0Amaxvalue = max(v)%0Aminvalue = min(v)%0A%0Amaxkeys = %5B%5D%0Aminkeys = %5B%5D%0A%0Afor k, v in d.items():%0A if v == maxvalue:%0A maxkeys.append(k)%0A %0A if v == minvalue:%0A minkeys.append(k)%0A%0A# output results%0A%0Aprint %221. the most popular terms%5Cn%22, maxkeys%0A#print %222. the least popular terms%5Cn%22, minkeys%0Aprint %223. the number of types of guitars%22, numberguitars%0A%0A
714e2e2ae5e8412ef522dc64666e6548307eec07
Add the init method to the topic model.
model/topic.py
model/topic.py
Python
0
@@ -0,0 +1,159 @@ +class TopicModel(Query):%0A def __init__(self, db):%0A self.db = db%0A self.table_name = %22topic%22%0A super(TopicModel, self).__init__()%0A
c795f8e21d2b400134cb52ef7eae2cc7e26cfd99
Create ada.py
ada.py
ada.py
Python
0.00017
@@ -0,0 +1 @@ +%0A
028831c53d27452168b7a430eb713e01c966acb0
add privacy policy as first legal check
accelerator/migrations/0006_add_privacy_policy_legal_check.py
accelerator/migrations/0006_add_privacy_policy_legal_check.py
Python
0
@@ -0,0 +1,929 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.8 on 2018-05-14 09:09%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Adef add_privacy_policy_legal_check(apps, schema_editor):%0A LegalCheck = apps.get_model('accelerator', 'LegalCheck')%0A LegalCheck.objects.create(%0A name='accepted_privacy_policy',%0A title='The MassChallenge Privacy Policy',%0A url='https://masschallenge.org/privacy-policy'%0A )%0A%0A%0Adef remove_privacy_policy_legal_check(apps, schema_editor):%0A LegalCheck = apps.get_model('accelerator', 'LegalCheck')%0A LegalCheck.objects.filter(name='accepted_privacy_policy').delete()%0A%0A%0Aclass Migration(migrations.Migration):%0A dependencies = %5B%0A ('accelerator', '0005_legalcheck_legalcheckacceptance'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(add_privacy_policy_legal_check,%0A remove_privacy_policy_legal_check),%0A%0A %5D%0A
bc8158bb0d8cb14ae5a990bcaaf1d4bb1780245f
remove unused import
psutil/_compat.py
psutil/_compat.py
#!/usr/bin/env python # Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Module which provides compatibility with older Python versions.""" __all__ = ["PY3", "long", "xrange", "unicode", "callable", "lru_cache"] import collections import functools import sys try: import __builtin__ except ImportError: import builtins as __builtin__ # py3 PY3 = sys.version_info[0] == 3 if PY3: long = int xrange = range unicode = str else: long = long xrange = xrange unicode = unicode # removed in 3.0, reintroduced in 3.2 try: callable = callable except NameError: def callable(obj): return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) # --- stdlib additions # py 3.2 functools.lru_cache # Taken from: http://code.activestate.com/recipes/578078 # Credit: Raymond Hettinger try: from functools import lru_cache except ImportError: try: from threading import RLock except ImportError: from dummy_threading import RLock _CacheInfo = collections.namedtuple( "CacheInfo", ["hits", "misses", "maxsize", "currsize"]) class _HashedSeq(list): __slots__ = 'hashvalue' def __init__(self, tup, hash=hash): self[:] = tup self.hashvalue = hash(tup) def __hash__(self): return self.hashvalue def _make_key(args, kwds, typed, kwd_mark=(object(), ), fasttypes=set((int, str, frozenset, type(None))), sorted=sorted, tuple=tuple, type=type, len=len): key = args if kwds: sorted_items = sorted(kwds.items()) key += kwd_mark for item in sorted_items: key += item if typed: key += tuple(type(v) for v in args) if kwds: key += tuple(type(v) for k, v in sorted_items) elif len(key) == 1 and type(key[0]) in fasttypes: return key[0] return _HashedSeq(key) def lru_cache(maxsize=100, typed=False): """Least-recently-used cache decorator, see: http://docs.python.org/3/library/functools.html#functools.lru_cache """ def decorating_function(user_function): cache = dict() stats = [0, 0] HITS, MISSES = 0, 1 make_key = _make_key cache_get = cache.get _len = len lock = RLock() root = [] root[:] = [root, root, None, None] nonlocal_root = [root] PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 if maxsize == 0: def wrapper(*args, **kwds): result = user_function(*args, **kwds) stats[MISSES] += 1 return result elif maxsize is None: def wrapper(*args, **kwds): key = make_key(args, kwds, typed) result = cache_get(key, root) if result is not root: stats[HITS] += 1 return result result = user_function(*args, **kwds) cache[key] = result stats[MISSES] += 1 return result else: def wrapper(*args, **kwds): if kwds or typed: key = make_key(args, kwds, typed) else: key = args lock.acquire() try: link = cache_get(key) if link is not None: root, = nonlocal_root link_prev, link_next, key, result = link link_prev[NEXT] = link_next link_next[PREV] = link_prev last = root[PREV] last[NEXT] = root[PREV] = link link[PREV] = last link[NEXT] = root stats[HITS] += 1 return result finally: lock.release() result = user_function(*args, **kwds) lock.acquire() try: root, = nonlocal_root if key in cache: pass elif _len(cache) >= maxsize: oldroot = root oldroot[KEY] = key oldroot[RESULT] = result root = nonlocal_root[0] = oldroot[NEXT] oldkey = root[KEY] root[KEY] = root[RESULT] = None del cache[oldkey] cache[key] = oldroot else: last = root[PREV] link = [last, root, key, result] last[NEXT] = root[PREV] = cache[key] = link stats[MISSES] += 1 finally: lock.release() return result def cache_info(): """Report cache statistics""" lock.acquire() try: return _CacheInfo(stats[HITS], 
stats[MISSES], maxsize, len(cache)) finally: lock.release() def cache_clear(): """Clear the cache and cache statistics""" lock.acquire() try: cache.clear() root = nonlocal_root[0] root[:] = [root, root, None, None] stats[:] = [0, 0] finally: lock.release() wrapper.__wrapped__ = user_function wrapper.cache_info = cache_info wrapper.cache_clear = cache_clear return functools.update_wrapper(wrapper, user_function) return decorating_function
Python
0.000001
@@ -375,98 +375,8 @@ sys -%0Atry:%0A import __builtin__%0Aexcept ImportError:%0A import builtins as __builtin__ # py3 %0A%0APY
a2975adeedcc4aa33ee8b63bd404675bb3453089
Add broker app.
apps/broker.py
apps/broker.py
Python
0
@@ -0,0 +1,1754 @@ +%22%22%22%0AAlter item database.%0A%22%22%22%0Aimport logging%0Aimport sys%0Aimport os%0A%0A# import hack to avoid PYTHONPATH%0Atry:%0A import pydarkstar%0Aexcept ImportError:%0A root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))%0A root, dirs, files = next(os.walk(root))%0A if 'pydarkstar' in dirs:%0A sys.path.insert(1, root)%0A import pydarkstar%0A else:%0A raise%0A%0Aimport pydarkstar.logutils%0Aimport pydarkstar.options%0Aimport pydarkstar.common%0A%0Aclass Options(pydarkstar.options.Options):%0A %22%22%22%0A Reads options from config file, then from command line.%0A %22%22%22%0A def __init__(self):%0A super(Options, self).__init__(config='broker.yaml', description=__doc__)%0A%0A # logging%0A self.verbose = False # error, info, and debug%0A self.silent = False # error only%0A%0A # input and output%0A self.save = False # save config%0A%0A # logging%0A self.add_argument('--verbose', action='store_true',%0A help='report debug, info, and error')%0A self.add_argument('--silent', action='store_true',%0A help='report error only')%0A%0A # output%0A self.add_argument('--save', action='store_true',%0A help='save config file (and exit)')%0A%0Adef main():%0A %22%22%22%0A Main function.%0A %22%22%22%0A # get options%0A opts = Options()%0A opts.parse_args()%0A pydarkstar.logutils.basicConfig(%0A verbose=opts.verbose, silent=opts.silent, fname='broker.log')%0A logging.debug('start')%0A%0A # log options%0A opts.log_values(level=logging.INFO)%0A%0A # save options%0A if opts.save:%0A opts.save = False%0A opts.dump()%0A return%0A%0Adef cleanup():%0A logging.info('exit%5Cn')%0A%0Aif __name__ == '__main__':%0A with pydarkstar.logutils.capture():%0A main()%0A cleanup()
a8c08baeb2ee6268ac61613a23cc86cf885a9d09
Handle NULL deleted_at in migration 112.
nova/db/sqlalchemy/migrate_repo/versions/112_update_deleted_instance_data.py
nova/db/sqlalchemy/migrate_repo/versions/112_update_deleted_instance_data.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack LLC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime from sqlalchemy import MetaData, Table from sqlalchemy import and_, between TABLES = ('instance_metadata', 'instance_system_metadata', 'block_device_mapping') def upgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine instances = Table('instances', meta, autoload=True) instance_list = list(instances.select().\ where(instances.c.deleted == True).execute()) for table_name in TABLES: table = Table(table_name, meta, autoload=True) for instance in instance_list: table.update( (and_(table.c.deleted == True, table.c.instance_uuid == instance['uuid'], between(table.c.deleted_at, instance['deleted_at'] - datetime.timedelta(seconds=2), instance['deleted_at'] + datetime.timedelta(seconds=2))) ), {table.c.deleted: False, table.c.deleted_at: None} ).execute() def downgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine instances = Table('instances', meta, autoload=True) instance_list = list(instances.select().\ where(instances.c.deleted == True).execute()) for table_name in TABLES: table = Table(table_name, meta, autoload=True) for instance in instance_list: table.update( (and_(table.c.deleted == False, table.c.instance_uuid == instance['uuid']) ), {table.c.deleted: True, table.c.deleted_at: instance['deleted_at']} ).execute()
Python
0.000002
@@ -1239,32 +1239,100 @@ instance_list:%0A + if not instance%5B'deleted_at'%5D:%0A continue%0A tabl
7dbab1a6615a49513fe16c74550ddf2f52b0f698
Create 4-keys-keyboard.py
Python/4-keys-keyboard.py
Python/4-keys-keyboard.py
Python
0.999774
@@ -0,0 +1,339 @@ +# Time: O(n)%0A# Space: O(1)%0A%0Aclass Solution(object):%0A def maxA(self, N):%0A %22%22%22%0A :type N: int%0A :rtype: int%0A %22%22%22%0A if N %3C= 6:%0A return N%0A dp = %5Bi for i in range(N+1)%5D%0A for i in xrange(7, N+1):%0A dp%5Bi %25 6%5D = max(dp%5B(i-4) %25 6%5D*3,dp%5B(i-5) %25 6%5D*4)%0A return dp%5BN %25 6%5D%0A %0A
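Decoded, this solves the "4 Keys Keyboard" problem with the recurrence dp[i] = max(dp[i-4]*3, dp[i-5]*4): a select-all/copy plus pastes multiplies the buffer by 3 at a cost of 4 keystrokes, or by 4 at a cost of 5. The record squeezes the table into six slots with mod-6 indexing since only i-4 and i-5 are ever read; the plain O(n)-space version below is easier to verify:

def max_a(n):
    if n <= 6:
        return n                         # just press 'A' n times
    dp = list(range(n + 1))              # dp[i] = most A's printable with i keystrokes
    for i in range(7, n + 1):
        dp[i] = max(dp[i - 4] * 3, dp[i - 5] * 4)
    return dp[n]

assert max_a(3) == 3
assert max_a(7) == 9                     # A A A Ctrl-A Ctrl-C Ctrl-V Ctrl-V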
be9c88b630ea243afdef3d87ac0b316bd3300281
Add 283-move-zeroes.py
283-move-zeroes.py
283-move-zeroes.py
Python
0.011626
@@ -0,0 +1,1588 @@ +%22%22%22%0AQuestion:%0A Move Zeroes%0A%0A Given an array nums, write a function to move all 0's to the end of it while maintaining the relative order of the non-zero elements.%0A%0A For example, given nums = %5B0, 1, 0, 3, 12%5D, after calling your function, nums should be %5B1, 3, 12, 0, 0%5D.%0A%0A Note:%0A You must do this in-place without making a copy of the array.%0A Minimize the total number of operations.%0A%0A Credits:%0A Special thanks to @jianchao.li.fighter for adding this problem and creating all test cases.%0A%0APerformance:%0A 1. Total Accepted: 15730 Total Submissions: 38045 Difficulty: Easy%0A 2. Sorry. We do not have enough accepted submissions.%0A%22%22%22%0A%0Aclass Solution(object):%0A def moveZeroes(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: void Do not return anything, modify nums in-place instead.%0A %22%22%22%0A reached_zero_count = 0%0A%0A for idx, num in enumerate(nums):%0A if num == 0:%0A reached_zero_count += 1%0A if num != 0:%0A if reached_zero_count %3E 0: # make sure has reached at least a zero.%0A nums%5Bidx - reached_zero_count%5D = num%0A nums%5Bidx%5D = 0%0A%0A%0Adef test_func(nums, result):%0A Solution().moveZeroes(nums)%0A assert nums == result, %5Bnums, result%5D%0A%0Atest_func(%5B%5D, %5B%5D)%0Atest_func(%5B0%5D, %5B0%5D)%0Atest_func(%5B1%5D, %5B1%5D)%0Atest_func(%5B0, 0%5D, %5B0, 0%5D)%0Atest_func(%5B0, 1%5D, %5B1, 0%5D)%0Atest_func(%5B1, 1%5D, %5B1, 1%5D)%0Atest_func(%5B0, 1, 0, 3, 12%5D, %5B1, 3, 12, 0, 0%5D)%0Atest_func(%5B0, 1, 0, 3, 12, 0%5D, %5B1, 3, 12, 0, 0, 0%5D)%0Atest_func(%5B0, 1, 0, 0, 0, 3, 12, 0%5D, %5B1, 3, 12, 0, 0, 0, 0, 0%5D)%0A
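Decoded, the in-place pass counts the zeros seen so far and shifts every non-zero element left by that count, zeroing its old slot. Runnable with the record's own first test vector:

def move_zeroes(nums):
    zeros_seen = 0
    for idx, num in enumerate(nums):
        if num == 0:
            zeros_seen += 1
        elif zeros_seen:                  # only shift once at least one zero has been passed
            nums[idx - zeros_seen] = num
            nums[idx] = 0

nums = [0, 1, 0, 3, 12]
move_zeroes(nums)
assert nums == [1, 3, 12, 0, 0]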
5d7f2fdfb1b850aacaf29ba76c7e5ed441e6db63
Create 32losmasgrandes.py
32losmasgrandes.py
32losmasgrandes.py
Python
0.000132
@@ -0,0 +1,2230 @@ +#Integrantes del equipo%0A#Chavez Pavon Jose Manuel%0A#Ramirez Ramirez Servando%0A#Saules Rojas David%0A#Lopez Adriana%0A%0Aimport random %0A%0A#Funcino para crear una lista%0A#La cual usaremos para simular las alturas de las 32 personas%0A#La llenaremos de forma aleatoria%0Adef lista (): %0A%09l = %5B%5D #Creamos la lista de las %22alturas%22%0A%09for x in range (0,32):%0A%09%09l.append(random.randint(1,300))%0A%09return l #Regresamos la lista%0A%0A%0A#Funcion para obtener cual es la persona mas alta%0A#Recorremos la lista de dos en dos pregutando cual es mayor%0A#Los elementos mayores los agregamos a una nueva lista %0A#A los elementos que fueron comparados los metemos en un diccionario%0A#Para despues con ellos obtener el segundo mayor%0A#var %22lista%22 = La lista de las alturas %0A#var %22dic%22 = el diciconario de los elementos comparados %0Adef primero (lista, dic):%0A%09a=0 #Iterador 'a' a utilizar para recorrer la lista inicializado en cero%0A%09l2 = %5B%5D #Lista para ir agregando a los elementos mayores%0A%09#Clausula de escape%0A%09if len(lista) == 1:%0A%09%09print lista%5B0%5D%0A%09%09#Llamada a la funcion segundo, la cual nos dara el segundo mas alto%0A%09%09segundo(dic%5Blista%5B0%5D%5D)%0A%09%09return%0A%09#Recorremos la lista buscando a los elementos mayores%0A%09while a%3Clen(lista):%0A%09%09#Verificamos que elmento es mayor%0A%09%09if lista%5Ba%5D %3E lista%5Ba+1%5D:%0A%09%09%09l2.append(lista%5Ba%5D)#El mayor lo agregamos a l2%0A%09%09%09dic%5Blista%5Ba%5D%5D = str(lista%5Ba+1%5D) + %22 %22#Al menor lo agregamos al diccionario pasandole como llave al elemento mayor%0A%09%09#El caso contrario del if%0A%09%09else: %0A%09%09%09l2.append(lista%5Ba+1%5D)#El mayor lo agregamos a l2%0A%09%09%09dic%5Blista%5Ba+1%5D%5D = str(lista%5Ba%5D) + %22 %22#Al menor lo agregamos al diccionario pasandole como llave al elemento mayor%0A%09%09a+=2 #Aumentos nuestro iterador dos posiciones%0A%09primero(l2, dic) #Llamada recursiva de la funcion %0A%0A#Funcion para obtener el segundo elementos mas grande%0A#var %22cadena%22 = la cadena que nos da el diccionario que tiene como llave%0A#al elemento mas grande%0Adef segundo (cadena):%0A%09repe = cadena.split()#Separamos la cadena por espacios con split()%0A%09print max(repe)#Obtenemos el elemento mayor de la cadena con max() y lo imprimimos%0A%09return %0A%0A%0A%0A%0Al = lista()#Creamos la lista a ejecutar%0Adicc=%7B%7D#Diccionario para los elementos que fueron comparados pero no fueron mayores%0Aprimero(l,dicc)#Llamada de la funcion primero%0A%0A%0A
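The comments in this record are Spanish: the script draws 32 random "heights", finds the tallest by pairwise knockout (función primero), and picks the second tallest from the candidates the winner eliminated, which it accumulates in a dictionary (función segundo). An English-commented sketch of the same knockout idea:

import random

def largest_two(values):
    # knockout tournament over a power-of-two list; track each winner's victims
    beaten = {i: [] for i in range(len(values))}
    survivors = list(range(len(values)))
    while len(survivors) > 1:
        next_round = []
        for a, b in zip(survivors[::2], survivors[1::2]):
            winner, loser = (a, b) if values[a] > values[b] else (b, a)
            beaten[winner].append(loser)
            next_round.append(winner)
        survivors = next_round
    champion = survivors[0]
    # the runner-up must have lost directly to the champion
    runner_up = max(beaten[champion], key=lambda i: values[i])
    return values[champion], values[runner_up]

heights = [random.randint(1, 300) for _ in range(32)]
print(largest_two(heights))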
a7ccd7bc02476cfad85280ff1e742671453360de
Add Digital Outcomes and Specialists to frameworks
migrations/versions/420_dos_is_coming.py
migrations/versions/420_dos_is_coming.py
Python
0
@@ -0,0 +1,890 @@ +%22%22%22DOS is coming%0A%0ARevision ID: 420%0ARevises: 410_remove_empty_drafts%0ACreate Date: 2015-11-16 14:10:35.814066%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '420'%0Adown_revision = '410_remove_empty_drafts'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0Afrom app.models import Framework%0A%0A%0Adef upgrade():%0A op.execute(%22COMMIT%22)%0A op.execute(%22ALTER TYPE framework_enum ADD VALUE IF NOT EXISTS 'dos' after 'gcloud'%22)%0A%0A framework = Framework.query.filter(Framework.slug == 'digital-outcomes-and-specialists').first()%0A%0A if not framework:%0A op.execute(%22%22%22%0A INSERT INTO frameworks (name, framework, status, slug)%0A values('Digital Outcomes and Specialists', 'dos', 'coming', 'digital-outcomes-and-specialists')%0A %22%22%22)%0A%0A%0Adef downgrade():%0A op.execute(%22%22%22%0A DELETE FROM frameworks where slug='digital-outcomes-and-specialists'%0A %22%22%22)%0A
273f0bd289d62c6980f095b0a8bb41a973b0678f
add import script for Bradford
polling_stations/apps/data_collection/management/commands/import_bradford.py
polling_stations/apps/data_collection/management/commands/import_bradford.py
Python
0
@@ -0,0 +1,408 @@ +from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter%0A%0Aclass Command(BaseXpressDemocracyClubCsvImporter):%0A council_id = 'E08000032'%0A addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017.tsvJune2017.tsv'%0A stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017.tsvJune2017.tsv'%0A elections = %5B'parl.2017-06-08'%5D%0A csv_delimiter = '%5Ct'%0A
12ba7e0c6db91f5ee46a1be9acaece110f98b911
add bigwig file reader
PyMaSC/bwreader.py
PyMaSC/bwreader.py
Python
0
@@ -0,0 +1,2180 @@ +import os%0A%0Aimport wWigIO%0A%0A%0Aclass BigWigFile(object):%0A @staticmethod%0A def wigToBigWig(wigfile, sizefile, bwfile):%0A wWigIO.wigToBigWig(wigfile, sizefile, bwfile)%0A%0A @staticmethod%0A def bigWigToWig(bwfile, wigfile):%0A wWigIO.bigWigToWig(bwfile, wigfile)%0A%0A def __init__(self, path, chrom_size=None):%0A if not os.path.exists(path) and path != '-':%0A raise IOError(%22input file '%7B0%7D' dose not exist.%22.format(path))%0A elif path == '-':%0A path = %22stdin%22%0A%0A prefix, ext = os.path.splitext(path)%5B0%5D%0A if ext == %22wig%22:%0A bwfile = prefix + %22.bw%22%0A if os.path.exists(bwfile):%0A self.path = bwfile%0A else:%0A if chrom_size is None:%0A raise IOError(%22Failed to convet wig to bigwig. 'chrom_size' file required.%22)%0A BigWigFile.wigToBigWig(path, chrom_size, bwfile)%0A self.path = bwfile%0A else:%0A self.path = path%0A%0A wWigIO.open(self.path)%0A self.set_chromsizes()%0A%0A self.closed = False%0A%0A def set_chromsizes(self):%0A self.chromsizes = wWigIO.getChromSize(self.path)%0A%0A def _getIntervals(self, chrom, begin, end):%0A wigs = wWigIO.getIntervals(self.path, chrom, begin, end)%0A%0A if wigs == 1:%0A raise ValueError(%22wWigIO.getIntervals doesn't have correct parameters.%22)%0A if wigs == 2:%0A raise ValueError(%22Fail to open BigWig file.%22)%0A%0A return wigs%0A%0A def fetch(self, chrom=None, begin=None, end=None):%0A if chrom is None:%0A chroms = self.chromsizes.keys()%0A else:%0A chroms = %5Bchrom%5D%0A if begin is None or begin %3C 0:%0A begin = 0%0A if end is None:%0A end = 0%0A%0A for chrom in chroms:%0A for wig in self._getIntervals(chrom, begin, end):%0A yield chrom, wig%5B0%5D, wig%5B1%5D, wig%5B2%5D%0A%0A def close(self):%0A if not self.closed:%0A wWigIO.close(self.infile)%0A self.closed = True%0A%0A def __enter__(self):%0A return self%0A%0A def __exit__(self, type, value, traceback):%0A self.close()%0A%0A def __del__(self):%0A self.close()%0A
c0f690fe1d43edc4fc5cc4b3aeb40594c1abd674
Create pollard_rho_algorithm.py
daedalus/attacks/pollard_rho_algorithm.py
daedalus/attacks/pollard_rho_algorithm.py
Python
0.000772
@@ -0,0 +1,567 @@ +#pollard rho algorithm of integer factorization%0A%0A%0Adef gcd(a,b):%0A if a is 0:%0A return b%0A return gcd(b%25a,a)%0A%0Adef pollard_rho(number,x,y):%0A d = 1%0A while d is 1:%0A x = (x**2+1)%25number%0A for i in range(0,2,1):%0A y = (y**2+1)%25number%0A if x%3Ey:%0A z = x-y%0A else:%0A z=y-x%0A d = gcd(z,number)%0A return d%0A%0Ax=2%0Ay=2%0Anumber = 84923983%0Afactor = pollard_rho(number,x,y)%0Awhile factor is number or 1:%0A x = x+1%0A y = y+1%0A pollard_rho(number,x,y)%0Afactor2 = int(number/factor)%0A%0Aprint(factor,factor2)%0A
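Decoded, this Pollard rho driver has two rough edges: integers are compared with `is` instead of ==, and the retry condition `while factor is number or 1` parses as `(factor is number) or 1`, which is always truthy, while the retried result is never assigned. A corrected sketch of the same Floyd-cycle factorizer (iteration structure as in the record; 8051 = 83 x 97 swapped in as an easy-to-check demo input):

from math import gcd

def pollard_rho(n, x=2, y=2, c=1):
    # f(v) = v*v + c mod n; x steps once, y twice per round (Floyd cycle detection)
    d = 1
    while d == 1:
        x = (x * x + c) % n
        y = (y * y + c) % n
        y = (y * y + c) % n
        d = gcd(abs(x - y), n)
    return d             # a nontrivial factor, or n itself on failure (retry with another c)

n = 8051
f = pollard_rho(n)
print(f, n // f)         # 97 83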
c5dfcffdf743e2c26b8dba6e3be8aee7d7aaa608
Test `write_*` and `join_*` on bytes
test/test_join_bytes.py
test/test_join_bytes.py
Python
0.000004
@@ -0,0 +1,1677 @@ +import re%0Aimport linesep%0A%0Atry:%0A from StringIO import StringIO as BytesIO%0Aexcept ImportError:%0A from io import BytesIO%0A%0A# Based on %3Chttps://pytest.org/latest/example/parametrize.html#a-quick-port-of-testscenarios%3E%0Adef pytest_generate_tests(metafunc):%0A idlist = %5B%5D%0A argvalues = %5B%5D%0A for scenario in metafunc.module.scenarios:%0A idlist.append(scenario%5B0%5D)%0A argvalues.append(%5Bscenario%5B1%5D%5Bargname%5D for argname in metafunc.fixturenames%5D)%0A metafunc.parametrize(metafunc.fixturenames, argvalues, ids=idlist, scope=%22module%22)%0A%0Ascenarios = %5B%0A%0A ('empty', %7B%0A %22entries%22: %5B%5D,%0A %22sep%22: b'%5Cn',%0A %22preceded%22: b'',%0A %22terminated%22: b'',%0A %22separated%22: b'',%0A %7D),%0A%0A ('empty_str', %7B%0A %22entries%22: %5Bb''%5D,%0A %22sep%22: b'%5Cn',%0A %22preceded%22: b'%5Cn',%0A %22terminated%22: b'%5Cn',%0A %22separated%22: b'',%0A %7D),%0A%0A%5D%0A%0Adef test_join_preceded(entries, sep, preceded):%0A assert linesep.join_preceded(entries, sep) == preceded%0A%0Adef test_join_terminated(entries, sep, terminated):%0A assert linesep.join_terminated(entries, sep) == terminated%0A%0Adef test_join_separated(entries, sep, separated):%0A assert linesep.join_separated(entries, sep) == separated%0A%0Adef test_write_preceded(entries, sep, preceded):%0A fp = BytesIO()%0A linesep.write_preceded(fp, entries, sep)%0A assert fp.getvalue() == preceded%0A%0Adef test_write_terminated(entries, sep, terminated):%0A fp = BytesIO()%0A linesep.write_terminated(fp, entries, sep)%0A assert fp.getvalue() == terminated%0A%0Adef test_write_separated(entries, sep, separated):%0A fp = BytesIO()%0A linesep.write_separated(fp, entries, sep)%0A assert fp.getvalue() == separated%0A
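Note: the three join_* styles under test differ only in where the separator goes. A reference model of the expected behaviour (an assumption for illustration, not the linesep package's actual implementation):

def join_preceded(entries, sep):
    return b''.join(sep + e for e in entries)   # sep before each entry

def join_terminated(entries, sep):
    return b''.join(e + sep for e in entries)   # sep after each entry

def join_separated(entries, sep):
    return sep.join(entries)                    # sep only between entries

# Consistent with the scenarios above:
assert join_preceded([b''], b'\n') == b'\n'
assert join_separated([b''], b'\n') == b''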
a30cd68e77242df4efadc75c4390dd8a3ce68612
Add data migration for Audit's empty status
src/ggrc/migrations/versions/20170103101308_42b22b9ca859__fix_audit_empty_status.py
src/ggrc/migrations/versions/20170103101308_42b22b9ca859__fix_audit_empty_status.py
Python
0
@@ -0,0 +1,1071 @@ +# Copyright (C) 2016 Google Inc.%0A# Licensed under http://www.apache.org/licenses/LICENSE-2.0 %3Csee LICENSE file%3E%0A%0A%22%22%22%0AFix audit empty status%0A%0ACreate Date: 2016-12-22 13:53:24.497701%0A%22%22%22%0A# disable Invalid constant name pylint warning for mandatory Alembic variables.%0A# pylint: disable=invalid-name%0A%0Aimport sqlalchemy as sa%0A%0Afrom alembic import op%0A%0A%0A# revision identifiers, used by Alembic.%0Arevision = '42b22b9ca859'%0Adown_revision = '4fcaef05479f'%0A%0A%0AVALID_STATES = (%0A u'Planned', u'In Progress', u'Manager Review',%0A u'Ready for External Review', u'Completed'%0A)%0A%0A%0Adef upgrade():%0A %22%22%22Upgrade database schema and/or data, creating a new revision.%22%22%22%0A op.execute(%22UPDATE audits SET status='Planned' WHERE status=0%22)%0A op.alter_column('audits', 'status', nullable=True, type_=sa.String(250),%0A existing_type=sa.Enum(*VALID_STATES))%0A%0A%0Adef downgrade():%0A %22%22%22Downgrade database schema and/or data back to the previous revision.%22%22%22%0A op.alter_column('audits', 'status', nullable=False,%0A type_=sa.Enum(*VALID_STATES), existing_type=sa.String)%0A
56b44a5a510390913e2b9c9909218428842dcde8
Match old_user_id to user_id and old_team_id to team_id
migrations/versions/542fd8471e84_match_old_to_new_user_and_team_columns.py
migrations/versions/542fd8471e84_match_old_to_new_user_and_team_columns.py
Python
0.999503
@@ -0,0 +1,2015 @@ +# -*- coding: utf-8 -*-%0A%22%22%22Match old to new user and team columns%0A%0ARevision ID: 542fd8471e84%0ARevises: 382cde270594%0ACreate Date: 2020-04-07 03:52:04.415019%0A%0A%22%22%22%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A# revision identifiers, used by Alembic.%0Arevision = '542fd8471e84'%0Adown_revision = '382cde270594'%0Abranch_labels = None%0Adepends_on = None%0A%0A# (table, old, new)%0Amigrate_user_columns = %5B%0A ('comment', 'old_user_id', 'user_id'),%0A ('contact_exchange', 'old_user_id', 'user_id'),%0A ('participant', 'old_user_id', 'user_id'),%0A ('project', 'old_user_id', 'user_id'),%0A ('proposal', 'old_speaker_id', 'speaker_id'),%0A ('proposal', 'old_user_id', 'user_id'),%0A ('rsvp', 'old_user_id', 'user_id'),%0A ('saved_project', 'old_user_id', 'user_id'),%0A ('saved_session', 'old_user_id', 'user_id'),%0A ('vote', 'old_user_id', 'user_id'),%0A%5D%0A%0A# (table, old, new)%0Amigrate_team_columns = %5B%0A ('profile', 'old_admin_team_id', 'admin_team_id'),%0A ('project', 'old_admin_team_id', 'admin_team_id'),%0A ('project', 'old_checkin_team_id', 'checkin_team_id'),%0A ('project', 'old_review_team_id', 'review_team_id'),%0A%5D%0A%0A%0Adef upgrade():%0A for table, old, new in migrate_user_columns:%0A print(f%22Upgrading %7Btable%7D.%7Bnew%7D%22) # NOQA: T001%0A op.execute(%0A sa.DDL(%0A f'''%0A UPDATE %22%7Btable%7D%22 SET %22%7Bnew%7D%22 = %22user%22.%22id%22%0A FROM %22user%22, %22old_user%22%0A WHERE %22%7Btable%7D%22.%22%7Bold%7D%22 = %22old_user%22.%22id%22%0A AND %22old_user%22.%22uuid%22 = %22user%22.%22uuid%22;%0A '''%0A )%0A )%0A%0A for table, old, new in migrate_team_columns:%0A print(f%22Upgrading %7Btable%7D.%7Bnew%7D%22) # NOQA: T001%0A op.execute(%0A sa.DDL(%0A f'''%0A UPDATE %22%7Btable%7D%22 SET %22%7Bnew%7D%22 = %22team%22.%22id%22%0A FROM %22team%22, %22old_team%22%0A WHERE %22%7Btable%7D%22.%22%7Bold%7D%22 = %22old_team%22.%22id%22%0A AND %22old_team%22.%22uuid%22 = %22team%22.%22uuid%22;%0A '''%0A )%0A )%0A%0A%0Adef downgrade():%0A pass%0A
52eb461f1679f134aed25c221cfcc63abd8d3768
add test
test/test_importers/test_youtube_importer.py
test/test_importers/test_youtube_importer.py
Python
0.000002
@@ -0,0 +1,1581 @@ +# -*- coding: utf8 -*-%0A# This file is part of PyBossa.%0A#%0A# Copyright (C) 2016 SciFabric LTD.%0A#%0A# PyBossa is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A#%0A# PyBossa is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the GNU Affero General Public License%0A# along with PyBossa. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0Afrom mock import patch, Mock%0Afrom pybossa.importers.youtubeapi import BulkTaskYoutubeImport%0A%0Adef create_importer_with_form_data(**form_data):%0A    with patch('pybossa.importers.youtubeapi.build'):%0A        form_data%5B'youtube_api_server_key'%5D = 'apikey'%0A        importer = BulkTaskYoutubeImport(**form_data)%0A    importer.client.api = Mock()%0A    return importer%0A%0A%0Aclass TestBulkYoutubeImport(object):%0A%0A    form_data = %7B%0A        'playlist_url': 'https://www.youtube.com/playlist?list=playlistid',%0A        'youtube_api_server_key': 'apikey'%0A    %7D%0A%0A    def test_count_tasks_returns_0_if_no_files_to_import(self):%0A        form_data = %7B%0A            'playlist_url': '',%0A            'youtube_api_server_key': 'apikey'%0A        %7D%0A        number_of_tasks = BulkTaskYoutubeImport(**form_data).count_tasks()%0A%0A        assert number_of_tasks == 0, number_of_tasks%0A
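Note: the comma after the playlist URL in form_data matters more than it looks. Python concatenates adjacent string literals, so without that comma the two dict entries collapse into one merged string followed by a stray colon, which fails to parse. A tiny illustration of the hazard:

s = 'https://example.com/' 'path'   # implicit string concatenation
assert s == 'https://example.com/path'
# In a dict literal, a missing comma therefore yields something like
# {'key': 'value1' 'key2': ...}, which is a SyntaxError.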
1e9a64fe6324d8b4ac96daafa7427e9f55e6dd38
add Geom.decompose tests
tests/gobj/test_geom.py
tests/gobj/test_geom.py
Python
0.000001
@@ -0,0 +1,1142 @@ +from panda3d import core%0A%0Aempty_format = core.GeomVertexFormat.get_empty()%0A%0A%0Adef test_geom_decompose_in_place():%0A vertex_data = core.GeomVertexData(%22%22, empty_format, core.GeomEnums.UH_static)%0A prim = core.GeomTristrips(core.GeomEnums.UH_static)%0A prim.add_vertex(0)%0A prim.add_vertex(1)%0A prim.add_vertex(2)%0A prim.add_vertex(3)%0A prim.close_primitive()%0A%0A geom = core.Geom(vertex_data)%0A geom.add_primitive(prim)%0A%0A geom.decompose_in_place()%0A%0A prim = geom.get_primitive(0)%0A assert tuple(prim.get_vertex_list()) == (0, 1, 2, 2, 1, 3)%0A%0A%0Adef test_geom_decompose():%0A vertex_data = core.GeomVertexData(%22%22, empty_format, core.GeomEnums.UH_static)%0A prim = core.GeomTristrips(core.GeomEnums.UH_static)%0A prim.add_vertex(0)%0A prim.add_vertex(1)%0A prim.add_vertex(2)%0A prim.add_vertex(3)%0A prim.close_primitive()%0A%0A geom = core.Geom(vertex_data)%0A geom.add_primitive(prim)%0A%0A new_geom = geom.decompose()%0A%0A new_prim = new_geom.get_primitive(0)%0A assert tuple(new_prim.get_vertex_list()) == (0, 1, 2, 2, 1, 3)%0A%0A # Old primitive should still be unchanged%0A assert prim == geom.get_primitive(0)%0A
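Note: the expected index list (0, 1, 2, 2, 1, 3) follows from how a triangle strip decomposes: each new vertex forms a triangle with the previous two, and every other triangle swaps its first two indices to keep a consistent winding order. A small pure-Python model of that rule (illustrative, not Panda3D's implementation):

def decompose_strip(indices):
    triangles = []
    for i in range(len(indices) - 2):
        a, b, c = indices[i], indices[i + 1], indices[i + 2]
        if i % 2 == 1:
            a, b = b, a  # flip winding on odd triangles
        triangles.extend((a, b, c))
    return tuple(triangles)

assert decompose_strip([0, 1, 2, 3]) == (0, 1, 2, 2, 1, 3)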
66b5a1089ed0ce2e615f889f35b5e39db91950ae
Fix serving uploaded files during development.
mezzanine/core/management/commands/runserver.py
mezzanine/core/management/commands/runserver.py
import os

from django.conf import settings
from django.contrib.staticfiles.management.commands import runserver
from django.contrib.staticfiles.handlers import StaticFilesHandler
from django.http import Http404
from django.views.static import serve


class MezzStaticFilesHandler(StaticFilesHandler):

    def get_response(self, request):
        try:
            return super(MezzStaticFilesHandler, self).get_response(request)
        except Http404:
            handled = (settings.STATIC_URL, settings.MEDIA_URL)
            if request.path.startswith(handled):
                path = self.file_path(request.path).replace(os.sep, "/")
                return serve(request, path, document_root=settings.STATIC_ROOT)
            raise


class Command(runserver.Command):
    """
    Overrides runserver so that we can serve uploaded files during
    development, and not require every single developer on every single
    one of their projects to have to set up multiple web server aliases
    for serving static content.

    See https://code.djangoproject.com/ticket/15199

    For ease, we also serve any static files that have been stored under
    the project's ``STATIC_ROOT``.
    """

    def get_handler(self, *args, **options):
        handler = super(Command, self).get_handler(*args, **options)
        if settings.DEBUG or options["insecure_serving"]:
            handler = MezzStaticFilesHandler(handler)
        return handler
Python
0
@@ -178,40 +178,8 @@ ler%0A -from django.http import Http404%0A from @@ -212,16 +212,16 @@ t serve%0A + %0A%0Aclass @@ -306,25 +306,8 @@ t):%0A - try:%0A @@ -312,20 +312,24 @@ re -turn +sponse = super(M @@ -383,36 +383,8 @@ st)%0A - except Http404:%0A @@ -447,22 +447,50 @@ - if +if response.status_code == 404 and request @@ -516,20 +516,16 @@ ndled):%0A - @@ -593,28 +593,24 @@ - - return serve @@ -665,26 +665,8 @@ OT)%0A - raise%0A %0A%0Acl
93a7f4cb914de537e477a6c6bd45e0aa28ce2e4f
update model fields
modelview/migrations/0053_auto_20200408_1442.py
modelview/migrations/0053_auto_20200408_1442.py
Python
0.000001
@@ -0,0 +1,1020 @@ +# Generated by Django 3.0 on 2020-04-08 12:42%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('modelview', '0052_auto_20200408_1308'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='energyframework',%0A name='data_postprocessing',%0A field=models.BooleanField(default=False, help_text='Which output format(s) can the framework apply? Please list!', verbose_name='data postprocessing'),%0A ),%0A migrations.AlterField(%0A model_name='energyframework',%0A name='agricultural_demand',%0A field=models.BooleanField(default=False, help_text='Which agricultural demands are already modelled with the framework?', verbose_name='Agricultural demand'),%0A ),%0A migrations.AlterField(%0A model_name='energyframework',%0A name='gm_singleNode',%0A field=models.BooleanField(default=False, verbose_name='Single-node model'),%0A ),%0A %5D%0A
bc52778a5ed9ee44f40400cc2693f86318434527
Add missing file
metashare/repository/editor/lang.py
metashare/repository/editor/lang.py
Python
0.000006
@@ -0,0 +1,1532 @@ +%0Afrom xml.etree.ElementTree import XML%0Aimport os%0Aimport logging%0Afrom metashare.settings import LOG_LEVEL, LOG_HANDLER%0Aimport pycountry%0A%0A# Setup logging support.%0Alogging.basicConfig(level=LOG_LEVEL)%0ALOGGER = logging.getLogger('metashare.xml_utils')%0ALOGGER.addHandler(LOG_HANDLER)%0A%0Adef read_langs(filename):%0A    if not os.path.isfile(filename):%0A        LOGGER.error('read_langs: %7B0%7D not found'.format(filename))%0A        return None%0A%0A    file_hnd = os.open(filename, os.O_RDONLY)%0A    data = os.read(file_hnd, 10000)%0A    os.close(file_hnd)%0A    xml_langs = XML(data)%0A    return xml_langs%0A%0Adef read_languages():%0A    langs = pycountry.languages%0A    lang_list = %5B%5D%0A    for index in range(len(langs.objects)):%0A        lang = langs.objects%5Bindex%5D%0A        if hasattr(lang, 'alpha2'):%0A            lang_item = (index, lang.alpha2, lang.name)%0A            lang_list.append(lang_item)%0A        else:%0A            #lang_item = (index, '', lang.name)%0A            pass%0A    return lang_list%0A%0Adef read_lang_alpha2():%0A    langs = pycountry.languages%0A    lang_list = %5B%5D%0A    for index in range(len(langs.objects)):%0A        lang = langs.objects%5Bindex%5D%0A        if hasattr(lang, 'alpha2'):%0A            lang_item = (lang.alpha2)%0A            lang_list.append(lang_item)%0A    return lang_list%0A%0Adef get_lang_list(xml_tree):%0A    lang_el_list = xml_tree.findall('lang')%0A    lang_list = %5B%5D%0A    for el in lang_el_list:%0A        lang_id = el.find('id').text%0A        lang_name = el.find('name').text%0A        lang_list.append((lang_id, lang_name))%0A    return lang_list%0A
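Note: the alpha2 attribute used above belongs to the old pycountry API; newer pycountry releases renamed it to alpha_2. An equivalent lookup against the modern API, as a hedged sketch:

import pycountry

lang = pycountry.languages.get(alpha_2='en')
print(lang.alpha_2, lang.name)  # en English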
e580995de78c3658951b119577a0f7c335352e13
Create feature_class_info_to_csv.py
feature_class_info_to_csv.py
feature_class_info_to_csv.py
Python
0.000003
@@ -0,0 +1,835 @@ +import arcpy%0Aimport os%0Aimport time%0Aimport csv%0A%0A%0Abegin_time = time.clock()%0A%0Aarcpy.env.workspace = ws = r%22%5C%5C192-86%5CDFSRoot%5CData%5Callenj%5CDesktop%5Cgdb%5Ctest.gdb%22%0Amrcsv = r%22%5C%5C192-86%5CDFSRoot%5CData%5Callenj%5CDesktop%5Cgdb%5Cwrite.csv%22%0A%0A%0Awriter = csv.writer(open(mrcsv, 'a'))%0Awriter.writerow(%5B%22Feature%22,%22Feature_Count%22,%22Extents%22%5D)%0A%0Ac = 0%0Afor fds in arcpy.ListDatasets('','feature') + %5B''%5D:%0A    for fc in arcpy.ListFeatureClasses('','',fds):%0A        print fc%0A        x = fc%0A        y = arcpy.GetCount_management(fc)%0A        z = %22meow%22%0A        row = %5B(x),(y),(z)%5D%0A        writer.writerow(row)%0A        c = c + 1%0A%0Aprint %22Feature Class Count:%22%0Aprint c%0Aprint %22--------------%22%0A%0Aend_time = time.clock()%0Aprint %22Elapsed Time:%22%0Aprint (end_time - begin_time)%0Aprint %22Seconds%22%0Aprint %22--------------%22%0Aprint %22Goodbye%22%0A
05a006e2cf22434eb70208fc365c9a23b9dd8892
Add lint-test-expectations to Blink PRESUBMIT.py.
PRESUBMIT.py
PRESUBMIT.py
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Top-level presubmit script for Blink.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""

_EXCLUDED_PATHS = ()


def _CheckForVersionControlConflictsInFile(input_api, f):
  pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  errors = []
  for line_num, line in f.ChangedContents():
    if pattern.match(line):
      errors.append('    %s:%d %s' % (f.LocalPath(), line_num, line))
  return errors


def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  errors = []
  for f in input_api.AffectedFiles():
    errors.extend(_CheckForVersionControlConflictsInFile(input_api, f))

  results = []
  if errors:
    results.append(output_api.PresubmitError(
        'Version control conflict markers found, please resolve.', errors))
  return results


def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  # We should figure out what license checks we actually want to use.
  license_header = r'.*'

  results = []
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api, excluded_paths=_EXCLUDED_PATHS, maxlen=250,
      license_header=license_header))
  results.extend(_CheckForVersionControlConflicts(input_api, output_api))
  results.extend(_CheckPatchFiles(input_api, output_api))
  return results


def _CheckSubversionConfig(input_api, output_api):
  """Verifies the subversion config file is correctly setup.

  Checks that autoprops are enabled, returns an error otherwise.
  """
  join = input_api.os_path.join
  if input_api.platform == 'win32':
    appdata = input_api.environ.get('APPDATA', '')
    if not appdata:
      return [output_api.PresubmitError('%APPDATA% is not configured.')]
    path = join(appdata, 'Subversion', 'config')
  else:
    home = input_api.environ.get('HOME', '')
    if not home:
      return [output_api.PresubmitError('$HOME is not configured.')]
    path = join(home, '.subversion', 'config')

  error_msg = (
      'Please look at http://dev.chromium.org/developers/coding-style to\n'
      'configure your subversion configuration file. This enables automatic\n'
      'properties to simplify the project maintenance.\n'
      'Pro-tip: just download and install\n'
      'http://src.chromium.org/viewvc/chrome/trunk/tools/build/slave/config\n')

  try:
    lines = open(path, 'r').read().splitlines()
    # Make sure auto-props is enabled and check for 2 Chromium standard
    # auto-prop.
    if (not '*.cc = svn:eol-style=LF' in lines or
        not '*.pdf = svn:mime-type=application/pdf' in lines or
        not 'enable-auto-props = yes' in lines):
      return [
          output_api.PresubmitNotifyResult(
              'It looks like you have not configured your subversion config '
              'file or it is not up-to-date.\n' + error_msg)
      ]
  except (OSError, IOError):
    return [
        output_api.PresubmitNotifyResult(
            'Can\'t find your subversion config file.\n' + error_msg)
    ]
  return []


def _CheckPatchFiles(input_api, output_api):
  problems = [f.LocalPath() for f in input_api.AffectedFiles()
              if f.LocalPath().endswith(('.orig', '.rej'))]
  if problems:
    return [output_api.PresubmitError(
        "Don't commit .rej and .orig files.", problems)]
  else:
    return []


def CheckChangeOnUpload(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  return results


def CheckChangeOnCommit(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  results.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api, output_api,
      json_url='http://blink-status.appspot.com/current?format=json'))
  results.extend(input_api.canned_checks.CheckChangeHasDescription(
      input_api, output_api))
  results.extend(_CheckSubversionConfig(input_api, output_api))
  return results


def GetPreferredTrySlaves(project, change):
  return ['linux_layout_rel', 'mac_layout_rel', 'win_layout_rel']
Python
0.000006
@@ -1588,32 +1588,96 @@ i, output_api))%0A + results.extend(_CheckTestExpectations(input_api, output_api))%0A return results @@ -3644,24 +3644,792 @@ return %5B%5D%0A%0A%0A +def _CheckTestExpectations(input_api, output_api):%0A local_paths = %5Bf.LocalPath() for f in input_api.AffectedFiles()%5D%0A if any(path.endswith('TestExpectations') for path in local_paths):%0A lint_path = input_api.os_path.join(input_api.PresubmitLocalPath(),%0A 'Tools', 'Scripts', 'lint-test-expectations')%0A _, errs = input_api.subprocess.Popen(%0A %5Binput_api.python_executable, lint_path%5D,%0A stdout=input_api.subprocess.PIPE,%0A stderr=input_api.subprocess.PIPE).communicate()%0A if not errs:%0A return %5Boutput_api.PresubmitError(%22lint-test-expectations failed %22%0A %22to produce output; check by hand. %22)%5D%0A if errs != 'Lint succeeded.%5Cn':%0A return %5Boutput_api.PresubmitError(errs)%5D%0A return %5B%5D%0A%0A%0A def CheckCha
ae477223f296de9ee6b81a15d56d7140a5bf26ac
Create __init__.py
requests/packages/urllib3/contrib/packages/ssl_match_hostname/__init__.py
requests/packages/urllib3/contrib/packages/ssl_match_hostname/__init__.py
Python
0.000429
@@ -0,0 +1 @@ +%0A
2ef9fce02be94f8c4e9b5c52ca04a05cce1b5ede
Allow starting the server as a module
LiSE/LiSE/server/__main__.py
LiSE/LiSE/server/__main__.py
Python
0.000001
@@ -0,0 +1,657 @@ +import cherrypy%0Afrom argparse import ArgumentParser%0Afrom . import LiSEHandleWebService%0A%0Aparser = ArgumentParser()%0Aparser.add_argument('world', action='store')%0Aparser.add_argument('-c', '--code', action='store')%0Aargs = parser.parse_args()%0Aconf = %7B%0A    '/': %7B%0A        'request.dispatch': cherrypy.dispatch.MethodDispatcher(),%0A        'tools.sessions.on': True,%0A        'tools.response_headers.on': True,%0A        'tools.response_headers.headers': %5B('Content-Type', 'application/json')%5D,%0A        'tools.encode.on': True,%0A        'tools.encode.encoding': 'utf-8'%0A    %7D%0A%7D%0Acherrypy.quickstart(LiSEHandleWebService(args.world, args.code), '/', conf)%0A
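Note: positional argparse arguments are required by default, so passing required=True to add_argument raises a TypeError; the listing above therefore omits it. Once started, the service returns JSON with method-based dispatch; a hypothetical client call (host, port, and path are assumptions — cherrypy binds to port 8080 by default):

import json
from urllib.request import urlopen

with urlopen("http://127.0.0.1:8080/") as resp:   # assumed default bind
    print(json.load(resp))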
9c97f75894ff8350051682b57eb790e1387f5e7f
Add minimal coverage for on/take-over.html.spt
tests/test_associate.py
tests/test_associate.py
from __future__ import division, print_function, unicode_literals

import mock
from gittip.testing import Harness, test_website
from gittip.testing.client import TestClient
from gittip.elsewhere.twitter import TwitterAccount


class Tests(Harness):

    def setUp(self):
        Harness.setUp(self)
        self.website = test_website
        self.client = TestClient()

    def tearDown(self):
        Harness.tearDown(self)
        self.website.oauth_cache = {}

    @mock.patch('requests.post')
    @mock.patch('requests.get')
    @mock.patch('gittip.utils.mixpanel.track')
    def test_associate_opts_in(self, track, get, post):
        self.website.oauth_cache = {"deadbeef": ("deadbeef", "opt-in", "")}

        post.return_value.status_code = 200
        post.return_value.text = "oauth_token=foo&oauth_token_secret=foo&user_id=foo"

        get.return_value.status_code = 200
        get.return_value.text = '{"id": 1234, "screen_name": "alice"}'

        response = self.client.get("/on/twitter/associate?oauth_token=deadbeef&"
                                   "oauth_verifier=donald_trump")
        assert response.code == 302, response.body
        assert response.headers['Location'] == "/alice/", response.headers

    @mock.patch('requests.post')
    @mock.patch('requests.get')
    @mock.patch('gittip.utils.mixpanel.track')
    def test_associate_connects(self, track, get, post):
        self.make_participant('alice')
        self.website.oauth_cache = {"deadbeef": ("deadbeef", "connect", "")}

        post.return_value.status_code = 200
        post.return_value.text = "oauth_token=foo&oauth_token_secret=foo&user_id=foo"

        get.return_value.status_code = 200
        get.return_value.text = '{"id": 1234, "screen_name": "alice"}'

        response = self.client.get("/on/twitter/associate?oauth_token=deadbeef&"
                                   "oauth_verifier=donald_trump", user="alice")
        assert response.code == 302, response.body
        assert response.headers['Location'] == "/alice/", response.headers

        rec = self.db.one("SELECT * FROM elsewhere")
        assert rec.participant == 'alice', rec
        assert rec.platform == 'twitter', rec

    @mock.patch('requests.post')
    @mock.patch('requests.get')
    @mock.patch('gittip.utils.mixpanel.track')
    def test_associate_confirms_on_connect(self, track, get, post):
        TwitterAccount('1234', {'screen_name': 'alice'}).opt_in('alice')
        self.make_participant('bob')
        self.website.oauth_cache = {"deadbeef": ("deadbeef", "connect", "")}

        post.return_value.status_code = 200
        post.return_value.text = "oauth_token=foo&oauth_token_secret=foo&user_id=foo"

        get.return_value.status_code = 200
        get.return_value.text = '{"id": 1234, "screen_name": "alice"}'

        self.client.get('/')  # populates cookies['csrf_token']
        response = self.client.get("/on/twitter/associate?oauth_token=deadbeef&"
                                   "oauth_verifier=donald_trump", user="bob")
        assert "Please Confirm" in response.body, response.body
Python
0
@@ -3078,24 +3078,1095 @@ nse.body, response.body%0A +%0A%0A @mock.patch('requests.post')%0A @mock.patch('requests.get')%0A @mock.patch('gittip.utils.mixpanel.track')%0A def test_can_post_to_take_over(self, track, get, post):%0A TwitterAccount('1234', %7B'screen_name': 'alice'%7D).opt_in('alice')%0A%0A self.make_participant('bob')%0A self.website.connect_tokens = %7B(%22bob%22, %22twitter%22, %221234%22): %22deadbeef%22%7D%0A%0A csrf_token = self.client.get('/').request.context%5B'csrf_token'%5D%0A response = self.client.post( %22/on/take-over.html%22%0A , data=%7B %22platform%22: %22twitter%22%0A , %22user_id%22: %221234%22%0A , %22csrf_token%22: csrf_token%0A , %22connect_token%22: %22deadbeef%22%0A %7D%0A , user=%22bob%22%0A )%0A%0A assert response.code == 302, response.body%0A expected = '/about/me.html'%0A actual = response.headers%5B'Location'%5D%0A assert actual == expected, actual%0A
9e2d025384dd58c87bf8d292008711c317cb45df
extract human face
otherFaces.py
otherFaces.py
Python
0.999999
@@ -0,0 +1,1355 @@ +import cv2%0D%0Aprint(cv2.__file__)%0D%0Aimport os%0D%0Aimport sys%0D%0A%0D%0AIMAGE_DIR = 'D:%5CDATA%5Cgirl2%5Cgirl2'%0D%0A%0D%0AOUTPUT_DIR = './other_people'%0D%0Aif not os.path.exists(OUTPUT_DIR):%0D%0A os.makedirs(OUTPUT_DIR)%0D%0A%0D%0A# http://blog.topspeedsnail.com/archives/10511%0D%0A# wget https://raw.githubusercontent.com/opencv/opencv/master/data/haarcascades/haarcascade_frontalface_default.xml%0D%0Aface_haar = cv2.CascadeClassifier(%22haarcascade_frontalface_default.xml%22)%0D%0Aface_haar.load('D:/Program Files (x86)/Miniconda3/Library/etc/haarcascades/haarcascade_frontalface_default.xml')%0D%0A%0D%0Afor (dirpath, dirnames, filenames) in os.walk(IMAGE_DIR):%0D%0A for filename in filenames:%0D%0A if filename.endswith('.jpg'):%0D%0A image_path = os.path.join(dirpath, filename)%0D%0A print('process: ', image_path)%0D%0A img = cv2.imread(image_path)%0D%0A%0D%0A gray_image = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)%0D%0A faces = face_haar.detectMultiScale(gray_image, 1.3, 5)%0D%0A for face_x, face_y, face_w, face_h in faces:%0D%0A face = img%5Bface_y:face_y + face_h, face_x:face_x + face_w%5D%0D%0A%0D%0A face = cv2.resize(face, (64, 64))%0D%0A%0D%0A cv2.imshow(%22img%22, face)%0D%0A cv2.imwrite(os.path.join(OUTPUT_DIR, filename), face)%0D%0A%0D%0A key = cv2.waitKey(30) & 0xff%0D%0A if key == 27:%0D%0A sys.exit(0)
a0124a990b4afe0cd5fd3971bae1e43f417bc1b2
Add management command to find domains impacted by 502 bug
corehq/apps/domain/management/commands/find_secure_submission_image_domains.py
corehq/apps/domain/management/commands/find_secure_submission_image_domains.py
Python
0
@@ -0,0 +1,1278 @@ +from django.core.management.base import BaseCommand%0Afrom corehq.apps.domain.models import Domain%0Aimport csv%0A%0A%0Aclass Command(BaseCommand):%0A help = 'Find domains with secure submissions and image questions'%0A%0A def handle(self, *args, **options):%0A with open('domain_results.csv', 'wb+') as csvfile:%0A csv_writer = csv.writer(%0A csvfile,%0A delimiter=',',%0A quotechar='%7C',%0A quoting=csv.QUOTE_MINIMAL%0A )%0A%0A csv_writer.writerow(%5B'domain', 'app', 'domain_creator'%5D)%0A%0A for domain in Domain.get_all(include_docs=True):%0A if domain.secure_submissions:%0A for app in domain.full_applications(include_builds=False):%0A for module in app.modules:%0A for form in module.forms:%0A for question in form.get_questions(app.langs):%0A if question%5B'type'%5D == 'Image':%0A csv_writer.writerow(%5B%0A domain.name,%0A app.name,%0A domain.creating_user%0A %5D)%0A
71f1bc5d981952f275500a2b62a67488b33e205b
Longest increasing subsequence algo
LongestIncreasingSubsequence.py
LongestIncreasingSubsequence.py
Python
0.999398
@@ -0,0 +1,647 @@ +#Finds a largest increasing subsequence in O(n%5E2) time%0A#algorithm at http://www.algorithmist.com/index.php/Longest_Increasing_Subsequence%0Adef LongestSubsequence(array):%0A n=len(array)%0A q=%5B0%5D*n%0A p=%5B-1%5D*n # Contains all the previos elements to the increasing sequence%0A %0A for i in range(n):%0A maxLen=0%0A for j in range(i):%0A if array%5Bi%5D%3Earray%5Bj%5D :%0A if q%5Bj%5D%3EmaxLen :%0A maxLen=q%5Bj%5D%0A p%5Bi%5D=j%0A%0A q%5Bi%5D=maxLen+1%0A %0A idx=q.index(max(q))%0A seq=%5B%5D%0A while(idx!=-1):%0A seq=%5Barray%5Bidx%5D%5D+seq%0A idx=p%5Bidx%5D%0A %0A return seq%0A %0Adef main():%0A print(LongestSubsequence(%5B4,2,6,1,9,0,11,7,12%5D))%0A %0A %0Aif __name__=='__main__':%0A main()
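Note: run as-is, the script above prints [4, 6, 9, 11, 12]. When only the length of the longest increasing subsequence is needed, the patience-sorting trick cuts the cost from O(n^2) to O(n log n); a sketch:

from bisect import bisect_left

def lis_length(array):
    tails = []  # tails[k] = smallest tail of an increasing subsequence of length k + 1
    for x in array:
        i = bisect_left(tails, x)
        if i == len(tails):
            tails.append(x)
        else:
            tails[i] = x
    return len(tails)

assert lis_length([4, 2, 6, 1, 9, 0, 11, 7, 12]) == 5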
361a075efed0ca4a9877f7268b2e91725ef8be65
Add encoder.py
encoder.py
encoder.py
Python
0.000011
@@ -0,0 +1,2311 @@ +%22%22%22%0ASource: https://trac.ffmpeg.org/wiki/Encode/H.264%0A%22%22%22%0Aimport os%0Aimport sys%0Aimport subprocess%0A%0AFFMPEG_PATH = '/usr/local/bin/ffmpeg'%0A%0AVIDEO_CODEC = 'h264'%0AVIDEO_ENCODER = 'h264_omx'%0A%0AAUDIO_CODEC = 'aac'%0AAUDIO_ENCODER = 'aac'%0A%0ABITRATE = '2500k'%0A%0ASRC_DIR = os.path.expanduser('~/Desktop')%0ADEST_DIR = os.path.expanduser('~/Desktop/Media')%0A%0AINPUT_EXTS = %5B'.mkv'%5D%0AOUTPUT_EXT = '.mp4'%0A%0Adef stream_codec(stream, filename):%0A    %22%22%22return the codec name for a stream%22%22%22%0A    return subprocess.check_output(%5B%0A        'ffprobe',%0A        '-v',%0A        'error',%0A        '-select_streams',%0A        stream,%0A        '-show_entries',%0A        'stream=codec_name',%0A        '-of',%0A        'default=nokey=1:noprint_wrappers=1',%0A        filename%0A    %5D).decode().strip()%0A%0Adef walk_src_media(callback):%0A    %22%22%22pass every file with a valid input extension to the callback%22%22%22%0A    for root, _dirs, files in os.walk(os.path.expanduser(SRC_DIR)):%0A        for filename in files:%0A            if os.path.splitext(filename)%5B1%5D in INPUT_EXTS:%0A                callback(root, filename)%0A%0Adef encode(root, filename, opts):%0A    %22%22%22encode file using ffmpeg%22%22%22%0A    input_filename = os.path.join(root, filename)%0A    path_to_create = os.path.dirname(os.path.relpath(input_filename, SRC_DIR))%0A    path_to_create = os.path.join(DEST_DIR, path_to_create)%0A    output_filename = os.path.join(path_to_create, os.path.splitext(filename)%5B0%5D + OUTPUT_EXT)%0A%0A    if os.path.isfile(output_filename):%0A        return%0A%0A    command = %5BFFMPEG_PATH, '-i', os.path.expanduser(input_filename)%5D%0A%0A    v_encoder = 'copy' if stream_codec('v:0', input_filename) == VIDEO_CODEC else VIDEO_ENCODER%0A    command += %5B'-c:v', v_encoder%5D%0A%0A    a_encoder = 'copy' if stream_codec('a:0', input_filename) == AUDIO_CODEC else AUDIO_ENCODER%0A    command += %5B'-c:a', a_encoder%5D%0A%0A    command += %5B'-b:v', BITRATE%5D%0A%0A    if '--debug' in opts:%0A        command += %5B'-to', '15'%5D%0A%0A    command += %5Bos.path.expanduser(output_filename)%5D%0A%0A    if '--dry' in opts:%0A        print(' '.join(command), '%5Cn')%0A    else:%0A        os.makedirs(path_to_create, exist_ok=True)%0A        subprocess.run(command)%0A%0Adef process(args):%0A    %22%22%22encode media from the source directory into the destination directory%22%22%22%0A    walk_src_media(lambda root, filename: encode(root, filename, args))%0A%0Aif __name__ == %22__main__%22:%0A    process(sys.argv%5B1:%5D)%0A
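Note: stream_codec must decode what ffprobe prints, because subprocess.check_output returns bytes with a trailing newline; comparing raw bytes against the str constants VIDEO_CODEC/AUDIO_CODEC would never match under Python 3, so every file would be silently re-encoded. A minimal demonstration (POSIX echo assumed):

import subprocess

out = subprocess.check_output(['echo', 'h264'])
assert out == b'h264\n'                  # bytes, newline included
assert out.decode().strip() == 'h264'    # comparable to a str constant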
3344c49bf36a4bd74fb9db079297b98a2e0ee46f
Implement cht.sh release script
bin/release.py
bin/release.py
Python
0
@@ -0,0 +1,1532 @@ +#!/usr/bin/env python%0A%0Afrom __future__ import print_function%0A%0Afrom datetime import datetime%0Aimport os%0Afrom os import path%0Aimport re%0Aimport shutil%0Aimport subprocess%0Afrom subprocess import Popen%0Aimport sys%0A%0ASHARE_DIR = path.join(path.dirname(__file__), %22../share/%22)%0A%0A%0Adef run(args):%0A return Popen(args, stdout=sys.stdout, stderr=sys.stderr).wait()%0A%0A%0Astatus = subprocess.check_output(%5B%22git%22, %22status%22, %22--porcelain%22%5D)%0Aif len(status) %3E 0:%0A print(%22Unclean working tree. Commit or stash changes first.%22, file=sys.stderr)%0A sys.exit(1)%0A%0Atimestamp = datetime.utcnow().strftime(%22%25Y-%25m-%25d %25H:%25M:%25S +0000%22)%0A%0Acht_curr = path.join(SHARE_DIR, %22cht.sh.txt%22)%0Acht_new = path.join(SHARE_DIR, %22cht.sh.txt.new%22)%0A%0Are_version = re.compile(r%22%5E__CHTSH_VERSION=(.*)$%22)%0Are_timestamp = re.compile(r%22%5E__CHTSH_DATETIME=.*$%22)%0A%0Awith open(cht_curr, %22rt%22) as fin:%0A with open(cht_new, %22wt%22) as fout:%0A for line in fin:%0A match = re_version.match(line)%0A if match:%0A version = int(match.group(1)) + 1%0A fout.write(%22__CHTSH_VERSION=%25s%5Cn%22 %25 version)%0A continue%0A%0A match = re_timestamp.match(line)%0A if match:%0A fout.write('__CHTSH_DATETIME=%22%25s%22%5Cn' %25 timestamp)%0A continue%0A%0A fout.write(line)%0A%0Ashutil.copymode(cht_curr, cht_new)%0Aos.remove(cht_curr)%0Aos.rename(cht_new, cht_curr)%0A%0Amessage = %22cht: v%25s%22 %25 version%0Arun(%5B%22git%22, %22add%22, cht_curr%5D)%0Arun(%5B%22git%22, %22commit%22, %22-m%22, message%5D)%0Arun(%5B%22git%22, %22tag%22, %22cht@%25s%22 %25 version, %22-m%22, message%5D)%0A
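Note: the release script's version bump hinges on two line-anchored regexes; the first captures the current version number so it can be incremented. A tiny self-contained check of that logic (the sample version number below is made up):

import re

re_version = re.compile(r"^__CHTSH_VERSION=(.*)$")
match = re_version.match("__CHTSH_VERSION=41")
assert match and int(match.group(1)) + 1 == 42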