commit (stringlengths 40–40) | subject (stringlengths 1–3.25k) | old_file (stringlengths 4–311) | new_file (stringlengths 4–311) | old_contents (stringlengths 0–26.3k) | lang (stringclasses, 3 values) | proba (float64, 0–1) | diff (stringlengths 0–7.82k)
---|---|---|---|---|---|---|---|
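Each sample row below follows this eight-column layout. As a minimal sketch of how a row might be consumed, the snippet assumes a row is available as a plain Python dict (the loading mechanism is not specified here) and reuses the first sample commit with its long cells truncated; newlines and other special characters inside the `old_contents` and `diff` cells appear percent-encoded (`%0A`, `%22`, `%5B`, ...), so `urllib.parse.unquote` restores the original source text.

```python
from urllib.parse import unquote

# Hypothetical in-memory form of the first sample row; long cells are truncated
# here for illustration.
row = {
    "commit": "67d86229279e979d8ef5ac54e5ed8ca85c32ff2e",
    "subject": "add another sample script (multiple.py).",
    "old_file": "demos/multiple.py",
    "new_file": "demos/multiple.py",
    "old_contents": "",  # new file in this commit, so no previous contents
    "lang": "Python",
    "proba": 0.0,
    "diff": "@@ -0,0 +1,632 @@\n+#!/usr/bin/env python%0Afrom Exscript import Host%0A...",
}

# The hunk body keeps newlines and punctuation percent-encoded; decoding it
# recovers the added source lines.
decoded = unquote(row["diff"])
for line in decoded.splitlines()[:4]:
    print(line)
```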
67d86229279e979d8ef5ac54e5ed8ca85c32ff2e
|
add another sample script (multiple.py).
|
demos/multiple.py
|
demos/multiple.py
|
Python
| 0 |
@@ -0,0 +1,632 @@
+#!/usr/bin/env python%0Afrom Exscript import Host%0Afrom Exscript.util.interact import read_login%0Afrom Exscript.util.template import eval_file%0Afrom Exscript.util.start import start%0A%0Adef one(conn):%0A conn.open()%0A conn.authenticate()%0A conn.autoinit()%0A conn.execute('show ip int brie')%0A%0Adef two(conn):%0A eval_file(conn, 'mytemplate.exscript', interface = 'POS1/0')%0A%0Aaccount = read_login()%0A%0A# Start on one host.%0Ahost1 = Host('localhost')%0Ahost1.set('myvariable', 'foobar')%0Astart(account, host1, one)%0A%0A# Start on another.%0Ahost2 = Host('otherhost1')%0Ahost3 = Host('otherhost2')%0Astart(account, %5Bhost1, host2%5D, two)%0A
|
|
3704654e704c0595e933f4ab2832e945816afde8
|
Add setup.py file
|
TimeSeries/PublicApis/Python/setup.py
|
TimeSeries/PublicApis/Python/setup.py
|
Python
| 0.000001 |
@@ -0,0 +1,421 @@
+from setuptools import setup%0A%0Asetup(%0A name=%22aquarius-timeseries-client%22,%0A py_modules=%5B%22timeseries_client%22%5D,%0A version=%220.1%22,%0A description=%22Python client for Aquarius TimeSeries API%22,%0A long_description=open(%22README.md%22).read(),%0A long_description_content_type=%22text/markdown%22,%0A url=%22https://github.com/AquaticInformatics/Examples%22,%0A install_requires=(%0A %22requests%22,%0A %22pyrfc3339%22%0A )%0A)%0A
|
|
da22d8dffadbb4713e715aca7918942f445090c9
|
embed video form and model fields
|
embed_video/fields.py
|
embed_video/fields.py
|
Python
| 0 |
@@ -0,0 +1,715 @@
+from django.db import models%0Afrom django import forms%0Afrom django.utils.translation import ugettext_lazy as _%0A%0Afrom .base import detect_backend%0A%0A__all__ = ('EmbedVideoField', 'EmbedVideoFormField')%0A%0A%0Aclass EmbedVideoField(models.URLField):%0A def formfield(self, **kwargs):%0A defaults = %7B'form_class': EmbedVideoFormField%7D%0A defaults.update(kwargs)%0A return super(EmbedVideoField, self).formfield(**defaults)%0A%0A%0Aclass EmbedVideoFormField(forms.URLField):%0A def validate(self, url):%0A super(EmbedVideoFormField, self).validate(url)%0A%0A try:%0A detect_backend(url)%0A except:%0A raise forms.ValidationError(_(u'URL could not be recognized.'))%0A%0A return url%0A
|
|
b81028067cf65b2ee3a155d081e7983a1de70d5f
|
Add mistakenly omitted migrations
|
opentreemap/treemap/migrations/0005_auto_20150729_1046.py
|
opentreemap/treemap/migrations/0005_auto_20150729_1046.py
|
Python
| 0 |
@@ -0,0 +1,784 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('treemap', '0004_auto_20150720_1523'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='fieldpermission',%0A name='permission_level',%0A field=models.IntegerField(default=0, choices=%5B(0, 'Invisible'), (1, 'Read Only'), (2, 'Pending Write Access'), (3, 'Full Write Access')%5D),%0A ),%0A migrations.AlterField(%0A model_name='role',%0A name='default_permission',%0A field=models.IntegerField(default=0, choices=%5B(0, 'Invisible'), (1, 'Read Only'), (2, 'Pending Write Access'), (3, 'Full Write Access')%5D),%0A ),%0A %5D%0A
|
|
1fa74f6a6a5faeb9579c889df32e4bfe8d6908df
|
Add migration
|
fat/migrations/0059_event_extra_sponsored.py
|
fat/migrations/0059_event_extra_sponsored.py
|
Python
| 0.000002 |
@@ -0,0 +1,449 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.5 on 2016-08-08 10:16%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('fat', '0058_auto_20160808_1007'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='event',%0A name='extra_sponsored',%0A field=models.TextField(blank=True),%0A ),%0A %5D%0A
|
|
62c70b301ffc1e178c3bd54bd81291876b3883ea
|
Add simple linear interpolation filling.
|
analysis/03-fill-dropouts-linear.py
|
analysis/03-fill-dropouts-linear.py
|
Python
| 0 |
@@ -0,0 +1,1058 @@
+#!/usr/bin/env python%0A%0Afrom __future__ import division%0A%0Aimport climate%0Aimport lmj.cubes%0Aimport lmj.cubes.fill%0Aimport numpy as np%0Aimport pandas as pd%0A%0Alogging = climate.get_logger('fill')%0A%0Adef fill(dfs, window):%0A '''Complete missing marker data using linear interpolation.%0A%0A This method alters the given %60dfs%60 in-place.%0A%0A Parameters%0A ----------%0A dfs : list of pd.DataFrame%0A Frames of source data. The frames will be stacked into a single large%0A frame to use during SVT. This stacked frame will then be split and%0A returned.%0A window : int%0A Model windows of this many consecutive frames.%0A '''%0A df = lmj.cubes.fill.stack(dfs, window)%0A centers = lmj.cubes.fill.center(df)%0A pos, _, _ = lmj.cubes.fill.window(df, window, interpolate=True)%0A lmj.cubes.fill.update(df, pos, window)%0A lmj.cubes.fill.restore(df, centers)%0A lmj.cubes.fill.unstack(df, dfs)%0A%0A%0Adef main(args):%0A lmj.cubes.fill.main(args, lambda ts: fill(%5Bt.df for t in ts%5D, args.window))%0A%0A%0Aif __name__ == '__main__':%0A climate.call(main)%0A
|
|
7942254131bcf005d5a5f1bb33ca7d1ffff1b311
|
Create keyAllCtrls.py
|
af_scripts/blendshapes/keyAllCtrls.py
|
af_scripts/blendshapes/keyAllCtrls.py
|
Python
| 0.000002 |
@@ -0,0 +1,186 @@
+import maya.cmds as cmds%0Aimport maya.mel as mel%0Acmds.select(cmds.ls('*:*.faceCtrl', o=1))%0Amel.eval('doSetKeyframeArgList 6 %7B %224%22,%220%22,%220%22,%220%22,%221%22,%220%22,%220%22,%22animationList%22,%220%22,%221%22,%220%22 %7D;')%0A
|
|
f51c4abc95fda5504e7c7a5ad87355698798ddd1
|
create temporary streaming solution
|
temp_vidstream.py
|
temp_vidstream.py
|
Python
| 0 |
@@ -0,0 +1,190 @@
+import picamera%0A%0Awith picamera.PiCamera() as camera:%0A camera.resolution = (640, 480)%0A camera.start_recording('vidstream.mp4')%0A camera.wait_recording(60)%0A camera.stop_recording()%0A
|
|
89d27dd0a28f84c99930c0f1dad496e525f62272
|
migrate to namespace table
|
migrations/versions/28c0d6c2f887_add_namespaces.py
|
migrations/versions/28c0d6c2f887_add_namespaces.py
|
Python
| 0.000002 |
@@ -0,0 +1,2665 @@
+%22%22%22Add namespaces%0A%0ARevision ID: 28c0d6c2f887%0ARevises: 4323056c0b78%0ACreate Date: 2013-10-14 22:18:29.705865%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '28c0d6c2f887'%0Adown_revision = '4323056c0b78'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0Afrom sqlalchemy.dialects import mysql%0A%0Adef upgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A# op.create_table('namespaces',%0A# sa.Column('id', sa.Integer(), nullable=False),%0A# sa.Column('user_id', sa.Integer(), nullable=False),%0A# sa.PrimaryKeyConstraint('id')%0A# )%0A op.alter_column(u'foldermeta', u'user_id', new_column_name='namespace_id',%0A%09existing_type=mysql.INTEGER(display_width=11))%0A op.alter_column(u'foldermeta', 'folder_name',%0A existing_type=mysql.VARCHAR(length=255),%0A nullable=False)%0A op.alter_column(u'foldermeta', 'msg_uid',%0A existing_type=mysql.INTEGER(display_width=11),%0A nullable=False)%0A op.alter_column(u'messagemeta', u'user_id', new_column_name='namespace_id',%0A%09existing_type=mysql.INTEGER(display_width=11))%0A op.alter_column(u'rawmessage', u'user_id', new_column_name='namespace_id',%0A%09existing_type=mysql.INTEGER(display_width=11))%0A op.alter_column(u'uidvalidity', u'user_id', new_column_name='namespace_id',%0A%09existing_type=mysql.INTEGER(display_width=11))%0A op.add_column(u'users', sa.Column('root_namespace', sa.Integer(), nullable=False))%0A ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.drop_column(u'users', 'root_namespace')%0A op.add_column(u'uidvalidity', sa.Column(u'user_id', mysql.INTEGER(display_width=11), nullable=False))%0A op.drop_column(u'uidvalidity', 'namespace_id')%0A op.add_column(u'rawmessage', sa.Column(u'user_id', mysql.INTEGER(display_width=11), nullable=False))%0A op.drop_column(u'rawmessage', 'namespace_id')%0A op.add_column(u'messagemeta', sa.Column(u'user_id', mysql.INTEGER(display_width=11), nullable=False))%0A op.drop_column(u'messagemeta', 'namespace_id')%0A op.alter_column(u'foldermeta', 'msg_uid',%0A existing_type=mysql.INTEGER(display_width=11),%0A nullable=True)%0A op.alter_column(u'foldermeta', 'folder_name',%0A existing_type=mysql.VARCHAR(length=255),%0A nullable=True)%0A op.add_column(u'foldermeta', sa.Column(u'user_id', mysql.INTEGER(display_width=11), nullable=False))%0A op.drop_column(u'foldermeta', 'namespace_id')%0A op.drop_table('namespaces')%0A ### end Alembic commands ###%0A# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4%0A# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4%0A
|
|
55f2325354724cfe8b90324038daf2c1acaa916a
|
Add unit tests for OpenStack config defaults
|
teuthology/openstack/test/test_config.py
|
teuthology/openstack/test/test_config.py
|
Python
| 0 |
@@ -0,0 +1,1092 @@
+from teuthology.config import config%0A%0A%0Aclass TestOpenStack(object):%0A%0A def setup(self):%0A self.openstack_config = config%5B'openstack'%5D%0A%0A def test_config_clone(self):%0A assert 'clone' in self.openstack_config%0A%0A def test_config_user_data(self):%0A os_type = 'rhel'%0A os_version = '7.0'%0A template_path = self.openstack_config%5B'user-data'%5D.format(%0A os_type=os_type,%0A os_version=os_version)%0A assert os_type in template_path%0A assert os_version in template_path%0A%0A def test_config_ip(self):%0A assert 'ip' in self.openstack_config%0A%0A def test_config_machine(self):%0A assert 'machine' in self.openstack_config%0A machine_config = self.openstack_config%5B'machine'%5D%0A assert 'disk' in machine_config%0A assert 'ram' in machine_config%0A assert 'cpus' in machine_config%0A%0A def test_config_volumes(self):%0A assert 'volumes' in self.openstack_config%0A volumes_config = self.openstack_config%5B'volumes'%5D%0A assert 'count' in volumes_config%0A assert 'size' in volumes_config%0A
|
|
526d58fb917a4e098018f733b4c0b254417140b4
|
Add @log_route decorator
|
keeper/logutils.py
|
keeper/logutils.py
|
Python
| 0.00001 |
@@ -0,0 +1,1336 @@
+%22%22%22Logging helpers and utilities.%0A%22%22%22%0A%0A__all__ = %5B'log_route'%5D%0A%0Afrom functools import wraps%0Afrom timeit import default_timer as timer%0Aimport uuid%0A%0Afrom flask import request, make_response%0Aimport structlog%0A%0A%0Adef log_route():%0A %22%22%22Route decorator to initialize a thread-local logger for a route.%0A %22%22%22%0A def decorator(f):%0A @wraps(f)%0A def decorated_function(*args, **kwargs):%0A # Initialize a timer to capture the response time%0A # This is for convenience, in addition to route monitoring.%0A start_time = timer()%0A%0A # Initialize a new thread-local logger and add a unique request%0A # ID to its context.%0A # http://www.structlog.org/en/stable/examples.html%0A logger = structlog.get_logger()%0A log = logger.new(%0A request_id=str(uuid.uuid4()),%0A path=request.path,%0A method=request.method,%0A )%0A%0A # Pass through route%0A response = f(*args, **kwargs)%0A response = make_response(response)%0A%0A # Close out the logger%0A end_time = timer()%0A log.info(%0A status=response.status_code,%0A response_time=end_time - start_time)%0A%0A return response%0A%0A return decorated_function%0A return decorator%0A
|
|
3f3115a0a9c7407820b3b10c06dcfa4f92ac6e57
|
Add owned book scaffold
|
goodreads_api_client/resources/owned_book.py
|
goodreads_api_client/resources/owned_book.py
|
Python
| 0 |
@@ -0,0 +1,658 @@
+# -*- coding: utf-8 -*-%0A%22%22%22Module containing owned book resource class.%22%22%22%0A%0Afrom goodreads_api_client.exceptions import OauthEndpointNotImplemented%0Afrom goodreads_api_client.resources.base import Resource%0A%0A%0Aclass OwnedBook(Resource):%0A def create(self):%0A raise OauthEndpointNotImplemented('owned_book.compare')%0A%0A def destroy(self):%0A raise OauthEndpointNotImplemented('owned_book.destroy')%0A%0A def list(self):%0A raise OauthEndpointNotImplemented('owned_book.list')%0A%0A def show(self):%0A raise OauthEndpointNotImplemented('owned_book.show')%0A%0A def update(self):%0A raise OauthEndpointNotImplemented('owned_book.update')%0A
|
|
5d99b7c2dfbfbb776716f2258d560bab2602531f
|
Create main.py
|
main.py
|
main.py
|
Python
| 0.000001 |
@@ -0,0 +1,2657 @@
+# -*- coding: utf-8 -*-%0A#Backlog Manager%0A#programmed by Ian Hitterdal (otend)%0A#licensed under MIT license%0Aimport work%0Aimport random%0A%0A%0Adef addWork(medium):%0A#input: valid medium string%0A#user input: work title string%0A#output: none%0A#user output: none, really%0A global workDict%0A global mediumList%0A if medium not in mediumList:%0A print(%22Invalid medium, otend did something wrong%22)%0A else:%0A inName = input(%22What is the name of the work? %22)%0A workDict%5Bmedium%5D.append(work.Work(inName))%0A%0A%0Adef pickMedium():%0A#input: none%0A#user input: integer to choose a medium from the list%0A#output: valid medium string%0A global mediumList%0A print(%22Which medium would you like to use?%22)%0A n = 1%0A for med in mediumList:%0A print(n,%22. %22,med)%0A n = n+1%0A choice = int(input(%22Enter a number. %22))%0A return mediumList%5Bchoice-1%5D%0A%0Adef chooseWork(medium):%0A#input: valid medium string%0A#user input: affirmation of viewing%0A#output: none%0A#user output: work chosen%0A global workDict%0A valList = %5B%5D%0A for item in workDict%5Bmedium%5D:%0A if item.wasViewed == False:%0A valList.append(item)%0A if len(valList) == 0:%0A print(%22No works.%22)%0A else:%0A a = random.choice(workDict%5Bmedium%5D)%0A print(%22You should watch/play/whatever...%22)%0A print(a.name,%22%5Cn%22)%0A b = input(%22Did you watch it? y/n%22)%0A if(b == %22y%22):%0A a.wasViewed = True%0A%0Adef listWork(medium):%0A#Input: string that is in the medium list%0A#output: none%0A#user output: all entries present in the list for that medium.%0A global workDict%0A print(%22Here are the works registered for %7B%7D.%22,medium)%0A for i in workDict%5Bmedium%5D:%0A print(i)%0A%0A%0A%0Adef watDo():%0A#input: none%0A#user input: choice of task%0A#output: none%0A#user output: tasks available, other outputs dependent on validity of choice%0A#valid: goodbye or none%0A#invalid: error message%0A print(%22What do you want to do?%22)%0A print(%221. Add a work.%22)%0A print(%222. Have a work chosen.%22)%0A print(%223. List works.%22)%0A print(%224. Quit.%22)%0A choice = input(%22Enter a number.%22)%0A if choice not in %5B%221%22,%222%22,%223%22,%224%22%5D:%0A print(%22You have entered an invalid choice. Please try again.%22)%0A watDo()%0A elif choice == %224%22:%0A print(%22Goodbye.%22)%0A else:%0A a = pickMedium()%0A if(choice == %221%22):%0A addWork(a)%0A watDo()%0A elif(choice == %222%22):%0A chooseWork(a)%0A watDo()%0A else:%0A listWork(a)%0A watDo()%0A%0AmediumList = %5B%22film%22, %22game%22, %22show%22, %22comic%22, %22book%22, %22album%22%5D%0AworkDict = dict()%0Afor n in mediumList:%0A workDict%5Bn%5D = list()%0A%0Aprint(%22Welcome to Backlog Manager 0.1 Pre-Alpha!%22)%0AwatDo()%0A
|
|
f75d321b200217514cde901cc15cc2b798e3dcfe
|
Add new hipchat module
|
bumblebee/modules/hipchat.py
|
bumblebee/modules/hipchat.py
|
Python
| 0 |
@@ -0,0 +1,1801 @@
+%22%22%22Displays the unread messages count for an HipChat user%0A%0ARequires the following library:%0A * requests%0A%0AParameters:%0A * hipchat.token: HipChat user access token, the token needs to have the 'View Messages' scope.%0A * hipchat.interval: Refresh interval in minutes (defaults to 5)%0A%22%22%22%0A%0Aimport time%0Aimport functools%0Aimport bumblebee.input%0Aimport bumblebee.output%0Aimport bumblebee.engine%0A%0Atry:%0A import requests%0Aexcept ImportError:%0A pass%0A%0AHIPCHAT_API_URL = %22https://www.hipchat.com/v2/readstate?expand=items.unreadCount%22%0A%0Aclass Module(bumblebee.engine.Module):%0A def __init__(self, engine, config):%0A super(Module, self).__init__(engine, config,%0A bumblebee.output.Widget(full_text=self.output)%0A )%0A self._count = 0%0A self._interval = int(self.parameter(%22interval%22, %225%22))%0A self._nextcheck = 0%0A%0A self._requests = requests.Session()%0A self._requests.headers.update(%7B%22Authorization%22:%22Bearer %7B%7D%22.format(self.parameter(%22token%22, %22%22))%7D)%0A%0A immediate_update = functools.partial(self.update, immediate=True)%0A engine.input.register_callback(self, button=bumblebee.input.RIGHT_MOUSE, cmd=immediate_update)%0A%0A def output(self, _):%0A return str(self._count)%0A%0A def update(self, _, immediate=False):%0A if immediate or self._nextcheck %3C int(time.time()):%0A self._nextcheck = int(time.time()) + self._interval * 60%0A%0A try:%0A self._count = 0%0A items = self._requests.get(HIPCHAT_API_URL).json().get('items')%0A self._count = sum(%5Bitem.get('unreadCount').get('count') for item in items%5D)%0A%0A except Exception:%0A self._count = %22n/a%22%0A%0A%0A# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4%0A
|
|
786ed1d37ae5285bce1178d401d487233d4bd5b1
|
Add greater/less than tests
|
test/osa_tests.py
|
test/osa_tests.py
|
Python
| 0.000001 |
@@ -0,0 +1,1171 @@
+#!/usr/bin/env python%0A# Copyright 2016, Rackspace US, Inc.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%22%22%22Extra tests for jinja2 templates in Ansible.%22%22%22%0A%0A%0Adef greater_than(value, reference_value):%0A %22%22%22Return true if value %3E reference_value.%22%22%22%0A return value %3E reference_value%0A%0A%0Adef less_than(value, reference_value):%0A %22%22%22Return true if value %3C reference_value.%22%22%22%0A return value %3C reference_value%0A%0A%0Aclass TestModule:%0A %22%22%22Main test class from Ansible.%22%22%22%0A%0A def tests(self):%0A %22%22%22Add these tests to the list of tests available to Ansible.%22%22%22%0A return %7B%0A 'greater_than': greater_than,%0A 'less_than': less_than,%0A %7D%0A
|
|
0a3488915938de418ab0675f4cc051769b470927
|
Fix tab switching test on reference builds.
|
tools/perf/measurements/tab_switching.py
|
tools/perf/measurements/tab_switching.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""The tab switching measurement.
This measurement opens pages in different tabs. After all the tabs have opened,
it cycles through each tab in sequence, and records a histogram of the time
between when a tab was first requested to be shown, and when it was painted.
"""
from metrics import histogram_util
from telemetry.core import util
from telemetry.page import page_measurement
from telemetry.page import page_runner
# TODO: Revisit this test once multitab support is finalized.
class TabSwitching(page_measurement.PageMeasurement):
def CustomizeBrowserOptions(self, options):
options.AppendExtraBrowserArg('--enable-stats-collection-bindings')
options.AppendExtraBrowserArg('--dom-automation')
def CanRunForPage(self, page):
return not page.page_set.pages.index(page)
def DidNavigateToPage(self, page, tab):
for i in xrange(1, len(page.page_set.pages)):
t = tab.browser.tabs.New()
page_state = page_runner.PageState()
page_state.PreparePage(page.page_set.pages[i], t)
page_state.ImplicitPageNavigation(page.page_set.pages[i], t)
def MeasurePage(self, _, tab, results):
"""Although this is called MeasurePage, we're actually using this function
to cycle through each tab that was opened via DidNavigateToPage and
then record a single histogram for the tab switching metric.
"""
histogram_name = 'MPArch.RWH_TabSwitchPaintDuration'
histogram_type = histogram_util.BROWSER_HISTOGRAM
first_histogram = histogram_util.GetHistogramFromDomAutomation(
histogram_type, histogram_name, tab)
prev_histogram = first_histogram
for i in xrange(len(tab.browser.tabs)):
t = tab.browser.tabs[i]
t.Activate()
def _IsDone():
cur_histogram = histogram_util.GetHistogramFromDomAutomation(
histogram_type, histogram_name, tab)
diff_histogram = histogram_util.SubtractHistogram(
cur_histogram, prev_histogram)
return diff_histogram
util.WaitFor(_IsDone, 30)
prev_histogram = histogram_util.GetHistogramFromDomAutomation(
histogram_type, histogram_name, tab)
last_histogram = histogram_util.GetHistogramFromDomAutomation(
histogram_type, histogram_name, tab)
diff_histogram = histogram_util.SubtractHistogram(last_histogram,
first_histogram)
results.AddSummary(histogram_name, '', diff_histogram,
data_type='unimportant-histogram')
|
Python
| 0.998504 |
@@ -868,16 +868,96 @@
mation')
+%0A options.AppendExtraBrowserArg('--reduce-security-for-dom-automation-tests')
%0A%0A def
|
01d9134067852a1f9dfecf75f730f9fba14434e0
|
Add test_gradient_checker.py
|
python/paddle/v2/framework/tests/test_gradient_checker.py
|
python/paddle/v2/framework/tests/test_gradient_checker.py
|
Python
| 0.000005 |
@@ -0,0 +1,1469 @@
+import unittest%0Aimport numpy%0Afrom paddle.v2.framework.op import Operator%0Afrom gradient_checker import GradientChecker%0Afrom gradient_checker import get_numeric_gradient%0A%0A%0Aclass GetNumericGradientTest(unittest.TestCase):%0A def test_add_op(self):%0A add_op = Operator('add_two', X=%22X%22, Y=%22Y%22, Out=%22Z%22)%0A x = numpy.random.random((10, 1)).astype(%22float32%22)%0A y = numpy.random.random((10, 1)).astype(%22float32%22)%0A%0A arr = get_numeric_gradient(add_op, %7B'X': x, %22Y%22: y%7D, 'Z', 'X')%0A self.assertAlmostEqual(arr.mean(), 1.0, delta=1e-4)%0A%0A def test_softmax_op(self):%0A def stable_softmax(x):%0A %22%22%22Compute the softmax of vector x in a numerically stable way.%22%22%22%0A shiftx = x - numpy.max(x)%0A exps = numpy.exp(shiftx)%0A return exps / numpy.sum(exps)%0A%0A def label_softmax_grad(Y, dY):%0A dX = Y * 0.0%0A for i in range(Y.shape%5B0%5D):%0A d = numpy.dot(Y%5Bi, :%5D, dY%5Bi, :%5D)%0A dX%5Bi, :%5D = Y%5Bi, :%5D * (dY%5Bi, :%5D - d)%0A return dX%0A%0A softmax_op = Operator(%22softmax%22, X=%22X%22, Y=%22Y%22)%0A%0A X = numpy.random.random((2, 2)).astype(%22float32%22)%0A Y = numpy.apply_along_axis(stable_softmax, 1, X)%0A dY = numpy.ones(Y.shape)%0A dX = label_softmax_grad(Y, dY)%0A%0A arr = get_numeric_gradient(softmax_op, %7B%22X%22: X%7D, 'Y', 'X')%0A numpy.testing.assert_almost_equal(arr, dX, decimal=1e-2)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
9779fc585d8d8d87580a47139742eb25bc52facd
|
Add new decorators module, move deprecated from utils over here
|
kiwi/decorators.py
|
kiwi/decorators.py
|
Python
| 0 |
@@ -0,0 +1,1889 @@
+#%0A# Kiwi: a Framework and Enhanced Widgets for Python%0A#%0A# Copyright (C) 2005 Async Open Source%0A#%0A# This library is free software; you can redistribute it and/or%0A# modify it under the terms of the GNU Lesser General Public%0A# License as published by the Free Software Foundation; either%0A# version 2.1 of the License, or (at your option) any later version.%0A# %0A# This library is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU%0A# Lesser General Public License for more details.%0A# %0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this library; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307%0A# USA%0A# %0A# Author(s): Johan Dahlin %[email protected]%3E%0A#%0A%0Aimport gobject%0A%0Afrom kiwi import _warn%0A%0Aclass deprecated(object):%0A def __init__(self, new):%0A self._new = new%0A%0A def __call__(self, func):%0A def wrapper(*args, **kwargs):%0A _warn(%22%25s is deprecated, use %25s instead%22 %25 (func.__name__,%0A self._new))%0A return func(*args, **kwargs)%0A return wrapper%0A%0Aclass delayed(object):%0A def __init__(self, delay):%0A self._delay = delay%0A self._timeout_id = -1%0A %0A def __call__(self, func):%0A def real_call(args, kwargs):%0A func(*args, **kwargs)%0A self._timeout_id = -1%0A return False%0A %0A def wrapper(*args, **kwargs):%0A # Only one call at a time%0A if self._timeout_id != -1:%0A return%0A %0A self._timeout_id = gobject.timeout_add(self._delay,%0A real_call, args, kwargs)%0A %0A return wrapper%0A%0A %0A
|
|
9258451157de31f3ece7e18fcb8ae43c433239f4
|
add example to post files to Portals File System
|
portals_api/upload_files_to_portals_file_system.py
|
portals_api/upload_files_to_portals_file_system.py
|
Python
| 0 |
@@ -0,0 +1,1300 @@
+# Example that uploads a file to the Portals File System using Portals API%0A# Access Level- Portals Domain Administrator%0A# Note: Uses Python 'Requests' module for calling API%0A# APIs:%0A# - http://docs.exosite.com/portals/#update-file-content%0A%0A%0Aimport requests%0Aimport getpass%0A%0Adirectory = %22images%22 #default directory name%0Adomain = %22%22 #example: example.exosite.com%0Auser_email = %22%22 #example: [email protected] - assume administrator access to Portals Domain Solution%0Aif domain == %22%22:%0A%09domain = raw_input('Enter Portals Domain (e.g. %22example.exosite.com%22: ')%0Aif user_email == %22%22:%0A%09user_email = raw_input('Enter Your Email Address: ')%0Auser_password = getpass.getpass() #ask for password each time at prompt%0A%0A%0A# Files to upload%0Afiles = %7B%22MyLogo.png%22:open(%22./MyLogo.png%22, %22rb%22),%0A %22MyOtherLogo.jpg%22:open(%22./MyOtherLogo.jpg%22, %22rb%22)%0A %7D%0A%0A%0Aurl = %22https://%22+domain+%22/api/portals/v1/fs/%22+directory%0A%0Aprint 'Uploading files to ' + domain%0A%0Ar = requests.post(url, files=files, auth=(user_email, user_password))%0A%0Aprint(%22Status: %22, r.status_code)%0A%0Ar = requests.get(url)%0A%0Aif r.status_code == 200:%0A folder = r.json()%0A for directory, filepath in folder.iteritems():%0A for filename, filetype in filepath.iteritems():%0A print(%22/%22.join(%5Burl,directory,filename%5D))
|
|
425d8ef0f439e9580c85e0dc04e5fe0c93cffddf
|
add 16
|
p016.py
|
p016.py
|
Python
| 0.999998 |
@@ -0,0 +1,185 @@
+# 2**15 = 32768 and the sum of its digits is 3+2+7+6+8=26%0A# what is the sum of the digits of the number 2**1000?%0A%0Adef f(n):%0A return sum(%5B int(c) for c in str(2**n)%5D)%0A%0Aprint f(1000)%0A%0A
|
|
2b73467ccfbf6e29047223f1c1e3250916b6ffdb
|
add 23
|
p023.py
|
p023.py
|
Python
| 0.999986 |
@@ -0,0 +1,397 @@
+from itertools import combinations_with_replacement%0A%0A%0Adef divisors(n):%0A r = set()%0A for i in range(1, n / 2):%0A if n %25 i == 0:%0A r.add(i)%0A r.add(n / i)%0A r.discard(n)%0A return r%0A%0Aabundant = filter(lambda n: sum(divisors(n)) %3E n, range(2, 29000))%0A%0Au = set(range(1, 29000))%0Afor i in combinations_with_replacement(abundant, 2):%0A u.discard(sum(i))%0A%0Aprint sum(u)%0A
|
|
351f2779549add63963d4103fbe1b058dde59d85
|
Add stupid test to make Jenkins happy.
|
zipline/test/test_sanity.py
|
zipline/test/test_sanity.py
|
Python
| 0.000006 |
@@ -0,0 +1,184 @@
+from unittest2 import TestCase%0A%0Aclass TestEnviroment(TestCase):%0A%0A def test_universe(self):%0A # first order logic is working today. Yay!%0A self.assertTrue(True != False)%0A
|
|
67f5e754a5f90903e09a6a876d858d002c513f8a
|
Add initial draft of posterior models
|
abcpy/posteriors.py
|
abcpy/posteriors.py
|
Python
| 0 |
@@ -0,0 +1,1860 @@
+import scipy as sp%0A%0Afrom .utils import stochastic_optimization%0A%0Aclass BolfiPosterior():%0A%0A def __init__(self, model, threshold, priors=None):%0A self.threshold = threshold%0A self.model = model%0A self.priors = %5BNone%5D * model.n_var%0A self.ML, ML_val = stochastic_optimization(self._neg_unnormalized_loglikelihood_density, self.model.bounds, 10000)%0A print(%22ML parameters: %25s%22 %25 (self.ML))%0A self.MAP, MAP_val = stochastic_optimization(self._neg_unnormalized_logposterior_density, self.model.bounds, 10000)%0A print(%22MAP parameters: %25s%22 %25 (self.MAP))%0A%0A def _unnormalized_loglikelihood_density(self, x):%0A mean, var, std = self.model.evaluate(x)%0A return sp.stats.norm.logcdf(self.threshold, mean, std)%0A%0A def _unnormalized_likelihood_density(self, x):%0A return np.exp(self._unnormalized_loglikelihood_density(x))%0A%0A def _neg_unnormalized_loglikelihood_density(self, x):%0A return -1 * self._unnormalized_loglikelihood_density(x)%0A%0A def _unnormalized_logposterior_density(self, x):%0A return self._unnormalized_loglikelihood_density(x) + self._logprior_density(x)%0A%0A def _unnormalized_posterior_density(self, x):%0A return np.exp(self._unnormalized_logposterior_density(x))%0A%0A def _neg_unnormalized_logposterior_density(self, x):%0A return -1 * self._unnormalized_logposterior_density(x)%0A%0A def _logprior_density(self, x):%0A logprior_density = 0.0%0A for xv, prior in zip(x, self.priors):%0A if prior is not None:%0A logprior_density += prior.getLogProbDensity(xv)%0A return logprior_density%0A%0A def _prior_density(self, x):%0A return np.exp(self._logprior_density(x))%0A%0A def _neg_logprior_density(self, x):%0A return -1 * self._logprior_density(x)%0A%0A def sample(self):%0A return tuple(%5B%5Bv%5D for v in self.MAP%5D)%0A
|
|
8131bb276a467d7df00f7452616869d20d312eb7
|
add api_view test
|
apps/api/tests/tests_view.py
|
apps/api/tests/tests_view.py
|
Python
| 0 |
@@ -0,0 +1,1904 @@
+import datetime%0Afrom django.test import TestCase%0Afrom django.test.client import Client%0Afrom apps.pages.models import Page, Page_translation%0A%0A%0Aclass MySmileApiTestCase(TestCase):%0A def setUp(self):%0A some_page = Page.objects.create(id=1,%0A slug='index',%0A color='#FDA132',%0A photo='images/photo.png',%0A sortorder=1,%0A status=Page.STATUS_PUBLISHED,%0A ptype=Page.PTYPE_API,%0A updated_at=datetime.datetime.now(),%0A created_at=datetime.datetime.now())%0A %0A Page_translation.objects.create(id=1,%0A page=some_page,%0A lang='en',%0A menu='Main',%0A col_central='lorem ipsum',%0A col_bottom_1='lorem ipsum',%0A col_bottom_2='lorem ipsum',%0A col_bottom_3='lorem ipsum',%0A meta_title='Welcome!',%0A meta_description='This is mane page!',%0A meta_keywords='Python3, Django',%0A photo_alt='',%0A photo_description = '',%0A updated_at=datetime.datetime.now(),%0A created_at=datetime.datetime.now())%0A self._client = Client()%0A%0A def test_content_short(self):%0A response = self._client.get('/api/content')%0A self.assertEqual(response.status_code, 200)%0A%0A def test_content_slug(self):%0A response = self._client.get('/api/content?slug=index')%0A self.assertEqual(response.status_code, 200)%0A%0A def test_content_slug_lang(self):%0A response = self._client.get('/api/content?slug=index&lang=en')%0A self.assertEqual(response.status_code, 200)%0A%0A def test_language(self):%0A response = self._client.get('/api/language')%0A self.assertEqual(response.status_code, 200)%0A%0A def test_contact(self):%0A response = self._client.get('/api/contact')%0A self.assertEqual(response.status_code, 200)%0A
|
|
6104fdc57931151f6cf3c8cd517f5efee17fe826
|
Update repost_stock_for_deleted_bins_for_merging_items.py
|
erpnext/patches/v7_1/repost_stock_for_deleted_bins_for_merging_items.py
|
erpnext/patches/v7_1/repost_stock_for_deleted_bins_for_merging_items.py
|
from __future__ import unicode_literals
import frappe
from erpnext.stock.stock_balance import repost_stock
def execute():
frappe.reload_doc('manufacturing', 'doctype', 'production_order_item')
modified_items = frappe.db.sql_list("""
select name from `tabItem`
where is_stock_item=1 and modified >= '2016-10-31'
""")
if not modified_items:
return
item_warehouses_with_transactions = []
transactions = ("Sales Order Item", "Material Request Item", "Purchase Order Item",
"Stock Ledger Entry", "Packed Item")
for doctype in transactions:
item_warehouses_with_transactions += list(frappe.db.sql("""
select distinct item_code, warehouse
from `tab{0}` where docstatus=1 and item_code in ({1})"""
.format(doctype, ', '.join(['%s']*len(modified_items))), tuple(modified_items)))
item_warehouses_with_transactions += list(frappe.db.sql("""
select distinct production_item, fg_warehouse
from `tabProduction Order` where docstatus=1 and production_item in ({0})"""
.format(', '.join(['%s']*len(modified_items))), tuple(modified_items)))
item_warehouses_with_transactions += list(frappe.db.sql("""
select distinct pr_item.item_code, pr.source_warehouse
from `tabProduction Order` pr, `tabProduction Order Item` pr_item
where pr_item.parent and pr.name and pr.docstatus=1 and pr_item.item_code in ({0})"""
.format(', '.join(['%s']*len(modified_items))), tuple(modified_items)))
item_warehouses_with_bin = list(frappe.db.sql("select distinct item_code, warehouse from `tabBin`"))
item_warehouses_with_missing_bin = list(
set(item_warehouses_with_transactions) - set(item_warehouses_with_bin))
for item_code, warehouse in item_warehouses_with_missing_bin:
repost_stock(item_code, warehouse)
|
Python
| 0 |
@@ -188,16 +188,83 @@
_item')%0A
+%09frappe.reload_doc('manufacturing', 'doctype', 'production_order')%0A
%09%0A%09modif
|
142ec5bdca99d11236f2d479cf4dafbc7e8962a3
|
test of the nis module
|
Lib/test/test_nis.py
|
Lib/test/test_nis.py
|
Python
| 0 |
@@ -0,0 +1,362 @@
+import nis%0A%0Averbose = 0%0Aif __name__ == '__main__':%0A verbose = 1%0A%0Amaps = nis.maps()%0Afor nismap in maps:%0A if verbose:%0A%09print nismap%0A mapping = nis.cat(nismap)%0A for k, v in mapping.items():%0A%09if verbose:%0A%09 print ' ', k, v%0A%09if not k:%0A%09 continue%0A%09if nis.match(k, nismap) %3C%3E v:%0A%09 print %22NIS match failed for key %60%25s' in map %60%25s'%22 %25 (k, nismap)%0A%0A
|
|
a35a6b715670e985c0bd711a4cb55df2a267e018
|
Create downloader.py
|
3.下载缓存/downloader.py
|
3.下载缓存/downloader.py
|
Python
| 0.000001 |
@@ -0,0 +1,3230 @@
+import urlparse%0Aimport urllib2%0Aimport random%0Aimport time%0Afrom datetime import datetime, timedelta%0Aimport socket%0A%0A%0ADEFAULT_AGENT = 'wswp'%0ADEFAULT_DELAY = 5%0ADEFAULT_RETRIES = 1%0ADEFAULT_TIMEOUT = 60%0A%0A%0Aclass Downloader:%0A def __init__(self, delay=DEFAULT_DELAY, user_agent=DEFAULT_AGENT, proxies=None, num_retries=DEFAULT_RETRIES, timeout=DEFAULT_TIMEOUT, opener=None, cache=None):%0A socket.setdefaulttimeout(timeout)%0A self.throttle = Throttle(delay)%0A self.user_agent = user_agent%0A self.proxies = proxies%0A self.num_retries = num_retries%0A self.opener = opener%0A self.cache = cache%0A%0A%0A def __call__(self, url):%0A result = None%0A if self.cache:%0A try:%0A result = self.cache%5Burl%5D%0A except KeyError:%0A # url is not available in cache %0A pass%0A else:%0A if self.num_retries %3E 0 and 500 %3C= result%5B'code'%5D %3C 600:%0A # server error so ignore result from cache and re-download%0A result = None%0A if result is None:%0A # result was not loaded from cache so still need to download%0A self.throttle.wait(url)%0A proxy = random.choice(self.proxies) if self.proxies else None%0A headers = %7B'User-agent': self.user_agent%7D%0A result = self.download(url, headers, proxy=proxy, num_retries=self.num_retries)%0A if self.cache:%0A # save result to cache%0A self.cache%5Burl%5D = result%0A return result%5B'html'%5D%0A%0A%0A def download(self, url, headers, proxy, num_retries, data=None):%0A print 'Downloading:', url%0A request = urllib2.Request(url, data, headers or %7B%7D)%0A opener = self.opener or urllib2.build_opener()%0A if proxy:%0A proxy_params = %7Burlparse.urlparse(url).scheme: proxy%7D%0A opener.add_handler(urllib2.ProxyHandler(proxy_params))%0A try:%0A response = opener.open(request)%0A html = response.read()%0A code = response.code%0A except Exception as e:%0A print 'Download error:', str(e)%0A html = ''%0A if hasattr(e, 'code'):%0A code = e.code%0A if num_retries %3E 0 and 500 %3C= code %3C 600:%0A # retry 5XX HTTP errors%0A return self._get(url, headers, proxy, num_retries-1, data)%0A else:%0A code = None%0A return %7B'html': html, 'code': code%7D%0A%0A%0Aclass Throttle:%0A %22%22%22Throttle downloading by sleeping between requests to same domain%0A %22%22%22%0A def __init__(self, delay):%0A # amount of delay between downloads for each domain%0A self.delay = delay%0A # timestamp of when a domain was last accessed%0A self.domains = %7B%7D%0A %0A def wait(self, url):%0A %22%22%22Delay if have accessed this domain recently%0A %22%22%22%0A domain = urlparse.urlsplit(url).netloc%0A last_accessed = self.domains.get(domain)%0A if self.delay %3E 0 and last_accessed is not None:%0A sleep_secs = self.delay - (datetime.now() - last_accessed).seconds%0A if sleep_secs %3E 0:%0A time.sleep(sleep_secs)%0A self.domains%5Bdomain%5D = datetime.now()%0A
|
|
6bf4f7491bdfe8a5afd5eb8cdb4a8fcb2af78b36
|
Add commands/findCognateClassesCrossingMeanings.py
|
ielex/lexicon/management/commands/findCognateClassesCrossingMeanings.py
|
ielex/lexicon/management/commands/findCognateClassesCrossingMeanings.py
|
Python
| 0 |
@@ -0,0 +1,950 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals, print_function%0A%0Afrom collections import defaultdict%0A%0Afrom django.core.management import BaseCommand%0A%0Afrom ielex.lexicon.models import CognateJudgement, Lexeme%0A%0A%0Aclass Command(BaseCommand):%0A%0A help = %22Compiles a list of cognate classes,%22%5C%0A %22%5Cnwhere each cognate class belongs to more than one meaning.%22%0A%0A def handle(self, *args, **options):%0A lexemeMeaningMap = dict(Lexeme.objects.values_list('id', 'meaning_id'))%0A cogLexTuples = CognateJudgement.objects.values_list(%0A 'cognate_class_id', 'lexeme_id')%0A%0A cogMeaningMap = defaultdict(set)%0A for cogId, lexId in cogLexTuples:%0A cogMeaningMap%5BcogId%5D.add(lexemeMeaningMap%5BlexId%5D)%0A%0A for cogId, mIdSet in cogMeaningMap.iteritems():%0A if len(mIdSet) %3E 1:%0A print(%22Cognate class %25s has multiple meanings: %25s.%22 %25%0A (cogId, mIdSet))%0A
|
|
d8eea29d2fd78f2f1e388de36f13cf6069fde974
|
switch ninupdates channel back to announcement since test works
|
addons/loop.py
|
addons/loop.py
|
import asyncio
import copy
import discord
import feedparser
import sys
import time
import datetime
import traceback
import os
import json
from discord.ext import commands
from urllib.parse import urlparse, parse_qs
class Loop:
"""
Loop events.
"""
def __init__(self, bot):
self.bot = bot
bot.loop.create_task(self.start_update_loop())
print('Addon "{}" loaded'.format(self.__class__.__name__))
def __unload(self):
self.is_active = False
is_active = True
last_hour = datetime.datetime.now().hour
warning_time_period = datetime.timedelta(minutes=30)
async def start_update_loop(self):
# thanks Luc#5653
await self.bot.wait_until_all_ready()
while self.is_active:
try:
timestamp = datetime.datetime.now()
timebans = copy.copy(self.bot.timebans)
for ban in timebans.items():
if timestamp > ban[1][1]:
self.bot.actions.append("tbr:" + ban[0])
await self.bot.unban(self.bot.server, ban[1][0])
msg = "⚠️ **Ban expired**: {} | {}#{}".format(ban[1][0].mention, self.bot.escape_name(ban[1][0].name), ban[1][0].discriminator)
await self.bot.send_message(self.bot.modlogs_channel, msg)
self.bot.timebans.pop(ban[0])
elif not ban[1][2]:
warning_time = ban[1][1] - self.warning_time_period
if timestamp > warning_time:
ban[1][2] = True
await self.bot.send_message(self.bot.mods_channel, "**Note**: {} will be unbanned in {} minutes.".format(self.bot.escape_name(ban[1][0]), ((ban[1][1] - timestamp).seconds // 60) + 1))
if timestamp.minute == 0 and timestamp.hour != self.last_hour:
await self.bot.send_message(self.bot.helpers_channel, "{} has {:,} members at this hour!".format(self.bot.server.name, self.bot.server.member_count))
self.last_hour = timestamp.hour
if (timestamp.minute - 1) % 5 == 0 and timestamp.second == 0:
# ugly but it works
ninupdates_feed = feedparser.parse('https://yls8.mtheall.com/ninupdates/feed.php')
# ninupdates_feed = feedparser.parse('./feed.rss')
reported_systems = []
for entry in ninupdates_feed['entries']:
system, ver = entry['title'].split()
if system in reported_systems:
continue
reported_systems.append(system)
reporturl = entry['link']
reporturl_date = parse_qs(urlparse(reporturl).query)['date'][0]
reportpath = 'data/ninupdates/{}.json'.format(system)
to_write = {'reportdate': reporturl_date}
if not os.path.isfile(reportpath):
to_write['ver'] = ver
with open(reportpath, 'w') as f:
json.dump(to_write, f)
else:
with open(reportpath, 'r') as f:
oldver = json.load(f)
if oldver['reportdate'] != reporturl_date:
# "Reminder to not update until confirmed safe or known broken features are fixed."
if reporturl_date == ver:
await self.bot.send_message(self.bot.meta_channel, '⏬ System updated detected for {}\n<{}>'.format(system, reporturl))
to_write['ver'] = reporturl_date
else:
await self.bot.send_message(self.bot.meta_channel, '⏬ System updated detected for {}: {}\n<{}>'.format(system, ver, reporturl))
to_write['ver'] = ver
with open(reportpath, 'w') as f:
json.dump(to_write, f)
elif oldver['reportdate'] == oldver['ver'] and len(ver) != 17:
# lazy method of seeing if an update + vernumber was found before the bot caught the update in the first place
await self.bot.send_message(self.bot.meta_channel, 'ℹ️ New update version for {}: {} ({})'.format(system, ver, reporturl_date))
to_write['ver'] = ver
with open(reportpath, 'w') as f:
json.dump(to_write, f)
except Exception as e:
print('Ignoring exception in start_update_loop', file=sys.stderr)
traceback.print_tb(e.__traceback__)
print('{0.__class__.__name__}: {0}'.format(e), file=sys.stderr)
finally:
await asyncio.sleep(1)
def setup(bot):
bot.add_cog(Loop(bot))
|
Python
| 0 |
@@ -3690,36 +3690,45 @@
essage(self.bot.
-meta
+announcements
_channel, '%E2%8F%AC Sys
@@ -3961,36 +3961,45 @@
essage(self.bot.
-meta
+announcements
_channel, '%E2%8F%AC Sys
@@ -4558,12 +4558,21 @@
bot.
-meta
+announcements
_cha
|
b7dd7f75f655f4fbcb34d8f9ec260a6f18e8f617
|
Add utility to create administrative users.
|
backend/scripts/adminuser.py
|
backend/scripts/adminuser.py
|
Python
| 0 |
@@ -0,0 +1,1632 @@
+#!/usr/bin/env python%0Aimport rethinkdb as r%0Afrom optparse import OptionParser%0Aimport sys%0A%0A%0Adef create_group(conn):%0A group = %7B%7D%0A group%5B'name'%5D = %22Admin Group%22%0A group%5B'description'%5D = %22Administration Group for Materials Commons%22%0A group%5B'id'%5D = 'admin'%0A group%5B'owner'%5D = '[email protected]'%0A group%5B'users'%5D = %5B%5D%0A group%5B'birthtime'%5D = r.now()%0A group%5B'mtime'%5D = r.now()%0A r.table('usergroups').insert(group).run(conn)%0A admin_group = r.table('usergroups').get('admin')%5C%0A .run(conn, time_format='raw')%0A return admin_group%0A%0A%0Adef add_user(user, group, conn):%0A for u in group%5B'users'%5D:%0A if u == user:%0A return%0A group%5B'users'%5D.append(user)%0A r.table('usergroups').get('admin').update(group).run(conn)%0A%0Aif __name__ == %22__main__%22:%0A parser = OptionParser()%0A parser.add_option(%22-P%22, %22--port%22, type=%22int%22, dest=%22port%22,%0A help=%22rethinkdb port%22)%0A parser.add_option(%22-u%22, %22--user%22, type=%22string%22, dest=%22user%22,%0A help=%22user to add to admin group%22)%0A (options, args) = parser.parse_args()%0A%0A if options.port is None:%0A print %22You must specify the rethinkdb port%22%0A sys.exit(1)%0A%0A if options.user is None:%0A print %22You must specify a user to add%22%0A sys.exit(1)%0A%0A conn = r.connect('localhost', options.port, db='materialscommons')%0A admin_group = r.table('usergroups').get('admin')%5C%0A .run(conn, time_format='raw')%0A if admin_group is None:%0A admin_group = create_group(conn)%0A add_user(options.user, admin_group, conn)%0A
|
|
8d32947304d72a13ed8e27d41d35028a904072e9
|
Add libpq package
|
libpq/conanfile.py
|
libpq/conanfile.py
|
Python
| 0.000001 |
@@ -0,0 +1,1504 @@
+from conans import ConanFile, AutoToolsBuildEnvironment, tools%0Aimport os%0A%0Aclass LibpqConn(ConanFile):%0A name = %22libpq%22%0A version = %229.6.3%22%0A license = %22PostgreSQL license https://www.postgresql.org/about/licence/%22%0A url = %22https://github.com/trigger-happy/conan-packages%22%0A description = %22C library for interfacing with postgresql%22%0A settings = %22os%22, %22compiler%22, %22build_type%22, %22arch%22%0A options = %7B%22shared%22: %5BTrue, False%5D%7D%0A default_options = %22shared=False%22%0A generators = %22cmake%22%0A%0A def source(self):%0A pkgLink = 'https://ftp.postgresql.org/pub/source/v%7Bpkgver%7D/postgresql-%7Bpkgver%7D.tar.bz2'.format(pkgver=self.version)%0A self.run(%22curl -JOL %22 + pkgLink)%0A self.run(%22tar xf postgresql-%7Bpkgver%7D.tar.bz2%22.format(pkgver=self.version))%0A self.run(%22mkdir deploy%22)%0A%0A def build(self):%0A env_build = AutoToolsBuildEnvironment(self)%0A install_prefix=os.getcwd()%0A with tools.chdir(%22postgresql-%7Bpkgver%7D%22.format(pkgver=self.version)):%0A with tools.environment_append(env_build.vars):%0A self.run(%22./configure --with-openssl --without-readline --prefix=%7B0%7D%22.format(install_prefix))%0A with tools.chdir(%22src/interfaces/libpq%22):%0A self.run(%22make install%22)%0A%0A def package(self):%0A with tools.chdir(%22deploy%22):%0A self.copy(%22lib/*%22, dst=%22lib%22, keep_path=False)%0A self.copy(%22include/*%22, dst=%22.%22, keep_path=True)%0A%0A def package_info(self):%0A self.cpp_info.libs = %5B%22pq%22%5D%0A
|
|
e59c03f0bad78c9cb1db86f2fb0ac29009c8474e
|
add rll
|
reverse-linked-list.py
|
reverse-linked-list.py
|
Python
| 0.000001 |
@@ -0,0 +1,483 @@
+# https://leetcode.com/problems/reverse-linked-list/%0A%0A# Definition for singly-linked list.%0Aclass ListNode:%0A def __init__(self, x):%0A self.val = x%0A self.next = None%0A%0Aclass Solution:%0A # @param %7BListNode%7D head%0A # @return %7BListNode%7D%0A def reverseList(self, head):%0A last, current = None, head%0A while current:%0A next = current.next%0A current.next = last%0A last = current%0A current = next%0A return last%0A
|
|
0c17398f68597eae175ad6a37945cf37e95e1809
|
Reset invalid default quotas for CloudServiceProjectLink [WAL-814]
|
nodeconductor/structure/migrations/0050_reset_cloud_spl_quota_limits.py
|
nodeconductor/structure/migrations/0050_reset_cloud_spl_quota_limits.py
|
Python
| 0 |
@@ -0,0 +1,924 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.contrib.contenttypes import models as ct_models%0Afrom django.db import migrations, models%0A%0Afrom nodeconductor.quotas.models import Quota%0Afrom nodeconductor.structure.models import CloudServiceProjectLink%0A%0A%0Adef reset_cloud_spl_quota_limits(apps, schema_editor):%0A old_limits = %7B%0A 'vcpu': 100,%0A 'ram': 256000,%0A 'storage': 5120000,%0A %7D%0A%0A for model in CloudServiceProjectLink.get_all_models():%0A content_type = ct_models.ContentType.objects.get_for_model(model)%0A for quota, limit in old_limits.items():%0A Quota.objects.filter(content_type=content_type, name=quota, limit=limit).update(limit=-1)%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('structure', '0049_extend_abbreviation'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(reset_cloud_spl_quota_limits),%0A %5D%0A
|
|
63ae0b619ea50b1e234abc139becaeb84c703302
|
add player class
|
MellPlayer/player.py
|
MellPlayer/player.py
|
Python
| 0 |
@@ -0,0 +1,508 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A'''%0ANetease Music Player%0A%0ACreated on 2017-02-20%0A@author: Mellcap%0A'''%0A%0A%0Aclass Player(object):%0A%0A def __init__(self):%0A pass%0A%0A def start(self):%0A pass%0A%0A def pause(self):%0A pass%0A%0A def start_or_pause(self):%0A pass%0A%0A def switch_song(self, action='next'):%0A '''%0A action: next/prev%0A '''%0A pass%0A%0A def switch_playlist(self, action='next'):%0A '''%0A action: next/prev%0A '''%0A pass%0A
|
|
602db58ff01ef7ea2718d713a5b2026377023b8d
|
Create context_processors.py
|
commons/context_processors.py
|
commons/context_processors.py
|
Python
| 0.000577 |
@@ -0,0 +1,303 @@
+from os import environ%0Afrom %7B%7B project_name %7D%7D import __version__%0Aimport uuid%0A%0A%0Adef metainfo(request):%0A metainfo = %7B %0A 'uuid': unicode(uuid.uuid4()),%0A 'version': __version__,%0A 'static_version': %22?v=%7B%7D%22.format(uuid),%0A 'branch': environ%5B'BRANCH'%5D%0A %7D %0A return metainfo%0A
|
|
6ac6f936a12fcc1578db3fed629ec3a8bc471dcb
|
remove print
|
src/you_get/extractor/acfun.py
|
src/you_get/extractor/acfun.py
|
#!/usr/bin/env python
__all__ = ['acfun_download']
from ..common import *
from .qq import qq_download_by_id
from .sina import sina_download_by_vid
from .tudou import tudou_download_by_iid
from .youku import youku_download_by_vid
import json, re
def get_srt_json(id):
url = 'http://comment.acfun.com/%s.json' % id
return get_html(url)
def get_srt_lock_json(id):
url = 'http://comment.acfun.com/%s_lock.json' % id
return get_html(url)
def acfun_download_by_vid(vid, title=None, output_dir='.', merge=True, info_only=False):
info = json.loads(get_html('http://www.acfun.com/video/getVideo.aspx?id=' + vid))
sourceType = info['sourceType']
sourceId = info['sourceId']
danmakuId = info['danmakuId']
if sourceType == 'sina':
sina_download_by_vid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'youku':
print(sourceId, danmakuId)#
youku_download_by_vid(sourceId, title=title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'tudou':
tudou_download_by_iid(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
elif sourceType == 'qq':
qq_download_by_id(sourceId, title, output_dir=output_dir, merge=merge, info_only=info_only)
else:
raise NotImplementedError(sourceType)
if not info_only:
title = get_filename(title)
try:
print('Downloading %s ...\n' % (title + '.cmt.json'))
cmt = get_srt_json(danmakuId)
with open(os.path.join(output_dir, title + '.cmt.json'), 'w') as x:
x.write(cmt)
print('Downloading %s ...\n' % (title + '.cmt_lock.json'))
cmt = get_srt_lock_json(danmakuId)
with open(os.path.join(output_dir, title + '.cmt_lock.json'), 'w') as x:
x.write(cmt)
except:
pass
def acfun_download(url, output_dir = '.', merge = True, info_only = False):
assert re.match(r'http://[^\.]+.acfun.[^\.]+/v/ac(\d+)', url)
html = get_html(url)
title = r1(r'<h1 id="txt-title-view">([^<>]+)<', html)
title = unescape_html(title)
title = escape_file_path(title)
assert title
videos = re.findall("data-vid=\"(\d+)\" href=\"[^\"]+\" title=\"([^\"]+)\"", html)
if videos is not None:
for video in videos:
p_vid = video[0]
p_title = title + " - " + video[1]
acfun_download_by_vid(p_vid, p_title, output_dir=output_dir, merge=merge, info_only=info_only)
else:
# Useless - to be removed?
id = r1(r"src=\"/newflvplayer/player.*id=(\d+)", html)
sina_download_by_vid(id, title, output_dir=output_dir, merge=merge, info_only=info_only)
site_info = "AcFun.com"
download = acfun_download
download_playlist = playlist_not_supported('acfun')
|
Python
| 0.000001 |
@@ -894,44 +894,8 @@
u':%0A
- print(sourceId, danmakuId)#%0A
|
4152b6a10610aa364e901f062a8611b94f65b3de
|
Create e.py
|
at/abc126/e.py
|
at/abc126/e.py
|
Python
| 0.000001 |
@@ -0,0 +1,524 @@
+# %E5%B9%B6%E6%9F%A5%E9%9B%86%0Aread = input%0An, m = map(int, read().split())%0Af = %5B-1 for i in range(n + 1)%5D # 1 ~ n%0Adef find(x):%0A if f%5Bx%5D%3C0:%0A return x%0A else :%0A f%5Bx%5D = find(f%5Bx%5D)%0A return f%5Bx%5D%0Afor i in range(m):%0A x,y,z = map(int, read().split())%0A if abs(x) %3C abs(y): #%E5%90%88%E5%B9%B6%E5%88%B0x%E4%B8%8A%EF%BC%8C%E4%BF%9D%E8%AF%81x%E6%98%AF%E5%A4%A7%E9%9B%86%E5%90%88%0A x,y = y,x%0A fx = find(x)%0A fy = find(y)%0A if fx == fy:continue # %E5%B7%B2%E7%BB%8F%E5%9C%A8%E4%B8%80%E4%B8%AA%E9%9B%86%E5%90%88%EF%BC%8C%E4%B8%8D%E6%93%8D%E4%BD%9C%0A f%5Bfx%5D = f%5Bfx%5D - 1%0A f%5Bfy%5D = fx%0A# print(x,y,fx,fy,f)%0A%0Aans = 0%0Afor i in range(1, n+1): # 1~n%0A if f%5Bi%5D %3C 0:%0A ans += 1%0Aprint(ans)%0A
|
|
2057ebd9bae44b232b133ca0c0f76e11d4ca3b5f
|
Add missing file
|
conary/server/wsgi_adapter.py
|
conary/server/wsgi_adapter.py
|
Python
| 0.000006 |
@@ -0,0 +1,1793 @@
+#%0A# Copyright (c) rPath, Inc.%0A#%0A# This program is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A#%0A# This program is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU General Public License for more details.%0A#%0A# You should have received a copy of the GNU General Public License%0A# along with this program. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A#%0A%0Aimport webob%0Aimport sys%0A%0A%0Adef modpython_to_webob(mpreq, handler):%0A # This could be written as a mod_python -%3E WSGI gateway, but this is much%0A # more compact.%0A from mod_python import apache%0A mpreq.add_common_vars()%0A environ = dict(mpreq.subprocess_env.items())%0A environ%5B'wsgi.version'%5D = (1, 0)%0A if environ.get('HTTPS', '').lower() == 'on':%0A environ%5B'wsgi.url_scheme'%5D = 'https'%0A else:%0A environ%5B'wsgi.url_scheme'%5D = 'http'%0A environ%5B'wsgi.input'%5D = mpreq%0A environ%5B'wsgi.errors'%5D = sys.stderr%0A environ%5B'wsgi.multithread'%5D = False%0A environ%5B'wsgi.multiprocess'%5D = True%0A environ%5B'wsgi.run_once'%5D = False%0A%0A request = webob.Request(environ)%0A response = handler(request)%0A%0A mpreq.status = response.status_int%0A for key, value in response.headerlist:%0A if key.lower() == 'content-length':%0A mpreq.set_content_length(int(value))%0A elif key.lower() == 'content-type':%0A mpreq.content_type = value%0A else:%0A mpreq.headers_out.add(key, value)%0A for chunk in response.app_iter:%0A mpreq.write(chunk)%0A return apache.OK%0A
|
|
aef33a2c8f34d164bba18741a3cf6e5b71a60a99
|
Add stub file for extract_csv.py
|
extract_csv.py
|
extract_csv.py
|
Python
| 0.000001 |
@@ -0,0 +1,153 @@
+def extract_csv(filename):%0A%09# TODO: connect to sqlite database and extract a csv of the rows.%0A%09pass%0A%0A%0Aif __name__ == '__main__':%0A%09extract_csv('data.csv')
|
|
f99eb9a2397f571f045f6a5f663a42878e94b3ea
|
Create Euler_003.py
|
Euler_003.py
|
Euler_003.py
|
Python
| 0.000169 |
@@ -0,0 +1,110 @@
+#%0Ax, num = 2, 600851475143%0Awhile num != x:%0A if num %25 x == 0: num = num / x; x = 2%0A else: x += 1%0Aprint x%0A
|
|
1072b8e28e75cf41a35302c9febd1ec22473e966
|
Add code/analyse_chain_growth.py
|
code/analyse_chain_growth.py
|
code/analyse_chain_growth.py
|
Python
| 0.000111 |
@@ -0,0 +1,1704 @@
+#!/usr/bin/env python%0A%0Aimport sys%0Aimport os%0Aimport os.path%0A%0Aimport argparse%0A%0Aparser = argparse.ArgumentParser()%0Aparser.add_argument('dirs', type=str, nargs='+',%0A help='directories containing simulation files')%0Aparser.add_argument('--rate', type=float, default=0.1)%0Aparser.add_argument('--sites', type=int, default=1)%0Aparser.add_argument('-N', type=int, default=10000)%0Aargs = parser.parse_args()%0A%0Aimport numpy as np%0Afrom scipy.optimize import leastsq%0Afrom io import StringIO%0Aimport matplotlib.pyplot as plt%0A%0ANNEIGH=3.5%0A%0A# Open lammps log file to extract thermodynamic observables%0Adef from_log(logfile,i0,i1):%0A return np.loadtxt(StringIO(u''.join(logfile%5Bi0+1:i1%5D)), unpack=True)%0A%0Afitfunc = lambda p, t: 1*(1.-np.exp(-t*p%5B0%5D-p%5B1%5D))%0Aerrfunc = lambda p, t, y: fitfunc(p, t) - y%0A%0Ap_data = %5B%5D%0Afor d in args.dirs:%0A logfile = open(os.path.join(os.getcwd(), d, 'log.lammps')).readlines()%0A start_indices = %5B(i,l) for (i,l) in enumerate(logfile) if l.startswith('Time ')%5D%0A stop_indices = %5B(i,l) for (i,l) in enumerate(logfile) if l.startswith('Loop time')%5D%0A time, e_tot, temp, e_kin, e_vdw, e_bond, e_pot, press, rho, n_bonds, n_bonds_max, bonds = from_log(logfile, start_indices%5B-1%5D%5B0%5D, stop_indices%5B-1%5D%5B0%5D)%0A time -= time%5B0%5D%0A plt.plot(time, n_bonds)%0A nmax = min(int(1./(args.rate*args.fraction)), len(time))%0A nmax = len(time)%0A p, success = leastsq(errfunc, %5Bargs.rate*NNEIGH*args.fraction, 0./args.rate%5D, args=(time%5B:nmax%5D, n_bonds%5B:nmax%5D))%0A p_data.append(p)%0A print p%0A%0Aplt.plot(time, 1*(1.-np.exp(-time*args.rate*NNEIGH*args.fraction)))%0Ap_data = np.array(p_data)%0Aprint p_data.mean(axis=0)%0Aplt.plot(time, fitfunc(p_data.mean(axis=0), time), 'k--')%0A%0Aplt.show()%0A
|
|
bd15388aa877f32ebc613511ad909b311ed3bcf0
|
Add tests
|
sympy/concrete/tests/test_dispersion.py
|
sympy/concrete/tests/test_dispersion.py
|
Python
| 0.000001 |
@@ -0,0 +1,1122 @@
+from sympy.core import Symbol, S, oo%0Afrom sympy.concrete.dispersion import *%0A%0A%0Adef test_dispersion():%0A x = Symbol(%22x%22)%0A%0A fp = S(0).as_poly(x)%0A assert sorted(dispersionset(fp)) == %5B0%5D%0A%0A fp = S(2).as_poly(x)%0A assert sorted(dispersionset(fp)) == %5B0%5D%0A%0A fp = (x + 1).as_poly(x)%0A assert sorted(dispersionset(fp)) == %5B0%5D%0A assert dispersion(fp) == 0%0A fp = (x*(x + 3)).as_poly(x)%0A assert sorted(dispersionset(fp)) == %5B0, 3%5D%0A assert dispersion(fp) == 3%0A fp = ((x - 3)*(x + 3)).as_poly(x)%0A assert sorted(dispersionset(fp)) == %5B0, 6%5D%0A assert dispersion(fp) == 6%0A fp = ((x + 1)*(x + 2)).as_poly(x)%0A assert sorted(dispersionset(fp)) == %5B0, 1%5D%0A assert dispersion(fp) == 1%0A%0A fp = (x**4 - 3*x**2 + 1).as_poly(x)%0A gp = fp.shift(-3)%0A assert sorted(dispersionset(fp, gp)) == %5B2, 3, 4%5D%0A assert dispersion(fp, gp) == 4%0A assert sorted(dispersionset(gp, fp)) == %5B%5D%0A assert dispersion(gp, fp) == -oo%0A%0A a = Symbol(%22a%22)%0A fp = (x*(3*x**2+a)*(x-2536)*(x**3+a)).as_poly(x)%0A gp = fp.as_expr().subs(x, x-345).as_poly(x)%0A assert sorted(dispersionset(fp, gp)) == %5B345, 2881%5D%0A
|
|
6ed3b62efe24aa8aeaedd314bb4e472628713bac
|
Create deft_opportunist.py
|
tpdatasrc/tpgamefiles/scr/tpModifiers/deft_opportunist.py
|
tpdatasrc/tpgamefiles/scr/tpModifiers/deft_opportunist.py
|
Python
| 0.001698 |
@@ -0,0 +1,586 @@
+#Deft Opportunist: Complete Adventurer, p. 106%0A%0Afrom templeplus.pymod import PythonModifier%0Afrom toee import *%0Aimport tpdp%0A%0Aprint %22Registering Deft Opportunist%22%0A%0Adef DOAOO(attachee, args, evt_obj):%0A%0A%09if attachee.has_feat(%22Deft Opportunist%22) != 0:%0A%09%09#Check if it's an AOO, if so add 4 to the Attack Roll%0A%09%09if evt_obj.attack_packet.get_flags() & D20CAF_ATTACK_OF_OPPORTUNITY:%0A%09%09%09evt_obj.bonus_list.add(4, 0, %22Target Deft Opportunist bonus%22)%0A%09%09%09return 0%0A%0AeDO = PythonModifier(%22Deft Opportunist Feat%22, 2)%0AeDO.MapToFeat(%22Deft Opportunist%22)%0AeDO.AddHook(ET_OnToHitBonus2, EK_NONE, DOAOO, ())%0A
|
|
52f8daf63644fde1efd1c132d6b02ac6670ef0a4
|
Add migrations merge
|
temba/channels/migrations/0038_merge.py
|
temba/channels/migrations/0038_merge.py
|
Python
| 0.000001 |
@@ -0,0 +1,299 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('channels', '0037_auto_20160905_1537'),%0A ('channels', '0033_auto_20160623_1438'),%0A %5D%0A%0A operations = %5B%0A %5D%0A
|
|
1b538aba890c8a81fc7bf66f2c35519608fbd6be
|
Create drivers.py
|
chips/analog/mock/drivers.py
|
chips/analog/mock/drivers.py
|
Python
| 0.000001 |
@@ -0,0 +1,107 @@
+# This code has to be added to the corresponding __init__.py%0A%0ADRIVERS%5B%22analogmock%22%5D = %5B%22ANALOG%22, %22PUUM%22%5D%0A
|
|
528de5a29d7beb743e5e80775a349f931e71262f
|
add test that triggers previous error
|
test/workflows/test_base.py
|
test/workflows/test_base.py
|
Python
| 0 |
@@ -0,0 +1,499 @@
+import json%0Aimport fmriprep.workflows.base as base%0Aimport re%0Aimport unittest%0Aimport mock%0A%0Aclass TestBase(unittest.TestCase):%0A%0A def test_fmri_preprocess_single(self):%0A ''' Tests that it runs without errors '''%0A # NOT a test for correctness%0A # SET UP INPUTS%0A test_settings = %7B%0A 'output_dir': '.',%0A 'work_dir': '.'%0A %7D%0A%0A # SET UP EXPECTATIONS%0A%0A # RUN%0A base.fmri_preprocess_single(settings=test_settings)%0A%0A # ASSERT%0A
|
|
0e02a9de3599e726b5a4dffd17f92a0cd0d2aaee
|
add import script for Wyre
|
polling_stations/apps/data_collection/management/commands/import_wyre.py
|
polling_stations/apps/data_collection/management/commands/import_wyre.py
|
Python
| 0 |
@@ -0,0 +1,385 @@
+from data_collection.management.commands import BaseXpressWebLookupCsvImporter%0A%0Aclass Command(BaseXpressWebLookupCsvImporter):%0A council_id = 'E07000128'%0A addresses_name = 'WyrePropertyPostCodePollingStationWebLookup-2017-03-08 2.CSV'%0A stations_name = 'WyrePropertyPostCodePollingStationWebLookup-2017-03-08 2.CSV'%0A elections = %5B'local.lancashire.2017-05-04'%5D%0A
|
|
4e9ecd13cedc069e53e6acc941f643ad0f8cf6b0
|
fix cleanup command
|
corehq/apps/callcenter/management/commands/remove_callcenter_form_data.py
|
corehq/apps/callcenter/management/commands/remove_callcenter_form_data.py
|
from __future__ import print_function
from optparse import make_option
from django.core.management.base import BaseCommand
from sqlalchemy.engine import create_engine
from sqlalchemy.orm.session import sessionmaker
from corehq.apps.callcenter.utils import get_call_center_domains, get_or_create_mapping
from ctable.models import SqlExtractMapping
from ctable.util import get_extractor
from django.conf import settings
mapping_name = 'cc_form_submissions'
class Command(BaseCommand):
help = 'Remove legacy call center data'
option_list = BaseCommand.option_list + (
make_option('--all-tables', action='store_true', default=False,
help="Delete all tables regardless of domain setting"),
make_option('--all-mappings', action='store_true', default=False,
help="Delete all mappings and mappings regardless of domain setting"),
make_option('--dry-run', action='store_true', default=False,
help="Don't actually do anything"),
)
def handle(self, *args, **options):
drop_all_tables = options.get('all-tables', False)
delete_all_mappings = options.get('all-mappings', False)
dry_run = options.get('dry_run', False)
if dry_run:
print("\n-------- DRY RUN --------\n")
all_tables = get_db_tables(settings.SQL_REPORTING_DATABASE_URL)
domains = get_call_center_domains()
for domain in domains:
print("Processing domain", domain)
mapping = get_or_create_mapping(domain, mapping_name)
if mapping.table_name in all_tables:
print("\tDropping SQL table", mapping.table_name)
if not dry_run:
extractor = get_extractor(mapping.backend)
extractor.clear_all_data(mapping)
if not mapping.new_document:
print("\tDeleting ctable mapping", mapping.name)
if not dry_run:
mapping.delete()
missed_tables = [t for t in all_tables if t.endswith(mapping_name)]
if missed_tables:
print('\nSome tables are still hanging around:')
with extractor.backend as backend:
for table in missed_tables:
if not drop_all_tables:
print('\t*', table)
else:
print("\tDeleting table", table)
backend.op.drop_table(table)
if not drop_all_tables:
print("\n To delete these tables run with '--all-tables'")
all_mappings = SqlExtractMapping.all()
missed_mappings = [m for m in all_mappings if m.name == mapping_name]
if missed_mappings:
print('\nSome mappings are still hanging around:')
for mapping in missed_mappings:
if not delete_all_mappings:
print('\t*', mapping.name, 'for domains', ', '.join(mapping.domains))
else:
print('\tDeleting mapping', mapping.name, 'for domains', ', '.join(mapping.domains))
mapping.delete()
if not delete_all_mappings:
print("\n To delete these mappings run with '--all-mappings'")
def get_session(url):
engine = create_engine(url)
session = sessionmaker(bind=engine)
return session()
def get_db_tables(database_url):
session = get_session(database_url)
results = session.execute("""
SELECT table_name
FROM information_schema.tables
WHERE table_schema = 'public';
""")
return [r[0] for r in results]
|
Python
| 0.000006 |
@@ -1100,17 +1100,17 @@
get('all
--
+_
tables',
@@ -1163,17 +1163,17 @@
get('all
--
+_
mappings
@@ -1376,16 +1376,57 @@
E_URL)%0A%0A
+ extractor = get_extractor('SQL')%0A
@@ -1717,24 +1717,24 @@
table_name)%0A
+
@@ -1757,71 +1757,8 @@
un:%0A
- extractor = get_extractor(mapping.backend)%0A
|
b14fb988321076f4cf17cebec7635fd209e08465
|
Create video.py
|
client/video.py
|
client/video.py
|
Python
| 0.000001 |
@@ -0,0 +1,334 @@
+# Capture video with OpenCV%0A%0Aimport numpy as np%0Aimport cv2%0Aimport time %0A%0Acap = cv2.VideoCapture('serenity.mp4')%0A%0Awhile(cap.isOpened()):%0A%0A%09ret, frame = cap.read()%0A%0A%09# time.sleep(.25)%0A%0A%09cv2.rectangle(frame,(384,0),(510,128),(0,255,0),3)%0A%09%0A%09cv2.imshow('frame',frame)%0A%09if cv2.waitKey(5) & 0xFF == ord('q'):%0A%09%09break%0A%09%09%0A%09%09%0A%09%09%0Acap.release()%0A
|
|
18a356c9fa49f32627481f312b03aa34ff711456
|
Revert "Define the tests as grpc_cc_test to automatically test against all po…"
|
test/core/bad_client/generate_tests.bzl
|
test/core/bad_client/generate_tests.bzl
|
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates the appropriate build.json data for all the bad_client tests."""
load("//bazel:grpc_build_system.bzl", "grpc_cc_test", "grpc_cc_library")
def test_options():
return struct()
# maps test names to options
BAD_CLIENT_TESTS = {
'badreq': test_options(),
'connection_prefix': test_options(),
'headers': test_options(),
'initial_settings_frame': test_options(),
'head_of_line_blocking': test_options(),
'large_metadata': test_options(),
'server_registered_method': test_options(),
'simple_request': test_options(),
'window_overflow': test_options(),
'unknown_frame': test_options(),
}
def grpc_bad_client_tests():
grpc_cc_library(
name = 'bad_client_test',
srcs = ['bad_client.cc'],
hdrs = ['bad_client.h'],
deps = ['//test/core/util:grpc_test_util', '//:grpc', '//:gpr', '//test/core/end2end:cq_verifier']
)
for t, topt in BAD_CLIENT_TESTS.items():
grpc_cc_test(
name = '%s_bad_client_test' % t,
srcs = ['tests/%s.cc' % t],
deps = [':bad_client_test'],
)
|
Python
| 0 |
@@ -680,81 +680,8 @@
%22%22%0A%0A
-load(%22//bazel:grpc_build_system.bzl%22, %22grpc_cc_test%22, %22grpc_cc_library%22)%0A
%0Adef
@@ -1194,21 +1194,23 @@
ts():%0A
-grpc_
+native.
cc_libra
@@ -1464,21 +1464,23 @@
():%0A
-grpc_
+native.
cc_test(
|
59ac83e45116a97cfbdd7522f967337e73d51766
|
add cargo deny test
|
tests/integration_tests/build/test_dependencies.py
|
tests/integration_tests/build/test_dependencies.py
|
Python
| 0 |
@@ -0,0 +1,649 @@
+# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.%0A# SPDX-License-Identifier: Apache-2.0%0A%22%22%22Enforces controls over dependencies.%22%22%22%0A%0Aimport os%0Aimport framework.utils as utils%0A%0A%0Adef test_licenses():%0A %22%22%22Ensure license compatibility for Firecracker.%0A%0A For a list of currently allowed licenses checkout deny.toml in%0A the root directory.%0A%0A @type: build%0A %22%22%22%0A toml_file = os.path.normpath(%0A os.path.join(%0A os.path.dirname(os.path.realpath(__file__)),%0A '../../../Cargo.toml')%0A )%0A utils.run_cmd('cargo deny --manifest-path %7B%7D check licenses'.%0A format(toml_file))%0A
|
|
c3f01d8b365e6d367b1a565e5ce59cf04eb1bac3
|
fix build
|
get_version.py
|
get_version.py
|
Python
| 0.000001 |
@@ -0,0 +1,129 @@
+%22%22%22Return the short version string.%22%22%22%0Afrom mpfmonitor._version import __short_version__%0Aprint(%22%7B%7D.x%22.format(__short_version__))%0A
|
|
15d3692aee84432b6b7f8306505b3f59649fd6f9
|
Remove mimetype from the module_files table
|
cnxarchive/sql/migrations/20160128111115_mimetype_removal_from_module_files.py
|
cnxarchive/sql/migrations/20160128111115_mimetype_removal_from_module_files.py
|
Python
| 0.000001 |
@@ -0,0 +1,1578 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%5C%0A- Move the mimetype value from %60%60module_files%60%60 to %60%60files%60%60.%0A- Remove the %60%60mimetype%60%60 column from the %60%60module_files%60%60 table.%0A%0A%22%22%22%0Afrom __future__ import print_function%0Aimport sys%0A%0A%0Adef up(cursor):%0A # Move the mimetype value from %60%60module_files%60%60 to %60%60files%60%60.%0A cursor.execute(%22UPDATE files AS f SET media_type = mf.mimetype %22%0A %22FROM module_files AS mf %22%0A %22WHERE mf.fileid = f.fileid%22)%0A%0A # Warn about missing mimetype.%0A cursor.execute(%22SELECT fileid, sha1 %22%0A %22FROM files AS f %22%0A %22WHERE f.fileid NOT IN (SELECT fileid FROM module_files)%22)%0A rows = '%5Cn'.join(%5B'%7B%7D, %7B%7D'.format(fid, sha1)%0A for fid, sha1 in cursor.fetchall()%5D)%0A print(%22These files (fileid, sha1) do not have a corresponding %22%0A %22module_files entry:%5Cn%7B%7D%5Cn%22.format(rows),%0A file=sys.stderr)%0A%0A # Remove the %60%60mimetype%60%60 column from the %60%60module_files%60%60 table.%0A cursor.execute(%22ALTER TABLE module_files DROP COLUMN mimetype%22)%0A%0A%0Adef down(cursor):%0A # Add a %60%60mimetype%60%60 column to the %60%60module_files%60%60 table.%0A cursor.execute(%22ALTER TABLE module_files ADD COLUMN mimetype TEXT%22)%0A%0A # Move the mimetype value from %60%60files%60%60 to %60%60module_files%60%60.%0A print(%22Rollback cannot accurately replace mimetype values that %22%0A %22were in the %60%60modules_files%60%60 table.%22,%0A file=sys.stderr)%0A cursor.execute(%22UPDATE module_files AS mf SET mimetype = f.media_type %22%0A %22FROM files AS f %22%0A %22WHERE f.fileid = mf.fileid%22)%0A
|
|
67b5cd3f00ca57c4251dab65c5a6e15ab2be8a42
|
Create result.py
|
aiorucaptcha/result.py
|
aiorucaptcha/result.py
|
Python
| 0.000002 |
@@ -0,0 +1,165 @@
+class ResultObject:%0A%0A def __init__(self, code, task_id):%0A self.code = code%0A self.task_id = task_id%0A%0A def __str__(self):%0A return self.code%0A
|
|
4a7a15359763cbd6956bd30bde7cd68b05b2b4a2
|
test _compare_and_pop_smallest
|
tests/test_huffman_codes.py
|
tests/test_huffman_codes.py
|
Python
| 0.00006 |
@@ -0,0 +1,1393 @@
+import sys%0Aimport os%0Asys.path.append(os.path.abspath(os.path.dirname(__file__) + '../..'))%0Aimport unittest%0Afrom huffman_codes import huffman_codes, Node, Queue, _compare_and_pop_smallest, %5C%0A _traverse_children_and_assign_codes%0A%0A%0Aclass TestHuffmanCodes(unittest.TestCase):%0A def test_compare_and_pop_smallest__first_q_smaller(self):%0A q_1 = Queue()%0A q_1.enqueue((None, 1))%0A q_2 = Queue()%0A q_2.enqueue((None, 2))%0A output = _compare_and_pop_smallest(q_1, q_2)%0A self.assertEqual(output%5B1%5D, 1)%0A%0A def test_compare_and_pop_smallest__second_q_smaller(self):%0A q_1 = Queue()%0A q_1.enqueue((None, 1))%0A q_2 = Queue()%0A q_2.enqueue((None, 2))%0A output = _compare_and_pop_smallest(q_2, q_1)%0A self.assertEqual(output%5B1%5D, 1)%0A%0A def test_compare_and_pop_smallest__first_q_empty(self):%0A q_1 = Queue()%0A q_2 = Queue()%0A q_2.enqueue((None, 2))%0A output = _compare_and_pop_smallest(q_2, q_1)%0A self.assertEqual(output%5B1%5D, 2)%0A%0A def test_compare_and_pop_smallest__second_q_empty(self):%0A q_1 = Queue()%0A q_1.enqueue((None, 1))%0A q_2 = Queue()%0A output = _compare_and_pop_smallest(q_2, q_1)%0A self.assertEqual(output%5B1%5D, 1)%0A%0A def test_traverse_children_and_assign_codes(self):%0A pass%0A%0A def test_huffman_codes(self):%0A pass%0A
|
|
57112cdb9f4e47bba11978dc0569d6dfb45f15eb
|
Update cybergis-script-burn-alpha.py
|
bin/cybergis-script-burn-alpha.py
|
bin/cybergis-script-burn-alpha.py
|
#!/usr/bin/python2.7
import sys
import os
import threading
import time
import Queue
import struct
import numpy
import struct
import gdal
import osr
import gdalnumeric
from gdalconst import *
class RenderThread(threading.Thread):
def __init__(self, threadID, threadName, queue):
threading.Thread.__init__(self)
self.threadID = threadID
self.threadName = threadName
self.queue = queue
def run(self):
process(self.name,self.queue)
def process(threadName, q):
while not exitFlag:
queueLock.acquire()
if not workQueue.empty():
inBand, outband, y0, y, r, t = q.get()
queueLock.release()
#==#
if t==1:
print "Rendering rows "+str(y*r)+" to "+str((y*r)+r)+"."
outBand.WriteArray(inBand.ReadAsArray(0,y*r,inBand.XSize,r,inBand.XSize,r),0,y*r)
elif t==2:
print "Rendering row "+str(y0+y)+"."
outBand.WriteArray(inBand.ReadAsArray(0,y0+y,inBand.XSize,1,inBand.XSize,1),0,y0+y)
else:
queueLock.release()
#==#
time.sleep(1)
def main():
if(len(sys.argv)==8):
inputFile = sys.argv[1]
inputBands = int(sys.argv[2])
alphaFile = sys.argv[3]
alphaIndex = int(sys.argv[4])
outputFile = sys.argv[5]
rows = int(sys.argv[6])
numberOfThreads = int(sys.argv[7])
if numberOfThreads > 0:
if(os.path.exists(inputFile) and os.path.exists(alphaFile)):
if(not os.path.exists(outputFile)):
inputDataset = gdal.Open(inputFile,GA_ReadOnly)
alphaDataset = gdal.Open(alphaFile,GA_ReadOnly)
if ((not inputDataset is None) and (not alphaDataset is None)):
outputFormat = "GTiff"
numberOfBands = inputBands+1
w = inputDataset.RasterXSize
h = inputDataset.RasterYSize
r = rows
outputDataset = initDataset(outputFile,outputFormat,w,h,numberOfBands)
outputDataset.SetGeoTransform(list(inputDataset.GetGeoTransform()))
outputDataset.SetProjection(inputDataset.GetProjection())
if numberOfThreads == 1:
for b in range(inputBands):
inBand = inputDataset.GetRasterBand(b+1)
outBand = outputDataset.GetRasterBand(b+1)
for y in range(int(inBand.YSize/r)):
outBand.WriteArray(inBand.ReadAsArray(0,y*r,inBand.XSize,r,inBand.XSize,r),0,y*r)
y0 = inBand.YSize/r
for y in range(inBand.YSize%r):
outBand.WriteArray(inBand.ReadAsArray(0,y0+y,inBand.XSize,1,inBand.XSize,1),0,y0+y)
burn(alphaDataset.GetRasterBand(alphaIndex),outputDataset.GetRasterBand(numberOfBands),r)
elif numberOfThreads > 1:
exitFlag = 0
queueLock = threading.Lock()
workQueue = Queue.Queue(0)
threads = []
threadID = 1
for threadID in range(numberOfThreads):
thread = RenderThread(threadID, ("Thread "+threadID), workQueue)
thread.start()
threads.append(thread)
threadID += 1
queueLock.acquire()
#Add RGB Tasks
for b in range(inputBands):
print "Processing Band"+str(b)
inBand = inputDataset.GetRasterBand(b+1)
outBand = outputDataset.GetRasterBand(b+1)
y0 = inBand.YSize/r
for y in range(int(inBand.YSize/r)):
task = inband, outBand, y0, y, r, 1
workQueue.put(task)
for y in range(inBand.YSize%r):
task = inband, outBand, y0, y, r, 2
workQueue.put(task)
#Add Alpha Tasks
inBand = alphaDataset.GetRasterBand(alphaIndex)
outBand = outputDataset.GetRasterBand(numberOfBands)
y0 = inBand.YSize/r
for y in range(int(inBand.YSize/r)):
task = inband, outBand, y0, y, r, 1
workQueue.put(task)
for y in range(inBand.YSize%r):
task = inband, outBand, y0, y, r, 2
workQueue.put(task)
queueLock.release()
print "Queue is full with "+str(qsize)+" tasks."
print "Rendering threads will now execute."
while not workQueue.empty():
pass
exitFlag = 1 #tells threads it's time to quit
for t in threads:
t.join()
inputDataset = None
outputDataset = None
else:
print "Error Opening File"
else:
print "Output file already exists"
else:
print "Input file does not exist."
else:
print "Threads needs to be 1 or higher."
else:
print "Usage: cybergis-script-burn-alpha.py <input_file> <input_bands> <alpha_file> <alpha_band_index> <output_file> <rows> <threads>"
def burn(inBand,outBand,rows):
r = rows
for y in range(int(inBand.YSize/r)):
outBand.WriteArray(inBand.ReadAsArray(0,y*r,inBand.XSize,r,inBand.XSize,r),0,y*r)
y0 = inBand.YSize/r
for y in range(inBand.YSize%r):
outBand.WriteArray(inBand.ReadAsArray(0,y0+y,inBand.XSize,1,inBand.XSize,1),0,y0+y)
def initDataset(outputFile,f,w,h,b):
driver = gdal.GetDriverByName(f)
metadata = driver.GetMetadata()
return driver.Create(outputFile,w,h,b,gdal.GDT_Byte,['ALPHA=YES'])
main()
|
Python
| 0 |
@@ -3008,16 +3008,20 @@
hread %22+
+str(
threadID
@@ -3021,16 +3021,17 @@
hreadID)
+)
, workQu
|
d20b03daaf1824ea4b032ac3ea5cb5f087016b49
|
Fix some styles
|
bluebottle/collect/serializers.py
|
bluebottle/collect/serializers.py
|
from rest_framework.serializers import ModelSerializer
from rest_framework_json_api.relations import (
ResourceRelatedField,
SerializerMethodResourceRelatedField
)
from bluebottle.activities.utils import (
BaseActivitySerializer, BaseActivityListSerializer, BaseContributorSerializer
)
from bluebottle.bluebottle_drf2.serializers import PrivateFileSerializer
from bluebottle.collect.models import CollectActivity, CollectContributor, CollectType
from bluebottle.collect.states import CollectContributorStateMachine
from bluebottle.fsm.serializers import TransitionSerializer
from bluebottle.time_based.permissions import CanExportParticipantsPermission
from bluebottle.utils.serializers import ResourcePermissionField
class CollectActivitySerializer(BaseActivitySerializer):
permissions = ResourcePermissionField('collect-activity-detail', view_args=('pk',))
collect_type = ResourceRelatedField(
queryset=CollectType.objects,
source='type'
)
my_contributor = SerializerMethodResourceRelatedField(
model=CollectContributor,
read_only=True,
source='get_my_contributor'
)
contributors = SerializerMethodResourceRelatedField(
model=CollectContributor,
many=True,
related_link_view_name='related-collect-contributors',
related_link_url_kwarg='activity_id'
)
contributors_export_url = PrivateFileSerializer(
'collect-contributors-export',
url_args=('pk', ),
filename='contributors.csv',
permission=CanExportParticipantsPermission,
read_only=True
)
def get_contributors(self, instance):
user = self.context['request'].user
return [
contributor for contributor in instance.contributors.all() if (
isinstance(contributor, CollectContributor) and (
contributor.status in [
CollectContributorStateMachine.new.value,
CollectContributorStateMachine.accepted.value,
CollectContributorStateMachine.succeeded.value
] or
user in (instance.owner, instance.initiative.owner, contributor.user)
)
)
]
def get_my_contributor(self, instance):
user = self.context['request'].user
if user.is_authenticated:
return instance.contributors.filter(user=user).instance_of(CollectContributor).first()
class Meta(BaseActivitySerializer.Meta):
model = CollectActivity
fields = BaseActivitySerializer.Meta.fields + (
'my_contributor',
'contributors',
'start',
'end',
'contributors_export_url',
'location',
'collect_type'
)
class JSONAPIMeta(BaseActivitySerializer.JSONAPIMeta):
resource_name = 'activities/collects'
included_resources = BaseActivitySerializer.JSONAPIMeta.included_resources + [
'my_contributor',
'location',
'collect_type'
]
included_serializers = dict(
BaseActivitySerializer.included_serializers,
**{
'my_contributor': 'bluebottle.collect.serializers.CollectContributorSerializer',
'location': 'bluebottle.geo.serializers.GeolocationSerializer',
'collect_type': 'bluebottle.collect.serializers.CollectTypeSerializer',
}
)
class CollectActivityListSerializer(BaseActivityListSerializer):
permissions = ResourcePermissionField('collect-activity-detail', view_args=('pk',))
class Meta(BaseActivityListSerializer.Meta):
model = CollectActivity
fields = BaseActivityListSerializer.Meta.fields + (
'start',
'end',
)
class JSONAPIMeta(BaseActivityListSerializer.JSONAPIMeta):
resource_name = 'activities/collects'
class CollectActivityTransitionSerializer(TransitionSerializer):
resource = ResourceRelatedField(queryset=CollectActivity.objects.all())
included_serializers = {
'resource': 'bluebottle.collect.serializers.CollectActivitySerializer',
}
class JSONAPIMeta(object):
included_resources = ['resource', ]
resource_name = 'activities/collect-activity-transitions'
class CollectContributorSerializer(BaseContributorSerializer):
activity = ResourceRelatedField(
queryset=CollectActivity.objects.all()
)
permissions = ResourcePermissionField('collect-contributor-detail', view_args=('pk',))
class Meta(BaseContributorSerializer.Meta):
model = CollectContributor
meta_fields = BaseContributorSerializer.Meta.meta_fields + ('permissions', )
fields = BaseContributorSerializer.Meta.fields + ('value',)
class JSONAPIMeta(BaseContributorSerializer.JSONAPIMeta):
resource_name = 'contributors/collect/contributor'
included_resources = [
'user',
'activity',
]
included_serializers = {
'user': 'bluebottle.initiatives.serializers.MemberSerializer',
'activity': 'bluebottle.collect.serializers.CollectActivitySerializer',
}
class CollectContributorListSerializer(CollectContributorSerializer):
pass
class CollectContributorTransitionSerializer(TransitionSerializer):
resource = ResourceRelatedField(queryset=CollectContributor.objects.all())
field = 'states'
included_serializers = {
'resource': 'bluebottle.collect.serializers.CollectContributorSerializer',
}
class JSONAPIMeta(object):
resource_name = 'contributors/collect/collect-contributor-transitions'
included_resources = [
'resource',
]
class CollectTypeSerializer(ModelSerializer):
class Meta(object):
model = CollectType
fields = ('id', 'name', 'description')
class JSONAPIMeta(object):
resource_name = 'activities/collect-types'
|
Python
| 0.000429 |
@@ -950,16 +950,40 @@
bjects,%0A
+ required=False,%0A
@@ -4943,24 +4943,25 @@
/contributor
+s
'%0A in
@@ -5670,24 +5670,16 @@
collect/
-collect-
contribu
|
3f84a3cb50e18ce9df96a9173d0be180633aad0d
|
Add polynomial learning example
|
Examples/polynomial_approximation.py
|
Examples/polynomial_approximation.py
|
Python
| 0.01065 |
@@ -0,0 +1,1226 @@
+%22%22%22%0AExample of neural network learning a polynomial equation. Test polynomial is f(x) = (6x%5E2 + 3x) %C3%B7 (3x)%0A%0ATraining is run on x values from 1.0 to 100.0%0A%22%22%22%0Afrom mazex import MazeX%0Aimport numpy as np%0Aimport random%0Aimport math%0Aimport matplotlib.pyplot as plt%0A%0A# Create list to store how close networks guesses are%0Agraph_data = %5B%5D%0A%0A# Create Neural Network%0Anet = MazeX(%5B1, 20, 4, 1%5D, %5B%22relu%22, %22relu%22, 'lin'%5D, learning_constant=0.00001)%0A%0A# test how close the network is to the correct answer given x = 12 and log the result for the graph%0Adef check(run):%0A guess = net.forward(np.array(%5B%5B12.0%5D%5D))%0A print(f%22run %7Brun%7D OFF BY: %7B25 - guess%5B0%5D%5B0%5D%7D%22)%0A graph_data.append(25 - guess%5B0%5D%5B0%5D)%0A%0A%0A# run a bunch of training steps on random values to help network learn the polynomial%0Afor i in range(100):%0A t = random.uniform(1.0, 100.0)%0A ans = ((6 * math.pow(t, 2)) + (3 * t)) / (3 * t)%0A Y = np.full((1, 1), ans)%0A X = np.full((1, 1), t)%0A%0A net.train(X, Y)%0A check(i)%0A%0A# plot the training data for visual feedback of learning progress. Saves graph to same directory as script%0Aplt.plot(graph_data)%0Aplt.ylabel('Error')%0Aplt.xlabel(%22training run%22)%0Aplt.title('Error over time')%0Aplt.savefig(f'Polynomial_approximation.png')%0A%0A%0A%0A
|
|
abe40e3c82ef1f351275a59b2e537f43530caa0c
|
Clean up db script (remove articles older than two days).
|
app/cleanup_stories.py
|
app/cleanup_stories.py
|
Python
| 0 |
@@ -0,0 +1,629 @@
+from pymongo import MongoClient%0Afrom fetch_stories import get_mongo_client, close_mongo_client%0Afrom bson import ObjectId%0Afrom datetime import datetime, timedelta%0A%0Adef remove_old_stories():%0A client = get_mongo_client()%0A db = client.get_default_database()%0A article_collection = db%5B'articles'%5D%0A%0A two_days_ago = datetime.utcnow() - timedelta(days=2)%0A two_days_ago = ObjectId.from_datetime(two_days_ago)%0A%0A query = %7B%0A '_id' : %7B '$lt' : two_days_ago%7D%0A %7D%0A %0A article_collection.remove(query)%0A close_mongo_client(client)%0A%0Adef main():%0A remove_old_stories()%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
ba590d28810409fa57783e6d29a651790f865e5c
|
create base api exceptions module
|
apps/api/exceptions.py
|
apps/api/exceptions.py
|
Python
| 0 |
@@ -0,0 +1,697 @@
+import json%0A%0Afrom tastypie.exceptions import TastypieError%0Afrom tastypie.http import HttpResponse%0A%0A%0Aclass CustomBadRequest(TastypieError):%0A %22%22%22%0A This exception is used to interrupt the flow of processing to immediately%0A return a custom HttpResponse.%0A %22%22%22%0A%0A def __init__(self, success=False, code=%22%22, message=%22%22):%0A self._response = %7B%0A %22error%22: %7B%0A %22success%22: success or False,%0A %22code%22: code or %22not_provided%22,%0A %22message%22: message or %22No error message was provided.%22%7D%7D%0A%0A @property%0A def response(self):%0A return HttpResponse(%0A json.dumps(self._response),%0A content_type='application/json')%0A
|
|
b1a5764956e0f569b4955dbf43e5656873c903f6
|
Create new package. (#7649)
|
var/spack/repos/builtin/packages/soapdenovo-trans/package.py
|
var/spack/repos/builtin/packages/soapdenovo-trans/package.py
|
Python
| 0 |
@@ -0,0 +1,2237 @@
+##############################################################################%0A# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, [email protected], All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/spack/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass SoapdenovoTrans(MakefilePackage):%0A %22%22%22SOAPdenovo-Trans is a de novo transcriptome assembler basing on the%0A SOAPdenovo framework, adapt to alternative splicing and different%0A expression level among transcripts.%22%22%22%0A%0A homepage = %22http://soap.genomics.org.cn/SOAPdenovo-Trans.html%22%0A url = %22https://github.com/aquaskyline/SOAPdenovo-Trans/archive/1.0.4.tar.gz%22%0A%0A version('1.0.4', 'a3b00b0f743b96141c4d5f1b49f2918c')%0A%0A build_directory = 'src'%0A%0A def edit(self, spec, prefix):%0A with working_dir(self.build_directory):%0A makefile = FileFilter('Makefile')%0A makefile.filter('CFLAGS= -O3 -fomit-frame-pointer -static',%0A 'CFLAGS= -O3 -fomit-frame-pointer')%0A%0A def build(self, spec, prefix):%0A with working_dir(self.build_directory):%0A make()%0A make('127mer=1', parallel=False)%0A%0A def install(self, spec, prefix):%0A install_tree('.', prefix.bin)%0A
|
|
402004b1a0612e5b4eeb703f3787dd1b7f3def30
|
make auto migration
|
yandex_kassa/migrations/0004_auto_20151209_0940.py
|
yandex_kassa/migrations/0004_auto_20151209_0940.py
|
Python
| 0.000001 |
@@ -0,0 +1,1036 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('yandex_kassa', '0003_auto_20151116_1530'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelOptions(%0A name='payment',%0A options=%7B'ordering': ('-created',), 'verbose_name': '%5Cu043f%5Cu043b%5Cu0430%5Cu0442%5Cu0435%5Cu0436', 'verbose_name_plural': '%5Cu041f%5Cu043b%5Cu0430%5Cu0442%5Cu0435%5Cu0436%5Cu0438'%7D,%0A ),%0A migrations.AlterField(%0A model_name='payment',%0A name='scid',%0A field=models.PositiveIntegerField(default=528277, verbose_name=b'%5Cxd0%5Cx9d%5Cxd0%5Cxbe%5Cxd0%5Cxbc%5Cxd0%5Cxb5%5Cxd1%5Cx80 %5Cxd0%5Cxb2%5Cxd0%5Cxb8%5Cxd1%5Cx82%5Cxd1%5Cx80%5Cxd0%5Cxb8%5Cxd0%5Cxbd%5Cxd1%5Cx8b'),%0A ),%0A migrations.AlterField(%0A model_name='payment',%0A name='shop_id',%0A field=models.PositiveIntegerField(default=104674, verbose_name=b'ID %5Cxd0%5Cxbc%5Cxd0%5Cxb0%5Cxd0%5Cxb3%5Cxd0%5Cxb0%5Cxd0%5Cxb7%5Cxd0%5Cxb8%5Cxd0%5Cxbd%5Cxd0%5Cxb0'),%0A ),%0A %5D%0A
|
|
7ec4133b11ba91541e9ec9895e39a2c402c63087
|
define the AVB loss separately
|
avb/models/avb_loss.py
|
avb/models/avb_loss.py
|
Python
| 0 |
@@ -0,0 +1,1717 @@
+import keras.backend as ker%0Afrom keras.layers import Layer%0Afrom keras.losses import categorical_crossentropy%0A%0A%0Aclass AVBLossLayer(Layer):%0A def __init__(self, **kwargs):%0A self.is_placeholder = True%0A super(AVBLossLayer, self).__init__(**kwargs)%0A%0A @staticmethod%0A def avb_loss(discrim_output_posterior, discrim_output_posterior_prior, data_log_probs):%0A # 1/m * sum_%7Bi=1%7D%5Em log p(x_i%7Cz), where z = encoder(x_i, epsilon_i)%0A reconstruction_log_likelihood = ker.mean(ker.sum(data_log_probs, axis=1))%0A # The decoder tries to maximise the reconstruction data log-likelihood%0A decoder_loss = -reconstruction_log_likelihood%0A # The encoder tries to minimize the discriminator output%0A encoder_loss = ker.mean(discrim_output_posterior)%0A # The dicriminator loss is the GAN loss with input from the prior and posterior distributions%0A discriminator_loss = ker.mean(categorical_crossentropy(y_true=ker.ones_like(discrim_output_posterior),%0A y_pred=discrim_output_posterior)%0A + categorical_crossentropy(y_true=ker.zeros_like(discrim_output_posterior_prior),%0A y_pred=discrim_output_posterior_prior))%0A return ker.mean(encoder_loss + decoder_loss + discriminator_loss)%0A%0A def call(self, inputs, **kwargs):%0A discrim_output_posterior, discrim_output_prior, decoder_output_log_probs = inputs%0A loss = self.avb_loss(discrim_output_posterior, discrim_output_prior, decoder_output_log_probs)%0A self.add_loss(loss, inputs=inputs)%0A # unused output%0A return inputs%5B0%5D%0A
|
|
0c3b3ff095af2ccf6c3891a99170c982b1639f4e
|
test pickle retention adapted.
|
test/test_module_pickle_retention.py
|
test/test_module_pickle_retention.py
|
#!/usr/bin/env python2.6
#Copyright (C) 2009-2010 :
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
#
#This file is part of Shinken.
#
#Shinken is free software: you can redistribute it and/or modify
#it under the terms of the GNU Affero General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#Shinken is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU Affero General Public License for more details.
#
#You should have received a copy of the GNU Affero General Public License
#along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
#It's ugly I know....
import os
from shinken_test import *
sys.path.append("../shinken/modules")
from pickle_retention_file_scheduler import *
class TestConfig(ShinkenTest):
#setUp is in shinken_test
#Change ME :)
def test_pickle_retention(self):
print self.conf.modules
#get our modules
mod = None
mod = Module({'type' : 'pickle_retention_file', 'module_name' : 'PickleRetention', 'path' : 'tmp/retention-test.dat'})
try :
os.unlink(mod.path)
except :
pass
sl = get_instance(mod)
print "Instance", sl
#Hack here :(
sl.properties = {}
sl.properties['to_queue'] = None
sl.init()
l = logger
#update the hosts and services in the scheduler in the retention file
sl.update_retention_objects(self.sched, l)
#Now we change things
svc = self.sched.hosts.find_by_name("test_host_0")
self.assert_(svc.state == 'PENDING')
print "State", svc.state
svc.state = 'UP' #was PENDING in the save time
r = sl.load_retention_objects(self.sched, l)
self.assert_(r == True)
#check that the host was not changed by the loading
svc2 = self.sched.hosts.find_by_name("test_host_0")
self.assert_(svc == svc2)
self.assert_(svc.state == 'PENDING')
#Ok, we can delete the retention file
os.unlink(mod.path)
# Now make real loops with notifications
self.scheduler_loop(10, [[svc, 2, 'CRITICAL | bibi=99%']])
#update the hosts and services in the scheduler in the retention file
sl.update_retention_objects(self.sched, l)
r = sl.load_retention_objects(self.sched, l)
self.assert_(r == True)
if __name__ == '__main__':
unittest.main()
|
Python
| 0 |
@@ -900,16 +900,17 @@
port os%0A
+%0A
from shi
@@ -928,16 +928,18 @@
mport *%0A
+%0A%0A
sys.path
@@ -968,16 +968,55 @@
dules%22)%0A
+import pickle_retention_file_scheduler%0A
from pic
@@ -1053,16 +1053,247 @@
mport *%0A
+from module import Module%0A%0Amodconf = Module()%0Amodconf.module_name = %22PickleRetention%22%0Amodconf.module_type = pickle_retention_file_scheduler.properties%5B'type'%5D%0Amodconf.properties = pickle_retention_file_scheduler.properties.copy()%0A%0A
%0Aclass T
@@ -1482,24 +1482,25 @@
one%0A
+#
mod = Module
@@ -1606,16 +1606,123 @@
.dat'%7D)%0A
+ mod = pickle_retention_file_scheduler.Pickle_retention_scheduler(modconf, 'tmp/retention-test.dat')
%0A
|
fd54c28be8d9ffd7e5711035bf5b5e1b7fe332cc
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/ab2e190c2bfe60b3b738c125ca9db1a2785cdcaa.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "285e48bc47db23a479637fd1e2767b9a35dc2c9b"
TFRT_SHA256 = "6f0067d0cb7bb407caeef060603b6e33f1231cddf1ce4ce2ebce027dc418764f"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
|
Python
| 0.000003 |
@@ -210,133 +210,133 @@
= %22
-285e48bc47db23a479637fd1e2767b9a35dc2c9b%22%0A TFRT_SHA256 = %226f0067d0cb7bb407caeef060603b6e33f1231cddf1ce4ce2ebce027dc418764f
+ab2e190c2bfe60b3b738c125ca9db1a2785cdcaa%22%0A TFRT_SHA256 = %22b097063dd10c010e827e58cc8e5a0e4008d99bcba1dcb20259c8ef890620b9b5
%22%0A%0A
|
c9c00a6a5ab267ab56dd147e6542cae6566061d8
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/dc109b725d8f36f8c7db7847f0c95a819c43f9e9.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "4bcf968d66a6bb2899b9d99917b916f6ec04c327"
TFRT_SHA256 = "9bd2cc2e7003f73f767e138ae4776b43d15ca286f0f85ad374ec5f8aaeab1aa4"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0.000001 |
@@ -228,133 +228,133 @@
= %22
-4bcf968d66a6bb2899b9d99917b916f6ec04c327%22%0A TFRT_SHA256 = %229bd2cc2e7003f73f767e138ae4776b43d15ca286f0f85ad374ec5f8aaeab1aa4
+dc109b725d8f36f8c7db7847f0c95a819c43f9e9%22%0A TFRT_SHA256 = %22e6a6359ecd731f7208f32402fac9bf874b26855497c0252fcddc44e5133320df
%22%0A%0A
|
d81a2b0328c86165b09c2d41aa2a4684c75388cd
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/78537f15f4873bbed59258bed4442225303f462a.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "a2f5e07760d2a888370d0686546b757ee9628494"
TFRT_SHA256 = "70653b94faa603befef83457482c8a1151fa529b3215124e18a0f97592d5ad05"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0 |
@@ -228,133 +228,133 @@
= %22
-a2f5e07760d2a888370d0686546b757ee9628494%22%0A TFRT_SHA256 = %2270653b94faa603befef83457482c8a1151fa529b3215124e18a0f97592d5ad05
+78537f15f4873bbed59258bed4442225303f462a%22%0A TFRT_SHA256 = %2287526ed2a287d7809b2cadf82f9db94994b0019635d431f2fc9c3db2bd4a31cc
%22%0A%0A
|
8c1b20941c1216bb56fa55fe881962d2ea883366
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/c68238f982305e3618a2b5347e1e0a5663898c90.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "377c20166e8e1b5124493c1433b1df34ca62cf3f"
TFRT_SHA256 = "f0c3c03e7d9ca2e10c3256f28bf9c0aa0aa26d9aa4da539c00532ee5217ba7ba"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0.000002 |
@@ -210,133 +210,133 @@
= %22
-377c20166e8e1b5124493c1433b1df34ca62cf3f%22%0A TFRT_SHA256 = %22f0c3c03e7d9ca2e10c3256f28bf9c0aa0aa26d9aa4da539c00532ee5217ba7ba
+c68238f982305e3618a2b5347e1e0a5663898c90%22%0A TFRT_SHA256 = %22b28ed95058c101a9d3203ddbaa271044de984f6b49c5609124e1cb4ae0b3e165
%22%0A%0A
|
5a8fde172f0fc7aff841e8059927ff126712b321
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/feffe7beb261f6dfe9af083e8f46dfea293ded54.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "509cf2f10beb666002ece6a7b968fe2c7c0c1e4b"
TFRT_SHA256 = "14b22d39d3eebcf255e4dd8ee8630b4da3ecc786f5053adf9c94a2e42362ee0c"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0.000005 |
@@ -210,133 +210,133 @@
= %22
-509cf2f10beb666002ece6a7b968fe2c7c0c1e4b%22%0A TFRT_SHA256 = %2214b22d39d3eebcf255e4dd8ee8630b4da3ecc786f5053adf9c94a2e42362ee0c
+feffe7beb261f6dfe9af083e8f46dfea293ded54%22%0A TFRT_SHA256 = %22830492c8a9884e5ca84b15a4da953491f74b2ffbd45656352d58b624e881b9b7
%22%0A%0A
|
e42862ce7bde45e90bec0980f3c35c5cef5c65b6
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/47a1de40f17e70f901238edfe99dc510a5db797a.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "033f079420053002701271e4173bdcaf21bd1b73"
TFRT_SHA256 = "15c1c5a3617b91322d4ef96ce884676d27164cf94211f83bc1fcec50ab96aad4"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
)
|
Python
| 0.000002 |
@@ -210,133 +210,133 @@
= %22
-033f079420053002701271e4173bdcaf21bd1b73%22%0A TFRT_SHA256 = %2215c1c5a3617b91322d4ef96ce884676d27164cf94211f83bc1fcec50ab96aad4
+47a1de40f17e70f901238edfe99dc510a5db797a%22%0A TFRT_SHA256 = %2287631491c3fdd34b4d00b6999274468b89a98f23113aeafa15b53c3a7517fc36
%22%0A%0A
|
a6cc742a7272d1138031e26c61fd10617e6b0ac1
|
Initialize transpositionTest
|
books/CrackingCodesWithPython/Chapter09/transpositionTest.py
|
books/CrackingCodesWithPython/Chapter09/transpositionTest.py
|
Python
| 0.000001 |
@@ -0,0 +1,1442 @@
+# Transposition Cipher Test%0A# https://www.nostarch.com/crackingcodes/ (BSD Licensed)%0A%0Aimport random, sys, transpositionEncrypt, transpositionDecrypt%0A%0Adef main():%0A random.seed(42) # Set the random %22seed%22 to a static value.%0A%0A for i in range(20): # Run 20 tests.%0A # Generate random messages to test.%0A%0A # The message will have a random length:%0A message = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' * random.randint(4, 40)%0A%0A # Convert the message string to a list to shuffle it:%0A message = list(message)%0A random.shuffle(message)%0A message = ''.join(message) # Convert the list back to a string.%0A%0A print('Test #%25s: %25s...%22' %25 (i + 1, message%5B:50%5D))%0A%0A # Check all possible keys for each message:%0A for key in range(1, int(len(message)/2)):%0A encrypted = transpositionEncrypt.encryptMessage(key, message)%0A decrypted = transpositionDecrypt.decryptMessage(key, encrypted)%0A%0A # If the decryption doesn't match the original message, display%0A # an error message and quit:%0A if message != decrypted:%0A print('Mismatch with key %25s and message %25s.' %25 (key, message))%0A print('Decrypted as: ' + decrypted)%0A sys.exit()%0A%0A print('Transposition cipher test passed.')%0A%0A%0A# If transpositionTest.py is run (instead of imported as a module) call%0A# the main() function:%0Aif __name__ == '__main__':%0A main()%0A
|
|
1c7daf0bd9801885d7740620b3e81faa03ce49d4
|
add sign/verify json tests
|
test/crypto/olm_device_test.py
|
test/crypto/olm_device_test.py
|
Python
| 0.000001 |
@@ -0,0 +1,2768 @@
+from copy import deepcopy%0A%0Afrom matrix_client.client import MatrixClient%0Afrom matrix_client.crypto.olm_device import OlmDevice%0A%0AHOSTNAME = 'http://example.com'%0A%0A%0Aclass TestOlmDevice:%0A cli = MatrixClient(HOSTNAME)%0A user_id = '@user:matrix.org'%0A device_id = 'QBUAZIFURK'%0A device = OlmDevice(cli.api, user_id, device_id)%0A signing_key = device.olm_account.identity_keys%5B'ed25519'%5D%0A%0A def test_sign_json(self):%0A example_payload = %7B%0A %22name%22: %22example.org%22,%0A %22unsigned%22: %7B%0A %22age_ts%22: 922834800000%0A %7D%0A %7D%0A saved_payload = deepcopy(example_payload)%0A%0A signed_payload = self.device.sign_json(example_payload)%0A signature = signed_payload.pop('signatures')%0A # We should not have modified the payload besides the signatures key%0A assert example_payload == saved_payload%0A key_id = 'ed25519:' + self.device_id%0A assert signature%5Bself.user_id%5D%5Bkey_id%5D%0A%0A def test_verify_json(self):%0A example_payload = %7B%0A %22test%22: %22test%22,%0A %22unsigned%22: %7B%0A %22age_ts%22: 922834800000%0A %7D,%0A %22signatures%22: %7B%0A %22@user:matrix.org%22: %7B%0A %22ed25519:QBUAZIFURK%22: (%22WI7TgwqTp4YVn1dFWmDu7xrJvEikEzAbmoqyM5JY5t0P%22%0A %226fVaiMFAirmwb13GzIyYDLR+nQfoksNBcrp7xSaMCA%22)%0A %7D%0A %7D%0A %7D%0A saved_payload = deepcopy(example_payload)%0A signing_key = %22WQF5z9b4DV1DANI5HUMJfhTIDvJs1jkoGTLY6AQdjF0%22%0A%0A assert self.device.verify_json(example_payload, signing_key, self.user_id,%0A self.device_id)%0A%0A # We should not have modified the payload%0A assert example_payload == saved_payload%0A%0A # Try to verify an object that has been tampered with%0A example_payload%5B'test'%5D = 'test1'%0A assert not self.device.verify_json(example_payload, signing_key, self.user_id,%0A self.device_id)%0A%0A # Try to verify invalid payloads%0A example_payload%5B'signatures'%5D.pop(self.user_id)%0A assert not self.device.verify_json(example_payload, signing_key, self.user_id,%0A self.device_id)%0A example_payload.pop('signatures')%0A assert not self.device.verify_json(example_payload, signing_key, self.user_id,%0A self.device_id)%0A%0A def test_sign_verify(self):%0A example_payload = %7B%0A %22name%22: %22example.org%22,%0A %7D%0A%0A signed_payload = self.device.sign_json(example_payload)%0A assert self.device.verify_json(signed_payload, self.signing_key, self.user_id,%0A self.device_id)%0A
|
|
c4ffd77a56e09f3b418e6d13e8339fe693fffbdb
|
add fasd_cleanup script
|
misc/fasd_clean.py
|
misc/fasd_clean.py
|
Python
| 0 |
@@ -0,0 +1,1524 @@
+#/usr/bin/env python%0A# Copyright (C) 2015 Ratheesh S%[email protected]%3E%0A%0A# This program is free software; you can redistribute it and/or%0A# modify it under the terms of the GNU General Public License%0A# as published by the Free Software Foundation; either version 2%0A# of the License, or (at your option) any later version.%0A%0A# This program is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU General Public License for more details.%0A%0A# You should have received a copy of the GNU General Public License%0A# along with this program; if not, write to the Free Software%0A# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA%0A# 02110-1301, USA.%0A%0Aimport os%0A%0Adb_file = %22/home/ratheesh/.fasd%22%0Apurged_items = 0%0A%0Atry:%0A f = open(db_file, %22r+%22)%0Aexcept IOError:%0A print 'ERROR: No File found: %25s' %25 db_file%0A exit(1)%0A%0Ad = f.readlines()%0Af.close()%0A%0Atry:%0A f = open(db_file, %22w+%22)%0Aexcept IOError:%0A print 'ERROR: No File found: %25s' %25 db_file%0A exit(1)%0A%0Aprint %22Cleaning fasd database ...%22%0Afor i in d:%0A path, sep, misc = i.partition('%7C')%0A if os.path.exists(path):%0A f.write(i)%0A else:%0A print 'Removing %25s' %25 path%0A purged_items += 1 # increment purged items%0Af.close()%0A%0Aif purged_items == 0:%0A print %22fasd database is clean!%22%0Aelse:%0A print %22---------------------------------------%22%0A print %22No. of Purged Items: %25d%22 %25 purged_items%0A%0A# End of File%0A
|
|
2a45679c02e74ce7a63e259b1475d4190086084e
|
Add errors to zombase
|
zombase/errors.py
|
zombase/errors.py
|
Python
| 0.000001 |
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-%0Aclass ZombaseRuntimeError(Exception):%0A pass%0A
|
|
629c9e330e6114680f22af125252d95fb6989201
|
update migrations for link manager
|
webquills/linkmgr/migrations/0002_alter_linkcategory_site.py
|
webquills/linkmgr/migrations/0002_alter_linkcategory_site.py
|
Python
| 0 |
@@ -0,0 +1,545 @@
+# Generated by Django 3.2 on 2021-06-07 11:11%0A%0Afrom django.db import migrations, models%0Aimport django.db.models.deletion%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('wqsites', '0001_initial'),%0A ('linkmgr', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='linkcategory',%0A name='site',%0A field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='link_lists', to='wqsites.site', verbose_name='site'),%0A ),%0A %5D%0A
|
|
24e2ddfd49aa2c05879460baeb67ed6cc75ffa87
|
fix benchmark script
|
benchmark/benchmark.py
|
benchmark/benchmark.py
|
from pyqg import qg_model, model
import time
import cProfile
import pstats
import numpy as np
tmax = 104000000
dtfac = (64 * 8000.)
mynx = [32, 64, 128, 256]
res = np.zeros((len(mynx), 5))
for j, nx in enumerate(mynx):
dt = dtfac / nx
for i, (use_fftw, nth) in enumerate([(False, 1), (True, 1),
(True, 2), (True, 4), (True, 8)]):
m = qg_model.QGModel(nx=64, tmax=tmax, dt=dt,
use_fftw=use_fftw, ntd=nth)
tic = time.time()
m.run()
toc = time.time()
tottime = toc-tic
res[j,i] = tottime
print 'nx=%3d, fftw=%g, threads=%g: %g' % (nx, use_fftw, nth, tottime)
# # profiling
# prof = cProfile.Profile()
# prof.run('m.run()')
# p = pstats.Stats(prof)
# p.sort_stats('cum').print_stats(0.3)
|
Python
| 0.000007 |
@@ -1,36 +1,15 @@
-from pyqg import qg_model, model
+import pyqg
%0Aimp
@@ -78,14 +78,14 @@
x =
-104
+8
000
+*1
000%0A
@@ -93,17 +93,16 @@
dtfac =
-(
64 * 800
@@ -103,17 +103,16 @@
* 8000.
-)
%0A%0Amynx =
@@ -133,41 +133,120 @@
256
-%5D%0Ares = np.zeros((len(mynx), 5))%0A
+, 512, 1024, 2048%5D%0Amynth = %5B1,2,4,8,16,32%5D%0Ares = np.zeros((len(mynx), 5))%0A%0A%0Aprint 'nx, threads, timesteps, time'
%0Afor
@@ -298,16 +298,17 @@
nx%0A%0A
+#
for i, (
@@ -359,24 +359,25 @@
ue, 1),%0A
+#
(True, 2
@@ -402,16 +402,52 @@
e, 8)%5D):
+%0A for i, nth in enumerate(mynth):
%0A%0A
@@ -456,16 +456,12 @@
m =
+py
qg
-_model
.QGM
@@ -468,18 +468,18 @@
odel(nx=
-64
+nx
, tmax=t
@@ -489,16 +489,25 @@
, dt=dt,
+ ntd=nth,
%0A
@@ -528,39 +528,150 @@
- use_fftw=use_fftw, ntd=nth)
+# no output %0A twrite=np.inf,%0A # no time average%0A taveint=np.inf,)%0A%0A
%0A
@@ -774,16 +774,17 @@
+#
res%5Bj,i%5D
@@ -802,16 +802,17 @@
+#
print 'n
@@ -874,16 +874,81 @@
ottime)%0A
+ print '%253d, %253d, %258d, %2510.4f' %25 (nx, nth, m.tc, tottime)%0A
%0A%0A%0A#
|
2347ee253f04fa87b28206b0ec00fd2a3fffb49f
|
Create hello_market_maker.py
|
hello_market_maker.py
|
hello_market_maker.py
|
Python
| 0.999986 |
@@ -0,0 +1,1848 @@
+class hello_market_maker():%0A def __init__(self, anchor_price, tick_increment, max_pos):%0A self.anchor_price = anchor_price%0A self.tick_increment = tick_increment%0A self.position = 0%0A self.upper_bound = anchor_price + ((max_pos + 1) * tick_increment)%0A self.lower_bound = anchor_price - ((max_pos + 1) * tick_increment)%0A self.max_pos = max_pos%0A self.mkt = inside_market(anchor_price - tick_increment, anchor_price + tick_increment)%0A%0A def on_bid_fill(self):%0A # modify current bid and ask down 1 tick_increment%0A #self.mkt.shift(-self.tick_increment)%0A self.position += 1%0A price = self.mkt.bid.price%0A if self.position %3C self.max_pos:%0A self.mkt.shift(-self.tick_increment)%0A else:%0A self.mkt.exit(BID, self.tick_increment)%0A return %22BID_FILL @ %22, price%0A%0A def on_ask_fill(self):%0A # modify current bid and ask up 1 tick_increment%0A #self.mkt.shift(-self.tick_increment)%0A self.position -= 1%0A price = self.mkt.ask.price%0A if self.position %3E -self.max_pos:%0A self.mkt.shift(self.tick_increment)%0A else:%0A self.mkt.exit(ASK, self.tick_increment)%0A return %22ASK_FILL @ %22, price%0A%0A def evaluate(self, trade_price):%0A fill, price = self.mkt.evaluate(trade_price)%0A self.adjust_bounds(trade_price)%0A if fill == BID:%0A self.on_bid_fill()%0A elif fill == ASK:%0A self.on_ask_fill()%0A else:%0A filler = 0%0A return fill, price%0A%0A def adjust_bounds(self, trade_price):%0A if trade_price %3E self.upper_bound:%0A self.mkt.shift(self.tick_increment)%0A self.upper_bound += self.tick_increment%0A self.lower_bound += self.tick_increment%0A print %22ADJUSTING UP%22%0A elif trade_price %3C self.lower_bound:%0A self.mkt.shift(-self.tick_increment)%0A self.upper_bound -= self.tick_increment%0A self.lower_bound -= self.tick_increment%0A print %22ADJUSTING DOWN%22%0A%0A%0A
|
|
372f4a988411e48a0c50cdc74fb2a7f4e5abf052
|
Add a server identity test
|
tests/server-identity.py
|
tests/server-identity.py
|
Python
| 0.000001 |
@@ -0,0 +1,233 @@
+import nose%0Aimport requests%0A%0Aimport fixture%0A%0A%[email protected]_setup(fixture.start_tangelo, fixture.stop_tangelo)%0Adef test_server_identity():%0A response = requests.get(fixture.url(%22/%22))%0A assert response.headers%5B%22server%22%5D == %22Tangelo%22%0A
|
|
19db4647257617992e9b195828baf39907cc5db1
|
Add tests for exit codes
|
tests/test_exit_codes.py
|
tests/test_exit_codes.py
|
Python
| 0 |
@@ -0,0 +1,583 @@
+%22%22%22Check that the CLI returns the appropriate exit code.%22%22%22%0A%0Aimport subprocess%0A%0A%0Adef test_exit_code_demo():%0A %22%22%22Ensure that linting the demo returns an exit code of 1.%22%22%22%0A try:%0A subprocess.check_output(%22proselint --demo%22, shell=True)%0A%0A except subprocess.CalledProcessError as grepexc:%0A assert(grepexc.returncode == 1)%0A%0A%0Adef test_exit_code_version():%0A %22%22%22Ensure that getting the version returns an exit code of 0.%22%22%22%0A try:%0A subprocess.check_output(%22proselint --version%22, shell=True)%0A%0A except subprocess.CalledProcessError:%0A assert(False)%0A
|
|
787298889fd85dffb597dee6571dead42227c7d6
|
add test to validate generated stub constants.pyi
|
tests/test_type_stubs.py
|
tests/test_type_stubs.py
|
Python
| 0 |
@@ -0,0 +1,1810 @@
+%22%22%22Test type stubs for correctness where possible.%22%22%22%0A%0Aimport os%0Aimport sys%0A%0Aimport pytest%0A%0Aimport xmlsec%0A%0Ablack = pytest.importorskip('black')%0A%0A%0Aif sys.version_info %3E= (3, 4):%0A from pathlib import Path%0Aelse:%0A from _pytest.pathlib import Path%0A%0A%0Aconstants_stub_header = %22%22%22%0Aimport sys%0Afrom typing import NamedTuple%0A%0Aif sys.version_info %3E= (3, 8):%0A from typing import Final, Literal%0Aelse:%0A from typing_extensions import Final, Literal%0A%0A%0Aclass __KeyData(NamedTuple): # __KeyData type%0A href: str%0A name: str%0A%0A%0Aclass __Transform(NamedTuple): # __Transform type%0A href: str%0A name: str%0A usage: int%0A%0A%0A%22%22%22%0A%0A%0Adef gen_constants_stub():%0A %22%22%22%0A Generate contents of the file:%60xmlsec/constants.pyi%60.%0A%0A Simply load all constants at runtime,%0A generate appropriate type hint for each constant type.%0A %22%22%22%0A%0A def process_constant(name):%0A %22%22%22Generate line in stub file for constant name.%22%22%22%0A obj = getattr(xmlsec.constants, name)%0A return '%7Bname%7D: Final = %7Bobj!r%7D'.format(name=name, obj=obj)%0A%0A names = list(sorted(name for name in dir(xmlsec.constants) if not name.startswith('__')))%0A lines = %5Bprocess_constant(name) for name in names%5D%0A return constants_stub_header + os.linesep.join(lines)%0A%0A%0Adef test_xmlsec_constants_stub(request):%0A %22%22%22%0A Generate the stub file for :mod:%60xmlsec.constants%60 from existing code.%0A%0A Compare it against the existing stub :file:%60xmlsec/constants.pyi%60.%0A %22%22%22%0A rootdir = Path(str(request.config.rootdir))%0A stub = rootdir / 'src' / 'xmlsec' / 'constants.pyi'%0A mode = black.FileMode(target_versions=%5Bblack.TargetVersion.PY38%5D, line_length=130, is_pyi=True, string_normalization=False)%0A formatted = black.format_file_contents(gen_constants_stub(), fast=False, mode=mode)%0A assert formatted == stub.read_text()%0A
|
|
f3c9284bf7b5d9ae4acc413fd7feb824fdb7aca0
|
create field to exclude recomputation of old invoices
|
l10n_it_fatturapa_in/migrations/12.0.1.18.3/pre-migration.py
|
l10n_it_fatturapa_in/migrations/12.0.1.18.3/pre-migration.py
|
Python
| 0 |
@@ -0,0 +1,304 @@
+from openupgradelib import openupgrade%0A%0A%[email protected]()%0Adef migrate(env, version):%0A if not version:%0A return%0A openupgrade.logged_query(%0A env.cr,%0A %22%22%22%0A ALTER TABLE fatturapa_attachment_in%0A ADD COLUMN IF NOT EXISTS invoices_date character varying%0A %22%22%22,%0A )%0A
|
|
60068d4deeba541b9518579d6d8473c4300e189d
|
Test killing onitu during a transfer
|
tests/functional/test_crash.py
|
tests/functional/test_crash.py
|
Python
| 0 |
@@ -0,0 +1,1556 @@
+import os.path%0Afrom os import unlink%0A%0Afrom utils.launcher import Launcher%0Afrom utils.entries import Entries%0Afrom utils.loop import CounterLoop, BooleanLoop%0Afrom utils.files import generate, checksum%0Afrom utils.tempdirs import TempDirs%0A%0Alauncher = None%0Adirs = TempDirs()%0Arep1, rep2 = dirs.create(), dirs.create()%0Ajson_file = 'test_crash.json'%0A%0A%0Adef setup_module(module):%0A global launcher%0A entries = Entries()%0A entries.add('local_storage', 'rep1', %7B'root': rep1%7D)%0A entries.add('local_storage', 'rep2', %7B'root': rep2%7D)%0A entries.save(json_file)%0A launcher = Launcher(json_file)%0A%0A%0Adef teardown_module(module):%0A launcher.kill()%0A unlink(json_file)%0A dirs.delete()%0A%0A%0Adef launcher_startup():%0A loop = CounterLoop(3)%0A launcher.on_referee_started(loop.check)%0A launcher.on_driver_started(loop.check, driver='rep1')%0A launcher.on_driver_started(loop.check, driver='rep2')%0A launcher()%0A loop.run(timeout=5)%0A%0A%0Adef test_crach():%0A filename = 'crash'%0A%0A loop = BooleanLoop()%0A launcher.on_transfer_started(%0A loop.stop, d_from='rep1', d_to='rep2', filename=filename%0A )%0A launcher_startup()%0A generate(os.path.join(rep1, filename), 1000)%0A loop.run(timeout=5)%0A launcher.kill()%0A%0A launcher.unset_all_events()%0A loop = BooleanLoop()%0A launcher.on_transfer_ended(%0A loop.stop, d_from='rep1', d_to='rep2', filename=filename%0A )%0A launcher_startup()%0A loop.run(timeout=5)%0A%0A assert(checksum(os.path.join(rep1, filename)) ==%0A checksum(os.path.join(rep2, filename)))%0A launcher.kill()%0A
|
|
e541d2c6c9c71647201ad39eb8a774eabe243139
|
Add gaussian smoothing example (#485)
|
examples/01-filter/gaussian-smoothing.py
|
examples/01-filter/gaussian-smoothing.py
|
Python
| 0.000001 |
@@ -0,0 +1,1902 @@
+%22%22%22%0AGaussian smoothing%0A~~~~~~~~~~~~~~~~~~%0A%0APerform a gaussian convolution.%0A%0A%22%22%22%0Aimport pyvista as pv%0Afrom pyvista import examples%0A%0A# Load dataset%0Adata = examples.download_gourds()%0A%0A# Define a good point of view%0Acp = %5B%0A (319.5, 239.5, 1053.7372980874645),%0A (319.5, 239.5, 0.0),%0A (0.0, 1.0, 0.0)%0A%5D%0A%0A###############################################################################%0A# Let's apply the gaussian smoothing with different values of standard%0A# deviation.%0Ap = pv.Plotter(shape=(2, 2))%0A%0Ap.subplot(0, 0)%0Ap.add_text(%22Original Image%22, font_size=24)%0Ap.add_mesh(data, rgb=True)%0Ap.camera_position = cp%0A%0Ap.subplot(0, 1)%0Ap.add_text(%22Gaussian smoothing, std=2%22, font_size=24)%0Ap.add_mesh(data.gaussian_smooth(std_dev=2.), rgb=True)%0Ap.camera_position = cp%0A%0Ap.subplot(1, 0)%0Ap.add_text(%22Gaussian smoothing, std=4%22, font_size=24)%0Ap.add_mesh(data.gaussian_smooth(std_dev=4.), rgb=True)%0Ap.camera_position = cp%0A%0Ap.subplot(1, 1)%0Ap.add_text(%22Gaussian smoothing, std=8%22, font_size=24)%0Ap.add_mesh(data.gaussian_smooth(std_dev=8.), rgb=True)%0Ap.camera_position = cp%0A%0Ap.show()%0A%0A###############################################################################%0A# Now let's see an example on a 3D dataset with volume rendering:%0Adata = examples.download_brain()%0A%0Asmoothed_data = data.gaussian_smooth(std_dev=3.)%0A%0A%0Adargs = dict(clim=smoothed_data.get_data_range(),%0A opacity=%5B0, 0, 0, 0.1, 0.3, 0.6, 1%5D)%0A%0An = %5B100, 150, 200, 245, 255%5D%0A%0Ap = pv.Plotter(shape=(1, 2), notebook=0)%0Ap.subplot(0, 0)%0Ap.add_text(%22Original Image%22, font_size=24)%0A# p.add_mesh(data.contour(n), **dargs)%0Ap.add_volume(data, **dargs)%0Ap.subplot(0, 1)%0Ap.add_text(%22Gaussian smoothing%22, font_size=24)%0A# p.add_mesh(smoothed_data.contour(n), **dargs)%0Ap.add_volume(smoothed_data, **dargs)%0Ap.link_views()%0Ap.camera_position = %5B(-162.0, 704.8, 65.02),%0A (90.0, 108.0, 90.0),%0A (0.0068, 0.0447, 0.999)%5D%0Ap.show()%0A
|
|
59d435ab1d0e5347180f60633d316aa7f2a3abdb
|
add send_TWH_text module to package
|
timutils/send_TWH_txt.py
|
timutils/send_TWH_txt.py
|
Python
| 0 |
@@ -0,0 +1,2041 @@
+%22%22%22%0Ashort module to send a text message to Tim Hilton's phone using%0AVerizon's email-to-sms support and gmail's smtp mail server. I was%0Aunable to get UC Merced's outlook.com server to accept the outgoing%0Amessage.%0A%0ATimothy W. Hilton, UC Merced, 25 Feb 2014%0A%22%22%22%0A%0Aimport smtplib%0Aimport getpass%0A%0Adef get_outgoing_mail_password():%0A pwd = getpass.getpass(prompt='Gmail password: ')%0A if len(pwd) == 0:%0A pwd = None%0A return(pwd)%0A%0Adef send_vtext_gmail(gmail_passwd,%0A gmail_uname='[email protected]',%0A dest_phone_num='4153147478',%0A msg_txt='testing 123'):%0A%0A vtext_addr = %22%7B%[email protected]%22.format(dest_phone_num)%0A%0A msg = %22%22%22From: %25s%0A To: %25s%0A Subject: text-message%5Cn%0A %25s%22%22%22 %25 (gmail_uname, vtext_addr, msg_txt)%0A%0A server = smtplib.SMTP('smtp.gmail.com',587)%0A server.starttls()%0A server.login(gmail_uname,gmail_passwd)%0A server.sendmail(gmail_uname, vtext_addr, msg)%0A server.quit()%0A%0Adef send_vtext_outlook(ucmerced_uname,%0A smtp_password,%0A dest_phone_num,%0A msg_txt):%0A %22%22%22%0A 25 Feb 2014: couldn't get sending mail through UC Merced's%0A outlook.com SMTP server to work. Probably something related to%0A the formatting of the outlook.com username? -TWH%0A %22%22%22%0A vtext_addr = %22%7B%[email protected]%22.format(dest_phone_num)%0A smtp_uname = %22%7B%[email protected]%22.format(ucmerced_uname)%0A%0A msg = %22%22%22From: %25s%0A To: %25s%0A Subject: text-message%0A %25s%22%22%22 %25 (smtp_uname, vtext_addr, msg_txt)%0A%0A print smtp_uname%0A result = 0%0A%0A # server = smtplib.SMTP('pod51011.outlook.com',587)%0A # server.starttls()%0A # server.login(smtp_uname,smtp_password)%0A # result = server.sendmail(smtp_uname, vtext_addr, msg)%0A # server.quit()%0A%0A print result%0A%0Aif __name__ == %22__main__%22:%0A passwd = get_outgoing_mail_password()%0A if passwd is not None:%0A send_vtext_gmail(passwd,%0A msg_txt='here is the message')%0A else:%0A print('no password provided')%0A
|
|
2b4c065b986ca1e05d0755b2b64502861b17364d
|
add import script for Oldham
|
polling_stations/apps/data_collection/management/commands/import_oldham.py
|
polling_stations/apps/data_collection/management/commands/import_oldham.py
|
Python
| 0 |
@@ -0,0 +1,400 @@
+from data_collection.management.commands import BaseXpressCsvImporter%0A%0Aclass Command(BaseXpressCsvImporter):%0A council_id = 'E08000004'%0A addresses_name = 'OldhamPropertyPostCodePollingStationWebLookup-2017-02-16.TSV'%0A stations_name = 'OldhamPropertyPostCodePollingStationWebLookup-2017-02-16.TSV'%0A elections = %5B'mayor.greater-manchester.2017-05-04'%5D%0A csv_delimiter = '%5Ct'%0A
|
|
1064b7bc9e343f3ab9308172f6a3129745e7a548
|
add test.py
|
test.py
|
test.py
|
Python
| 0.000012 |
@@ -0,0 +1,2146 @@
+#!/usr/bin/python%0Aimport smc%0Afrom pprint import pprint%0Aimport time%0Aimport logging%0A%0Alogger = logging.getLogger(__name__)%0A%0Asmc.login('http://172.18.1.150:8082', 'EiGpKD4QxlLJ25dbBEp20001')%0A%0A%0A#Example of using a search filter %0A#Response is a json record with a reference link to the object%0A#smc.get_element_by_href(href) gets the record directly%0A%0A#Search for group named (Skype Servers)%0Amygroup = smc.filter_by_type('group', 'Skype Servers')%0Aif mygroup:%0A pprint(smc.get_element_by_href(mygroup%5B'href'%5D))%0A %0A#Search for single_fw instance named vmware-fw %0Amyfw = smc.filter_by_type('single_fw', 'vmware-fw')%0Aif myfw:%0A pprint(smc.get_element_by_href(myfw%5B'href'%5D))%0A%0A#Search for host named ami%0Amyhost = smc.filter_by_type('host', 'ami')%0Aif myhost:%0A pprint(smc.get_element_by_href(myhost%5B'href'%5D)) %0A%0A#Search by top level element if element type is not known%0Amyobject = smc.filter_by_element('myelement')%0A%0A%0A'''%0A#Creating/removing a host record. Validation is done based on IP address.%0Asmc.create_host('ami', '1.1.1.2')%0Asmc.remove_host('ami')%0A%0Asmc.create_host('a', 'a.b.c.d') #Should fail, not valid IP%0Asmc.remove_host('ami2') #should fail if host doesn't exist%0A'''%0A%0A'''%0A#Create group and add members%0Asmc.create_group('group_with_no_members')%0Asmc.create_host('ami', '1.1.1.1')%0Asmc.create_host('ami2', '2.2.2.2')%0Asmc.create_group('anewgroup', %5B'ami','ami2'%5D)%0A'''%0A %0A'''%0A#Example of creating a group record. If members is included, each member href %0A#needs to be validated or warning will be issued that members can't be added%0Asmc.create_group('mygroup')%0Asmc.create_group('mygroup', %5B'member1','member2','member3'%5D)%0A'''%0A%0A'''%0A#Example of creating a single_fw instance. method signature is:%0A#smc.create_single_fw(name, IP (mgmt), network (mgmt), dns=None, fw_license=None)%0A#If DNS and fw_license are provided, DNS is added to fw and an attempt is made to attach an available license if available%0Asmc.create_single_fw('lepage', '172.18.1.5', '172.18.1.0/24', dns='5.5.5.5', fw_license=True)%0Atime.sleep(5)%0Asmc.remove_single_fw('lepage')%0A'''%0A%0A%0A'''%0A#Get available dynamic licenses%0Aprint %22License: %25s%22 %25 smc.get_dynamic_license()%0A'''%0A%0Asmc.logout()%0A%0A
|
|
fa2fd9cdab29a5736ae6b69c5f754f92a33c7f74
|
add wsgi.py
|
wsgi.py
|
wsgi.py
|
Python
| 0.00001 |
@@ -0,0 +1,64 @@
+from server import app%0A%0Aif __name__ == %22__main__%22:%0A app.run()
|
|
f4e12493c000b6bb3051e9c201347d420c8dd687
|
add basis for netcomp class
|
camoco/NetComp.py
|
camoco/NetComp.py
|
Python
| 0 |
@@ -0,0 +1,521 @@
+from .COB import COB%0A%0Aclass NetComp(Camoco):%0A%0A def __init__(self,name,networks):%0A self.networks = set()%0A%0A # Add all the networks%0A for n in networks:%0A self.add_network(n)%0A%0A def add_network(self,net):%0A '''%0A Add a network (COB) to the %0A NetComp object.%0A '''%0A if isinstance(net,str):%0A net = COB(net)%0A if not isinstance(net,COB):%0A raise ValueError(f'a valid network must be provided')%0A self.networks.add(net)%0A
|
|
b5d2b975e0566b90e6f52b9b3a4bf1b2e1fef8da
|
constrain tabled_committee_report.committee_id NOT NULL
|
migrations/versions/8cbc3d8dd55_add_soundcloudtrack_model.py
|
migrations/versions/8cbc3d8dd55_add_soundcloudtrack_model.py
|
Python
| 0.999995 |
@@ -0,0 +1,1731 @@
+%22%22%22Add SoundcloudTrack model%0A%0ARevision ID: 8cbc3d8dd55%0ARevises: 17570e7e200b%0ACreate Date: 2016-08-31 10:19:49.128041%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '8cbc3d8dd55'%0Adown_revision = '17570e7e200b'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.alter_column('tabled_committee_report', 'committee_id',%0A existing_type=sa.INTEGER(),%0A nullable=False)%0A op.create_index(op.f('ix_tabled_committee_report_committee_id'), 'tabled_committee_report', %5B'committee_id'%5D, unique=False)%0A op.drop_constraint(u'tabled_committee_report_committee_id_fkey', 'tabled_committee_report', type_='foreignkey')%0A op.create_foreign_key(op.f('fk_tabled_committee_report_committee_id_committee'), 'tabled_committee_report', 'committee', %5B'committee_id'%5D, %5B'id'%5D)%0A op.drop_column('tabled_committee_report', 'summary')%0A ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.add_column('tabled_committee_report', sa.Column('summary', sa.TEXT(), autoincrement=False, nullable=True))%0A op.drop_constraint(op.f('fk_tabled_committee_report_committee_id_committee'), 'tabled_committee_report', type_='foreignkey')%0A op.create_foreign_key(u'tabled_committee_report_committee_id_fkey', 'tabled_committee_report', 'committee', %5B'committee_id'%5D, %5B'id'%5D, ondelete=u'SET NULL')%0A op.drop_index(op.f('ix_tabled_committee_report_committee_id'), table_name='tabled_committee_report')%0A op.alter_column('tabled_committee_report', 'committee_id',%0A existing_type=sa.INTEGER(),%0A nullable=True)%0A ### end Alembic commands ###%0A
|
|
2a3b89f42cde7088b304a3f224eaf52894f544ec
|
Add an python example for stream testing
|
misc/utils/LSL_Tests/RecieveAppStatistics.py
|
misc/utils/LSL_Tests/RecieveAppStatistics.py
|
Python
| 0.000005 |
@@ -0,0 +1,612 @@
+%22%22%22Example program to show how to read a multi-channel time series from LSL.%22%22%22%0A%0Afrom pylsl import StreamInlet, resolve_stream%0Aimport sys%0A# first resolve an EEG stream on the lab network%0Aprint(%22looking for an Unity3D.AppStatistics stream...%22)%0Astreams = resolve_stream('type', 'Unity3D.FPS.FT')%0A%0A# create a new inlet to read from the stream%0Ainlet = StreamInlet(streams%5B0%5D)%0A%0Awhile True:%0A # get a new sample (you can also omit the timestamp part if you're not%0A # interested in it)%0A sample, timestamp = inlet.pull_sample()%0A print '%5Cr' + str(round(timestamp)) + '%5Ct' + str(sample),%0A sys.stdout.flush()%0A
|
|
cfba5e1d10d39757b7b3ad31d9c224ae4499736c
|
fix decimal
|
netforce_account/netforce_account/models/account_tax_rate.py
|
netforce_account/netforce_account/models/account_tax_rate.py
|
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields, get_model, BrowseRecord
import uuid
from decimal import *
class TaxRate(Model):
_name = "account.tax.rate"
_string = "Tax Rate"
_key = ["name"]
_name_field = "name"
_fields = {
"name": fields.Char("Name", required=True, search=True),
"code": fields.Char("Code", search=True),
"rate": fields.Decimal("Tax Rate", function="get_rate", function_multi=True),
"wht_rate": fields.Decimal("WHT Rate", function="get_rate", function_multi=True),
"components": fields.One2Many("account.tax.component", "tax_rate_id", "Components"),
"uuid": fields.Char("UUID"),
"active": fields.Boolean("Active"),
"comments": fields.One2Many("message", "related_id", "Comments"),
}
_defaults = {
"uuid": lambda *a: str(uuid.uuid4()),
"active": True,
}
_order = "name"
def get_rate(self, ids, context={}):
vals = {}
for obj in self.browse(ids):
rate = 0
wht_rate = 0
for comp in obj.components:
if comp.type == "vat":
rate += comp.rate
elif comp.type == "wht":
wht_rate += comp.rate
res = {
"rate": rate,
"wht_rate": wht_rate
}
vals[obj.id] = res
return vals
def update_total(self, context={}):
data = context["data"]
data["rate"] = 0
for comp in data["components"]:
data["rate"] += comp["rate"]
return data
# XXX: remove this
def compute_tax(self, tax_id, amt, tax_type="tax_ex", wht=False):
if tax_type == "no_tax":
return 0
obj = self.browse(tax_id)
vat_rate = Decimal(0)
wht_rate = Decimal(0)
for comp in obj.components:
if comp.type == "wht":
wht_rate += comp.rate or 0
elif comp.type == "vat":
vat_rate += comp.rate or 0
if tax_type == "tax_ex":
base_amt = amt or 0
elif tax_type == "tax_in":
base_amt = (amt or 0) / (1 + vat_rate / 100)
if wht:
return base_amt * wht_rate / 100
else:
return base_amt * vat_rate / 100
# XXX: remove this
# (not used in payment)
def compute_components(self, tax_id, amt, tax_type="tax_ex", when="invoice"):
assert(when != "payment") # XXX
if tax_type == "no_tax":
return {}
obj = self.browse(tax_id)
if tax_type == "tax_in":
base_amt = amt / (1 + obj.rate / 100)
else:
base_amt = amt
has_defer = False
for comp in obj.components:
if comp.type == "vat_defer":
has_defer = True
comps = {}
for comp in obj.components:
if comp.type == "wht":
continue
if has_defer and comp.type == "vat":
continue
comps[comp.id] = base_amt * (comp.rate / 100)
return comps
def compute_base(self, tax_id, amt, tax_type="tax_ex"):
if isinstance(tax_id, BrowseRecord): # XXX: for speed (use browse cache)
obj = tax_id
else:
obj = self.browse(tax_id)
if tax_type == "tax_in":
base_amt = amt / (1 + obj.rate / 100)
elif tax_type == "tax_ex":
base_amt = amt
return base_amt
# TODO: use this in invoice/claim
def compute_taxes(self, tax_id, base, when="invoice"):
if isinstance(tax_id, BrowseRecord): # XXX: for speed (use browse cache)
obj = tax_id
else:
obj = self.browse(tax_id)
has_defer = False
for comp in obj.components:
if comp.type == "vat_defer":
has_defer = True
comps = {}
for comp in obj.components:
if when == "invoice":
if comp.type in ("vat", "vat_exempt") and has_defer:
continue
if comp.type == "wht":
continue
elif when == "invoice_payment":
if comp.type in ("vat", "vat_exempt") and not has_defer:
continue
elif when == "invoice_payment_inv":
if comp.type != "vat_defer":
continue
elif when == "invoice_payment_pmt":
if comp.type in ("vat", "vat_exempt") and not has_defer:
continue
if comp.type == "vat_defer":
continue
elif when == "direct_payment":
if comp.type == "vat_defer":
continue
else:
raise Exception("Can't compute taxes: invalid 'when'")
if when == "invoice" and comp.type not in ("vat", "vat_exempt", "vat_defer"):
continue
if when == "payment" and comp.type != "wht":
continue
tax = base * (comp.rate / 100)
if comp.type == "wht":
tax = -tax
elif comp.type == "vat_defer" and when in ("invoice_payment", "invoice_payment_inv"):
tax = -tax
comps[comp.id] = tax
return comps
def has_defer_vat(self, ids, context={}):
for obj in self.browse(ids):
for comp in obj.components:
if comp.type == "vat_defer":
return True
return False
TaxRate.register()
|
Python
| 1 |
@@ -2110,25 +2110,34 @@
rate =
-0
+Decimal(0)
%0A
@@ -2148,17 +2148,26 @@
_rate =
-0
+Decimal(0)
%0A
|
52c7d6ba8f6dcb6c6f1bd02790ab9bb7fae8ebcd
|
add script
|
scripts/grabBAMrecs.py
|
scripts/grabBAMrecs.py
|
Python
| 0.000003 |
@@ -0,0 +1,2128 @@
+#!/usr/bin/env python%0A%0Aimport sys%0Aimport pysam%0Aimport os%0Aimport re%0A%0Afrom collections import defaultdict as dd%0A%0Aimport logging%0Alogger = logging.getLogger(__name__)%0AFORMAT = '%25(asctime)s %25(message)s'%0Alogging.basicConfig(format=FORMAT)%0Alogger.setLevel(logging.INFO)%0A%0A%0Adef find_mate(read, bam):%0A ''' AlignmentFile.mate() can return a non-primary alignment, so use this function instead '''%0A chrom = read.next_reference_name%0A for rec in bam.fetch(chrom, read.next_reference_start, read.next_reference_start+1):%0A if rec.query_name == read.query_name and rec.reference_start == read.next_reference_start:%0A if not rec.is_secondary and bin(rec.flag & 2048) != bin(2048):%0A if rec.is_read1 != read.is_read1:%0A return rec%0A return None%0A%0A%0Aif len(sys.argv) == 3:%0A inbam = pysam.AlignmentFile(sys.argv%5B1%5D, 'rb')%0A%0A outfn = '.'.join(os.path.basename(sys.argv%5B1%5D).split('.')%5B:-1%5D) + '.' + re.sub(':', '_', sys.argv%5B2%5D) + '.bam'%0A%0A outbam = pysam.AlignmentFile(outfn, 'wb', template=inbam)%0A%0A seen = dd(list)%0A%0A for read in inbam.fetch(region=sys.argv%5B2%5D):%0A if not read.is_supplementary and not read.is_secondary and not read.mate_is_unmapped:%0A outbam.write(read)%0A seen%5Bread.qname%5D.append(read.is_read1)%0A%0A seen_pairs = 0%0A seen_alone = 0%0A%0A for qname, pair in seen.iteritems():%0A assert len(set(pair)) %3C= 2%0A %0A if len(set(pair)) == 2:%0A seen_pairs += 1%0A if len(set(pair)) == 1:%0A seen_alone += 1%0A%0A logger.info('%25d pairs inside and %25d mates outside region %25s' %25 (seen_pairs, seen_alone, sys.argv%5B2%5D))%0A%0A matebam = pysam.AlignmentFile(sys.argv%5B1%5D, 'rb') %0A%0A for read in inbam.fetch(region=sys.argv%5B2%5D):%0A if not read.is_supplementary and not read.is_secondary and not read.mate_is_unmapped:%0A assert read.qname in seen%0A if len(set(seen%5Bread.qname%5D)) == 1:%0A mate = find_mate(read, matebam)%0A if mate is not None:%0A outbam.write(mate)%0A%0A%0Aelse:%0A sys.exit('usage: %25s %3CBAM%3E %3Cregion chrom:start-end%3E' %25 sys.argv%5B0%5D)%0A
|
|
6ad72a0c624abdda0df8d5c49366bfc597a12340
|
Add tests for utils experiment module
|
cptm/tests/test_utils_experiment.py
|
cptm/tests/test_utils_experiment.py
|
Python
| 0 |
@@ -0,0 +1,2314 @@
+from nose.tools import assert_equal, assert_false%0A%0Afrom os import remove%0Afrom os.path import join%0Afrom json import dump%0A%0Afrom cptm.utils.experiment import load_config, add_parameter, thetaFileName, %5C%0A topicFileName, opinionFileName, tarFileName, experimentName%0A%0A%0Adef setup():%0A global jsonFile%0A global config%0A global nTopics%0A%0A jsonFile = 'config.json'%0A # create cofig.json%0A params = %7B%7D%0A with open(jsonFile, 'wb') as f:%0A dump(params, f, sort_keys=True, indent=4)%0A config = load_config(jsonFile)%0A%0A nTopics = 100%0A%0A%0Adef teardown():%0A remove(jsonFile)%0A%0A%0Adef test_load_config_default_values():%0A params = %7B%7D%0A params%5B'inputData'%5D = None%0A params%5B'outDir'%5D = '/%7B%7D'%0A params%5B'testSplit'%5D = 20%0A params%5B'minFreq'%5D = None%0A params%5B'removeTopTF'%5D = None%0A params%5B'removeTopDF'%5D = None%0A params%5B'nIter'%5D = 200%0A params%5B'beta'%5D = 0.02%0A params%5B'beta_o'%5D = 0.02%0A params%5B'expNumTopics'%5D = range(20, 201, 20)%0A params%5B'nTopics'%5D = None%0A params%5B'nProcesses'%5D = None%0A params%5B'topicLines'%5D = %5B0%5D%0A params%5B'opinionLines'%5D = %5B1%5D%0A params%5B'sampleEstimateStart'%5D = None%0A params%5B'sampleEstimateEnd'%5D = None%0A%0A for p, v in params.iteritems():%0A yield assert_equal, v, params%5Bp%5D%0A%0A%0Adef test_add_parameter():%0A pName = 'nTopics'%0A%0A yield assert_false, hasattr(config, pName)%0A%0A add_parameter(pName, nTopics, jsonFile)%0A config2 = load_config(jsonFile)%0A%0A yield assert_equal, config2%5BpName%5D, nTopics%0A%0A%0Adef test_thetaFileName():%0A config%5B'nTopics'%5D = nTopics%0A fName = thetaFileName(config)%0A assert_equal(fName, '/theta_%7B%7D.csv'.format(nTopics))%0A%0A%0Adef test_topicFileName():%0A config%5B'nTopics'%5D = nTopics%0A fName = topicFileName(config)%0A assert_equal(fName, '/topics_%7B%7D.csv'.format(nTopics))%0A%0A%0Adef test_opinionFileName():%0A config%5B'nTopics'%5D = nTopics%0A return join(params.get('outDir').format(''),%0A 'opinions_%7B%7D_%7B%7D.csv'.format(name, nTopics))%0A%0A%0A#def experimentName(params):%0A# fName = params.get('outDir')%0A# fName = fName.replace('/%7B%7D', '')%0A# _p, name = os.path.split(fName)%0A# return name%0A%0A%0A#def tarFileName(params):%0A# nTopics = params.get('nTopics')%0A# name = experimentName(params)%0A# return os.path.join(params.get('outDir').format(''),%0A# '%7B%7D_%7B%7D.tgz'.format(name, nTopics))%0A
|
|
f73800f8e4ccd76d858c08d8cc8a72a6f2274fb6
|
Validate settings a tad later
|
mopidy/__main__.py
|
mopidy/__main__.py
|
import logging
import multiprocessing
import optparse
import os
import sys
sys.path.insert(0,
os.path.abspath(os.path.join(os.path.dirname(__file__), '../')))
from mopidy import get_version, settings, SettingsError
from mopidy.core import CoreProcess
from mopidy.utils import get_class
from mopidy.utils.log import setup_logging
from mopidy.utils.path import get_or_create_folder
from mopidy.utils.settings import list_settings_optparse_callback
logger = logging.getLogger('mopidy.main')
def main():
options = _parse_options()
setup_logging(options.verbosity_level, options.dump)
settings.validate()
logger.info('-- Starting Mopidy --')
get_or_create_folder('~/.mopidy/')
core_queue = multiprocessing.Queue()
output_class = get_class(settings.OUTPUT)
backend_class = get_class(settings.BACKENDS[0])
frontend = get_class(settings.FRONTENDS[0])()
frontend.start_server(core_queue)
core = CoreProcess(core_queue, output_class, backend_class, frontend)
core.start()
logger.debug('Main done')
def _parse_options():
parser = optparse.OptionParser(version='Mopidy %s' % get_version())
parser.add_option('-q', '--quiet',
action='store_const', const=0, dest='verbosity_level',
help='less output (warning level)')
parser.add_option('-v', '--verbose',
action='store_const', const=2, dest='verbosity_level',
help='more output (debug level)')
parser.add_option('--dump',
action='store_true', dest='dump',
help='dump debug log to file')
parser.add_option('--list-settings',
action='callback', callback=list_settings_optparse_callback,
help='list current settings')
return parser.parse_args()[0]
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
logger.info(u'Interrupted by user')
sys.exit(0)
except SettingsError, e:
logger.error(e)
sys.exit(1)
except SystemExit, e:
logger.error(e)
sys.exit(1)
|
Python
| 0 |
@@ -515,17 +515,16 @@
tions =
-_
parse_op
@@ -531,16 +531,16 @@
tions()%0A
+
setu
@@ -592,32 +592,9 @@
mp)%0A
- settings.validate()
%0A
+
@@ -630,16 +630,17 @@
dy --')%0A
+%0A
get_
@@ -670,16 +670,41 @@
pidy/')%0A
+ settings.validate()%0A%0A
core
@@ -1013,16 +1013,17 @@
start()%0A
+%0A
logg
@@ -1049,17 +1049,16 @@
')%0A%0Adef
-_
parse_op
|
69b715ab99522967a6b1bb8f4abfc4f2b1e60912
|
check most of the analyzer code by importing the analyzer itself
|
tests/windows/test_analyzer.py
|
tests/windows/test_analyzer.py
|
Python
| 0 |
@@ -0,0 +1,314 @@
+# Copyright (C) 2017 Cuckoo Foundation.%0A# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org%0A# See the file 'docs/LICENSE' for copying permission.%0A%0Adef test_analyzer():%0A %22%22%22Simply imports the analyzer module to at least load most of the code.%22%22%22%0A import analyzer%0A%0A analyzer # Fake usage.%0A
|
|
4853257696373d248884efd1532af8a81c34ee93
|
Add LiveComposite creation helper script
|
tools/create_live_composite.py
|
tools/create_live_composite.py
|
Python
| 0 |
@@ -0,0 +1,2621 @@
+%0A%22%22%22%0A Helper script for cropping images and creating a RenPy LiveComposite for them.%0A Quite specific and mostly useful for processing images exported from a%0A rendering program like Blender or from Photoshop layers.%0A%0A Requires Pillow Python image processing library to be installed.%0A%0A Command line example (current working directory at the base of this project):%0A%0A python tools/create_live_composite.py ShaderDemo/game/images/doll%0A%0A This assumes all images in the source directory have the same size. The script%0A crops them and creates an efficient LiveComposite that can be used for rigging%0A or just normally. The resulting LiveComposite is written into a .rpy-file%0A in the target directory.%0A%22%22%22%0A%0Aimport sys%0Aimport os%0Afrom PIL import Image%0A%0AIMAGES = %5B%22png%22, %22jpg%22%5D%0APOSTFIX = %22crop%22%0APAD = 5%0A%0AsourceDir = sys.argv%5B1%5D%0AsourceImages = %5Bos.path.join(sourceDir, name) for name in os.listdir(sourceDir) if name.lower().split(%22.%22)%5B-1%5D in IMAGES%5D%0AsourceImages.sort()%0A%0Adef findValidImages(images):%0A valid = %5B%5D%0A size = None%0A for path in sourceImages:%0A image = Image.open(path)%0A if POSTFIX and POSTFIX in path.lower():%0A print(%22Skipping already cropped: %25s%22 %25 path)%0A elif size is None or image.size == size:%0A size = image.size%0A valid.append((path, image))%0A else:%0A print(%22Image %25s has size %25s, should be %25s? Skipped.%22 %25 (path, str(image.size), str(size)))%0A return valid%0A%0Adef getCropRect(image):%0A x = 0%0A y = 0%0A x2 = image.size%5B0%5D%0A y2 = image.size%5B1%5D%0A box = image.getbbox()%0A if box:%0A return max(box%5B0%5D - PAD, 0), max(box%5B1%5D - PAD, 0), min(box%5B2%5D + PAD, image.size%5B0%5D), min(box%5B3%5D + PAD, image.size%5B1%5D)%0A return x, y, x2, y2%0A%0Adef createName(path):%0A parts = path.rsplit(%22.%22, 1)%0A return parts%5B0%5D + POSTFIX + %22.%22 + parts%5B1%5D%0A%0Aresults = %5B%5D%0Afor path, image in findValidImages(sourceImages):%0A rect = getCropRect(image)%0A cropped = image.crop(rect)%0A name = createName(path)%0A cropped.save(name)%0A print(%22Saved: %25s. Cropped: %25s%22 %25 (name, str(rect)))%0A results.append((name, image, rect))%0A%0Aname = os.path.normcase(sourceDir).split(os.sep)%5B-1%5D%0Awith open(os.path.join(sourceDir, name + %22.rpy%22), %22w%22) as f:%0A base = results%5B0%5D%0A%0A f.write(%22#Automatically generated file%5Cn%5Cn%22)%0A f.write(%22image %25s = LiveComposite(%5Cn%22 %25 name)%0A f.write(%22 (%25i, %25i),%5Cn%22 %25 base%5B1%5D.size)%0A for result in results:%0A name, image, crop = result%0A name = name%5Bname.find(%22images%22):%5D.replace(%22%5C%5C%22, %22/%22)%0A f.write(%22 (%25i, %25i), %5C%22%25s%5C%22,%5Cn%22 %25 (crop%5B0%5D, crop%5B1%5D, name))%0A f.write(%22)%5Cn%22)%0A
|
|
425ca8da6cb8300481aa82f99d26dedd5baf1076
|
Fix parsing of the first race report for ThreadSanitizer We've introduced a mistake recently which results in hiding the first race report. The reason was the "main thread" line which doesn't contain "{{{" and doesn't have context printed. Please note that after submitting of this CL ThreadSanitizer bots can become red for a while. Review URL: http://codereview.chromium.org/272026
|
tools/valgrind/tsan_analyze.py
|
tools/valgrind/tsan_analyze.py
|
#!/usr/bin/python
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# tsan_analyze.py
''' Given a ThreadSanitizer output file, parses errors and uniques them.'''
import gdb_helper
import logging
import optparse
import os
import re
import subprocess
import sys
import time
# Global symbol table (ugh)
TheAddressTable = None
class _StackTraceLine(object):
def __init__(self, line, address, binary):
self.raw_line_ = line
self.address = address
self.binary = binary
def __str__(self):
global TheAddressTable
file, line = TheAddressTable.GetFileLine(self.binary, self.address)
if (file is None) or (line is None):
return self.raw_line_
else:
return self.raw_line_.replace(self.binary, '%s:%s' % (file, line))
class TsanAnalyze:
''' Given a set of ThreadSanitizer output files, parse all the errors out of
them, unique them and output the results.'''
LOAD_LIB_RE = re.compile('--[0-9]+-- ([^(:]*) \((0x[0-9a-f]+)\)')
TSAN_LINE_RE = re.compile('==[0-9]+==\s*[#0-9]+\s*'
'([0-9A-Fa-fx]+):'
'(?:[^ ]* )*'
'([^ :\n]+)'
'')
THREAD_CREATION_STR = ("INFO: T.* "
"(has been created by T.* at this point|is program's main thread)")
def __init__(self, source_dir, files, use_gdb=False):
'''Reads in a set of files.
Args:
source_dir: Path to top of source tree for this build
files: A list of filenames.
'''
self.use_gdb = use_gdb
if use_gdb:
global TheAddressTable
TheAddressTable = gdb_helper.AddressTable()
self.races = []
self.used_suppressions = {}
for file in files:
self.ParseReportFile(file)
if self.use_gdb:
TheAddressTable.ResolveAll()
def ReadLine(self):
self.line_ = self.cur_fd_.readline()
self.stack_trace_line_ = None
if not self.use_gdb:
return
global TheAddressTable
match = TsanAnalyze.LOAD_LIB_RE.match(self.line_)
if match:
binary, ip = match.groups()
TheAddressTable.AddBinaryAt(binary, ip)
return
match = TsanAnalyze.TSAN_LINE_RE.match(self.line_)
if match:
address, binary_name = match.groups()
stack_trace_line = _StackTraceLine(self.line_, address, binary_name)
TheAddressTable.Add(stack_trace_line.binary, stack_trace_line.address)
self.stack_trace_line_ = stack_trace_line
def ReadSection(self):
result = [self.line_]
while not re.search('}}}', self.line_):
self.ReadLine()
if self.stack_trace_line_ is None:
result.append(self.line_)
else:
result.append(self.stack_trace_line_)
return result
def ParseReportFile(self, filename):
self.cur_fd_ = open(filename, 'r')
while True:
# Read race reports.
self.ReadLine()
if (self.line_ == ''):
break
if re.search("ERROR SUMMARY", self.line_):
# TSAN has finished working. The remaining reports are duplicates.
break
tmp = []
while re.search(TsanAnalyze.THREAD_CREATION_STR, self.line_):
tmp.extend(self.ReadSection())
self.ReadLine()
if re.search("Possible data race", self.line_):
tmp.extend(self.ReadSection())
self.races.append(tmp)
while True:
# Read the list of used suppressions.
self.ReadLine()
if (self.line_ == ''):
break
match = re.search(" used_suppression:\s+([0-9]+)\s(.*)", self.line_)
if match:
count, supp_name = match.groups()
count = int(count)
if supp_name in self.used_suppressions:
self.used_suppressions[supp_name] += count
else:
self.used_suppressions[supp_name] = count
self.cur_fd_.close()
def Report(self):
print "-----------------------------------------------------"
print "Suppressions used:"
print " count name"
for item in sorted(self.used_suppressions.items(), key=lambda (k,v): (v,k)):
print "%7s %s" % (item[1], item[0])
print "-----------------------------------------------------"
sys.stdout.flush()
if len(self.races) > 0:
logging.error("Found %i race reports" % len(self.races))
for report_list in self.races:
report = ''
for line in report_list:
report += str(line)
logging.error('\n' + report)
return -1
logging.info("PASS: No race reports found")
return 0
if __name__ == '__main__':
'''For testing only. The TsanAnalyze class should be imported instead.'''
retcode = 0
parser = optparse.OptionParser("usage: %prog [options] <files to analyze>")
parser.add_option("", "--source_dir",
help="path to top of source tree for this build"
"(used to normalize source paths in baseline)")
(options, args) = parser.parse_args()
if not len(args) >= 1:
parser.error("no filename specified")
filenames = args
analyzer = TsanAnalyze(options.source_dir, filenames, use_gdb=True)
retcode = analyzer.Report()
sys.exit(retcode)
|
Python
| 0.998272 |
@@ -2591,16 +2591,55 @@
.line_%5D%0A
+ if re.search(%22%7B%7B%7B%22, self.line_):%0A
whil
@@ -2676,24 +2676,26 @@
ne_):%0A
+
self.ReadLin
@@ -2690,32 +2690,34 @@
self.ReadLine()%0A
+
if self.st
@@ -2741,32 +2741,34 @@
s None:%0A
+
result.append(se
@@ -2779,30 +2779,34 @@
ine_)%0A
+
else:%0A
+
resu
|
e94192a4c549e46ae0a155dbfa634ebde992903a
|
Create netntlm2hashcat.py
|
netntlm2hashcat.py
|
netntlm2hashcat.py
|
Python
| 0 |
@@ -0,0 +1,1663 @@
+#!/usr/bin/env python%0A%0Aimport sys%0Aimport re%0Aimport argparse%0A%0A# Arg Input (Like a pirate)%0Ap = argparse.ArgumentParser(description='Convert NetNTLM John Hashes to Hashcat Format')%0Ap.add_argument('-i','--hash',action='store_true',help='Enter one-time hash input mode',required=False)%0Ap.add_argument('-f','--file',dest='file',help='Path to file containing multiple hashes',required=False,default=%22%22)%0Ap.add_argument('-o','--output',dest='output',help='File path to save the converted hashes',required=False)%0Aa = p.parse_args()%0A%0A# RegEx to re-arrange the hash%0Areg = re.compile('(.*?):(%5C$.*?)%5C$(.*?)%5C$(.*)')%0A%0Aif a.hash:%0A try:%0A hash = raw_input(%22Enter your hash:%5Cn%22)%0A if hash:%0A print reg.sub(r'%5C1::::%5C4:%5C3',hash)%0A%0A except KeyboardInterrupt:%0A sys.exit(%22%5Cn%22)%0A%0A except:%0A sys.exit(%22Error: Something is broken%5Cn%22)%0A%0Aelif a.file:%0A try:%0A with open(a.file) as temp:%0A%0A for line in temp:%0A outhash = reg.sub(r'%5C1::::%5C4:%5C3',line)%0A outhash = outhash.rstrip('%5Cn%5Cn')%0A%0A if a.output is None:%0A print outhash%0A else:%0A with open(a.output,'w') as f:%0A f.write(outhash)%0A f.close()%0A%0A except KeyboardInterrupt:%0A sys.exit(%22%5Cn%22)%0A%0A except:%0A sys.exit(%22Error: Input file doesn't exist.%5Cn%22)%0A%0Aelse:%0A p.print_help()%0A%0A
|
|
e6e5fbb671c2539f4f82c6eaca51fbf400133482
|
Write a silly Python script to compute some hard coded info from the generated ARM match table, which is substantially more efficient than dealing with tblgen.
|
utils/Target/ARM/analyze-match-table.py
|
utils/Target/ARM/analyze-match-table.py
|
Python
| 0.000138 |
@@ -0,0 +1,2129 @@
+#!/usr/bin/env python%0A%0Adef analyze_match_table(path):%0A # Extract the instruction table.%0A data = open(path).read()%0A start = data.index(%22static const MatchEntry MatchTable%22)%0A end = data.index(%22%5Cn%7D;%5Cn%22, start)%0A lines = data%5Bstart:end%5D.split(%22%5Cn%22)%5B1:%5D%0A%0A # Parse the instructions.%0A insns = %5B%5D%0A for ln in lines:%0A ln = ln.split(%22%7B%22, 1)%5B1%5D%0A ln = ln.rsplit(%22%7D%22, 1)%5B0%5D%0A a,bc = ln.split(%22%7B%22, 1)%0A b,c = bc.split(%22%7D%22, 1)%0A code, string, converter, _ = %5Bs.strip()%0A for s in a.split(%22,%22)%5D%0A items = %5Bs.strip() for s in b.split(%22,%22)%5D%0A _,features = %5Bs.strip() for s in c.split(%22,%22)%5D%0A assert string%5B0%5D == string%5B-1%5D == '%22'%0A string = string%5B1:-1%5D%0A insns.append((code,string,converter,items,features))%0A%0A # For every mnemonic, compute whether or not it can have a carry setting%0A # operand and whether or not it can have a predication code.%0A mnemonic_flags = %7B%7D%0A for insn in insns:%0A mnemonic = insn%5B1%5D%0A items = insn%5B3%5D%0A flags = mnemonic_flags%5Bmnemonic%5D = mnemonic_flags.get(mnemonic, set())%0A flags.update(items)%0A%0A mnemonics = set(mnemonic_flags)%0A ccout_mnemonics = set(m for m in mnemonics%0A if 'MCK_CCOut' in mnemonic_flags%5Bm%5D)%0A condcode_mnemonics = set(m for m in mnemonics%0A if 'MCK_CondCode' in mnemonic_flags%5Bm%5D)%0A noncondcode_mnemonics = mnemonics - condcode_mnemonics%0A print ' %7C%7C '.join('Mnemonic == %22%25s%22' %25 m%0A for m in ccout_mnemonics)%0A print ' %7C%7C '.join('Mnemonic == %22%25s%22' %25 m%0A for m in noncondcode_mnemonics)%0A%0Adef main():%0A import sys%0A if len(sys.argv) == 1:%0A import os%0A from lit.Util import capture%0A llvm_obj_root = capture(%5B%22llvm-config%22, %22--obj-root%22%5D)%0A file = os.path.join(llvm_obj_root,%0A %22lib/Target/ARM/ARMGenAsmMatcher.inc%22)%0A elif len(sys.argv) == 2:%0A file = sys.argv%5B1%5D%0A else:%0A raise NotImplementedError%0A%0A analyze_match_table(file)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
5b2c1650059f9e4b69b6bab1d8ce88177f449e02
|
Add basic test for import
|
foyer/tests/test_external_forcefields.py
|
foyer/tests/test_external_forcefields.py
|
Python
| 0.000001 |
@@ -0,0 +1,144 @@
+import pytest%0A%0A%0Adef test_basic_import():%0A import foyer%0A assert 'external_forcefields' in dir(foyer)%0A import foyer.external_forcefields%0A
|