Dataset schema — one row per commit, with fields appearing in this order:

- commit: string, 40 characters (SHA-1 hash)
- subject: string, 1–3.25k characters (commit message subject)
- old_file: string, 4–311 characters
- new_file: string, 4–311 characters
- old_contents: string, 0–26.3k characters (empty for newly added files)
- lang: string, 3 classes
- proba: float64, 0–1
- diff: string, 0–7.82k characters (percent-encoded patch)
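The `diff` field stores each patch percent-encoded (`%0A` for a newline, `%22` for a double quote, `%25` for a literal percent sign, and so on), so it must be decoded before use. Below is a minimal sketch of decoding with the Python standard library; the `row` dict is a hand-abbreviated illustration loosely based on the FlexGet version-bump record further down, not a verbatim dataset row.

```python
from urllib.parse import unquote

# Hypothetical, abbreviated row for illustration only -- real records carry
# all eight fields listed in the schema above.
row = {
    "commit": "d56c3528ad8058231910fd3d06895f39174eeb6c",
    "subject": "Prepare v2.16.2.dev",
    "lang": "Python",
    "diff": "@@ -438,11 +438,15 @@%0A = '2.16.%0A-1%0A+2.dev%0A'%0A",
}

# unquote() maps %0A back to "\n", %22 to '"', %25 to "%", etc.,
# recovering the plain-text patch hunks.
print(unquote(row["diff"]))
```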
dd1bface79e3fcc53e9e8b1cc10ea9f467b757f2
|
update use of AppCommand
|
django_extensions/management/commands/create_jobs.py
|
django_extensions/management/commands/create_jobs.py
|
# -*- coding: utf-8 -*-
import os
import sys
import shutil

from django.core.management.base import AppCommand
from django.core.management.color import color_style

from django_extensions.management.utils import _make_writeable, signalcommand


class Command(AppCommand):
    help = "Creates a Django jobs command directory structure for the given app name in the current directory."
    args = "[appname]"
    label = 'application name'

    requires_system_checks = False
    # Can't import settings during this command, because they haven't
    # necessarily been created.
    can_import_settings = True

    @signalcommand
    def handle_app_config(self, app, **options):
        copy_template('jobs_template', app.path, **options)

    @signalcommand
    def handle_app(self, app, **options):
        # handle_app is RemovedInDjango19
        app_dir = os.path.dirname(app.__file__)
        copy_template('jobs_template', app_dir, **options)


def copy_template(template_name, copy_to, **options):
    """copies the specified template directory to the copy_to location"""
    import django_extensions

    style = color_style()
    ERROR = getattr(style, 'ERROR', lambda x: x)
    SUCCESS = getattr(style, 'SUCCESS', lambda x: x)

    template_dir = os.path.join(django_extensions.__path__[0], 'conf', template_name)

    # walks the template structure and copies it
    for d, subdirs, files in os.walk(template_dir):
        relative_dir = d[len(template_dir) + 1:]
        if relative_dir and not os.path.exists(os.path.join(copy_to, relative_dir)):
            os.mkdir(os.path.join(copy_to, relative_dir))
        for i, subdir in enumerate(subdirs):
            if subdir.startswith('.'):
                del subdirs[i]
        for f in files:
            if f.endswith('.pyc') or f.startswith('.DS_Store'):
                continue
            path_old = os.path.join(d, f)
            path_new = os.path.join(copy_to, relative_dir, f).rstrip(".tmpl")
            if os.path.exists(path_new):
                if options.get('verbosity', 1) > 1:
                    print(ERROR("%s already exists" % path_new))
                continue
            if options.get('verbosity', 1) > 1:
                print(SUCCESS("%s" % path_new))
            with open(path_old, 'r') as fp_orig:
                with open(path_new, 'w') as fp_new:
                    fp_new.write(fp_orig.read())
            try:
                shutil.copymode(path_old, path_new)
                _make_writeable(path_new)
            except OSError:
                sys.stderr.write("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new)
|
Python
| 0 |
@@ -380,62 +380,8 @@
ry.%22
-%0A args = %22%5Bappname%5D%22%0A label = 'application name'
%0A%0A
@@ -680,219 +680,8 @@
s)%0A%0A
- @signalcommand%0A def handle_app(self, app, **options):%0A # handle_app is RemovedInDjango19%0A app_dir = os.path.dirname(app.__file__)%0A copy_template('jobs_template', app_dir, **options)%0A%0A
%0Adef
|
06b536cdfd684d12ce64670bde50fdcbf7a71bd2
|
Add a workspace_binary rule to run a binary from the workspace root
|
defs/run_in_workspace.bzl
|
defs/run_in_workspace.bzl
|
Python
| 0.000001 |
@@ -0,0 +1,2342 @@
+# Copyright 2018 The Kubernetes Authors.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A# This technique was inspired by the gazelle rule implementation in bazelbuild/rules_go:%0A# https://github.com/bazelbuild/rules_go/blob/86ade29284ca11deeead86c061e9ba9bd0d157e0/go/private/tools/gazelle.bzl%0A%0A# Writes out a script which saves the runfiles directory,%0A# changes to the workspace root, and then runs a command.%0Adef _workspace_binary_script_impl(ctx):%0A content = %22%22%22#!/usr/bin/env bash%0Aset -o errexit%0Aset -o nounset%0Aset -o pipefail%0A%0ABASE=$(pwd)%0Acd $(dirname $(readlink WORKSPACE))%0A%22$BASE/%7Bcmd%7D%22 $@%0A%22%22%22.format(cmd=ctx.file.cmd.short_path)%0A%0A ctx.actions.write(output=ctx.outputs.executable, content=content, is_executable=True)%0A%0A runfiles = ctx.runfiles(%0A files = %5Bctx.file.cmd, ctx.file.workspace%5D,%0A )%0A return %5BDefaultInfo(runfiles=runfiles)%5D%0A%0A_workspace_binary_script = rule(%0A attrs = %7B%0A %22cmd%22: attr.label(%0A mandatory = True,%0A allow_files = True,%0A single_file = True,%0A ),%0A %22workspace%22: attr.label(%0A mandatory = True,%0A allow_files = True,%0A single_file = True,%0A ),%0A %7D,%0A executable = True,%0A implementation = _workspace_binary_script_impl,%0A)%0A%0A# Wraps a binary to be run in the workspace root via bazel run.%0A#%0A# For example, one might do something like%0A#%0A# workspace_binary(%0A# name = %22dep%22,%0A# cmd = %22//vendor/github.com/golang/dep/cmd/dep%22,%0A# )%0A#%0A# which would allow running dep with bazel run.%0Adef workspace_binary(name, cmd, visibility=None):%0A script_name = name + %22_script%22%0A _workspace_binary_script(%0A name=script_name,%0A cmd=cmd,%0A workspace = %22//:WORKSPACE%22,%0A )%0A native.sh_binary(%0A name = name,%0A srcs = %5B%22:%22 + script_name%5D,%0A visibility = visibility,%0A )%0A
|
|
eda3e6c005c1115a039f394d6f00baabebd39fee
|
Add command for full daily build process
|
calaccess_website/management/commands/updatebuildpublish.py
|
calaccess_website/management/commands/updatebuildpublish.py
|
Python
| 0 |
@@ -0,0 +1,1033 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%22%22%22%0AUpdate to the latest available CAL-ACCESS snapshot and publish the files to the%0Awebsite.%0A%22%22%22%0Aimport logging%0Afrom django.core.management import call_command%0Afrom calaccess_raw.management.commands.updatecalaccessrawdata import Command as updatecommand%0Alogger = logging.getLogger(__name__)%0A%0A%0Aclass Command(updatecommand):%0A %22%22%22%0A Update to the latest available CAL-ACCESS snapshot and publish the files to%0A the website.%0A %22%22%22%0A help = 'Update to the latest available CAL-ACCESS snapshot and publish the%5C%0Afiles to the website.'%0A%0A def handle(self, *args, **options):%0A %22%22%22%0A Make it happen.%0A %22%22%22%0A super(Command, self).handle(*args, **options)%0A%0A self.header('Creating latest file links')%0A call_command('createlatestlinks')%0A self.header('Baking downloads-website content')%0A call_command('build')%0A self.header('Publishing backed content to S3 bucket.')%0A call_command('publish')%0A%0A self.success(%22Done!%22)%0A
|
|
52637b519ca7e743b913f55c37a2ae952a520d9f
|
List commands of given apps
|
django_dev_commands/management/commands/commands.py
|
django_dev_commands/management/commands/commands.py
|
Python
| 0.99999 |
@@ -0,0 +1,1196 @@
+# -*- coding: utf-8 -*-%0A%22%22%22List commands of the specified applications.%0A%0ABy passing command line arguments that represents regexs of app names you can list the commands of%0Athose apps only.%0A%22%22%22%0Aimport re%0A%0Afrom django.core.management import get_commands, execute_from_command_line%0Afrom django.core.management.base import BaseCommand, CommandError%0Afrom django.core.management.color import color_style%0Afrom django.utils import six%0A%0A%0Adef get_filtered_commands(*filters):%0A filters_re = re.compile(%22%7C%22.join(filters))%0A for command, app in six.iteritems(get_commands()):%0A if filters_re.search(app):%0A yield app, command%0A%0A%0Aclass Command(BaseCommand):%0A args = '%3Capp-name-regex ...%3E'%0A help = __doc__%0A%0A def handle(self, *filters, **options):%0A if filters:%0A style = color_style()%0A output = set()p%0A for app, command in get_filtered_commands(*filters):%0A if app not in output:%0A self.stdout.write(style.NOTICE(%22%5Cn%5B%7B%7D%5D%5Cn%22.format(app)))%0A output.add(app)%0A self.stdout.write(%22%7B%7D%5Cn%22.format(command))%0A else:%0A execute_from_command_line(%5B%22manage.py%22%5D)%0A %0A
|
|
fca3934b3a190f3f6877dfed5c5c4e6c81ecc61b
|
Return empty body on empty note; #327
|
judge/views/ticket.py
|
judge/views/ticket.py
|
from django import forms
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.exceptions import PermissionDenied, ImproperlyConfigured
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.urls import reverse_lazy
from django.utils.functional import cached_property
from django.utils.html import escape, format_html, linebreaks
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy, ugettext as _
from django.views import View
from django.views.generic import FormView, ListView, TemplateView
from django.views.generic.detail import SingleObjectMixin

from judge.models import Ticket, TicketMessage, Problem
from judge.utils.diggpaginator import DiggPaginator
from judge.utils.views import TitleMixin, paginate_query_context
from judge.widgets import HeavyPreviewPageDownWidget

ticket_widget = (forms.Textarea() if HeavyPreviewPageDownWidget is None else
                 HeavyPreviewPageDownWidget(preview=reverse_lazy('ticket_preview'),
                                            preview_timeout=1000, hide_preview_button=True))


class TicketForm(forms.Form):
    title = forms.CharField(max_length=100, label=ugettext_lazy('Ticket title'))
    body = forms.CharField(widget=ticket_widget)


class SingleObjectFormView(SingleObjectMixin, FormView):
    def post(self, request, *args, **kwargs):
        self.object = self.get_object()
        return super(SingleObjectFormView, self).post(request, *args, **kwargs)

    def get(self, request, *args, **kwargs):
        self.object = self.get_object()
        return super(SingleObjectFormView, self).get(request, *args, **kwargs)


class NewTicketView(LoginRequiredMixin, SingleObjectFormView):
    form_class = TicketForm
    template_name = 'ticket/new.jade'

    def get_assignees(self):
        return []

    def form_valid(self, form):
        ticket = Ticket(user=self.request.user.profile, title=form.cleaned_data['title'])
        ticket.linked_item = self.object
        ticket.save()
        TicketMessage(ticket=ticket, user=ticket.user, body=form.cleaned_data['body']).save()
        ticket.assignees.set(self.get_assignees())
        return HttpResponseRedirect(reverse('ticket', args=[ticket.id]))


class NewProblemTicketView(TitleMixin, NewTicketView):
    model = Problem
    slug_field = slug_url_kwarg = 'code'

    def get_assignees(self):
        return self.object.authors.all()

    def get_title(self):
        return _('New ticket for %s') % self.object.name

    def get_content_title(self):
        return mark_safe(escape(_('New ticket for %s')) %
                         format_html(u'<a href="{0}">{1}</a>', reverse('problem_detail', args=[self.object.code]),
                                     self.object.translated_name(self.request.LANGUAGE_CODE)))


class TicketCommentForm(forms.Form):
    body = forms.CharField(widget=ticket_widget)


class TicketMixin(object):
    model = Ticket

    def get_object(self, queryset=None):
        ticket = super(TicketMixin, self).get_object(queryset)
        profile_id = self.request.user.profile.id
        if self.request.user.has_perm('judge.change_ticket'):
            return ticket
        if ticket.user_id == profile_id:
            return ticket
        if ticket.assignees.filter(id=profile_id).exists():
            return ticket
        raise PermissionDenied()


class TicketView(TitleMixin, LoginRequiredMixin, TicketMixin, SingleObjectFormView):
    form_class = TicketCommentForm
    template_name = 'ticket/ticket.jade'
    context_object_name = 'ticket'

    def form_valid(self, form):
        message = TicketMessage(user=self.request.user.profile,
                                body=form.cleaned_data['body'],
                                ticket=self.object)
        message.save()
        return HttpResponseRedirect('%s#message-%d' % (reverse('ticket', args=[self.object.id]), message.id))

    def get_title(self):
        return _('%(title)s - Ticket %(id)d') % {'title': self.object.title, 'id': self.object.id}

    def get_context_data(self, **kwargs):
        context = super(TicketView, self).get_context_data(**kwargs)
        context['messages'] = self.object.messages.select_related('user__user')
        context['assignees'] = self.object.assignees.select_related('user')
        return context


class TicketStatusChangeView(LoginRequiredMixin, TicketMixin, SingleObjectMixin, View):
    open = None

    def post(self, request, *args, **kwargs):
        if self.open is None:
            raise ImproperlyConfigured('Need to define open')
        ticket = self.get_object()
        if ticket.is_open != self.open:
            ticket.is_open = self.open
            ticket.save()
        return HttpResponse(status=204)


class TicketNotesForm(forms.Form):
    notes = forms.CharField(widget=forms.Textarea(), required=False)


class TicketNotesEditView(LoginRequiredMixin, TicketMixin, SingleObjectMixin, FormView):
    template_name = 'ticket/edit_notes.jade'
    form_class = TicketNotesForm
    object = None

    def get_initial(self):
        return {'notes': self.get_object().notes}

    def form_valid(self, form):
        ticket = self.get_object()
        ticket.notes = form.cleaned_data['notes']
        ticket.save()
        return HttpResponse(linebreaks(form.cleaned_data['notes'], autoescape=True))

    def form_invalid(self, form):
        return HttpResponseBadRequest()


class TicketList(LoginRequiredMixin, ListView):
    model = Ticket
    template_name = 'ticket/list.jade'
    context_object_name = 'tickets'
    paginate_by = 50
    paginator_class = DiggPaginator

    @cached_property
    def profile(self):
        return self.request.user.profile

    def _get_queryset(self):
        return Ticket.objects.select_related('user__user').prefetch_related('assignees__user').order_by('-id')

    def get_queryset(self):
        if self.request.user.has_perm('judge.change_ticket'):
            return self._get_queryset()
        return self._get_queryset().filter(assignees__id=self.profile.id)

    def get_context_data(self, **kwargs):
        context = super(TicketList, self).get_context_data(**kwargs)
        page = context['page_obj']
        context['title'] = _('Tickets - Page %(number)d of %(total)d') % {
            'number': page.number,
            'total': page.paginator.num_pages,
        }
        context.update(paginate_query_context(self.request))
        return context
|
Python
| 0.999998 |
@@ -5326,24 +5326,32 @@
cket.notes =
+ notes =
form.cleane
@@ -5380,32 +5380,54 @@
ticket.save()%0A
+ if notes:%0A
return H
@@ -5453,34 +5453,13 @@
aks(
-form.cleaned_data%5B'
notes
-'%5D
, au
@@ -5473,16 +5473,74 @@
e=True))
+%0A else:%0A return HttpResponse(status=204)
%0A%0A de
|
056966052d0c23395a205511dce2e9577f376539
|
Add Sequence
|
chainerrl/links/sequence.py
|
chainerrl/links/sequence.py
|
Python
| 0.000048 |
@@ -0,0 +1,878 @@
+from __future__ import unicode_literals%0Afrom __future__ import print_function%0Afrom __future__ import division%0Afrom __future__ import absolute_import%0Afrom builtins import super%0Afrom future import standard_library%0Astandard_library.install_aliases()%0Aimport inspect%0A%0Aimport chainer%0A%0Afrom chainerrl.recurrent import RecurrentChainMixin%0A%0A%0Aclass Sequence(chainer.ChainList, RecurrentChainMixin):%0A%0A def __init__(self, *layers):%0A self.layers = layers%0A links = %5Blayer for layer in layers if isinstance(layer, chainer.Link)%5D%0A super().__init__(*links)%0A%0A def __call__(self, x, **kwargs):%0A h = x%0A for layer in self.layers:%0A layer_argnames = inspect.getargspec(layer)%5B0%5D%0A layer_kwargs = %7Bk: v for k, v in kwargs.items()%0A if k in layer_argnames%7D%0A h = layer(h, **layer_kwargs)%0A return h%0A
|
|
b8acaf64187f5626ef6755ef00d2b2a1471d4914
|
Add closure type inference test
|
numba/tests/closures/test_closure_type_inference.py
|
numba/tests/closures/test_closure_type_inference.py
|
Python
| 0.000006 |
@@ -0,0 +1,421 @@
+import numpy as np%0A%0Afrom numba import *%0Afrom numba.tests.test_support import *%0A%0A@autojit%0Adef test_cellvar_promotion(a):%0A %22%22%22%0A %3E%3E%3E inner = test_cellvar_promotion(10)%0A 200.0%0A %3E%3E%3E inner.__name__%0A 'inner'%0A %3E%3E%3E inner()%0A 1000.0%0A %22%22%22%0A b = int(a) * 2%0A%0A @jit(void())%0A def inner():%0A print a * b%0A%0A inner()%0A a = float(a)%0A b = a * a # + 1j # Promotion issue%0A return inner%0A%0Atestmod()
|
|
3a04bff5a7940463d6429918215429700befb507
|
add valid-number
|
valid-number.py
|
valid-number.py
|
Python
| 0.999993 |
@@ -0,0 +1,1249 @@
+# Link: https://oj.leetcode.com/problems/valid-number/%0Aclass Solution:%0A %22%22%22%0A Notes please see https://blog.xiaoba.me/2014/11/10/leetcode-valid-number.html%0A %22%22%22%0A # @param s, a string%0A # @return a boolean%0A def isNumber(self, s):%0A stateTable = %5B%0A %5B 1, 1, 1, 3, 3, 7, 7, 7,-1%5D,%0A %5B 4, 3, 4,-1,-1,-1,-1,-1,-1%5D,%0A %5B 0, 8,-1, 8,-1,-1,-1, 8, 8%5D,%0A %5B-1, 5,-1, 5,-1,-1,-1,-1,-1%5D,%0A %5B 2,-1,-1,-1,-1, 6,-1,-1,-1%5D%0A %5D%0A i = 0%0A state = 0%0A while True:%0A if i == len(s):%0A break%0A c = s%5Bi%5D%0A i += 1%0A inputType = self._getInputType(c)%0A if inputType is None:%0A return False%0A state = stateTable%5BinputType%5D%5Bstate%5D%0A if state == -1:%0A return False%0A%0A return state == 1 or state == 3 or state == 7 or state == 8%0A%0A def _isDigit(self, c):%0A return c %3E= '0' and c %3C= '9'%0A%0A def _getInputType(self, c):%0A if self._isDigit(c):%0A return 0%0A if c == '.':%0A return 1%0A if c == ' ':%0A return 2%0A if c.lower() == 'e':%0A return 3%0A if c == '+' or c == '-':%0A return 4%0A
|
|
dec6ea168c68e267f15b74407f8745d242629d30
|
Create tokens.py
|
tokens.py
|
tokens.py
|
Python
| 0.000001 |
@@ -0,0 +1,64 @@
+C_KEY = %22%22 %0AC_SECRET = %22%22 %0AA_TOKEN = %22%22 %0AA_TOKEN_SECRET = %22%22%0A
|
|
d56c3528ad8058231910fd3d06895f39174eeb6c
|
Prepare v2.16.2.dev
|
flexget/_version.py
|
flexget/_version.py
|
"""
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '2.16.1'
|
Python
| 0.000007 |
@@ -438,11 +438,15 @@
= '2.16.
-1
+2.dev
'%0A
|
a475d50d2b7b9febe5fb01bb185b63cbbe25f4d1
|
add migration to remove fields
|
hoover/search/migrations/0006_auto_20200303_1309.py
|
hoover/search/migrations/0006_auto_20200303_1309.py
|
Python
| 0.000001 |
@@ -0,0 +1,560 @@
+# Generated by Django 2.2.7 on 2020-03-03 13:09%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('search', '0005_rename_user_hypens_to_dots'),%0A %5D%0A%0A operations = %5B%0A migrations.RemoveField(%0A model_name='collection',%0A name='loader',%0A ),%0A migrations.RemoveField(%0A model_name='collection',%0A name='loader_state',%0A ),%0A migrations.RemoveField(%0A model_name='collection',%0A name='options',%0A ),%0A %5D%0A
|
|
32108ccab67a76a05150e8cfb5bbdf2ff3477346
|
Create minesweeper.py
|
game/minesweeper.py
|
game/minesweeper.py
|
Python
| 0.000001 |
@@ -0,0 +1,840 @@
+from tkinter import *%0A%0Aroot = Tk()%0Aroot.resizable(0, 0)%0Aroot.title(%22Minesweeper%22)%0Aframe = Frame(root)%0A%0AGrid.rowconfigure(root, 0, weight=1)%0AGrid.columnconfigure(root, 0, weight=1)%0Aframe.grid(row=0, column=0)%0A%0Aclass Tiles:%0A%09def __init__(self, frame, size):%0A%09%09self.size = size%0A%09%09self.frame = frame%0A%09%09self.tiles%5B%5D%0A%0A%09%09for x in range(self.size):%0A%09%09%09self.tiles.append(%5B%5D)%0A%09%09%09for y in range(self.size):%0A%09%09%09%09this.tiles%5Bx%5D.append(Button())%0A%09%09%09%09tiles%5Bx%5D%5By%5D = Button(self.frame, text=' ', width=2, bd = 3, command=lambda row=x, col=y: self.clicked(row, col)%0A%09%09%09%09tiles%5Bx%5D%5By%5D.grid(row=x, column=y)%0A%0A%09%09for x in range(this.size):%0A%09%09%09Grid.columnconfigure(frame, x, weight=1)%0A%0A%09%09for y in range(this.size):%0A %09%09%09Grid.rowconfigure(frame, y, weight=1)%0A %09%0A%09def clicked(self, x, y):%0A%09%09tiles%5Bx%5D%5By%5D%5B%22text%22%5D = '@'%0A%09%09tiles%5Bx%5D%5By%5D%5B%22relief%22%5D = SUNKEN%0A%0Aroot.mainloop()%0A
|
|
a75e87fd3b4fc3f370554227cefc4687593621ca
|
fix merge fup
|
gdbpool/psyco_ge.py
|
gdbpool/psyco_ge.py
|
Python
| 0.000001 |
@@ -0,0 +1,2269 @@
+%22%22%22A wait callback to allow psycopg2 cooperation with gevent.%0A%0AUse %60make_psycopg_green()%60 to enable gevent support in Psycopg.%0A%22%22%22%0A%0A# Copyright (C) 2010 Daniele Varrazzo %[email protected]%3E%0A# and licensed under the MIT license:%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A#%0A# The above copyright notice and this permission notice shall be included in%0A# all copies or substantial portions of the Software.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN%0A# THE SOFTWARE.%0A%0Aimport psycopg2%0Afrom psycopg2 import extensions%0A%0Afrom gevent.socket import wait_read, wait_write%0A%0Adef make_psycopg_green():%0A %22%22%22Configure Psycopg to be used with gevent in non-blocking way.%22%22%22%0A if not hasattr(extensions, 'set_wait_callback'):%0A raise ImportError(%0A %22support for coroutines not available in this Psycopg version (%25s)%22%0A %25 psycopg2.__version__)%0A%0A extensions.set_wait_callback(gevent_wait_callback)%0A%0Adef gevent_wait_callback(conn, timeout=None):%0A %22%22%22A wait callback useful to allow gevent to work with Psycopg.%22%22%22%0A while 1:%0A state = conn.poll()%0A if state == extensions.POLL_OK:%0A break%0A elif state == extensions.POLL_READ:%0A wait_read(conn.fileno(), timeout=timeout)%0A elif state == extensions.POLL_WRITE:%0A wait_write(conn.fileno(), timeout=timeout)%0A else:%0A raise psycopg2.OperationalError(%0A %22Bad result from poll: %25r%22 %25 state)%0A
|
|
5f503f0b9ab51ca2b1985fe88d5e84ff63b7d745
|
Add sample playlists for testing features.
|
addplaylists.py
|
addplaylists.py
|
Python
| 0 |
@@ -0,0 +1,601 @@
+#!/usr/bin/env python2%0Afrom datetime import datetime%0Afrom datetime import timedelta%0Aimport random%0Afrom wuvt.trackman.lib import perdelta%0Afrom wuvt import db%0Afrom wuvt.trackman.models import DJSet, DJ%0A%0Atoday = datetime.now()%0Aprint(%22adding dj%22)%0Adj = DJ(u%22Johnny 5%22, u%22John%22)%0Adb.session.add(dj)%0Adb.session.commit()%0A%0Aprint(%22djadded%22)%0Afor show in perdelta(today - timedelta(days=500), today, timedelta(hours=4)):%0A if random.randint(0,99) %3C 40:%0A djset = DJSet(dj.id)%0A djset.dtstart = show%0A djset.dtend = show + timedelta(4)%0A db.session.add(djset)%0A db.session.commit()%0A%0A
|
|
1905395783d5a0f5997e6e620ba09d41398840e0
|
add test_vincia.py
|
test_vincia.py
|
test_vincia.py
|
Python
| 0.000004 |
@@ -0,0 +1,123 @@
+%0A%0Afrom deepjets.generate import generate_events%0A%0Afor event in generate_events('w_vincia.config', 1, vincia=True):%0A pass%0A
|
|
4fab31eef9ad80230b36039b66c70d94456e5f9b
|
Add missing tests file from previous commit.
|
tests/monad.py
|
tests/monad.py
|
Python
| 0 |
@@ -0,0 +1,1737 @@
+'''Test case for monads and monoidic functions%0A'''%0Aimport unittest%0A%0Afrom lighty import monads%0A%0A%0Aclass MonadTestCase(unittest.TestCase):%0A '''Test case for partial template execution%0A '''%0A%0A def testNumberComparision(self):%0A monad = monads.ValueMonad(10)%0A assert monad == 10, 'Number __eq__ error: %25s' %25 monad%0A assert monad %3E 9, 'Number __gt__ error: %25s' %25 monad%0A assert monad %3E= 10, 'Number __ge__ error: %25s' %25 monad%0A assert monad %3C 11, 'Number __lt__ error: %25s' %25 monad%0A assert monad %3C= 10, 'Number __le__ error: %25s' %25 monad%0A%0A def testNumberActions(self):%0A monad = monads.ValueMonad(10)%0A assert monad + 10 == 20, 'Number + error: %25s' %25 (monad + 10)%0A assert monad - 5 == 5, 'Number - error: %25s' %25 (monad - 5)%0A assert monad / 2 == 5, 'Number / error: %25s' %25 (monad / 2)%0A assert monad * 2 == 20, 'Number * error: %25s' %25 (monad * 2)%0A assert monad ** 2 == 100, 'Number pow error: %25s' %25 (monad ** 2)%0A assert monad %3C%3C 1 == 10 %3C%3C 1, 'Number %3C%3C error: %25s' %25 (monad %3C%3C 1)%0A assert monad %3E%3E 1 == 10 %3E%3E 1, 'Number %3E%3E error: %25s' %25 (monad %3E%3E 1)%0A%0A def testNumberSeq(self):%0A monad = monads.ValueMonad(10)%0A assert len(monad) == 1, 'Number len error: %25s' %25 len(monad)%0A assert monad%5B0%5D == 10, 'Number %5B0%5D error: %25s' %25 monad%5B0%5D%0A assert isinstance(monad%5B1%5D, monads.NoneMonad), ('Number %5B1%5D error' %25%0A monad%5B1%5D)%0A assert not 10 in monad, 'Number in error: %25s' %25 (10 in monad)%0A%0A%0Adef test():%0A suite = unittest.TestSuite()%0A suite.addTest(MonadTestCase('testNumberComparision'))%0A suite.addTest(MonadTestCase('testNumberActions'))%0A suite.addTest(MonadTestCase('testNumberSeq'))%0A return suite%0A
|
|
c6f3c5dc482d8f052c37ecf99accee28d1be86a9
|
Update forward compatibility horizon to 2018-12-24
|
tensorflow/python/compat/compat.py
|
tensorflow/python/compat/compat.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.

See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import datetime

from tensorflow.python import tf2
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import variable_scope
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export

_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2018, 12, 23)


@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
  """Return true if the forward compatibility window has expired.

  See [Version
  compatibility](https://tensorflow.org/guide/version_compat#backward_forward).

  Forward-compatibility refers to scenarios where the producer of a TensorFlow
  model (a GraphDef or SavedModel) is compiled against a version of the
  TensorFlow library newer than what the consumer was compiled against. The
  "producer" is typically a Python program that constructs and trains a model
  while the "consumer" is typically another program that loads and serves the
  model.

  TensorFlow has been supporting a 3 week forward-compatibility window for
  programs compiled from source at HEAD.

  For example, consider the case where a new operation `MyNewAwesomeAdd` is
  created with the intent of replacing the implementation of an existing Python
  wrapper - `tf.add`. The Python wrapper implementation should change from
  something like:

  ```python
  def add(inputs, name=None):
    return gen_math_ops.add(inputs, name)
  ```

  to:

  ```python
  from tensorflow.python.compat import compat

  def add(inputs, name=None):
    if compat.forward_compatible(year, month, day):
      # Can use the awesome new implementation.
      return gen_math_ops.my_new_awesome_add(inputs, name)
    # To maintain forward compatibiltiy, use the old implementation.
    return gen_math_ops.add(inputs, name)
  ```

  Where `year`, `month`, and `day` specify the date beyond which binaries
  that consume a model are expected to have been updated to include the
  new operations. This date is typically at least 3 weeks beyond the date
  the code that adds the new operation is committed.

  Args:
    year: A year (e.g., 2018).
    month: A month (1 <= month <= 12) in year.
    day: A day (1 <= day <= 31, or 30, or 29, or 28) in month.

  Returns:
    True if the caller can expect that serialized TensorFlow graphs produced
    can be consumed by programs that are compiled with the TensorFlow library
    source code after (year, month, day).
  """
  return _FORWARD_COMPATIBILITY_HORIZON > datetime.date(year, month, day)


@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
  """Context manager for testing forward compatibility of generated graphs.

  See [Version
  compatibility](https://tensorflow.org/guide/version_compat#backward_forward).

  To ensure forward compatibility of generated graphs (see `forward_compatible`)
  with older binaries, new features can be gated with:

  ```python
  if compat.forward_compatible(year=2018, month=08, date=01):
    generate_graph_with_new_features()
  else:
    generate_graph_so_older_binaries_can_consume_it()
  ```

  However, when adding new features, one may want to unittest it before
  the forward compatibility window expires. This context manager enables
  such tests. For example:

  ```python
  from tensorflow.python.compat import compat

  def testMyNewFeature(self):
    with compat.forward_compatibility_horizon(2018, 08, 02):
      # Test that generate_graph_with_new_features() has an effect
  ```

  Args:
    year: A year (e.g. 2018).
    month: A month (1 <= month <= 12) in year.
    day: A day (1 <= day <= 31, or 30, or 29, or 28) in month.

  Yields:
    Nothing.
  """
  global _FORWARD_COMPATIBILITY_HORIZON
  try:
    old_compat_date = _FORWARD_COMPATIBILITY_HORIZON
    _FORWARD_COMPATIBILITY_HORIZON = datetime.date(year, month, day)
    yield
  finally:
    _FORWARD_COMPATIBILITY_HORIZON = old_compat_date


@tf_export(v1=["enable_v2_behavior"])
def enable_v2_behavior():
  """Enables TensorFlow 2.x behaviors.

  This function can be called at the beginning of the program (before `Tensors`,
  `Graphs` or other structures have been created, and before devices have been
  initialized. It switches all global behaviors that are different between
  TensorFlow 1.x and 2.x to behave as intended for 2.x.

  This function is called in the main TensorFlow `__init__.py` file, user should
  not need to call it, except during complex migrations.
  """
  tf2.enable()  # Switches TensorArrayV2 and control flow V2
  ops.enable_eager_execution()
  tensor_shape.enable_v2_tensorshape()  # Also switched by tf2
  variable_scope.enable_resource_variables()


@tf_export(v1=["disable_v2_behavior"])
def disable_v2_behavior():
  """Disables TensorFlow 2.x behaviors.

  This function can be called at the beginning of the program (before `Tensors`,
  `Graphs` or other structures have been created, and before devices have been
  initialized. It switches all global behaviors that are different between
  TensorFlow 1.x and 2.x to behave as intended for 1.x.

  User can call this function to disable 2.x behavior during complex migrations.
  """
  tf2.disable()  # Switches TensorArrayV2 and control flow V2
  ops.disable_eager_execution()
  tensor_shape.disable_v2_tensorshape()  # Also switched by tf2
  variable_scope.disable_resource_variables()
|
Python
| 0 |
@@ -1321,17 +1321,17 @@
8, 12, 2
-3
+4
)%0A%0A%0A@tf_
|
abdb631654651536512474680c36597c22318a8c
|
Update forward compatibility horizon to 2022-09-11
|
tensorflow/python/compat/compat.py
|
tensorflow/python/compat/compat.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.

See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""

import datetime
import os

from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export

# This value changes every day with an automatic CL. It can be modified in code
# via `forward_compatibility_horizon()` or with the environment variable
# TF_FORWARD_COMPATIBILITY_DELTA_DAYS, which is added to the compatibility date.
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2022, 9, 10)
_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS"
_FORWARD_COMPATIBILITY_DATE_NUMBER = None


def _date_to_date_number(year, month, day):
  return (year << 9) | (month << 5) | day


def _update_forward_compatibility_date_number(date_to_override=None):
  """Update the base date to compare in forward_compatible function."""
  global _FORWARD_COMPATIBILITY_DATE_NUMBER

  if date_to_override:
    date = date_to_override
  else:
    date = _FORWARD_COMPATIBILITY_HORIZON
    delta_days = os.getenv(_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME)
    if delta_days:
      date += datetime.timedelta(days=int(delta_days))

  if date < _FORWARD_COMPATIBILITY_HORIZON:
    logging.warning("Trying to set the forward compatibility date to the past"
                    " date %s. This will be ignored by TensorFlow." % (date))
    return
  _FORWARD_COMPATIBILITY_DATE_NUMBER = _date_to_date_number(
      date.year, date.month, date.day)


_update_forward_compatibility_date_number()


@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
  """Return true if the forward compatibility window has expired.

  See [Version
  compatibility](https://tensorflow.org/guide/version_compat#backward_forward).

  Forward-compatibility refers to scenarios where the producer of a TensorFlow
  model (a GraphDef or SavedModel) is compiled against a version of the
  TensorFlow library newer than what the consumer was compiled against. The
  "producer" is typically a Python program that constructs and trains a model
  while the "consumer" is typically another program that loads and serves the
  model.

  TensorFlow has been supporting a 3 week forward-compatibility window for
  programs compiled from source at HEAD.

  For example, consider the case where a new operation `MyNewAwesomeAdd` is
  created with the intent of replacing the implementation of an existing Python
  wrapper - `tf.add`. The Python wrapper implementation should change from
  something like:

  ```python
  def add(inputs, name=None):
    return gen_math_ops.add(inputs, name)
  ```

  to:

  ```python
  from tensorflow.python.compat import compat

  def add(inputs, name=None):
    if compat.forward_compatible(year, month, day):
      # Can use the awesome new implementation.
      return gen_math_ops.my_new_awesome_add(inputs, name)
    # To maintain forward compatibility, use the old implementation.
    return gen_math_ops.add(inputs, name)
  ```

  Where `year`, `month`, and `day` specify the date beyond which binaries
  that consume a model are expected to have been updated to include the
  new operations. This date is typically at least 3 weeks beyond the date
  the code that adds the new operation is committed.

  Args:
    year: A year (e.g., 2018). Must be an `int`.
    month: A month (1 <= month <= 12) in year. Must be an `int`.
    day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
      `int`.

  Returns:
    True if the caller can expect that serialized TensorFlow graphs produced
    can be consumed by programs that are compiled with the TensorFlow library
    source code after (year, month, day).
  """
  return _FORWARD_COMPATIBILITY_DATE_NUMBER > _date_to_date_number(
      year, month, day)


@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
  """Context manager for testing forward compatibility of generated graphs.

  See [Version
  compatibility](https://www.tensorflow.org/guide/versions#backward_and_partial_forward_compatibility).

  To ensure forward compatibility of generated graphs (see `forward_compatible`)
  with older binaries, new features can be gated with:

  ```python
  if compat.forward_compatible(year=2018, month=08, date=01):
    generate_graph_with_new_features()
  else:
    generate_graph_so_older_binaries_can_consume_it()
  ```

  However, when adding new features, one may want to unittest it before
  the forward compatibility window expires. This context manager enables
  such tests. For example:

  ```python
  from tensorflow.python.compat import compat

  def testMyNewFeature(self):
    with compat.forward_compatibility_horizon(2018, 08, 02):
      # Test that generate_graph_with_new_features() has an effect
  ```

  Args:
    year: A year (e.g., 2018). Must be an `int`.
    month: A month (1 <= month <= 12) in year. Must be an `int`.
    day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
      `int`.

  Yields:
    Nothing.
  """
  try:
    _update_forward_compatibility_date_number(datetime.date(year, month, day))
    yield
  finally:
    _update_forward_compatibility_date_number()
|
Python
| 0 |
@@ -1335,17 +1335,17 @@
22, 9, 1
-0
+1
)%0A_FORWA
|
50141a66831d080ecc0791f94d1bd3bfec0aeb65
|
Add migration for #465
|
judge/migrations/0046_blogpost_authors.py
|
judge/migrations/0046_blogpost_authors.py
|
Python
| 0 |
@@ -0,0 +1,516 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.8 on 2016-09-08 16:54%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('judge', '0045_organization_access_code'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='blogpost',%0A name='authors',%0A field=models.ManyToManyField(blank=True, help_text='', to='judge.Profile', verbose_name='authors'),%0A ),%0A %5D%0A
|
|
e5931a5837b1574681757e2c6fc7260122b48746
|
Add minify util
|
web-app/js/ofm/scripts/utils/minify.py
|
web-app/js/ofm/scripts/utils/minify.py
|
Python
| 0.002208 |
@@ -0,0 +1,2720 @@
+#!/usr/bin/python2.6%0A%0A# Minify Filemanager javascript files%0A# Usage : $ python ./utils/minify.py%0A%0Aclass bcolors:%0A HEADER = '%5C033%5B95m'%0A OKBLUE = '%5C033%5B94m'%0A OKGREEN = '%5C033%5B92m'%0A WARNING = '%5C033%5B93m'%0A FAIL = '%5C033%5B91m'%0A ENDC = '%5C033%5B0m'%0A%0A def disable(self):%0A self.HEADER = ''%0A self.OKBLUE = ''%0A self.OKGREEN = ''%0A self.WARNING = ''%0A self.FAIL = ''%0A self.ENDC = ''%0A%0Aimport httplib, urllib, sys, os%0A%0A%0AfmRootFolder = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) + %22/%22%0A%0Aos.chdir(fmRootFolder) # set working directory%0A%0AtoMinify = %5B%22filemanager.js%22%5D%0A%0Aprint bcolors.HEADER + %22-------------------------------------%22 + bcolors.ENDC%0A%0A# we loop on JS languages files%0Afor index, item in enumerate(toMinify):%0A%09# print index, item%0A%0A%09dir = os.path.dirname(item)%0A%09file = os.path.basename(item)%0A%0A%09with open (fmRootFolder + item, %22r%22) as myfile:%0A%09 js_input=myfile.read()%0A%0A%09 # Define the parameters for the POST request and encode them in%0A%09 # a URL-safe format.%0A%0A%09 params = urllib.urlencode(%5B%0A%09 ('js_code', js_input),%0A%09 # ('compilation_level', 'WHITESPACE_ONLY'),%0A%09 ('compilation_level', 'SIMPLE_OPTIMIZATIONS'),%0A%09 ('output_format', 'text'),%0A%09 ('output_info', 'compiled_code'),%0A%09 %5D)%0A%0A%09 params2 = urllib.urlencode(%5B%0A%09 ('js_code', js_input),%0A%09 # ('compilation_level', 'WHITESPACE_ONLY'),%0A%09 ('compilation_level', 'SIMPLE_OPTIMIZATIONS'),%0A%09 ('output_format', 'text'),%0A%09 ('output_info', 'errors'),%0A%09 %5D)%0A%0A%09 # Always use the following value for the Content-type header.%0A%09 headers = %7B %22Content-type%22: %22application/x-www-form-urlencoded%22 %7D%0A%09 conn = httplib.HTTPConnection('closure-compiler.appspot.com')%0A%09 conn.request('POST', '/compile', params, headers)%0A%09 response = conn.getresponse()%0A%09 data = response.read()%0A%0A%09 # we write the minified file - os.path.splitext(file)%5B0%5D return filename without extension%0A%09 with open(fmRootFolder + dir + '/' + os.path.splitext(file)%5B0%5D + %22.min.js%22, %22w%22) as text_file:%0A%09 text_file.write(data)%0A%0A%09 # We retrieve errors%0A%09 conn.request('POST', '/compile', params2, headers)%0A%09 response = conn.getresponse()%0A%09 errors = response.read()%0A%0A%0A%09 if errors == %22%22:%0A%09 %09 print bcolors.OKBLUE + file + %22 has been minified. No error found.%22%0A%09 else:%0A%09 %09 print bcolors.FAIL + file + %22 : the code contains errors : %22%0A%09 %09 print %22%22%0A%09 %09 print errors + bcolors.ENDC%0A%0A%09 conn.close()%0A%0Aprint bcolors.HEADER + %22-------------------------------------%22 + bcolors.ENDC%0A
|
|
348ffbf16fcb67768d72bd18167e6c70c99a27a1
|
Add Homodyne node
|
gpi/Homodyne_GPI.py
|
gpi/Homodyne_GPI.py
|
Python
| 0.000022 |
@@ -0,0 +1,1515 @@
+# Author: Ashley Anderson III %[email protected]%3E%0A# Date: 2015-10-10 21:13%0A# Copyright (c) 2015 Dignity Health%0A%0Afrom __future__ import absolute_import, division, print_function, unicode_literals%0A%0Aimport os%0A%0A# gpi, future%0Aimport gpi%0Afrom bart.gpi.borg import IFilePath, OFilePath, Command%0A%0A# bart%0Aimport bart%0Abase_path = bart.__path__%5B0%5D # library base for executables%0Aimport bart.python.cfl as cfl%0A%0Aclass ExternalNode(gpi.NodeAPI):%0A '''Usage: homodyne dim fraction %3Cinput%3E %3Coutput%3E%0A%0A Perform homodyne reconstruction along dimension dim.%0A '''%0A def initUI(self):%0A # Widgets%0A self.addWidget('SpinBox', 'dim', min=0)%0A self.addWidget('DoubleSpinBox', 'fraction', min=0.5, max=1.0,%0A decimals=3, singlestep=0.01)%0A%0A # IO Ports%0A self.addInPort('kspace', 'NPYarray')%0A%0A self.addOutPort('out', 'NPYarray')%0A%0A return 0%0A%0A def compute(self):%0A kspace = self.getData('kspace')%0A%0A # load up arguments list%0A args = %5Bbase_path+'/homodyne'%5D%0A%0A args += %5Bstr(self.getVal('dim'))%5D%0A args += %5Bstr(self.getVal('fraction'))%5D%0A%0A # setup file for passing data to external command%0A in1 = IFilePath(cfl.writecfl, kspace, asuffix=%5B'.cfl','.hdr'%5D)%0A args += %5Bin1%5D%0A%0A out1 = OFilePath(cfl.readcfl, asuffix=%5B'.cfl','.hdr'%5D)%0A args += %5Bout1%5D%0A%0A # run commandline%0A print(Command(*args))%0A%0A self.setData('out', out1.data())%0A%0A in1.close()%0A out1.close()%0A%0A return 0%0A
|
|
1a7fa8080d19909ccf8e8e89aa19c92c1413f1c1
|
Add script to submite jobs again
|
apps/pyjob_submite_jobs_again.py
|
apps/pyjob_submite_jobs_again.py
|
Python
| 0 |
@@ -0,0 +1,1324 @@
+#!/usr/bin/env python3%0A%0Aimport os%0Aimport sys%0Aimport subprocess%0A%0Aright_inputs = False%0Aif len(sys.argv) %3E 2 :%0A tp = sys.argv%5B1%5D%0A rms = %5Bint(x) for x in sys.argv%5B2:%5D%5D%0A if tp in %5B'ma', 'ex', 'xy'%5D: right_inputs = True%0A%0Acurdir = os.getcwd()%0Aif right_inputs:%0A if curdir.endswith('trackcpp'):%0A flatfile = 'flatfile.txt'%0A input_file = 'input_' + tp.lower() + '.py'%0A exec_file = 'runjob_' + tp.lower() + '.sh'%0A dirs = curdir.split(os.sep)%0A label = '-'.join(dirs%5B-5:%5D) + '-submitting_again.'%0A for m in rms:%0A mlabel = 'rms%2502i'%25m%0A os.chdir(os.path.join(curdir, mlabel))%0A files = os.listdir(os.getcwd())%0A kicktable_files = ','.join(%5Bf for f in files if f.endswith('_kicktable.txt')%5D)%0A if len(kicktable_files) != 0:%0A inputs = ','.join(%5Bkicktable_files, flatfile,input_file%5D)%0A else:%0A inputs = ','.join(%5Bflatfile,input_file%5D)%0A description = ': '.join(%5Bmlabel, tp.upper(), label%5D)%0A p = subprocess.Popen(%5B'pyjob_qsub.py', '--inputFiles', inputs, '--exec', exec_file, '--description', description%5D)%0A p.wait()%0A os.chdir(curdir)%0A else:%0A print('Change the current working directory to trackcpp directory.')%0Aelse:%0A print('Invalid inputs')%0A
|
|
1bf7439c67e2206acb0c6d285014261eeb18097f
|
Add coverage as single execution
|
coverage.py
|
coverage.py
|
Python
| 0.000009 |
@@ -0,0 +1,162 @@
+from app import initialization%0Afrom app.configuration import add%0Afrom app.check import *%0A%0Ainitialization.run()%0Aadd('phpunit-coverage', 'true')%0A%0Aphpunit.execute()%0A
|
|
884ae74bb75e5a0c60da74791a2e6fad9e4b83e5
|
Add py solution for 436. Find Right Interval
|
py/find-right-interval.py
|
py/find-right-interval.py
|
Python
| 0.998967 |
@@ -0,0 +1,873 @@
+from operator import itemgetter%0A# Definition for an interval.%0A# class Interval(object):%0A# def __init__(self, s=0, e=0):%0A# self.start = s%0A# self.end = e%0A%0Aclass Solution(object):%0A def findRightInterval(self, intervals):%0A %22%22%22%0A :type intervals: List%5BInterval%5D%0A :rtype: List%5Bint%5D%0A %22%22%22%0A sorted_itv = map(itemgetter(1, 2), sorted((x.start, i, x) for i, x in enumerate(intervals)))%0A size = len(intervals)%0A ans = %5B%5D%0A for itv in intervals:%0A L, U = -1, size%0A while L + 1 %3C U:%0A mid = (L + U) / 2%0A if sorted_itv%5Bmid%5D%5B1%5D.start %3E= itv.end:%0A U = mid%0A else:%0A L = mid%0A if U == size:%0A ans.append(-1)%0A else:%0A ans.append(sorted_itv%5BU%5D%5B0%5D)%0A return ans%0A
|
|
07f8fd56ab366a2d1365278c3310ade4b1d30c57
|
Add functional test for version negotiation
|
heat_integrationtests/functional/test_versionnegotiation.py
|
heat_integrationtests/functional/test_versionnegotiation.py
|
Python
| 0.000058 |
@@ -0,0 +1,1427 @@
+# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0A%0Aimport requests%0A%0Afrom heat_integrationtests.functional import functional_base%0A%0Aexpected_version_dict = %7B%0A %22versions%22: %5B%0A %7B%22links%22: %5B%7B%22href%22: None, %22rel%22: %22self%22%7D%5D,%0A %22status%22: %22CURRENT%22, %22id%22: %22v1.0%22%7D%0A %5D%0A%7D%0A%0A%0Aclass VersionNegotiationTestCase(functional_base.FunctionalTestsBase):%0A%0A def test_authless_version_negotiation(self):%0A # NOTE(pas-ha): this will grab the public endpoint by default%0A heat_url = self.identity_client.get_endpoint_url(%0A 'orchestration', region=self.conf.region)%0A heat_api_root = heat_url.split('/v1')%5B0%5D%0A expected_version_dict%5B%0A 'versions'%5D%5B0%5D%5B'links'%5D%5B0%5D%5B'href'%5D = heat_api_root + '/v1/'%0A r = requests.get(heat_api_root)%0A self.assertEqual(300, r.status_code, 'got response %25s' %25 r.text)%0A self.assertEqual(expected_version_dict, r.json())%0A
|
|
4b8f7a4c97668b4dbd8634d6b01e30b71737c3bd
|
fix send_HTML_email to work when no email_from argument is supplied
|
dimagi/utils/django/email.py
|
dimagi/utils/django/email.py
|
from django.conf import settings
from django.core.mail import get_connection
from django.core.mail.message import EmailMultiAlternatives

NO_HTML_EMAIL_MESSAGE = """
Your email client is trying to display the plaintext version of an email that
is only supported in HTML. Please set your email client to display this message
in HTML, or use an email client that supports HTML emails.
"""


def send_HTML_email(subject, recipient, html_content, text_content=None, cc=None, email_from=None, file_attachments=None):
    if not text_content:
        text_content = getattr(settings, 'NO_HTML_EMAIL_MESSAGE',
                               NO_HTML_EMAIL_MESSAGE)

    # If you get the return_path header wrong, this may impede mail delivery. It appears that the SMTP server
    # has to recognize the return_path as being valid for the sending host. If we set it to, say, our SMTP
    # server, this will always be the case (as the server is explicitly serving the host).
    if email_from is None:
        # todo: verify that this is even necessary here since it seems like email_return_path == email_from
        email_return_path = getattr(settings, 'EMAIL_RETURN_PATH', None)
        if email_return_path is None:
            email_return_path = settings.EMAIL_LOGIN

        email_from = getattr(settings, 'EMAIL_FROM', None)
        if email_from is None:
            email_from = email_return_path
    else:
        email_return_path = email_from

    from_header = {'From': email_from}  # From-header
    connection = get_connection()
    msg = EmailMultiAlternatives(subject, text_content, email_return_path, [recipient], headers=from_header, connection=connection, cc=cc)
    for file in (file_attachments or []):
        if file:
            msg.attach(file["title"], file["file_obj"].getvalue(), file["mimetype"])
    msg.attach_alternative(html_content, "text/html")
    msg.send()
|
Python
| 0 |
@@ -381,16 +381,17 @@
s.%0A%22%22%22%0A%0A
+%0A
def send
@@ -454,16 +454,36 @@
nt=None,
+%0A
cc=None
@@ -495,21 +495,64 @@
il_from=
-None,
+settings.DEFAULT_FROM_EMAIL,%0A
file_at
@@ -718,798 +718,8 @@
E)%0A%0A
- # If you get the return_path header wrong, this may impede mail delivery. It appears that the SMTP server%0A # has to recognize the return_path as being valid for the sending host. If we set it to, say, our SMTP%0A # server, this will always be the case (as the server is explicitly serving the host).%0A if email_from is None:%0A #todo: verify that this is even necessary here since it seems like email_return_path == email_from%0A email_return_path = getattr(settings, 'EMAIL_RETURN_PATH', None)%0A if email_return_path is None:%0A email_return_path = settings.EMAIL_LOGIN%0A%0A email_from = getattr(settings, 'EMAIL_FROM', None)%0A if email_from is None:%0A email_from = email_return_path%0A else:%0A email_return_path = email_from%0A%0A
@@ -868,20 +868,46 @@
ail_
-return_path,
+from,%0A
%5Bre
@@ -936,16 +936,49 @@
_header,
+%0A
connect
@@ -1125,16 +1125,39 @@
value(),
+%0A
file%5B%22m
@@ -1235,8 +1235,9 @@
g.send()
+%0A
|
f7c4f8d43b30dfee36d4ff46e9133194a15b3e81
|
Add tests for __unicode__ functions in model. (#1026)
|
tests/unit/accounts/test_models.py
|
tests/unit/accounts/test_models.py
|
Python
| 0 |
@@ -0,0 +1,940 @@
+from django.contrib.auth.models import User%0Afrom django.test import TestCase%0A%0Afrom accounts.models import Profile, UserStatus%0A%0A%0Aclass BaseTestCase(TestCase):%0A%0A def setUp(self):%0A self.user = User.objects.create(%0A username='user',%0A email='[email protected]',%0A password='password')%0A%0A%0Aclass UserStatusTestCase(BaseTestCase):%0A%0A def setUp(self):%0A super(UserStatusTestCase, self).setUp()%0A self.user_status = UserStatus.objects.create(%0A name='user',%0A status=UserStatus.UNKNOWN,%0A )%0A%0A def test__str__(self):%0A self.assertEqual(self.user_status.name, self.user_status.__str__())%0A%0A%0Aclass ProfileTestCase(BaseTestCase):%0A%0A def setUp(self):%0A super(ProfileTestCase, self).setUp()%0A self.profile = Profile.objects.get(user=self.user)%0A%0A def test__str__(self):%0A self.assertEqual('%7B%7D'.format(self.profile.user), self.profile.__str__())%0A
|
|
3e5b98c1a79f625fbf9f54af782e459de7fa5b1f
|
update migration with new filename and parent migration name
|
accelerator/migrations/0052_cleanup_twitter_urls.py
|
accelerator/migrations/0052_cleanup_twitter_urls.py
|
Python
| 0 |
@@ -0,0 +1,1054 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0Afrom accelerator.twitter_handle_cleanup import (%0A clean_entrepreneur_profile_twitter_handles,%0A clean_expert_profile_twitter_handles,%0A clean_organization_twitter_handles%0A)%0A%0A%0Adef clean_up_twitter_handles(apps, schema_editor):%0A Organization = apps.get_model('accelerator', 'Organization')%0A ExpertProfile = apps.get_model('accelerator', 'ExpertProfile')%0A EntrepreneurProfile = apps.get_model(%0A 'accelerator',%0A 'EntrepreneurProfile')%0A%0A clean_entrepreneur_profile_twitter_handles(EntrepreneurProfile)%0A clean_expert_profile_twitter_handles(ExpertProfile)%0A clean_organization_twitter_handles(Organization)%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A (%0A 'accelerator',%0A '0051_add_register_for_events_to_event_subnav_items'%0A ),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(%0A clean_up_twitter_handles,%0A migrations.RunPython.noop),%0A %5D%0A
|
|
c97f648a012c38802d9637d4c573a4ca9c8e1633
|
Create encoder.py
|
additional/customencoder/encoder.py
|
additional/customencoder/encoder.py
|
Python
| 0.000004 |
@@ -0,0 +1,754 @@
+#!/usr/bin/python%0A%0A#below is the shellcode for /bin/sh using execve sys call%0Ashellcode = (%22%5Cx31%5Cxc0%5Cx50%5Cx68%5Cx2f%5Cx2f%5Cx73%5Cx68%5Cx68%5Cx2f%5Cx62%5Cx69%5Cx6e%5Cx89%5Cxe3%5Cx50%5Cx89%5Cxe2%5Cx53%5Cx89%5Cxe1%5Cxb0%5Cx0b%5Cxcd%5Cx80%22)%0A%0At=%5B%5D%0Aw=%5B%5D%0Az=%5B%5D%0A%0Aror = lambda val, r_bits, max_bits: %5C%0A ((val & (2**max_bits-1)) %3E%3E r_bits%25max_bits) %7C %5C%0A (val %3C%3C (max_bits-(r_bits%25max_bits)) & (2**max_bits-1))%0A %0A%0A%0Afor i in range(0, len(shellcode)):%0A%09s = ord(shellcode%5Bi%5D)%0A%09y = ror(s,2,8)%0A%09b = y+1%0A%09w.append(s)%0A%09t.append(y)%0A%09z.append(b)%0A%0A%09%0Aprint %22length %25d%22 %25len(t)%0A%0Aprint %22%5B+%5D Original shellcode..:%22, (%22, %22.join(hex(c) for c in w%5B0::%5D))%0Aprint %22%5B+%5D ROR shellcode..:%22, (%22, %22.join(hex(c) for c in t%5B0::%5D))%0Aprint %22%5B+%5D ROR shellcode after adding 1 to each byte ..:%22, (%22, %22.join(hex(c) for c in z%5B0::%5D))%0A%0A %0A%0A
|
|
fb004f72c27b49ba9661e6a83b8f49be39757d22
|
add changemath shell
|
math_change.py
|
math_change.py
|
Python
| 0.000009 |
@@ -0,0 +1,833 @@
+import sys%0A%0Afilename = './Deterministic Policy Gradient Algorithms%E7%AC%94%E8%AE%B0.md'%0Aoutname = ''%0A%0Adef change(filename, outname):%0A f = open(filename, encoding='utf8')%0A data = f.readlines()%0A f.close()%0A %0A out = ''%0A doublenum = 0%0A for line in data:%0A if line=='$$%5Cn':%0A doublenum += 1%0A if doublenum %25 2 == 0:%0A out += '$$%5Cn%5Cn'%0A else:%0A out += '%5Cn$$%5Cn'%0A elif '$' in line:%0A out += line.replace('$','%5Cn$$%5Cn').replace('$$$$','$$')%0A else:%0A out += line%0A with open(outname, 'w', encoding='utf8') as f:%0A f.write(out)%0A %0A%0Aif __name__=='__main__':%0A arglen = len(sys.argv) - 1%0A if arglen == 2:%0A change(*sys.argv%5B1:%5D)%0A if arglen == 1:%0A filename = sys.argv%5B1%5D%0A change(filename, filename)%0A
|
|
e042fc9bf4768f5dec1ef7a331e73e507e1c3b06
|
set correct exit status from 'yotta target'
|
yotta/target.py
|
yotta/target.py
|
# Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.

# standard library modules, , ,
from __future__ import print_function
import re
import logging
import os

# colorama, BSD 3-Clause license, cross-platform terminal colours, pip install colorama
import colorama

# settings, , load and save settings, internal
from .lib import settings
# Target, , represents an installed target, internal
from .lib import target
# Component, , represents an installed component, internal
from .lib import component
# fsutils, , misc filesystem utils, internal
from .lib import fsutils

# OK this is a pretty terrible validation regex... should find a proper module
# to do this
Target_RE = re.compile('^(' +
    '[a-z0-9-]+,?(' +
        '[a-zA-Z0-9-]+/[a-zA-Z0-9-]+' + '|' + '([a-zA-Z0-9_-]*@)?[a-zA-Z0-9_+-]+://.*' + '|' + '[a-z0-9.-]*' +
    ')?' +
')$')


def addOptions(parser):
    parser.add_argument('set_target', default=None, nargs='?',
        help='set the build target to this (targetname[,versionspec_or_url])'
    )
    parser.add_argument('-g', '--global', dest='save_global',
        default=False, action='store_true',
        help='set globally (in the per-user settings) instead of locally to this directory'
    )
    # FIXME: need help that lists possible targets, and we need a walkthrough
    # guide to forking a new target for an existing board
    #
    # (the description of a target should have a list of things that it's
    # similar to, e.g. objectador is similar to EFM32gg990f, # EFM32gg,
    # Cortex-M3, ARMv8, ARM)


def displayCurrentTarget(args):
    if not args.plain:
        DIM = colorama.Style.DIM
        BRIGHT = colorama.Style.BRIGHT
        GREEN = colorama.Fore.GREEN
        RED = colorama.Fore.RED
        RESET = colorama.Style.RESET_ALL
    else:
        DIM = BRIGHT = GREEN = RED = RESET = u''

    line = u''

    derived_target, errors = target.getDerivedTarget(args.target, component.Component(os.getcwd()).targetsPath(), install_missing=False)
    for error in errors:
        logger.error(error)

    if derived_target is None:
        line = BRIGHT + RED + args.target + u' missing' + RESET
    else:
        for t in derived_target.hierarchy:
            if len(line):
                line += '\n'
            if t:
                line += t.getName() + DIM + u' ' + str(t.getVersion()) + RESET
                if t.installedLinked():
                    line += GREEN + BRIGHT + u' -> ' + RESET + GREEN + fsutils.realpath(t.path)
            else:
                line += BRIGHT + RED + t.getName() + DIM + u' ' + str(t.getVersion()) + BRIGHT + u' missing'
            line += RESET
        base_spec = t.baseTargetSpec()
        if base_spec:
            # if the last target in the hierarchy has a base spec, then the
            # hierarchy is incomplete:
            line += '\n' + BRIGHT + RED + base_spec.name + u' ' + base_spec.version_req + u' missing'

    if u'unicode' in str(type(line)):
        # python 2.7
        print(line.encode('utf-8'))
    else:
        print(line)


def execCommand(args, following_args):
    if args.set_target is None:
        displayCurrentTarget(args)
    else:
        if not Target_RE.match(args.set_target):
            logging.error('''Invalid target: "%s"''' % args.set_target)#, targets must be one of:
            #
            # a valid name (lowercase letters, numbers, and hyphen)
            # a github ref (owner/project)
            # a valid url
            #
            # Note that to use a local directory as a target you can use
            #
            #   # in the directory containing the target package:
            #   yotta link target
            #
            #   # then in the directory of the application to use the target:
            #   yotta link target {targetname}
            #   yotta target {targetname}
            #
            #''')
        else:
            if args.set_target.find(',') == -1:
                t = args.set_target + ',*'
            else:
                t = args.set_target
            settings.setProperty('build', 'target', t, not args.save_global)
|
Python
| 0 |
@@ -3099,16 +3099,38 @@
t(line)%0A
+ return len(errors)
%0A%0A%0Adef e
@@ -3203,16 +3203,23 @@
+return
displayC
@@ -3972,16 +3972,37 @@
#''')%0A
+ return 1%0A
@@ -4229,8 +4229,29 @@
global)%0A
+ return 0%0A
|
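With execCommand now returning a status instead of falling off the end, the dispatcher can propagate it directly — roughly (a sketch of the calling convention, not yotta's actual entry point):

# Hypothetical caller: argparse supplies args/following_args as in the file
# above, and the handler's return value becomes the process exit status.
import sys

status = execCommand(args, following_args)
sys.exit(status or 0)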
209314b65ee960d73ee81baad6b9ced4102d6c0b
|
Introduce GenericSparseDB() class
|
lib/generic_sparse_db.py
|
lib/generic_sparse_db.py
|
Python
| 0 |
@@ -0,0 +1,629 @@
+#!/usr/bin/env python%0A# -*- encoding: utf-8%0A%0Aimport gzip%0Aimport scipy.io as sio%0Afrom utils.utils import Utils%0A%0A%0Aclass GenericSparseDB(Utils):%0A%0A def init(self):%0A self.data = sio.mmread(gzip.open(self._matrix_fn)).tolil()%0A self.factors = self._load_pickle(self._factors_fn)%0A self.fac_len = len(self.factors)%0A self.col_names = self.factors + self._load_pickle(self._colnames_fn)%0A assert self.data.shape%5B1%5D == len(self.col_names),%5C%0A 'Mismatch between the number of columns: %25s - %25s.'%5C%0A %25 (self.data.shape%5B1%5D, len(self.col_names))%0A%0A def reset(self):%0A self._init()%0A
|
|
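GenericSparseDB is a template base class: subclasses supply the three path attributes it reads (the paths and the Utils._load_pickle helper here are assumptions from context):

class ExampleSparseDB(GenericSparseDB):
    # Hypothetical file locations; the base class only needs these attributes.
    _matrix_fn = 'data/matrix.mtx.gz'    # gzipped MatrixMarket sparse matrix
    _factors_fn = 'data/factors.pkl'     # pickled list of factor names
    _colnames_fn = 'data/colnames.pkl'   # pickled list of remaining column names

db = ExampleSparseDB()
db.init()  # loads the matrix as LIL and checks column-count consistency

(As written, reset() calls self._init(), which the class itself does not define.)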
47dff2561be481ff067c22ed98d9ea6a9cf8ae10
|
Add test to execute notebooks
|
test/test_notebook.py
|
test/test_notebook.py
|
Python
| 0.000001 |
@@ -0,0 +1,675 @@
+import os%0Aimport glob%0Aimport contextlib%0Aimport subprocess%0A%0Aimport pytest%0A%0Anotebooks = list(glob.glob(%22*.ipynb%22, recursive=True))%0A%[email protected]%0Adef cleanup(notebook):%0A name, __ = os.path.splitext(notebook)%0A yield%0A%0A fname = name + %22.html%22%0A if os.path.isfile(fname):%0A os.remove(fname)%0A%0A%[email protected](%22notebook%22, notebooks)%0Adef test_notebook(notebook):%0A with cleanup(notebook):%0A # hack to execute the notebook from commandline%0A assert 0 == subprocess.call(%5B%22jupyter%22, %22nbconvert%22, %22--to=html%22,%0A %22--ExecutePreprocessor.enabled=True%22,%0A notebook%5D)%0A
|
|
ec033203d8e82258347eb4f6a6a83ef67bc9171c
|
Add expr tests
|
tests/test_Expr.py
|
tests/test_Expr.py
|
Python
| 0.000001 |
@@ -0,0 +1,493 @@
+#!/usr/bin/python3%0A%0Aimport pytest%0Aimport numpy as np%0A%0Adef test_nans_in_same_place(testCOB):%0A norm_expr = testCOB.expr(raw=False)%0A raw_expr = testCOB.expr(raw=True).ix%5Bnorm_expr.index,norm_expr.columns%5D%0A assert all(np.isnan(norm_expr) == np.isnan(raw_expr))%0A%0Adef test_inplace_nansort(testCOB):%0A x = np.random.rand(50000)%0A for i in np.random.randint(0,50000,500):%0A x%5Bi%5D = np.nan%0A sorted_x = testCOB.inplace_nansort(x)%0A assert all(np.isnan(x) == np.isnan(sorted_x))%0A%0A
|
|
bddb239b33743fc92450971070d264573de95f8d
|
Add reminder handler for Assessment
|
src/ggrc/notifications/notification_handlers.py
|
src/ggrc/notifications/notification_handlers.py
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
"""Notification handlers for object in the ggrc module.
This module contains all function needed for handling notification objects
needed by ggrc notifications.
"""
from datetime import datetime
from sqlalchemy import inspect
from sqlalchemy import and_
from ggrc import db
from ggrc.services.common import Resource
from ggrc.models import request
from ggrc.models import assessment
from ggrc.models import notification
def _add_notification(obj, notif_type, when=None):
"""Add notification for an object.
Args:
    obj (Model): an object for which we want to add a notification.
notif_type (NotificationType): type of notification that we want to store.
when (datetime): date and time when we want the notification to be sent.
default value is now.
"""
if not notif_type:
return
if not when:
when = datetime.now()
db.session.add(notification.Notification(
object_id=obj.id,
object_type=obj.type,
send_on=when,
notification_type=notif_type,
))
def _has_unsent_notifications(notif_type, obj):
"""Helper for searching unsent notifications.
Args:
    notif_type (NotificationType): type of the notifications we're looking
      for.
    obj (sqlalchemy model): Object for which we're looking for notifications.
  Returns:
True if there are any unsent notifications of notif_type for the given
object, and False otherwise.
"""
return db.session.query(notification.Notification).join(
notification.NotificationType).filter(and_(
notification.NotificationType.id == notif_type.id,
notification.Notification.object_id == obj.id,
notification.Notification.object_type == obj.type,
notification.Notification.sent_at.is_(None),
)).count() > 0
def _add_assignable_declined_notif(obj):
"""Add entries for assignable declined notifications.
Args:
obj (Assignable): Any object with assignable mixin for which we want to add
notifications.
"""
name = "{}_declined".format(obj._inflector.table_singular)
notif_type = notification.NotificationType.query.filter_by(name=name).first()
if not _has_unsent_notifications(notif_type, obj):
_add_notification(obj, notif_type)
def handle_assignable_modified(obj):
history = inspect(obj).attrs["status"].history
# The transition from "finished" to "in progress" only happens when a task is
# declined. So this is used as a triger for declined notifications.
if history.deleted == [u'Finished'] and history.added == [u'In Progress']:
_add_assignable_declined_notif(obj)
def handle_assignable_created(obj):
name = "{}_open".format(obj._inflector.table_singular)
notif_type = notification.NotificationType.query.filter_by(name=name).first()
_add_notification(obj, notif_type)
def handle_assignable_deleted(obj):
notification.Notification.query.filter_by(
object_id=obj.id,
object_type=obj.type,
).delete()
def register_handlers():
"""Register listeners for notification handlers"""
# Variables are used as listeners, and arguments are needed for callback
# functions.
# pylint: disable=unused-argument,unused-variable
@Resource.model_deleted.connect_via(request.Request)
@Resource.model_deleted.connect_via(assessment.Assessment)
def assignable_deleted_listener(sender, obj=None, src=None, service=None):
handle_assignable_deleted(obj)
@Resource.model_put.connect_via(request.Request)
@Resource.model_put.connect_via(assessment.Assessment)
def assignable_modified_listener(sender, obj=None, src=None, service=None):
handle_assignable_modified(obj)
@Resource.model_posted_after_commit.connect_via(request.Request)
@Resource.model_posted_after_commit.connect_via(assessment.Assessment)
def assignable_created_listener(sender, obj=None, src=None, service=None):
handle_assignable_created(obj)
|
Python
| 0 |
@@ -3167,24 +3167,284 @@
.delete()%0A%0A%0A
+def handle_reminder(obj, reminder_type):%0A if reminder_type in obj.REMINDERABLE_HANDLERS:%0A reminder_settings = obj.REMINDERABLE_HANDLERS%5Breminder_type%5D%0A handler = reminder_settings%5B'handler'%5D%0A data = reminder_settings%5B'data'%5D%0A handler(obj, data)%0A%0A%0A
def register
@@ -4349,16 +4349,263 @@
le_created(obj)%0A
+%0A @Resource.model_put.connect_via(assessment.Assessment)%0A def assessment_send_reminder(sender, obj=None, src=None, service=None):%0A reminder_type = src.get(%22reminderType%22, False)%0A if reminder_type:%0A handle_reminder(obj, reminder_type)%0A
|
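handle_reminder above dispatches through a REMINDERABLE_HANDLERS mapping on the model; a model opting in would carry something shaped like this (handler name, reminder type, and payload invented for illustration — the real ggrc definitions are not part of this record):

def notify_assignees(obj, data):
    # Hypothetical handler: queue a reminder for the object's assignees.
    print("reminding assignees of %s using %s" % (obj, data))

class Assessment(object):
    REMINDERABLE_HANDLERS = {
        "statusToPerson": {
            "handler": notify_assignees,
            "data": {"In Progress": "Assignee"},
        },
    }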
f3c4b7513c49189750ea15b36e561a4e5ed56214
|
add linear classification back
|
soccer/gameplay/evaluation/linear_classification.py
|
soccer/gameplay/evaluation/linear_classification.py
|
Python
| 0.000342 |
@@ -0,0 +1,1373 @@
+%0A# Classifies a feature into any number of classes%0A%0A# Linear classfication defined is%0A# y = f(x, w, b) where...%0A# x is a vector of input features of an object%0A# w is a vector of weights to apply to the features%0A# b is the bias of the feature-weight system%0A# f() is x dot w + b%0A# y is the final output score%0A%0A%0A# Classifies the object into two distinct class based on a cutoff value%0A# Anything less than the cutoff is of class false, greater than the cutoff is of class true%0A# %0A# @param input The vector of input features%0A# @param weights The vector of weights to apply to the input features%0A# @param bias The bias of the features-weight system%0A# @param cutoff The number which splits the output score of the object into two classes%0A# @param Returns tuple of the class (true or false) and the given score%0Adef binary_classification(input, weights, bias, cutoff):%0A score = linear_classification(input, weights, bias)%0A return (score %3C cutoff, score)%0A%0A# Returns the raw output score of the linear classifier based on the dot product%0A#%0A# @param input The vector of input features%0A# @param weights The vector of weights to apply to the input features%0A# @param bias The bias of the features-weight system%0Adef linear_classification(input, weights, bias):%0A # Element wise multiplication%0A out = map(lambda x, w: x * w, input, weights)%0A%0A return sum(out) + bias
|
|
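A quick worked example of the two functions in the diff above (numbers invented; note the patch labels an object True when its score falls below the cutoff):

features = [2.0, -1.0, 0.5]
weights = [0.4, 0.3, 1.2]
bias = 0.1

# score = 2.0*0.4 + (-1.0)*0.3 + 0.5*1.2 + 0.1 = 1.2
label, score = binary_classification(features, weights, bias, cutoff=0.0)
# label is False here, since 1.2 is not below the 0.0 cutoff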
14c20e35bcfc55cc3c12d94596079fc27a907f94
|
Add unit tests
|
tests/test_unit.py
|
tests/test_unit.py
|
Python
| 0.000001 |
@@ -0,0 +1,719 @@
+import unittest%0Afrom test_utilities import mapTestJsonFiles, mapJsonToYml, testYaml, getImmediateSubdirectories, unidiff_output%0A%0Aclass TestUnit(unittest.TestCase):%0A def test_input(self):%0A testDirectories = getImmediateSubdirectories('test_input')%0A for directory in testDirectories:%0A json = mapTestJsonFiles(directory)%0A ymlInput = mapJsonToYml(json)%5B'services'%5D%0A ymlOutput = testYaml(ymlInput, inputDirectoryName=directory)%0A try:%0A self.assertEqual(ymlInput, ymlOutput, msg='%7B%7D%5Cn%7B%7D'.format(directory,unidiff_output(ymlOutput, ymlInput)))%0A except Exception, e:%0A print(e)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
c2b082ebe95acc24f86fde9cd6875d7de3a9ca40
|
Set up test_user file
|
tests/test_user.py
|
tests/test_user.py
|
Python
| 0.000002 |
@@ -0,0 +1,539 @@
+import unittest%0Aimport settings%0Aimport requests_mock%0A%0Afrom util import register_uris%0Afrom pycanvas.user import User%0Afrom pycanvas.exceptions import ResourceDoesNotExist%0Afrom pycanvas import Canvas%0A%0A%0Aclass TestUser(unittest.TestCase):%0A %22%22%22%0A Tests core Account functionality%0A %22%22%22%0A @classmethod%0A def setUpClass(self):%0A requires = %7B%0A%0A %7D%0A%0A adapter = requests_mock.Adapter()%0A self.canvas = Canvas(settings.BASE_URL, settings.API_KEY, adapter)%0A register_uris(settings.BASE_URL, requires, adapter)
|
|
0b8d5794d2c5a1ae46659e02b65d1c21ffe8881d
|
Implement tests for temperature endpoint
|
babyonboard/api/tests/test_views.py
|
babyonboard/api/tests/test_views.py
|
Python
| 0.000002 |
@@ -0,0 +1,1610 @@
+import json%0Afrom rest_framework import status%0Afrom django.test import TestCase, Client%0Afrom django.urls import reverse%0Afrom ..models import Temperature%0Afrom ..serializers import TemperatureSerializer%0A%0A%0Aclient = Client()%0A%0A%0Aclass GetCurrentTemperatureTest(TestCase):%0A %22%22%22 Test class for GET current temperature from API %22%22%22%0A%0A def setUp(self):%0A Temperature.objects.create(temperature=35)%0A%0A def test_get_current_temperature(self):%0A response = client.get(reverse('temperature'))%0A temperature = Temperature.objects.order_by('date', 'time').last()%0A serializer = TemperatureSerializer(temperature)%0A self.assertEqual(response.data, serializer.data)%0A self.assertEqual(response.status_code, status.HTTP_200_OK)%0A%0A%0Aclass CreateNewTemperatureTest(TestCase):%0A %22%22%22 Test class for saving a new temperature registry %22%22%22%0A%0A def setUp(self):%0A self.valid_payload = %7B%0A 'temperature': 27.2%0A %7D%0A%0A self.invalid_payload = %7B%0A 'temperature': ''%0A %7D%0A%0A def test_creat_valid_temperature(self):%0A response = client.post(%0A reverse('temperature'),%0A data=json.dumps(self.valid_payload),%0A content_type='application/json'%0A )%0A self.assertEqual(response.status_code, status.HTTP_201_CREATED)%0A%0A def test_create_invalid_temperature(self):%0A response = client.post(%0A reverse('temperature'),%0A data=json.dumps(self.invalid_payload),%0A content_type='application/json'%0A )%0A self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)%0A
|
|
a9d458c0995db80f164f6099b5264f23c1ceffbb
|
Create 02.py
|
02/qu/02.py
|
02/qu/02.py
|
Python
| 0 |
@@ -0,0 +1,211 @@
+# Define a procedure, sum3, that takes three%0A# inputs, and returns the sum of the three%0A# input numbers.%0A%0Adef sum3(aa, bb, cc):%0A return aa + bb + cc%0A%0A#print sum3(1,2,3)%0A#%3E%3E%3E 6%0A%0A#print sum3(93,53,70)%0A#%3E%3E%3E 216%0A
|
|
5676828093ac5c768b0670c0441c15c353840ace
|
Use Markup for escape
|
barbican_api.py
|
barbican_api.py
|
# -*- coding: utf-8 -*-
"""
Barbican API
~~~~~~~~~~~~
The API for Barbican.
DO NOT USE THIS IN PRODUCTION. IT IS NOT SECURE IN ANY WAY.
YOU HAVE BEEN WARNED.
:copyright: (c) 2013 by Jarret Raim
:license: Apache 2.0, see LICENSE for details
"""
import uuid
import datetime
from dateutil.parser import parse
from flask import Blueprint, request, jsonify, Response, json
from models import Event, Tenant, Key, Agent, Policy
from database import db_session
api = Blueprint('api', __name__, url_prefix="/api")
@api.route('/')
def root():
return jsonify(hello='World')
@api.route('/<int:tenant_id>/', methods=['GET', 'POST'])
def tenant(tenant_id):
if request.method == 'POST':
tenant = Tenant.query.filter_by(id=tenant_id).first()
if tenant is None:
tenant = Tenant(id=tenant_id)
db_session.add(tenant)
db_session.commit()
return jsonify(tenant.as_dict()), 201
else:
return jsonify(tenant.as_dict())
else:
tenant = Tenant.query.filter_by(id=tenant_id).first()
if tenant is None:
return Response("No tenant found!", status=404)
else:
return jsonify(tenant.as_dict())
@api.route('/<int:tenant_id>/policies/', methods=['GET', 'POST'])
def policies(tenant_id):
if request.method == 'POST':
for policy in request.json['policies']:
keys = []
for k in policy['keys']:
key = Key(uuid=k['uuid'], filename=k['filename'], mime_type=k['mime_type'],
expiration=parse(k['expiration']), secret=k['secret'], owner=k['owner'],
group=k['group'], cacheable=k['cacheable'])
keys.append(key)
policy = Policy(uuid=policy['uuid'], name=policy['name'], tenant_id=tenant_id,
directory_name=policy['directory_name'],
max_key_accesses=policy['max_key_accesses'],
time_available_after_reboot=policy['time_available_after_reboot'])
policy.keys.extend(keys)
db_session.add(policy)
db_session.commit()
return Response(status=200)
else:
policy = Policy.query.filter_by(tenant_id=tenant_id).first()
if policy is None:
return Response('No policies defined for tenant', status=404)
return jsonify(policy.as_dict())
@api.route('/<int:tenant_id>/agents/', methods=['GET', 'POST'])
def agents(tenant_id):
if request.method == 'POST':
tenant = Tenant.query.get(tenant_id)
agent = Agent(tenant=tenant, uuid=request.json['uuid'])
db_session.add(agent)
db_session.commit()
return jsonify(agent.as_dict())
else:
agents = Agent.query.filter_by(tenant_id=tenant_id)
agents_dicts = map(Agent.as_dict, agents.all())
return Response(json.dumps(agents_dicts, cls=DateTimeJsonEncoder), mimetype='application/json')
@api.route('/<int:tenant_id>/logs/', methods=['GET', 'POST'])
def logs(tenant_id):
if request.method == 'POST':
agent_id = uuid.UUID(request.json['agent_id'])
received_on = parse(request.json['received_on'])
key_id = uuid.UUID(request.json['key_id'])
if request.json['severity'] in ['DEBUG', 'INFO', 'WARN', 'FATAL']:
severity = request.json['severity']
else:
severity = 'UNKNOWN'
# Load the key and tenant
tenant = Tenant.query.get(tenant_id)
key = Key.query.filter_by(uuid=str(key_id)).first()
ev = Event(tenant_id=tenant_id, agent_id=str(agent_id), received_on=received_on,
severity=severity, message=request.json['message'], tenant=tenant, key=key)
db_session.add(ev)
db_session.commit()
return Response(json.dumps(ev.as_dict(), cls=DateTimeJsonEncoder), mimetype='application/json')
else:
events = Event.query.filter_by(tenant_id=tenant_id).order_by(Event.received_on)
events_dicts = map(Event.as_dict, events.all())
return Response(json.dumps(events_dicts, cls=DateTimeJsonEncoder), mimetype='application/json')
@api.route('/alllogs/', methods=['GET'])
def alllogs(timestamp=None):
events = Event.query.order_by(Event.received_on)
helper = Helper()
json_str = '''{
"aaData":[
'''
for event in events.all():
json_str += '''["%s","%s","%s","%s","%s","%s", "%s"
],''' % (event.id,event.received_on, event.tenant_id, event.key_id, event.agent_id, event.severity, helper.html_escape(event.message))
json_str = json_str[:-1]
json_str += ''']
}'''
return Response(json_str, mimetype='application/json')
class DateTimeJsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
else:
return super(DateTimeJsonEncoder, self).default(obj)
class Helper:
def __init__(self):
self.html_escape_table = {
"&": "&",
'"': """,
"'": "'",
">": ">",
"<": "<",
}
def html_escape(self,text):
return "".join(self.html_escape_table.get(c,c) for c in text)
|
Python
| 0 |
@@ -391,16 +391,24 @@
se, json
+, Markup
%0Afrom mo
@@ -486,16 +486,17 @@
ession%0A%0A
+%0A
api = Bl
@@ -4355,30 +4355,8 @@
on)%0A
- helper = Helper()%0A
@@ -4371,16 +4371,16 @@
= '''%7B%0A
+
%09%09 %09%22aaD
@@ -4611,20 +4611,15 @@
ty,
-helper.html_
+Markup.
esca
@@ -4993,302 +4993,4 @@
j)%0A%0A
-class Helper:%0A def __init__(self):%0A self.html_escape_table = %7B%0A %22&%22: %22&amp;%22,%0A '%22': %22&quot;%22,%0A %22'%22: %22&apos;%22,%0A %22%3E%22: %22&gt;%22,%0A %22%3C%22: %22&lt;%22,%0A %7D%0A %0A def html_escape(self,text):%0A return %22%22.join(self.html_escape_table.get(c,c) for c in text)%0A
|
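Markup.escape from markupsafe (re-exported by Flask, hence the changed import) covers the same five characters the hand-rolled table did, so the Helper class can go:

from markupsafe import Markup

Markup.escape('<b>"AT&T"</b>')
# -> Markup('&lt;b&gt;&#34;AT&amp;T&#34;&lt;/b&gt;')
# (quotes become numeric character references rather than &quot;/&apos;)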
9a705f58acbcfb2cc7292cb396544f1f8c9b89a1
|
Add basic web test
|
tests/baseweb_test.py
|
tests/baseweb_test.py
|
Python
| 0 |
@@ -0,0 +1,940 @@
+from __future__ import with_statement%0A%0Afrom ass2m.ass2m import Ass2m%0Afrom ass2m.server import Server%0A%0Afrom unittest import TestCase%0Afrom webtest import TestApp%0A%0Afrom tempfile import mkdtemp%0Aimport os.path%0Aimport shutil%0A%0Aclass BaseWebTest(TestCase):%0A def setUp(self):%0A self.root = mkdtemp(prefix='ass2m_test_root')%0A ass2m = Ass2m(self.root)%0A ass2m.create(self.root)%0A server = Server(self.root)%0A self.app = TestApp(server.process)%0A%0A def tearDown(self):%0A if self.root:%0A shutil.rmtree(self.root)%0A%0A def test_listAndDownload(self):%0A res = self.app.get(%22/%22)%0A assert %22%3Ch1%3EIndex of /%3C/h1%3E%22 in res.body%0A%0A with file(os.path.join(self.root, %22penguins_are_cute%22), 'a') as f:%0A f.write(%22HELLO%22)%0A%0A res = self.app.get(%22/%22)%0A assert %22penguins_are_cute%22 in res.body%0A%0A res = self.app.get(%22/penguins_are_cute%22)%0A assert %22HELLO%22 == res.body%0A%0A
|
|
56eba00d00e450b5dc8fae7ea8475d418b00e2db
|
Add problem69.py
|
euler_python/problem69.py
|
euler_python/problem69.py
|
Python
| 0.000241 |
@@ -0,0 +1,1432 @@
+%22%22%22%0Aproblem69.py%0A%0AEuler's Totient function, %CF%86(n) %5Bsometimes called the phi function%5D, is used to%0Adetermine the number of numbers less than n which are relatively prime to n. For%0Aexample, as 1, 2, 4, 5, 7, and 8, are all less than nine and relatively prime to%0Anine, %CF%86(9)=6.%0A%0AIt can be seen that n=6 produces a maximum n/%CF%86(n) for n %E2%89%A4 10. Find the value of%0An %E2%89%A4 1,000,000 for which n/%CF%86(n) is a maximum.%0A%22%22%22%0Afrom itertools import takewhile%0Afrom toolset import get_primes%0A%0A# def phi(n):%0A# ps = list(unique(prime_factors(n)))%0A# return n * reduce(operator.mul, (1 - Fraction(1, p) for p in ps))%0A# return max((n for n in range(2, 1000000+1)), key=lambda n: n/phi(n))%0A#%0A# The commented-out solution above is correct and true to the problem%0A# description, but slightly slower than 1 minute.%0A#%0A# So, note that the phi function multiplies n by (1 - (1/p)) for every p in%0A# its unique prime factors. Therefore, phi(n) will diminish as n has a%0A# greater number of small unique prime factors. Since we are seeking the%0A# largest value for n/phi(n), we want to minimize phi(n). We are therefore%0A# looking for the largest number %3C 1e6 which is the product of the smallest%0A# unique prime factors, i.e successive prime numbers starting from 2.%0A%0Adef candidates():%0A primes = get_primes()%0A x = next(primes)%0A while True:%0A yield x%0A x *= next(primes)%0A%0Adef problem69():%0A return max(takewhile(lambda x: x %3C 1e6, candidates()))%0A
|
|
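candidates() yields the primorials 2, 6, 30, 210, 2310, 30030, 510510, ...; the answer is the last one below 1e6, namely 510510. The problem statement's small case can be checked directly with a brute-force phi (a sketch, not part of the solution file):

from math import gcd

def phi_brute(n):
    # O(n) totient; fine for sanity checks on small n.
    return sum(1 for k in range(1, n) if gcd(n, k) == 1)

# n = 6 maximizes n/phi(n) for n <= 10, matching the problem statement.
assert max(range(2, 11), key=lambda n: n / phi_brute(n)) == 6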
47bdc98a7fb8c030f5beb09ec9bb1b83c100dc9a
|
Add missing migration
|
src/users/migrations/0008_auto_20160222_0553.py
|
src/users/migrations/0008_auto_20160222_0553.py
|
Python
| 0.0002 |
@@ -0,0 +1,1140 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9 on 2016-02-22 05:53%0Afrom __future__ import unicode_literals%0A%0Aimport django.core.validators%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('users', '0007_auto_20160122_1333'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='user',%0A name='github_id',%0A field=models.CharField(blank=True, help_text='Your GitHub account, without the %22@%22 sign. This will be shown when we display your public information.', max_length=100, validators=%5Bdjango.core.validators.RegexValidator('%5E%5B0-9a-zA-Z_-%5D*$', 'Not a valid GitHub account')%5D, verbose_name='GitHub'),%0A ),%0A migrations.AlterField(%0A model_name='user',%0A name='twitter_id',%0A field=models.CharField(blank=True, help_text='Your Twitter handle, without the %22@%22 sign. This will be shown when we display your public information.', max_length=100, validators=%5Bdjango.core.validators.RegexValidator('%5E%5B0-9a-zA-Z_%5D*$', 'Not a valid Twitter handle')%5D, verbose_name='Twitter'),%0A ),%0A %5D%0A
|
|
f0cd785688ed04821f0338021e2360b98bd9dd58
|
add very simple perf test
|
conform/perf.py
|
conform/perf.py
|
Python
| 0.000001 |
@@ -0,0 +1,330 @@
+#!/usr/bin/env python%0A%0Aimport sys%0Aimport barrister%0A%0Atrans = barrister.HttpTransport(%22http://localhost:9233/%22)%0Aclient = barrister.Client(trans, validate_request=False)%0A%0Anum = int(sys.argv%5B1%5D)%0A%0As = %22safasdfasdlfasjdflkasjdflaskjdflaskdjflasdjflaskdfjalsdkfjasldkfjasldkasdlkasjfasld%22%0A%0Afor i in range(num):%0A client.B.echo(s)%0A
|
|
e88a6a634f600a5ef3ae269fc0d49bcd1e1d58e8
|
Revert "More accurate info in examples."
|
examples/svm/plot_iris.py
|
examples/svm/plot_iris.py
|
"""
==================================================
Plot different SVM classifiers in the iris dataset
==================================================
Comparison of different linear SVM classifiers on the iris dataset. It
will plot the decision surface for four different SVM classifiers.
"""
import numpy as np
import pylab as pl
from scikits.learn import svm, datasets
# import some data to play with
iris = datasets.load_iris()
X = iris.data[:, :2] # we only take the first two features. We could
# avoid this ugly slicing by using a two-dim dataset
Y = iris.target
h=.02 # step size in the mesh
# we create an instance of SVM and fit out data. We do not scale our
# data since we want to plot the support vectors
svc = svm.SVC(kernel='linear').fit(X, Y)
rbf_svc = svm.SVC(kernel='poly').fit(X, Y)
nu_svc = svm.NuSVC(kernel='linear').fit(X,Y)
lin_svc = svm.LinearSVC().fit(X, Y)
# create a mesh to plot in
x_min, x_max = X[:,0].min()-1, X[:,0].max()+1
y_min, y_max = X[:,1].min()-1, X[:,1].max()+1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
# title for the plots
titles = ['SVC with linear kernel',
'SVC with polynomial (degree 3) kernel',
'NuSVC with linear kernel',
'LinearSVC (linear kernel)']
pl.set_cmap(pl.cm.Paired)
for i, clf in enumerate((svc, rbf_svc, nu_svc, lin_svc)):
    # Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, m_max]x[y_min, y_max].
pl.subplot(2, 2, i+1)
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
pl.set_cmap(pl.cm.Paired)
pl.contourf(xx, yy, Z)
pl.axis('tight')
# Plot also the training points
pl.scatter(X[:,0], X[:,1], c=Y)
pl.title(titles[i])
pl.axis('tight')
pl.show()
|
Python
| 0 |
@@ -257,40 +257,29 @@
ace
-for four different SVM classifie
+and the support vecto
rs.%0A
@@ -1844,16 +1844,20 @@
les%5Bi%5D)%0A
+
%0Apl.axis
|
9ba3c840514e765acac2542ee3faf47671824918
|
add missing source file
|
moban/buffered_writer.py
|
moban/buffered_writer.py
|
Python
| 0.000001 |
@@ -0,0 +1,1021 @@
+from moban import utils, file_system%0A%0Aimport fs%0Aimport fs.path%0A%0A%0Aclass BufferedWriter(object):%0A def __init__(self):%0A self.fs_list = %7B%7D%0A%0A def write_file_out(self, filename, content):%0A if %22zip://%22 in filename:%0A self.write_file_out_to_zip(filename, content)%0A else:%0A utils.write_file_out(filename, content)%0A%0A def write_file_out_to_zip(self, filename, content):%0A zip_file, file_name = filename.split(%22.zip/%22)%0A zip_file = zip_file + %22.zip%22%0A if zip_file not in self.fs_list:%0A self.fs_list%5Bzip_file%5D = fs.open_fs(%0A file_system.to_unicode(zip_file), create=True%0A )%0A base_dirs = fs.path.dirname(file_name)%0A if not self.fs_list%5Bzip_file%5D.exists(base_dirs):%0A self.fs_list%5Bzip_file%5D.makedirs(base_dirs)%0A self.fs_list%5Bzip_file%5D.writebytes(%0A file_system.to_unicode(file_name), content%0A )%0A%0A def close(self):%0A for fsx in self.fs_list.values():%0A fsx.close()%0A
|
|
b573daf86d2bcb5d8dc71e45a65b5f2ffc0866b1
|
Correct module help
|
examples/create_events.py
|
examples/create_events.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import misp_url, misp_key
import argparse
# For python2 & 3 compat, a bit dirty, but it seems to be the least bad one
try:
input = raw_input
except NameError:
pass
def init(url, key):
return PyMISP(url, key, True, 'json')
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create an event on MISP.')
parser.add_argument("-d", "--distrib", type=int, help="The distribution setting used for the attributes and for the newly created event, if relevant. [0-3].")
parser.add_argument("-i", "--info", help="Used to populate the event info field if no event ID supplied.")
parser.add_argument("-a", "--analysis", type=int, help="The analysis level of the newly created event, if applicatble. [0-2]")
parser.add_argument("-t", "--threat", type=int, help="The threat level ID of the newly created event, if applicatble. [0-3]")
args = parser.parse_args()
misp = init(misp_url, misp_key)
event = misp.new_event(args.distrib, args.threat, args.analysis, args.info)
print event
response = misp.add_mutex(event, 'booh')
print response
|
Python
| 0.000001 |
@@ -946,19 +946,19 @@
atble. %5B
-0-3
+1-4
%5D%22)%0A
|
291d882a29981ea6c82c40c8e9a001aa3305e0ae
|
Create 8kyu_do_I_get_a_bonus.py
|
Solutions/8kyu/8kyu_do_I_get_a_bonus.py
|
Solutions/8kyu/8kyu_do_I_get_a_bonus.py
|
Python
| 0.000001 |
@@ -0,0 +1,79 @@
+def bonus_time(salary, bonus):%0A return '$%7B%7D'.format(salary*(%5B1,10%5D%5Bbonus%5D))%0A
|
|
7c16172f9ebe65d6928a72001a086637bf4bd725
|
Fix buggy annotations for stdout/stderr.
|
scripts/lib/node_cache.py
|
scripts/lib/node_cache.py
|
from __future__ import print_function
import os
import hashlib
from os.path import dirname, abspath
if False:
from typing import Optional, List, Tuple
from scripts.lib.zulip_tools import subprocess_text_output, run
ZULIP_PATH = dirname(dirname(dirname(abspath(__file__))))
NPM_CACHE_PATH = "/srv/zulip-npm-cache"
if 'TRAVIS' in os.environ:
# In Travis CI, we don't have root access
NPM_CACHE_PATH = "/home/travis/zulip-npm-cache"
def setup_node_modules(npm_args=None, stdout=None, stderr=None, copy_modules=False):
# type: (Optional[List[str]], Optional[str], Optional[str], Optional[bool]) -> None
sha1sum = hashlib.sha1()
sha1sum.update(subprocess_text_output(['cat', 'package.json']).encode('utf8'))
sha1sum.update(subprocess_text_output(['npm', '--version']).encode('utf8'))
sha1sum.update(subprocess_text_output(['node', '--version']).encode('utf8'))
if npm_args is not None:
sha1sum.update(''.join(sorted(npm_args)).encode('utf8'))
npm_cache = os.path.join(NPM_CACHE_PATH, sha1sum.hexdigest())
cached_node_modules = os.path.join(npm_cache, 'node_modules')
success_stamp = os.path.join(cached_node_modules, '.success-stamp')
# Check if a cached version already exists
if not os.path.exists(success_stamp):
do_npm_install(npm_cache, npm_args or [], stdout, stderr, copy_modules)
print("Using cached node modules from %s" % (cached_node_modules,))
cmds = [
['rm', '-rf', 'node_modules'],
["ln", "-nsf", cached_node_modules, 'node_modules'],
['touch', success_stamp],
]
for cmd in cmds:
run(cmd, stdout=stdout, stderr=stderr)
def do_npm_install(target_path, npm_args, stdout=None, stderr=None, copy_modules=False):
# type: (str, List[str], Optional[str], Optional[str], Optional[bool]) -> None
cmds = [
["sudo", "rm", "-rf", target_path],
['sudo', 'mkdir', '-p', target_path],
["sudo", "chown", "{}:{}".format(os.getuid(), os.getgid()), target_path],
['cp', 'package.json', target_path],
]
if copy_modules:
print("Cached version not found! Copying node modules.")
cmds.append(["mv", "node_modules", target_path])
else:
print("Cached version not found! Installing node modules.")
cmds.append(['npm', 'install'] + npm_args + ['--prefix', target_path])
for cmd in cmds:
run(cmd, stdout=stdout, stderr=stderr)
|
Python
| 0 |
@@ -143,16 +143,20 @@
l, List,
+ IO,
Tuple%0A%0A
@@ -565,35 +565,34 @@
str%5D%5D, Optional%5B
-str
+IO
%5D, Optional%5Bstr%5D
@@ -579,35 +579,34 @@
l%5BIO%5D, Optional%5B
-str
+IO
%5D, Optional%5Bbool
@@ -1771,35 +1771,34 @@
%5Bstr%5D, Optional%5B
-str
+IO
%5D, Optional%5Bstr%5D
@@ -1793,19 +1793,18 @@
ptional%5B
-str
+IO
%5D, Optio
|
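The corrected comment-style annotations match what callers actually pass — open file-like objects rather than path strings — e.g. (a sketch; the npm flag is illustrative):

import sys

# stdout/stderr are file objects, hence Optional[IO] rather than Optional[str]:
setup_node_modules(npm_args=['--no-optional'], stdout=sys.stdout, stderr=sys.stderr)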
d36762fcf98774560fe82b32b2d64d2eba1ec72b
|
Improve logging to debug invalid "extra_specs" entries
|
cinder/scheduler/filters/capabilities_filter.py
|
cinder/scheduler/filters/capabilities_filter.py
|
# Copyright (c) 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import six
from cinder.scheduler import filters
from cinder.scheduler.filters import extra_specs_ops
LOG = logging.getLogger(__name__)
class CapabilitiesFilter(filters.BaseHostFilter):
"""HostFilter to work with resource (instance & volume) type records."""
def _satisfies_extra_specs(self, capabilities, resource_type):
"""Check if capabilities satisfy resource type requirements.
Check that the capabilities provided by the services satisfy
the extra specs associated with the resource type.
"""
extra_specs = resource_type.get('extra_specs', [])
if not extra_specs:
return True
for key, req in six.iteritems(extra_specs):
# Either not scope format, or in capabilities scope
scope = key.split(':')
if len(scope) > 1 and scope[0] != "capabilities":
continue
elif scope[0] == "capabilities":
del scope[0]
cap = capabilities
for index in range(len(scope)):
try:
cap = cap.get(scope[index])
except AttributeError:
return False
if cap is None:
return False
if not extra_specs_ops.match(cap, req):
LOG.debug("extra_spec requirement '%(req)s' "
"does not match '%(cap)s'",
{'req': req, 'cap': cap})
return False
return True
def host_passes(self, host_state, filter_properties):
"""Return a list of hosts that can create resource_type."""
# Note(zhiteng) Currently only Cinder and Nova are using
# this filter, so the resource type is either instance or
# volume.
resource_type = filter_properties.get('resource_type')
if not self._satisfies_extra_specs(host_state.capabilities,
resource_type):
LOG.debug("%(host_state)s fails resource_type extra_specs "
"requirements", {'host_state': host_state})
return False
return True
|
Python
| 0.000016 |
@@ -1867,16 +1867,146 @@
s None:%0A
+ LOG.debug(%22Host doesn't provide capability '%25(cap)s' %22 %25%0A %7B'cap': scope%5Bindex%5D%7D)%0A
|
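The scope walk in _satisfies_extra_specs splits an extra-spec key on ':' and descends into the nested capabilities dict; the new LOG.debug fires when a level is missing. In isolation the traversal behaves like this (capability names invented):

capabilities = {'filter_function': {'enabled': True}}

scope = 'capabilities:filter_function:enabled'.split(':')
del scope[0]  # drop the 'capabilities' prefix, as in the filter

cap = capabilities
for index in range(len(scope)):
    cap = cap.get(scope[index])  # a None here is what the new debug line reports
# cap is now True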
d10ec57d6f58a4f96a2f648cac1bc94dc78efc32
|
Implement identifying to accounts
|
txircd/modules/extra/services/account_identify.py
|
txircd/modules/extra/services/account_identify.py
|
Python
| 0.000009 |
@@ -0,0 +1,2019 @@
+from twisted.plugin import IPlugin%0Afrom twisted.words.protocols import irc%0Afrom txircd.module_interface import Command, ICommand, IModuleData, ModuleData%0Afrom zope.interface import implements%0A%0Airc.ERR_SERVICES = %22955%22 # Custom numeric; 955 %3CTYPE%3E %3CSUBTYPE%3E %3CERROR%3E%0A%0Aclass AccountIdentify(ModuleData):%0A%09implements(IPlugin, IModuleData)%0A%09%0A%09name = %22AccountIdentify%22%0A%09%0A%09def userCommands(self):%0A%09%09return %5B (%22IDENTIFY%22, 1, IdentifyCommand(self)),%0A%09%09%09(%22ID%22, 1, IdCommand(self)) %5D%0A%09%0A%09def parseParams(self, command, user, params, prefix, tags):%0A%09%09if not params:%0A%09%09%09user.sendSingleError(%22IdentifyParams%22, irc.ERR_NEEDMOREPARAMS, command, %22Not enough parameters%22)%0A%09%09%09return None%0A%09%09if len(params) == 1:%0A%09%09%09return %7B%0A%09%09%09%09%22password%22: params%5B0%5D%0A%09%09%09%7D%0A%09%09return %7B%0A%09%09%09%22accountname%22: params%5B0%5D,%0A%09%09%09%22password%22: params%5B1%5D%0A%09%09%7D%0A%09%0A%09def execute(self, user, data):%0A%09%09resultValue = self.ircd.runActionUntilValue(%22accountauthenticate%22, user, data%5B%22accountname%22%5D if %22accountname%22 in data else user.nick, data%5B%22password%22%5D)%0A%09%09if not resultValue:%0A%09%09%09user.sendMessage(irc.ERR_SERVICES, %22ACCOUNT%22, %22IDENTIFY%22, %22This server doesn't have accounts set up.%22)%0A%09%09%09user.sendMessage(%22NOTICE%22, %22This server doesn't have accounts set up.%22)%0A%09%09%09return True%0A%09%09if resultValue%5B0%5D:%0A%09%09%09return True%0A%09%09user.sendMessage(irc.ERR_SERVICES, %22ACCOUNT%22, %22IDENTIFY%22, resultValue%5B1%5D)%0A%09%09user.sendMessage(%22NOTICE%22, resultValue%5B1%5D)%0A%09%09return True%0A%0Aclass IdentifyCommand(Command):%0A%09implements(ICommand)%0A%09%0A%09def __init__(self, module):%0A%09%09self.module = module%0A%09%0A%09def parseParams(self, user, params, prefix, tags):%0A%09%09return self.module.parseParams(%22IDENTIFY%22, user, params, prefix, tags)%0A%09%0A%09def execute(self, user, data):%0A%09%09return self.module.execute(user, data)%0A%0Aclass IdCommand(Command):%0A%09implements(ICommand)%0A%09%0A%09def __init__(self, module):%0A%09%09self.module = module%0A%09%0A%09def parseParams(self, user, params, prefix, tags):%0A%09%09return self.module.parseParams(%22ID%22, user, params, prefix, tags)%0A%09%0A%09def execute(self, user, data):%0A%09%09self.module.execute(user, data)%0A%0AidentifyCommand = AccountIdentify()
|
|
166854466771850bda3384b75d0f8d0656c259f6
|
add predict
|
gen_predict_res_format.py
|
gen_predict_res_format.py
|
Python
| 0.999925 |
@@ -0,0 +1,1118 @@
+# -*- coding: utf-8 -*-%0A'''%0ACreated on Jul 9, 2013%0A%0A@author: Chunwei Yan @ pkusz%0A@mail: [email protected]%0A'''%0Aimport sys%0Afrom utils import get_num_lines, args_check%0A%0Aclass Gen(object):%0A formats = %7B%0A '1':1,%0A '-1':0,%0A %7D%0A%0A def __init__(self, fph, test_ph, tph):%0A self.fph, self.test_ph, self.tph = %5C%0A fph, test_ph, tph%0A%0A def __call__(self):%0A self.trans()%0A self.tofile()%0A%0A def trans(self):%0A num_lines = get_num_lines(self.test_ph)%0A self.lines = %5B%5D%0A with open(self.fph) as resf:%0A with open(self.test_ph) as testf:%0A for i in range(num_lines):%0A res = resf.readline()%0A tes = testf.readline()%0A label = self.formats.get(res.strip())%0A line = %22%25d%5Ct%25s%22 %25 (label, tes.strip())%0A self.lines.append(line)%0A%0A def tofile(self):%0A with open(self.tph, 'w') as f:%0A f.write('%5Cn'.join(self.lines))%0A%0A%0A%0Aif __name__ == %22__main__%22:%0A fph, test_ph, tph = args_check(3, %22%22)%0A g = Gen(fph, test_ph, tph)%0A g()%0A
|
|
aa6837e14e520f5917cf1c452bd0c9a8ce2a27dd
|
Add new module for plugin loading
|
module/others/plugins.py
|
module/others/plugins.py
|
Python
| 0 |
@@ -0,0 +1,357 @@
+from maya import cmds%0A%0A%0Aclass Commands(object):%0A %22%22%22 class name must be 'Commands' %22%22%22%0A%0A commandDict = %7B%7D%0A%0A def _loadObjPlugin(self):%0A if not cmds.pluginInfo(%22objExport%22, q=True, loaded=True):%0A cmds.loadPlugin(%22objExport%22)%0A commandDict%5B'sampleCommand'%5D = %22sphere.png%22%0A # %5E Don't forget to add the command to the dictionary.%0A
|
|
67350e9ac3f2dc0fceb1899c8692adcd9cdd4213
|
Add a test case to validate `get_unseen_notes`
|
frappe/tests/test_boot.py
|
frappe/tests/test_boot.py
|
Python
| 0 |
@@ -0,0 +1,710 @@
+import unittest%0A%0Aimport frappe%0Afrom frappe.boot import get_unseen_notes%0Afrom frappe.desk.doctype.note.note import mark_as_seen%0A%0A%0Aclass TestBootData(unittest.TestCase):%0A%09def test_get_unseen_notes(self):%0A%09%09frappe.db.delete(%22Note%22)%0A%09%09frappe.db.delete(%22Note Seen By%22)%0A%09%09note = frappe.get_doc(%0A%09%09%09%7B%0A%09%09%09%09%22doctype%22: %22Note%22,%0A%09%09%09%09%22title%22: %22Test Note%22,%0A%09%09%09%09%22notify_on_login%22: 1,%0A%09%09%09%09%22content%22: %22Test Note 1%22,%0A%09%09%09%09%22public%22: 1,%0A%09%09%09%7D%0A%09%09)%0A%09%09note.insert()%0A%0A%09%09frappe.set_user(%[email protected]%22)%0A%09%09unseen_notes = %5Bd.title for d in get_unseen_notes()%5D%0A%09%09self.assertListEqual(unseen_notes, %5B%22Test Note%22%5D)%0A%0A%09%09mark_as_seen(note.name)%0A%09%09unseen_notes = %5Bd.title for d in get_unseen_notes()%5D%0A%09%09self.assertListEqual(unseen_notes, %5B%5D)%0A
|
|
15fd5f6ddd3aa79a26b28d5ef4b93eeb12e28956
|
add an update_users management command
|
controller/management/commands/update_users.py
|
controller/management/commands/update_users.py
|
Python
| 0.000002 |
@@ -0,0 +1,1519 @@
+%22%22%22%0AEnsure that the right users exist:%0A%0A- read USERS dictionary from auth.json%0A- if they don't exist, create them.%0A- if they do, update the passwords to match%0A%0A%22%22%22%0Aimport json%0Aimport logging%0A%0Afrom django.core.management.base import BaseCommand%0Afrom django.conf import settings%0Afrom django.contrib.auth.models import User%0A%0Alog = logging.getLogger(__name__)%0A%0Aclass Command(BaseCommand):%0A help = %22Create users that are specified in auth.json%22%0A%0A def handle(self, *args, **options):%0A%0A log.info(%22root is : %22 + settings.ENV_ROOT)%0A auth_path = settings.ENV_ROOT / %22auth.json%22%0A%0A log.info(' %5B*%5D reading %7B0%7D'.format(auth_path))%0A%0A with open(auth_path) as auth_file:%0A AUTH_TOKENS = json.load(auth_file)%0A users = AUTH_TOKENS.get('USERS', %7B%7D)%0A for username, pwd in users.items():%0A log.info(' %5B*%5D Creating/updating user %7B0%7D'.format(username))%0A try:%0A user = User.objects.get(username=username)%0A user.set_password(pwd)%0A user.save()%0A except User.DoesNotExist:%0A log.info(' ... %7B0%7D does not exist. Creating'.format(username))%0A%0A user = User.objects.create(username=username,%0A email=username + '@dummy.edx.org',%0A is_active=True)%0A user.set_password(pwd)%0A user.save()%0A log.info(' %5B*%5D All done!')%0A
|
|
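The command reads a USERS mapping from auth.json at the environment root; the expected shape is roughly this (usernames and passwords invented):

{
    "USERS": {
        "staff": "change-me",
        "qa": "also-change-me"
    }
}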
3d0f6085bceffc5941e55678da20d8db4a7d5ce2
|
Create question4.py
|
huangguolong/question4.py
|
huangguolong/question4.py
|
Python
| 0.00002 |
@@ -0,0 +1,400 @@
+%0A%0Adef fib(nums):%0A '''%0A :param nums: %E4%B8%80%E4%B8%AA%E6%95%B4%E6%95%B0%EF%BC%8C%E7%9B%B8%E5%BD%93%E4%BA%8E%E6%95%B0%E5%88%97%E7%9A%84%E4%B8%8B%E6%A0%87%0A :return: %E8%BF%94%E5%9B%9E%E8%AF%A5%E4%B8%8B%E6%A0%87%E7%9A%84%E5%80%BC%0A '''%0A if nums == 0 or nums == 1:%0A return nums%0A else:%0A return fib(nums-2) + fib(nums-1)%0A%0A%0Adef createFib(n):%0A '''%0A :param n: %E9%9C%80%E8%A6%81%E5%B1%95%E7%A4%BA%E5%89%8D%E9%9D%A2n%E4%B8%AA%E6%95%B0%0A :return: %E8%BF%94%E5%9B%9E%E4%B8%80%E4%B8%AA%E5%88%97%E8%A1%A8%EF%BC%8C%E8%B4%B9%E6%B3%A2%E9%82%A3%E5%A5%91%E6%95%B0%E5%88%97%0A '''%0A list1 = %5B%5D%0A for i in range(n):%0A%0A list1.append(fib(i))%0A%0A print(list1)%0A%0A#%E8%B0%83%E7%94%A8%E7%94%9F%E6%88%90%E8%B4%B9%E6%B3%A2%E9%82%A3%E5%A5%91%E6%95%B0%E5%88%97%E5%87%BD%E6%95%B0%EF%BC%8C%E6%8C%87%E5%AE%9A%E5%B1%95%E7%A4%BA%E7%9A%84%E5%89%8D%E9%9D%A2n%E4%B8%AA%E6%95%B0%0AcreateFib(20)%0A
|
|
448ca2cfb8f7e167b1395e84a4f2b4b4cea57905
|
add file
|
crawler/jb51.py
|
crawler/jb51.py
|
Python
| 0.000001 |
@@ -0,0 +1,480 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Afrom async_spider import AsySpider%0A%0A%0Aclass Jb51Spider(AsySpider):%0A%0A def handle_html(self, url, html):%0A print(url)%0A '''%0A filename = url.rsplit('/', 1)%5B1%5D%0A with open(filename, 'w+') as f:%0A f.write(html)%0A '''%0A%0A%0Aif __name__ == '__main__':%0A urls = %5B%5D%0A for page in range(1, 73000):%0A urls.append('http://www.jb51.net/article/%25s.htm' %25 page)%0A s = Jb51Spider(urls)%0A s.run()%0A
|
|
55b3269f9c2cd22ef75a2632f04e37a9f723e961
|
add data migration
|
accelerator/migrations/0033_migrate_gender_data.py
|
accelerator/migrations/0033_migrate_gender_data.py
|
Python
| 0 |
@@ -0,0 +1,1889 @@
+# Generated by Django 2.2.10 on 2021-01-22 12:13%0Aimport sys%0A%0Afrom django.contrib.auth import get_user_model%0Afrom django.db import migrations%0A%0A# gender identity%0AGENDER_MALE = %22Male%22%0AGENDER_FEMALE = %22Female%22%0AGENDER_PREFER_TO_SELF_DESCRIBE = %22I Prefer To Self-describe%22%0AGENDER_PREFER_NOT_TO_SAY = %22I Prefer Not To Say%22%0A%0A# gender%0AMALE_CHOICE = 'm'%0AFEMALE_CHOICE = 'f'%0AOTHER_CHOICE = 'o'%0APREFER_NOT_TO_STATE_CHOICE = 'p'%0A%0Agender_map = %7B%0A MALE_CHOICE: GENDER_MALE,%0A FEMALE_CHOICE: GENDER_FEMALE,%0A OTHER_CHOICE: GENDER_PREFER_TO_SELF_DESCRIBE,%0A PREFER_NOT_TO_STATE_CHOICE: GENDER_PREFER_NOT_TO_SAY,%0A%7D%0A%0A%0Adef get_gender_choice_obj_dict(apps):%0A GenderChoices = apps.get_model('accelerator', 'GenderChoices')%0A return %7B%0A gender_choice.name: gender_choice%0A for gender_choice in GenderChoices.objects.all()%0A %7D%0A%0A%0Adef add_gender_identity(profile, gender_choice):%0A if not profile.gender_identity.filter(pk=gender_choice.pk).exists():%0A profile.gender_identity.add(gender_choice.pk)%0A%0A%0Adef migrate_gender_data_to_gender_identity(apps, schema_editor):%0A users = get_user_model().objects.all()%0A gender_choices = get_gender_choice_obj_dict(apps)%0A%0A for user in users:%0A profile = user.get_profile()%0A gender = profile.gender%0A if gender:%0A try:%0A gender_choice = gender_choices%5Bgender_map%5Bgender.lower()%5D%5D%0A add_gender_identity(profile, gender_choice)%0A except KeyError:%0A print(f%22Exception: %7Btype(profile)%7D ID:%7Bprofile.id%7D%22%0A f%22 has unexpected gender value '%7Bgender%7D'%22)%0A%0A%0Aclass Migration(migrations.Migration):%0A dependencies = %5B%0A ('accelerator', '0032_add_ethno_racial_identity_data'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(%0A migrate_gender_data_to_gender_identity,%0A migrations.RunPython.noop,%0A )%0A %5D%0A
|
|
3344bb0a967c4217f6fa1d701b2c4dfb89d578aa
|
add new package : alluxio (#14143)
|
var/spack/repos/builtin/packages/alluxio/package.py
|
var/spack/repos/builtin/packages/alluxio/package.py
|
Python
| 0 |
@@ -0,0 +1,821 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass Alluxio(Package):%0A %22%22%22%0A Alluxio (formerly known as Tachyon) is a virtual distributed storage%0A system. It bridges the gap between computation frameworks and storage%0A systems, enabling computation applications to connect to numerous%0A storage systems through a common interface.%0A %22%22%22%0A%0A homepage = %22https://github.com/Alluxio/alluxio%22%0A url = %22https://github.com/Alluxio/alluxio/archive/v2.1.0.tar.gz%22%0A%0A version('2.1.0', sha256='c8b5b7848488e0ac10b093eea02ef05fa822250669d184291cc51b2f8aac253e')%0A%0A def install(self, spec, prefix):%0A install_tree('.', prefix)%0A
|
|
6dc035051d666707fdc09e63f510dbc4edf1724d
|
Migrate lab_members
|
lab_members/migrations/0001_initial.py
|
lab_members/migrations/0001_initial.py
|
Python
| 0.000001 |
@@ -0,0 +1,1633 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A %5D%0A%0A operations = %5B%0A migrations.CreateModel(%0A name='Position',%0A fields=%5B%0A ('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),%0A ('title', models.CharField(max_length=64, unique=True, help_text='Please enter a title for this position', default='', verbose_name='title')),%0A %5D,%0A options=%7B%0A 'verbose_name': 'Position',%0A 'verbose_name_plural': 'Positions',%0A %7D,%0A bases=(models.Model,),%0A ),%0A migrations.CreateModel(%0A name='Scientist',%0A fields=%5B%0A ('id', models.AutoField(primary_key=True, auto_created=True, serialize=False, verbose_name='ID')),%0A ('full_name', models.CharField(max_length=64, unique=True, help_text='Please enter a full name for this scientist', default='', verbose_name='full name')),%0A ('slug', models.SlugField(max_length=64, help_text='Please enter a unique slug for this scientist', default='', verbose_name='slug')),%0A ('title', models.ForeignKey(blank=True, help_text='Please specify a title for this scientist', default=None, to='lab_members.Position', null=True)),%0A %5D,%0A options=%7B%0A 'verbose_name': 'Scientist',%0A 'verbose_name_plural': 'Scientists',%0A %7D,%0A bases=(models.Model,),%0A ),%0A %5D%0A
|
|
f7f25876d3398cacc822faf2b16cc156e88c7fd3
|
Use this enough, might as well add it.
|
misc/jp2_kakadu_pillow.py
|
misc/jp2_kakadu_pillow.py
|
Python
| 0 |
@@ -0,0 +1,1645 @@
+# This the basic flow for getting from a JP2 to a jpg w/ kdu_expand and Pillow%0A# Useful for debugging the scenario independent of the server.%0A%0Afrom PIL import Image%0Afrom PIL.ImageFile import Parser%0Afrom os import makedirs, path, unlink%0Aimport subprocess%0Aimport sys%0A%0AKDU_EXPAND='/usr/local/bin/kdu_expand'%0ALIB_KDU='/usr/local/lib/libkdu_v72R.so'%0ATMP='/tmp'%0AINPUT_JP2='/home/jstroop/Desktop/nanteuil.jp2'%0AOUT_JPG='/tmp/test.jpg'%0AREDUCE=0%0A%0A### cmds, etc.%0Apipe_fp = '%25s/mypipe.bmp' %25 (TMP,)%0Akdu_cmd = '%25s -i %25s -o %25s -num_threads 4 -reduce %25d' %25 (KDU_EXPAND, INPUT_JP2, pipe_fp, REDUCE)%0Amkfifo_cmd = '/usr/bin/mkfifo %25s' %25 (pipe_fp,)%0Armfifo_cmd = '/bin/rm %25s' %25 (pipe_fp,)%0A%0A# make a named pipe%0Amkfifo_resp = subprocess.check_call(mkfifo_cmd, shell=True)%0Aif mkfifo_resp == 0:%0A print 'mkfifo OK'%0A%0A# write kdu_expand's output to the named pipe %0Akdu_expand_proc = subprocess.Popen(kdu_cmd, shell=True, %0A bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE,%0A env=%7B 'LD_LIBRARY_PATH' : KDU_EXPAND %7D)%0A%0A# open the named pipe and parse the stream%0Awith open(pipe_fp, 'rb') as f:%0A p = Parser()%0A while True:%0A s = f.read(1024)%0A if not s:%0A break%0A p.feed(s)%0A im = p.close()%0A%0A# finish kdu%0Akdu_exit = kdu_expand_proc.wait()%0Aif kdu_exit != 0:%0A map(sys.stderr.write, kdu_expand_proc.stderr)%0Aelse:%0A # if kdu was successful, save to a jpg%0A map(sys.stdout.write, kdu_expand_proc.stdout)%0A im = im.resize((719,900), resample=Image.ANTIALIAS)%0A im.save(OUT_JPG, quality=95)%0A%0A# remove the named pipe%0Armfifo_resp = subprocess.check_call(rmfifo_cmd, shell=True)%0Aif rmfifo_resp == 0:%0A print 'rm fifo OK'%0A
|
|
2f9324f4d073082f47ecd8279d4bd85eaa1cf258
|
add splits-io api wrapper
|
modules/apis/splits_io.py
|
modules/apis/splits_io.py
|
Python
| 0 |
@@ -0,0 +1,399 @@
+#! /usr/bin/env python2.7%0A%0Aimport modules.apis.api_base as api%0A%0Aclass SplitsIOAPI(api.API):%0A%0A def __init__(self, session = None):%0A super(SplitsIOAPI, self).__init__(%22https://splits.io/api/v3%22, session)%0A%0A def get_user_splits(self, user, **kwargs):%0A endpoint = %22/users/%7B0%7D/pbs%22.format(user)%0A success, response = self.get(endpoint, **kwargs)%0A return success, response%0A
|
|
d5e67563f23acb11fe0e4641d48b67fe3509822f
|
Add test migration removing ref to old company image
|
apps/companyprofile/migrations/0002_auto_20151014_2132.py
|
apps/companyprofile/migrations/0002_auto_20151014_2132.py
|
Python
| 0 |
@@ -0,0 +1,386 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('companyprofile', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.RenameField(%0A model_name='company',%0A old_name='image',%0A new_name='old_image',%0A ),%0A %5D%0A
|
|
a1e2e51c2777107bbc8a20429078638917149b6a
|
Remove unused import
|
src/compas/rpc/services/default.py
|
src/compas/rpc/services/default.py
|
import os
import inspect
import json
import socket
import compas
from compas.rpc import Server
from compas.rpc import Service
class DefaultService(Service):
pass
if __name__ == '__main__':
import sys
import threading
try:
port = int(sys.argv[1])
except:
port = 1753
print('Starting default RPC service on port {0}...'.format(port))
server = Server(("localhost", port))
server.register_function(server.ping)
server.register_function(server.remote_shutdown)
server.register_instance(DefaultService())
print('Listening, press CTRL+C to abort...')
server.serve_forever()
|
Python
| 0.000001 |
@@ -211,29 +211,8 @@
sys
-%0A import threading
%0A%0A
|
f20f286a3c5c6e2b9adf7220ac4426ce783d96b5
|
Create regressors.py
|
trendpy/regressors.py
|
trendpy/regressors.py
|
Python
| 0.000001 |
@@ -0,0 +1,1159 @@
+# regressors.py%0A%0A# MIT License%0A%0A# Copyright (c) 2017 Rene Jean Corneille%0A%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A%0A# The above copyright notice and this permission notice shall be included in all%0A# copies or substantial portions of the Software.%0A%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE%0A# SOFTWARE.%0A%0Aclass Lasso(Strategy):%0A%09pass%0A%09%0A
|
|
ad777af05d2995ee43b3d64ce435cc96379fa9a2
|
add iostat template
|
graph_templates/iostat.py
|
graph_templates/iostat.py
|
Python
| 0 |
@@ -0,0 +1,659 @@
+from . import GraphTemplate%0A%0A%0Aclass IostatTemplate(GraphTemplate):%0A '''%0A corresponds to diamond diskusage plugin%0A '''%0A target_types = %7B%0A 'gauge': %7B%0A 'match': '%5Eservers%5C.(?P%3Cserver%3E%5B%5E%5C.%5D+)%5C.iostat%5C.(?P%3Cdevice%3E%5B%5E%5C.%5D+)%5C.(?P%3Ctype%3E.*)$',%0A 'default_group_by': 'server',%0A 'default_graph_options': %7B'state': 'stacked'%7D%0A %7D,%0A 'rate': %7B%0A 'match': '%5Eservers%5C.(?P%3Cserver%3E%5B%5E%5C.%5D+)%5C.iostat%5C.(?P%3Cdevice%3E%5B%5E%5C.%5D+)%5C.(?P%3Ctype%3E.*)_per_second$',%0A 'default_group_by': 'server',%0A 'default_graph_options': %7B'state': 'stacked', 'vtitle': 'events/s'%7D%0A %7D%0A %7D%0A%0A# vim: ts=4 et sw=4:%0A
|
|
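The match patterns above carve Graphite-style metric paths into named groups; for instance (metric name invented):

import re

rate = re.compile(r'^servers\.(?P<server>[^\.]+)\.iostat\.(?P<device>[^\.]+)\.(?P<type>.*)_per_second$')
m = rate.match('servers.web01.iostat.sda.reads_per_second')
assert m.groupdict() == {'server': 'web01', 'device': 'sda', 'type': 'reads'}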
5a47b4f3b13c2d66a9e226eeb90dfddef048279f
|
Implement the host and address options in runserver
|
djangae/management/commands/runserver.py
|
djangae/management/commands/runserver.py
|
import os
from django.core.management.commands.runserver import BaseRunserverCommand
from datetime import datetime
class Command(BaseRunserverCommand):
"""
Overrides the default Django runserver command.
Instead of starting the default Django development server this
    command fires up a copy of the full-fledged App Engine
dev_appserver that emulates the live environment your application
will be deployed to.
"""
def inner_run(self, *args, **options):
import sys
shutdown_message = options.get('shutdown_message', '')
# We use the old dev appserver if threading is disabled or --old was passed
quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'
from djangae.utils import find_project_root
from djangae.sandbox import _find_sdk_from_python_path
from django.conf import settings
from django.utils import translation
# Check for app.yaml
expected_path = os.path.join(find_project_root(), "app.yaml")
if not os.path.exists(expected_path):
sys.stderr.write("Unable to find app.yaml at '%s'\n" % expected_path)
sys.exit(1)
self.stdout.write("Validating models...\n\n")
self.validate(display_num_errors=True)
self.stdout.write((
"%(started_at)s\n"
"Django version %(version)s, using settings %(settings)r\n"
"Starting development server at http://%(addr)s:%(port)s/\n"
"Quit the server with %(quit_command)s.\n"
) % {
"started_at": datetime.now().strftime('%B %d, %Y - %X'),
"version": self.get_version(),
"settings": settings.SETTINGS_MODULE,
"addr": self._raw_ipv6 and '[%s]' % self.addr or self.addr,
"port": self.port,
"quit_command": quit_command,
})
sys.stdout.write("\n")
sys.stdout.flush()
# django.core.management.base forces the locale to en-us. We should
# set it up correctly for the first request (particularly important
# in the "--noreload" case).
translation.activate(settings.LANGUAGE_CODE)
# Will have been set by setup_paths
sdk_path = _find_sdk_from_python_path()
from google.appengine.tools.devappserver2 import devappserver2
from google.appengine.tools.devappserver2 import api_server
from google.appengine.tools.devappserver2 import python_runtime
from djangae import sandbox
class NoConfigDevServer(devappserver2.DevelopmentServer):
@staticmethod
def _create_api_server(request_data, storage_path, options, configuration):
return api_server.APIServer(options.api_host, options.api_port, configuration.app_id)
python_runtime._RUNTIME_PATH = os.path.join(sdk_path, '_python_runtime.py')
python_runtime._RUNTIME_ARGS = [sys.executable, python_runtime._RUNTIME_PATH]
devappserver = NoConfigDevServer()
devappserver.start(sandbox._OPTIONS)
if shutdown_message:
sys.stdout.write(shutdown_message)
return
|
Python
| 0.000001 |
@@ -571,92 +571,8 @@
')%0A%0A
- # We use the old dev appserver if threading is disabled or --old was passed%0A
@@ -2190,17 +2190,16 @@
path()%0A%0A
-%0A
@@ -2438,16 +2438,186 @@
andbox%0A%0A
+ sandbox._OPTIONS.port = int(self.port) if self.port else sandbox._OPTIONS.port%0A sandbox._OPTIONS.host = self.addr if self.addr else sandbox._OPTIONS.host%0A%0A
|
1637b53727f81c9528c47effab172f86a58e8b9a
|
Add script register_ph_migrate.py to mass register and migrate placeholders remotely
|
ereuse_devicehub/scripts/register_ph_migrate.py
|
ereuse_devicehub/scripts/register_ph_migrate.py
|
Python
| 0 |
@@ -0,0 +1,2519 @@
+import argparse%0A%0Aimport requests%0Afrom ereuse_devicehub.security.request_auth import Auth%0A%0A%0Adef create_placeholders_and_migrate(base_url, email, password, n_placeholders, origin_db, dest_db, label=None,%0A comment=None):%0A %22%22%22%0A Remotely connects to a devicehub, creates n_placeholders placeholders and then migrates them to a dest_db%0A in the same devicehub.%0A %22%22%22%0A try:%0A auth = Auth(base_url, email, password)%0A snapshot = %7B%0A %22@type%22: %22devices:Register%22,%0A %22device%22: %7B%0A %22@type%22: %22Device%22,%0A %22placeholder%22: True%0A %7D%0A %7D%0A%0A devices_id = %5B%5D%0A for _ in range(0, n_placeholders):%0A r = requests.post('%7B%7D/%7B%7D/events/devices/register'.format(base_url, origin_db), json=snapshot, auth=auth)%0A r.raise_for_status()%0A result = r.json()%0A devices_id.append(result%5B'device'%5D)%0A%0A migrate = %7B%0A %22@type%22: %22devices:Migrate%22,%0A %22label%22: label,%0A %22to%22: %7B%0A %22baseUrl%22: %22https://devicehub.ereuse.org/%22,%0A %22database%22: dest_db%0A %7D,%0A 'devices': devices_id,%0A %22comment%22: comment%0A %7D%0A r = requests.post('%7B%7D/%7B%7D/events/devices/migrate'.format(base_url, origin_db), json=migrate, auth=auth)%0A r.raise_for_status()%0A except Exception as e:%0A raise e%0A%0A%0Aif __name__ == '__main__':%0A desc = 'Creates a number of placeholders and then migrates them to another database. ' %5C%0A 'This method executes remotely to any DeviceHub on the web.'%0A epilog = 'Example: python register_ph_migrate.py http://api.foo.bar [email protected] pass 25 db1 db2' %5C%0A ' -l %22Migrate to DB2%22 -c %22This migrate represents...%22'%0A parser = argparse.ArgumentParser(description=desc, epilog=epilog)%0A parser.add_argument('base_url', help='Ex: https://api.devicetag.io')%0A parser.add_argument('email')%0A parser.add_argument('password')%0A parser.add_argument('n_placeholders', help='Number of placeholders to create and migrate', type=int)%0A parser.add_argument('origin_db', help='Name of the database where placeholders are Registered and them moved from')%0A parser.add_argument('dest_db', help='Destination db')%0A parser.add_argument('-l', '--label')%0A parser.add_argument('-c', '--comment')%0A args = vars(parser.parse_args()) # If --help or -h or wrong value this will print message to user and abort%0A create_placeholders_and_migrate(**args)%0A
|
|
54641126cd8d662c6443aff1e6fe238c4bb09932
|
Add PowerAnalysers Voltech PMn000
|
engineering_project/Instrument/PowerAnalyser.py
|
engineering_project/Instrument/PowerAnalyser.py
|
Python
| 0 |
@@ -0,0 +1,948 @@
+# import time%0A# import logging%0A# from scipy.interpolate import UnivariateSpline%0A# import numpy as np%0Atry:%0A from Instrument.GenericInstrument import GenericInstrument%0A from Instrument.IEEE488 import IEEE488%0A from Instrument.SCPI import SCPI%0A%0Aexcept ImportError:%0A from GenericInstrument import GenericInstrument%0A from IEEE488 import IEEE488%0A from SCPI import SCPI%0A%0A%0Aclass PowerAnalyser(GenericInstrument):%0A %22%22%22Parent class for PowerAnalysers.%22%22%22%0A%0A def __init__(self, instrument):%0A %22%22%22.%22%22%22%0A super().__init__(instrument)%0A%0A def __repr__(self):%0A %22%22%22.%22%22%22%0A return%22%7B%7D, %7B%7D%22.format(__class__, self.instrument)%0A%0A%0Aclass VoltechPM3000A(PowerAnalyser, IEEE488):%0A %22%22%22Voltech PM3000A.%0A%0A .. figure:: images/PowerAnalyser/VoltechPM3000A.jpg%0A %22%22%22%0A%0A%0Aclass VoltechPM1000P(PowerAnalyser, IEEE488):%0A %22%22%22Voltech PM1000+.%0A%0A .. figure:: images/PowerAnalyser/VoltechPM1000P.jpg%0A %22%22%22%0A%0A%0A# REGISTER = %7B%7D%0A
|
|
884861de58ddfb12f2f5d15ce35349c74eab0c4e
|
Create 5009-set_bio_gripper.py
|
example/wrapper/common/5009-set_bio_gripper.py
|
example/wrapper/common/5009-set_bio_gripper.py
|
Python
| 0.000012 |
@@ -0,0 +1,1067 @@
+#!/usr/bin/env python3%0A# Software License Agreement (BSD License)%0A#%0A# Copyright (c) 2020, UFACTORY, Inc.%0A# All rights reserved.%0A#%0A# Author: Hutton %[email protected]%3E%0A%0A%22%22%22%0AExample: Bio Gripper Control%0APlease make sure that the gripper is attached to the end.%0A%22%22%22%0Aimport os%0Aimport sys%0Aimport time%0Asys.path.append(os.path.join(os.path.dirname(__file__), '../../..'))%0A%0Afrom xarm.wrapper import XArmAPI%0Afrom configparser import ConfigParser%0Aparser = ConfigParser()%0Aparser.read('../robot.conf')%0Atry:%0A ip = parser.get('xArm', 'ip')%0Aexcept:%0A ip = input('Please input the xArm ip address%5B192.168.1.194%5D:')%0A if not ip:%0A ip = '192.168.1.194'%0A%0A%0Aarm = XArmAPI(ip)%0Atime.sleep(0.5)%0Aif arm.warn_code != 0:%0A arm.clean_warn()%0Aif arm.error_code != 0:%0A arm.clean_error()%0A%0Aarm.motion_enable(enable=True) #gripper enable%0Atime.sleep(2) #Initialize the wait time%0Aarm.set_gripper_position(-10,wait=False,auto_enable=True,speed=900,timeout=10) #gripper open%0Atime.sleep(1)%0Aarm.set_gripper_position(10,wait=False,auto_enable=True,speed=900,timeout=10) #gripper close%0A
|
|
3ebe9ccebede38cc0638ef4adefe54fca306f2e6
|
fix path
|
doc/conf.py
|
doc/conf.py
|
__license__ = """
Copyright 2012 DISQUS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# -*- coding: utf-8 -*-
import os
import sys
import inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
from version import version
project = u'pykafka'
copyright = u'2015, Parse.ly'
version = release = version
extensions = ['sphinx.ext.autodoc']
templates_path = ['_templates']
exclude_patterns = ['_build']
html_static_path = ['_static']
source_suffix = '.rst'
master_doc = 'index'
html_theme = 'nature'
pygments_style = 'sphinx'
htmlhelp_basename = 'pykafkadoc'
autodoc_default_flags = ['special-members', 'show-inheritance']
|
Python
| 0.000017 |
@@ -763,18 +763,15 @@
ath.
-insert(0,
+append(
pare
@@ -1187,16 +1187,77 @@
embers',
+ 'undoc-members', 'private-members',%0A
'show-i
|
3cfd37f81708e1f3a1b69d6c310c7f93d32eb8ed
|
add script to generate artificial data
|
django_db_meter/generate_data.py
|
django_db_meter/generate_data.py
|
Python
| 0.000002 |
@@ -0,0 +1,1227 @@
+import random%0Aimport threading%0A%0Afrom django.contrib.auth.models import User%0Afrom models import TestModel%0A%0A%0Adef generate_queries():%0A u1 = User.objects.filter()%0A%0A new_name = str(random.randint(0, 2000000))%0A if u1:%0A u1.update(first_name=new_name)%0A else:%0A u1 = User(username=new_name)%0A u1.save()%0A%0A u1 = User.objects.filter(username=new_name)%0A if u1:%0A u1.first_name = new_name + 'hello'%0A u1.save()%0A%0A users = %5BUser(username=get_random_text()) for i in xrange(100)%5D%0A for user in users:%0A user.save()%0A%0A t = TestModel.objects.filter(user=u1)%0A t = list(t)%0A%0A for i in xrange(100):%0A t = TestModel.objects.filter()%0A t = list(t)%0A%0A for i in xrange(len(users)):%0A random_user = random.choice(users)%0A t = TestModel(user=random_user)%0A t.save()%0A%0A for i in xrange(100):%0A k = TestModel.objects.select_related('user')%0A k = list(k)%0A%0A%0Adef get_random_text():%0A new_name = str(random.randint(0, 2000000))%0A return new_name%0A%0A%0Adef main(concurrency=2):%0A ths = %5Bthreading.Thread(target=generate_queries) for i in%0A xrange(concurrency)%5D%0A for th in ths:%0A th.start()%0A%0A for th in ths:%0A th.join()%0A
|
|
af88a37ed87b18941232a98f52fec001bd63b453
|
Fix bug in CookieJar where QSettings expected str, but got QByteArray instead (#10)
|
python/pyphantomjs/cookiejar.py
|
python/pyphantomjs/cookiejar.py
|
'''
This file is part of the PyPhantomJS project.
Copyright (C) 2011 James Roe <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from PyQt4.QtCore import QSettings
from PyQt4.QtNetwork import QNetworkCookie, QNetworkCookieJar
class CookieJar(QNetworkCookieJar):
def __init__(self, parent, cookiesFile):
super(CookieJar, self).__init__(parent)
self.m_cookiesFile = cookiesFile
def setCookiesFromUrl(self, cookieList, url):
settings = QSettings(self.m_cookiesFile, QSettings.IniFormat)
settings.beginGroup(url.host())
for cookie in cookieList:
settings.setValue(cookie.name(), cookie.value())
settings.sync()
return True
def cookiesForUrl(self, url):
settings = QSettings(self.m_cookiesFile, QSettings.IniFormat)
cookieList = []
settings.beginGroup(url.host())
for cname in settings.childKeys():
cookieList.append(QNetworkCookie(cname, settings.value(cname)))
return cookieList
|
Python
| 0 |
@@ -1235,16 +1235,20 @@
etValue(
+str(
cookie.n
@@ -1252,18 +1252,23 @@
e.name()
+)
,
+str(
cookie.v
@@ -1274,16 +1274,17 @@
value())
+)
%0A%0A
|
eb34dd310cac0106070554c440b134d0baad6c8e
|
add a way to sequence animations into a new animation
|
vectortween/SequentialAnimation.py
|
vectortween/SequentialAnimation.py
|
Python
| 0.000001 |
@@ -0,0 +1,2599 @@
+from vectortween.Animation import Animation%0Afrom vectortween.Tween import Tween%0Afrom vectortween.Mapping import Mapping%0Afrom copy import deepcopy%0Afrom itertools import tee%0Aimport numpy as np%0A%0Adef pairwise(iterable):%0A %22s -%3E (s0,s1), (s1,s2), (s2, s3), ...%22%0A a, b = tee(iterable)%0A next(b, None)%0A return zip(a, b)%0A%0Adef normalize(x):%0A return x / sum(x)%0A%0Aclass SequentialAnimation(Animation):%0A def __init__(self, ListOfAnimations=%5B%5D, timeweight=%5B%5D, tween=%5B'linear'%5D):%0A super().__init__(None, None)%0A self.ListOfAnimations = %5B%5D%0A self.ListOfAnimationTimeWeight = np.array(%5B%5D)%0A self.CumulativeNormalizedTimeWeights = np.array(%5B%5D)%0A self.T = Tween(*tween)%0A if ListOfAnimations:%0A if not timeweight:%0A for a in ListOfAnimations:%0A self.add(a, 1)%0A else:%0A for a,t in zip(ListOfAnimations,timeweight):%0A self.add(a, t)%0A%0A def add(self, Anim, timeweight=1):%0A self.ListOfAnimations.append(deepcopy(Anim))%0A self.ListOfAnimationTimeWeight = np.append(self.ListOfAnimationTimeWeight, %5Btimeweight%5D)%0A self.CumulativeNormalizedTimeWeights = np.cumsum(normalize(self.ListOfAnimationTimeWeight))%0A%0A def make_frame(self, frame, birthframe, startframe, stopframe, deathframe):%0A if birthframe is None:%0A birthframe = startframe%0A if deathframe is None:%0A deathframe = stopframe%0A if frame %3C birthframe:%0A return None%0A if frame %3E deathframe:%0A return None%0A if frame %3C startframe:%0A return self.ListOfAnimations%5B0%5D.make_frame(frame, birthframe, startframe, stopframe, deathframe)%0A if frame %3E stopframe:%0A return self.ListOfAnimations%5B-1%5D.make_frame(frame, birthframe, startframe, stopframe, deathframe)%0A%0A t = self.T.tween2(frame, startframe, stopframe)%0A%0A for i, w in enumerate(self.CumulativeNormalizedTimeWeights):%0A if t %3C= w:%0A if i == 0: # reached the end of the cumulative weights%0A relativestartframe = 0%0A else:%0A relativestartframe = self.CumulativeNormalizedTimeWeights%5Bi-1%5D%0A relativestopframe = self.CumulativeNormalizedTimeWeights%5Bi%5D%0A absstartframe = Mapping.linlin(relativestartframe, 0, 1, startframe, stopframe)%0A absstopframe = Mapping.linlin(relativestopframe, 0, 1, startframe, stopframe)%0A return self.ListOfAnimations%5Bi%5D.make_frame(frame, birthframe, absstartframe, absstopframe, deathframe)%0A
|
|
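The cumulative-weight logic in the diff above reduces to a cumsum over normalized weights; a minimal standalone sketch of those two helpers, with hypothetical per-animation weights:

from itertools import tee
import numpy as np

def pairwise(iterable):
    # s -> (s0,s1), (s1,s2), ... (as defined in the diff above)
    a, b = tee(iterable)
    next(b, None)
    return zip(a, b)

def normalize(x):
    return x / sum(x)

weights = np.array([1.0, 2.0, 1.0])   # hypothetical per-animation time weights
cum = np.cumsum(normalize(weights))   # -> [0.25, 0.75, 1.0]
print(list(pairwise(cum)))            # consecutive segment boundaries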
d5b622e9fb855753630cd3a6fae1a315b4be1a08
|
Add example using new pytorch backend
|
examples/dominant_eigenvector_pytorch.py
|
examples/dominant_eigenvector_pytorch.py
|
Python
| 0 |
@@ -0,0 +1,1685 @@
+import numpy as np%0Aimport numpy.random as rnd%0Aimport numpy.linalg as la%0Aimport torch%0A%0Afrom pymanopt import Problem%0Afrom pymanopt.tools import decorators%0Afrom pymanopt.manifolds import Sphere%0Afrom pymanopt.solvers import TrustRegions%0A%0A%0Adef dominant_eigenvector(A):%0A %22%22%22%0A Returns the dominant eigenvector of the symmetric matrix A.%0A%0A Note: For the same A, this should yield the same as the dominant invariant%0A subspace example with p = 1.%0A %22%22%22%0A m, n = A.shape%0A assert m == n, %22matrix must be square%22%0A assert np.allclose(np.sum(A - A.T), 0), %22matrix must be symmetric%22%0A%0A manifold = Sphere(n)%0A solver = TrustRegions()%0A%0A A_ = torch.from_numpy(A)%0A%0A @decorators.pytorch%0A def cost(x):%0A return -x.matmul(A_.matmul(x))%0A%0A problem = Problem(manifold=manifold, cost=cost)%0A xopt = solver.solve(problem)%0A return xopt.squeeze()%0A%0A%0Aif __name__ == %22__main__%22:%0A # Generate random problem data.%0A n = 128%0A A = rnd.randn(n, n)%0A A = 0.5 * (A + A.T)%0A%0A # Calculate the actual solution by a conventional eigenvalue decomposition.%0A w, v = la.eig(A)%0A x = v%5B:, np.argmax(w)%5D%0A%0A # Solve the problem with pymanopt.%0A xopt = dominant_eigenvector(A)%0A%0A # Make sure both vectors have the same direction. Both are valid%0A # eigenvectors, of course, but for comparison we need to get rid of the%0A # ambiguity.%0A if np.sign(x%5B0%5D) != np.sign(xopt%5B0%5D):%0A xopt = -xopt%0A%0A # Print information about the solution.%0A print('')%0A print(%22l2-norm of x: %25f%22 %25 la.norm(x))%0A print(%22l2-norm of xopt: %25f%22 %25 la.norm(xopt))%0A print(%22solution found: %25s%22 %25 np.allclose(x, xopt, rtol=1e-3))%0A print(%22l2-error: %25f%22 %25 la.norm(x - xopt))%0A
|
|
7a9cb703e776d91d4fc3c632b190bd7d318a12a6
|
Create primary directions module
|
flatlib/predictives/primarydirections.py
|
flatlib/predictives/primarydirections.py
|
Python
| 0.000001 |
@@ -0,0 +1,1861 @@
+%22%22%22%0A This file is part of flatlib - (C) FlatAngle%0A Author: Jo%C3%A3o Ventura ([email protected])%0A %0A%0A This module implements the Primary Directions%0A method.%0A %0A%22%22%22%0A%0Afrom flatlib import angle%0Afrom flatlib import utils%0A%0A%0A%0A# === Base functions === #%0A%0Adef arc(pRA, pDecl, sRA, sDecl, mcRA, lat):%0A %22%22%22 Returns the arc of direction between a Promissor %0A and Significator. It uses the generic proportional %0A semi-arc method.%0A %0A %22%22%22%0A pDArc, pNArc = utils.dnarcs(pDecl, lat)%0A sDArc, sNArc = utils.dnarcs(sDecl, lat)%0A %0A # Select meridian and arcs to be used%0A # Default is MC and Diurnal arcs%0A mdRA = mcRA%0A sArc = sDArc%0A pArc = pDArc%0A if not utils.isAboveHorizon(sRA, sDecl, mcRA, lat):%0A # Use IC and Nocturnal arcs%0A mdRA = angle.norm(mcRA + 180)%0A sArc = sNArc%0A pArc = pNArc%0A %0A # Promissor and Significator distance to meridian%0A pDist = angle.closestdistance(mdRA, pRA)%0A sDist = angle.closestdistance(mdRA, sRA)%0A %0A # Promissor should be after significator (in degrees)%0A if pDist %3C sDist:%0A pDist += 360%0A %0A # Meridian distances proportional to respective semi-arcs%0A sPropDist = sDist / (sArc / 2.0)%0A pPropDist = pDist / (pArc / 2.0)%0A %0A # The arc is how much of the promissor's semi-arc is%0A # needed to reach the significator%0A return (pPropDist - sPropDist) * (pArc / 2.0)%0A%0A%0Adef getArc(prom, sig, mc, pos, zerolat):%0A %22%22%22 Returns the arc of direction between a promissor%0A and a significator. Arguments are also the MC, the%0A geoposition and zerolat to assume zero ecliptical %0A latitudes.%0A %0A ZeroLat true =%3E inZodiaco, false =%3E inMundo%0A %0A %22%22%22%0A pRA, pDecl = prom.eqCoords(zerolat)%0A sRa, sDecl = sig.eqCoords(zerolat)%0A mcRa, mcDecl = mc.eqCoords()%0A return arc(pRA, pDecl, sRa, sDecl, mcRa, pos.lat)
|
|
9fbde5b8dd4d2555e03bc0b7915fc4e55f8333d9
|
Add test to help module
|
numba/tests/test_help.py
|
numba/tests/test_help.py
|
Python
| 0 |
@@ -0,0 +1,2031 @@
+from __future__ import print_function%0A%0Aimport builtins%0Aimport types as pytypes%0A%0Aimport numpy as np%0A%0Afrom numba import types%0Afrom .support import TestCase%0Afrom numba.help.inspector import inspect_function, inspect_module%0A%0A%0Aclass TestInspector(TestCase):%0A def check_function_descriptor(self, info, must_be_defined=False):%0A self.assertIsInstance(info, dict)%0A self.assertIn('numba_type', info)%0A numba_type = info%5B'numba_type'%5D%0A if numba_type is None:%0A self.assertFalse(must_be_defined)%0A else:%0A self.assertIsInstance(numba_type, types.Type)%0A self.assertIn('explained', info)%0A self.assertIsInstance(info%5B'explained'%5D, str)%0A self.assertIn('source_infos', info)%0A self.assertIsInstance(info%5B'source_infos'%5D, dict)%0A%0A def test_inspect_function_on_range(self):%0A info = inspect_function(range)%0A self.check_function_descriptor(info, must_be_defined=True)%0A%0A def test_inspect_function_on_np_all(self):%0A info = inspect_function(np.all)%0A self.check_function_descriptor(info, must_be_defined=True)%0A source_infos = info%5B'source_infos'%5D%0A self.assertGreater(len(source_infos), 0)%0A c = 0%0A for srcinfo in source_infos.values():%0A self.assertIsInstance(srcinfo%5B'kind'%5D, str)%0A self.assertIsInstance(srcinfo%5B'name'%5D, str)%0A self.assertIsInstance(srcinfo%5B'sig'%5D, str)%0A self.assertIsInstance(srcinfo%5B'filename'%5D, str)%0A self.assertIsInstance(srcinfo%5B'lines'%5D, tuple)%0A self.assertIn('docstring', srcinfo)%0A c += 1%0A self.assertEqual(c, len(source_infos))%0A%0A def test_inspect_module(self):%0A c = 0%0A for it in inspect_module(builtins):%0A self.assertIsInstance(it%5B'module'%5D, pytypes.ModuleType)%0A self.assertIsInstance(it%5B'name'%5D, str)%0A self.assertTrue(callable(it%5B'obj'%5D))%0A self.check_function_descriptor(it)%0A c += 1%0A self.assertGreater(c, 0)%0A
|
|
e8607fce01bfe17c08de0702c4041d98504bc159
|
Add migration for changing CONTACTED_CHOICES
|
reunition/apps/alumni/migrations/0006_auto_20150823_2030.py
|
reunition/apps/alumni/migrations/0006_auto_20150823_2030.py
|
Python
| 0 |
@@ -0,0 +1,834 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('alumni', '0005_note'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='note',%0A name='contacted',%0A field=models.CharField(blank=True, max_length=10, null=True, choices=%5B(b'', b'No contact made'), (b'', b'---'), (b'email', b'Sent email'), (b'fb', b'Sent Facebook message'), (b'phone', b'Made phone call'), (b'text', b'Sent text message'), (b'other', b'Made other contact'), (b'', b'---'), (b'email-in', b'Received email'), (b'fb-in', b'Received Facebook message'), (b'phone-in', b'Received phone call'), (b'text-in', b'Received text message'), (b'other', b'Received other contact')%5D),%0A ),%0A %5D%0A
|
|
6be37e92139a5e6eacc5c43f8eb82fb45d3757f3
|
switch now has interface objects
|
kyco/core/switch.py
|
kyco/core/switch.py
|
# -*- coding: utf-8 *-*
"""Module with main classes related to Switches"""
import logging
from socket import error as SocketError
from socket import socket as Socket
from kyco.constants import CONNECTION_TIMEOUT
from kyco.utils import now
__all__ = ('Switch',)
log = logging.getLogger('Kyco')
class Connection(object):
def __init__(self, address, port, socket, switch=None):
self.address = address
self.port = port
self.socket = socket
self.switch = switch
@property
def id(self):
return (self.address, self.port)
def send(self, buffer):
try:
self.socket.send(buffer)
except (OSError, SocketError) as exception:
self.close()
# TODO: Raise or create an error event?
raise exception
def close(self):
if self.socket is not None:
self.socket.close()
self.socket = None # TODO: Is this really necessary?
if self.switch.connection is self:
self.switch.connection = None
def is_connected(self):
return self.socket is not None
def update_switch(self, switch):
self.switch = switch
self.switch.connection = self
class Switch(object):
"""This is the main class related to Switches modeled on Kyco.
A new Switch will be created every time the handshake process is done
(after receiving the first FeaturesReply). Considering this, the
:attr:`socket`, :attr:`connection_id`, :attr:`of_version` and
:attr:`features` need to be passed on init. But when the connection to the
switch is lost, then this attributes can be set to None (actually some of
them must be).
The :attr:`dpid` attribute will be the unique identifier of a Switch.
It is the :attr:`pyof.*.controller2switch.SwitchFeatures.datapath-id` that
defined by the OpenFlow Specification, it is a 64 bit field that should be
thought of as analogous to a Ethernet Switches bridge MAC, its a unique
identifier for the specific packet processing pipeline being managed. One
physical switch may have more than one datapath-id (think virtualization of
the switch).
:attr:`socket` is the request from a TCP connection, it represents the
effective connection between the switch and the controller.
:attr:`connection_id` is a tuple, composed by the ip and port of the
stabilished connection (if any). It will be used to help map the connection
to the Switch and vice-versa.
:attr:`ofp_version` is a string representing the accorded version of
python-openflow that will be used on the communication between the
Controller and the Switch.
:attr:`features` is an instance of
:class:`pyof.*.controller2switch.FeaturesReply` representing the current
featues of the switch.
Args:
dpid (): datapath_id of the switch
socket (socket): Socket/Request
connection_id (tuple): Tuple `(ip, port)`
ofp_version (string): Current talked OpenFlow version
features (FeaturesReply): FeaturesReply (from python-openflow) instance
"""
def __init__(self, dpid, connection=None, ofp_version='0x01', features=None):
self.dpid = dpid
self.connection = connection
self.ofp_version = ofp_version
self.features = features
self.firstseen = now()
self.lastseen = now()
self.sent_xid = None
self.waiting_for_reply = False
self.request_timestamp = 0
#: Dict associating mac addresses to switch ports.
#: the key of this dict is a mac_address, and the value is a set
#: containing the ports of this switch in which that mac can be
#: found.
self.mac2port = {}
#: This flood_table will keep track of flood packets to avoid over
#: flooding on the network. Its key is a hash composed by
#: (eth_type, mac_src, mac_dst) and the value is the timestamp of
#: the last flood.
self.flood_table = {}
if connection:
connection.switch = self
def disconnect(self):
"""Disconnect the switch.
"""
self.connection.close()
self.connection = None
log.info("Switch %s is disconnected", self.dpid)
def is_active(self):
return (now() - self.lastseen).seconds <= CONNECTION_TIMEOUT
def is_connected(self):
"""Verifies if the switch is connected to a socket.
"""
return self.connection.is_connected() and self.is_active()
def update_connection(self, connection):
self.connection = connection
self.connection.switch = self
def update_features(self, features):
self.features = features
def send(self, buffer):
"""Sends data to the switch.
Args:
buffer (bytes): bytes to be sent to the switch throught its
connection.
Raises:
# TODO: raise proper exceptions on the code
......: If the switch connection was connection.
......: If the passed `data` is not a bytes object
"""
if self.connection:
self.connection.send(buffer)
def update_lastseen(self):
self.lastseen = now()
def update_mac_table(self, mac, port_number):
if mac.value in self.mac2port:
self.mac2port[mac.value].add(port_number)
else:
self.mac2port[mac.value] = set([port_number])
def where_is_mac(self, mac):
try:
return list(self.mac2port[mac.value])
except KeyError as exception:
return None
|
Python
| 0 |
@@ -290,16 +290,276 @@
Kyco')%0A%0A
+class Interface(object):%0A def __init__(self, name, port_number, switch, address=None, state=None):%0A self.name = name%0A self.port_number = int(port_number)%0A self.switch = switch%0A self.address = address%0A self.state = state%0A%0A%0A
class Co
@@ -4306,16 +4306,45 @@
ble = %7B%7D
+%0A self.interfaces = %7B%7D
%0A%0A
@@ -5579,24 +5579,188 @@
en = now()%0A%0A
+ def update_interface(self, interface):%0A if interface.port_number not in self.interfaces:%0A self.interfaces%5Binterface.port_number%5D = interface%0A%0A
def upda
|
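A minimal sketch of the new interface bookkeeping, using only the constructors shown in this record (dpid and port values are hypothetical):

sw = Switch(dpid='00:00:00:00:00:00:00:01')
eth0 = Interface(name='eth0', port_number=1, switch=sw)
sw.update_interface(eth0)
assert sw.interfaces[1] is eth0   # interfaces are stored keyed by port_number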
6ac9744539711418ea031717ce83ab42ef1ab8a9
|
Fix help text about the default type
|
DCA/__main__.py
|
DCA/__main__.py
|
# Copyright 2016 Goekcen Eraslan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os, sys, argparse
def parse_args():
parser = argparse.ArgumentParser(description='Autoencoder')
parser.add_argument('input', type=str, help='Input is raw count data in TSV/CSV '
'or H5AD (anndata) format. '
'Row/col names are mandatory. Note that TSV/CSV files must be in '
'gene x cell layout where rows are genes and cols are cells (scRNA-seq '
'convention).'
'Use the -t/--transpose option if your count matrix in cell x gene layout. '
'H5AD files must be in cell x gene format (stats and scanpy convention).')
parser.add_argument('outputdir', type=str, help='The path of the output directory')
# IO and norm options
parser.add_argument('--normtype', type=str, default='zheng',
help='Type of size factor estimation. Possible values: deseq, zheng.'
' (default: zheng)')
parser.add_argument('-t', '--transpose', dest='transpose',
action='store_true', help='Transpose input matrix (default: False)')
parser.add_argument('--testsplit', dest='testsplit',
action='store_true', help="Use one fold as a test set (default: False)")
# training options
parser.add_argument('--type', type=str, default='zinb-conddisp',
help="Type of autoencoder. Possible values: normal, poisson, nb, "
"nb-shared, nb-conddisp, nb-fork, zinb(default), "
"zinb-shared, zinb-conddisp zinb-fork")
parser.add_argument('-b', '--batchsize', type=int, default=32,
help="Batch size (default:32)")
parser.add_argument('--sizefactors', dest='sizefactors',
action='store_true', help="Normalize means by library size (default: True)")
parser.add_argument('--nosizefactors', dest='sizefactors',
action='store_false', help="Do not normalize means by library size")
parser.add_argument('--norminput', dest='norminput',
action='store_true', help="Zero-mean normalize input (default: True)")
parser.add_argument('--nonorminput', dest='norminput',
action='store_false', help="Do not zero-mean normalize inputs")
parser.add_argument('--loginput', dest='loginput',
action='store_true', help="Log-transform input (default: True)")
parser.add_argument('--nologinput', dest='loginput',
action='store_false', help="Do not log-transform inputs")
parser.add_argument('-d', '--dropoutrate', type=str, default='0.0',
help="Dropout rate (default: 0)")
parser.add_argument('--batchnorm', dest='batchnorm', action='store_true',
help="Batchnorm (default: True)")
parser.add_argument('--nobatchnorm', dest='batchnorm', action='store_false',
help="Do not use batchnorm")
parser.add_argument('--l2', type=float, default=0.0,
help="L2 regularization coefficient (default: 0.0)")
parser.add_argument('--l1', type=float, default=0.0,
help="L1 regularization coefficient (default: 0.0)")
parser.add_argument('--l2enc', type=float, default=0.0,
help="Encoder-specific L2 regularization coefficient (default: 0.0)")
parser.add_argument('--l1enc', type=float, default=0.0,
help="Encoder-specific L1 regularization coefficient (default: 0.0)")
parser.add_argument('--ridge', type=float, default=0.0,
help="L2 regularization coefficient for dropout probabilities (default: 0.0)")
parser.add_argument('--gradclip', type=float, default=5.0,
help="Clip grad values (default: 5.0)")
parser.add_argument('--activation', type=str, default='relu',
help="Activation function of hidden units (default: relu)")
parser.add_argument('--optimizer', type=str, default='rmsprop',
help="Optimization method (default: rmsprop)")
parser.add_argument('--init', type=str, default='glorot_uniform',
help="Initialization method for weights (default: glorot_uniform)")
parser.add_argument('-e', '--epochs', type=int, default=300,
help="Max number of epochs to continue training in case of no "
"improvement on validation loss (default: 300)")
parser.add_argument('--earlystop', type=int, default=15,
help="Number of epochs to stop training if no improvement in loss "
"occurs (default: 15)")
parser.add_argument('--reducelr', type=int, default=10,
help="Number of epochs to reduce learning rate if no improvement "
"in loss occurs (default: 10)")
parser.add_argument('-s', '--hiddensize', type=str, default='64,32,64',
help="Size of hidden layers (default: 64,32,64)")
parser.add_argument('--inputdropout', type=float, default=0.0,
help="Input layer dropout probability"),
parser.add_argument('-r', '--learningrate', type=float, default=None,
help="Learning rate (default: 0.001)")
parser.add_argument('--saveweights', dest='saveweights',
action='store_true', help="Save weights (default: False)")
parser.add_argument('--no-saveweights', dest='saveweights',
action='store_false', help="Do not save weights")
parser.add_argument('--hyper', dest='hyper',
action='store_true', help="Optimizer hyperparameters (default: False)")
parser.add_argument('--hypern', dest='hypern', type=int, default=1000,
help="Number of samples drawn from hyperparameter distributions during optimization. "
"(default: 1000)")
parser.add_argument('--hyperepoch', dest='hyperepoch', type=int, default=100,
help="Number of epochs used in each hyperpar optimization iteration. "
"(default: 100)")
parser.add_argument('--debug', dest='debug',
action='store_true', help="Enable debugging. Checks whether every term in "
"loss functions is finite. (default: False)")
parser.add_argument('--tensorboard', dest='tensorboard',
action='store_true', help="Use tensorboard for saving weight distributions and "
"visualization. (default: False)")
parser.add_argument('--denoisesubset', dest='denoisesubset', type=str,
help='Perform denoising only for the subset of genes '
'in the given file. Gene names should be line '
'separated.')
parser.set_defaults(transpose=False,
testsplit=False,
saveweights=False,
sizefactors=True,
batchnorm=True,
norminput=True,
hyper=False,
debug=False,
tensorboard=False,
loginput=True)
return parser.parse_args()
def main():
args = parse_args()
try:
import tensorflow as tf
except ImportError:
raise ImportError('DCA requires tensorflow. Please follow instructions'
' at https://www.tensorflow.org/install/ to install'
' it.')
# import tf and the rest after parse_args() to make argparse help faster
from . import train
train.train_with_args(args)
|
Python
| 0.00032 |
@@ -2146,17 +2146,8 @@
zinb
-(default)
, %22%0A
@@ -2190,16 +2190,25 @@
conddisp
+(default)
zinb-fo
|
5db1a4c8c721a0acffa6e903c5eef9b84ebfd0d3
|
rename example to avoid namespace problem
|
examples/tutorials/scipy2008/traits_example.py
|
examples/tutorials/scipy2008/traits_example.py
|
Python
| 0 |
@@ -0,0 +1,1774 @@
+%0Afrom numpy import linspace, sin%0A%0Afrom enable.api import ColorTrait%0Afrom chaco.api import ArrayPlotData, Plot, marker_trait%0Afrom enable.component_editor import ComponentEditor%0Afrom traits.api import HasTraits, Instance, Int%0Afrom traitsui.api import Group, Item, View%0A%0Aclass ScatterPlotTraits(HasTraits):%0A%0A plot = Instance(Plot)%0A color = ColorTrait(%22blue%22)%0A marker = marker_trait%0A marker_size = Int(4)%0A%0A traits_view = View(%0A Group(Item('color', label=%22Color%22, style=%22custom%22),%0A Item('marker', label=%22Marker%22),%0A Item('marker_size', label=%22Size%22),%0A Item('plot', editor=ComponentEditor(), show_label=False),%0A orientation = %22vertical%22),%0A width=800, height=600, resizable=True,%0A title=%22Chaco Plot%22%0A )%0A%0A def __init__(self):%0A # Create the data and the PlotData object%0A x = linspace(-14, 14, 100)%0A y = sin(x) * x**3%0A plotdata = ArrayPlotData(x = x, y = y)%0A # Create a Plot and associate it with the PlotData%0A plot = Plot(plotdata)%0A # Create a line plot in the Plot%0A self.renderer = plot.plot((%22x%22, %22y%22), type=%22scatter%22, color=%22blue%22)%5B0%5D%0A self.plot = plot%0A%0A def _color_changed(self):%0A self.renderer.color = self.color%0A%0A def _marker_changed(self):%0A self.renderer.marker = self.marker%0A%0A def _marker_size_changed(self):%0A self.renderer.marker_size = self.marker_size%0A%0A#===============================================================================%0A# demo object that is used by the demo.py application.%0A#===============================================================================%0Ademo = ScatterPlotTraits()%0A%0Aif __name__ == %22__main__%22:%0A demo.configure_traits()%0A%0A
|
|
212d19c29a42bd6966965b166cdbb4dd642e5eb4
|
Add test-cases for `get_user_membership`
|
wqflask/tests/unit/wqflask/test_resource_manager.py
|
wqflask/tests/unit/wqflask/test_resource_manager.py
|
Python
| 0.000002 |
@@ -0,0 +1,1895 @@
+%22%22%22Test cases for wqflask/resource_manager.py%22%22%22%0Aimport unittest%0A%0Afrom unittest import mock%0Afrom wqflask.resource_manager import get_user_membership%0A%0A%0Aclass TestGetUserMembership(unittest.TestCase):%0A %22%22%22Test cases for %60get_user_membership%60%22%22%22%0A%0A def setUp(self):%0A conn = mock.MagicMock()%0A conn.hgetall.return_value = %7B%0A '7fa95d07-0e2d-4bc5-b47c-448fdc1260b2': (%0A '%7B%22name%22: %22editors%22, '%0A '%22admins%22: %5B%228ad942fe-490d-453e-bd37-56f252e41604%22, %22rand%22%5D, '%0A '%22members%22: %5B%228ad942fe-490d-453e-bd37-56f252e41603%22, '%0A '%22rand%22%5D, '%0A '%22changed_timestamp%22: %22Oct 06 2021 06:39PM%22, '%0A '%22created_timestamp%22: %22Oct 06 2021 06:39PM%22%7D')%7D%0A self.conn = conn%0A%0A def test_user_is_group_member_only(self):%0A %22%22%22Test that a user is only a group member%22%22%22%0A self.assertEqual(%0A get_user_membership(%0A conn=self.conn,%0A user_id=%228ad942fe-490d-453e-bd37-56f252e41603%22,%0A group_id=%227fa95d07-0e2d-4bc5-b47c-448fdc1260b2%22),%0A %7B%22member%22: True,%0A %22admin%22: False%7D)%0A%0A def test_user_is_group_admin_only(self):%0A %22%22%22Test that a user is a group admin only%22%22%22%0A self.assertEqual(%0A get_user_membership(%0A conn=self.conn,%0A user_id=%228ad942fe-490d-453e-bd37-56f252e41604%22,%0A group_id=%227fa95d07-0e2d-4bc5-b47c-448fdc1260b2%22),%0A %7B%22member%22: False,%0A %22admin%22: True%7D)%0A%0A def test_user_is_both_group_member_and_admin(self):%0A %22%22%22Test that a user is both an admin and member of a group%22%22%22%0A self.assertEqual(%0A get_user_membership(%0A conn=self.conn,%0A user_id=%22rand%22,%0A group_id=%227fa95d07-0e2d-4bc5-b47c-448fdc1260b2%22),%0A %7B%22member%22: True,%0A %22admin%22: True%7D)%0A
|
|
3bdbc33e94a601f5d903bd32caf5ad7698fc025e
|
Fix zulip.com hardcoding.
|
zerver/management/commands/initialize_voyager_db.py
|
zerver/management/commands/initialize_voyager_db.py
|
from __future__ import absolute_import
from typing import Any, Iterable, Tuple
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from zerver.models import UserProfile, Stream, Recipient, \
Subscription, Realm, get_client, email_to_username
from django.conf import settings
from zerver.lib.bulk_create import bulk_create_users
from zerver.lib.actions import set_default_streams, do_create_realm
from optparse import make_option
from six import text_type
settings.TORNADO_SERVER = None
def create_users(name_list, bot_type=None):
# type: (Iterable[Tuple[text_type, text_type]], int) -> None
realms = {}
for realm in Realm.objects.all():
realms[realm.domain] = realm
user_set = set()
for full_name, email in name_list:
short_name = email_to_username(email)
user_set.add((email, full_name, short_name, True))
bulk_create_users(realms, user_set, bot_type)
class Command(BaseCommand):
help = "Populate an initial database for Zulip Voyager"
option_list = BaseCommand.option_list + (
make_option('--extra-users',
dest='extra_users',
type='int',
default=0,
help='The number of extra users to create'),
)
def handle(self, *args, **options):
# type: (*Any, **Any) -> None
Realm.objects.create(domain="zulip.com")
names = [(settings.FEEDBACK_BOT_NAME, settings.FEEDBACK_BOT)]
create_users(names, bot_type=UserProfile.DEFAULT_BOT)
get_client("website")
get_client("API")
internal_bots = [(bot['name'], bot['email_template'] % (settings.INTERNAL_BOT_DOMAIN,))
for bot in settings.INTERNAL_BOTS]
create_users(internal_bots, bot_type=UserProfile.DEFAULT_BOT)
# Set the owners for these bots to the bots themselves
bots = UserProfile.objects.filter(email__in=[bot_info[1] for bot_info in internal_bots])
for bot in bots:
bot.bot_owner = bot
bot.save()
# Initialize the email gateway bot as an API Super User
email_gateway_bot = UserProfile.objects.get(email__iexact=settings.EMAIL_GATEWAY_BOT)
email_gateway_bot.is_api_super_user = True
email_gateway_bot.save()
(admin_realm, _) = do_create_realm(settings.ADMIN_DOMAIN,
settings.ADMIN_DOMAIN, True)
set_default_streams(admin_realm, settings.DEFAULT_NEW_REALM_STREAMS)
self.stdout.write("Successfully populated database with initial data.\n")
site = Site.objects.get_current()
site.domain = settings.EXTERNAL_HOST
site.save()
|
Python
| 0 |
@@ -1420,19 +1420,36 @@
ain=
-%22zulip.com%22
+settings.INTERNAL_BOT_DOMAIN
)%0A%0A
|
b2febfd4b52c1e50be4d8ba614adcbe4d59251d8
|
Add blank init file
|
SDK/__init__.py
|
SDK/__init__.py
|
Python
| 0 |
@@ -0,0 +1 @@
+%0A
|
|
fc58a85131675672ccef2302038cc55c9e4b0460
|
Migrate products
|
c2corg_api/scripts/migration/documents/products.py
|
c2corg_api/scripts/migration/documents/products.py
|
Python
| 0.000002 |
@@ -0,0 +1,3428 @@
+from c2corg_api.models.document import DocumentGeometry, %5C%0A ArchiveDocumentGeometry%0Afrom c2corg_api.models.waypoint import Waypoint, ArchiveWaypoint, %5C%0A WaypointLocale, ArchiveWaypointLocale%0Afrom c2corg_api.scripts.migration.documents.document import MigrateDocuments%0A%0A%0Aclass MigrateProducts(MigrateDocuments):%0A%0A def get_name(self):%0A return 'products'%0A%0A def get_model_document(self, locales):%0A return WaypointLocale if locales else Waypoint%0A%0A def get_model_archive_document(self, locales):%0A return ArchiveWaypointLocale if locales else ArchiveWaypoint%0A%0A def get_model_geometry(self):%0A return DocumentGeometry%0A%0A def get_model_archive_geometry(self):%0A return ArchiveDocumentGeometry%0A%0A def get_count_query(self):%0A return (%0A 'select count(*) from app_products_archives;'%0A )%0A%0A def get_query(self):%0A return (%0A 'select '%0A ' id, document_archive_id, is_latest_version, elevation, '%0A ' is_protected, redirects_to, '%0A ' ST_Force2D(ST_SetSRID(geom, 3857)) geom, '%0A ' product_type, url '%0A 'from app_products_archives '%0A 'order by id, document_archive_id;'%0A )%0A%0A def get_count_query_locales(self):%0A return (%0A 'select count(*) from app_products_i18n_archives;'%0A )%0A%0A def get_query_locales(self):%0A return (%0A 'select '%0A ' id, document_i18n_archive_id, is_latest_version, culture, '%0A ' name, description, hours, access '%0A 'from app_products_i18n_archives '%0A 'order by id, document_i18n_archive_id;'%0A )%0A%0A def get_document(self, document_in, version):%0A return dict(%0A document_id=document_in.id,%0A version=version,%0A waypoint_type='local_product',%0A elevation=document_in.elevation,%0A product_types=self.convert_types(%0A document_in.product_type,%0A MigrateProducts.product_types, %5B0%5D),%0A url=document_in.url%0A )%0A%0A def get_document_archive(self, document_in, version):%0A doc = self.get_document(document_in, version)%0A doc%5B'id'%5D = document_in.document_archive_id%0A return doc%0A%0A def get_document_geometry(self, document_in, version):%0A return dict(%0A document_id=document_in.id,%0A id=document_in.id,%0A version=version,%0A geom=document_in.geom%0A )%0A%0A def get_document_geometry_archive(self, document_in, version):%0A doc = self.get_document_geometry(document_in, version)%0A doc%5B'id'%5D = document_in.document_archive_id%0A return doc%0A%0A def get_document_locale(self, document_in, version):%0A # TODO extract summary%0A return dict(%0A document_id=document_in.id,%0A id=document_in.document_i18n_archive_id,%0A version=version,%0A culture=document_in.culture,%0A title=document_in.name,%0A description=document_in.description,%0A access=document_in.access,%0A access_period=document_in.hours%0A )%0A%0A def get_document_locale_archive(self, document_in, version):%0A return self.get_document_locale(document_in, version)%0A%0A product_types = %7B%0A '1': 'farm_sale',%0A '2': 'restaurant',%0A '3': 'grocery',%0A '4': 'bar',%0A '5': 'sport_shop'%0A %7D%0A
|
|
9719189501f8b0fcff186b1bc2130fcef8d21e8d
|
add movie scraper
|
scrape_rotten/scrape_rotten/spiders/movie_spider.py
|
scrape_rotten/scrape_rotten/spiders/movie_spider.py
|
Python
| 0.000009 |
@@ -0,0 +1,1315 @@
+import scrapy%0A%0Adef get_urls():%0A %0A # load from file%0A with open('movie_urls.json') as f:%0A return %5Bline.rstrip() for line in f%5D%0A %0Aclass MovieSpider(scrapy.Spider):%0A name = 'movies'%0A start_urls = get_urls()%0A%0A def meta_property(self, response, prop):%0A return response.xpath(%22//meta%5B@property='%7B%7D'%5D/@content%22.format(prop)).extract()%0A%0A def parse(self, response):%0A data = %7B'url': response.url%7D%0A%0A movie_url_handle = response.url.split('/')%0A poster_url = response.css('img.posterImage::attr(src)').extract()%0A movie_title = self.meta_property(response, 'og:title')%0A description = self.meta_property(response, 'og:description') %0A rotten_id = self.meta_property(response, 'movieID')%0A year = response.css(%22h1#movie-title%22).xpath('span/text()').extract() %0A%0A if movie_url_handle:%0A data%5B'movie_url_handle'%5D = movie_url_handle%5B-1%5D%0A%0A if poster_url:%0A data%5B'poster_url'%5D = poster_url%5B0%5D%0A%0A if movie_title:%0A data%5B'movie_title'%5D = movie_title%5B0%5D%0A%0A if description:%0A data%5B'description'%5D = description%5B0%5D%0A%0A if rotten_id:%0A data%5B'rt_id'%5D = rotten_id%5B0%5D%0A%0A if year:%0A data%5B'year'%5D = year%5B0%5D.replace('(', '').replace(')', '').strip()%0A%0A yield data%0A%0A%0A
|
|
052832a766e296a3444cb7afd5b5a930013d18d6
|
Create z04-convolutional-neural-network.py
|
skflow-examples/z04-convolutional-neural-network.py
|
skflow-examples/z04-convolutional-neural-network.py
|
Python
| 0.000003 |
@@ -0,0 +1,1530 @@
+# http://terrytangyuan.github.io/2016/03/14/scikit-flow-intro/%0A%0A%0A# Loading MNIST data%0Amnist = input_data.read_data_sets('MNIST_data')%0A%0Adef max_pool_2x2(tensor_in):%0A return tf.nn.max_pool(tensor_in, ksize=%5B1, 2, 2, 1%5D, strides=%5B1, 2, 2, 1%5D,%0A padding='SAME')%0A%0Adef conv_model(X, y):%0A # reshape X to 4d tensor with 2nd and 3rd dimensions being image width and height%0A # final dimension being the number of color channels%0A X = tf.reshape(X, %5B-1, 28, 28, 1%5D)%0A # first conv layer will compute 32 features for each 5x5 patch%0A with tf.variable_scope('conv_layer1'):%0A h_conv1 = skflow.ops.conv2d(X, n_filters=32, filter_shape=%5B5, 5%5D, %0A bias=True, activation=tf.nn.relu)%0A h_pool1 = max_pool_2x2(h_conv1)%0A # second conv layer will compute 64 features for each 5x5 patch%0A with tf.variable_scope('conv_layer2'):%0A h_conv2 = skflow.ops.conv2d(h_pool1, n_filters=64, filter_shape=%5B5, 5%5D, %0A bias=True, activation=tf.nn.relu)%0A h_pool2 = max_pool_2x2(h_conv2)%0A # reshape tensor into a batch of vectors%0A h_pool2_flat = tf.reshape(h_pool2, %5B-1, 7 * 7 * 64%5D)%0A # densely connected layer with 1024 neurons%0A h_fc1 = skflow.ops.dnn(h_pool2_flat, %5B1024%5D, activation=tf.nn.relu, keep_prob=0.5)%0A return skflow.models.logistic_regression(h_fc1, y)%0A%0A# Training and predicting%0Aclassifier = skflow.TensorFlowEstimator(%0A model_fn=conv_model, n_classes=10, batch_size=100, steps=20000,%0A learning_rate=0.001)%0A
|
|
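The snippet in this record omits its imports; a hedged guess at what the referenced names would resolve to in the 2016-era TensorFlow/skflow stack it targets:

import tensorflow as tf
import skflow   # the standalone pre-tf.contrib.learn package this example assumes
from tensorflow.examples.tutorials.mnist import input_data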
b9ff8fc06f9bd55721332831d4ce23589d93fafb
|
Create 3Sum.py
|
leetcode/15.-3Sum/3Sum.py
|
leetcode/15.-3Sum/3Sum.py
|
Python
| 0.000002 |
@@ -0,0 +1,1296 @@
+class Solution(object):%0A def threeSum(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: List%5BList%5Bint%5D%5D%0A %22%22%22%0A res = %5B%5D%0A sortnum = sorted(nums)%0A length = len(sortnum)%0A # make sure a %3C b %3C c%0A for i in xrange(length-2):%0A a = sortnum%5Bi%5D%0A # remove duplicate a%0A if i %3E= 1 and a == sortnum%5Bi-1%5D:%0A continue%0A j = i + 1%0A k = length - 1%0A while j %3C k:%0A b = sortnum%5Bj%5D%0A c = sortnum%5Bk%5D%0A if b + c == -a:%0A res.append(%5Ba,b,c%5D)%0A # remove duplicate b,c%0A while j %3C k:%0A j += 1%0A k -= 1%0A if sortnum%5Bj%5D != b or sortnum%5Bk%5D != c:%0A break%0A elif b + c %3E -a:%0A # remove duplicate c%0A while j %3C k:%0A k -= 1%0A if sortnum%5Bk%5D != c:%0A break%0A else:%0A # remove duplicate b%0A while j %3C k:%0A j += 1%0A if sortnum%5Bj%5D != b:%0A break%0A return res%0A
|
|
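A quick driver for the Solution class in this record (illustrative input; output traced by hand through the dedup logic above):

sol = Solution()
print(sol.threeSum([-1, 0, 1, 2, -1, -4]))   # -> [[-1, -1, 2], [-1, 0, 1]]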
80bf107b29f51456f778da718ef438fd62545b1b
|
Add server test file
|
pi_approach/UI/server.py
|
pi_approach/UI/server.py
|
Python
| 0.000001 |
@@ -0,0 +1,1134 @@
+import socket%0A %0AHOST = socket.gethostname() + '.local' # Server IP or Hostname%0APORT = 12345 # Pick an open Port (1000+ recommended), must match the client sport%0As = socket.socket(socket.AF_INET, socket.SOCK_STREAM)%0Aprint 'Socket created'%0A %0A#managing error exception%0Atry:%0A s.bind((HOST, PORT))%0A print %22Opened%22%0Aexcept socket.error:%0A print 'Bind failed '%0A %0A %0As.listen(5)%0Aprint 'Socket awaiting messages'%0A(conn, addr) = s.accept()%0Aprint 'Connected'%0A %0A# awaiting for message%0Awhile True: %0A data = conn.recv(1024)%0A print 'I sent a message back in response to: ' + data%0A reply = ''%0A %0A # process your message%0A if data == 'Hello': %0A reply = 'Hi, back!' %0A elif data == 'This is important': %0A reply = 'OK, I have done the important thing you have asked me!'%0A %0A #and so on and on until...%0A elif data == 'quit': %0A conn.send('Terminating') %0A break %0A else:%0A reply = 'Unknown command'%0A %0A # Sending reply%0A conn.send(reply)%0Aconn.close() # Close connections%0A
|
|
691b27a4d97d5c2966f1627ed6cc5870024537c0
|
add bouncy example
|
06-animation/bouncy.py
|
06-animation/bouncy.py
|
Python
| 0.000007 |
@@ -0,0 +1,1212 @@
+def setup():%0A%09size(300,300)%0A%09# ball properties%0A%09global rad, d, pos, vel, grav%0A%09rad = 25%09%09 # radius of the ball%0A%09pos = PVector( 150, 50 )%09%09# initial position of the ball%0A%09vel = PVector( random(-3,3), random(-3,3) )%09 # velocity of the balll%0A%09grav = PVector( 0, 0.9 )%09%09# force on the ball (gravity)%0A%09d = 0.97%09%09 # how much bounce?%0A%0Adef draw():%0A%09%22%22%22 update the ball's state and draw it every frame %22%22%22%0A%09global rad, d, pos, vel, grav%0A%09# update the velocity with the force%0A%09vel.add(grav)%0A%09%0A%09# update the position with the velocity%0A%09pos.add(vel)%0A%09%0A%09# deal with wall collisions%0A%09if(pos.y %3E height-rad):%09 # floor collision%0A%09%09pos.y = height-rad%0A%09%09vel.y = -vel.y%0A%09%09vel.mult(d)%0A%09%0A%09if(pos.x %3C rad):%09%09 # left wall collision%0A%09%09pos.x = rad%0A%09%09vel.x = -vel.x%0A%09%09vel.mult(d)%0A%09%0A%09if(pos.x %3E width-rad):%09 # right wall collision%0A%09%09pos.x = width-rad%0A%09%09vel.x = -vel.x%0A%09%09vel.mult(d)%0A%09%0A%09%0A%09# draw the scene%0A%09background(150)%09 # refresh the background%0A%09strokeWeight(2)%0A%09%0A%09fill(20,160,240)%0A%09ellipse( pos.x, pos.y, rad*2, rad*2) # draw the ball%0A%0A%0Adef mousePressed():%0A%09%22%22%22 If the ball is clicked, add a random velocity. %22%22%22%0A%09global rad, pos%0A%09if( dist(mouseX,mouseY,pos.x,pos.y) %3C rad ):%0A%09%09vel.add( PVector(random(-3,3), random(10,20)) )%0A%0A%0A
|
|
1f6e225a1b01e8eb4cd9f1d5da05455d85326064
|
Validate ck_user_has_mobile_or_other_auth constraint
|
migrations/versions/0357_validate_constraint.py
|
migrations/versions/0357_validate_constraint.py
|
Python
| 0 |
@@ -0,0 +1,569 @@
+%22%22%22%0A%0ARevision ID: 0357_validate_constraint%0ARevises: 0356_add_webautn_auth_type%0ACreate Date: 2021-05-13 14:15:25.259991%0A%0A%22%22%22%0Afrom alembic import op%0A%0Arevision = '0357_validate_constraint'%0Adown_revision = '0356_add_webautn_auth_type'%0A%0A%0Adef upgrade():%0A # ### commands auto generated by Alembic - please adjust! ###%0A op.execute('ALTER TABLE users VALIDATE CONSTRAINT %22ck_user_has_mobile_or_other_auth%22')%0A # ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A # ### commands auto generated by Alembic - please adjust! ###%0A pass%0A # ### end Alembic commands ###%0A
|
|
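The VALIDATE in this migration is the second half of Postgres's two-step constraint pattern; the first half, sketched here with the CHECK expression elided (it lives in the referenced 0356 migration), adds the constraint as NOT VALID so existing rows are not scanned under the ALTER lock:

op.execute(
    'ALTER TABLE users ADD CONSTRAINT "ck_user_has_mobile_or_other_auth" '
    'CHECK (<expression from 0356>) NOT VALID'   # sketch only; expression elided
)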
8387b0289e84ededdd9ba3db5ba47f149b918530
|
clean up batch submit script
|
dnanexus/dx_batch.py
|
dnanexus/dx_batch.py
|
Python
| 0 |
@@ -0,0 +1,1998 @@
+#!/usr/bin/env python%0Aimport argparse%0Aimport os%0Aimport sys%0Aimport subprocess%0A%0Aimport dxpy%0Aimport requests%0Afrom dxencode import dxencode as dxencode%0A%0ASERVER = 'https://www.encodeproject.org'%0AASSAY_TYPE = 'whole genome bisulfite sequencing'%0AASSAY_TERM_ID = 'OBI:0001863'%0AHEADERS = %7B'content-type': 'application/json'%7D%0A%0A%0Adef get_args():%0A '''Parse the input arguments.'''%0A ap = argparse.ArgumentParser(description='Set up DNA Methylation runs on DNA Nexus')%0A%0A ap.add_argument('-t', '--test',%0A help='Use test input folder',%0A action='store_true',%0A required=False)%0A%0A ap.add_argument('-n', '--numberjobs',%0A help='Maximum Number of jobs to run',%0A type=int,%0A required=False)%0A%0A%0A return ap.parse_args()%0A%0A%0Adef main():%0A cmnd = get_args()%0A%0A ## resolve projects%0A (AUTHID, AUTHPW, SERVER) = dxencode.processkey('www')%0A%0A query = '/search/?type=experiment&assay_term_id=%25s&award.rfa=ENCODE3&limit=all&files.file_format=fastq&frame=embedded&replicates.library.biosample.donor.organism.name=mouse' %25 ASSAY_TERM_ID%0A res = requests.get(SERVER+query, headers=HEADERS, auth=(AUTHID, AUTHPW),allow_redirects=True, stream=True)%0A%0A exps = res.json()%5B'@graph'%5D%0A%0A n=0%0A for exp in exps:%0A acc = exp%5B'accession'%5D%0A if n %3E= cmnd.numberjobs:%0A print %22Stopping at %25s replicates%22 %25 n%0A break%0A for rep in exp.get('replicates', %5B%5D):%0A try:%0A runcmd = %22./launchDnaMe.py --gzip -e %25s --br %25s --tr %25s %3E runs/launch%25s-%25s-%25s.out%22 %25 (acc, rep%5B'biological_replicate_number'%5D, rep%5B'technical_replicate_number'%5D,acc, rep%5B'biological_replicate_number'%5D, rep%5B'technical_replicate_number'%5D)%0A print runcmd%0A if not cmnd.test:%0A os.system(runcmd)%0A n+=1%0A except KeyError, e:%0A print %22%25s failed: %25s%22 %25 (acc, e)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
9e217f0641328e1dfce91cdffdb8b5d77e4fe8fa
|
Add segcnn
|
examples/human_sar/segcnn.py
|
examples/human_sar/segcnn.py
|
Python
| 0.000084 |
@@ -0,0 +1,2516 @@
+import os%0Aimport sys%0Aimport cPickle%0Aimport theano.tensor as T%0A%0Ahomepath = os.path.join('..', '..')%0A%0Aif not homepath in sys.path:%0A sys.path.insert(0, homepath)%0A%0Afrom dlearn.models.layer import FullConnLayer, ConvPoolLayer%0Afrom dlearn.models.nnet import NeuralNet%0Afrom dlearn.utils import actfuncs, costfuncs%0Afrom dlearn.optimization import sgd%0A%0A%0Adef load_data():%0A with open('data.pkl', 'rb') as f:%0A dataset = cPickle.load(f)%0A%0A return dataset%0A%0A%0Adef load_attr_model():%0A with open('scpool.pkl', 'rb') as f:%0A attr_model = cPickle.load(f)%0A%0A return attr_model%0A%0A%0Adef train_model(dataset, attr_model):%0A X = T.tensor4()%0A A = T.matrix()%0A S = T.tensor3()%0A%0A layers = %5B%5D%0A layers.append(ConvPoolLayer(%0A input=X * S.dimshuffle(0, 'x', 1, 2),%0A input_shape=(3, 160, 80),%0A filter_shape=(32, 3, 5, 5),%0A pool_shape=(2, 2),%0A active_func=actfuncs.tanh,%0A flatten=False,%0A W=attr_model.blocks%5B0%5D._W,%0A b=attr_model.blocks%5B0%5D._b%0A ))%0A%0A layers.append(ConvPoolLayer(%0A input=layers%5B-1%5D.output,%0A input_shape=layers%5B-1%5D.output_shape,%0A filter_shape=(64, 32, 5, 5),%0A pool_shape=(2, 2),%0A active_func=actfuncs.tanh,%0A flatten=True,%0A W=attr_model.blocks%5B0%5D._W,%0A b=attr_model.blocks%5B0%5D._b%0A ))%0A%0A layers.append(FullConnLayer(%0A input=layers%5B-1%5D.output,%0A input_shape=layers%5B-1%5D.output_shape,%0A output_shape=128,%0A dropout_ratio=0.1,%0A active_func=actfuncs.tanh%0A ))%0A%0A layers.append(FullConnLayer(%0A input=layers%5B-1%5D.output,%0A input_shape=layers%5B-1%5D.output_shape,%0A output_shape=37 * 17,%0A dropout_input=layers%5B-1%5D.dropout_output,%0A active_func=actfuncs.sigmoid%0A ))%0A%0A model = NeuralNet(layers, %5BX, A%5D, layers%5B-1%5D.output)%0A model.target = S%0A model.cost = costfuncs.binxent(layers%5B-1%5D.dropout_output, S.flatten(2)) + %5C%0A 1e-3 * model.get_norm(2)%0A model.error = costfuncs.binerr(layers%5B-1%5D.output, S.flatten(2))%0A model.consts = layers.blocks%5B0%5D.parameters + layers.blocks%5B1%5D.parameters%0A%0A sgd.train(model, dataset, lr=1e-2, momentum=0.9,%0A batch_size=100, n_epochs=300,%0A epoch_waiting=10)%0A%0A return model%0A%0A%0Adef save_model(model):%0A with open('model_segcnn.pkl', 'wb') as f:%0A cPickle.dump(model, f, cPickle.HIGHEST_PROTOCOL)%0A%0A%0Aif __name__ == '__main__':%0A dataset = load_data()%0A attr_model = load_attr_model()%0A model = train_model(dataset)%0A save_model(model)%0A
|
|
ef627493f87d60e404008b26fe13e816d492a333
|
add a bluetooth test component that simply displays when a device moves on the network
|
python/test_bluetooth.py
|
python/test_bluetooth.py
|
Python
| 0 |
@@ -0,0 +1,2228 @@
+#!/usr/bin/python%0A# -- Content-Encoding: UTF-8 --%0A%22%22%22%0ATest bluetooth module that displays informations from%0Abluetooth and print a message when a bluetooth device%0Aappears or disappears.%0A%0A:author: Luc Libralesso%0A:copyright: Copyright 2014, isandlaTech%0A:license: Apache License 2.0%0A:version: 0.0.3%0A:status: Alpha%0A%0A..%0A%0A Copyright 2014 isandlaTech%0A%0A Licensed under the Apache License, Version 2.0 (the %22License%22);%0A you may not use this file except in compliance with the License.%0A You may obtain a copy of the License at%0A%0A http://www.apache.org/licenses/LICENSE-2.0%0A%0A Unless required by applicable law or agreed to in writing, software%0A distributed under the License is distributed on an %22AS IS%22 BASIS,%0A WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A See the License for the specific language governing permissions and%0A limitations under the License.%0A%22%22%22%0A%0A# Module version%0A__version_info__ = (0, 0, 3)%0A__version__ = %22.%22.join(str(x) for x in __version_info__)%0A%0A# Documentation strings format%0A__docformat__ = %22restructuredtext en%22%0A%0A# ------------------------------------------------------------------------------%0A%0Afrom pelix.ipopo.decorators import ComponentFactory, Instantiate, Requires, Validate%0Aimport logging%0Aimport herald.utils%0A%0A# ------------------------------------------------------------------------------%0A%0A_logger = logging.getLogger(__name__)%0A%0A# ------------------------------------------------------------------------------%0A%0A%0A@ComponentFactory(%22herald-bluetooth-test-factory%22)%0A@Requires('_discovery', herald.transports.bluetooth.BLUETOOTH_DISCOVERY_SERVICE)%0A@Instantiate('herald-bluetooth-test-test')%0Aclass BluetoothTest:%0A %22%22%22 A simple Test bluetooth module that displays information from%0A bluetooth and print a message when a bluetooth device%0A appears or disappears.%0A %22%22%22%0A%0A def __init__(self):%0A self._discovery = None%0A%0A @Validate%0A def validate(self, _):%0A # ask to be notified when there is a new device in the bluetooth network%0A self._discovery.listen_new(lambda x: print(x+%22 appears%22))%0A self._discovery.listen_del(lambda x: print(x+%22 disappears%22))%0A print('LISTENING TO THE BLUETOOTH NETWORK !')%0A
|
|
a1e3e275a81ff073fed226619bde23361230cfce
|
Add tests for packaging.tests.support (#12659).
|
Lib/packaging/tests/test_support.py
|
Lib/packaging/tests/test_support.py
|
Python
| 0 |
@@ -0,0 +1,2653 @@
+import os%0Aimport tempfile%0A%0Afrom packaging.dist import Distribution%0Afrom packaging.tests import support, unittest%0A%0A%0Aclass TestingSupportTestCase(unittest.TestCase):%0A%0A def test_fake_dec(self):%0A @support.fake_dec(1, 2, k=3)%0A def func(arg0, *args, **kargs):%0A return arg0, args, kargs%0A self.assertEqual(func(-1, -2, k=-3), (-1, (-2,), %7B'k': -3%7D))%0A%0A def test_TempdirManager(self):%0A files = %7B%7D%0A%0A class Tester(support.TempdirManager, unittest.TestCase):%0A%0A def test_mktempfile(self2):%0A tmpfile = self2.mktempfile()%0A files%5B'test_mktempfile'%5D = tmpfile.name%0A self.assertTrue(os.path.isfile(tmpfile.name))%0A%0A def test_mkdtemp(self2):%0A tmpdir = self2.mkdtemp()%0A files%5B'test_mkdtemp'%5D = tmpdir%0A self.assertTrue(os.path.isdir(tmpdir))%0A%0A def test_write_file(self2):%0A tmpdir = self2.mkdtemp()%0A files%5B'test_write_file'%5D = tmpdir%0A self2.write_file((tmpdir, 'file1'), 'me file 1')%0A file1 = os.path.join(tmpdir, 'file1')%0A self.assertTrue(os.path.isfile(file1))%0A text = ''%0A with open(file1, 'r') as f:%0A text = f.read()%0A self.assertEqual(text, 'me file 1')%0A%0A def test_create_dist(self2):%0A project_dir, dist = self2.create_dist()%0A files%5B'test_create_dist'%5D = project_dir%0A self.assertTrue(os.path.isdir(project_dir))%0A self.assertIsInstance(dist, Distribution)%0A%0A def test_assertIsFile(self2):%0A fd, fn = tempfile.mkstemp()%0A os.close(fd)%0A self.addCleanup(support.unlink, fn)%0A self2.assertIsFile(fn)%0A self.assertRaises(AssertionError, self2.assertIsFile, 'foO')%0A%0A def test_assertIsNotFile(self2):%0A tmpdir = self2.mkdtemp()%0A self2.assertIsNotFile(tmpdir)%0A%0A tester = Tester()%0A for name in ('test_mktempfile', 'test_mkdtemp', 'test_write_file',%0A 'test_create_dist', 'test_assertIsFile',%0A 'test_assertIsNotFile'):%0A tester.setUp()%0A try:%0A getattr(tester, name)()%0A finally:%0A tester.tearDown()%0A%0A # check clean-up%0A if name in files:%0A self.assertFalse(os.path.exists(files%5Bname%5D))%0A%0A%0Adef test_suite():%0A return unittest.makeSuite(TestingSupportTestCase)%0A%0Aif __name__ == %22__main__%22:%0A unittest.main(defaultTest=%22test_suite%22)%0A
|
|
5a59b5b96e223da782cf683aabbf4e8371c883e1
|
Add DHKE protocol
|
cryptos/dhke.py
|
cryptos/dhke.py
|
Python
| 0.000001 |
@@ -0,0 +1,1697 @@
+%22%22%22%0AImplementation of the Diffie-Hellman Key Exchange Protocol%0A%0AUsage:%0A%0A # Setup%0A invoker = DHKEInvoker()%0A other = DHKEParty(invoker.get_param())%0A%0A # Key exchange phase%0A other.receive_partial_key(invoker.get_partial_key())%0A invoker.receive_partial_key(other.get_partial_key())%0A%0A # Check consistency%0A assert(invoker.get_key() == other.get_key)%0A%22%22%22%0A%0Afrom .numt import randprime, modulo_exp%0Afrom random import randint%0A%0A%0A__author__ = 'Divyanshu Kakwani'%0A__license__ = 'MIT'%0A%0A%0Adef DHKEparam_gen(primelen=10):%0A %22%22%22%0A Generates parameters for the DHKE Protocol%0A %22%22%22%0A prime = randprime(10**(primelen-1), 10**primelen)%0A alpha = randint(2, prime-2)%0A return (prime, alpha)%0A%0A%0Aclass DHKEParty:%0A %22%22%22%0A Represents a party involved in DHKE Protocol%0A %22%22%22%0A%0A def __init__(self, param):%0A self.prime = param%5B0%5D%0A self.alpha = param%5B1%5D%0A self.secret = randint(2, self.prime-2)%0A self.Ka = modulo_exp(self.alpha, self.secret, self.prime)%0A self.Kb = None%0A%0A def get_param(self):%0A return (self.prime, self.alpha)%0A%0A def get_partial_key(self):%0A return self.Ka%0A%0A def receive_partial_key(self, Kb):%0A self.Kb = Kb%0A self.final_key = modulo_exp(Kb, self.secret, self.prime)%0A%0A def get_key(self):%0A if not self.Kb:%0A raise Exception('Partial key not received')%0A return self.final_key%0A%0A%0Aclass DHKEInvoker(DHKEParty):%0A %22%22%22%0A The party which invokes the DHKE Protocol. A DHKEInvoker%0A differs from a DHKEParty in that it has to generate the%0A DHKE parameters at the outset.%0A %22%22%22%0A%0A def __init__(self):%0A param = DHKEparam_gen()%0A DHKEParty.__init__(self, param)%0A
|
|
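The dhke.py docstring already spells out the handshake; a short hedged walk-through of the full round trip, assuming cryptos is a package containing both dhke.py and the numt helpers it imports:

from cryptos.dhke import DHKEInvoker, DHKEParty

invoker = DHKEInvoker()                  # picks (prime, alpha) and a secret
other = DHKEParty(invoker.get_param())   # reuses the public parameters

# each side sends alpha**secret mod prime to the other
other.receive_partial_key(invoker.get_partial_key())
invoker.receive_partial_key(other.get_partial_key())

# both parties now derive the identical shared key
assert invoker.get_key() == other.get_key()

Calling get_key() before receive_partial_key() raises, since the shared key cannot exist until a partial key has arrived.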
088cd2ddb79bdd2a8dd68e2d7169484eea90fd1a
|
Add problem79.py
|
euler_python/problem79.py
|
euler_python/problem79.py
|
Python
| 0.000088 |
@@ -0,0 +1,2140 @@
+%22%22%22%0Aproblem79.py%0A%0AA common security method used for online banking is to ask the user for three%0Arandom characters from a passcode. For example, if the passcode was 531278, they%0Amay ask for the 2nd, 3rd, and 5th characters; the expected reply would be: 317.%0A%0AThe text file, keylog.txt, contains fifty successful login attempts.%0A%0AGiven that the three characters are always asked for in order, analyse the file%0Aso as to determine the shortest possible secret passcode of unknown length.%0A%22%22%22%0Afrom collections import defaultdict, deque%0Afrom itertools import dropwhile%0A%0Adef to_digits(num):%0A return map(int, str(num))%0A%0Adef to_num(digits):%0A return int(''.join(map(str, digits)))%0A%0A# Use 'breadth-first tree search', inspired by Peter Norvig's version in AIMA.%0Adef solve(codes):%0A # Store all relations specified in the codes in a dict. Each digit%0A # is mapped to those digits appearing after it.%0A after = defaultdict(set)%0A for code in codes:%0A a, b, c = to_digits(code)%0A after%5Ba%5D.add(b)%0A after%5Ba%5D.add(c)%0A after%5Bb%5D.add(c)%0A%0A # We will use lists to represent nodes in the tree, each of which is%0A # a candidate solution. So, initialise the frontier to the possible%0A # starting values.%0A frontier = deque(%5Bx%5D for x in after)%0A while frontier:%0A node = frontier.popleft()%0A if goal_state(node, after):%0A return node%0A # Use the 'after' dict to find the values, x, reachable from the end of%0A # the current node. Child nodes are then node + %5Bx%5D.%0A frontier.extend(node + %5Bx%5D for x in after%5Bnode%5B-1%5D%5D)%0A%0Adef goal_state(node, after):%0A %22%22%22Check whether, for all the relations specified in the 'after' dict,%0A the node satisfies them.%22%22%22%0A # For each key, x, in the 'after' dict, the values, y, in after%5Bx%5D must%0A # exist after the first occurrence of x in the node.%0A return all(y in dropwhile(lambda dgt: dgt != x, node)%0A for x in after%0A for y in after%5Bx%5D)%0A%0Adef problem79():%0A with open(%22data/keylog.txt%22, %22r%22) as f:%0A codes = %5Bint(x) for x in f.readlines()%5D%0A solution = solve(codes)%0A return to_num(solution)%0A
|
|
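Because solve() consumes only the ordering constraints, it can be sanity-checked without the keylog file. A hedged sketch, run inside problem79.py where solve and to_num are defined; the three codes below are hypothetical login attempts, not the real data:

codes = [317, 178, 378]   # hypothetical; the orderings are mutually consistent
digits = solve(codes)     # breadth-first search yields [3, 1, 7, 8]
print(to_num(digits))     # -> 3178, the shortest passcode honouring every code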
a0e9ac222091619f41a4eed0cfb25c1653b8034d
|
add simple update script
|
cvxpy/utilities/cvxpy_upgrade.py
|
cvxpy/utilities/cvxpy_upgrade.py
|
Python
| 0 |
@@ -0,0 +1,1985 @@
+import argparse%0Aimport re%0A%0A%0A# Captures the row and column parameters plus the delimiter that follows%0A# them (whitespace, a comma, or a closing parenthesis); the delimiter is%0A# captured so it can be re-emitted after the rewritten shape argument.%0A# The shape values must not be keyword arguments other than %22rows%22/%22cols%22.%0AP_ROW_COL = r%22(?:rows=)?(%5Cw+),%5Cs*(?:cols=)?(%5Cw+)(%5B%5Cs,)%5D)%22%0A%0A# A list of substitutions to make, with the first entry in each tuple the%0A# pattern and the second entry the substitution.%0ASUBST = %5B%0A # The shape is a single argument in CVXPY 1.0 (either a tuple or an int)%0A (r%22Variable%5C(%22 + P_ROW_COL, r%22Variable(shape=(%5C1, %5C2)%5C3%22),%0A (r%22Bool%5C(%22 + P_ROW_COL, r%22Variable(shape=(%5C1, %5C2), boolean=True%5C3%22),%0A (r%22Int%5C(%22 + P_ROW_COL, r%22Variable(shape=(%5C1, %5C2), integer=True%5C3%22),%0A (r%22Parameter%5C(%22 + P_ROW_COL, r%22Parameter(shape=(%5C1, %5C2)%5C3%22),%0A # Interpret 1D variables as 2D; code may depend upon 2D structure%0A (r%22Variable%5C((%5B%5E,)%5D+)%5C)%22, r%22Variable(shape=(%5C1,1))%22),%0A (r%22Bool%5C((%5B%5E,)%5D+)%5C)%22, r%22Variable(shape=(%5C1,1), boolean=True)%22),%0A (r%22Int%5C((%5B%5E,)%5D+)%5C)%22, r%22Variable(shape=(%5C1,1), integer=True)%22),%0A (r%22Parameter%5C((%5B%5E,)%5D+)%5C)%22, r%22Parameter(shape=(%5C1,1))%22),%0A # Update atom names%0A (r%22sum_entries%22, %22sum%22),%0A (r%22max_entries%22, %22max%22),%0A (r%22max_elemwise%22, %22maximum%22)%0A%5D%0A%0Aif __name__ == %22__main__%22:%0A parser = argparse.ArgumentParser(%0A description=%22%22%22Upgrade cvxpy code to version 1.0%0A%0A Usage:%0A python cvxpy_upgrade.py --infile foo.py --outfile bar.py%0A %22%22%22)%0A parser.add_argument(%22--infile%22, dest=%22input_file%22,%0A help=%22The name of the file to upgrade.%22,%0A required=True)%0A parser.add_argument(%22--outfile%22, dest=%22output_file%22,%0A help=%22The output filename.%22,%0A required=True)%0A args = parser.parse_args()%0A with open(args.input_file, 'rU') as f:%0A code = f.read()%0A for pattern, subst in SUBST:%0A code = re.sub(pattern, subst, code)%0A with open(args.output_file, 'w') as f:%0A f.write(code)%0A
|
|
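Since the upgrade is a single regex-substitution pass, the SUBST table can be exercised in memory. A hedged sketch reusing the names from the record (run inside cvxpy_upgrade.py, or after importing SUBST from it):

import re

legacy = "x = Variable(3, 2)\nobj = sum_entries(x)"
for pattern, subst in SUBST:    # SUBST as defined in the record above
    legacy = re.sub(pattern, subst, legacy)
print(legacy)
# x = Variable(shape=(3, 2))
# obj = sum(x)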
5a99f676a5b0b55d0490c955cb9af42d9121192d
|
Initialize database transactions
|
app/database.py
|
app/database.py
|
Python
| 0 |
@@ -0,0 +1,653 @@
+%22%22%22This module initialises the database engine, session, and declarative base.%22%22%22%0Afrom sqlalchemy import create_engine%0Afrom sqlalchemy.orm import scoped_session, sessionmaker%0Afrom sqlalchemy.ext.declarative import declarative_base%0Afrom config.config import Config%0A%0Aengine = create_engine(Config.DATABASE_URI, convert_unicode=True)%0Adb_session = scoped_session(sessionmaker(autocommit=False,%0A autoflush=False,%0A bind=engine))%0ABase = declarative_base()%0ABase.query = db_session.query_property()%0A%0A%0Adef init_db():%0A %22%22%22Drop any existing tables and recreate the schema from the models.%22%22%22%0A import app.models%0A Base.metadata.drop_all(bind=engine)%0A Base.metadata.create_all(bind=engine)%0A
|
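The scoped session above gives each thread its own session, and Base.query lets models query through it. A hedged usage sketch, assuming the app package from the record (including app.models and config.config) is importable; User is a hypothetical model for illustration:

from sqlalchemy import Column, Integer, String
from app.database import Base, db_session, init_db

class User(Base):                       # hypothetical model
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))

init_db()                               # caution: drops existing tables first
db_session.add(User(name='ada'))
db_session.commit()
print(User.query.count())               # Base.query is bound to db_session
db_session.remove()                     # release the thread-local session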