Dataset schema:

| Column | Type | Range / values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3–616 |
| content_id | string | length 40 |
| detected_licenses | sequence | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 (nullable) | 4.92k – 681M |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] (nullable) | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] (nullable) | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3 – 10.2M |
| authors | sequence | length 1 |
| author_id | string | length 1–132 |

# =============================================================================
# File: python_exercises/19others/fun4.py
# Repo: vineel2014/Pythonfiles  |  License: none detected
# =============================================================================
def product(numbers):
    product_so_far = 1  # multiplicative identity, so product([]) == 1
    for number in numbers:
        product_so_far *= number
    return product_so_far
print(product([1,2,3]))
print(product([7,-4,1,6,0]))
print(product([]))
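
# Cross-check (editor's note, not in the original file): since Python 3.8 the
# standard library provides math.prod with the same semantics, including an
# empty product of 1.
from math import prod
assert prod([1, 2, 3]) == product([1, 2, 3]) == 6
assert prod([]) == product([]) == 1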

# =============================================================================
# File: app/adm/tests/test_view_permission.py
# Repo: rogeriopaulos/gep  |  License: MIT
# =============================================================================
from account.tests.factories import ProfileFactory
from adm.tests.factories import AdministrativoFactory
from adm.tests.utils import SetUpTestViewAtoData
from core.permissions import GRUPO_ADMINISTRATIVO
from django.contrib.auth.models import Group
from django.test import TestCase
from django.urls import reverse
from guardian.shortcuts import assign_perm
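
# django-guardian's assign_perm, used in the setUp methods below, grants a
# model-wide permission when called with (perm, user_or_group) and an
# object-level permission when a third argument (the object) is passed.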
class SelectUserPermissionAdmViewTestCase(SetUpTestViewAtoData, TestCase):
    def setUp(self):
        self.processo = AdministrativoFactory(orgao_processo=self.orgao, autor=self.normal_user, arquivar=False)
        self.url = reverse('adm:select_user_adm', kwargs={'pk': self.processo.pk})
        self.batch = ProfileFactory.create_batch(10, orgao_link=self.orgao)
        self.context = {
            'usuarios': [f'{profile.user.pk}' for profile in self.batch]
        }
        self.response_unlogged = 'componentes/singles/core/Home.html', 200
        self.response_without_perm_GET = '403.html', 403
        self.response_without_perm_POST = '403.html', 403
        self.response_policial_GET = '403.html', 403
        self.response_policial_POST = '403.html', 403
        self.response_superior_GET = 'componentes/shares/SelectUserPerm.html', 200
        self.response_superior_POST = 'componentes/shares/PermForUser.html', 200
        assign_perm('adm.view_administrativo', self.group_superior)
        assign_perm('adm.view_administrativo', self.normal_user)
        assign_perm('adm.add_administrativo', self.group_superior)
        assign_perm('adm.add_administrativo', self.normal_user, self.processo)
        self.group = Group(name=GRUPO_ADMINISTRATIVO)
        self.group.save()
        for profile in self.batch:
            profile.user.groups.add(self.group)
def test_unlogged_GET(self):
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_unlogged[1])
self.assertTemplateUsed(response, self.response_unlogged[0])
def test_unlogged_POST(self):
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_unlogged[1])
self.assertTemplateUsed(response, self.response_unlogged[0])
def test_logged_without_permission_GET(self):
self.client.login(username=self.usuario.username, password=self.password)
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_without_perm_GET[1])
self.assertTemplateUsed(response, self.response_without_perm_GET[0])
def test_logged_without_permission_POST(self):
self.client.login(username=self.usuario.username, password=self.password)
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_without_perm_POST[1])
self.assertTemplateUsed(response, self.response_without_perm_POST[0])
def test_logged_with_policial_permission_GET(self):
self.client.login(username=self.normal_user.username, password=self.password)
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_policial_GET[1])
self.assertTemplateUsed(response, self.response_policial_GET[0])
def test_logged_with_policial_permission_POST(self):
self.client.login(username=self.normal_user.username, password=self.password)
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_policial_POST[1])
self.assertTemplateUsed(response, self.response_policial_POST[0])
def test_logged_with_superior_permission_GET(self):
self.client.login(username=self.superior.username, password=self.password)
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_superior_GET[1])
self.assertTemplateUsed(response, self.response_superior_GET[0])
def test_logged_with_superior_permission_POST(self):
self.client.login(username=self.superior.username, password=self.password)
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_superior_POST[1])
self.assertTemplateUsed(response, self.response_superior_POST[0])
class SelectUserOrgaoExternoAdmViewTestCase(SetUpTestViewAtoData, TestCase):
    def setUp(self):
        self.processo = AdministrativoFactory(orgao_processo=self.orgao, autor=self.normal_user, arquivar=False)
        self.url = reverse('adm:add_external_users_adm', kwargs={'pk': self.processo.pk})
        self.batch = ProfileFactory.create_batch(10)
        self.context = {
            'usuarios': [f'{profile.user.pk}' for profile in self.batch]
        }
        self.response_unlogged = 'componentes/singles/core/Home.html', 200
        self.response_without_perm_GET = '403.html', 403
        self.response_without_perm_POST = '403.html', 403
        self.response_policial_GET = '403.html', 403
        self.response_policial_POST = '403.html', 403
        self.response_superior_GET = 'componentes/shares/SelectUserPerm.html', 200
        self.response_superior_POST = 'componentes/shares/PermForUser.html', 200
        assign_perm('adm.view_administrativo', self.group_superior)
        assign_perm('adm.view_administrativo', self.normal_user)
        assign_perm('adm.add_administrativo', self.group_superior)
        assign_perm('adm.add_administrativo', self.normal_user, self.processo)
        self.group = Group(name=GRUPO_ADMINISTRATIVO)
        self.group.save()
        for profile in self.batch:
            profile.user.groups.add(self.group)
def test_unlogged_GET(self):
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_unlogged[1])
self.assertTemplateUsed(response, self.response_unlogged[0])
def test_unlogged_POST(self):
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_unlogged[1])
self.assertTemplateUsed(response, self.response_unlogged[0])
def test_logged_without_permission_GET(self):
self.client.login(username=self.usuario.username, password=self.password)
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_without_perm_GET[1])
self.assertTemplateUsed(response, self.response_without_perm_GET[0])
def test_logged_without_permission_POST(self):
self.client.login(username=self.usuario.username, password=self.password)
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_without_perm_POST[1])
self.assertTemplateUsed(response, self.response_without_perm_POST[0])
def test_logged_with_policial_permission_GET(self):
self.client.login(username=self.normal_user.username, password=self.password)
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_policial_GET[1])
self.assertTemplateUsed(response, self.response_policial_GET[0])
def test_logged_with_policial_permission_POST(self):
self.client.login(username=self.normal_user.username, password=self.password)
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_policial_POST[1])
self.assertTemplateUsed(response, self.response_policial_POST[0])
def test_logged_with_superior_permission_GET(self):
self.client.login(username=self.superior.username, password=self.password)
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_superior_GET[1])
self.assertTemplateUsed(response, self.response_superior_GET[0])
def test_logged_with_superior_permission_POST(self):
self.client.login(username=self.superior.username, password=self.password)
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_superior_POST[1])
self.assertTemplateUsed(response, self.response_superior_POST[0])
class PermissionAdmViewTestCase(SetUpTestViewAtoData, TestCase):
    def setUp(self):
        self.processo = AdministrativoFactory(orgao_processo=self.orgao, autor=self.normal_user, arquivar=False)
        self.url = reverse('adm:select_perm_adm', kwargs={'pk': self.processo.pk})
        self.batch = ProfileFactory.create_batch(2, orgao_link=self.orgao)
        self.context = {
            'form-TOTAL_FORMS': '2',
            'form-INITIAL_FORMS': '0',
            'form-MIN_NUM_FORMS': '0',
            'form-MAX_NUM_FORMS': '1000',
            'form-0-username': self.batch[0].user.username,
            'form-1-username': self.batch[1].user.username,
        }
        self.response_unlogged = 'componentes/singles/core/Home.html', 200
        self.response_without_perm_GET = '403.html', 403
        self.response_without_perm_POST = '403.html', 403
        self.response_policial_GET = '403.html', 403
        self.response_policial_POST = '403.html', 403
        self.response_superior_GET = 'componentes/shares/PermForUser.html', 200
        self.response_superior_POST = 'componentes/singles/processos/adm/detalhes/_DetalheGeral.html', 200
        assign_perm('adm.view_administrativo', self.group_superior)
        assign_perm('adm.view_administrativo', self.normal_user)
        assign_perm('adm.add_administrativo', self.group_superior)
        assign_perm('adm.add_administrativo', self.normal_user, self.processo)
        self.group = Group(name=GRUPO_ADMINISTRATIVO)
        self.group.save()
        for profile in self.batch:
            profile.user.groups.add(self.group)
def test_unlogged_GET(self):
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_unlogged[1])
self.assertTemplateUsed(response, self.response_unlogged[0])
def test_unlogged_POST(self):
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_unlogged[1])
self.assertTemplateUsed(response, self.response_unlogged[0])
def test_logged_without_permission_GET(self):
self.client.login(username=self.usuario.username, password=self.password)
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_without_perm_GET[1])
self.assertTemplateUsed(response, self.response_without_perm_GET[0])
def test_logged_without_permission_POST(self):
self.client.login(username=self.usuario.username, password=self.password)
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_without_perm_POST[1])
self.assertTemplateUsed(response, self.response_without_perm_POST[0])
def test_logged_with_policial_permission_GET(self):
self.client.login(username=self.normal_user.username, password=self.password)
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_policial_GET[1])
self.assertTemplateUsed(response, self.response_policial_GET[0])
def test_logged_with_policial_permission_POST(self):
self.client.login(username=self.normal_user.username, password=self.password)
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_policial_POST[1])
self.assertTemplateUsed(response, self.response_policial_POST[0])
def test_logged_with_superior_permission_GET(self):
self.client.login(username=self.superior.username, password=self.password)
response = self.client.get(self.url, follow=True)
self.assertEqual(response.status_code, self.response_superior_GET[1])
self.assertTemplateUsed(response, self.response_superior_GET[0])
def test_logged_with_superior_permission_POST(self):
self.client.login(username=self.superior.username, password=self.password)
response = self.client.post(self.url, self.context, follow=True)
self.assertEqual(response.status_code, self.response_superior_POST[1])
self.assertTemplateUsed(response, self.response_superior_POST[0])

# =============================================================================
# File: backend/djabgoBlog-master/blog/subscribe/forms.py
# Repo: triest/codeExample2  |  License: none detected
# =============================================================================
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from inspect import getmembers
from pprint import pprint
from django.forms import modelform_factory
from django import forms
from django.contrib.auth.models import User
from . import models
#from blog.articles.models import Subscribe as Subscribe1
class SunscribeForm(forms.ModelForm):
class Meta:
model = models.Subscribe
fields = ['name']
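
# A sketch of typical usage for the ModelForm above (editor's illustration;
# assumes a Django view with a `request` object, not part of the original file):
#   form = SunscribeForm(request.POST or None)
#   if form.is_valid():
#       form.save()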

# =============================================================================
# File: tests/test_feeds.py
# Repo: gjxlu/feedhq  |  License: none detected
# =============================================================================
# -*- coding: utf-8 -*-
import feedparser
import json
from datetime import timedelta
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.utils import timezone
from django_push.subscriber.signals import updated
from django_webtest import WebTest
from httplib2 import Response
from mock import patch
from rache import schedule_job
from feedhq.feeds.models import Category, Feed, Entry, UniqueFeed
from feedhq.feeds.tasks import update_feed
from feedhq.feeds.templatetags.feeds_tags import smart_date
from feedhq.feeds.utils import USER_AGENT
from feedhq.profiles.models import User
from feedhq.wsgi import application # noqa
from .factories import UserFactory, CategoryFactory, FeedFactory, EntryFactory
from . import test_file, responses
class WebBaseTests(WebTest):
@patch('requests.get')
def test_welcome_page(self, get):
get.return_value = responses(304)
self.user = User.objects.create_user('testuser',
'[email protected]',
'pass')
user = UserFactory.create()
url = reverse('feeds:home')
response = self.app.get(url, user=user)
self.assertContains(response, 'Getting started')
FeedFactory.create(category__user=user, user=user)
response = self.app.get(url)
self.assertNotContains(response, 'Getting started')
def test_login_required(self):
url = reverse('feeds:home')
response = self.app.get(url, headers={'Accept': 'text/*'})
self.assertEqual(response.status_code, 200)
def test_homepage(self):
"""The homepage from a logged in user"""
user = UserFactory.create()
response = self.app.get(reverse('feeds:home'),
user=user)
self.assertContains(response, 'Home')
self.assertContains(response, user.username)
def test_unauth_homepage(self):
"""The home page from a logged-out user"""
response = self.app.get(reverse('feeds:home'))
self.assertContains(response, 'Sign in') # login required
def test_paginator(self):
user = UserFactory.create()
response = self.app.get(reverse('feeds:home', args=[5]),
user=user)
self.assertContains(response, 'Home')
def test_category(self):
user = UserFactory.create()
CategoryFactory.create(user=user, name=u'Cat yo')
url = reverse('feeds:category', args=['cat-yo'])
response = self.app.get(url, user=user)
self.assertContains(response, 'Cat yo')
@patch("requests.get")
def test_only_unread(self, get):
get.return_value = responses(304)
user = UserFactory.create()
category = CategoryFactory.create(user=user)
FeedFactory.create(category=category, user=user)
url = reverse('feeds:unread_category', args=[category.slug])
response = self.app.get(url, user=user)
self.assertContains(response, category.name)
self.assertContains(response, 'all <span class="ct">')
def test_add_category(self):
user = UserFactory.create()
url = reverse('feeds:add_category')
response = self.app.get(url, user=user)
form = response.forms['category']
response = form.submit()
self.assertFormError(response, 'form', 'name',
['This field is required.'])
form['name'] = 'New Name'
form['color'] = 'red'
response = form.submit()
self.assertRedirects(response, '/manage/')
# Re-submitting the same name fails
response = form.submit()
self.assertFormError(response, 'form', 'name',
['A category with this name already exists.'])
# Adding a category with a name generating the same slug.
# The slug will be different
form['name'] = 'New Name'
response = form.submit()
user.categories.get(slug='new-name-1')
self.assertRedirects(response, '/manage/')
# Now we add a category named 'add', which is a conflicting URL
form['name'] = 'add'
response = form.submit()
user.categories.get(slug='add-1')
self.assertRedirects(response, '/manage/')
# Add a category with non-ASCII names, slugify should cope
form['name'] = u'北京'
response = form.submit()
user.categories.get(slug='unknown')
self.assertRedirects(response, '/manage/')
form['name'] = u'北'
response = form.submit()
user.categories.get(slug='unknown-1')
self.assertRedirects(response, '/manage/')
form['name'] = u'京'
response = form.submit()
user.categories.get(slug='unknown-2')
self.assertRedirects(response, '/manage/')
def test_delete_category(self):
user = UserFactory.create()
category = CategoryFactory.create(user=user)
url = reverse('feeds:delete_category', args=[category.slug])
response = self.app.get(url, user=user)
self.assertEqual(response.status_code, 200)
self.assertEqual(Category.objects.count(), 1)
form = response.forms['delete']
response = form.submit().follow()
self.assertEqual(Category.objects.count(), 0)
@patch("requests.get")
def test_feed(self, get):
get.return_value = responses(304)
user = UserFactory.create()
feed = FeedFactory.create(category__user=user, user=user)
url = reverse('feeds:feed', args=[feed.pk])
response = self.app.get(url, user=user)
expected = (
'<a href="{0}unread/">unread <span class="ct">0</span></a>'
).format(feed.get_absolute_url())
self.assertContains(response, expected)
def test_edit_category(self):
user = UserFactory.create()
category = CategoryFactory.create(user=user)
url = reverse('feeds:edit_category', args=[category.slug])
response = self.app.get(url, user=user)
self.assertContains(response, u'Edit {0}'.format(category.name))
form = response.forms['category']
form['name'] = 'New Name'
form['color'] = 'blue'
response = form.submit().follow()
self.assertContains(response,
'New Name has been successfully updated')
@patch('requests.get')
def test_add_feed(self, get):
get.return_value = responses(304)
user = UserFactory.create()
category = CategoryFactory.create(user=user)
url = reverse('feeds:add_feed')
response = self.app.get(url, user=user)
self.assertContains(response, 'Add a feed')
form = response.forms['feed']
form['name'] = 'Lulz'
response = form.submit() # there is no URL
self.assertFormError(response, 'form', 'url',
['This field is required.'])
form['name'] = 'Bobby'
form['url'] = 'http://example.com/feed.xml'
form['category'] = category.pk
response = form.submit()
self.assertFormError(response, 'form', 'url', [
"Invalid response code from URL: HTTP 304.",
])
get.return_value = responses(200, 'categories.opml')
response = form.submit()
self.assertFormError(response, 'form', 'url', [
"This URL doesn't seem to be a valid feed.",
])
get.return_value = responses(200, 'bruno.im.png')
response = form.submit()
self.assertFormError(response, 'form', 'url', [
"This URL doesn't seem to be a valid feed.",
])
cache_key = "lock:feed_check:{0}".format(user.pk)
cache._client.set(cache_key, user.pk)
response = form.submit()
self.assertFormError(response, 'form', 'url', [
"This action can only be done one at a time.",
])
cache._client.delete(cache_key)
get.return_value = responses(200, 'brutasse.atom')
response = form.submit()
self.assertRedirects(response, '/manage/')
response.follow()
response = form.submit()
self.assertFormError(
response, 'form', 'url',
["It seems you're already subscribed to this feed."])
# Provide initial params via ?feed=foo&name=bar
response = self.app.get(url, {'feed': 'https://example.com/blog/atom',
'name': 'Some Example Blog'})
self.assertContains(response, 'value="https://example.com/blog/atom"')
self.assertContains(response, 'value="Some Example Blog"')
get.side_effect = ValueError
user.feeds.all().delete()
response = form.submit()
self.assertFormError(response, 'form', 'url',
['Error fetching the feed.'])
def test_feed_url_validation(self):
user = UserFactory.create()
category = CategoryFactory.create(user=user)
url = reverse('feeds:add_feed')
response = self.app.get(url, user=user)
form = response.forms['feed']
form['name'] = 'Test'
form['url'] = 'ftp://example.com'
form['category'] = category.pk
response = form.submit()
self.assertFormError(
response, 'form', 'url',
"Invalid URL scheme: 'ftp'. Only HTTP and HTTPS are supported.",
)
for invalid_url in ['http://localhost:8000', 'http://localhost',
'http://127.0.0.1']:
form['url'] = invalid_url
response = form.submit()
self.assertFormError(response, 'form', 'url', "Invalid URL.")
@patch("requests.get")
def test_edit_feed(self, get):
get.return_value = responses(304)
user = UserFactory.create()
feed = FeedFactory.create(user=user)
url = reverse('feeds:edit_feed', args=[feed.pk])
response = self.app.get(url, user=user)
self.assertContains(response, feed.name)
form = response.forms['feed']
form['name'] = 'New Name'
form['url'] = 'http://example.com/newfeed.xml'
get.return_value = responses(200, 'brutasse.atom')
response = form.submit().follow()
self.assertContains(response, 'New Name has been successfully updated')
cat = CategoryFactory.create(user=user)
response = self.app.get(url, user=user)
form = response.forms['feed']
form['category'] = cat.pk
response = form.submit().follow()
self.assertContains(response, 'New Name has been successfully updated')
self.assertEqual(Feed.objects.get().category_id, cat.pk)
@patch("requests.get")
def test_delete_feed(self, get):
get.return_value = responses(304)
user = UserFactory.create()
feed = FeedFactory.create(category__user=user, user=user)
url = reverse('feeds:delete_feed', args=[feed.pk])
response = self.app.get(url, user=user)
self.assertContains(response, 'Delete')
self.assertContains(response, feed.name)
self.assertEqual(Feed.objects.count(), 1)
response = response.forms['delete'].submit()
self.assertEqual(response.status_code, 302)
self.assertEqual(Feed.objects.count(), 0)
# Redirects to home so useless to test
@patch("requests.get")
def test_invalid_page(self, get):
get.return_value = responses(304)
# We need more than 25 entries
user = UserFactory.create()
FeedFactory.create(category__user=user, user=user)
url = reverse('feeds:home', args=[12000]) # that page doesn't exist
response = self.app.get(url, user=user)
self.assertContains(response, '<a href="/" class="current">')
# This is called by other tests
def _test_entry(self, from_url, user):
self.assertEqual(self.app.get(
from_url, user=user).status_code, 200)
e = Entry.objects.get(title="jacobian's django-deployment-workshop")
url = reverse('feeds:item', args=[e.pk])
response = self.app.get(url, user=user)
self.assertContains(response, "jacobian's django-deployment-workshop")
@patch('requests.get')
def test_entry(self, get):
user = UserFactory.create()
get.return_value = responses(200, 'sw-all.xml')
feed = FeedFactory.create(category__user=user, user=user)
url = reverse('feeds:home')
self._test_entry(url, user)
url = reverse('feeds:unread')
self._test_entry(url, user)
url = reverse('feeds:stars')
self._test_entry(url, user)
url = reverse('feeds:category', args=[feed.category.slug])
self._test_entry(url, user)
url = reverse('feeds:unread_category', args=[feed.category.slug])
self._test_entry(url, user)
url = reverse('feeds:feed', args=[feed.pk])
self._test_entry(url, user)
url = reverse('feeds:unread_feed', args=[feed.pk])
self._test_entry(url, user)
feed.category = None
feed.save()
self._test_entry(url, user)
@patch('requests.get')
def test_custom_ordering(self, get):
user = UserFactory.create()
get.return_value = responses(200, 'sw-all.xml')
FeedFactory.create(user=user, category__user=user)
url = reverse('feeds:unread')
response = self.app.get(url, user=user)
first_title = response.context['entries'].object_list[0].title
last_title = response.context['entries'].object_list[-1].title
user.oldest_first = True
user.save()
response = self.app.get(url, user=user)
self.assertEqual(response.context['entries'].object_list[0].title,
last_title)
self.assertEqual(response.context['entries'].object_list[-1].title,
first_title)
@patch('requests.get')
def test_last_entry(self, get):
user = UserFactory.create()
get.return_value = responses(200, 'sw-all.xml')
feed = FeedFactory.create(category__user=user, user=user)
with self.assertNumQueries(2):
update_feed(feed.url)
self.assertEqual(Feed.objects.get().unread_count,
user.entries.filter(read=False).count())
last_item = user.entries.order_by('date')[0]
url = reverse('feeds:item', args=[last_item.pk])
response = self.app.get(url, user=user)
self.assertNotContains(response, 'Next →')
def test_not_mocked(self):
with self.assertRaises(ValueError):
FeedFactory.create()
@patch("requests.get")
def test_img(self, get):
get.return_value = responses(304)
user = UserFactory.create()
feed = FeedFactory.create(category__user=user, url='http://exmpl.com',
user=user)
entry = Entry.objects.create(
feed=feed,
title="Random title",
subtitle='<img src="/favicon.png">',
link='http://example.com',
date=timezone.now(),
user=user,
)
url = reverse('feeds:item', args=[entry.pk])
response = self.app.get(url, user=user)
self.assertContains(response, 'External media is hidden')
self.assertNotContains(response,
'<img src="http://exmpl.com/favicon.png">')
self.assertEqual(Feed.objects.get(pk=feed.pk).media_safe, False)
form = response.forms['images']
response = form.submit(name='once')
self.assertContains(response, 'Always display external media')
self.assertContains(response,
'<img src="http://exmpl.com/favicon.png">')
self.assertEqual(Feed.objects.get(pk=feed.pk).media_safe, False)
form = response.forms['images']
response = form.submit(name='always')
self.assertContains(response, 'Disable external media')
self.assertContains(response,
'<img src="http://exmpl.com/favicon.png">')
self.assertEqual(Feed.objects.get(pk=feed.pk).media_safe, True)
form = response.forms['images']
response = form.submit(name='never')
self.assertNotContains(response, 'Disable external media')
self.assertEqual(Feed.objects.get(pk=feed.pk).media_safe, False)
user.allow_media = True
user.save(update_fields=['allow_media'])
response = form.submit(name='never')
self.assertFalse('images' in response.forms)
self.assertContains(response,
'<img src="http://exmpl.com/favicon.png">')
@patch("requests.get")
def test_actions(self, get):
get.return_value = responses(304)
user = UserFactory.create()
feed = FeedFactory.create(category__user=user, url='http://exmpl.com',
user=user)
entry = Entry.objects.create(
feed=feed,
title="Random title",
subtitle='Foo bar content',
link='http://example.com',
date=timezone.now(),
user=user,
)
url = reverse('feeds:item', args=[entry.pk])
response = self.app.get(url, user=user)
token = response.forms['unread'].fields['csrfmiddlewaretoken'][0].value
response = self.app.post(url, {'action': 'invalid',
'csrfmiddlewaretoken': token},
user=user)
form = response.forms['star']
response = form.submit()
self.assertTrue(Entry.objects.get().starred)
form = response.forms['star']
response = form.submit()
self.assertFalse(Entry.objects.get().starred)
user.oldest_first = True
user.save(update_fields=['oldest_first'])
form = response.forms['unread']
response = form.submit()
self.assertFalse(Entry.objects.get().read)
@patch('requests.get')
def test_opml_import(self, get):
user = UserFactory.create()
url = reverse('feeds:import_feeds')
response = self.app.get(url, user=user)
get.return_value = responses(304)
form = response.forms['import']
with open(test_file('sample.opml'), 'r') as opml_file:
form['file'] = 'sample.opml', opml_file.read()
response = form.submit().follow()
self.assertContains(response, '2 feeds have been imported')
# Re-import
with open(test_file('sample.opml'), 'r') as opml_file:
form['file'] = 'sample.opml', opml_file.read()
response = form.submit().follow()
self.assertContains(response, '0 feeds have been imported')
# Import an invalid thing
form['file'] = 'invalid', "foobar"
response = form.submit()
self.assertFormError(response, 'form', 'file', [
"This file doesn't seem to be a valid OPML file."
])
# Empty file
form['file'] = 'name', ""
response = form.submit()
self.assertFormError(response, 'form', 'file', [
"The submitted file is empty."
])
@patch('requests.get')
def test_greader_opml_import(self, get):
user = UserFactory.create()
url = reverse('feeds:import_feeds')
response = self.app.get(url, user=user)
get.return_value = responses(304)
form = response.forms['import']
with open(test_file('google-reader-subscriptions.xml'),
'r') as opml_file:
form['file'] = 'sample.opml', opml_file.read()
response = form.submit().follow()
self.assertContains(response, '1 feed has been imported')
self.assertEqual(Category.objects.count(), 0)
@patch('requests.get')
def test_categories_in_opml(self, get):
user = UserFactory.create()
url = reverse('feeds:import_feeds')
response = self.app.get(url, user=user)
self.assertEqual(response.status_code, 200)
get.return_value = responses(304)
form = response.forms["import"]
with open(test_file('categories.opml'), 'r') as opml_file:
form['file'] = 'categories.opml', opml_file.read()
response = form.submit().follow()
self.assertContains(response, '20 feeds have been imported')
self.assertEqual(user.categories.count(), 6)
with self.assertRaises(Category.DoesNotExist):
user.categories.get(name='Imported')
with self.assertRaises(Feed.DoesNotExist):
Feed.objects.get(
category__in=user.categories.all(),
name='No title',
)
for c in Category.objects.all():
c.get_absolute_url()
@patch('requests.get')
def test_dashboard(self, get):
get.return_value = responses(304)
user = UserFactory.create()
url = reverse('feeds:dashboard')
FeedFactory.create(category=None, user=user)
for i in range(5):
FeedFactory.create(category__user=user, user=user)
response = self.app.get(url, user=user)
self.assertContains(response, 'Dashboard')
@patch('requests.get')
def test_unread_count(self, get):
"""Unread feed count everywhere"""
user = UserFactory.create()
url = reverse('profile')
response = self.app.get(url, user=user)
self.assertContains(
response,
'<a class="unread" title="Unread entries" href="/unread/">0</a>'
)
get.return_value = responses(200, 'sw-all.xml')
FeedFactory.create(category__user=user, user=user)
response = self.app.get(url, user=user)
self.assertContains(
response,
'<a class="unread" title="Unread entries" href="/unread/">30</a>'
)
@patch('requests.get')
def test_mark_as_read(self, get):
get.return_value = responses(304)
user = UserFactory.create()
feed = FeedFactory.create(category__user=user, user=user)
url = reverse('feeds:unread')
response = self.app.get(url, user=user)
self.assertNotContains(response, '"Mark all as read"')
get.return_value = responses(200, 'sw-all.xml')
update_feed(feed.url)
response = self.app.get(url, user=user)
self.assertContains(response, '"Mark all as read"')
form = response.forms['read']
response = form.submit()
self.assertRedirects(response, url)
response = response.follow()
self.assertContains(response, '30 entries have been marked as read')
self.assertEqual(user.entries.filter(read=False).count(), 0)
self.assertEqual(user.entries.filter(read=True).count(), 30)
form = response.forms['undo']
response = form.submit()
self.assertRedirects(response, url)
response = response.follow()
self.assertContains(response, "30 entries have been marked as unread")
self.assertEqual(user.entries.filter(read=False).count(), 30)
self.assertEqual(user.entries.filter(read=True).count(), 0)
@patch('requests.get')
def test_promote_html_content_type(self, get):
get.return_value = responses(200, 'content-description.xml')
feed = FeedFactory.create()
self.assertEqual(
len(feed.entries.all()[0].content.split('Février 1953')), 2)
@patch('requests.get')
@patch('oauth2.Client')
def test_add_to_readability(self, Client, get): # noqa
client = Client.return_value
r = Response({
'status': 202,
'reason': 'Accepted',
'location': '/api/rest/v1/bookmarks/119',
'x-article-location': '/api/rest/v1/articles/xj28dwkx',
})
value = json.dumps({'article': {'id': 'foo'}})
client.request.return_value = [r, value]
user = UserFactory.create(
read_later='readability',
read_later_credentials=json.dumps({
'oauth_token': 'token',
'oauth_token_secret': 'token secret',
}),
)
get.return_value = responses(200, 'sw-all.xml')
feed = FeedFactory.create(category__user=user, user=user)
get.assert_called_with(
feed.url,
headers={'User-Agent': USER_AGENT % '1 subscriber',
'Accept': feedparser.ACCEPT_HEADER}, timeout=10)
entry_pk = Entry.objects.all()[0].pk
url = reverse('feeds:item', args=[entry_pk])
response = self.app.get(url, user=user)
self.assertContains(response, "Add to Readability")
form = response.forms['read-later']
response = form.submit()
client.request.assert_called_with('/api/rest/v1/bookmarks/119',
method='GET')
self.assertEqual(Entry.objects.get(pk=entry_pk).read_later_url,
'https://www.readability.com/articles/foo')
response = self.app.get(url, user=user)
self.assertNotContains(response, "Add to Instapaper")
@patch("requests.get")
@patch('oauth2.Client')
def test_add_to_instapaper(self, Client, get): # noqa
client = Client.return_value
r = Response({'status': 200})
client.request.return_value = [
r,
json.dumps([{'type': 'bookmark', 'bookmark_id': 12345,
'title': 'Some bookmark',
'url': 'http://example.com/some-bookmark'}])
]
user = UserFactory.create(
read_later='instapaper',
read_later_credentials=json.dumps({
'oauth_token': 'token',
'oauth_token_secret': 'token secret',
}),
)
get.return_value = responses(304)
feed = FeedFactory.create(category__user=user, user=user)
get.return_value = responses(200, 'sw-all.xml')
update_feed(feed.url)
get.assert_called_with(
feed.url,
headers={'User-Agent': USER_AGENT % '1 subscriber',
'Accept': feedparser.ACCEPT_HEADER}, timeout=10)
entry_pk = Entry.objects.all()[0].pk
url = reverse('feeds:item', args=[entry_pk])
response = self.app.get(url, user=user)
self.assertContains(response, "Add to Instapaper")
form = response.forms['read-later']
response = form.submit()
body = 'url=http%3A%2F%2Fsimonwillison.net%2F2010%2FMar%2F12%2Fre2%2F'
client.request.assert_called_with(
'https://www.instapaper.com/api/1/bookmarks/add',
body=body,
method='POST',
)
self.assertEqual(Entry.objects.get(pk=entry_pk).read_later_url,
'https://www.instapaper.com/read/12345')
response = self.app.get(url, user=user)
self.assertNotContains(response, "Add to Instapaper")
@patch('requests.get')
@patch('requests.post')
def test_add_to_readitlaterlist(self, post, get):
user = UserFactory.create(
read_later='readitlater',
read_later_credentials=json.dumps({'username': 'foo',
'password': 'bar'}),
)
get.return_value = responses(200, 'sw-all.xml')
feed = FeedFactory.create(category__user=user, user=user)
get.assert_called_with(
feed.url,
headers={'User-Agent': USER_AGENT % '1 subscriber',
'Accept': feedparser.ACCEPT_HEADER}, timeout=10)
url = reverse('feeds:item', args=[Entry.objects.all()[0].pk])
response = self.app.get(url, user=user)
self.assertContains(response, 'Add to Read it later')
form = response.forms['read-later']
response = form.submit()
# Read it Later doesn't provide the article URL so we can't display a
# useful link
self.assertContains(response, "added to your reading list")
post.assert_called_with(
'https://readitlaterlist.com/v2/add',
data={u'username': u'foo',
'url': u'http://simonwillison.net/2010/Mar/12/re2/',
'apikey': 'test read it later API key',
u'password': u'bar',
'title': (u'RE2: a principled approach to regular '
u'expression matching')},
)
@patch('requests.get')
def test_pubsubhubbub_handling(self, get):
user = UserFactory.create()
url = 'http://bruno.im/atom/tag/django-community/'
get.return_value = responses(304)
feed = FeedFactory.create(url=url, category__user=user, user=user)
get.assert_called_with(
url, headers={'User-Agent': USER_AGENT % '1 subscriber',
'Accept': feedparser.ACCEPT_HEADER},
timeout=10)
self.assertEqual(feed.entries.count(), 0)
path = test_file('bruno.im.atom')
with open(path, 'r') as f:
data = f.read()
updated.send(sender=None, notification=data, request=None, links=None)
self.assertEqual(feed.entries.count(), 5)
# Check content handling
for entry in feed.entries.all():
self.assertTrue(len(entry.subtitle) > 2400)
# Check date handling
self.assertEqual(feed.entries.filter(date__year=2011).count(), 3)
self.assertEqual(feed.entries.filter(date__year=2012).count(), 2)
@patch('requests.get')
def test_missing_links(self, get):
path = test_file('no-rel.atom')
with open(path, 'r') as f:
data = f.read()
updated.send(sender=None, notification=data, request=None, links=None)
@patch('requests.get')
def test_link_headers(self, get):
user = UserFactory.create()
url = 'foo'
get.return_value = responses(304)
feed = FeedFactory.create(url=url, category__user=user, user=user)
path = test_file('no-rel.atom')
with open(path, 'r') as f:
data = f.read()
updated.send(sender=None, notification=data, request=None,
links=[{'url': 'foo', 'rel': 'self'}])
self.assertEqual(feed.entries.count(), 1)
@patch('requests.get')
def test_subscribe_url(self, get):
get.return_value = responses(304)
user = UserFactory.create()
c = CategoryFactory.create(user=user)
url = reverse('feeds:subscribe')
response = self.app.get(url, {'feeds': "http://bruno.im/atom/latest/"},
user=user)
self.assertContains(response, 'value="http://bruno.im/atom/latest/"')
form = response.forms['subscribe']
response = form.submit()
self.assertContains(response, 'This field is required.', 1)
form['form-0-name'] = "Bruno's awesome blog"
form['form-0-category'] = c.pk
self.assertEqual(Feed.objects.count(), 0)
response = form.submit().follow()
self.assertEqual(Feed.objects.count(), 1)
form['form-0-name'] = ""
form['form-0-category'] = ""
form['form-0-subscribe'] = False
response = form.submit().follow()
self.assertContains(response, '0 feeds have been added')
form['form-0-name'] = 'Foo'
form['form-0-category'] = c.pk
form['form-0-subscribe'] = True
response = form.submit()
self.assertContains(response, "already subscribed")
UniqueFeed.objects.create(url='http://example.com/feed',
title='Awesome')
response = self.app.get(
url, {'feeds': ",".join(['http://bruno.im/atom/latest/',
'http://example.com/feed'])})
form = response.forms['subscribe']
self.assertEqual(form['form-0-name'].value, 'Awesome')
response = form.submit().follow()
self.assertEqual(Feed.objects.count(), 2)
def test_bookmarklet_no_feed(self):
user = UserFactory.create()
url = reverse('feeds:subscribe')
response = self.app.get(url, {'url': 'http://isitbeeroclock.com/'},
user=user)
self.assertContains(
response, ('it looks like there are no feeds available on '
'<a href="http://isitbeeroclock.com/">'))
@patch("requests.get")
def test_relative_links(self, get):
get.return_value = responses(200, path='brutasse.atom')
user = UserFactory.create()
FeedFactory.create(category__user=user, user=user,
url='https://github.com/brutasse.atom')
entry = user.entries.all()[0]
self.assertTrue('<a href="/brutasse"' in entry.subtitle)
self.assertFalse('<a href="/brutasse"' in entry.content)
self.assertTrue(
'<a href="https://github.com/brutasse"' in entry.content)
feed = Feed(url='http://standblog.org/blog/feed/rss2')
e = Entry(feed=feed, subtitle=(
' <p><img alt=":-)" class="smiley"'
'src="/dotclear2/themes/default/smilies/smile.png" /> . </p>'
))
self.assertTrue(('src="http://standblog.org/dotclear2/themes/'
'default/smilies/smile.png"') in e.content)
@patch('requests.get')
def test_empty_subtitle(self, get):
get.return_value = responses(304)
user = UserFactory.create()
entry = EntryFactory(user=user, feed__category__user=user, subtitle='')
url = reverse('feeds:item', args=[entry.pk])
self.app.get(url, user=user)
def test_smart_date(self):
now = timezone.now()
self.assertEqual(len(smart_date(now)), 5)
        if now.day != 1 or now.month != 1:  # Can't test this on Jan 1st :)
now = now - timedelta(days=1)
self.assertEqual(len(smart_date(now)), 6)
now = now - timedelta(days=366)
self.assertEqual(len(smart_date(now)), 12)
@patch('requests.get')
def test_manage_feed(self, get):
get.return_value = responses(304)
user = UserFactory.create()
url = reverse('feeds:manage')
response = self.app.get(url, user=user)
self.assertContains(response, 'Manage feeds')
FeedFactory.create(user=user, category=None)
FeedFactory.create(user=user, category=None)
FeedFactory.create(user=user, category=None)
unique = UniqueFeed.objects.all()[0]
schedule_job(unique.url, schedule_in=0, backoff_factor=10,
error=UniqueFeed.NOT_A_FEED)
response = self.app.get(url, user=user)
self.assertContains(response, 'Not a valid RSS/Atom feed')
schedule_job(unique.url, schedule_in=0, error='blah')
response = self.app.get(url, user=user)
self.assertContains(response, 'Error')
unique.muted = True
unique.save()
response = self.app.get(url, user=user)
self.assertContains(response, 'Error')

# =============================================================================
# File: sympycore/matrices/__init__.py
# Repo: wenyifdu/pymaclab  |  License: Apache-2.0
# =============================================================================
""" Provides matrix support.
"""
__docformat__ = "restructuredtext"
from .algebra import Matrix, MatrixBase
from .functions import eye, concatenate, jacobian
from .polyhedra import Polyhedron

# =============================================================================
# File: tools/Ui_couput.py
# Repo: JoeChen999/tools  |  License: none detected
# =============================================================================
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/Users/chenbiao/Documents/workspaces/tools/couput.ui'
#
# Created: Thu Sep 5 15:37:23 2013
# by: PyQt4 UI code generator 4.10.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(702, 527)
self.textBrowser = QtGui.QTextBrowser(Dialog)
self.textBrowser.setGeometry(QtCore.QRect(0, 0, 701, 531))
self.textBrowser.setObjectName(_fromUtf8("textBrowser"))
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", "Dialog", None))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Dialog = QtGui.QDialog()
ui = Ui_Dialog()
ui.setupUi(Dialog)
Dialog.show()
sys.exit(app.exec_())
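
# This module is machine-generated; rather than editing it by hand, it can be
# regenerated from the Designer file with the standard PyQt4 tool, e.g.:
#   pyuic4 couput.ui -o Ui_couput.py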

# =============================================================================
# File: tests/utils/test_constants.py
# Repo: haoziyeung/puzzle  |  License: MIT
# =============================================================================
from puzzle.utils.constants import HGNC_TO_OMIM, SEVERITY_DICT
def test_HGNC_TO_OMIM():
assert HGNC_TO_OMIM['CACNA1F'].get('mim_nr') == 300110
assert HGNC_TO_OMIM['ADK'].get('mim_nr') == 102750
def test_SEVERITY_DICT():
assert SEVERITY_DICT['transcript_ablation'] == 0
assert SEVERITY_DICT['start_lost'] == 6

# =============================================================================
# File: Python_parallel/lu_mpi.py
# Repo: 610yilingliu/LU_Decomposition_MPIVersion  |  License: MIT
# =============================================================================
import mpi4py  # MPI bindings; the MPI-parallel part of this file is truncated below
import random
def generate(sz, rg, sl):
"""
    :type int sz: size of the square matrix (sz * sz)
    :type int rg, int sl: range and scale of the data; e.g. for values in
        [-0.2, 0.2], use rg = 2, sl = 10
    :rtype: (matrix List[List[float]], vector List[float]); Python's float is
        equivalent to C's double
    The parallel matrix generation is already shown in the C version, so it is
    not repeated here in Python.
"""
random.seed(0)
matrix = []
vector = []
for _ in range(sz):
ls = []
for i in range(sz):
ele = (-rg + random.random() * 2 * rg) / sl
ls.append(ele)
matrix.append(ls)
vector.append((-rg + random.random() * 2 * rg) / sl)
return matrix, vector
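
# For instance (editor's illustration, not in the source): a 4x4 system with
# entries in [-0.2, 0.2] would be produced by
#   M, b = generate(4, 2, 10)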
def re_arrange(M, V):
"""
:type M: List[List[float]] generated matrix
    :type V: List[float] generated vector; len(vector) == len(matrix)
    :rtype: (M List[List[float]], V List[float]) rearranged matrix and vector.
        Partial pivoting: Ax = b  =>  PAx = Pb
"""
def find_mx(col):
mx = 0
idx = 0
for i in range(col, len(M)):
cur = abs(M[i][col])
if cur > mx:
mx = cur
idx = i
if mx == 0:
print("Invalid Matrix")
exit(0)
return idx
    for i in range(len(M)):  # M is a list of rows; iterate over row indices
target = find_mx(i)
M[i], M[target] = M[target], M[i]
V[i], V[target] = V[target], V[i]
return M, V
def lu(M):
    # The factorization body is missing; the source file ends here.
    ...
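
# The author's MPI-parallel LU body is absent from this snapshot. Purely as an
# editor's illustration (the name and code below are not from the source), a
# minimal sequential Doolittle factorization of a pre-pivoted matrix looks like:
def lu_reference(M):
    """Return (L, U) with L unit lower triangular and U upper triangular."""
    n = len(M)
    L = [[1.0 if i == j else 0.0 for j in range(n)] for i in range(n)]
    U = [row[:] for row in M]  # work on a copy so M is untouched
    for k in range(n):
        for i in range(k + 1, n):
            factor = U[i][k] / U[k][k]  # pivots made non-zero by re_arrange()
            L[i][k] = factor
            for j in range(k, n):
                U[i][j] -= factor * U[k][j]
    return L, U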

# =============================================================================
# File: backend/new_18912/settings.py
# Repo: crowdbotics-apps/new-18912  |  License: none detected
# =============================================================================
"""
Django settings for new_18912 project.
Generated by 'django-admin startproject' using Django 1.11.16.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
import environ
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
env = environ.Env()
environ.Env.read_env(os.path.join(BASE_DIR, '.env'))
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool('DEBUG', default=True)
ALLOWED_HOSTS = ['*']
SITE_ID = 1
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'new_18912.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates'), ],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'new_18912.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'new_18912',
'USER': 'new_18912',
'PASSWORD': 'new_18912',
'HOST': 'localhost',
'PORT': '5432',
}
}
if env.str('DATABASE_URL', default=None):
DATABASES = {
'default': env.db()
}
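# env.db() parses a 12-factor style URL from the DATABASE_URL variable, e.g.
# (hypothetical value): postgres://new_18912:new_18912@localhost:5432/new_18912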
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static')
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
# allauth
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = None
LOGIN_REDIRECT_URL = '/'
if DEBUG:
# output email to console instead of sending
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = env.str('SENDGRID_USERNAME', '')
EMAIL_HOST_PASSWORD = env.str('SENDGRID_PASSWORD', '')
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# Import local settings
try:
from .local_settings import *
INSTALLED_APPS += DEBUG_APPS
except:
pass

# =============================================================================
# File: modules/app_email/lib/delphi.py
# Repo: dfrc-korea/carpe  |  License: Apache-2.0
# =============================================================================
#
# delphi.py, 200611
#
import os
import errno
def CreateDir(dir):
try:
if not os.path.isdir(dir):
os.makedirs(os.path.join(dir))
return True
except OSError as e:
if e.errno != errno.EEXIST: raise
return False
def DirectoryExists(dir):
return os.path.isdir(dir)
def ExtractFilePath(fn):
v = os.path.dirname(fn)
return v if v == '' else v + PathDelimiter
def ExtractFileDir(fn):
return os.path.dirname(fn)
def ExtractFileName(fn):
return os.path.basename(fn)
def ExtractFileExt(fn):
    p = fn.rfind('.')
    # the dot only counts as an extension separator if it occurs after the last '='
    return fn[p:] if p > fn.rfind('=') else ''
def ChangeFileExt(fn, ext):
p = fn.rfind('.')
if p == -1: return ''
return fn[:p] + ext
def FileExists(fn):
return os.path.isfile(fn)
def StrToIntDef(v, default):
try:
return int(v)
except:
return default
def IncludeTrailingBackslash(v):
return os.path.join(v, '')
def ExcludeTrailingBackslash(v):
return v.rstrip(PathDelimiter)
IncludeTrailingPathDelimiter = IncludeTrailingBackslash
ExcludeTrailingPathDelimiter = ExcludeTrailingBackslash
PathDelimiter = '\\'
_NonPathDelimiter = '/'
if os.path.join('_', '').find(PathDelimiter) == -1:
PathDelimiter = '/'
_NonPathDelimiter = '\\'
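
# Illustrative checks of the helpers above (editor's example; POSIX-style
# paths assumed, values are hypothetical):
if __name__ == '__main__':
    assert ExtractFileName('/tmp/report.txt') == 'report.txt'
    assert ExtractFileExt('report.txt') == '.txt'
    assert ChangeFileExt('report.txt', '.csv') == 'report.csv'
    assert StrToIntDef('42', 0) == 42 and StrToIntDef('abc', 0) == 0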

# =============================================================================
# File: .history/application/controllers/sinhvien_20201006230934.py
# Repo: thinhpayer123/learning-python-vinacomi  |  License: none detected
# =============================================================================
from application.extensions import apimanager
from application.models.model import QRUser, User, UserWallet
from application.extensions import auth
from application.database import db
from gatco.exceptions import ServerError
from sqlalchemy import create_engine
import os
import random
import string
import aiofiles
import time
from gatco.response import json
from application.server import app
from application.config import Config
import psycopg2
config = Config()
import pandas as pd
import xlrd
import qrcode
import shutil
import asyncio
import datetime
import ujson
def auth_func(request=None, **kw):
#uid = auth.current_user(request)
#if uid is None:
# raise ServerError("abc")
pass
@app.route('/api/v1/file/upload', methods=['GET', 'POST'])
async def file_load(request):
path = request.args.get("path", None)
ret = None
# url_qr = config.QR_SERVICE_URL
# url = config.FILE_SERVICE_URL
fsroot = config.FS_ROOT
if request.method == 'POST':
file = request.files.get('file', None)
if file:
extname = request.files.get('file').name
if not os.path.exists(fsroot):
os.makedirs(fsroot)
subPath = ""
if path is not None:
subPath = path + "/"
if not os.path.exists(fsroot + subPath):
os.makedirs(fsroot + subPath)
async with aiofiles.open(fsroot + subPath + extname, 'wb+') as f:
await f.write(file.body)
link_local = fsroot + subPath + extname
print(link_local)
data = pd.read_excel(link_local)
print(data)
df = pd.DataFrame(data, columns=['student_school_year', 'student_class', 'student_id', 'student_name', 'birthday', 'gender','email'])
# print('122112'+df)
# company_id = request.args.get("company_id")
company_id = 'TEST'
# print(company_id)
# result = []
            a = df.get(["student_school_year", "student_class", "student_id", 'student_name', 'birthday', 'gender', 'email'])
result = df.to_json(orient='records')
result_ujson = ujson.loads(result)
item_result = []
for item in result_ujson:
user_no = item.get("student_id",{})
extra_data = item
new_entry = UserWallet(user_no=user_no,
company_id=company_id,
extra_data=extra_data)
item_result.append(new_entry)
db.session.add_all(item_result)
db.session.commit()
# print(result)
# q = db.session.query(User).with_for_update(nowait=Tre, of=User)
# user_name =
# full_name
# email
# companyid
# alchemyEngine = create_engine('postgresql://icangteen_user:123456abcA@localhost:5432/icangteen', pool_recycle=3600);
# postgreSQLConnection = alchemyEngine.connect();
# postgreSQLTable = 'student';
# df.to_sql(postgreSQLTable, alchemyEngine, if_exists='append', index=False)
# #
ret = {
"notify":"upload file success ",
# "id": id
}
return json(ret)
@app.route('/api/v1/Genqr', methods=['GET' , 'POST'])
async def genqr(request):
fsroot = config.FS_ROOT
url = config.FILE_SERVICE_URL
qr = config.QR_ARCHIVE
# userWallets =[]
# print(id)
if request.method == 'POST':
path = request.args.get('')
userWallets = UserWallet.query.order_by(UserWallet.id).all()
for users in userWallets:
# format_data = ujson.loads
info_user = users.extra_data
student_id = info_user['student_id']
student_school_year = info_user['student_school_year']
student_class = info_user['student_class']
student_name = info_user['student_name']
birthday = info_user['birthday']
            company_id = request.args.get('company_id')
            company_no = request.args.get('company_no')
            company_type = request.args.get('company_type')
current_user = request.args.get('user')
current_user_no = request.args.get('id')
# print(company_id)
# print(',..........'+ str(current_user))
img = qrcode.make(student_school_year + '-' + student_class + '-' + student_id + '-' + student_name + '-' + birthday)
name_img = student_class + '-' + student_id + '-' + student_name + '.png'
link_img = fsroot + 'qrcode/' + name_img
img.save(link_img)
user_wallet = UserWallet()
user_wallet.company_id = company_id
user_wallet.company_no = company_no
user_wallet.company_type = company_type
user_wallet.user_id = current_user
            # user_no and created_at were left unfinished in this snapshot;
            # the values below are assumed from the surrounding request args.
            user_wallet.user_no = current_user_no
            user_wallet.created_at = datetime.datetime.now()
            user_wallet.nameqr = student_class + '-' + student_id + '-' + student_name
            user_wallet.saveDirectory = link_img
            db.session.add(user_wallet)
db.session.commit()
zipfile = shutil.make_archive(fsroot, 'zip', fsroot, 'qrcode/')
ret = {
"link": url
}
return json(ret)
# apimanager.create_api(collection_name='qrworker', model=QRworker,
# methods=['GET', 'POST', 'DELETE', 'PUT'],
# url_prefix='/api/v1',
# preprocess=dict(GET_SINGLE=[auth_func], GET_MANY=[auth_func], POST=[auth_func], PUT_SINGLE=[auth_func]),
# )
| [
"[email protected]"
] | |
987f92a7554c420a41bb61e75372a9b7d318c61d | a7ed4da896faab105ff08259a82ae822548d1c2c | /6/6_1.py | 7ed377a9d8fe394412cc185b039795c97bc0a055 | [] | no_license | nemesmarci/Advent-of-Code-2019 | e8e59d89be8ed43ce046fd7854b1c9b779a8930e | 251dc4dc62777a7c0daf7a74d832c0be3d75140e | refs/heads/master | 2021-12-31T10:53:35.924253 | 2019-12-27T17:17:06 | 2021-12-29T19:17:13 | 225,252,265 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | from space import read_data
objects = read_data()
root = objects['COM']
def traverse(current, level=1):
n = len(current.orbiters) * level
for child in current.orbiters:
n += traverse(child, level + 1)
return n
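# Each body at depth d contributes d orbits (one direct plus d-1 indirect),
# so for the chain COM)B)C, traverse(root) returns 1 + 2 = 3.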
print(traverse(root))
| [
"[email protected]"
] | |
6db39aefe0474c81e6a6a24d7b3c79f3ab7553e7 | 456ca4570118e2944eb46c439b9d8090750af1b1 | /Lab8/smtp_master.py | 2fdf64fc00e0f49fd1a6ecce04ec52e53ab84898 | [] | no_license | goreckinj/Network-Protocols-Labs | e34bb5c7b44d5dc43e5580c09830ec891740ca36 | 9c5e2185e6a298a6eca3c01b3ae1d69d529ca869 | refs/heads/master | 2020-04-25T05:26:54.197232 | 2019-02-25T16:38:45 | 2019-02-25T16:38:45 | 172,543,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,136 | py | """
- CS2911 - 0NN
- Fall 2017
- Lab N
- Names:
- Jason Gao
- Nick Gorecki
- Alex
A simple email sending program.
Thanks to Trip Horbinski from the Fall 2015 class for providing the password-entering functionality.
"""
# GUI library for password entry
import tkinter as tk
# Socket library
import socket
# SSL/TLS library
import ssl
# base-64 encode/decode
import base64
# Python date/time and timezone modules
import datetime
import time
import pytz
import tzlocal
# Module for reading password from console without echoing it
import getpass
# Modules for some file operations
import os
import mimetypes
# Extraneous
# Host name for MSOE (hosted) SMTP server
SMTP_SERVER = 'smtp.office365.com'
# The default port for STARTTLS SMTP servers is 587
SMTP_PORT = 587
# SMTP domain name
SMTP_DOMAINNAME = 'msoe.edu'
def main():
"""Main test method to send an SMTP email message.
Modify data as needed/desired to test your code,
but keep the same interface for the smtp_send
method.
"""
(username, password) = login_gui()
message_info = {}
message_info['To'] = '[email protected]'
message_info['From'] = username
message_info['Subject'] = 'SMTP Email'
message_info['Date'] = get_formatted_date()
print("message_info =", message_info)
message_text = 'Test message_info number 6\r\n\r\nAnother line.'
smtp_send(password, message_info, message_text)
def login_gui():
"""
Creates a graphical user interface for secure user authorization.
:return: (email_value, password_value)
email_value -- The email address as a string.
password_value -- The password as a string.
:author: Tripp Horbinski
"""
gui = tk.Tk()
gui.title("MSOE Email Client")
center_gui_on_screen(gui, 370, 120)
tk.Label(gui, text="Please enter your MSOE credentials below:") \
.grid(row=0, columnspan=2)
tk.Label(gui, text="Email Address: ").grid(row=1)
tk.Label(gui, text="Password: ").grid(row=2)
email = tk.StringVar()
email_input = tk.Entry(gui, textvariable=email)
email_input.grid(row=1, column=1)
password = tk.StringVar()
password_input = tk.Entry(gui, textvariable=password, show='*')
password_input.grid(row=2, column=1)
auth_button = tk.Button(gui, text="Authenticate", width=25, command=gui.destroy)
auth_button.grid(row=3, column=1)
gui.mainloop()
email_value = email.get()
password_value = password.get()
return email_value, password_value
def center_gui_on_screen(gui, gui_width, gui_height):
"""Centers the graphical user interface on the screen.
:param gui: The graphical user interface to be centered.
:param gui_width: The width of the graphical user interface.
:param gui_height: The height of the graphical user interface.
:return: The graphical user interface coordinates for the center of the screen.
:author: Tripp Horbinski
"""
screen_width = gui.winfo_screenwidth()
screen_height = gui.winfo_screenheight()
x_coord = (screen_width / 2) - (gui_width / 2)
y_coord = (screen_height / 2) - (gui_height / 2)
return gui.geometry('%dx%d+%d+%d' % (gui_width, gui_height, x_coord, y_coord))
# *** Do not modify code above this line ***
def smtp_send(password, message_info, message_text):
"""Send a message via SMTP.
:author: gaoj, gorecki, alex
:param password: String containing user password.
:param message_info: Dictionary with string values for the following keys:
'To': Recipient address (only one recipient required)
'From': Sender address
'Date': Date string for current date/time in SMTP format
'Subject': Email subject
Other keys can be added to support other email headers, etc.
"""
status_354 = b'354'
status_334 = b'334'
status_250 = b'250'
status_235 = b'235'
status_221 = b'221'
status_220 = b'220'
tcp_socket = None
wrapped_socket = None
try:
# connect socket
tcp_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tcp_socket.connect((SMTP_SERVER, SMTP_PORT))
# get reply from server
read_response(tcp_socket, status_220)
# send ehlo
send_reply(tcp_socket, b'EHLO ' + SMTP_DOMAINNAME.encode() + b'\r\n')
# read status/header responses
read_response(tcp_socket, status_250)
# send starttls
send_reply(tcp_socket, b'STARTTLS\r\n')
# get server ready response
read_response(tcp_socket, status_220)
# create wrapped socket
context = ssl.create_default_context()
wrapped_socket = context.wrap_socket(tcp_socket, server_hostname=SMTP_SERVER)
# send ehlo
send_reply(wrapped_socket, b'EHLO ' + SMTP_DOMAINNAME.encode() + b'\r\n')
# read status/header responses
read_response(wrapped_socket, status_250)
# send auth login w/ username b64
send_reply(wrapped_socket, b'AUTH LOGIN ' + base64.b64encode(message_info['From'].encode()) + b'\r\n')
# read password request
read_response(wrapped_socket, status_334)
# send password b64
send_reply(wrapped_socket, base64.b64encode(password.encode()) + b'\r\n')
# read authentication reply
read_response(wrapped_socket, status_235)
# Send over header lines + data [e.g.: MAIL FROM: ~~~ . . . DATA . . .]
send_reply(wrapped_socket, b'MAIL FROM: <' + message_info['From'].encode() + b'>\r\n')
read_response(wrapped_socket, status_250)
send_reply(wrapped_socket, b'RCPT TO: <' + message_info['To'].encode() + b'>\r\n')
read_response(wrapped_socket, status_250)
send_reply(wrapped_socket, b'DATA\r\n')
read_response(wrapped_socket, status_354)
send_reply(wrapped_socket, create_data(message_info, message_text))
read_response(wrapped_socket, status_250)
# quit
send_reply(wrapped_socket, b'QUIT\r\n')
read_response(wrapped_socket, status_221)
except RuntimeError as e:
print(e)
print('NOTICE: Force-Closing Connection')
if tcp_socket is not None:
tcp_socket.close()
if wrapped_socket is not None:
wrapped_socket.close()
if tcp_socket is not None:
tcp_socket.close()
if wrapped_socket is not None:
wrapped_socket.close()
def create_data(message_info, message_text):
"""
Created the data message to send
:author: gaoj, gorecki, alex
:param message_info: Dictionary containing the message info
:param message_text: The message to send
:return: bytes
"""
data = b'Content-Transfer-Encoding: 7bit\r\n' \
+ b'Content-Type: text/plain; charset="us-ascii"\r\n' \
+ b'Subject: ' + message_info['Subject'].encode() + b'\r\n' \
+ b'To: <' + message_info['To'].encode() + b'>\r\n' \
+ b'MIME-Version: 1.0\r\n' \
+ b'From: <' + message_info['From'].encode() + b'>\r\n' \
+ b'Date: ' + message_info['Date'].encode() + b'\r\n\r\n' \
+ message_text.encode() + b'\r\n.\r\n'
return data
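# Note: create_data terminates the DATA section with CRLF '.' CRLF, the
# end-of-data marker required by RFC 5321.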
def read_response(data_socket, comparator):
"""
Reads the response from the data_socket
:author: gaoj, goreckinj, alex
:param data_socket: The socket to pull bytes from
:param comparator: the comparator to compare the status_code with
:return: bytes[]
"""
response = []
# get response lines
line = b''
while line[3:4] != b' ' or line[-2:] != b'\r\n':
line += next_byte(data_socket, 1)
if line[-2:] == b'\r\n':
response.append(line)
if line[3:4] != b' ':
line = b''
# print response lines
for lin in response:
print(b'response: ' + lin)
# get status code
status_code = response[-1][:3]
# check status code
if status_code != comparator:
raise RuntimeError('ERROR:\n\tExpected Status: ' + comparator.decode()
+ '\n\tActual Status: ' + status_code.decode()
+ '\n\tMessage: ' + response[-1][4:].decode())
def send_reply(data_socket, message):
"""
Sends a reply to the server
:author:
:param data_socket: The socket to pull bytes from
:param message: The message to send
:return: None
"""
print(b'reply: ' + message)
data_socket.sendall(message)
def next_byte(data_socket, buffer_size):
"""
Gets the next byte from the data socket
:author: goreckinj
:param data_socket: The data socket to pull bytes from
:param buffer_size: The amount of bytes to pull
:return: bytes - The bytes received
"""
bytez = data_socket.recv(buffer_size)
while len(bytez) != buffer_size:
bytez += data_socket.recv(buffer_size - len(bytez))
return bytez
def get_formatted_date():
"""Get the current date and time, in a format suitable for an email date header.
The constant TIMEZONE_NAME should be one of the standard pytz timezone names.
If you really want to see them all, call the print_all_timezones function.
tzlocal suggested by http://stackoverflow.com/a/3168394/1048186
See RFC 5322 for details about what the timezone should be
https://tools.ietf.org/html/rfc5322
:return: Formatted current date/time value, as a string.
"""
zone = tzlocal.get_localzone()
print("zone =", zone)
timestamp = datetime.datetime.now(zone)
timestring = timestamp.strftime('%a, %d %b %Y %H:%M:%S %z') # Sun, 06 Nov 1994 08:49:37 +0000
return timestring
def print_all_timezones():
""" Print all pytz timezone strings. """
for tz in pytz.all_timezones:
print(tz)
# You probably won't need the following methods, unless you decide to
# try to handle email attachments or send multi-part messages.
# These advanced capabilities are not required for the lab assignment.
def get_mime_type(file_path):
"""Try to guess the MIME type of a file (resource), given its path (primarily its file extension)
:param file_path: String containing path to (resource) file, such as './abc.jpg'
:return: If successful in guessing the MIME type, a string representing the content
type, such as 'image/jpeg'
Otherwise, None
:rtype: int or None
"""
mime_type_and_encoding = mimetypes.guess_type(file_path)
mime_type = mime_type_and_encoding[0]
return mime_type
def get_file_size(file_path):
"""Try to get the size of a file (resource) in bytes, given its path
:param file_path: String containing path to (resource) file, such as './abc.html'
:return: If file_path designates a normal file, an integer value representing the the file size in bytes
Otherwise (no such file, or path is not a file), None
:rtype: int or None
"""
# Initially, assume file does not exist
file_size = None
if os.path.isfile(file_path):
file_size = os.stat(file_path).st_size
return file_size
main()
| [
"="
] | = |
0ad05c0dfd6e48742b121e8c44fddf8a92224af1 | dd770e697daddab20e09fbf8ce199c97ee540c37 | /bigtop-packages/src/charm/spark/layer-spark/tests/10-test-ha.py | 99c604f9ffd781c069a3d253e4eb43d243ba046e | [
"Apache-2.0",
"FreeBSD-DOC",
"MIT",
"DOC"
] | permissive | PKConsul/bigtop | 0e7b5133be17a2093c0d5279b000c60b67072a16 | 2f8311b184bf0c5d25756b098895e43b1dbc3c2e | refs/heads/master | 2021-01-20T02:08:29.012667 | 2017-04-22T17:44:30 | 2017-04-23T06:27:13 | 89,379,381 | 1 | 0 | null | 2017-04-25T15:53:29 | 2017-04-25T15:53:29 | null | UTF-8 | Python | false | false | 3,718 | py | #!/usr/bin/python3
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import amulet
import requests
import time
import unittest
class TestDeployment(unittest.TestCase):
"""
Test scaling of Apache Spark in HA mode.
"""
@classmethod
def setUpClass(cls):
cls.d = amulet.Deployment(series='xenial')
cls.d.add('spark-test-ha', 'cs:xenial/spark', units=3)
cls.d.add('zk-test', 'cs:xenial/zookeeper')
cls.d.relate('zk-test:zookeeper', 'spark-test-ha:zookeeper')
cls.d.expose('spark-test-ha')
cls.d.setup(timeout=3600)
cls.d.sentry.wait(timeout=3600)
@classmethod
def tearDownClass(cls):
# NB: seems to be a remove_service issue with amulet. However, the
# unit does still get removed. Pass OSError for now:
# OSError: juju command failed ['remove-application', 'zk-test']:
# ERROR allocation for service ...zk-test... owned by ... not found
try:
cls.d.remove_service('spark-test-ha', 'zk-test')
except OSError as e:
print("IGNORE: Amulet remove_service failed: {}".format(e))
pass
def test_master_selected(self):
"""
Wait for all three spark-test-ha units to agree on a master leader.
Remove the leader unit.
Check that a new leader is elected.
"""
self.d.sentry.wait_for_messages({"spark-test-ha": ["ready (standalone - HA)",
"ready (standalone - HA)",
"ready (standalone - HA)"]}, timeout=900)
print("Waiting for units to agree on master.")
time.sleep(120)
master = ''
masters_count = 0
for unit in self.d.sentry['spark-test-ha']:
ip = unit.info['public-address']
url = 'http://{}:8080'.format(ip)
homepage = requests.get(url)
if 'ALIVE' in homepage.text:
masters_count += 1
master = unit.info['unit_name']
else:
assert 'STANDBY' in homepage.text
assert masters_count == 1
print("Removing master: {} ".format(master))
self.d.remove_unit(master)
time.sleep(120)
self.d.sentry.wait_for_messages({"spark-test-ha": ["ready (standalone - HA)",
"ready (standalone - HA)"]}, timeout=900)
masters_count = 0
for unit in self.d.sentry['spark-test-ha']:
ip = unit.info['public-address']
url = 'http://{}:8080'.format(ip)
homepage = requests.get(url)
if 'ALIVE' in homepage.text:
print("New master is {}".format(unit.info['unit_name']))
masters_count += 1
else:
assert 'STANDBY' in homepage.text
assert masters_count == 1
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
fc3ef945c17fc0a2cc07ebc2bd0847622a35d8e1 | 778a420262c8d1547cbcbbe3143cd73a94d9cff6 | /test/test_common.py | bea1a3d8c600fa0d2718f9889976a7f6782f3467 | [
"Apache-2.0"
] | permissive | ddierkes/bdbag | a59ef7c4a7207dd9a90374a021c682b0b4642c54 | bc35ddcb544f7fa923d4649835ad99a15a778bce | refs/heads/master | 2020-03-17T22:06:22.942490 | 2018-05-03T11:05:57 | 2018-05-03T11:05:57 | 133,989,136 | 0 | 0 | null | 2018-05-18T18:14:03 | 2018-05-18T18:14:03 | null | UTF-8 | Python | false | false | 4,281 | py | import os
import gc
import sys
import shutil
import tempfile
import unittest
class BaseTest(unittest.TestCase):
def setUp(self):
if sys.version_info < (3,):
self.assertRaisesRegex = self.assertRaisesRegexp
self.tmpdir = tempfile.mkdtemp(prefix="bdbag_test_")
shutil.copytree(os.path.abspath(os.path.join('test', 'test-data')), os.path.join(self.tmpdir, 'test-data'))
self.test_data_dir = os.path.join(self.tmpdir, 'test-data', 'test-dir')
self.assertTrue(os.path.isdir(self.test_data_dir))
self.test_archive_dir = os.path.join(self.tmpdir, 'test-data', 'test-archives')
self.assertTrue(os.path.isdir(self.test_archive_dir))
self.test_config_dir = os.path.join(self.tmpdir, 'test-data', 'test-config')
self.assertTrue(os.path.isdir(self.test_config_dir))
self.test_http_dir = os.path.join(self.tmpdir, 'test-data', 'test-http')
self.assertTrue(os.path.isdir(self.test_http_dir))
self.test_bag_dir = os.path.join(self.tmpdir, 'test-data', 'test-bag')
self.assertTrue(os.path.isdir(self.test_bag_dir))
self.test_bag_incomplete_dir = os.path.join(self.tmpdir, 'test-data', 'test-bag-incomplete')
self.assertTrue(os.path.isdir(self.test_bag_incomplete_dir))
self.test_bag_fetch_http_dir = os.path.join(self.tmpdir, 'test-data', 'test-bag-fetch-http')
self.assertTrue(os.path.isdir(self.test_bag_fetch_http_dir))
self.test_bag_fetch_ark_dir = os.path.join(self.tmpdir, 'test-data', 'test-bag-fetch-ark')
self.assertTrue(os.path.isdir(self.test_bag_fetch_ark_dir))
self.test_bag_fetch_minid_dir = os.path.join(self.tmpdir, 'test-data', 'test-bag-fetch-minid')
self.assertTrue(os.path.isdir(self.test_bag_fetch_minid_dir))
self.test_bag_fetch_ftp_dir = os.path.join(self.tmpdir, 'test-data', 'test-bag-fetch-ftp')
self.assertTrue(os.path.isdir(self.test_bag_fetch_ftp_dir))
self.test_bag_invalid_structure_manifest_dir = os.path.join(
self.tmpdir, 'test-data', 'test-bag-invalid-structure-manifest')
self.assertTrue(os.path.isdir(self.test_bag_invalid_structure_manifest_dir))
self.test_bag_invalid_structure_filesystem_dir = os.path.join(
self.tmpdir, 'test-data', 'test-bag-invalid-structure-filesystem')
self.assertTrue(os.path.isdir(self.test_bag_invalid_structure_filesystem_dir))
self.test_bag_invalid_structure_fetch_dir = os.path.join(
self.tmpdir, 'test-data', 'test-bag-invalid-structure-fetch')
self.assertTrue(os.path.isdir(self.test_bag_invalid_structure_fetch_dir))
self.test_bag_invalid_state_manifest_fetch_dir = os.path.join(
self.tmpdir, 'test-data', 'test-bag-invalid-state-manifest-fetch')
self.assertTrue(os.path.isdir(self.test_bag_invalid_state_manifest_fetch_dir))
self.test_bag_invalid_state_fetch_filesize_dir = os.path.join(
self.tmpdir, 'test-data', 'test-bag-invalid-state-fetch-filesize')
self.assertTrue(os.path.isdir(self.test_bag_invalid_state_fetch_filesize_dir))
self.test_bag_update_invalid_fetch_dir = os.path.join(
self.tmpdir, 'test-data', 'test-bag-update-invalid-fetch')
self.assertTrue(os.path.isdir(self.test_bag_update_invalid_fetch_dir))
self.test_bag_invalid_state_duplicate_manifest_fetch_dir = os.path.join(
self.tmpdir, 'test-data', 'test-bag-invalid-state-duplicate-manifest-fetch')
self.assertTrue(os.path.isdir(self.test_bag_invalid_state_duplicate_manifest_fetch_dir))
def tearDown(self):
if os.path.isdir(self.tmpdir):
shutil.rmtree(self.tmpdir)
gc.collect()
def assertExpectedMessages(self, messages, output):
for expected in messages:
self.assertIn(expected, output, "Expected \'%s\' in output string." % expected)
def assertUnexpectedMessages(self, messages, output):
for unexpected in messages:
self.assertNotIn(unexpected, output, "Unexpected \'%s\' in output string." % unexpected)
def getTestHeader(self, desc, args=None):
return str('\n\n[%s: %s]\n%s') % (self.__class__.__name__, desc, (' '.join(args) + '\n') if args else "")
| [
"[email protected]"
] | |
e209a95d690abfd4189adb9be07986331f77a9b4 | f889bc01147869459c0a516382e7b95221295a7b | /test/test_customer_data_customer_search_results_interface.py | edf62d3b5d9ee180f4386a1c633555c9f563ed2b | [] | no_license | wildatheart/magento2-api-client | 249a86f5c0289743f8df5b0324ccabd76f326512 | e6a707f85b37c6c3e4ef3ff78507a7deb8f71427 | refs/heads/master | 2021-07-14T16:01:17.644472 | 2017-10-18T13:33:08 | 2017-10-18T13:33:08 | 107,412,121 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,167 | py | # coding: utf-8
"""
Magento Community
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import swagger_client
from swagger_client.rest import ApiException
from swagger_client.models.customer_data_customer_search_results_interface import CustomerDataCustomerSearchResultsInterface
class TestCustomerDataCustomerSearchResultsInterface(unittest.TestCase):
""" CustomerDataCustomerSearchResultsInterface unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testCustomerDataCustomerSearchResultsInterface(self):
"""
Test CustomerDataCustomerSearchResultsInterface
"""
# FIXME: construct object with mandatory attributes with example values
#model = swagger_client.models.customer_data_customer_search_results_interface.CustomerDataCustomerSearchResultsInterface()
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
26a913ad3667d31c973daeb7828df81b0f4ab511 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_199/134.py | 085bae10457bf1468888eded9f5f112884ecf5f6 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 398 | py | import numpy as np
def mainFunc(S, K):
ret = 0
for i in range(len(S)-K+1):
if S[i]=="-":
ret+=1
for j in range(K):
if S[i+j]=='-': S[i+j]='+'
else: S[i+j]='-'
for i in range(K):
if S[len(S)-1-i]=='-': return "IMPOSSIBLE"
return str(ret)
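# Greedy left-to-right flipping: whenever position i shows '-', flip the
# K-wide window starting at i. Sample check (values assumed from the
# problem statement): mainFunc(list('---+-++-'), 3) -> '3'.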
T = int(raw_input())
for t in range(T):
P = raw_input().split(' ')
print 'Case #' + str(t+1) + ': ' + mainFunc(list(P[0]), int(P[1])) | [
"[email protected]"
] | |
2dbd39a7be367e0af9174da5b1b0cbfe9ed4b588 | 8d30f2d627eb15bf0a3ff541812af3a289fffcfc | /backend/yfkyfkfu_1352/settings.py | bed71fd8664c0a8b11e6d92e1c95522d7680c020 | [] | no_license | crowdbotics-apps/yfkyfkfu-1352 | 2eda6d948adf2fd9fcace7997d606ff057168e03 | fd53b7f60397737ad99e7cae6e81c1b91c4ae22c | refs/heads/master | 2022-12-15T03:23:19.526405 | 2019-03-13T18:48:22 | 2019-03-13T18:48:22 | 175,480,158 | 0 | 0 | null | 2022-12-06T15:10:19 | 2019-03-13T18:48:19 | JavaScript | UTF-8 | Python | false | false | 4,746 | py | """
Django settings for yfkyfkfu_1352 project.
Generated by 'django-admin startproject' using Django 1.11.16.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
import environ
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
env = environ.Env()
environ.Env.read_env(os.path.join(BASE_DIR, '.env'))
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool('DEBUG', default=True)
ALLOWED_HOSTS = ['*']
SITE_ID = 1
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'yfkyfkfu_1352.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates'), ],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'yfkyfkfu_1352.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'yfkyfkfu_1352',
'USER': 'yfkyfkfu_1352',
'PASSWORD': 'yfkyfkfu_1352',
'HOST': 'localhost',
'PORT': '5432',
}
}
if env.str('DATABASE_URL', default=None):
DATABASES = {
'default': env.db()
}
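# env.db() parses the DATABASE_URL variable; an example value (assumed) is
# DATABASE_URL=postgres://yfkyfkfu_1352:yfkyfkfu_1352@localhost:5432/yfkyfkfu_1352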
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static')
]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
# allauth
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = None
LOGIN_REDIRECT_URL = '/'
if DEBUG:
# output email to console instead of sending
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = env.str('SENDGRID_USERNAME', '')
EMAIL_HOST_PASSWORD = env.str('SENDGRID_PASSWORD', '')
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# Import local settings
try:
from .local_settings import *
INSTALLED_APPS += DEBUG_APPS
except:
pass
| [
"[email protected]"
] | |
f3234921e0a1cd9eaf4c40340a44f85a9b4c646b | 74c8f10bfc82e762d83db424e5d4f4b3681ffba0 | /venv/Lib/site-packages/winrt/windows/security/cryptography/certificates/__init__.py | df6f418b8fd0c94949f27c7b4d23e5ac4cb7a211 | [] | no_license | meghnaraswan/HEARTLabHapticSleeveProject | 1e7bd0710c26dad6588f73f6b1f0741c8e31334d | 6c2c9a227aaacf34b2205bcb1a856cc70ceccd55 | refs/heads/main | 2023-07-04T01:51:01.462007 | 2021-08-17T13:34:43 | 2021-08-17T13:34:43 | 390,823,593 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,949 | py | # WARNING: Please don't edit this file. It was generated by Python/WinRT v0.9.210202.1
import typing, winrt
import enum
_ns_module = winrt._import_ns_module("Windows.Security.Cryptography.Certificates")
try:
import winrt.windows.foundation
except:
pass
try:
import winrt.windows.foundation.collections
except:
pass
try:
import winrt.windows.networking
except:
pass
try:
import winrt.windows.storage.streams
except:
pass
class CertificateChainPolicy(enum.IntEnum):
BASE = 0
SSL = 1
N_T_AUTHENTICATION = 2
MICROSOFT_ROOT = 3
class ChainValidationResult(enum.IntEnum):
SUCCESS = 0
UNTRUSTED = 1
REVOKED = 2
EXPIRED = 3
INCOMPLETE_CHAIN = 4
INVALID_SIGNATURE = 5
WRONG_USAGE = 6
INVALID_NAME = 7
INVALID_CERTIFICATE_AUTHORITY_POLICY = 8
BASIC_CONSTRAINTS_ERROR = 9
UNKNOWN_CRITICAL_EXTENSION = 10
REVOCATION_INFORMATION_MISSING = 11
REVOCATION_FAILURE = 12
OTHER_ERRORS = 13
class EnrollKeyUsages(enum.IntFlag):
NONE = 0
DECRYPTION = 0x1
SIGNING = 0x2
KEY_AGREEMENT = 0x4
ALL = 0xffffff
class ExportOption(enum.IntEnum):
NOT_EXPORTABLE = 0
EXPORTABLE = 1
class InstallOptions(enum.IntFlag):
NONE = 0
DELETE_EXPIRED = 0x1
class KeyProtectionLevel(enum.IntEnum):
NO_CONSENT = 0
CONSENT_ONLY = 1
CONSENT_WITH_PASSWORD = 2
CONSENT_WITH_FINGERPRINT = 3
class KeySize(enum.IntEnum):
INVALID = 0
RSA2048 = 2048
RSA4096 = 4096
class SignatureValidationResult(enum.IntEnum):
SUCCESS = 0
INVALID_PARAMETER = 1
BAD_MESSAGE = 2
INVALID_SIGNATURE = 3
OTHER_ERRORS = 4
Certificate = _ns_module.Certificate
CertificateChain = _ns_module.CertificateChain
CertificateEnrollmentManager = _ns_module.CertificateEnrollmentManager
CertificateExtension = _ns_module.CertificateExtension
CertificateKeyUsages = _ns_module.CertificateKeyUsages
CertificateQuery = _ns_module.CertificateQuery
CertificateRequestProperties = _ns_module.CertificateRequestProperties
CertificateStore = _ns_module.CertificateStore
CertificateStores = _ns_module.CertificateStores
ChainBuildingParameters = _ns_module.ChainBuildingParameters
ChainValidationParameters = _ns_module.ChainValidationParameters
CmsAttachedSignature = _ns_module.CmsAttachedSignature
CmsDetachedSignature = _ns_module.CmsDetachedSignature
CmsSignerInfo = _ns_module.CmsSignerInfo
CmsTimestampInfo = _ns_module.CmsTimestampInfo
KeyAlgorithmNames = _ns_module.KeyAlgorithmNames
KeyAttestationHelper = _ns_module.KeyAttestationHelper
KeyStorageProviderNames = _ns_module.KeyStorageProviderNames
PfxImportParameters = _ns_module.PfxImportParameters
StandardCertificateStoreNames = _ns_module.StandardCertificateStoreNames
SubjectAlternativeNameInfo = _ns_module.SubjectAlternativeNameInfo
UserCertificateEnrollmentManager = _ns_module.UserCertificateEnrollmentManager
UserCertificateStore = _ns_module.UserCertificateStore
| [
"[email protected]"
] | |
f7ad37c473eb788222d0d0e5934e021dc5afeffc | 238e46a903cf7fac4f83fa8681094bf3c417d22d | /VTK/vtk_7.1.1_x64_Release/lib/python2.7/site-packages/twisted/lore/_version.py | c81adaac2b5e3ff5d0fc8545fdf68c50dfda705d | [
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"BSD-3-Clause"
] | permissive | baojunli/FastCAE | da1277f90e584084d461590a3699b941d8c4030b | a3f99f6402da564df87fcef30674ce5f44379962 | refs/heads/master | 2023-02-25T20:25:31.815729 | 2021-02-01T03:17:33 | 2021-02-01T03:17:33 | 268,390,180 | 1 | 0 | BSD-3-Clause | 2020-06-01T00:39:31 | 2020-06-01T00:39:31 | null | UTF-8 | Python | false | false | 271 | py | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
# This is an auto-generated file. Do not edit it.
"""
Provides Twisted version information.
"""
from twisted.python import versions
version = versions.Version('twisted.lore', 14, 0, 0)
| [
"l”[email protected]“"
] | |
cd4d183ad4e4c95ed4724893de2871e7b5527640 | f26ea24cebb60b8b6176ee4d3ecbec477be9b7c6 | /native_tags/contrib/mapreduce.py | 50346ee6e3655e78113e5d8117322daf7447de1a | [
"BSD-3-Clause"
] | permissive | lukaszb/django-native-tags | 994fcc0e382ebfd8dfed868a4caeeb33aa78aee5 | e1987497b652b1939d152cbc6bc281daf727b34c | refs/heads/master | 2021-01-09T06:13:18.485447 | 2010-05-04T20:08:15 | 2010-05-04T20:09:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,172 | py | from native_tags.decorators import function
def get_func(func_name, op=True):
import operator
from native_tags.registry import register
try:
return register['function'][func_name]
except KeyError:
pass
if func_name in __builtins__:
return __builtins__[func_name]
elif hasattr(operator, func_name):
return getattr(operator, func_name)
return lambda: None
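# Resolution order: a function registered with native_tags wins, then Python
# builtins, then the operator module; e.g. get_func('add') resolves to
# operator.add when no 'add' function has been registered.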
def do_map(func_name, *sequence):
"""
Return a list of the results of applying the function to the items of
the argument sequence(s).
Functions may be registered with ``native_tags``
or can be ``builtins`` or from the ``operator`` module
If more than one sequence is given, the
function is called with an argument list consisting of the corresponding
item of each sequence, substituting None for missing values when not all
sequences have the same length. If the function is None, return a list of
the items of the sequence (or a list of tuples if more than one sequence).
Syntax::
{% map [function] [sequence] %}
{% map [function] [item1 item2 ...] %}
For example::
{% map sha1 hello world %}
calculates::
[sha1(hello), sha1(world)]
"""
if len(sequence)==1:
sequence = sequence[0]
return map(get_func(func_name, False), sequence)
do_map = function(do_map, name='map')
def do_reduce(func_name, *sequence):
"""
Apply a function of two arguments cumulatively to the items of a sequence,
from left to right, so as to reduce the sequence to a single value.
Functions may be registered with ``native_tags``
or can be ``builtins`` or from the ``operator`` module
Syntax::
{% reduce [function] [sequence] %}
{% reduce [function] [item1 item2 ...] %}
For example::
{% reduce add 1 2 3 4 5 %}
calculates::
((((1+2)+3)+4)+5) = 15
"""
if len(sequence)==1:
sequence = sequence[0]
return reduce(get_func(func_name), sequence)
do_reduce = function(do_reduce, name='reduce')
| [
"[email protected]"
] | |
e0222454928efa8bc1dbb30a24c6055737fc0f20 | 9a7550dc49afa11b1bb900b8e22752cfb9e941ed | /oauth/urls.py | 92ffdbb5ec9cbb586ee9683ea9d549b451ee25c1 | [] | no_license | SunmoonSan/MapleBlog | 862c4e3631a07ab4e6edb1fd934a4f4768ddd1f3 | bc61d42f3927016f7e9e7fb86534639c70cb6479 | refs/heads/master | 2020-03-22T08:46:52.563003 | 2018-07-05T03:29:48 | 2018-07-05T03:29:48 | 139,790,441 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 897 | py | """sunmoon URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from . import views
app_name = 'oauth'
urlpatterns = [
path('github_login/', views.github_login, name='github_login'),
path('github_check/', views.github_check, name='github_check'),
]
| [
"[email protected]"
] | |
808cf496a8bf650c2d81d9c2043e69ea1f1164d0 | 974d04d2ea27b1bba1c01015a98112d2afb78fe5 | /test/legacy_test/test_dataset_cifar.py | 1fca233d3be786b2a42f7dea6aed8dae58d6c25b | [
"Apache-2.0"
] | permissive | PaddlePaddle/Paddle | b3d2583119082c8e4b74331dacc4d39ed4d7cff0 | 22a11a60e0e3d10a3cf610077a3d9942a6f964cb | refs/heads/develop | 2023-08-17T21:27:30.568889 | 2023-08-17T12:38:22 | 2023-08-17T12:38:22 | 65,711,522 | 20,414 | 5,891 | Apache-2.0 | 2023-09-14T19:20:51 | 2016-08-15T06:59:08 | C++ | UTF-8 | Python | false | false | 4,284 | py | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
from paddle.vision.datasets import Cifar10, Cifar100
class TestCifar10Train(unittest.TestCase):
def test_main(self):
cifar = Cifar10(mode='train')
self.assertTrue(len(cifar) == 50000)
# traversal whole dataset may cost a
# long time, randomly check 1 sample
idx = np.random.randint(0, 50000)
data, label = cifar[idx]
data = np.array(data)
self.assertTrue(len(data.shape) == 3)
self.assertTrue(data.shape[2] == 3)
self.assertTrue(data.shape[1] == 32)
self.assertTrue(data.shape[0] == 32)
self.assertTrue(0 <= int(label) <= 9)
class TestCifar10Test(unittest.TestCase):
def test_main(self):
cifar = Cifar10(mode='test')
self.assertTrue(len(cifar) == 10000)
# traversal whole dataset may cost a
# long time, randomly check 1 sample
idx = np.random.randint(0, 10000)
data, label = cifar[idx]
data = np.array(data)
self.assertTrue(len(data.shape) == 3)
self.assertTrue(data.shape[2] == 3)
self.assertTrue(data.shape[1] == 32)
self.assertTrue(data.shape[0] == 32)
self.assertTrue(0 <= int(label) <= 9)
# test cv2 backend
cifar = Cifar10(mode='test', backend='cv2')
self.assertTrue(len(cifar) == 10000)
# traversal whole dataset may cost a
# long time, randomly check 1 sample
idx = np.random.randint(0, 10000)
data, label = cifar[idx]
self.assertTrue(len(data.shape) == 3)
self.assertTrue(data.shape[2] == 3)
self.assertTrue(data.shape[1] == 32)
self.assertTrue(data.shape[0] == 32)
        self.assertTrue(0 <= int(label) <= 9)
with self.assertRaises(ValueError):
cifar = Cifar10(mode='test', backend=1)
class TestCifar100Train(unittest.TestCase):
def test_main(self):
cifar = Cifar100(mode='train')
self.assertTrue(len(cifar) == 50000)
# traversal whole dataset may cost a
# long time, randomly check 1 sample
idx = np.random.randint(0, 50000)
data, label = cifar[idx]
data = np.array(data)
self.assertTrue(len(data.shape) == 3)
self.assertTrue(data.shape[2] == 3)
self.assertTrue(data.shape[1] == 32)
self.assertTrue(data.shape[0] == 32)
self.assertTrue(0 <= int(label) <= 99)
class TestCifar100Test(unittest.TestCase):
def test_main(self):
cifar = Cifar100(mode='test')
self.assertTrue(len(cifar) == 10000)
# traversal whole dataset may cost a
# long time, randomly check 1 sample
idx = np.random.randint(0, 10000)
data, label = cifar[idx]
data = np.array(data)
self.assertTrue(len(data.shape) == 3)
self.assertTrue(data.shape[2] == 3)
self.assertTrue(data.shape[1] == 32)
self.assertTrue(data.shape[0] == 32)
self.assertTrue(0 <= int(label) <= 99)
# test cv2 backend
cifar = Cifar100(mode='test', backend='cv2')
self.assertTrue(len(cifar) == 10000)
# traversal whole dataset may cost a
# long time, randomly check 1 sample
idx = np.random.randint(0, 10000)
data, label = cifar[idx]
self.assertTrue(len(data.shape) == 3)
self.assertTrue(data.shape[2] == 3)
self.assertTrue(data.shape[1] == 32)
self.assertTrue(data.shape[0] == 32)
self.assertTrue(0 <= int(label) <= 99)
with self.assertRaises(ValueError):
cifar = Cifar100(mode='test', backend=1)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
cf0672b04e3ad7d5240dff6351e4a75134aff575 | a66460a46611483dfbdc94c7996893f427e60d97 | /ansible/my_env/lib/python2.7/site-packages/ansible/modules/cloud/amazon/aws_direct_connect_connection.py | 40aede0d048137aeaccca9bd1a9ea0bc41ad91cc | [
"MIT"
] | permissive | otus-devops-2019-02/yyashkin_infra | 06b57807dde26f94f501828c07503d6bf1d70816 | 0cd0c003884155ac922e3e301305ac202de7028c | refs/heads/master | 2020-04-29T02:42:22.056724 | 2019-05-15T16:24:35 | 2019-05-15T16:24:35 | 175,780,718 | 0 | 0 | MIT | 2019-05-15T16:24:36 | 2019-03-15T08:37:35 | HCL | UTF-8 | Python | false | false | 12,050 | py | #!/usr/bin/python
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: aws_direct_connect_connection
short_description: Creates, deletes, modifies a DirectConnect connection
description:
- Create, update, or delete a Direct Connect connection between a network and a specific AWS Direct Connect location.
Upon creation the connection may be added to a link aggregation group or established as a standalone connection.
The connection may later be associated or disassociated with a link aggregation group.
version_added: "2.4"
author: "Sloane Hertel (@s-hertel)"
extends_documentation_fragment:
- aws
- ec2
requirements:
- boto3
- botocore
options:
state:
description:
- The state of the Direct Connect connection.
choices:
- present
- absent
name:
description:
- The name of the Direct Connect connection. This is required to create a
new connection. To recreate or delete a connection I(name) or I(connection_id)
is required.
connection_id:
description:
- The ID of the Direct Connect connection. I(name) or I(connection_id) is
required to recreate or delete a connection. Modifying attributes of a
connection with I(forced_update) will result in a new Direct Connect connection ID.
location:
description:
- Where the Direct Connect connection is located. Required when I(state=present).
bandwidth:
description:
- The bandwidth of the Direct Connect connection. Required when I(state=present).
choices:
- 1Gbps
- 10Gbps
link_aggregation_group:
description:
- The ID of the link aggregation group you want to associate with the connection.
This is optional in case a stand-alone connection is desired.
forced_update:
description:
- To modify bandwidth or location the connection will need to be deleted and recreated.
By default this will not happen - this option must be set to True.
type: bool
"""
EXAMPLES = """
# create a Direct Connect connection
- aws_direct_connect_connection:
name: ansible-test-connection
state: present
location: EqDC2
link_aggregation_group: dxlag-xxxxxxxx
bandwidth: 1Gbps
register: dc
# disassociate the LAG from the connection
- aws_direct_connect_connection:
state: present
connection_id: dc.connection.connection_id
location: EqDC2
bandwidth: 1Gbps
# replace the connection with one with more bandwidth
- aws_direct_connect_connection:
state: present
name: ansible-test-connection
location: EqDC2
bandwidth: 10Gbps
forced_update: True
# delete the connection
- aws_direct_connect_connection:
state: absent
name: ansible-test-connection
"""
RETURN = """
connection:
description: The attributes of the direct connect connection.
type: complex
returned: I(state=present)
contains:
aws_device:
description: The endpoint which the physical connection terminates on.
returned: when the requested state is no longer 'requested'
type: string
sample: EqDC2-12pmo7hemtz1z
bandwidth:
description: The bandwidth of the connection.
returned: always
type: string
sample: 1Gbps
connection_id:
description: The ID of the connection.
returned: always
type: string
sample: dxcon-ffy9ywed
connection_name:
description: The name of the connection.
returned: always
type: string
sample: ansible-test-connection
connection_state:
description: The state of the connection.
returned: always
type: string
sample: pending
loa_issue_time:
description: The issue time of the connection's Letter of Authorization - Connecting Facility Assignment.
returned: when the LOA-CFA has been issued (the connection state will no longer be 'requested')
type: string
sample: '2018-03-20T17:36:26-04:00'
location:
description: The location of the connection.
returned: always
type: string
sample: EqDC2
owner_account:
description: The account that owns the direct connect connection.
returned: always
type: string
sample: '123456789012'
region:
description: The region in which the connection exists.
returned: always
type: string
sample: us-east-1
"""
import traceback
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import (camel_dict_to_snake_dict, AWSRetry)
from ansible.module_utils.aws.direct_connect import (DirectConnectError, delete_connection,
associate_connection_and_lag, disassociate_connection_and_lag)
try:
from botocore.exceptions import BotoCoreError, ClientError
except:
pass
# handled by imported AnsibleAWSModule
retry_params = {"tries": 10, "delay": 5, "backoff": 1.2, "catch_extra_error_codes": ["DirectConnectClientException"]}
def connection_status(client, connection_id):
return connection_exists(client, connection_id=connection_id, connection_name=None, verify=False)
def connection_exists(client, connection_id=None, connection_name=None, verify=True):
params = {}
if connection_id:
params['connectionId'] = connection_id
try:
response = AWSRetry.backoff(**retry_params)(client.describe_connections)(**params)
except (BotoCoreError, ClientError) as e:
if connection_id:
msg = "Failed to describe DirectConnect ID {0}".format(connection_id)
else:
msg = "Failed to describe DirectConnect connections"
raise DirectConnectError(msg=msg,
last_traceback=traceback.format_exc(),
exception=e)
match = []
connection = []
# look for matching connections
if len(response.get('connections', [])) == 1 and connection_id:
if response['connections'][0]['connectionState'] != 'deleted':
match.append(response['connections'][0]['connectionId'])
connection.extend(response['connections'])
for conn in response.get('connections', []):
if connection_name == conn['connectionName'] and conn['connectionState'] != 'deleted':
match.append(conn['connectionId'])
connection.append(conn)
# verifying if the connections exists; if true, return connection identifier, otherwise return False
if verify and len(match) == 1:
return match[0]
elif verify:
return False
# not verifying if the connection exists; just return current connection info
elif len(connection) == 1:
return {'connection': connection[0]}
return {'connection': {}}
def create_connection(client, location, bandwidth, name, lag_id):
if not name:
raise DirectConnectError(msg="Failed to create a Direct Connect connection: name required.")
params = {
'location': location,
'bandwidth': bandwidth,
'connectionName': name,
}
if lag_id:
params['lagId'] = lag_id
try:
connection = AWSRetry.backoff(**retry_params)(client.create_connection)(**params)
except (BotoCoreError, ClientError) as e:
raise DirectConnectError(msg="Failed to create DirectConnect connection {0}".format(name),
last_traceback=traceback.format_exc(),
exception=e)
return connection['connectionId']
def changed_properties(current_status, location, bandwidth):
current_bandwidth = current_status['bandwidth']
current_location = current_status['location']
return current_bandwidth != bandwidth or current_location != location
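# Bandwidth and location cannot be modified in place; as documented above, a
# change to either requires deleting and recreating the connection, which is
# only done when forced_update is true.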
@AWSRetry.backoff(**retry_params)
def update_associations(client, latest_state, connection_id, lag_id):
changed = False
if 'lagId' in latest_state and lag_id != latest_state['lagId']:
disassociate_connection_and_lag(client, connection_id, lag_id=latest_state['lagId'])
changed = True
if (changed and lag_id) or (lag_id and 'lagId' not in latest_state):
associate_connection_and_lag(client, connection_id, lag_id)
changed = True
return changed
def ensure_present(client, connection_id, connection_name, location, bandwidth, lag_id, forced_update):
# the connection is found; get the latest state and see if it needs to be updated
if connection_id:
latest_state = connection_status(client, connection_id=connection_id)['connection']
if changed_properties(latest_state, location, bandwidth) and forced_update:
ensure_absent(client, connection_id)
return ensure_present(client=client,
connection_id=None,
connection_name=connection_name,
location=location,
bandwidth=bandwidth,
lag_id=lag_id,
forced_update=forced_update)
elif update_associations(client, latest_state, connection_id, lag_id):
return True, connection_id
# no connection found; create a new one
else:
return True, create_connection(client, location, bandwidth, connection_name, lag_id)
return False, connection_id
@AWSRetry.backoff(**retry_params)
def ensure_absent(client, connection_id):
changed = False
if connection_id:
delete_connection(client, connection_id)
changed = True
return changed
def main():
argument_spec = dict(
state=dict(required=True, choices=['present', 'absent']),
name=dict(),
location=dict(),
bandwidth=dict(choices=['1Gbps', '10Gbps']),
link_aggregation_group=dict(),
connection_id=dict(),
forced_update=dict(type='bool', default=False)
)
module = AnsibleAWSModule(
argument_spec=argument_spec,
required_one_of=[('connection_id', 'name')],
required_if=[('state', 'present', ('location', 'bandwidth'))]
)
connection = module.client('directconnect')
state = module.params.get('state')
try:
connection_id = connection_exists(
connection,
connection_id=module.params.get('connection_id'),
connection_name=module.params.get('name')
)
if not connection_id and module.params.get('connection_id'):
module.fail_json(msg="The Direct Connect connection {0} does not exist.".format(module.params.get('connection_id')))
if state == 'present':
changed, connection_id = ensure_present(connection,
connection_id=connection_id,
connection_name=module.params.get('name'),
location=module.params.get('location'),
bandwidth=module.params.get('bandwidth'),
lag_id=module.params.get('link_aggregation_group'),
forced_update=module.params.get('forced_update'))
response = connection_status(connection, connection_id)
elif state == 'absent':
changed = ensure_absent(connection, connection_id)
response = {}
except DirectConnectError as e:
if e.last_traceback:
module.fail_json(msg=e.msg, exception=e.last_traceback, **camel_dict_to_snake_dict(e.exception.response))
else:
module.fail_json(msg=e.msg)
module.exit_json(changed=changed, **camel_dict_to_snake_dict(response))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
a108c73292745efbb8c63a7021a928c423ba5f28 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_136/3242.py | f20b77deb7902b6a054756552bf203772edf07d0 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,299 | py | #!/usr/bin/python
def solve_n(case, C, F, X):
t = 0.0 # Time stamp
cc = 0.0 # Number of cookies
cookie_rate = 2.0
solved = False
while not solved:
time_to_next_event = C / cookie_rate
## Will I solve in this round
time_to_solve = t + (X - cc) / cookie_rate
if cc + cookie_rate * time_to_next_event >= X:
t += (X - cc)/cookie_rate
break
cc += cookie_rate * time_to_next_event
t += time_to_next_event
buy_cookie = True
## Should I buy a cookie?
# print "Before Buy cookies: %0.7f at t: %0.7f" % (cc, t)
if buy_cookie:
cc -= C
cookie_rate += F
# print "After Buy cookies: %f at t: %0.7f, rate: %0.7f" % (cc, t, cookie_rate)
new_time_to_solve = t + (X - cc) / cookie_rate
# print time_to_solve, new_time_to_solve
if new_time_to_solve > time_to_solve:
t = time_to_solve
break
print "Case #%d: %0.7f" % (case, t)
def solve(ip):
count = int(ip.readline())
# solve_n(-1, 500, 4.0, 2000.0)
for case in range(count):
C, F, X = map(float, ip.readline().split())
solve_n(case + 1, C, F, X)
if __name__ == "__main__":
import sys
solve(open(sys.argv[1], "r"))
| [
"[email protected]"
] | |
b270e0997d1a0890099ab916b7ac8762bc7f9c20 | 3fa27b3ad1c1ca90f2bcf311d89fe8c2ca241cb4 | /Stores/migrations/0002_auto_20201004_2348.py | 9df9d2685983fb78f3531d6d90e5557aaf8fa728 | [] | no_license | emperorDuke/django-backend-for-ecommerce | 717e15d7be899abcd5a4b7a7d2203c612f001aeb | 83c1ca4d016d876a5c8711ac5cdc448d5a4a533d | refs/heads/master | 2023-02-10T08:57:17.852721 | 2021-01-02T15:49:07 | 2021-01-02T15:49:07 | 271,039,012 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,066 | py | # Generated by Django 2.2.7 on 2020-10-04 22:48
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('Users', '0001_initial'),
('Stores', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.AddField(
model_name='store',
name='address',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='Users.Address'),
),
migrations.AddField(
model_name='store',
name='merchant',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='advert',
name='store',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='Stores.Store', verbose_name='store'),
),
]
| [
"[email protected]"
] | |
9b71bfc82bf69157a0a2f7f8ecb174d1811970bd | f5d4863b6a62ef19ffc98e4f94f6ade1bc8810d3 | /Hash Table/274_H-Index.py | 76ba54016e28d234d66981f9499e284a2c30bef6 | [] | no_license | xiaomojie/LeetCode | 138808eb83938f9bd3c2e8a755d908509dff0fd3 | eedf73b5f167025a97f0905d3718b6eab2ee3e09 | refs/heads/master | 2021-06-12T09:26:01.257348 | 2019-10-23T10:41:06 | 2019-10-23T10:41:06 | 76,184,467 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,410 | py | """
Given an array of citations (each citation is a non-negative integer) of a researcher, write a function to compute the researcher's h-index.
According to the definition of h-index on Wikipedia: "A scientist has index h if h of his/her N papers have at least h citations each, and the other N − h papers have no more than h citations each."
Example:
Input: citations = [3,0,6,1,5]
Output: 3
Explanation: [3,0,6,1,5] means the researcher has 5 papers in total and each of them had
received 3, 0, 6, 1, 5 citations respectively.
Since the researcher has 3 papers with at least 3 citations each and the remaining
two with no more than 3 citations each, her h-index is 3.
Note: If there are several possible values for h, the maximum one is taken as the h-index.
"""
class Solution(object):
# 法一
def hIndex1(self, citations):
"""
:type citations: List[int]
:rtype: int
"""
h = 0
citations.sort()
for i in range(len(citations)):
# if len(citations) - i >= citations[i]:
h = max(h, min(citations[i], len(citations) - i))
return h
def hIndex(self, citations):
h = len(citations)
citations.sort()
for i in range(len(citations)):
if citations[i] >= len(citations) - i:
return len(citations) - i
return 0
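# Quick example (assumed): Solution().hIndex([3, 0, 6, 1, 5]) -> 3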
| [
"[email protected]"
] | |
77a18bf8239b47b8f953066ffb358acd3967a8d0 | 129e65b23a172ea686f220868c923eb4b08493c7 | /game/PyGame/Others/memorypuzzle_buggy4.py | 0d3542bc9379b6b5f7d4f9b764be8fc0b21c0128 | [] | no_license | urstkj/Python | 8dcf434858f9eb171204e064237bb10d76fe7f16 | 5a41df7c57c1975e0d335f59b528e28ba63cab85 | refs/heads/master | 2023-01-01T14:15:42.833144 | 2020-10-12T15:00:55 | 2020-10-12T15:00:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,234 | py | #!/usr/local/bin/python
#-*- coding: utf-8 -*-
# This version of the game has a bug in it. See if you can figure out how to fix it.
# http://inventwithpython.com/pygame/buggy
# Bug Description: Icons don't show up
# Memory Puzzle
# By Al Sweigart [email protected]
# http://inventwithpython.com/pygame
# Released under a "Simplified BSD" license
import pygame
from pygame.locals import *
import random
import sys
FPS = 30 # frames per second, the general speed of the program
WINDOWWIDTH = 640 # size of window's width in pixels
WINDOWHEIGHT = 480 # size of windows' height in pixels
REVEALSPEED = 8 # speed boxes' sliding reveals and covers
BOXSIZE = 40 # size of box height & width in pixels
GAPSIZE = 10 # size of gap between boxes in pixels
BOARDWIDTH = 10 # number of columns of icons
BOARDHEIGHT = 7 # number of rows of icons
assert (BOARDWIDTH * BOARDHEIGHT) % 2 == 0, 'Board needs to have an even number of boxes for pairs of matches.'
XMARGIN = int((WINDOWWIDTH - (BOARDWIDTH * (BOXSIZE + GAPSIZE))) / 2)
YMARGIN = int((WINDOWHEIGHT - (BOARDHEIGHT * (BOXSIZE + GAPSIZE))) / 2)
# R G B
GRAY = (100, 100, 100)
NAVYBLUE = (60, 60, 100)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
ORANGE = (255, 128, 0)
PURPLE = (255, 0, 255)
CYAN = (0, 255, 255)
BGCOLOR = NAVYBLUE
LIGHTBGCOLOR = GRAY
BOXCOLOR = WHITE
HIGHLIGHTCOLOR = BLUE
DONUT = 'donut'
SQUARE = 'square'
DIAMOND = 'diamond'
LINES = 'lines'
OVAL = 'oval'
ALLCOLORS = (RED, GREEN, BLUE, YELLOW, ORANGE, PURPLE, CYAN)
ALLSHAPES = (DONUT, SQUARE, DIAMOND, LINES, OVAL)
assert len(ALLCOLORS) * len(ALLSHAPES) * 2 >= BOARDWIDTH * BOARDHEIGHT, "Board is too big for the number of shapes/colors defined."
def main():
global FPSCLOCK, DISPLAYSURF
pygame.init()
FPSCLOCK = pygame.time.Clock()
DISPLAYSURF = pygame.display.set_mode((WINDOWWIDTH, WINDOWHEIGHT))
mousex = 0 # used to store x coordinate of mouse event
mousey = 0 # used to store y coordinate of mouse event
pygame.display.set_caption('Memory Game')
mainBoard = getRandomizedBoard()
revealedBoxes = generateRevealedBoxesData(False)
firstSelection = None # stores the (x, y) of the first box clicked.
DISPLAYSURF.fill(BGCOLOR)
startGameAnimation(mainBoard)
while True: # main game loop
mouseClicked = False
DISPLAYSURF.fill(BGCOLOR) # drawing the window
drawBoard(mainBoard, revealedBoxes)
for event in pygame.event.get(): # event handling loop
if event.type == QUIT or (event.type == KEYUP and event.key == K_ESCAPE):
pygame.quit()
sys.exit()
elif event.type == MOUSEMOTION:
mousex, mousey = event.pos
elif event.type == MOUSEBUTTONUP:
mousex, mousey = event.pos
mouseClicked = True
boxx, boxy = getBoxAtPixel(mousex, mousey)
if boxx != None and boxy != None:
# The mouse is currently over a box.
if not revealedBoxes[boxx][boxy]:
drawHighlightBox(boxx, boxy)
if not revealedBoxes[boxx][boxy] and mouseClicked:
revealBoxesAnimation(mainBoard, [(boxx, boxy)])
revealedBoxes[boxx][boxy] = True # set the box as "revealed"
if firstSelection == None: # the current box was the first box clicked
firstSelection = (boxx, boxy)
else: # the current box was the second box clicked
# Check if there is a match between the two icons.
icon1shape, icon1color = getShapeAndColor(mainBoard, firstSelection[0], firstSelection[1])
icon2shape, icon2color = getShapeAndColor(mainBoard, boxx, boxy)
if icon1shape != icon2shape or icon1color != icon2color:
# Icons don't match. Re-cover up both selections.
pygame.time.wait(1000) # 1000 milliseconds = 1 sec
coverBoxesAnimation(mainBoard, [(firstSelection[0], firstSelection[1]), (boxx, boxy)])
revealedBoxes[firstSelection[0]][firstSelection[1]] = False
revealedBoxes[boxx][boxy] = False
elif hasWon(revealedBoxes): # check if all pairs found
gameWonAnimation(mainBoard)
pygame.time.wait(2000)
# Reset the board
mainBoard = getRandomizedBoard()
revealedBoxes = generateRevealedBoxesData(False)
# Show the fully unrevealed board for a second.
drawBoard(mainBoard, revealedBoxes)
pygame.display.update()
pygame.time.wait(1000)
# Replay the start game animation.
startGameAnimation(mainBoard)
firstSelection = None # reset firstSelection variable
# Redraw the screen and wait a clock tick.
pygame.display.update()
FPSCLOCK.tick(FPS)
def generateRevealedBoxesData(val):
revealedBoxes = []
for i in range(BOARDWIDTH):
revealedBoxes.append([val] * BOARDHEIGHT)
return revealedBoxes
def getRandomizedBoard():
# Get a list of every possible shape in every possible color.
icons = []
for color in ALLCOLORS:
for shape in ALLSHAPES:
icons.append((shape, color))
random.shuffle(icons) # randomize the order of the icons list
numIconsUsed = int(BOARDWIDTH * BOARDHEIGHT / 2) # calculate how many icons are needed
icons = icons[:numIconsUsed] * 2 # make two of each
random.shuffle(icons)
# Create the board data structure, with randomly placed icons.
board = []
for x in range(BOARDWIDTH):
column = []
for y in range(BOARDHEIGHT):
column.append(icons[0])
del icons[0] # remove the icons as we assign them
board.append(column)
return board
def splitIntoGroupsOf(groupSize, theList):
# splits a list into a list of lists, where the inner lists have at
# most groupSize number of items.
result = []
for i in range(0, len(theList), groupSize):
result.append(theList[i:i + groupSize])
return result
def leftTopCoordsOfBox(boxx, boxy):
# Convert board coordinates to pixel coordinates
left = boxx * (BOXSIZE + GAPSIZE) + XMARGIN
top = boxy * (BOXSIZE + GAPSIZE) + YMARGIN
return (left, top)
def getBoxAtPixel(x, y):
for boxx in range(BOARDWIDTH):
for boxy in range(BOARDHEIGHT):
left, top = leftTopCoordsOfBox(boxx, boxy)
boxRect = pygame.Rect(left, top, BOXSIZE, BOXSIZE)
if boxRect.collidepoint(x, y):
return (boxx, boxy)
return (None, None)
def drawIcon(shape, color, boxx, boxy):
quarter = int(BOXSIZE * 0.25) # syntactic sugar
half = int(BOXSIZE * 0.5) # syntactic sugar
left, top = leftTopCoordsOfBox(boxx, boxy) # get pixel coords from board coords
# Draw the shapes
if shape == DONUT:
pygame.draw.circle(DISPLAYSURF, color, (left + half, top + half), half - 5)
pygame.draw.circle(DISPLAYSURF, BGCOLOR, (left + half, top + half), quarter - 5)
elif shape == SQUARE:
pygame.draw.rect(DISPLAYSURF, color, (left + quarter, top + quarter, BOXSIZE - half, BOXSIZE - half))
elif shape == DIAMOND:
pygame.draw.polygon(DISPLAYSURF, color, ((left + half, top), (left + BOXSIZE - 1, top + half), (left + half, top + BOXSIZE - 1), (left, top + half)))
elif shape == LINES:
for i in range(0, BOXSIZE, 4):
pygame.draw.line(DISPLAYSURF, color, (left, top + i), (left + i, top))
pygame.draw.line(DISPLAYSURF, color, (left + i, top + BOXSIZE - 1), (left + BOXSIZE - 1, top + i))
elif shape == OVAL:
pygame.draw.ellipse(DISPLAYSURF, color, (left, top + quarter, BOXSIZE, half))
def getShapeAndColor(board, boxx, boxy):
# shape value for x, y spot is stored in board[x][y][0]
# color value for x, y spot is stored in board[x][y][1]
return board[boxx][boxy][1], board[boxx][boxy][0]
def drawBoxCovers(board, boxes, coverage):
# Draws boxes being covered/revealed. "boxes" is a list
# of two-item lists, which have the x & y spot of the box.
for box in boxes:
left, top = leftTopCoordsOfBox(box[0], box[1])
pygame.draw.rect(DISPLAYSURF, BGCOLOR, (left, top, BOXSIZE, BOXSIZE))
shape, color = getShapeAndColor(board, box[0], box[1])
drawIcon(shape, color, box[0], box[1])
if coverage > 0: # only draw the cover if there is an coverage
pygame.draw.rect(DISPLAYSURF, BOXCOLOR, (left, top, coverage, BOXSIZE))
pygame.display.update()
FPSCLOCK.tick(FPS)
def revealBoxesAnimation(board, boxesToReveal):
# Do the "box reveal" animation.
for coverage in range(BOXSIZE, (-REVEALSPEED) - 1, -REVEALSPEED):
drawBoxCovers(board, boxesToReveal, coverage)
def coverBoxesAnimation(board, boxesToCover):
# Do the "box cover" animation.
for coverage in range(0, BOXSIZE + REVEALSPEED, REVEALSPEED):
drawBoxCovers(board, boxesToCover, coverage)
def drawBoard(board, revealed):
# Draws all of the boxes in their covered or revealed state.
for boxx in range(BOARDWIDTH):
for boxy in range(BOARDHEIGHT):
left, top = leftTopCoordsOfBox(boxx, boxy)
if not revealed[boxx][boxy]:
# Draw a covered box.
pygame.draw.rect(DISPLAYSURF, BOXCOLOR, (left, top, BOXSIZE, BOXSIZE))
else:
# Draw the (revealed) icon.
shape, color = getShapeAndColor(board, boxx, boxy)
drawIcon(shape, color, boxx, boxy)
def drawHighlightBox(boxx, boxy):
left, top = leftTopCoordsOfBox(boxx, boxy)
pygame.draw.rect(DISPLAYSURF, HIGHLIGHTCOLOR, (left - 5, top - 5, BOXSIZE + 10, BOXSIZE + 10), 4)
def startGameAnimation(board):
# Randomly reveal the boxes 8 at a time.
coveredBoxes = generateRevealedBoxesData(False)
boxes = []
for x in range(BOARDWIDTH):
for y in range(BOARDHEIGHT):
boxes.append((x, y))
random.shuffle(boxes)
boxGroups = splitIntoGroupsOf(8, boxes)
drawBoard(board, coveredBoxes)
for boxGroup in boxGroups:
revealBoxesAnimation(board, boxGroup)
coverBoxesAnimation(board, boxGroup)
def gameWonAnimation(board):
# flash the background color when the player has won
coveredBoxes = generateRevealedBoxesData(True)
color1 = LIGHTBGCOLOR
color2 = BGCOLOR
for i in range(13):
color1, color2 = color2, color1 # swap colors
DISPLAYSURF.fill(color1)
drawBoard(board, coveredBoxes)
pygame.display.update()
pygame.time.wait(300)
def hasWon(revealedBoxes):
# Returns True if all the boxes have been revealed, otherwise False
for i in revealedBoxes:
if False in i:
return False # return False if any boxes are covered.
return True
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
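For anyone who gives up on the puzzle above, the likeliest culprit (my reading of the code, not confirmed by the file itself) is getShapeAndColor: its return order contradicts its own comments, so drawIcon() receives a color tuple where it expects a shape string, no shape branch matches, and nothing gets drawn. A corrected sketch:

def getShapeAndColor(board, boxx, boxy):
    # shape value for x, y spot is stored in board[x][y][0]
    # color value for x, y spot is stored in board[x][y][1]
    return board[boxx][boxy][0], board[boxx][boxy][1]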
8ae50cd6b83ea1fc1e05b6127155a800c38eb5e1 | a3f1e80179c23d9202d72b75dd37a49b44785f45 | /api/client/swagger_client/models/api_model_script.py | a86fc78a1cbbdf8bc28909ede0b049bcda2e34cf | [
"Apache-2.0"
] | permissive | pvaneck/mlx | b1e82fae5ac8aaa1dddac23aaa38c46f6e6cfc27 | 6edaa0bd77787c56b737322a0c875ae30de6cd49 | refs/heads/main | 2023-05-14T06:08:38.404133 | 2021-05-04T01:41:11 | 2021-05-04T01:41:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,189 | py | # Copyright 2021 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
MLX API
MLX API Extension for Kubeflow Pipelines # noqa: E501
OpenAPI spec version: 0.1.25-related-assets
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ApiModelScript(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'pipeline_stage': 'str',
'execution_platform': 'str',
'script_code': 'str'
}
attribute_map = {
'pipeline_stage': 'pipeline_stage',
'execution_platform': 'execution_platform',
'script_code': 'script_code'
}
def __init__(self, pipeline_stage=None, execution_platform=None, script_code=None): # noqa: E501
"""ApiModelScript - a model defined in Swagger""" # noqa: E501
self._pipeline_stage = None
self._execution_platform = None
self._script_code = None
self.discriminator = None
self.pipeline_stage = pipeline_stage
self.execution_platform = execution_platform
self.script_code = script_code
@property
def pipeline_stage(self):
"""Gets the pipeline_stage of this ApiModelScript. # noqa: E501
pipeline stage that this code sample applies to, either 'train' or 'serve' # noqa: E501
:return: The pipeline_stage of this ApiModelScript. # noqa: E501
:rtype: str
"""
return self._pipeline_stage
@pipeline_stage.setter
def pipeline_stage(self, pipeline_stage):
"""Sets the pipeline_stage of this ApiModelScript.
pipeline stage that this code sample applies to, either 'train' or 'serve' # noqa: E501
:param pipeline_stage: The pipeline_stage of this ApiModelScript. # noqa: E501
:type: str
"""
if pipeline_stage is None:
raise ValueError("Invalid value for `pipeline_stage`, must not be `None`") # noqa: E501
self._pipeline_stage = pipeline_stage
@property
def execution_platform(self):
"""Gets the execution_platform of this ApiModelScript. # noqa: E501
execution platform that this code sample applies to, i.e. 'kubernetes', 'knative' # noqa: E501
:return: The execution_platform of this ApiModelScript. # noqa: E501
:rtype: str
"""
return self._execution_platform
@execution_platform.setter
def execution_platform(self, execution_platform):
"""Sets the execution_platform of this ApiModelScript.
execution platform that this code sample applies to, i.e. 'kubernetes', 'knative' # noqa: E501
:param execution_platform: The execution_platform of this ApiModelScript. # noqa: E501
:type: str
"""
if execution_platform is None:
raise ValueError("Invalid value for `execution_platform`, must not be `None`") # noqa: E501
self._execution_platform = execution_platform
@property
def script_code(self):
"""Gets the script_code of this ApiModelScript. # noqa: E501
the source code to run the model in a pipeline stage # noqa: E501
:return: The script_code of this ApiModelScript. # noqa: E501
:rtype: str
"""
return self._script_code
@script_code.setter
def script_code(self, script_code):
"""Sets the script_code of this ApiModelScript.
the source code to run the model in a pipeline stage # noqa: E501
:param script_code: The script_code of this ApiModelScript. # noqa: E501
:type: str
"""
if script_code is None:
raise ValueError("Invalid value for `script_code`, must not be `None`") # noqa: E501
self._script_code = script_code
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ApiModelScript, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ApiModelScript):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
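A minimal round trip through the generated model (the field values are invented for illustration; any non-None strings are accepted):

script = ApiModelScript(
    pipeline_stage='train',
    execution_platform='kubernetes',
    script_code='print("training...")',
)
print(script.to_dict())
# {'pipeline_stage': 'train', 'execution_platform': 'kubernetes',
#  'script_code': 'print("training...")'}
print(script == ApiModelScript('train', 'kubernetes', 'print("training...")'))  # True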
7c432b6ff53b6f2d2ff8f063dc48ade3a4c92cd1 | e2e08d7c97398a42e6554f913ee27340226994d9 | /pyautoTest-master(ICF-7.5.0)/test_case/scg/scg_Administrator/test_c139330.py | 0952f34469d1beb7cbb7b7a29b6cc088aec4bbac | [] | no_license | lizhuoya1111/Automated_testing_practice | 88e7be512e831d279324ad710946232377fb4c01 | b3a532d33ddeb8d01fff315bcd59b451befdef23 | refs/heads/master | 2022-12-04T08:19:29.806445 | 2020-08-14T03:51:20 | 2020-08-14T03:51:20 | 287,426,498 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,662 | py | import pytest
import time
import sys
from os.path import dirname, abspath
sys.path.insert(0, dirname(dirname(abspath(__file__))))
from page_obj.scg.scg_def_sys import *
from page_obj.scg.scg_def import *
from page_obj.scg.scg_button import *
from page_obj.scg.scg_def_log import *
from page_obj.common.rail import *
from page_obj.scg.scg_def_ifname_OEM import *
from page_obj.scg.scg_dev import *
test_id = 139330
def test_c139330(browser):
try:
login_web(browser, url=dev1)
configuer(browser)
time.sleep(2)
loginfo = get_log(browser, 管理日志)
browser.switch_to.default_content()
# print(loginfo)
delete_all_admin_list_jyl(browser)
time.sleep(1)
delete_all_admin_profile_jyl(browser)
time.sleep(1)
try:
assert "添加管理员帐户成功" in loginfo
rail_pass(test_run_id, test_id)
        except AssertionError:
rail_fail(test_run_id, test_id)
assert "添加管理员帐户失败" in loginfo
except Exception as err:
        # If any of the steps above raised an error, reset the device to restore its configuration
reload(hostip=dev1)
print(err)
rail_fail(test_run_id, test_id)
assert False
def configuer(browser):
add_admin_profile(browser, profile_name='aaa', desc="aaa权限", cfg="读写", report="读写")
time.sleep(2)
add_admin_remote_jyl(browser, admin_name="bob", auth_database="remote", temp="log_profile", https="yes",
telent="yes", ssh="yes", console="yes", status="enable", interface=interface_name_6,
online_num="3", ip1="0.0.0.0/0", ip2="3.3.3.0/24", )
if __name__ == '__main__':
pytest.main(["-v", "-s", "test_c" + str(test_id) + ".py"])
| [
"[email protected]"
] | |
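The try/except-plus-rail skeleton above recurs across this suite; a stripped-down sketch of just the reporting pattern (rail_pass and rail_fail are the project-specific TestRail helpers imported above, whose internals are not shown here):

def run_checked(test_run_id, test_id, configure_and_assert):
    try:
        configure_and_assert()            # drive the UI, assert on the log
        rail_pass(test_run_id, test_id)   # report success to TestRail
    except Exception:
        rail_fail(test_run_id, test_id)   # report failure...
        raise                             # ...but keep pytest's traceback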
fc28ed8f4facbaccd5cc5e51818b7b8af618bc45 | cedfdd1398b947b15eccf4473e9bbaddccb878d7 | /SDK/openstack/tests/unit/cloud/test_flavors.py | 6f44544b37e1975cbe596bede8bcf381013844f4 | [] | no_license | Doctor-DC/CMP-Recycle | 36fb1fdcf7c3a396bfef89d03948bd0ce626b053 | e3e6421f0b5dc28a075bc5bf91be9a45bcbe97c6 | refs/heads/dev | 2022-12-15T06:28:12.695868 | 2019-02-26T06:22:21 | 2019-02-26T06:22:21 | 142,127,512 | 0 | 0 | null | 2022-12-08T02:29:44 | 2018-07-24T08:18:46 | Python | UTF-8 | Python | false | false | 11,881 | py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import SDK.openstack.cloud
from SDK.openstack.tests import fakes
from SDK.openstack.tests.unit import base
class TestFlavors(base.TestCase):
def test_create_flavor(self):
self.register_uris([
dict(method='POST',
uri='{endpoint}/flavors'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={'flavor': fakes.FAKE_FLAVOR},
validate=dict(
json={
'flavor': {
"name": "vanilla",
"ram": 65536,
"vcpus": 24,
"swap": 0,
"os-flavor-access:is_public": True,
"rxtx_factor": 1.0,
"OS-FLV-EXT-DATA:ephemeral": 0,
"disk": 1600,
"id": None}}))])
self.cloud.create_flavor(
'vanilla', ram=65536, disk=1600, vcpus=24,
)
self.assert_calls()
def test_delete_flavor(self):
self.register_uris([
dict(method='GET',
uri='{endpoint}/flavors/detail?is_public=None'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={'flavors': fakes.FAKE_FLAVOR_LIST}),
dict(method='DELETE',
uri='{endpoint}/flavors/{id}'.format(
endpoint=fakes.COMPUTE_ENDPOINT, id=fakes.FLAVOR_ID))])
self.assertTrue(self.cloud.delete_flavor('vanilla'))
self.assert_calls()
def test_delete_flavor_not_found(self):
self.register_uris([
dict(method='GET',
uri='{endpoint}/flavors/detail?is_public=None'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={'flavors': fakes.FAKE_FLAVOR_LIST})])
self.assertFalse(self.cloud.delete_flavor('invalid'))
self.assert_calls()
def test_delete_flavor_exception(self):
self.register_uris([
dict(method='GET',
uri='{endpoint}/flavors/detail?is_public=None'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={'flavors': fakes.FAKE_FLAVOR_LIST}),
dict(method='DELETE',
uri='{endpoint}/flavors/{id}'.format(
endpoint=fakes.COMPUTE_ENDPOINT, id=fakes.FLAVOR_ID),
status_code=503)])
self.assertRaises(SDK.openstack.cloud.OpenStackCloudException,
self.cloud.delete_flavor, 'vanilla')
def test_list_flavors(self):
uris_to_mock = [
dict(method='GET',
uri='{endpoint}/flavors/detail?is_public=None'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={'flavors': fakes.FAKE_FLAVOR_LIST}),
]
self.register_uris(uris_to_mock)
flavors = self.cloud.list_flavors()
# test that new flavor is created correctly
found = False
for flavor in flavors:
if flavor['name'] == 'vanilla':
found = True
break
self.assertTrue(found)
needed_keys = {'name', 'ram', 'vcpus', 'id', 'is_public', 'disk'}
if found:
# check flavor content
self.assertTrue(needed_keys.issubset(flavor.keys()))
self.assert_calls()
def test_list_flavors_with_extra(self):
uris_to_mock = [
dict(method='GET',
uri='{endpoint}/flavors/detail?is_public=None'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={'flavors': fakes.FAKE_FLAVOR_LIST}),
]
uris_to_mock.extend([
dict(method='GET',
uri='{endpoint}/flavors/{id}/os-extra_specs'.format(
endpoint=fakes.COMPUTE_ENDPOINT, id=flavor['id']),
json={'extra_specs': {}})
for flavor in fakes.FAKE_FLAVOR_LIST])
self.register_uris(uris_to_mock)
flavors = self.cloud.list_flavors(get_extra=True)
# test that new flavor is created correctly
found = False
for flavor in flavors:
if flavor['name'] == 'vanilla':
found = True
break
self.assertTrue(found)
needed_keys = {'name', 'ram', 'vcpus', 'id', 'is_public', 'disk'}
if found:
# check flavor content
self.assertTrue(needed_keys.issubset(flavor.keys()))
self.assert_calls()
def test_get_flavor_by_ram(self):
uris_to_mock = [
dict(method='GET',
uri='{endpoint}/flavors/detail?is_public=None'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={'flavors': fakes.FAKE_FLAVOR_LIST}),
]
uris_to_mock.extend([
dict(method='GET',
uri='{endpoint}/flavors/{id}/os-extra_specs'.format(
endpoint=fakes.COMPUTE_ENDPOINT, id=flavor['id']),
json={'extra_specs': {}})
for flavor in fakes.FAKE_FLAVOR_LIST])
self.register_uris(uris_to_mock)
flavor = self.cloud.get_flavor_by_ram(ram=250)
self.assertEqual(fakes.STRAWBERRY_FLAVOR_ID, flavor['id'])
def test_get_flavor_by_ram_and_include(self):
uris_to_mock = [
dict(method='GET',
uri='{endpoint}/flavors/detail?is_public=None'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={'flavors': fakes.FAKE_FLAVOR_LIST}),
]
uris_to_mock.extend([
dict(method='GET',
uri='{endpoint}/flavors/{id}/os-extra_specs'.format(
endpoint=fakes.COMPUTE_ENDPOINT, id=flavor['id']),
json={'extra_specs': {}})
for flavor in fakes.FAKE_FLAVOR_LIST])
self.register_uris(uris_to_mock)
flavor = self.cloud.get_flavor_by_ram(ram=150, include='strawberry')
self.assertEqual(fakes.STRAWBERRY_FLAVOR_ID, flavor['id'])
def test_get_flavor_by_ram_not_found(self):
self.register_uris([
dict(method='GET',
uri='{endpoint}/flavors/detail?is_public=None'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={'flavors': []})])
self.assertRaises(
SDK.openstack.cloud.OpenStackCloudException,
self.cloud.get_flavor_by_ram,
ram=100)
def test_get_flavor_string_and_int(self):
flavor_list_uri = '{endpoint}/flavors/detail?is_public=None'.format(
endpoint=fakes.COMPUTE_ENDPOINT)
flavor_resource_uri = '{endpoint}/flavors/1/os-extra_specs'.format(
endpoint=fakes.COMPUTE_ENDPOINT)
flavor_list_json = {'flavors': [fakes.make_fake_flavor(
'1', 'vanilla')]}
flavor_json = {'extra_specs': {}}
self.register_uris([
dict(method='GET', uri=flavor_list_uri, json=flavor_list_json),
dict(method='GET', uri=flavor_resource_uri, json=flavor_json),
dict(method='GET', uri=flavor_list_uri, json=flavor_list_json),
dict(method='GET', uri=flavor_resource_uri, json=flavor_json)])
flavor1 = self.cloud.get_flavor('1')
self.assertEqual('1', flavor1['id'])
flavor2 = self.cloud.get_flavor(1)
self.assertEqual('1', flavor2['id'])
def test_set_flavor_specs(self):
extra_specs = dict(key1='value1')
self.register_uris([
dict(method='POST',
uri='{endpoint}/flavors/{id}/os-extra_specs'.format(
endpoint=fakes.COMPUTE_ENDPOINT, id=1),
json=dict(extra_specs=extra_specs))])
self.cloud.set_flavor_specs(1, extra_specs)
self.assert_calls()
def test_unset_flavor_specs(self):
keys = ['key1', 'key2']
self.register_uris([
dict(method='DELETE',
uri='{endpoint}/flavors/{id}/os-extra_specs/{key}'.format(
endpoint=fakes.COMPUTE_ENDPOINT, id=1, key=key))
for key in keys])
self.cloud.unset_flavor_specs(1, keys)
self.assert_calls()
def test_add_flavor_access(self):
self.register_uris([
dict(method='POST',
uri='{endpoint}/flavors/{id}/action'.format(
endpoint=fakes.COMPUTE_ENDPOINT, id='flavor_id'),
json={
'flavor_access': [{
'flavor_id': 'flavor_id', 'tenant_id': 'tenant_id'}]},
validate=dict(
json={'addTenantAccess': {'tenant': 'tenant_id'}}))])
self.cloud.add_flavor_access('flavor_id', 'tenant_id')
self.assert_calls()
def test_remove_flavor_access(self):
self.register_uris([
dict(method='POST',
uri='{endpoint}/flavors/{id}/action'.format(
endpoint=fakes.COMPUTE_ENDPOINT, id='flavor_id'),
json={'flavor_access': []},
validate=dict(
json={'removeTenantAccess': {'tenant': 'tenant_id'}}))])
self.cloud.remove_flavor_access('flavor_id', 'tenant_id')
self.assert_calls()
def test_list_flavor_access(self):
self.register_uris([
dict(method='GET',
uri='{endpoint}/flavors/vanilla/os-flavor-access'.format(
endpoint=fakes.COMPUTE_ENDPOINT),
json={
'flavor_access': [
{'flavor_id': 'vanilla', 'tenant_id': 'tenant_id'}]})
])
self.cloud.list_flavor_access('vanilla')
self.assert_calls()
def test_get_flavor_by_id(self):
flavor_uri = '{endpoint}/flavors/1'.format(
endpoint=fakes.COMPUTE_ENDPOINT)
flavor_json = {'flavor': fakes.make_fake_flavor('1', 'vanilla')}
self.register_uris([
dict(method='GET', uri=flavor_uri, json=flavor_json),
])
flavor1 = self.cloud.get_flavor_by_id('1')
self.assertEqual('1', flavor1['id'])
self.assertEqual({}, flavor1.extra_specs)
flavor2 = self.cloud.get_flavor_by_id('1')
self.assertEqual('1', flavor2['id'])
self.assertEqual({}, flavor2.extra_specs)
def test_get_flavor_with_extra_specs(self):
flavor_uri = '{endpoint}/flavors/1'.format(
endpoint=fakes.COMPUTE_ENDPOINT)
flavor_extra_uri = '{endpoint}/flavors/1/os-extra_specs'.format(
endpoint=fakes.COMPUTE_ENDPOINT)
flavor_json = {'flavor': fakes.make_fake_flavor('1', 'vanilla')}
flavor_extra_json = {'extra_specs': {'name': 'test'}}
self.register_uris([
dict(method='GET', uri=flavor_uri, json=flavor_json),
dict(method='GET', uri=flavor_extra_uri, json=flavor_extra_json),
])
flavor1 = self.cloud.get_flavor_by_id('1', get_extra=True)
self.assertEqual('1', flavor1['id'])
self.assertEqual({'name': 'test'}, flavor1.extra_specs)
flavor2 = self.cloud.get_flavor_by_id('1', get_extra=False)
self.assertEqual('1', flavor2['id'])
self.assertEqual({}, flavor2.extra_specs)
| [
"[email protected]"
] | |
d330833cb9b420e93029ecd1df3e12af203a3a9d | ee05d803ee01c939d7324db8e8ff2b8990877d69 | /e_单词规律/290.py | f5edbac5e8f0297cffaf85541074befd5136adb1 | [] | no_license | Nostalogicwh/Leetcode | 7660153ffe56b1a348d2bb145bbd77c9c46a5525 | 5004d6b7157dc6a21666c7f79a38e95fa0ca092f | refs/heads/master | 2023-02-26T10:23:26.575572 | 2021-02-01T09:05:04 | 2021-02-01T09:05:04 | 295,983,019 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | class Solution:
def wordPattern(self, pattern: str, s: str) -> bool:
t = s.split()
if len(pattern) != len(t):
return False
dct = {}
for i in range(len(pattern)):
if pattern[i] not in dct:
if t[i] in dct.values():
return False
dct[pattern[i]] = t[i]
else:
if dct[pattern[i]] != t[i]:
return False
return True | [
"[email protected]"
] | |
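The examples from the problem statement, for reference (a mapping must be consistent in both directions, which is why dct.values() is checked):

s = Solution()
print(s.wordPattern("abba", "dog cat cat dog"))   # True
print(s.wordPattern("abba", "dog cat cat fish"))  # False
print(s.wordPattern("aaaa", "dog cat cat dog"))   # False
print(s.wordPattern("abba", "dog dog dog dog"))   # False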
e13b84b29a8d28f05463a7eab4ee596dc1714cae | 5b7af6548668085da9a6ab86f564538ee73c4865 | /build/scripts/slave/recipe_modules/luci_config/example.py | ddbd1f0ab8890cf00a7b52766c5d291be3d044b2 | [
"BSD-3-Clause"
] | permissive | elastos/Elastos.APP.Android.ShiJiuTV | 463a986450a915f7b3066e6a03aca903cf56f69b | f77189a2b8df86028adc68105988710d16ce012b | refs/heads/master | 2023-03-18T03:11:58.337349 | 2018-03-12T08:50:57 | 2018-03-13T11:10:27 | 124,007,751 | 0 | 1 | null | 2022-10-03T03:30:29 | 2018-03-06T02:21:25 | null | UTF-8 | Python | false | false | 1,504 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine.recipe_api import Property
DEPS = [
'luci_config',
'recipe_engine/properties',
'recipe_engine/step'
]
PROPERTIES = {
'auth_token': Property(default=None),
'protobuf': Property(default=None),
}
def RunSteps(api, auth_token, protobuf):
if auth_token:
api.luci_config.c.auth_token = auth_token
if protobuf:
result = api.luci_config.parse_textproto(api.luci_config.get_project_config(
'build', 'recipes.cfg')['content'].split('\n'))
api.step('checkit', ['echo', str(result)])
return
api.luci_config.get_project_config('build', 'recipes.cfg')
api.luci_config.get_project_metadata('build')
def GenTests(api):
yield (
api.test('basic') +
api.luci_config.get_projects(['build']) +
api.luci_config.get_project_config('build', 'recipes.cfg', 'testcontent')
)
yield (
api.test('auth_token') +
api.properties(auth_token='ya2930948320948203480=') +
api.luci_config.get_projects(['build']) +
api.luci_config.get_project_config('build', 'recipes.cfg', 'testcontent')
)
protobuf_lines = """
foo: 1
bar: "hi"
baz {
the_thing: "hi"
}
"""
yield (
api.test('protobuf') +
api.luci_config.get_project_config(
'build', 'recipes.cfg', protobuf_lines) +
api.properties(protobuf=True)
)
| [
"[email protected]"
] | |
e9fac787cd491026568ab5aa63c583c3cc349c1c | 78ea634c53a3b52cbc379b5509d99f6729e1644b | /user/forms.py | 04c395cf518eb98c1946fa41c4eb578ee10656ab | [] | no_license | TapanManu/Todo-Backend | 56a2999a8139a967e1f59d346d22b902f5ed151a | 4ed02cbbf2c843fc248edb4a4027e3491fa800c9 | refs/heads/master | 2022-03-30T10:21:49.165029 | 2020-02-02T18:45:39 | 2020-02-02T18:45:39 | 226,716,615 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 666 | py | from django import forms
from .models import UserProfile
from django.contrib.auth.models import User
class UserForm(forms.ModelForm):
username=forms.CharField(label=("Username"),widget=forms.TextInput( attrs={'placeholder':
('Username'),
'autofocus': 'autofocus','required':'true'}))
password=forms.CharField(widget=forms.PasswordInput(attrs={'placeholder':('password')}))
email = forms.EmailField(max_length=254)
class Meta:
model=User
fields=('username','email','password')
class UserProfileForm(forms.ModelForm):
class Meta:
model=UserProfile
fields=('website',)
| [
"[email protected]"
] | |
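A standalone sketch of the same idea with a plain Form, runnable outside the project (settings.configure() is only needed because no Django project is loaded here; inside the real app this is already done):

from django.conf import settings
settings.configure()

from django import forms

class LoginForm(forms.Form):
    username = forms.CharField(
        widget=forms.TextInput(attrs={'placeholder': 'Username',
                                      'autofocus': 'autofocus'}))
    password = forms.CharField(widget=forms.PasswordInput())

form = LoginForm(data={'username': 'alice', 'password': 's3cret'})
print(form.is_valid())    # True
print(form.cleaned_data)  # {'username': 'alice', 'password': 's3cret'}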
2109b2bc169fb2b076bbf069f2ef187f1e7baab8 | df328969e8d61a02603374c0cb3450556a51c184 | /tests/runtests.py | f0f367a3d3e63c7eff09f034e8fdb67b71a9796e | [
"BSD-2-Clause"
] | permissive | samuderapase/django-comments-xtd | c5dd8641e8ca6933a124ae91377f003dd1dfc7a6 | 5ac29f5269c18acb1709a35b30de3decff7a59fe | refs/heads/master | 2020-12-24T23:49:33.382692 | 2012-10-17T20:07:09 | 2012-10-17T20:07:09 | 6,543,617 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | import os
import sys
def setup_django_settings():
os.chdir(os.path.join(os.path.dirname(__file__), ".."))
sys.path.insert(0, os.getcwd())
os.environ["DJANGO_SETTINGS_MODULE"] = "tests.settings"
def run_tests():
if not os.environ.get("DJANGO_SETTINGS_MODULE", False):
setup_django_settings()
from django.conf import settings
from django.test.utils import get_runner
TestRunner = get_runner(settings)
test_suite = TestRunner(verbosity=2, interactive=True, failfast=False)
test_suite.run_tests(["django_comments_xtd"])
if __name__ == "__main__":
run_tests()
| [
"[email protected]"
] | |
1b002ff27e79f71f469b0ebe1a940084049bd552 | 52c8d7594de49e3ba47573c50c95bd112c3c8828 | /cycles.py | 67f8843f2dd44172c7166fbadb936912a45fed0f | [] | no_license | vitroid/LiqChemSummerSchool2014 | bcf592be419bccb6e972c11ffff95a7eff355213 | 23a29acbc13548c702518448f52f0f8336b167e2 | refs/heads/master | 2021-06-03T10:29:56.428479 | 2021-02-15T05:34:56 | 2021-02-15T05:34:56 | 21,470,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,020 | py | #!/usr/bin/env python
# coding: utf-8
############# functions ###############################################
#cycles
def all_cycles(graph, maxc=6):
#local functions
def self_avoiding_cycle(graph,vertex_list):
        # If the walk has wandered and the path has grown too long,
if len(vertex_list) == maxc+1:
            # give up (return no results).
return []
last = vertex_list[-1]
results = []
        # For each vertex adjacent to the last vertex:
for next in graph[last]:
            # if next has come back to the first vertex of the path, and the path length is at least 3,
if next == vertex_list[0]:
if len(vertex_list) >= 3:
                    # we made it back around!
                    # add vertex_list to the results
results.append(vertex_list)
else:
continue
            # if the walk runs into the middle of its own path,
elif next in vertex_list:
continue
else:
                # otherwise, extend the walk recursively
results += self_avoiding_cycle(graph,vertex_list + [next,])
return results
#end of local functions
cycles = []
graph_size = len(graph)
    # Taking every vertex in turn as the starting point,
for v in range(graph_size):
        # search for self-avoiding cycles.
cycles += self_avoiding_cycle(graph, [v,])
    # Return every cycle found, duplicates included (same cycle from a different start, reversed direction, and so on).
return cycles
def unique_cycles(graph, maxc=6):
cycles = all_cycles(graph, maxc)
    # Prune the duplicate cycles.
    # list of unique cycles
uniquecycles = []
    # set of vertex sets already seen
uniqueset = set()
for cycle in cycles:
        # Turn the vertices of the cycle into a (frozen) set.
fs = frozenset(cycle)
        # If that set has not appeared before,
if not fs in uniqueset:
            # record it in the set of seen sets
uniqueset.add(fs)
            # and append the cycle to the list as well.
uniquecycles.append(cycle)
    # The list is the return value.
return uniquecycles
############# end of functions ########################################
from distance_matrix import *
#test case
if __name__ == "__main__":
    # A graph is represented as a set of edges.
    # Vertex labels are consecutive integers starting from 0.
    # Edges are undirected; each is a tuple of its two vertex labels (smaller one first).
    # A larger graph (the cube graph).
edges = set([(0,1),(1,2),(2,3),(0,3),
(0,4),(1,5),(2,6),(3,7),
(4,5),(5,6),(6,7),(4,7)])
size = 8
    # Represent adjacency as a table so it is easy to walk from vertex to connected vertex.
graph = adjacency_table(edges, size)
    # List the cycles (maximum size: 6 steps).
print unique_cycles(graph,6)
| [
"[email protected]"
] | |
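The smallest meaningful check, keeping the file's Python 2 style: a triangle has exactly one unique cycle (the exact listing order depends on adjacency_table, so treat it as illustrative).

tri_edges = set([(0, 1), (1, 2), (0, 2)])
tri_graph = adjacency_table(tri_edges, 3)
print unique_cycles(tri_graph, 6)  # one 3-cycle, e.g. [[0, 1, 2]]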
e062c0a4d1a6c48d574c1f5205d0a19e1c11e9be | 6fff0893ef43f1018d65f2e8e1bf27d9f8accf5b | /pw_package/py/pw_package/package_manager.py | 1254121126b21ce292b55127abe72651f6f4487a | [
"Apache-2.0"
] | permissive | isabella232/pigweed | eeb68a4eda6f0a9b5ef0b8145d0204bc9f85bfdc | 53c2f3e2569d7e582d3dd3056ceb9b2c3b8197b2 | refs/heads/main | 2023-06-03T10:32:29.498066 | 2021-06-17T06:38:15 | 2021-06-17T20:44:55 | 378,165,913 | 0 | 0 | Apache-2.0 | 2021-06-18T13:54:37 | 2021-06-18T13:53:40 | null | UTF-8 | Python | false | false | 6,156 | py | # Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Install and remove optional packages."""
import argparse
import dataclasses
import logging
import os
import pathlib
import shutil
from typing import Dict, List, Sequence, Tuple
_LOG: logging.Logger = logging.getLogger(__name__)
class Package:
"""Package to be installed.
Subclass this to implement installation of a specific package.
"""
def __init__(self, name):
self._name = name
@property
def name(self):
return self._name
def install(self, path: pathlib.Path) -> None: # pylint: disable=no-self-use
"""Install the package at path.
Install the package in path. Cannot assume this directory is empty—it
may need to be deleted or updated.
"""
def remove(self, path: pathlib.Path) -> None: # pylint: disable=no-self-use
"""Remove the package from path.
Removes the directory containing the package. For most packages this
should be sufficient to remove the package, and subclasses should not
need to override this package.
"""
if os.path.exists(path):
shutil.rmtree(path)
def status(self, path: pathlib.Path) -> bool: # pylint: disable=no-self-use
"""Returns if package is installed at path and current.
This method will be skipped if the directory does not exist.
"""
def info(self, path: pathlib.Path) -> Sequence[str]: # pylint: disable=no-self-use
"""Returns a short string explaining how to enable the package."""
_PACKAGES: Dict[str, Package] = {}
def register(package_class: type, *args, **kwargs) -> None:
obj = package_class(*args, **kwargs)
_PACKAGES[obj.name] = obj
@dataclasses.dataclass
class Packages:
all: Tuple[str, ...]
installed: Tuple[str, ...]
available: Tuple[str, ...]
class PackageManager:
"""Install and remove optional packages."""
def __init__(self, root: pathlib.Path):
self._pkg_root = root
os.makedirs(root, exist_ok=True)
def install(self, package: str, force: bool = False) -> None:
pkg = _PACKAGES[package]
if force:
self.remove(package)
pkg.install(self._pkg_root / pkg.name)
def remove(self, package: str) -> None:
pkg = _PACKAGES[package]
pkg.remove(self._pkg_root / pkg.name)
def status(self, package: str) -> bool:
pkg = _PACKAGES[package]
path = self._pkg_root / pkg.name
return os.path.isdir(path) and pkg.status(path)
def list(self) -> Packages:
installed = []
available = []
for package in sorted(_PACKAGES.keys()):
pkg = _PACKAGES[package]
if pkg.status(self._pkg_root / pkg.name):
installed.append(pkg.name)
else:
available.append(pkg.name)
return Packages(
all=tuple(_PACKAGES.keys()),
installed=tuple(installed),
available=tuple(available),
)
def info(self, package: str) -> Sequence[str]:
pkg = _PACKAGES[package]
return pkg.info(self._pkg_root / pkg.name)
class PackageManagerCLI:
"""Command-line interface to PackageManager."""
def __init__(self):
self._mgr: PackageManager = None
def install(self, package: str, force: bool = False) -> int:
_LOG.info('Installing %s...', package)
self._mgr.install(package, force)
_LOG.info('Installing %s...done.', package)
for line in self._mgr.info(package):
_LOG.info('%s', line)
return 0
def remove(self, package: str) -> int:
_LOG.info('Removing %s...', package)
self._mgr.remove(package)
_LOG.info('Removing %s...done.', package)
return 0
def status(self, package: str) -> int:
if self._mgr.status(package):
_LOG.info('%s is installed.', package)
for line in self._mgr.info(package):
_LOG.info('%s', line)
return 0
_LOG.info('%s is not installed.', package)
return -1
def list(self) -> int:
packages = self._mgr.list()
_LOG.info('Installed packages:')
for package in packages.installed:
_LOG.info(' %s', package)
for line in self._mgr.info(package):
_LOG.info(' %s', line)
_LOG.info('')
_LOG.info('Available packages:')
for package in packages.available:
_LOG.info(' %s', package)
_LOG.info('')
return 0
def run(self, command: str, pkg_root: pathlib.Path, **kwargs) -> int:
self._mgr = PackageManager(pkg_root.resolve())
return getattr(self, command)(**kwargs)
def parse_args(argv: List[str] = None) -> argparse.Namespace:
parser = argparse.ArgumentParser("Manage packages.")
parser.add_argument(
'--package-root',
'-e',
dest='pkg_root',
type=pathlib.Path,
default=(pathlib.Path(os.environ['_PW_ACTUAL_ENVIRONMENT_ROOT']) /
'packages'),
)
subparsers = parser.add_subparsers(dest='command', required=True)
install = subparsers.add_parser('install')
install.add_argument('--force', '-f', action='store_true')
remove = subparsers.add_parser('remove')
status = subparsers.add_parser('status')
for cmd in (install, remove, status):
cmd.add_argument('package', choices=_PACKAGES.keys())
_ = subparsers.add_parser('list')
return parser.parse_args(argv)
def run(**kwargs):
return PackageManagerCLI().run(**kwargs)
| [
"[email protected]"
] | |
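A minimal hypothetical subclass showing what a concrete package must provide (the name and URL below are invented for illustration; register() is what makes it visible to the list/install/remove commands above):

import pathlib
import subprocess


class ExampleRepo(Package):
    def __init__(self):
        super().__init__(name='example_repo')

    def install(self, path: pathlib.Path) -> None:
        # clone on first install; a real package would also handle updates
        if not path.is_dir():
            subprocess.check_call(
                ['git', 'clone', 'https://example.com/repo.git', str(path)])

    def status(self, path: pathlib.Path) -> bool:
        return (path / '.git').is_dir()

    def info(self, path: pathlib.Path):
        return ('%s is installed at %s' % (self.name, path),)


register(ExampleRepo)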
54c9a65d15786c181c4f835eedac784926e7b84a | ce745254ee1c55f06c90b2c0739a4db29efa9913 | /src/test/python/cpython-3f944f44ee41/Lib/warnings.py | a427e3510ecaaa6dadeb2a4e957bb6d4880576c7 | [
"MIT"
] | permissive | bkiers/python3-parser | 5deb0c681e42b07d459758d864fd0689bf26dbad | 5a7e097f2dba8d38fa41ebfc95c8bdf4da3042dd | refs/heads/master | 2022-10-31T22:02:07.484767 | 2021-11-17T09:31:49 | 2021-11-17T09:31:49 | 20,738,250 | 39 | 25 | MIT | 2022-12-13T08:42:28 | 2014-06-11T19:34:28 | Python | UTF-8 | Python | false | false | 13,725 | py | """Python part of the warnings subsystem."""
import sys
__all__ = ["warn", "showwarning", "formatwarning", "filterwarnings",
"resetwarnings", "catch_warnings"]
def showwarning(message, category, filename, lineno, file=None, line=None):
"""Hook to write a warning to a file; replace if you like."""
if file is None:
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno, line))
except OSError:
pass # the file (probably stderr) is invalid - this warning gets lost.
def formatwarning(message, category, filename, lineno, line=None):
"""Function to format a warning the standard way."""
import linecache
s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
line = linecache.getline(filename, lineno) if line is None else line
if line:
line = line.strip()
s += " %s\n" % line
return s
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
append=False):
"""Insert an entry into the list of warnings filters (at the front).
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'message' -- a regex that the warning message must match
'category' -- a class that the warning must be a subclass of
'module' -- a regex that the module name must match
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
import re
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(message, str), "message must be a string"
assert isinstance(category, type), "category must be a class"
assert issubclass(category, Warning), "category must be a Warning subclass"
assert isinstance(module, str), "module must be a string"
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, re.compile(message, re.I), category,
re.compile(module), lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
def simplefilter(action, category=Warning, lineno=0, append=False):
"""Insert a simple entry into the list of warnings filters (at the front).
A simple filter matches all modules and messages.
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'category' -- a class that the warning must be a subclass of
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, None, category, None, lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
def resetwarnings():
"""Clear the list of warning filters, so that no filters are active."""
filters[:] = []
class _OptionError(Exception):
"""Exception used by option processing helpers."""
pass
# Helper to process -W options passed via sys.warnoptions
def _processoptions(args):
for arg in args:
try:
_setoption(arg)
except _OptionError as msg:
print("Invalid -W option ignored:", msg, file=sys.stderr)
# Helper for _processoptions()
def _setoption(arg):
import re
parts = arg.split(':')
if len(parts) > 5:
raise _OptionError("too many fields (max 5): %r" % (arg,))
while len(parts) < 5:
parts.append('')
action, message, category, module, lineno = [s.strip()
for s in parts]
action = _getaction(action)
message = re.escape(message)
category = _getcategory(category)
module = re.escape(module)
if module:
module = module + '$'
if lineno:
try:
lineno = int(lineno)
if lineno < 0:
raise ValueError
except (ValueError, OverflowError):
raise _OptionError("invalid lineno %r" % (lineno,))
else:
lineno = 0
filterwarnings(action, message, category, module, lineno)
# Helper for _setoption()
def _getaction(action):
if not action:
return "default"
if action == "all": return "always" # Alias
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
# Helper for _setoption()
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
# Code typically replaced by _warnings
def warn(message, category=None, stacklevel=1):
"""Issue a warning, or maybe ignore it or raise an exception."""
# Check if message is already a Warning object
if isinstance(message, Warning):
category = message.__class__
# Check category argument
if category is None:
category = UserWarning
assert issubclass(category, Warning)
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith((".pyc", ".pyo")):
filename = filename[:-1]
else:
if module == "__main__":
try:
filename = sys.argv[0]
except AttributeError:
# embedded interpreters don't have sys.argv, see bug #839151
filename = '__main__'
if not filename:
filename = module
registry = globals.setdefault("__warningregistry__", {})
warn_explicit(message, category, filename, lineno, module, registry,
globals)
def warn_explicit(message, category, filename, lineno,
module=None, registry=None, module_globals=None):
lineno = int(lineno)
if module is None:
module = filename or "<unknown>"
if module[-3:].lower() == ".py":
module = module[:-3] # XXX What about leading pathname?
if registry is None:
registry = {}
if isinstance(message, Warning):
text = str(message)
category = message.__class__
else:
text = message
message = category(message)
key = (text, category, lineno)
# Quick test for common case
if registry.get(key):
return
# Search the filters
for item in filters:
action, msg, cat, mod, ln = item
if ((msg is None or msg.match(text)) and
issubclass(category, cat) and
(mod is None or mod.match(module)) and
(ln == 0 or lineno == ln)):
break
else:
action = defaultaction
# Early exit actions
if action == "ignore":
registry[key] = 1
return
# Prime the linecache for formatting, in case the
# "file" is actually in a zipfile or something.
import linecache
linecache.getlines(filename, module_globals)
if action == "error":
raise message
# Other actions
if action == "once":
registry[key] = 1
oncekey = (text, category)
if onceregistry.get(oncekey):
return
onceregistry[oncekey] = 1
elif action == "always":
pass
elif action == "module":
registry[key] = 1
altkey = (text, category, 0)
if registry.get(altkey):
return
registry[altkey] = 1
elif action == "default":
registry[key] = 1
else:
# Unrecognized actions are errors
raise RuntimeError(
"Unrecognized action (%r) in warnings.filters:\n %s" %
(action, item))
if not callable(showwarning):
raise TypeError("warnings.showwarning() must be set to a "
"function or method")
# Print message and context
showwarning(message, category, filename, lineno)
class WarningMessage(object):
"""Holds the result of a single showwarning() call."""
_WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
"line")
def __init__(self, message, category, filename, lineno, file=None,
line=None):
local_values = locals()
for attr in self._WARNING_DETAILS:
setattr(self, attr, local_values[attr])
self._category_name = category.__name__ if category else None
def __str__(self):
return ("{message : %r, category : %r, filename : %r, lineno : %s, "
"line : %r}" % (self.message, self._category_name,
self.filename, self.lineno, self.line))
class catch_warnings(object):
"""A context manager that copies and restores the warnings filter upon
exiting the context.
The 'record' argument specifies whether warnings should be captured by a
custom implementation of warnings.showwarning() and be appended to a list
returned by the context manager. Otherwise None is returned by the context
manager. The objects appended to the list are arguments whose attributes
mirror the arguments to showwarning().
The 'module' argument is to specify an alternative module to the module
named 'warnings' and imported under that name. This argument is only useful
when testing the warnings module itself.
"""
def __init__(self, *, record=False, module=None):
"""Specify whether to record warnings and if an alternative module
should be used other than sys.modules['warnings'].
For compatibility with Python 3.0, please consider all arguments to be
keyword-only.
"""
self._record = record
self._module = sys.modules['warnings'] if module is None else module
self._entered = False
def __repr__(self):
args = []
if self._record:
args.append("record=True")
if self._module is not sys.modules['warnings']:
args.append("module=%r" % self._module)
name = type(self).__name__
return "%s(%s)" % (name, ", ".join(args))
def __enter__(self):
if self._entered:
raise RuntimeError("Cannot enter %r twice" % self)
self._entered = True
self._filters = self._module.filters
self._module.filters = self._filters[:]
self._showwarning = self._module.showwarning
if self._record:
log = []
def showwarning(*args, **kwargs):
log.append(WarningMessage(*args, **kwargs))
self._module.showwarning = showwarning
return log
else:
return None
def __exit__(self, *exc_info):
if not self._entered:
raise RuntimeError("Cannot exit %r without entering first" % self)
self._module.filters = self._filters
self._module.showwarning = self._showwarning
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warning, or 0 to mean any line
# If either if the compiled regexs are None, match anything.
_warnings_defaults = False
try:
from _warnings import (filters, _defaultaction, _onceregistry,
warn, warn_explicit)
defaultaction = _defaultaction
onceregistry = _onceregistry
_warnings_defaults = True
except ImportError:
filters = []
defaultaction = "default"
onceregistry = {}
# Module initialization
_processoptions(sys.warnoptions)
if not _warnings_defaults:
silence = [ImportWarning, PendingDeprecationWarning]
silence.append(DeprecationWarning)
for cls in silence:
simplefilter("ignore", category=cls)
bytes_warning = sys.flags.bytes_warning
if bytes_warning > 1:
bytes_action = "error"
elif bytes_warning:
bytes_action = "default"
else:
bytes_action = "ignore"
simplefilter(bytes_action, category=BytesWarning, append=1)
# resource usage warnings are enabled by default in pydebug mode
if hasattr(sys, 'gettotalrefcount'):
resource_action = "always"
else:
resource_action = "ignore"
simplefilter(resource_action, category=ResourceWarning, append=1)
del _warnings_defaults
| [
"[email protected]"
] | |
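Typical use of the catch_warnings context manager defined above, asserting that a warning fires without letting it leak to stderr:

import warnings

with warnings.catch_warnings(record=True) as log:
    warnings.simplefilter("always")
    warnings.warn("deprecated", DeprecationWarning)

assert len(log) == 1
assert issubclass(log[0].category, DeprecationWarning)
print(log[0].message)  # deprecated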
f0e7c0414602a99eb49b99e2deeb7a13c9cef635 | 1b2a1f807b98034567e936b9b5c76c2fc89b908a | /adj_stf/experimental/classification/multi_label_classification_model.py | 6cb7fefe4e9aa02a3c55a5373d8693d745ffdb73 | [] | no_license | Adreambottle/Transformer2GP | 48c955d8eb155caef4c24a3c03ee3aa9ab0bd3da | 5ba1a5005c2ad21066304cdeb1d7c2587c8191da | refs/heads/main | 2023-07-07T14:17:51.673437 | 2021-08-17T14:14:56 | 2021-08-17T14:14:56 | 397,279,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,051 | py | from multiprocessing import cpu_count
import torch
from adj_tf import (
WEIGHTS_NAME,
AlbertConfig,
AlbertTokenizer,
BertConfig,
BertTokenizer,
DistilBertConfig,
DistilBertTokenizer,
RobertaConfig,
RobertaTokenizer,
XLMConfig,
XLMTokenizer,
XLNetConfig,
XLNetTokenizer,
)
from adj_stf.classification import ClassificationModel
from adj_stf.custom_models.models import (
AlbertForMultiLabelSequenceClassification,
BertForMultiLabelSequenceClassification,
DistilBertForMultiLabelSequenceClassification,
RobertaForMultiLabelSequenceClassification,
XLMForMultiLabelSequenceClassification,
XLNetForMultiLabelSequenceClassification,
)
class MultiLabelClassificationModel(ClassificationModel):
def __init__(self, model_type, model_name, num_labels=None, pos_weight=None, args=None, use_cuda=True):
"""
Initializes a MultiLabelClassification model.
Args:
model_type: The type of model (bert, roberta)
model_name: Default Transformer model name or path to a directory containing Transformer model file (pytorch_nodel.bin).
num_labels (optional): The number of labels or classes in the dataset.
pos_weight (optional): A list of length num_labels containing the weights to assign to each label for loss calculation.
args (optional): Default args will be used if this parameter is not provided. If provided, it should be a dict containing the args that should be changed in the default args.
use_cuda (optional): Use GPU if available. Setting to False will force model to use CPU only.
"""
MODEL_CLASSES = {
"bert": (BertConfig, BertForMultiLabelSequenceClassification, BertTokenizer),
"roberta": (RobertaConfig, RobertaForMultiLabelSequenceClassification, RobertaTokenizer),
"xlnet": (XLNetConfig, XLNetForMultiLabelSequenceClassification, XLNetTokenizer),
"xlm": (XLMConfig, XLMForMultiLabelSequenceClassification, XLMTokenizer),
"distilbert": (DistilBertConfig, DistilBertForMultiLabelSequenceClassification, DistilBertTokenizer),
"albert": (AlbertConfig, AlbertForMultiLabelSequenceClassification, AlbertTokenizer),
}
config_class, model_class, tokenizer_class = MODEL_CLASSES[model_type]
if num_labels:
self.config = config_class.from_pretrained(model_name, num_labels=num_labels)
self.num_labels = num_labels
else:
self.config = config_class.from_pretrained(model_name)
self.num_labels = self.config.num_labels
self.tokenizer = tokenizer_class.from_pretrained(model_name)
self.tokenizer = tokenizer_class.from_pretrained(model_name)
self.num_labels = num_labels
self.pos_weight = pos_weight
self.sliding_window = False
if use_cuda:
if torch.cuda.is_available():
self.device = torch.device("cuda")
else:
raise ValueError(
"'use_cuda' set to True when cuda is unavailable. Make sure CUDA is available or set use_cuda=False."
)
else:
self.device = "cpu"
if self.pos_weight:
self.model = model_class.from_pretrained(
model_name, config=self.config, pos_weight=torch.Tensor(self.pos_weight).to(self.device)
)
else:
self.model = model_class.from_pretrained(model_name, config=self.config)
self.results = {}
self.args = {
"output_dir": "outputs/",
"cache_dir": "cache_dir/",
"fp16": False,
"max_seq_length": 128,
"train_batch_size": 8,
"gradient_accumulation_steps": 1,
"eval_batch_size": 8,
"num_train_epochs": 1,
"weight_decay": 0,
"learning_rate": 4e-5,
"adam_epsilon": 1e-8,
"warmup_ratio": 0.06,
"warmup_steps": 0,
"max_grad_norm": 1.0,
"stride": False,
"logging_steps": 50,
"save_steps": 2000,
"evaluate_during_training": False,
"overwrite_output_dir": False,
"reprocess_input_data": False,
"process_count": cpu_count() - 2 if cpu_count() > 2 else 1,
"n_gpu": 1,
"use_multiprocessing": True,
"silent": False,
"threshold": 0.5,
}
if not use_cuda:
self.args["fp16"] = False
if args:
self.args.update(args)
self.args["model_name"] = model_name
self.args["model_type"] = model_type
def train_model(
self, train_df, multi_label=True, eval_df=None, output_dir=None, show_running_loss=True, args=None
):
return super().train_model(
train_df,
multi_label=multi_label,
eval_df=eval_df,
output_dir=output_dir,
show_running_loss=show_running_loss,
args=args,
)
def eval_model(self, eval_df, multi_label=True, output_dir=None, verbose=False, **kwargs):
return super().eval_model(eval_df, output_dir=output_dir, multi_label=multi_label, verbose=verbose, **kwargs)
def evaluate(self, eval_df, output_dir, multi_label=True, prefix="", **kwargs):
return super().evaluate(eval_df, output_dir, multi_label=multi_label, prefix=prefix, **kwargs)
def load_and_cache_examples(self, examples, evaluate=False, no_cache=False, multi_label=True):
return super().load_and_cache_examples(examples, evaluate=evaluate, no_cache=no_cache, multi_label=multi_label)
def compute_metrics(self, preds, labels, eval_examples, multi_label=True, **kwargs):
return super().compute_metrics(preds, labels, eval_examples, multi_label=multi_label, **kwargs)
def predict(self, to_predict, multi_label=True):
return super().predict(to_predict, multi_label=multi_label)
| [
"[email protected]"
] | |
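A hypothetical training run (this fork mirrors the simpletransformers API, so the column layout -- a text column plus a list of 0/1 labels -- is an assumption, as is the checkpoint name):

import pandas as pd

train_df = pd.DataFrame(
    [["example sentence", [1, 0, 1]],
     ["another sentence", [0, 1, 0]]],
    columns=["text", "labels"],
)

model = MultiLabelClassificationModel(
    "bert", "bert-base-uncased", num_labels=3, use_cuda=False)
model.train_model(train_df)
predictions, raw_outputs = model.predict(["a new sentence"])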
1657f47f0d775612c12560663e94a60de071eab1 | 7d096568677660790479d87c22b47aae838ef96b | /stubs-legacy/System/Diagnostics/__init___parts/DebuggerBrowsableState.py | 43d462d3dfbd7bf6c6f0f09f2a3384a2c0254e67 | [
"MIT"
] | permissive | NISystemsEngineering/rfmx-pythonnet | 30adbdd5660b0d755957f35b68a4c2f60065800c | cd4f90a88a37ed043df880972cb55dfe18883bb7 | refs/heads/master | 2023-02-04T00:39:41.107043 | 2023-02-01T21:58:50 | 2023-02-01T21:58:50 | 191,603,578 | 7 | 5 | MIT | 2023-02-01T21:58:52 | 2019-06-12T16:02:32 | Python | UTF-8 | Python | false | false | 999 | py | class DebuggerBrowsableState(Enum,IComparable,IFormattable,IConvertible):
"""
Provides display instructions for the debugger.
enum DebuggerBrowsableState,values: Collapsed (2),Never (0),RootHidden (3)
"""
def __eq__(self,*args):
""" x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
pass
def __format__(self,*args):
""" __format__(formattable: IFormattable,format: str) -> str """
pass
def __ge__(self,*args):
pass
def __gt__(self,*args):
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __le__(self,*args):
pass
def __lt__(self,*args):
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
Collapsed=None
Never=None
RootHidden=None
value__=None
| [
"[email protected]"
] | |
ba1dcb17a6e7d524f9f07edc2b67b588720faaad | c25a17f0f82c2eebca55bbe180f4c2ccbbf00292 | /01_Jump_to_python/Chap06/6장_practice/practice4_메모장.py | 786b7b2f7c82951b7ad1f09ccb24176b86c390b9 | [] | no_license | superbeom97/jumpjump | a0a4da6f0df0483ef0cef9833b5fe0402ec63c9c | fc45efce2a2b00c614aa5aa54b36be1572ed40ce | refs/heads/master | 2021-09-15T09:35:16.903857 | 2018-05-30T00:00:59 | 2018-05-30T00:00:59 | 111,883,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,901 | py | import sys
args = sys.argv
args_one = args[1]
args_two = args[2:]
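# Hedged usage notes (flags handled by the branches below; the script name is illustrative):
#   python practice4_memo.py -a  some text   -> append "some text" to memo.txt
#   python practice4_memo.py -au some text   -> append each word upper-cased
#   python practice4_memo.py -v              -> print the contents of memo.txt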
if args_one == "-a":
try:
f = open("memo.txt", 'r')
f = open("memo.txt", 'a')
f.write(" ".join(args_two))
f.write("\n")
f.close()
except:
number = int(input("memo.txt 파일이 없습니다. 아래 중 선택하세요\n1. memo.txt 파일을 새로 생성하시겠습니까?\n2. 파일 경로를 입력하시겠습니까?\n: "))
if number == 1:
f = open("memo.txt", 'w')
f.write(" ".join(args_two))
f.write("\n")
print("memo.txt 파일을 생성했습니다. 감사합니다.")
f.close()
elif number == 2:
            address = str(input("Enter the file path: "))
f = open(address, 'a')
f.write(" ".join(args_two))
f.write("\n")
f.close()
print("정상 처리되었습니다. 감사합니다.")
else:
print("1번과 2번 중에 선택해라잉 확 마")
elif args_one == "-au":
for i in args_two:
f = open("memo.txt", 'a')
b = "".join(i)
f.write(b.upper())
f.write("\n")
        f.close()
elif args_one == "-v":
try:
f = open("memo.txt", 'r')
data = f.read()
print(data)
except FileNotFoundError:
number = int(input("memo.txt 파일이 없습니다. 아래 중 선택하세요\n1. 종료하시겠습니까?\n2. 파일 경로를 입력하시겠습니까?\n: "))
if number == 1:
print("이용해 주셔서 감사합니다.")
# break
elif number == 2:
address = str(input("파일 경로를 입력하세요: "))
f = open(address, 'r')
data = f.read()
print(data)
else:
print("1번과 2번 중에 선택해라잉 확 마") | [
"[email protected]"
] | |
e67af2421558b2a196f0c5584ac66fb0e1dd4252 | 8f0fc0f4ac44e85e87ade78ac3f8d3b1996587e5 | /Model_1.py | ae103fcd74ce05a1803a2d225dbbde006b88ee19 | [] | no_license | Data-drone/Kaggle_Allstate | 91bc9d2400866f58813f4ba6f04009647d90fd0a | 70b7f63eefea55ec9f0c8da6217b7f9ee8f12e76 | refs/heads/master | 2021-06-30T09:57:59.864482 | 2017-09-21T17:48:43 | 2017-09-21T17:48:43 | 74,899,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,111 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Nov 29 23:23:27 2016
@author: Brian
"""
"""
Build model 1 script
"""
import os
import pandas as pd
import numpy as np
"""
feat table_1
"""
#### standard functions
def Find_Low_Var(frame):
sizes = []
low_var_col = []
for column in frame:
data_sets = frame[column]
uniques = data_sets.unique() # note the brackets
# print the pivot to show distributions
# will error
if (column != 'id'):
pd_Table = pd.pivot_table(train, values = 'id', index=column, aggfunc='count').apply(lambda x: np.round(np.float(x)/len(train)*100, 2))
#print (pd_Table )
if max(pd_Table) > 99:
low_var_col.append(column)
#variance = np.var(data_sets)
#print(uniques.size)
#print(column)
sizes.append(uniques.size)
#print(len(uniques))
return(sizes, low_var_col)
data_path = 'Data'
os.listdir('./Data/')
train = pd.read_csv('./Data/train.csv')
### 2 tyoes if columns the continuous and the categorical
# lets separate these
cate_cols = [col for col in train.columns if 'cat' in col]
continuous_cols = [col for col in train.columns if 'cont' in col]
categorical = train[cate_cols]
sizes, low_var_cols_to_drop = Find_Low_Var(categorical)
categorical_to_keep = categorical.drop(low_var_cols_to_drop, axis = 1)
### check how big the one hot is first
OneHot = pd.get_dummies(categorical_to_keep)
## try feature hasher again
from sklearn.feature_extraction import FeatureHasher
FH = FeatureHasher(n_features = 1000, input_type = 'dict')
hashed_Feat = FH.transform(categorical_to_keep.to_dict(orient='records'))
#dense_Feat = hashed_Feat.todense()
### make into categories
continuous = train[continuous_cols]
## id and target columns
id_target = train[['id', 'loss']]
## quick test 1
frame = [continuous, OneHot]
merge = pd.concat(frame, axis = 1)
"""
train test splitting
"""
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split( merge, np.log(id_target.loss), test_size=0.4, random_state=0)
assert X_train.shape[0] + X_test.shape[0] == continuous.shape[0]
# model 1 # like
# 8917348 RMSE for log
# 8116237 for normal
#from sklearn import linear_model
#reg = linear_model.RidgeCV(alphas=[0.1, 1.0, 10.0])
#reg.fit(X_train, y_train)
#result = reg.predict(X_test)
# model 2
# 9069687 rmse for 100 regressors
# with one hots vars 4332759 rmse
#from sklearn.ensemble import RandomForestRegressor
#clf = RandomForestRegressor(n_estimators = 400, criterion='mse', verbose = 1, n_jobs = 7)
#clf.fit(X_train, y_train)
#result = clf.predict(X_test)
# model 3 # default xgb was 4389784
import xgboost as xgb
from xgboost.sklearn import XGBRegressor
xgb_mod = XGBRegressor(max_depth = 10, learning_rate = 0.25, n_estimators = 150)
xgb_mod.fit(X_train, y_train)
result = xgb_mod.predict(X_test)
# score
from sklearn.metrics import mean_squared_error
mean_squared_error(np.exp(y_test), np.exp(result) )
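# Hedged note: the score comments above quote "RMSE", but mean_squared_error
# returns plain MSE; a minimal root form, assuming the same variables:
#
#   rmse = np.sqrt(mean_squared_error(np.exp(y_test), np.exp(result)))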
import matplotlib.pyplot as plt
y_test.hist()
plt.hist(result, bins='auto') | [
"[email protected]"
] | |
ad0646d8b6725f5fb875510a1944d2a4c900e23d | c89e59b4d018e8a2d7dc0dbc3bb7a3768024f849 | /before2021/python/문제풀이/day5/3_숫자카운팅.py | a21ba81101844a7fb5f88a171cfd12bc1c5fbafa | [] | no_license | leeiopd/algorithm | ff32103a43e467a5a091257cc07cf35365ecbf91 | e41647d3918c3099110d97f455c5ebf9a38d571e | refs/heads/master | 2023-03-08T23:46:34.919991 | 2023-02-22T09:39:46 | 2023-02-22T09:39:46 | 166,131,885 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,375 | py | import sys
sys.stdin = open("3_input.txt")
'''
The first line contains N (1 <= N <= 200,000).
The second line contains the N numbers stored in the array, in order, separated by spaces.
The third line contains M (1 <= M <= 200,000).
The fourth line contains the M numbers to search for, separated by spaces.
(These numbers are not sorted.)
For each number given on the fourth line, print how many times it appears in the array, in order.
'''
N = int(input())
arr = list(map(int, input().split()))
M = int(input())
find = list(map(int, input().split()))
def lowerSearch(s, e, where):
global arr
while s < e:
# e = N-1
# s = 0
m = (s+e)//2 # mid
# if where == arr[m]: return m+1
if where > arr[m] : s = m + 1
else: e = m
return e
def upperSearch(s, e, where):
global arr
while s < e:
# e = N-1
# s = 0
m = (s+e)//2 # mid
# if where == arr[m]: return m+1
if where >= arr[m] : s = m + 1
else: e = m
return e
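# Hedged standard-library equivalent of the two binary searches above
# (defined for reference only; not called by this script):
def count_occurrences(sorted_values, x):
    import bisect
    return bisect.bisect_right(sorted_values, x) - bisect.bisect_left(sorted_values, x)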
for i in range(M):
low = lowerSearch(0, N, find[i])
up = upperSearch(0, N, find[i])
    if low != up: # search the right end only when the value was found
print(up-low, end=' ')
else:
print(0,end=' ') | [
"[email protected]"
] | |
9c7e0ec9ac281a7e422bfd1d6657a9deae3fdd71 | 5b9035dbfe0750e9933728f9631ad7a183dd3429 | /18/01/Pool.py | 370cc9e9856dfee8cce0818fc689057d3fd7a977 | [
"CC0-1.0"
] | permissive | pylangstudy/201709 | 271efbd4f337d912d0ca958a621eb2a040091528 | 53d868786d7327a83bfa7f4149549c6f9855a6c6 | refs/heads/master | 2021-01-21T12:16:21.950493 | 2017-09-30T00:02:34 | 2017-09-30T00:02:34 | 102,058,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,791 | py | import weakref
import csv
import collections
class ConstMeta(type):
class ConstError(TypeError): pass
def __init__(self, name, bases, dict):
super(ConstMeta, self).__init__(name, bases, dict)
import sys
        sys.modules[name]=self()  # assign an instance of the class to the module of any class inheriting ConstMeta
def __setattr__(self, name, value):
        if name in self.__dict__.keys(): raise self.ConstError('read-only. Reassignment is forbidden.')
super(ConstMeta, self).__setattr__(name, value)
class Pool:
def __new__(cls):
cls.__Pool = {}
cls.__WeakPool = weakref.WeakValueDictionary(cls.__Pool)
print(dir(cls))
return super().__new__(cls)
@classmethod
def Get(cls, _id):
if _id in cls.__WeakPool: return cls.__WeakPool[_id]
else:
target = cls.__Read(_id)
            if None is target: raise ValueError(f'No data exists for the given id: _id={_id}')
            cls.__Pool[target.Id] = target
            cls.__WeakPool[target.Id] = target  # the weak dict copies entries at construction, so register new objects here too
            return cls.__WeakPool[_id]
@classmethod
def Release(cls, _id):
if _id in cls.__Pool: del cls.__Pool[_id]
@classmethod
def __Read(cls, _id):
        with open('Humans.csv') as f:
            reader = csv.reader(f)
            header = next(reader)  # skip the header row
            print(header)
            Human = collections.namedtuple('Humans', header)
            for row in reader:
                if not row: continue
                # each iteration yields one row as a list of column strings
                if row[0] == str(_id): return Human(*row)
        return None
if __name__ == '__main__':
h0 = Pool.Get(0)
print(h0)
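    # Hedged note on the pooling design: weakref.WeakValueDictionary drops an
    # entry once no strong reference remains, so the strong dict (cls.__Pool)
    # is what keeps objects alive until Release() is called. Minimal standalone
    # illustration (names are illustrative):
    #
    #   strong = {}
    #   weak = weakref.WeakValueDictionary()
    #   class Obj: pass
    #   strong[1] = weak[1] = Obj()
    #   del strong[1]       # the weak entry disappears with the last strong reference
    #   assert 1 not in weak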
| [
"[email protected]"
] | |
07b293d229357fb7a30af429f26cc27c3258bcc2 | a7fd2ed77661ed0e32d75678bdfbb448a4b1a38d | /ABC/ABC169/E.py | 83299df85f453d27cbba2a130895d08c020b6e77 | [] | no_license | Rikuo-git/AtCoder | 218a976a96f961371f4c1f4a8bf0e4ef7a78e09c | e7fe5db837acba11aed890181d429517c0f4bedc | refs/heads/master | 2023-03-03T10:07:09.825074 | 2021-02-15T11:37:10 | 2021-02-15T11:37:10 | 266,530,674 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | # my solution
import numpy as np
(n,),*l = [[*map(int,i.split())]for i in open(0)]
a,b = np.median(l,axis=0)
if n%2>0:
print(int(b-a+1))
else:
print(int(b*2-a*2)+1)
# Odd n: the span between the medians; even n: twice the span between the medians.
# shortest (by fsdshn) -- mathematically the same thing
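# Hedged elaboration: m/M below are the sorted lower/upper bounds; every achievable
# median lies between median(m) and median(M). For odd n medians step by 1; for
# even n they are averages of two values and step by 1/2, doubling the count.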
n,*l=map(int,open(0).read().split())
m,M=sorted(l[::2]),sorted(l[1::2])
d=n//2
print(M[d]-m[d]+1+(M[d-1]+-m[d-1])*(n%2^1)) | [
"[email protected]"
] | |
43d4c7d2d7bb6f1ecbf5e63561d6f9a6cec1f7ee | b891b6f5f51750a95c4b4ad5766cc63431ad2799 | /config.py | 0618a3ed2bdd25bc6efb6da48cbc927c5bb0eb49 | [
"MIT"
] | permissive | dennisnyamweya/pitching | b12a75f5681289ee70cab65eeb19da5c35c7e718 | 9c338d3f496f9855b0a9233579f9aa4d3c0d6464 | refs/heads/master | 2022-09-26T13:55:22.784323 | 2019-08-06T04:27:52 | 2019-08-06T04:27:52 | 200,616,118 | 0 | 0 | MIT | 2022-09-16T18:07:47 | 2019-08-05T08:36:44 | Python | UTF-8 | Python | false | false | 821 | py | import os
class Config:
"""Main configurations class"""
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://moringa:123@localhost/pitchy'
SECRET_KEY = "try harder"
UPLOADED_PHOTOS_DEST = 'app/static/photos'
MAIL_SERVER = 'smtp.gmail.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = os.environ.get("MAIL_USERNAME")
MAIL_PASSWORD = os.environ.get("MAIL_PASSWORD")
class ProdConfig(Config):
"""Production configuration class that inherits from the main configurations class"""
SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL")
class DevConfig(Config):
"""Configuration class for development stage of the app"""
DEBUG = True
config_options = {
'development': DevConfig,
'production': ProdConfig
}
| [
"[email protected]"
] | |
c0f84c701d6083a055c56a3db2f06fafc53a65f1 | 19ec1116024bebfe16d099f273bd3997b212f594 | /FinancePython/strategy_tester/ResultsAnalyser.py | c5e023ef831ce5f75993d87f3206530e38a5596c | [] | no_license | GBelzoni/BigGits | 6058f3e89016ad8d39dfe4de339c76e547529032 | daa538eca30459bd38a6d7332a27a3b38fac5875 | refs/heads/master | 2016-09-05T16:06:33.263000 | 2014-05-10T17:54:19 | 2014-05-10T17:54:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,357 | py | '''
Created on Jun 23, 2013
@author: phcostello
'''
import pandas as pd
import numpy as np
class ResultsAnalyser(object):
'''
This class analyses fitted trade strategy object
'''
def __init__(self, data, valueIndex = 'Value', referenceIndex = None):
'''
Takes a pandas dataframe object with a 'Date' index which should be of type date or datetime
valueIndex column which is timeseries of portfolio value
'Signal' which is the trade signal
'''
self.valueIndex = valueIndex
self.result = data
if referenceIndex != None:
self.refIndex = data[referenceIndex]
else:
self.refIndex = None
self.result_date_range = [ data.index[0], data.index[-1]]
self.result_subset = self.result.loc[self.result_date_range[0]:self.result_date_range[-1]].copy()
def set_results_range(self, tstart, tend, make_positive=False):
'''
Sets the range for which the results will be calculated for given run strategy
make_positive adds min over range so returns, etc make sense
'''
self.result_date_range = [ tstart, tend]
self.result_subset = self.result.loc[self.result_date_range[0]:self.result_date_range[-1]].copy()
if make_positive:
if min(self.result_subset[self.valueIndex]) <= 0:
#make the series >= 100 so that returns calc ok
self.result_subset[self.valueIndex] += np.abs(min(self.result_subset[self.valueIndex])) + 100
def reset_range_to_dafault(self):
self.result_date_range = [ self.result.index[0], self.result.index[-1]]
def get_result(self):
return self.result_subset.loc[self.result_date_range[0]:self.result_date_range[1]]
def get_returns(self,annualing_factor=1,useReference=False):
''' Calcs returns vs above referenceIndex, if None type then usual returns '''
value = self.result_subset[self.valueIndex]
#data = self.result[self.valueIndex][self.result[self.valueIndex].notnull()] #Horrible line, but is just filtering out notnull values
retsPort = pd.DataFrame(value.pct_change())
#retsPort = (1+retsPort)**cumperiods-1 #For annualising
if useReference == True:
if self.refIndex is None:
raise ValueError('No reference index set')
retsRef = pd.DataFrame(self.refIndex.pct_change())
rets = pd.merge(retsPort,retsRef, how='inner',left_index=True,right_index=True)
else:
rets = retsPort
rets['Reference'] = 0
rets.columns = ['Portfolio','Reference']
#Annualising factor
rets *= annualing_factor
return rets
def get_cumulative_return(self,cumperiods=1,useReference = False):
rets = self.get_returns(1, useReference) #get 1period returns
rets_abv_ref = pd.DataFrame(rets['Portfolio']-rets['Reference'])
cumrets = (1+rets_abv_ref).cumprod()-1 #cumulate
cumrets = (cumrets-1)**cumperiods + 1 #Apply annualising factor
return cumrets
def get_volatility(self, annualising_scalar = 1, returns = False):
'''Get volatility of portfolio value or returns'''
if not(returns):
value = self.result_subset[self.valueIndex]
return value.std()
else:
rets = self.get_returns(annualising_scalar, False) #get 1period returns
return rets['Portfolio'].std()
def sharpe_ratio(self, useMarketRef=False, annualising_factor = 252):
'''
Calcs sharpe ratio vs marketRef,
if useMarketRef = None then riskfree rate assumed to be 0
annualising_factor - scales sr by sqrt of AF. Default is daily returns to annual with 252 trading days
'''
rets = self.get_returns(useReference=useMarketRef)
if useMarketRef:
retsOverRef = rets['Portfolio'] - rets['Reference']
else:
retsOverRef = rets['Portfolio']
sr = np.sqrt(annualising_factor) * retsOverRef.mean(skipna=True)/retsOverRef.std(skipna = True)
return sr
def sortino_ratio(self, useMarketRef= False, benchmarkRate = None ):
''' Calcs sortino ratio vs benchmark
if False useMarketRef then refrate assumed to be be 0
if no benchmark is None the benchmark target is zero
'''
rets = self.get_returns(useReference=useMarketRef)
retsOverRef = rets['Portfolio'] - rets['Reference']
if benchmarkRate == None:
benchmarkRate = 0.0
        benchmarkSeries = benchmarkRate*np.ones(len(retsOverRef))
benchmarkSeries = pd.DataFrame(benchmarkSeries, index = retsOverRef.index)
retsOverBenchMark = retsOverRef - benchmarkSeries
#Calc numerator in sortino ratio
numerator = retsOverBenchMark.mean()
#Calc denominator in sortino, ie std for only returns over benchmark
denominator = (retsOverBenchMark.abs() + retsOverBenchMark)/2.0 #Gives max(value,0)
denominator = denominator**2 #Square values
denominator = denominator.mean() #get mean
denominator = np.sqrt(denominator)
sortinor = numerator/denominator
return sortinor[0]
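    # Hedged note on the denominator above: (|x| + x)/2 equals max(x, 0), so this
    # averages squared returns ABOVE the benchmark; the textbook Sortino denominator
    # uses the downside instead, sqrt(mean(max(target - r, 0)^2)).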
def draw_downs(self, percent = False):
'''Calcs timeseries of current percentage drawdown'''
#Calculate percentag Drawdowns
value = self.result_subset[self.valueIndex]
startt= value.index[0]
Max = value[startt]
dd = 0
startDd = startt
endDd = startt
lengthDd = 0
result = [[startt,Max,dd, startDd, endDd, lengthDd]]
for t in value.index:
Max = max(Max,value[t])
if Max == value[t]:
startDd = t
dd = 0
if percent:
thisDd = (Max - value[t])/Max
else:
thisDd = (Max - value[t])
dd = max(dd, thisDd )
if not(Max == value[t]):
endDd = t
lengthDd = startDd - endDd
thisResult = [t,Max,dd, startDd, endDd, lengthDd]
result.append(thisResult)
#Format results to dataframe
columns = ['Time','MaxVal','Drawdown','StartDD','EndDD','LengthDD']
result = pd.DataFrame(data=result, columns=columns)
result.set_index('Time', inplace=True)
return result
def max_draw_down_magnitude(self, percent = False):
'''Calcs max drawdown'''
dd = self.draw_downs(percent)
maxDD = max(dd['Drawdown'])
maxDDr = dd[dd['Drawdown'] == maxDD]
return maxDDr.iloc[0]
def max_draw_down_timelength(self, percent = False):
'''Calcs max drawdown'''
dd = self.draw_downs(percent)
        thisDD = dd.iloc[2:] # drop the None values at the start of the series
maxDD = min(thisDD['LengthDD'])
maxDDr = thisDD[thisDD['LengthDD'] == maxDD]
return maxDDr.transpose()
def var(self, percentile = 0.05):
'''Value at Risk'''
#subset on range and then diff
        diffs = self.result_subset[self.valueIndex].diff()
return diffs.quantile(percentile)
def etl(self, percentile = 0.05):
'''Expected Tail Loss'''
        diffs = self.result_subset[self.valueIndex].diff()
#sort is in place
diffs.sort()
#Calcing var as as quantile of percentile
length = len(diffs)
vindex = np.floor(percentile*length)
vindex = int(vindex)
#etl
etl = np.average(diffs.iloc[0:vindex].values)
return etl
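    # Hedged worked example for var()/etl() above: with 100 daily P&L diffs whose
    # sorted worst values are [-9, -7, -5, -4, -3, ...], var(0.05) is the
    # 5th-percentile diff and etl(0.05) is the mean of the worst 5 diffs
    # (-5.6 here), so ETL <= VaR by construction.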
def summary(self):
'''Calcs summary of Sharpe and max drawdown'''
print "Sharpe Ratio", self.sharpe_ratio()
print "Value At Risk 0.05, 0.10"
        print self.var(0.05), self.var(0.10)
print "Expected Tail Loss 0.05, 0.10"
        print self.etl(0.05), self.etl(0.10)
print "MaxDrawDown (level)"
print self.max_draw_down_magnitude()
print ""
print "MaxDrawDown (percent)"
print self.max_draw_down_magnitude(percent = True)
print ""
print "MaxDrawDown time (level)"
print self.max_draw_down_timelength()
print ""
print "MaxDrawDown time (percent)"
print self.max_draw_down_timelength(percent = True)
class PairTradeAnalyser(ResultsAnalyser):
'''
This class analyses fitted trade strategy object
'''
def __init__(self, strategy, referenceIndex):
'''
Constructor
'''
ResultsAnalyser.__init__(self,strategy, referenceIndex)
self.yscaling = strategy.market_data.results.params[0]
self.adfPvalue= strategy.market_data.adfResids()[1]
def summary(self):
print "Summary for Pair Trade Strategy"
print "Scaling", self.yscaling
print "Adf p-value", self.adfPvalue
print "Sharpe Ratio", self.sharpe_ratio()
print "Value At Risk 0.05, 0.10"
print self.var(0.05), self.var(0.10)
print "Expected Tail Loss 0.05, 0.10"
print self.etl(0.05), self.etl(0.10)
print ""
print "MaxDrawDown (level)"
print self.max_draw_down_magnitude()
print ""
print "MaxDrawDown (percent)"
print self.max_draw_down_magnitude(percent = True)
print ""
print "MaxDrawDown time (level)"
print self.max_draw_down_timelength().loc['LengthDD']
print ""
print "MaxDrawDown time (percent)"
print self.max_draw_down_timelength(percent = True).loc['LengthDD']
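def _sharpe_sketch(returns, annualising_factor=252):
    """Hedged standalone sketch of the Sharpe computation used above
    (sqrt(AF) * mean / std of per-period returns); inputs are illustrative."""
    r = np.asarray(returns, dtype=float)
    return np.sqrt(annualising_factor) * r.mean() / r.std()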
| [
"[email protected]"
] | |
da2d02b896509df95c30e65c5bf1e3d25ed7b51d | 96dcea595e7c16cec07b3f649afd65f3660a0bad | /homeassistant/components/watttime/config_flow.py | 4f4206da6ec6ff93b3340a48f97d4137be234a68 | [
"Apache-2.0"
] | permissive | home-assistant/core | 3455eac2e9d925c92d30178643b1aaccf3a6484f | 80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743 | refs/heads/dev | 2023-08-31T15:41:06.299469 | 2023-08-31T14:50:53 | 2023-08-31T14:50:53 | 12,888,993 | 35,501 | 20,617 | Apache-2.0 | 2023-09-14T21:50:15 | 2013-09-17T07:29:48 | Python | UTF-8 | Python | false | false | 8,742 | py | """Config flow for WattTime integration."""
from __future__ import annotations
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any
from aiowatttime import Client
from aiowatttime.errors import CoordinatesNotFoundError, InvalidCredentialsError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigEntry, OptionsFlow
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_PASSWORD,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers import aiohttp_client, config_validation as cv
from .const import (
CONF_BALANCING_AUTHORITY,
CONF_BALANCING_AUTHORITY_ABBREV,
CONF_SHOW_ON_MAP,
DOMAIN,
LOGGER,
)
CONF_LOCATION_TYPE = "location_type"
LOCATION_TYPE_COORDINATES = "Specify coordinates"
LOCATION_TYPE_HOME = "Use home location"
STEP_COORDINATES_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_LATITUDE): cv.latitude,
vol.Required(CONF_LONGITUDE): cv.longitude,
}
)
STEP_LOCATION_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_LOCATION_TYPE): vol.In(
[LOCATION_TYPE_HOME, LOCATION_TYPE_COORDINATES]
),
}
)
STEP_REAUTH_CONFIRM_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_PASSWORD): str,
}
)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
}
)
@callback
def get_unique_id(data: dict[str, Any]) -> str:
"""Get a unique ID from a data payload."""
return f"{data[CONF_LATITUDE]}, {data[CONF_LONGITUDE]}"
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for WattTime."""
VERSION = 1
def __init__(self) -> None:
"""Initialize."""
self._client: Client | None = None
self._data: dict[str, Any] = {}
async def _async_validate_credentials(
self, username: str, password: str, error_step_id: str, error_schema: vol.Schema
) -> FlowResult:
"""Validate input credentials and proceed accordingly."""
session = aiohttp_client.async_get_clientsession(self.hass)
try:
self._client = await Client.async_login(username, password, session=session)
except InvalidCredentialsError:
return self.async_show_form(
step_id=error_step_id,
data_schema=error_schema,
errors={"base": "invalid_auth"},
description_placeholders={CONF_USERNAME: username},
)
except Exception as err: # pylint: disable=broad-except
LOGGER.exception("Unexpected exception while logging in: %s", err)
return self.async_show_form(
step_id=error_step_id,
data_schema=error_schema,
errors={"base": "unknown"},
description_placeholders={CONF_USERNAME: username},
)
if CONF_LATITUDE in self._data:
# If coordinates already exist at this stage, we're in an existing flow and
# should reauth:
entry_unique_id = get_unique_id(self._data)
if existing_entry := await self.async_set_unique_id(entry_unique_id):
self.hass.config_entries.async_update_entry(
existing_entry, data=self._data
)
self.hass.async_create_task(
self.hass.config_entries.async_reload(existing_entry.entry_id)
)
return self.async_abort(reason="reauth_successful")
# ...otherwise, we're in a new flow:
self._data[CONF_USERNAME] = username
self._data[CONF_PASSWORD] = password
return await self.async_step_location()
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
"""Define the config flow to handle options."""
return WattTimeOptionsFlowHandler(config_entry)
async def async_step_coordinates(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the coordinates step."""
if not user_input:
return self.async_show_form(
step_id="coordinates", data_schema=STEP_COORDINATES_DATA_SCHEMA
)
if TYPE_CHECKING:
assert self._client
unique_id = get_unique_id(user_input)
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
try:
grid_region = await self._client.emissions.async_get_grid_region(
user_input[CONF_LATITUDE], user_input[CONF_LONGITUDE]
)
except CoordinatesNotFoundError:
return self.async_show_form(
step_id="coordinates",
data_schema=STEP_COORDINATES_DATA_SCHEMA,
errors={CONF_LATITUDE: "unknown_coordinates"},
)
except Exception as err: # pylint: disable=broad-except
LOGGER.exception("Unexpected exception while getting region: %s", err)
return self.async_show_form(
step_id="coordinates",
data_schema=STEP_COORDINATES_DATA_SCHEMA,
errors={"base": "unknown"},
)
return self.async_create_entry(
title=unique_id,
data={
CONF_USERNAME: self._data[CONF_USERNAME],
CONF_PASSWORD: self._data[CONF_PASSWORD],
CONF_LATITUDE: user_input[CONF_LATITUDE],
CONF_LONGITUDE: user_input[CONF_LONGITUDE],
CONF_BALANCING_AUTHORITY: grid_region["name"],
CONF_BALANCING_AUTHORITY_ABBREV: grid_region["abbrev"],
},
)
async def async_step_location(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the "pick a location" step."""
if not user_input:
return self.async_show_form(
step_id="location", data_schema=STEP_LOCATION_DATA_SCHEMA
)
if user_input[CONF_LOCATION_TYPE] == LOCATION_TYPE_HOME:
return await self.async_step_coordinates(
{
CONF_LATITUDE: self.hass.config.latitude,
CONF_LONGITUDE: self.hass.config.longitude,
}
)
return await self.async_step_coordinates()
async def async_step_reauth(self, entry_data: Mapping[str, Any]) -> FlowResult:
"""Handle configuration by re-auth."""
self._data = {**entry_data}
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle re-auth completion."""
if not user_input:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=STEP_REAUTH_CONFIRM_DATA_SCHEMA,
description_placeholders={CONF_USERNAME: self._data[CONF_USERNAME]},
)
self._data[CONF_PASSWORD] = user_input[CONF_PASSWORD]
return await self._async_validate_credentials(
self._data[CONF_USERNAME],
self._data[CONF_PASSWORD],
"reauth_confirm",
STEP_REAUTH_CONFIRM_DATA_SCHEMA,
)
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Handle the initial step."""
if not user_input:
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA
)
return await self._async_validate_credentials(
user_input[CONF_USERNAME],
user_input[CONF_PASSWORD],
"user",
STEP_USER_DATA_SCHEMA,
)
class WattTimeOptionsFlowHandler(config_entries.OptionsFlow):
"""Handle a WattTime options flow."""
def __init__(self, entry: ConfigEntry) -> None:
"""Initialize."""
self.entry = entry
async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> FlowResult:
"""Manage the options."""
if user_input is not None:
return self.async_create_entry(data=user_input)
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Required(
CONF_SHOW_ON_MAP,
default=self.entry.options.get(CONF_SHOW_ON_MAP, True),
): bool
}
),
)
| [
"[email protected]"
] | |
4bb5fe491298503041491de635624ead25c64fe4 | 55fb8b448831a0aee6b7f5ff050616da3eee9f8d | /network/AI_net.py | 110b24c5fd1cf3bd76cffdc1aacbb1cd8d08ede8 | [] | no_license | xsir317/Renju-AI | 0f502ab9bab278e0601377004513c8596a465223 | 92350f0a99a9adcfb4617ebc872c9120a3f02569 | refs/heads/master | 2021-05-31T17:42:44.153725 | 2016-06-29T13:12:45 | 2016-06-29T13:12:45 | 114,357,155 | 0 | 1 | null | 2017-12-15T10:12:54 | 2017-12-15T10:12:54 | null | UTF-8 | Python | false | false | 13,424 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# author: <[email protected]>
import os
import re
import simplejson as json
import tensorflow as tf
TOWER_NAME = "tower"
def weight_variable(shape, stddev=1.0, name=None):
initial = tf.truncated_normal(shape, stddev=stddev)
return tf.Variable(initial, name=name)
def weight_variable_v2(shape, stddev=1.0, name=None):
initial = tf.random_normal(shape, stddev=stddev)
return tf.Variable(initial, name=name)
def bias_variable(shape, name=None):
initial = tf.constant(0.01, shape=shape)
return tf.Variable(initial, name=name)
def bias_variable_v2(shape, stddev=1.0, name=None):
initial = tf.random_normal(shape, stddev=stddev)
return tf.Variable(initial, name=name)
def conv2d(x, W, stride):
return tf.nn.conv2d(x, W, strides=[1, stride, stride, 1], padding="SAME")
def max_pool_2x2(x):
return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding="SAME")
def get_variable_name(prefix=None, var_num=0):
if prefix is not None:
return "%s_%d" % (prefix, var_num), var_num + 1
else:
return None, var_num
def create_policy_network(_input, planes, filters=192, board_size=15, layers=5):
# first conv1
conv1 = conv2d(_input, (8, 8, planes, filters), "conv_1", stride=1)
# norm1
norm1 = tf.nn.lrn(conv1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75, name='norm_1')
# conv2 ~ conv_k
pre_layer = norm1
for i in xrange(layers):
conv_k = conv2d(pre_layer, (5, 5, filters, filters), "conv_%d" % (i + 2), stride=1)
norm2 = tf.nn.lrn(conv_k, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75, name='norm_%d' % (i + 2))
pre_layer = norm2
# last layer
conv_n = conv2d(pre_layer, (3, 3, filters, 32), "conv_n", stride=1)
norm_n = tf.nn.lrn(conv_n, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75, name='norm_n')
reshape = tf.reshape(norm_n, [-1, board_size * board_size * 32])
# dim = reshape.get_shape()[1].value
fc1 = full_connect(reshape, (board_size * board_size * 32, 1024), "fc_1")
with tf.variable_scope("out") as scope:
weights = _variable_with_weight_decay('weights', shape=(1024, board_size * board_size),
stddev=0.04, wd=0.004)
biases = _variable_on_cpu('biases', [board_size * board_size], tf.constant_initializer(0.1))
softmax_linear = tf.add(tf.matmul(fc1, weights), biases, name=scope.name)
_activation_summary(softmax_linear)
return softmax_linear
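# Hedged shape sketch for create_policy_network above (board_size=15, planes=p;
# sizes follow from the SAME-padding, stride-1 convolutions):
#   _input [batch, 15, 15, p] -> conv/LRN stack -> [batch, 15, 15, 32]
#   -> fc [batch, 1024] -> logits [batch, 225], one logit per board intersection.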
def create_value_network(planes, ps_device, worker_device, filters=192, board_size=15, layers=5, name_prefix=None):
variable_num = 0
with tf.device(ps_device):
var_name, variable_num = get_variable_name(name_prefix, variable_num)
W_conv1 = weight_variable((5, 5, planes, filters), stddev=0.1, name=var_name)
var_name, variable_num = get_variable_name(name_prefix, variable_num)
b_conv1 = bias_variable([filters], name=var_name)
W_conv_k, b_conv_k = [], []
for _ in range(layers):
var_name, variable_num = get_variable_name(name_prefix, variable_num)
W_conv_k.append(weight_variable((3, 3, filters, filters), stddev=0.1, name=var_name))
var_name, variable_num = get_variable_name(name_prefix, variable_num)
b_conv_k.append(bias_variable([filters]))
var_name, variable_num = get_variable_name(name_prefix, variable_num)
W_conv_n = weight_variable((1, 1, filters, 1), stddev=0.1, name=var_name)
var_name, variable_num = get_variable_name(name_prefix, variable_num)
b_conv_n = bias_variable([1])
var_name, variable_num = get_variable_name(name_prefix, variable_num)
W_fc1 = weight_variable([board_size * board_size, 256], stddev=0.1, name=var_name)
var_name, variable_num = get_variable_name(name_prefix, variable_num)
b_fc1 = bias_variable([256], name=var_name)
var_name, variable_num = get_variable_name(name_prefix, variable_num)
W_fc2 = weight_variable([256, 3], stddev=0.1, name=var_name)
var_name, variable_num = get_variable_name(name_prefix, variable_num)
b_fc2 = bias_variable([3], name=var_name)
# input
_input = tf.placeholder("float", [None, board_size, board_size, planes])
with tf.device(worker_device):
# first cnn layer
h_conv1 = tf.nn.relu(conv2d(_input, W_conv1, stride=1) + b_conv1)
# middle cnn layers
pre_input = h_conv1
for i in range(layers):
h_conv_k = tf.nn.relu(conv2d(pre_input, W_conv_k[i], stride=1) + b_conv_k[i])
pre_input = h_conv_k
# last cnn layers
h_conv_n = tf.nn.relu(conv2d(h_conv_k, W_conv_n, stride=1) + b_conv_n)
# softmax
h_flat1 = tf.reshape(h_conv_n, [-1, board_size * board_size])
h_fc1 = tf.nn.relu(tf.matmul(h_flat1, W_fc1) + b_fc1)
# _output = tf.tanh(tf.matmul(h_fc1, W_fc2) + b_fc2)
_output = tf.matmul(h_fc1, W_fc2) + b_fc2
return _input, _output
def create_rollout_network(_input, planes, board_size=15):
fc1 = full_connect(_input, (15*15*planes, 15*15*planes), "fc_1")
with tf.variable_scope("out") as scope:
weights = _variable_with_weight_decay('weights', shape=(15*15*planes, board_size * board_size),
stddev=0.04, wd=0.004)
biases = _variable_on_cpu('biases', [board_size * board_size], tf.constant_initializer(0.1))
softmax_linear = tf.add(tf.matmul(fc1, weights), biases, name=scope.name)
_activation_summary(softmax_linear)
return softmax_linear
class SuperNetwork(object):
def __init__(self, model_dir=""):
# set session
# self.session = tf.Session(config=tf.ConfigProto(log_device_placement=True, allow_soft_placement=True))
# load model if exist
self.model_dir = model_dir
if not os.path.isdir(self.model_dir):
os.makedirs(self.model_dir)
self.saver = tf.train.Saver(max_to_keep=None)
# self.restore_model(model_file=model_file)
self.param_file = "%s/params.json" % self.model_dir
self.session = None
def set_session(self, sess):
self.session = sess
def close(self):
# frees all resources associated with the session
self.session.close()
def param_serierlize(self, param_dict):
open(self.param_file, "w").write(json.dumps(param_dict))
def param_unserierlize(self, init_params=None):
if os.path.exists(self.param_file):
jd = json.loads(open(self.param_file, 'r').read())
else:
jd = init_params
return jd
def restore_model(self, model_file=None):
if model_file is not None:
model_file_path = "%s/%s" % (self.model_dir, model_file)
self.saver.restore(self.session, model_file_path)
print("Successfully loaded:", model_file_path)
return True
else:
checkpoint = tf.train.get_checkpoint_state(self.model_dir)
if checkpoint and checkpoint.model_checkpoint_path:
self.saver.restore(self.session, checkpoint.model_checkpoint_path)
print("Successfully loaded:", checkpoint.model_checkpoint_path)
return True
else:
print("Could not find old network weights")
return False
def save_model(self, prefix, global_step=None):
checkpoint_filename = self.saver.save(self.session, self.model_dir + "/" + prefix, global_step=global_step)
return checkpoint_filename
def _activation_summary(x):
# Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
# session. This helps the clarity of presentation on tensorboard.
tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
tf.histogram_summary(tensor_name + '/activations', x)
tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x))
def _variable_with_weight_decay(name, shape, stddev, wd):
var = _variable_on_cpu(name, shape,
tf.truncated_normal_initializer(stddev=stddev))
if wd is not None:
weight_decay = tf.mul(tf.nn.l2_loss(var), wd, name='weight_loss')
tf.add_to_collection('losses', weight_decay)
return var
def _variable_on_cpu(name, shape, initializer):
with tf.device('/cpu:0'):
var = tf.get_variable(name, shape, initializer=initializer)
return var
def conv2d(x, kernel_shape, variable_scope, stride=1, stddev=1e-2, padding="SAME"):
with tf.variable_scope(variable_scope) as scope:
kernel = _variable_with_weight_decay('weights', shape=kernel_shape,
stddev=stddev, wd=0.0)
conv = tf.nn.conv2d(x, kernel, strides=[1, stride, stride, 1], padding=padding)
biases = _variable_on_cpu('biases', [kernel_shape[-1]], tf.constant_initializer(0.0))
bias = tf.nn.bias_add(conv, biases)
conv = tf.nn.relu(bias, name=scope.name)
_activation_summary(conv)
return conv
def full_connect(x, W_shape, variable_scope, stddev=0.04):
with tf.variable_scope(variable_scope) as scope:
weights = _variable_with_weight_decay('weights', shape=W_shape,
stddev=stddev, wd=0.004)
biases = _variable_on_cpu('biases', [W_shape[-1]], tf.constant_initializer(0.1))
fc = tf.nn.relu(tf.matmul(x, weights) + biases, name=scope.name)
_activation_summary(fc)
return fc
def max_pool_2x2(x):
return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding="SAME")
def loss(logits, labels):
# Calculate the average cross entropy loss across the batch.
labels = tf.cast(labels, tf.int64)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits, labels, name='cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
tf.add_to_collection('losses', cross_entropy_mean)
# accuracy
accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(logits, 1), labels), tf.float32))
tf.add_to_collection('accuracy', accuracy)
# The total loss is defined as the cross entropy loss plus all of the weight
# decay terms (L2 loss).
return tf.add_n(tf.get_collection('losses'), name='total_loss')
def tower_loss(scope, inference, states, labels):
# Build inference Graph.
logits = inference(states)
tf.add_to_collection('logits', logits)
# Build the portion of the Graph calculating the losses. Note that we will
# assemble the total_loss using a custom function below.
_ = loss(logits, labels)
# Assemble all of the losses for the current tower only.
losses = tf.get_collection('losses', scope)
# Calculate the total loss for the current tower.
total_loss = tf.add_n(losses, name='total_loss')
# Compute the moving average of all individual losses and the total loss.
loss_averages = tf.train.ExponentialMovingAverage(0.9, name='avg')
loss_averages_op = loss_averages.apply(losses + [total_loss])
# Attach a scalar summary to all individual losses and the total loss; do the
# same for the averaged version of the losses.
for l in losses + [total_loss]:
# Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
# session. This helps the clarity of presentation on tensorboard.
loss_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', l.op.name)
# Name each loss as '(raw)' and name the moving average version of the loss
# as the original loss name.
tf.scalar_summary(loss_name + ' (raw)', l)
tf.scalar_summary(loss_name, loss_averages.average(l))
with tf.control_dependencies([loss_averages_op]):
total_loss = tf.identity(total_loss)
return total_loss
def average_gradients(tower_grads):
"""Calculate the average gradient for each shared variable across all towers.
Note that this function provides a synchronization point across all towers.
Args:
tower_grads: List of lists of (gradient, variable) tuples. The outer list
is over individual gradients. The inner list is over the gradient
calculation for each tower.
Returns:
List of pairs of (gradient, variable) where the gradient has been averaged
across all towers.
"""
average_grads = []
for grad_and_vars in zip(*tower_grads):
# Note that each grad_and_vars looks like the following:
# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))
grads = []
for g, _ in grad_and_vars:
# Add 0 dimension to the gradients to represent the tower.
expanded_g = tf.expand_dims(g, 0)
# Append on a 'tower' dimension which we will average over below.
grads.append(expanded_g)
# Average over the 'tower' dimension.
grad = tf.concat(0, grads)
grad = tf.reduce_mean(grad, 0)
# Keep in mind that the Variables are redundant because they are shared
# across towers. So .. we will just return the first tower's pointer to
# the Variable.
v = grad_and_vars[0][1]
grad_and_var = (grad, v)
average_grads.append(grad_and_var)
return average_grads
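def _average_gradients_numpy_sketch():
    """Hedged numpy illustration (not TensorFlow) of the per-variable averaging
    done in average_gradients above; the tower values are made up."""
    import numpy as np
    tower_grads = [[(np.array([1.0, 2.0]), "w")],
                   [(np.array([3.0, 4.0]), "w")]]
    # zip(*tower_grads) groups the same variable across towers; average the grads
    return [(np.mean([g for g, _ in grad_and_vars], axis=0), grad_and_vars[0][1])
            for grad_and_vars in zip(*tower_grads)]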
| [
"[email protected]"
] | |
431b96040b15421c2067d65fcbc5cc24243089f3 | 3929d28489c8cf53b7d828998ba48e618ee08d08 | /example_django_react_templatetags/runtests.py | 89cce13f9e26c82c47c886b3fc007c977217adc1 | [
"MIT"
] | permissive | EriSilver/django-react-templatetags | 89e0c08ff54a1a4b5cf96e4848a44fd173172072 | b11dd2f1802015589621c3c173850355969c88cf | refs/heads/main | 2023-05-27T04:07:29.710783 | 2021-05-25T19:22:06 | 2021-05-25T19:22:06 | 386,394,693 | 1 | 0 | MIT | 2021-07-15T18:52:56 | 2021-07-15T18:52:55 | null | UTF-8 | Python | false | false | 392 | py | import os
import sys
import argparse
from django.core.management import execute_from_command_line
os.environ["DJANGO_SETTINGS_MODULE"] = "django_react_templatetags.tests.demosite.settings"
def runtests():
args, rest = argparse.ArgumentParser().parse_known_args()
argv = [sys.argv[0], "test"] + rest
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
| [
"[email protected]"
] | |
6aba396a479895a42694694b4f4d0f13154ed4bc | da687718aa8ce62974090af63d25e057262e9dfe | /cap18-Interface-GUI-Tkinter/extras/02_tutopoint/08_listBox.py | 2d5af720f62d7bbd09ee2a26784e198af2b8b98a | [] | no_license | frclasso/revisao_Python_modulo1 | 77928fa4409c97d49cc7deccdf291f44c337d290 | 1e83d0ef9657440db46a8e84b136ac5f9a7c556e | refs/heads/master | 2020-06-25T05:37:28.768343 | 2019-07-27T22:23:58 | 2019-07-27T22:23:58 | 199,217,969 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | py | #!/usr/bin/env python3
from tkinter import *
top = Tk()
Lb1 = Listbox(top)
Lb1.insert(1, "Python")
Lb1.insert(2, "Perl")
Lb1.insert(3, "C")
Lb1.insert(4, "Julia")
Lb1.insert(5, "Djago")
Lb1.insert(6, "Go")
Lb1.pack()
top.mainloop() | [
"[email protected]"
] | |
521b4bf3657fd33b18f0dd8c026ee69df493d283 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-vpc/huaweicloudsdkvpc/v2/model/batch_create_subnet_tags_request.py | 9c6ab367a7d382993a6806837feaf9a13b280a97 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 4,003 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class BatchCreateSubnetTagsRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'subnet_id': 'str',
'body': 'BatchCreateSubnetTagsRequestBody'
}
attribute_map = {
'subnet_id': 'subnet_id',
'body': 'body'
}
def __init__(self, subnet_id=None, body=None):
"""BatchCreateSubnetTagsRequest
The model defined in huaweicloud sdk
:param subnet_id: 子网ID
:type subnet_id: str
:param body: Body of the BatchCreateSubnetTagsRequest
:type body: :class:`huaweicloudsdkvpc.v2.BatchCreateSubnetTagsRequestBody`
"""
self._subnet_id = None
self._body = None
self.discriminator = None
self.subnet_id = subnet_id
if body is not None:
self.body = body
@property
def subnet_id(self):
"""Gets the subnet_id of this BatchCreateSubnetTagsRequest.
子网ID
:return: The subnet_id of this BatchCreateSubnetTagsRequest.
:rtype: str
"""
return self._subnet_id
@subnet_id.setter
def subnet_id(self, subnet_id):
"""Sets the subnet_id of this BatchCreateSubnetTagsRequest.
子网ID
:param subnet_id: The subnet_id of this BatchCreateSubnetTagsRequest.
:type subnet_id: str
"""
self._subnet_id = subnet_id
@property
def body(self):
"""Gets the body of this BatchCreateSubnetTagsRequest.
:return: The body of this BatchCreateSubnetTagsRequest.
:rtype: :class:`huaweicloudsdkvpc.v2.BatchCreateSubnetTagsRequestBody`
"""
return self._body
@body.setter
def body(self, body):
"""Sets the body of this BatchCreateSubnetTagsRequest.
:param body: The body of this BatchCreateSubnetTagsRequest.
:type body: :class:`huaweicloudsdkvpc.v2.BatchCreateSubnetTagsRequestBody`
"""
self._body = body
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, BatchCreateSubnetTagsRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
e6790effcd437b560f2f787aba0b6d9062b9e38e | 6437a3a4a31ab9ad233d6b2d985beb50ed50de23 | /PythonistaAppTemplate/PythonistaKit.framework/pylib/site-packages/paramiko/sftp.py | 075e6e9b4e62c0ecd9c3279fdf58e28f5b90d4e0 | [] | no_license | sreyemnayr/jss-lost-mode-app | 03ddc472decde3c17a11294d8ee48b02f83b71e7 | 3ff4ba6fb13f4f3a4a98bfc824eace137f6aabaa | refs/heads/master | 2021-05-02T08:50:10.580091 | 2018-02-08T20:32:29 | 2018-02-08T20:32:29 | 120,813,623 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,534 | py | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
import select
import socket
import struct
from paramiko import util
from paramiko.common import asbytes, DEBUG
from paramiko.message import Message
from paramiko.py3compat import byte_chr, byte_ord
CMD_INIT, CMD_VERSION, CMD_OPEN, CMD_CLOSE, CMD_READ, CMD_WRITE, CMD_LSTAT, CMD_FSTAT, \
CMD_SETSTAT, CMD_FSETSTAT, CMD_OPENDIR, CMD_READDIR, CMD_REMOVE, CMD_MKDIR, \
CMD_RMDIR, CMD_REALPATH, CMD_STAT, CMD_RENAME, CMD_READLINK, CMD_SYMLINK = range(1, 21)
CMD_STATUS, CMD_HANDLE, CMD_DATA, CMD_NAME, CMD_ATTRS = range(101, 106)
CMD_EXTENDED, CMD_EXTENDED_REPLY = range(200, 202)
SFTP_OK = 0
SFTP_EOF, SFTP_NO_SUCH_FILE, SFTP_PERMISSION_DENIED, SFTP_FAILURE, SFTP_BAD_MESSAGE, \
SFTP_NO_CONNECTION, SFTP_CONNECTION_LOST, SFTP_OP_UNSUPPORTED = range(1, 9)
SFTP_DESC = ['Success',
'End of file',
'No such file',
'Permission denied',
'Failure',
'Bad message',
'No connection',
'Connection lost',
'Operation unsupported']
SFTP_FLAG_READ = 0x1
SFTP_FLAG_WRITE = 0x2
SFTP_FLAG_APPEND = 0x4
SFTP_FLAG_CREATE = 0x8
SFTP_FLAG_TRUNC = 0x10
SFTP_FLAG_EXCL = 0x20
_VERSION = 3
# for debugging
CMD_NAMES = {
CMD_INIT: 'init',
CMD_VERSION: 'version',
CMD_OPEN: 'open',
CMD_CLOSE: 'close',
CMD_READ: 'read',
CMD_WRITE: 'write',
CMD_LSTAT: 'lstat',
CMD_FSTAT: 'fstat',
CMD_SETSTAT: 'setstat',
CMD_FSETSTAT: 'fsetstat',
CMD_OPENDIR: 'opendir',
CMD_READDIR: 'readdir',
CMD_REMOVE: 'remove',
CMD_MKDIR: 'mkdir',
CMD_RMDIR: 'rmdir',
CMD_REALPATH: 'realpath',
CMD_STAT: 'stat',
CMD_RENAME: 'rename',
CMD_READLINK: 'readlink',
CMD_SYMLINK: 'symlink',
CMD_STATUS: 'status',
CMD_HANDLE: 'handle',
CMD_DATA: 'data',
CMD_NAME: 'name',
CMD_ATTRS: 'attrs',
CMD_EXTENDED: 'extended',
CMD_EXTENDED_REPLY: 'extended_reply'
}
class SFTPError (Exception):
pass
class BaseSFTP (object):
def __init__(self):
self.logger = util.get_logger('paramiko.sftp')
self.sock = None
self.ultra_debug = False
### internals...
def _send_version(self):
self._send_packet(CMD_INIT, struct.pack('>I', _VERSION))
t, data = self._read_packet()
if t != CMD_VERSION:
raise SFTPError('Incompatible sftp protocol')
version = struct.unpack('>I', data[:4])[0]
# if version != _VERSION:
# raise SFTPError('Incompatible sftp protocol')
return version
def _send_server_version(self):
# winscp will freak out if the server sends version info before the
# client finishes sending INIT.
t, data = self._read_packet()
if t != CMD_INIT:
raise SFTPError('Incompatible sftp protocol')
version = struct.unpack('>I', data[:4])[0]
# advertise that we support "check-file"
extension_pairs = ['check-file', 'md5,sha1']
msg = Message()
msg.add_int(_VERSION)
msg.add(*extension_pairs)
self._send_packet(CMD_VERSION, msg)
return version
def _log(self, level, msg, *args):
self.logger.log(level, msg, *args)
def _write_all(self, out):
while len(out) > 0:
n = self.sock.send(out)
if n <= 0:
raise EOFError()
if n == len(out):
return
out = out[n:]
return
def _read_all(self, n):
out = bytes()
while n > 0:
if isinstance(self.sock, socket.socket):
# sometimes sftp is used directly over a socket instead of
# through a paramiko channel. in this case, check periodically
# if the socket is closed. (for some reason, recv() won't ever
# return or raise an exception, but calling select on a closed
# socket will.)
while True:
read, write, err = select.select([self.sock], [], [], 0.1)
if len(read) > 0:
x = self.sock.recv(n)
break
else:
x = self.sock.recv(n)
if len(x) == 0:
raise EOFError()
out += x
n -= len(x)
return out
def _send_packet(self, t, packet):
#self._log(DEBUG2, 'write: %s (len=%d)' % (CMD_NAMES.get(t, '0x%02x' % t), len(packet)))
packet = asbytes(packet)
out = struct.pack('>I', len(packet) + 1) + byte_chr(t) + packet
if self.ultra_debug:
self._log(DEBUG, util.format_binary(out, 'OUT: '))
self._write_all(out)
def _read_packet(self):
x = self._read_all(4)
# most sftp servers won't accept packets larger than about 32k, so
# anything with the high byte set (> 16MB) is just garbage.
if byte_ord(x[0]):
raise SFTPError('Garbage packet received')
size = struct.unpack('>I', x)[0]
data = self._read_all(size)
if self.ultra_debug:
self._log(DEBUG, util.format_binary(data, 'IN: '))
if size > 0:
t = byte_ord(data[0])
            #self._log(DEBUG2, 'read: %s (len=%d)' % (CMD_NAMES.get(t, '0x%02x' % t), len(data)-1))
return t, data[1:]
return 0, bytes()
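
# Illustrative sketch (added; not part of the original module, and assuming the
# module's usual imports -- struct, byte_chr, byte_ord -- are in scope above):
# the wire format used by _send_packet/_read_packet is a 4-byte big-endian
# length, one command-type byte, then the payload.
def _demo_frame_init():
    payload = struct.pack('>I', _VERSION)
    frame = struct.pack('>I', len(payload) + 1) + byte_chr(CMD_INIT) + payload
    # on receipt, the peer recovers the size and the command type:
    size = struct.unpack('>I', frame[:4])[0]
    assert size == len(payload) + 1 and byte_ord(frame[4]) == CMD_INIT
    return frame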
| [
"[email protected]"
] | |
cffe50dd97a932ebc3250fdbf9a6349b509f3431 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/powerbidedicated/azure-mgmt-powerbidedicated/generated_samples/create_auto_scale_vcore.py | 67ad65f12c53cbe937f4d46b56b858ef521762f8 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,885 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.powerbidedicated import PowerBIDedicated
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-powerbidedicated
# USAGE
python create_auto_scale_vcore.py
Before running the sample, set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get these values, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = PowerBIDedicated(
credential=DefaultAzureCredential(),
subscription_id="613192d7-503f-477a-9cfe-4efc3ee2bd60",
)
response = client.auto_scale_vcores.create(
resource_group_name="TestRG",
vcore_name="testvcore",
v_core_parameters={
"location": "West US",
"properties": {"capacityLimit": 10, "capacityObjectId": "a28f00bd-5330-4572-88f1-fa883e074785"},
"sku": {"capacity": 0, "name": "AutoScale", "tier": "AutoScale"},
"tags": {"testKey": "testValue"},
},
)
print(response)
# x-ms-original-file: specification/powerbidedicated/resource-manager/Microsoft.PowerBIdedicated/stable/2021-01-01/examples/createAutoScaleVCore.json
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
3739f0b84f86b376235cade51f9071d7d62c8b18 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /eraBhwF8HkJDAa2pS_7.py | 4d28a63e1772e975610e4ed6a58348f01d38c9a7 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,759 | py | """
A group of pirates each have a distribution of gold coins, which can be
represented as a list:
[3, 9, 4, 5, 5]
# Pirate 1 has 3 gold, Pirate 2 has 9 gold, etc.
The difference between each pirate's share of gold and that of the richest
pirate is represented as:
[6, 0, 5, 4, 4]
# Since 6 = 9 - 3, 0 = 9 - 9, 4 = 9 - 5, etc.
Pirates have a keen sense of fairness, and a pirate will kill the others if he
deems his share to be too little. Each pirate has a **unique inequality
threshold** \- the maximum difference he is willing to tolerate before he
kills his comrades.
Using the above gold distribution:
[5, 0, 5, 5, 5]
# Pirates killed, since 5 < 6.
# 5 is Pirate 1's inequality distribution and 6 is his gold difference.
[7, 0, 5, 5, 5]
# Pirate 1 is satisfied, since 7 > 6.
# All other pirates are satisfied as well.
Given a distribution of coins and a list of inequality thresholds, create a
function that returns `True` if any pirates are killed, or `False` otherwise.
### Examples
pirates_killed([3, 5, 8, 3, 4], [10, 4, 2, 5, 5]) ➞ False
pirates_killed([3, 5, 8, 3, 4], [10, 4, 2, 5, 1]) ➞ True
pirates_killed([3, 3, 10], [7, 7, 0]) ➞ False
pirates_killed([3, 3, 10], [6, 6, 0]) ➞ True
### Notes
    * A pirate kills if the difference in his share of gold from the richest pirate is **strictly greater** than his **inequality threshold**.
* Gold and inequality distribution lists are both ordered the same. (e.g. Pirate 1 is index 0 for both lists, Pirate 2 is index 1 for both lists, etc).
"""
def pirates_killed(gold, tolerance):
m=max(gold)
a=len(gold)
for i in range(0,a):
if(m-gold[i]>tolerance[i]):
return True
return False
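
# Quick sanity checks mirroring the docstring examples (an illustrative harness
# added here; not part of the original exercise file):
if __name__ == '__main__':
    assert pirates_killed([3, 5, 8, 3, 4], [10, 4, 2, 5, 5]) is False
    assert pirates_killed([3, 5, 8, 3, 4], [10, 4, 2, 5, 1]) is True
    assert pirates_killed([3, 3, 10], [7, 7, 0]) is False
    assert pirates_killed([3, 3, 10], [6, 6, 0]) is True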
| [
"[email protected]"
] | |
412354dd10998bf72bc427f1882857eed5d7345a | b4b95c1e3ab70b332596bda4f008cd077fda6b66 | /week4/day4/hero_project/hero_project/settings.py | 21c5ca234a53139ed25c0c958d84d8c99e7dc311 | [] | no_license | johnjdailey/wf-2020-11-15 | 863952f0267a90191d0cf36869d870746530545f | 2f4f6789914be8f5ae2e107c8b2d3762c8330b01 | refs/heads/main | 2023-03-20T19:14:31.403918 | 2021-02-24T08:08:10 | 2021-02-24T08:08:10 | 364,556,774 | 1 | 0 | null | 2021-05-05T11:40:07 | 2021-05-05T11:40:07 | null | UTF-8 | Python | false | false | 3,123 | py | """
Django settings for hero_project project.
Generated by 'django-admin startproject' using Django 3.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=iyixn3&mdrg+x5#s$*79s8nm428p(rfo#kyf6nn82@=lh1&a6'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'power_app',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'hero_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'hero_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
1a7a445a0183da5b57d25a858acc8af2e8320d0d | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02833/s059211372.py | ed4bbbaccfabc03d67edc5bcf26aebe9e1659cf8 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 316 | py | n = input()
m = len(n)
n = int(n)
if n%2==1:
print(0)
else:
if m==1:
print(0)
else:
ans = 0
i = 1
while True:
ans_plus=n//(2*5**i)
if ans_plus==0:
break
ans += ans_plus
i += 1
print(ans) | [
"[email protected]"
] | |
ee7358a3297e1b86ffe16f03447bbb5fa211b7df | b99b32fb0b4597bee94809ebd3b2ddae43064bee | /landmark_detection/menpofit/math/correlationfilter.py | 2b80d9e1b419f80ff46311e072543bc34e8aa2a5 | [] | no_license | HongwenZhang/ECT-FaceAlignment | c0129dc2aa20bc2bdba03a9ed1cabebcd5e5d848 | e94b446db73fca5ba751d6d9a81d42633208f228 | refs/heads/master | 2023-01-29T14:25:19.502350 | 2020-12-13T09:18:55 | 2020-12-13T09:18:55 | 111,511,579 | 31 | 19 | null | null | null | null | UTF-8 | Python | false | false | 14,354 | py | import numpy as np
from numpy.fft import fft2, ifft2, ifftshift
from scipy.sparse import spdiags, eye as speye
from scipy.sparse.linalg import spsolve
from menpofit.math.fft_utils import pad, crop
def mosse(X, y, l=0.01, boundary='constant', crop_filter=True):
r"""
Minimum Output Sum of Squared Errors (MOSSE) filter.
Parameters
----------
X : ``(n_images, n_channels, image_h, image_w)`` `ndarray`
The training images.
y : ``(1, response_h, response_w)`` `ndarray`
The desired response.
l : `float`, optional
Regularization parameter.
boundary : ``{'constant', 'symmetric'}``, optional
Determines how the image is padded.
crop_filter : `bool`, optional
If ``True``, the shape of the MOSSE filter is the same as the shape
of the desired response. If ``False``, the filter's shape is equal to:
``X[0].shape + y.shape - 1``
Returns
-------
f : ``(1, response_h, response_w)`` `ndarray`
        Minimum Output Sum of Squared Errors (MOSSE) filter associated with
        the training images.
sXY : ``(N,)`` `ndarray`
The auto-correlation array, where
``N = (image_h+response_h-1) * (image_w+response_w-1) * n_channels``.
sXX : ``(N, N)`` `ndarray`
The cross-correlation array, where
``N = (image_h+response_h-1) * (image_w+response_w-1) * n_channels``.
References
----------
.. [1] D. S. Bolme, J. R. Beveridge, B. A. Draper, and Y. M. Lui. "Visual
Object Tracking using Adaptive Correlation Filters", IEEE Proceedings
of International Conference on Computer Vision and Pattern Recognition
(CVPR), 2010.
"""
# number of images, number of channels, height and width
n, k, hx, wx = X.shape
# height and width of desired responses
_, hy, wy = y.shape
y_shape = (hy, wy)
# extended shape
ext_h = hx + hy - 1
ext_w = wx + wy - 1
ext_shape = (ext_h, ext_w)
# extend desired response
ext_y = pad(y, ext_shape)
# fft of extended desired response
fft_ext_y = fft2(ext_y)
# auto and cross spectral energy matrices
sXX = 0
sXY = 0
# for each training image and desired response
for x in X:
# extend image
ext_x = pad(x, ext_shape, boundary=boundary)
# fft of extended image
fft_ext_x = fft2(ext_x)
# update auto and cross spectral energy matrices
sXX += fft_ext_x.conj() * fft_ext_x
sXY += fft_ext_x.conj() * fft_ext_y
# compute desired correlation filter
fft_ext_f = sXY / (sXX + l)
# reshape extended filter to extended image shape
fft_ext_f = fft_ext_f.reshape((k, ext_h, ext_w))
# compute extended filter inverse fft
f = np.real(ifftshift(ifft2(fft_ext_f), axes=(-2, -1)))
if crop_filter:
# crop extended filter to match desired response shape
f = crop(f, y_shape)
return f, sXY, sXX
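
# Illustrative usage sketch (added; not in the original module, shapes made up):
# with crop_filter=True (the default) the filter comes back the size of the
# desired response.
def _demo_mosse():
    X = np.random.randn(8, 1, 32, 32)   # 8 single-channel 32x32 training images
    y = np.zeros((1, 9, 9))
    y[0, 4, 4] = 1.0                    # desired peak response
    f, sXY, sXX = mosse(X, y, l=0.01)
    assert f.shape == (1, 9, 9)
    return f, sXY, sXX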
def imosse(A, B, n_ab, X, y, l=0.01, boundary='constant',
crop_filter=True, f=1.0):
r"""
Incremental Minimum Output Sum of Squared Errors (iMOSSE) filter.
Parameters
----------
A : ``(N,)`` `ndarray`
The current auto-correlation array, where
``N = (patch_h+response_h-1) * (patch_w+response_w-1) * n_channels``.
B : ``(N, N)`` `ndarray`
The current cross-correlation array, where
``N = (patch_h+response_h-1) * (patch_w+response_w-1) * n_channels``.
n_ab : `int`
The current number of images.
X : ``(n_images, n_channels, image_h, image_w)`` `ndarray`
The training images (patches).
y : ``(1, response_h, response_w)`` `ndarray`
The desired response.
l : `float`, optional
Regularization parameter.
boundary : ``{'constant', 'symmetric'}``, optional
Determines how the image is padded.
crop_filter : `bool`, optional
If ``True``, the shape of the MOSSE filter is the same as the shape
of the desired response. If ``False``, the filter's shape is equal to:
``X[0].shape + y.shape - 1``
f : ``[0, 1]`` `float`, optional
Forgetting factor that weights the relative contribution of new
samples vs old samples. If ``1.0``, all samples are weighted equally.
If ``<1.0``, more emphasis is put on the new samples.
Returns
-------
f : ``(1, response_h, response_w)`` `ndarray`
        Minimum Output Sum of Squared Errors (MOSSE) filter associated with
        the training images.
sXY : ``(N,)`` `ndarray`
The auto-correlation array, where
``N = (image_h+response_h-1) * (image_w+response_w-1) * n_channels``.
sXX : ``(N, N)`` `ndarray`
The cross-correlation array, where
``N = (image_h+response_h-1) * (image_w+response_w-1) * n_channels``.
References
----------
.. [1] D. S. Bolme, J. R. Beveridge, B. A. Draper, and Y. M. Lui. "Visual
Object Tracking using Adaptive Correlation Filters", IEEE Proceedings
of International Conference on Computer Vision and Pattern Recognition
(CVPR), 2010.
"""
# number of images; number of channels, height and width
n_x, k, hz, wz = X.shape
# height and width of desired responses
_, hy, wy = y.shape
y_shape = (hy, wy)
# multiply the number of samples used to produce the auto and cross
# spectral energy matrices A and B by forgetting factor
n_ab *= f
# total number of samples
n = n_ab + n_x
# compute weighting factors
nu_ab = n_ab / n
nu_x = n_x / n
# extended shape
ext_h = hz + hy - 1
ext_w = wz + wy - 1
ext_shape = (ext_h, ext_w)
# extend desired response
ext_y = pad(y, ext_shape)
# fft of extended desired response
fft_ext_y = fft2(ext_y)
# extend images
ext_X = pad(X, ext_shape, boundary=boundary)
# auto and cross spectral energy matrices
sXX = 0
sXY = 0
# for each training image and desired response
for ext_x in ext_X:
# fft of extended image
fft_ext_x = fft2(ext_x)
# update auto and cross spectral energy matrices
sXX += fft_ext_x.conj() * fft_ext_x
sXY += fft_ext_x.conj() * fft_ext_y
# combine old and new auto and cross spectral energy matrices
sXY = nu_ab * A + nu_x * sXY
sXX = nu_ab * B + nu_x * sXX
# compute desired correlation filter
fft_ext_f = sXY / (sXX + l)
# reshape extended filter to extended image shape
fft_ext_f = fft_ext_f.reshape((k, ext_h, ext_w))
# compute filter inverse fft
f = np.real(ifftshift(ifft2(fft_ext_f), axes=(-2, -1)))
if crop_filter:
# crop extended filter to match desired response shape
f = crop(f, y_shape)
return f, sXY, sXX
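
# Illustrative incremental-update sketch (added; not in the original module):
# fit a first batch with mosse(), then fold in new samples with imosse(),
# reusing the returned sXY/sXX spectral statistics; f < 1 down-weights old data.
def _demo_imosse():
    y = np.zeros((1, 9, 9))
    y[0, 4, 4] = 1.0
    X_old = np.random.randn(8, 1, 32, 32)
    X_new = np.random.randn(4, 1, 32, 32)
    _, A, B = mosse(X_old, y)
    f1, A, B = imosse(A, B, n_ab=len(X_old), X=X_new, y=y, f=0.9)
    return f1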
def mccf(X, y, l=0.01, boundary='constant', crop_filter=True):
r"""
Multi-Channel Correlation Filter (MCCF).
Parameters
----------
X : ``(n_images, n_channels, image_h, image_w)`` `ndarray`
The training images.
y : ``(1, response_h, response_w)`` `ndarray`
The desired response.
l : `float`, optional
Regularization parameter.
boundary : ``{'constant', 'symmetric'}``, optional
Determines how the image is padded.
crop_filter : `bool`, optional
        If ``True``, the shape of the MCCF filter is the same as the shape
of the desired response. If ``False``, the filter's shape is equal to:
``X[0].shape + y.shape - 1``
Returns
-------
f : ``(1, response_h, response_w)`` `ndarray`
        Multi-Channel Correlation Filter (MCCF) associated with the
        training images.
sXY : ``(N,)`` `ndarray`
The auto-correlation array, where
``N = (image_h+response_h-1) * (image_w+response_w-1) * n_channels``.
sXX : ``(N, N)`` `ndarray`
The cross-correlation array, where
``N = (image_h+response_h-1) * (image_w+response_w-1) * n_channels``.
References
----------
.. [1] H. K. Galoogahi, T. Sim, and Simon Lucey. "Multi-Channel
Correlation Filters". IEEE Proceedings of International Conference on
Computer Vision (ICCV), 2013.
"""
# number of images; number of channels, height and width
n, k, hx, wx = X.shape
# height and width of desired responses
_, hy, wy = y.shape
y_shape = (hy, wy)
# extended shape
ext_h = hx + hy - 1
ext_w = wx + wy - 1
ext_shape = (ext_h, ext_w)
# extended dimensionality
ext_d = ext_h * ext_w
# extend desired response
ext_y = pad(y, ext_shape)
# fft of extended desired response
fft_ext_y = fft2(ext_y)
# extend images
ext_X = pad(X, ext_shape, boundary=boundary)
# auto and cross spectral energy matrices
sXX = 0
sXY = 0
# for each training image and desired response
for ext_x in ext_X:
# fft of extended image
fft_ext_x = fft2(ext_x)
# store extended image fft as sparse diagonal matrix
diag_fft_x = spdiags(fft_ext_x.reshape((k, -1)),
-np.arange(0, k) * ext_d, ext_d * k, ext_d).T
# vectorize extended desired response fft
diag_fft_y = fft_ext_y.ravel()
# update auto and cross spectral energy matrices
sXX += diag_fft_x.conj().T.dot(diag_fft_x)
sXY += diag_fft_x.conj().T.dot(diag_fft_y)
# solve ext_d independent k x k linear systems (with regularization)
# to obtain desired extended multi-channel correlation filter
fft_ext_f = spsolve(sXX + l * speye(sXX.shape[-1]), sXY)
# reshape extended filter to extended image shape
fft_ext_f = fft_ext_f.reshape((k, ext_h, ext_w))
# compute filter inverse fft
f = np.real(ifftshift(ifft2(fft_ext_f), axes=(-2, -1)))
if crop_filter:
# crop extended filter to match desired response shape
f = crop(f, y_shape)
return f, sXY, sXX
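
# Illustrative multi-channel sketch (added; not in the original module): the
# call pattern matches mosse(), but each image has several channels and a
# k x k system is solved per frequency bin, so f has one slice per channel.
def _demo_mccf():
    X = np.random.randn(8, 3, 32, 32)   # 3-channel training images
    y = np.zeros((1, 9, 9))
    y[0, 4, 4] = 1.0
    f, sXY, sXX = mccf(X, y, l=0.01)
    assert f.shape == (3, 9, 9)
    return f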
def imccf(A, B, n_ab, X, y, l=0.01, boundary='constant', crop_filter=True,
f=1.0):
r"""
Incremental Multi-Channel Correlation Filter (MCCF)
Parameters
----------
A : ``(N,)`` `ndarray`
The current auto-correlation array, where
``N = (patch_h+response_h-1) * (patch_w+response_w-1) * n_channels``.
B : ``(N, N)`` `ndarray`
The current cross-correlation array, where
``N = (patch_h+response_h-1) * (patch_w+response_w-1) * n_channels``.
n_ab : `int`
The current number of images.
X : ``(n_images, n_channels, image_h, image_w)`` `ndarray`
The training images (patches).
y : ``(1, response_h, response_w)`` `ndarray`
The desired response.
l : `float`, optional
Regularization parameter.
boundary : ``{'constant', 'symmetric'}``, optional
Determines how the image is padded.
crop_filter : `bool`, optional
        If ``True``, the shape of the MCCF filter is the same as the shape
of the desired response. If ``False``, the filter's shape is equal to:
``X[0].shape + y.shape - 1``
f : ``[0, 1]`` `float`, optional
Forgetting factor that weights the relative contribution of new
samples vs old samples. If ``1.0``, all samples are weighted equally.
If ``<1.0``, more emphasis is put on the new samples.
Returns
-------
f : ``(1, response_h, response_w)`` `ndarray`
        Multi-Channel Correlation Filter (MCCF) associated with the
        training images.
sXY : ``(N,)`` `ndarray`
The auto-correlation array, where
``N = (image_h+response_h-1) * (image_w+response_w-1) * n_channels``.
sXX : ``(N, N)`` `ndarray`
The cross-correlation array, where
``N = (image_h+response_h-1) * (image_w+response_w-1) * n_channels``.
References
----------
.. [1] D. S. Bolme, J. R. Beveridge, B. A. Draper, and Y. M. Lui. "Visual
Object Tracking using Adaptive Correlation Filters", IEEE Proceedings
of International Conference on Computer Vision and Pattern Recognition
(CVPR), 2010.
.. [2] H. K. Galoogahi, T. Sim, and Simon Lucey. "Multi-Channel
Correlation Filters". IEEE Proceedings of International Conference on
Computer Vision (ICCV), 2013.
"""
# number of images; number of channels, height and width
n_x, k, hz, wz = X.shape
# height and width of desired responses
_, hy, wy = y.shape
y_shape = (hy, wy)
# multiply the number of samples used to produce the auto and cross
# spectral energy matrices A and B by forgetting factor
n_ab *= f
# total number of samples
n = n_ab + n_x
# compute weighting factors
nu_ab = n_ab / n
nu_x = n_x / n
# extended shape
ext_h = hz + hy - 1
ext_w = wz + wy - 1
ext_shape = (ext_h, ext_w)
# extended dimensionality
ext_d = ext_h * ext_w
# extend desired response
ext_y = pad(y, ext_shape)
# fft of extended desired response
fft_ext_y = fft2(ext_y)
# extend images
ext_X = pad(X, ext_shape, boundary=boundary)
# auto and cross spectral energy matrices
sXX = 0
sXY = 0
# for each training image and desired response
for ext_x in ext_X:
# fft of extended image
fft_ext_x = fft2(ext_x)
# store extended image fft as sparse diagonal matrix
diag_fft_x = spdiags(fft_ext_x.reshape((k, -1)),
-np.arange(0, k) * ext_d, ext_d * k, ext_d).T
# vectorize extended desired response fft
diag_fft_y = fft_ext_y.ravel()
# update auto and cross spectral energy matrices
sXX += diag_fft_x.conj().T.dot(diag_fft_x)
sXY += diag_fft_x.conj().T.dot(diag_fft_y)
# combine old and new auto and cross spectral energy matrices
sXY = nu_ab * A + nu_x * sXY
sXX = nu_ab * B + nu_x * sXX
# solve ext_d independent k x k linear systems (with regularization)
# to obtain desired extended multi-channel correlation filter
fft_ext_f = spsolve(sXX + l * speye(sXX.shape[-1]), sXY)
# reshape extended filter to extended image shape
fft_ext_f = fft_ext_f.reshape((k, ext_h, ext_w))
# compute filter inverse fft
f = np.real(ifftshift(ifft2(fft_ext_f), axes=(-2, -1)))
if crop_filter:
# crop extended filter to match desired response shape
f = crop(f, y_shape)
return f, sXY, sXX
| [
"[email protected]"
] | |
896dfb5f50363046d705420cf1d4635764de6422 | b1c271e8c69c5dad361ddf4b9415a0818db061ea | /Brain-Tumor-Segmentation-3D-UNet-CNN-master/create_predictions.py | 7777d835a1742a3e1ce6a456d8f788c922f4ccab | [] | no_license | ssh6189/2020.05.12 | 105e9947d9a6f340120d90aae5edf7c90de44379 | a12fab4c477c54ee11a1b58fa207b548ad5e310d | refs/heads/master | 2022-07-23T13:07:18.240311 | 2020-05-14T12:34:19 | 2020-05-14T12:34:19 | 263,185,804 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,663 | py | import pickle
import numpy as np
import copy
import nibabel as nib
from tifffile import imsave
from unet_utils import crop_img, weights_dir, data_dir, pred_dir, DataGenerator, create_model
from keras.models import load_model
from keras import backend as K
from keras.engine import Model
K.tensorflow_backend.set_image_dim_ordering('tf')
K.set_image_data_format('channels_first')
# doesn't look like I kept using these:
# from libtiff import TIFF
# from skimage.io._plugins import freeimage_plugin as fi
def predict_unet(num_outputs, load_weights_filepath):
train_val_test_dict = pickle.load(open( "train_val_test_dict.pkl", "rb" ) ) # this has the test/train ID matches
# pickle.dump( results.history, open( weights_dir / f"history_{num_outputs}_pred.pkl", "wb" ) )
# This isn't an ideal way to do things.
# I need to find a simpler way around the issue of having custom layers (ValueError: Unknown layer: InstanceNormalization)
model = create_model(input_shape=(4, 160, 192, 160),
n_base_filters=12,
depth=5,
dropout_rate=0.3,
n_segmentation_levels=3,
n_labels=3,
num_outputs=num_outputs,
optimizer='adam',
learning_rate=1e-2,
activation_name="sigmoid")
model.load_weights(load_weights_filepath, by_name=True) # by_name=True allows you to use a different architecture and bring in the weights from the matching layers
# Turned shuffle off so that we can match the values in the dictionary to the predictions.
# This way we can compare the predictions side-by-side with the ground truth.
params = {'dim': (160,192,160),
'batch_size': 1,
'n_classes': 3,
'n_channels': 4,
'shuffle': False,
'num_outputs': num_outputs}
validation_generator = DataGenerator(train_val_test_dict['test'], **params)
# # load model
# model = load_model(model_path, custom_objects={'InstanceNormalization':unet_utils.InstanceNormalization})
# keras_contrib.layers.normalization.instancenormalization import InstanceNormalization
predictions = model.predict_generator(generator=validation_generator)
for i, prediction in enumerate(predictions):
pickle.dump( prediction, open( pred_dir / f"predictions_{num_outputs}_pred_{i}.pkl", "wb" ) )
def create_tiffs_from_predictions(num_outputs):
train_val_test_dict = pickle.load(open( "train_val_test_dict.pkl", "rb" ) ) # this has the test/train ID matches
# access the test list:
testIDlist = train_val_test_dict['test']
# ID = testIDlist[i]
for i in range(len(testIDlist)):
# for i in range(2):
print("current image:", i)
ID = testIDlist[i]
img1 = data_dir / f'{ID}_flair.nii.gz'
img2 = data_dir / f'{ID}_t1.nii.gz'
img3 = data_dir / f'{ID}_t1ce.nii.gz'
img4 = data_dir / f'{ID}_t2.nii.gz'
img5 = data_dir / f'{ID}_seg.nii.gz'
img_list = [str(x) for x in [img1, img2, img3, img4, img5]]
newimage = nib.concat_images(img_list)
# # nibabel uses .lower on the filepath, which requires the filepath to be string, not the posixpath type from pathlib
# newimage = nib.concat_images([img1, img2, img3, img4, img5])
cropped = crop_img(newimage)
img_array = np.array(cropped.dataobj)
z = np.rollaxis(img_array, 3, 0)
padded_image = np.zeros((5, 160, 192, 160))
padded_image[:z.shape[0], :z.shape[1], :z.shape[2], :z.shape[3]] = z
a, b, c, d, seg_mask = np.split(padded_image, 5, axis=0)
images = np.concatenate([a, b, c, d], axis=0)
# print("images shape:", images.shape, "images values:", np.unique(images.astype(int)))
# split the channels:
# seg_mask_1 = copy.deepcopy(seg_mask.astype(int))
seg_mask_1 = np.zeros((1, 160, 192, 160))
seg_mask_1[seg_mask.astype(int) > 0] = 1
seg_mask_2 = np.zeros((1, 160, 192, 160))
seg_mask_2[seg_mask.astype(int) > 1] = 1
seg_mask_3 = np.zeros((1, 160, 192, 160))
seg_mask_3[seg_mask.astype(int) > 2] = 1
seg_mask_3ch = np.concatenate(
[seg_mask_1, seg_mask_2, seg_mask_3], axis=0).astype(int)
# def scale_image(image_array):
# image_array = image_array.astype(float)
# image_array *= 255.0/image_array.max() # convert to 8-bit pixel values
# image_array = image_array.astype(int)
# return image_array
# img_array_list = [a,seg_mask_1,seg_mask_2,seg_mask_3]
# for img_array in img_array_list:
# img_array = scale_image(img_array)
a = a.astype(float)
a *= 255.0/a.max() # convert to 8-bit pixel values
a = np.rollaxis(a, 0, 2) # cxyz -> xycz for imagej
a = np.rollaxis(a, 0, 3) # switching x and z
a = a.astype('uint8')
# print("unique flair values:", np.unique(a))
seg_mask_1 = seg_mask_1.astype(float)
seg_mask_1 *= 255.0/seg_mask_1.max() # convert to 8-bit pixel values
seg_mask_1 = np.rollaxis(seg_mask_1, 0, 2) # cxyz -> xycz for imagej
seg_mask_1 = np.rollaxis(seg_mask_1, 0, 3) # switching x and z
seg_mask_1 = seg_mask_1.astype('uint8')
# print("unique segment mask values:", np.unique(seg_mask_1))
seg_mask_2 = seg_mask_2.astype(float)
seg_mask_2 *= 255.0/seg_mask_2.max() # convert to 8-bit pixel values
seg_mask_2 = np.rollaxis(seg_mask_2, 0, 2) # cxyz -> xycz for imagej
seg_mask_2 = np.rollaxis(seg_mask_2, 0, 3) # switching x and z
seg_mask_2 = seg_mask_2.astype('uint8')
seg_mask_3 = seg_mask_3.astype(float)
seg_mask_3 *= 255.0/seg_mask_3.max() # convert to 8-bit pixel values
seg_mask_3 = np.rollaxis(seg_mask_3, 0, 2) # cxyz -> xycz for imagej
seg_mask_3 = np.rollaxis(seg_mask_3, 0, 3) # switching x and z
seg_mask_3 = seg_mask_3.astype('uint8')
# ground_truth = np.concatenate(
# [a, seg_mask_1, seg_mask_2, seg_mask_3], axis=0).astype('uint8')
# print("unique flair + segment mask values:", np.unique(ground_truth))
# shape.ground_truth
# flairimg = flairimg.astype(float)
# flairimg *= 255.0/flairimg.max() # convert to 8-bit pixel values
# flairimg = flairimg.astype(int)
# print(np.unique(flairimg))
# print("final image shape:", ground_truth.shape)
# imsave("./channel_split/"+testIDlist[i]+"ground_truth.tif", ground_truth, 'imagej')
imsave(pred_dir / f"{ID}_flair.tif", a, 'imagej')
imsave(pred_dir / f"{ID}_ground_truth_1.tif", seg_mask_1, 'imagej')
imsave(pred_dir / f"{ID}_ground_truth_2.tif", seg_mask_2, 'imagej')
imsave(pred_dir / f"{ID}_ground_truth_3.tif", seg_mask_3, 'imagej')
imarray = pickle.load(open( pred_dir / f"predictions_{num_outputs}_pred_{i}.pkl", "rb" ) )
prediction_thresh = copy.deepcopy(imarray)
prediction_thresh[prediction_thresh < 0.5] = 0.
prediction_thresh[prediction_thresh >= 0.5] = 1.
prediction_thresh = prediction_thresh
print(np.unique(prediction_thresh))
prediction_thresh *= 255.0/prediction_thresh.max() # convert to 8-bit pixel values
prediction_thresh = prediction_thresh.astype('uint8')
prediction_thresh = np.rollaxis(prediction_thresh, 1, 3) # switching x and z; c will be taken care of in split
print(np.unique(prediction_thresh))
print(prediction_thresh.shape)
pred1, pred2, pred3 = np.split(prediction_thresh, 3, axis=0)
imsave(pred_dir / f"{ID}_predicted_1.tif", pred1, 'imagej')
imsave(pred_dir / f"{ID}_predicted_2.tif", pred2, 'imagej')
imsave(pred_dir / f"{ID}_predicted_3.tif", pred3, 'imagej')
# print("images shape:", images.shape, "images values:", np.unique(images.astype(int)))
# split the channels:
# seg_mask_1 = copy.deepcopy(seg_mask.astype(int))
# seg_mask_3ch = np.concatenate(
# [seg_mask_1, seg_mask_2, seg_mask_3], axis=0).astype(int)
# imarray *= 255.0/imarray.max()
# imsave("./channel_split/"+testIDlist[i]+"ground_truth_1.tif", seg_mask_1, 'imagej')
# imsave("./channel_split/"+testIDlist[i]+"ground_truth_2.tif", seg_mask_2, 'imagej')
# imsave("./channel_split/"+testIDlist[i]+"ground_truth_3.tif", seg_mask_3, 'imagej')
if __name__ == "__main__":
num_outputs = 1
predict_unet(num_outputs, './weights/model_weights_3_outputs.h5')
create_tiffs_from_predictions(num_outputs) | [
"[email protected]"
] | |
f76e2a9511e5d7f982695f3e4c56ae5960b336e7 | 771d7c30e9984eb07ba88f0feb2a52c0ee510549 | /备份/1804爬虫/爬虫文件/第三天 (1)/mzcookie_opener.py | c7d9dc7b5c5721f122a7802a30b94c735c19698d | [] | no_license | 1615961606/-test | 5eae5cab4e82136ecf8f4cbdb9990c3bb9e4839f | 81a822d303a07310dafe2af612f932d9f34503c3 | refs/heads/master | 2020-03-19T01:07:01.851584 | 2019-04-08T10:54:35 | 2019-04-08T10:54:35 | 135,523,902 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 739 | py | #有时候我们需要将获取到的cookie保存在本地文件中,
# 我们需要使用到MozillaCookieJar
from http import cookiejar
from urllib import request
#1设置一个文件名,将cookie保存在这个文件下
filename = 'cookie.txt'
#2.创建一个cookiejar对象,用来管理和存储cookie
mz_cookiejar = cookiejar.MozillaCookieJar(filename)
#3.创建一个HTTPCookieprocessor处理器对象,管理cookiejar
handler = request.HTTPCookieProcessor(mz_cookiejar)
#自定义一个opener
opener = request.build_opener(handler)
#使用opener对象发起请求
req = request.Request('http://www.baidu.com/')
response = opener.open(req)
print(response.status)
# Use the save method to save the cookies
mz_cookiejar.save() | [
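
# Illustrative follow-up (added; assumes cookie.txt was written above). Note
# that session cookies are only persisted if save() is given
# ignore_discard=True. Reloading and reusing the saved cookies would look like:
#   load_jar = cookiejar.MozillaCookieJar()
#   load_jar.load('cookie.txt', ignore_discard=True, ignore_expires=True)
#   reuse_opener = request.build_opener(request.HTTPCookieProcessor(load_jar))
#   print(reuse_opener.open('http://www.baidu.com/').status)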
"[email protected]"
] | |
0e26073101df751e8d715c82ccaac37a322f3dc8 | 43ec1c06825a39c31b976906f7bded21c19b6019 | /google/cloud/talent_v4beta1/services/company_service/transports/grpc_asyncio.py | 32721d7b8b24d7c2b9346e0eda6240b2ea168341 | [
"Apache-2.0"
] | permissive | renovate-bot/python-talent | b70b036c9d5a6b2887ef428fca853940df067c4b | 0413f5114256b8a8c2c157b33aa0cbc1eb9feca5 | refs/heads/master | 2023-06-07T05:27:59.879356 | 2021-08-30T16:12:17 | 2021-08-30T16:12:17 | 238,039,237 | 0 | 0 | Apache-2.0 | 2020-02-03T19:01:22 | 2020-02-03T19:01:22 | null | UTF-8 | Python | false | false | 16,345 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.talent_v4beta1.types import company
from google.cloud.talent_v4beta1.types import company as gct_company
from google.cloud.talent_v4beta1.types import company_service
from google.protobuf import empty_pb2 # type: ignore
from .base import CompanyServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import CompanyServiceGrpcTransport
class CompanyServiceGrpcAsyncIOTransport(CompanyServiceTransport):
"""gRPC AsyncIO backend transport for CompanyService.
A service that handles company management, including CRUD and
enumeration.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "jobs.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "jobs.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def create_company(
self,
) -> Callable[
[company_service.CreateCompanyRequest], Awaitable[gct_company.Company]
]:
r"""Return a callable for the create company method over gRPC.
Creates a new company entity.
Returns:
Callable[[~.CreateCompanyRequest],
Awaitable[~.Company]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_company" not in self._stubs:
self._stubs["create_company"] = self.grpc_channel.unary_unary(
"/google.cloud.talent.v4beta1.CompanyService/CreateCompany",
request_serializer=company_service.CreateCompanyRequest.serialize,
response_deserializer=gct_company.Company.deserialize,
)
return self._stubs["create_company"]
@property
def get_company(
self,
) -> Callable[[company_service.GetCompanyRequest], Awaitable[company.Company]]:
r"""Return a callable for the get company method over gRPC.
Retrieves specified company.
Returns:
Callable[[~.GetCompanyRequest],
Awaitable[~.Company]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_company" not in self._stubs:
self._stubs["get_company"] = self.grpc_channel.unary_unary(
"/google.cloud.talent.v4beta1.CompanyService/GetCompany",
request_serializer=company_service.GetCompanyRequest.serialize,
response_deserializer=company.Company.deserialize,
)
return self._stubs["get_company"]
@property
def update_company(
self,
) -> Callable[
[company_service.UpdateCompanyRequest], Awaitable[gct_company.Company]
]:
r"""Return a callable for the update company method over gRPC.
Updates specified company.
Returns:
Callable[[~.UpdateCompanyRequest],
Awaitable[~.Company]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_company" not in self._stubs:
self._stubs["update_company"] = self.grpc_channel.unary_unary(
"/google.cloud.talent.v4beta1.CompanyService/UpdateCompany",
request_serializer=company_service.UpdateCompanyRequest.serialize,
response_deserializer=gct_company.Company.deserialize,
)
return self._stubs["update_company"]
@property
def delete_company(
self,
) -> Callable[[company_service.DeleteCompanyRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete company method over gRPC.
Deletes specified company.
Prerequisite: The company has no jobs associated with
it.
Returns:
Callable[[~.DeleteCompanyRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_company" not in self._stubs:
self._stubs["delete_company"] = self.grpc_channel.unary_unary(
"/google.cloud.talent.v4beta1.CompanyService/DeleteCompany",
request_serializer=company_service.DeleteCompanyRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_company"]
@property
def list_companies(
self,
) -> Callable[
[company_service.ListCompaniesRequest],
Awaitable[company_service.ListCompaniesResponse],
]:
r"""Return a callable for the list companies method over gRPC.
Lists all companies associated with the project.
Returns:
Callable[[~.ListCompaniesRequest],
Awaitable[~.ListCompaniesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_companies" not in self._stubs:
self._stubs["list_companies"] = self.grpc_channel.unary_unary(
"/google.cloud.talent.v4beta1.CompanyService/ListCompanies",
request_serializer=company_service.ListCompaniesRequest.serialize,
response_deserializer=company_service.ListCompaniesResponse.deserialize,
)
return self._stubs["list_companies"]
__all__ = ("CompanyServiceGrpcAsyncIOTransport",)
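
# Illustrative usage sketch (added; not generated code). The async client
# normally builds this transport itself, but an instance can be injected:
#
#   from google.cloud.talent_v4beta1.services.company_service import (
#       CompanyServiceAsyncClient,
#   )
#   transport = CompanyServiceGrpcAsyncIOTransport(host="jobs.googleapis.com")
#   client = CompanyServiceAsyncClient(transport=transport)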
| [
"[email protected]"
] | |
d76568dec057241b650414033a7585fd422344b3 | 549317bc0a7230ec163914c75f75dd008900c57b | /pyroomacoustics/tests/test_room_is_insided.py | 3d49962456b8076f646ddc7bbaee2b7887245503 | [
"MIT"
] | permissive | oucxlw/pyroomacoustics | 0bb633427cd7ce3e93392cdc9d0bc3afc5f2dbf3 | 0adc91579c9c6daf1b73d2c4863a9fc66b308dbb | refs/heads/master | 2023-06-17T17:43:49.743201 | 2021-07-21T05:36:46 | 2021-07-21T05:36:46 | 288,884,904 | 1 | 0 | MIT | 2021-07-21T05:36:47 | 2020-08-20T02:22:54 | Python | UTF-8 | Python | false | false | 2,248 | py |
import numpy as np
import pyroomacoustics as pra
def test_room_is_inside():
# fix the seed for repeatable testing
np.random.seed(0)
# This was a problematic case
# if the source is placed at the same height as one of the corners
# the test would fail, even though the source is in the room
floorplan = [ [0, 6, 6, 2, 0],
[0, 0, 5, 5, 3] ]
source_loc = [ 2, 3 ] # same y-coordinate as the corner at [0, 3]
room = pra.Room.from_corners(floorplan)
room.add_source(source_loc)
for i in range(100):
# because the test is randomized, let's check many times
assert room.is_inside([0,0], include_borders=True)
assert not room.is_inside([0,0], include_borders=False)
assert room.is_inside([3,0], include_borders=True)
assert not room.is_inside([3,0], include_borders=False)
assert room.is_inside([1,4], include_borders=True)
assert not room.is_inside([1,4], include_borders=False)
assert room.is_inside([0,1], include_borders=True)
assert not room.is_inside([0,1], include_borders=False)
assert not room.is_inside([0.5,4], include_borders=False)
# now test in 3D
room.extrude(4.)
for i in range(100):
# because the test is randomized, let's check many times
assert room.is_inside([2, 3, 1.7])
assert not room.is_inside([0.5, 4, 1.8])
assert not room.is_inside([0.5, 4, 1.8])
assert room.is_inside([0,0,0], include_borders=True)
assert not room.is_inside([0,0,0], include_borders=False)
assert room.is_inside([3,0,0], include_borders=True)
assert not room.is_inside([3,0,0], include_borders=False)
assert room.is_inside([0,1,0], include_borders=True)
assert not room.is_inside([0,1,0], include_borders=False)
assert room.is_inside([3,2,0], include_borders=True)
assert not room.is_inside([3,2,0], include_borders=False)
assert room.is_inside([1,4,3], include_borders=True)
assert not room.is_inside([1,4,3], include_borders=False)
assert not room.is_inside([2,2,7])
assert not room.is_inside([2,2,-7])
if __name__ == '__main__':
test_room_is_inside()
| [
"[email protected]"
] | |
f832f81e49e1c70f75a87ad822f0a71408bcd878 | dc221edce0ad617aac3b9ad8f4f347ff84f56bf9 | /.history/env/sim_20200805143251.py | 242268c2da33aec8fc7e1b122fd06d49cdc6e028 | [] | no_license | zlm05170/cacontroller | 310014c83ecf130643230eba87990e635fe1575f | e76d2eb5d58d6adfe7823e0dcd0059027c52b6bc | refs/heads/master | 2022-12-21T08:05:58.315017 | 2020-09-23T11:45:07 | 2020-09-23T11:45:07 | 284,527,141 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | class Sim():
def __init__(self):
self.running = True
self.actor_list = []
def isRunning(self):
return self.running
def update(self):
for actor in self.actor_list:
actor.update()
    def add_actor(self, actor):
        # register an actor so it is updated on every tick
        self.actor_list.append(actor)
sim = Sim()
while sim.isRunning():
sim.update() | [
"[email protected]"
] | |
e35f76e578a9be895bc6fe5bd17d24525b6c2d83 | 5ae01ab82fcdedbdd70707b825313c40fb373fa3 | /scripts/charonInterpreter/parsers/MaterialBlock/IncompleteIonizationDonor/charonLineParserIonizationEnergy.py | 2ef6d351a57e960f50d56e19dfefba626502aca9 | [] | no_license | worthenmanufacturing/tcad-charon | efc19f770252656ecf0850e7bc4e78fa4d62cf9e | 37f103306952a08d0e769767fe9391716246a83d | refs/heads/main | 2023-08-23T02:39:38.472864 | 2021-10-29T20:15:15 | 2021-10-29T20:15:15 | 488,068,897 | 0 | 0 | null | 2022-05-03T03:44:45 | 2022-05-03T03:44:45 | null | UTF-8 | Python | false | false | 5,602 | py |
from __future__ import print_function
import copy
class charonLineParserIonizationEnergy:
"IonizationEnergy parser"
def __init__(self):
# Register the parsing keys
self.parserName = "IonizationEnergy"
self.parsingKey = "ionization energy"
self.parsingKeyOptional = []
self.interpreterHelpLine = "ionization energy = {valueIonizationEnergy} "
self.interpreterQuickHelp = "Specify incomplete ionization energy for donor dopant type."
self.interpreterLongHelp = "Specify incomplete ionization energy for donor dopant type."
# Register the xml required lines
self.xmlRequiredLines = []
self.xmlRequiredLinePriority = []
self.xmlRequiredLines.append("Charon->Closure Models->{MaterialBlockName}->Incomplete Ionized Donor->Model,Ionization Energy,double,{valueIonizationEnergy}")
self.xmlRequiredLinePriority.append(2)
self.xmlNewRequiredLines = []
# Register the xml required arguments and their indexes
self.xmlRequiredArgument = []
self.xmlRequiredArgument.append("{valueIonizationEnergy}")
self.xmlRequiredArgumentIndexes = []
self.xmlRequiredArgumentIndexes.append("3")
# Register the xml optional lines
self.xmlOptionalLines = [[]]
self.xmlOptionalLinePriority = [[]]
# Register the xml optional arguments and their indexes
self.xmlOptionalArgument = []
self.xmlOptionalArgumentIndexes = []
# Register the xml default lines
self.xmlDefaultLines = []
self.xmlDefaultLinePriority = []
self.xmlReturned = []
self.xmlPriorityCode = []
def isThisMe(self,tokenizer,line):
# Tokenize the line
lineTokens = tokenizer.tokenize(line)
# Tokenize the parsing key
parsingTokens = self.parsingKey.split()
returnType = True
for itoken in range(len(parsingTokens)):
if itoken+1 > len(lineTokens):
return False
if lineTokens[itoken].lower() != parsingTokens[itoken].lower():
returnType = False
return returnType
def getName(self):
# Return parser name
return self.parserName
def getHelp(self,verbosity):
# Return help content
if verbosity.lower() == "long":
return (self.interpreterHelpLine,self.interpreterLongHelp)
else:
return (self.interpreterHelpLine,self.interpreterQuickHelp)
def generateXML(self,tokenizer,line):
# Tokenize the line
lineTokens = tokenizer.tokenize(line)
self.xmlNewRequiredLines[:] = []
for xL in self.xmlRequiredLines:
self.xmlNewRequiredLines.append(xL)
for ipar in range(len(self.xmlRequiredArgument)):
line.replace(self.xmlRequiredArgument[ipar],lineTokens[int(self.xmlRequiredArgumentIndexes[ipar])])
for iRLine in range(len(self.xmlRequiredLines)):
self.xmlNewRequiredLines[iRLine]=self.xmlNewRequiredLines[iRLine].replace(self.xmlRequiredArgument[ipar],lineTokens[int(self.xmlRequiredArgumentIndexes[ipar])])
for index,xmlLine in enumerate(self.xmlNewRequiredLines):
self.xmlReturned.append(xmlLine)
self.xmlPriorityCode.append(self.xmlRequiredLinePriority[index]) #required lines have priority code 2
# Look over input line to see if any options are called out.
optCounter = 0
optIndex = 0
for optKey in self.parsingKeyOptional:
# Tokenize the opt keys
foundOptionalKey = False
optKeyTokens = optKey.split()
for iLT in range(len(lineTokens)):
if lineTokens[iLT].lower() == optKeyTokens[0]:
if len(optKeyTokens) == 1:
optIndex = iLT
foundOptionalKey = True
else:
for iPK in range(len(optKeyTokens)-1):
optIndex = iLT
if iLT+iPK+1 > len(lineTokens)-1:
continue
if optKeyTokens[iPK+1] == lineTokens[iLT+iPK+1].lower():
if iPK+2 == len(optKeyTokens):
foundOptionalKey = True
else:
continue
#Found the key, now create the xml line
if foundOptionalKey == True:
self.Returned=copy.deepcopy(self.xmlOptionalLines[optCounter])
for iopt in range(len(self.xmlOptionalLines[optCounter])):
for ipar in range(len(self.xmlOptionalArgument[optCounter])):
self.Returned[iopt] = self.Returned[iopt].replace(self.xmlOptionalArgument[optCounter][ipar],lineTokens[optIndex+int(self.xmlOptionalArgumentIndexes[optCounter][ipar])])
for ipar in range(len(self.xmlRequiredArgument)):
self.Returned[iopt] = self.Returned[iopt].replace(self.xmlRequiredArgument[ipar],lineTokens[int(self.xmlRequiredArgumentIndexes[ipar])])
self.xmlReturned.append(self.Returned[iopt])
self.xmlPriorityCode.append(2) #optional lines have priority code 2
optCounter += 1
for xmlLine in self.xmlDefaultLines:
self.xmlReturned.append(xmlLine)
self.xmlPriorityCode.append(1) #optional lines have priority code 1
return (self.xmlReturned,self.xmlPriorityCode)
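
# Illustrative sketch (added; not generated code): this parser recognizes an
# interpreter line of the form shown in interpreterHelpLine, e.g.
#
#   parser = charonLineParserIonizationEnergy()
#   # with a hypothetical whitespace tokenizer, "ionization energy = 0.045"
#   # splits into ["ionization", "energy", "=", "0.045"], so isThisMe() returns
#   # True and generateXML() substitutes token 3 for {valueIonizationEnergy}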
| [
"[email protected]"
] | |
a50c8907cabee913bacdeff6e3fb16fbd0d147ca | 47b49ee4d14254cea00a839123fe5d68f0938959 | /notifierlib/channels/jabber.py | 109b7df750b2db4e45884d0d3509212e97fcd8d8 | [
"MIT"
] | permissive | wefner/notifierlib | 35d8b9c754803821462e647239bfd0be564c0a40 | 0eeec7aef278f66262b1dceab296b5f115e372c3 | refs/heads/master | 2021-06-30T22:27:06.147554 | 2017-09-19T13:14:37 | 2017-09-19T13:14:37 | 104,245,027 | 0 | 0 | null | 2017-09-20T17:12:26 | 2017-09-20T17:12:26 | null | UTF-8 | Python | false | false | 6,372 | py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# File: jabber.py
import sleekxmpp
import logging
from notifierlib.notifierlib import Channel
__author__ = '''Costas Tyfoxylos <[email protected]>, Argiris Gounaris <[email protected]>'''
__docformat__ = 'plaintext'
__date__ = '''19-09-2017'''
class XmppClient(sleekxmpp.ClientXMPP):
"""A basic SleekXMPP bot, logs in, sends message, logs out."""
def __init__(self,
user_id,
password,
recipient,
message,
server,
port,
tls=False,
ssl=True,
reattempt=False):
super(XmppClient, self).__init__(user_id, password)
self._logger = logging.getLogger(self.__class__.__name__)
self.recipient = recipient
self.message = message
self.server = server
self.port = port
self.tls = tls
self.ssl = ssl
self.reattempt = reattempt
self.add_event_handler('session_start', self.start)
self.register_plugin('xep_0030') # Service Discovery
self.register_plugin('xep_0199') # XMPP Ping
# Connect to the XMPP server and start processing XMPP stanzas.
if not self.connect((self.server, self.port),
use_tls=self.tls,
use_ssl=self.ssl,
reattempt=self.reattempt):
message = ('Could not connect to '
'{server}:{port}').format(server=self.server,
port=self.port)
self._logger.error(message)
raise SyntaxError(message)
self.process(block=True)
def start(self, event):
_ = event # noqa
self.send_message(mto=self.recipient,
mbody=self.message,
mtype='chat')
self.disconnect(wait=True)
class XmppGroupClient(sleekxmpp.ClientXMPP):
"""A basic SleekXMPP bot, logs in, sends message, logs out."""
def __init__(self,
user_id,
password,
room,
nickname,
message,
server,
port,
room_password=None,
tls=False,
ssl=True,
reattempt=False):
super(XmppGroupClient, self).__init__(user_id, password)
self._logger = logging.getLogger(self.__class__.__name__)
self.room = room
self.room_password = room_password
self.nickname = nickname
self.message = message
self.server = server
self.port = port
self.tls = tls
self.ssl = ssl
self.reattempt = reattempt
self.add_event_handler('session_start', self.start)
self.register_plugin('xep_0030') # Service Discovery
self.register_plugin('xep_0045') # Multi-User Chat
self.register_plugin('xep_0199') # XMPP Ping
# Connect to the XMPP server and start processing XMPP stanzas.
if not self.connect((self.server, self.port),
use_tls=self.tls,
use_ssl=self.ssl,
reattempt=self.reattempt):
message = ('Could not connect to '
'{server}:{port}').format(server=self.server,
port=self.port)
self._logger.error(message)
raise SyntaxError(message)
self.process(block=True)
def start(self, event):
_ = event # noqa
self.plugin['xep_0045'].joinMUC(self.room,
self.nickname,
# If a room password is needed, use:
password=self.room_password,
wait=True)
self.send_message(mto=self.room,
mbody=self.message,
mtype='groupchat')
self.disconnect(wait=True)
class Jabber(Channel):
def __init__(self,
name,
user_id,
password,
recipient_id,
server,
port,
tls=False,
ssl=True,
reattempt=False):
super(Jabber, self).__init__(name)
self.user = user_id
self.password = password
self.server = server
self.recipient = recipient_id
self.port = port
self.tls = tls
self.ssl = ssl
self.reattempt = reattempt
def notify(self, **kwargs):
message = kwargs.get('message')
_ = XmppClient(self.user, # noqa
self.password,
self.recipient,
message,
self.server,
self.port,
self.tls,
self.ssl,
self.reattempt)
return True
class JabberGroup(Channel):
def __init__(self,
name,
user_id,
password,
room,
nickname,
server,
port,
room_password=None,
tls=False,
ssl=True,
reattempt=False):
super(JabberGroup, self).__init__(name)
self.user = user_id
self.password = password
self.nickname = nickname
self.room = room
self.room_password = room_password
self.server = server
self.port = port
self.tls = tls
self.ssl = ssl
self.reattempt = reattempt
def notify(self, **kwargs):
message = kwargs.get('message')
_ = XmppGroupClient(self.user, # noqa
self.password,
self.room,
self.nickname,
message,
self.server,
self.port,
self.room_password,
self.tls,
self.ssl,
self.reattempt)
return True
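# Usage sketch: a minimal send through the Jabber channel. This assumes a
# reachable XMPP server; the host and credentials are placeholders, not values
# shipped with this project.
if __name__ == '__main__':
    channel = Jabber(name='alerts',
                     user_id='[email protected]',
                     password='secret',
                     recipient_id='[email protected]',
                     server='xmpp.example.com',
                     port=5222)
    channel.notify(message='hello from notifierlib')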
| [
"[email protected]"
] | |
d8458f6e25f605602bc2ada1071b2c5365b26943 | 54c08823016949aa23ff1d372cf70778e6f88758 | /raylab/policy/model_based/sampling_mixin.py | bea9c129c44620da8d861320dda315db82a141cb | [
"MIT"
] | permissive | jCrompton/raylab | b4d41c446bc4d8d9ea42ebfdfad59c61956bfe98 | 9773d6fb942c06c65fe5297a8275f86649966abd | refs/heads/master | 2022-11-05T21:59:06.704075 | 2020-06-08T15:36:20 | 2020-06-08T15:36:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,338 | py | """Environment model handling mixins for TorchPolicy."""
from dataclasses import dataclass
from typing import List
import numpy as np
import torch
from dataclasses_json import DataClassJsonMixin
from numpy.random import Generator
from ray.rllib import SampleBatch
from torch.nn import Module
@dataclass(frozen=True)
class SamplingSpec(DataClassJsonMixin):
"""Specifications for sampling from the model.
Attributes:
num_elites: Use this number of best performing models to sample
transitions
        rollout_length: Length of model-based rollouts from each initial
state extracted from input sample batch
"""
num_elites: int = 1
rollout_length: int = 1
def __post_init__(self):
assert self.num_elites > 0, "Must have at least one elite model to sample from"
assert (
self.rollout_length > 0
), "Length of model-based rollouts must be positive"
class ModelSamplingMixin:
"""Adds model sampling behavior to a TorchPolicy class.
Expects:
* A `self.reward_fn` callable that computes the reward tensors for a batch
of transitions
* A `self.termination_fn` callable that computes the termination tensors for
a batch of transitions
* A `models` attribute in `self.module`
* A `self.config` dict attribute
* A `model_sampling` dict in `self.config`
* A `seed` int in `self.config`
Attributes:
model_sampling_spec: Specifications for model training and sampling
elite_models: Sequence of the `num_elites` best models sorted by
performance. Initially set using the policy's model order.
rng: Random number generator for choosing from the elite models for
sampling.
"""
model_sampling_spec: SamplingSpec
elite_models: List[Module]
rng: Generator
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.model_sampling_spec = SamplingSpec.from_dict(self.config["model_sampling"])
models = self.module.models
num_elites = self.model_sampling_spec.num_elites
assert num_elites <= len(models), "Cannot have more elites than models"
self.elite_models = list(models[:num_elites])
self.rng = np.random.default_rng(self.config["seed"])
def setup_sampling_models(self, losses: List[float]):
"""Update the elite models based on model losses.
Args:
losses: list of model losses following the order of the ensemble
"""
models = self.module.models
        # ascending argsort on loss puts the best-performing models first
        self.elite_models = [models[i] for i in np.argsort(losses)]
@torch.no_grad()
def generate_virtual_sample_batch(self, samples: SampleBatch) -> SampleBatch:
"""Rollout model with latest policy.
Produces samples for populating the virtual buffer, hence no gradient
information is retained.
If a transition is terminal, the next transition, if any, is generated from
the initial state passed through `samples`.
Args:
samples: the transitions to extract initial states from
Returns:
A batch of transitions sampled from the model
"""
virtual_samples = []
obs = init_obs = self.convert_to_tensor(samples[SampleBatch.CUR_OBS])
for _ in range(self.model_sampling_spec.rollout_length):
model = self.rng.choice(self.elite_models)
action, _ = self.module.actor.sample(obs)
next_obs, _ = model.sample(obs, action)
reward = self.reward_fn(obs, action, next_obs)
done = self.termination_fn(obs, action, next_obs)
transition = {
SampleBatch.CUR_OBS: obs,
SampleBatch.ACTIONS: action,
SampleBatch.NEXT_OBS: next_obs,
SampleBatch.REWARDS: reward,
SampleBatch.DONES: done,
}
virtual_samples += [
SampleBatch({k: v.numpy() for k, v in transition.items()})
]
obs = torch.where(done.unsqueeze(-1), init_obs, next_obs)
return SampleBatch.concat_samples(virtual_samples)
@staticmethod
def model_sampling_defaults():
"""The default configuration dict for model sampling."""
return SamplingSpec().to_dict()
| [
"[email protected]"
] | |
88c81310c694092a2f288b858519b4dc9d54fdca | 0b38b3d237044b605a519e1aadb298e254c96a6a | /app.py | 98eaa7437e240c19d9f07fb20498e32121e58693 | [] | no_license | imapex/boilerplate | 100355bec3414bd0874fc47e5ff8cdad4464b054 | 810a2de2ceb6120dd57a64324c6b8581113d348f | refs/heads/master | 2023-02-03T19:54:25.074233 | 2019-08-22T15:13:21 | 2019-08-22T15:13:21 | 64,156,013 | 2 | 4 | null | 2023-02-02T06:13:08 | 2016-07-25T17:53:14 | HTML | UTF-8 | Python | false | false | 862 | py | from flask import Flask, render_template, request
from flask_restful import Api
from views.topology import topology
from views.dashboard import dashboard
from views.device import device_list
from views.patterns import patterns
from api.device import Device
from api.topology import Topology
app = Flask(__name__)
api = Api(app)
@app.route('/')
def index():
return render_template('index.html')
api.add_resource(Device, '/api/device')
api.add_resource(Topology, '/api/topology')
app.add_url_rule('/topology', endpoint='topology-view', view_func=topology)
app.add_url_rule('/device', endpoint='device-list', view_func=device_list)
app.add_url_rule('/dashboard', endpoint='dashboard', view_func=dashboard)
app.add_url_rule('/patterns', endpoint='patterns', view_func=patterns)
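# Smoke-test sketch for the JSON endpoints using Flask's built-in test client
# (payloads depend on the Device/Topology resources, so only status is shown):
#   client = app.test_client()
#   print(client.get('/api/topology').status_code)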
if __name__ == '__main__':
app.run(host='127.0.0.1', port=5000, debug=True)
| [
"[email protected]"
] | |
84442f858cf23a277156002041062239855c2fd2 | b8fe7c1769a274964d289d05c1594f2d9f702560 | /devel/lib/python2.7/dist-packages/mavros_msgs/msg/_Param.py | f381bb69fd314372e108aa5a3242a0ff31759278 | [] | no_license | amilearning/mavros_ws | f3f19c98562cb6fab00e3735b5798dc8fc65652f | b822676abb0a36fe34f7e4064a162db0ddd75fe5 | refs/heads/master | 2023-06-30T08:27:45.134723 | 2021-07-16T06:52:35 | 2021-07-16T06:52:35 | 386,538,954 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py | /home/hojin/drone_ws/ethz_ws/mavros_ws/devel/.private/mavros_msgs/lib/python2.7/dist-packages/mavros_msgs/msg/_Param.py | [
"[email protected]"
] | |
7d41b30ed78b8b2b0274008e2dd827b812ad3c4b | 69e6c93e5d9cc0ad3fcc8d595aff95c71609f13e | /tests/test_nh_32bit.py | 2fe18a2a1d465780ee3c2bc08a8405deba44d4fa | [
"GPL-2.0-only",
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | atsgen/tf-vrouter | 2d27e233774bc5116e964c403f3332df7633afc7 | c95daa24744bdeb4839f63ebd057552f18404171 | refs/heads/master | 2023-02-16T15:37:58.986288 | 2021-01-15T06:45:23 | 2021-01-15T06:45:23 | 290,211,517 | 0 | 0 | BSD-2-Clause | 2020-08-25T12:40:00 | 2020-08-25T12:39:59 | null | UTF-8 | Python | false | false | 3,288 | py | #!/usr/bin/python
import os
import sys
sys.path.append(os.getcwd())
sys.path.append(os.getcwd() + '/lib/')
from imports import * # noqa
# pytest collects anything matching *test* as a test
class TestNh32(unittest.TestCase):
@classmethod
def setup_class(cls):
ObjectBase.setUpClass()
ObjectBase.set_auto_features(cleanup=True)
@classmethod
def teardown_class(cls):
ObjectBase.tearDownClass()
def setup_method(self, method):
ObjectBase.setUp(method)
def teardown_method(self, method):
ObjectBase.tearDown()
    # test case: add and delete a nexthop with nh id > 65k
def test1_nh32(self):
# Add a Vif interface
vif = VirtualVif(
name="tap_1",
ipv4_str="1.1.1.10",
mac_str="de:ad:be:ef:00:02",
idx=1,
flags=None,
nh_idx=494949,
ipv6_str="571:3896:c427:3738:30c4:fd9f:720e:fefe")
vif.sync()
# Query the vif back
vif_get = VirtualVif(
name="tap_1",
ipv4_str="1.1.1.10",
mac_str="fe:ad:be:ef:00:02",
idx=1,
flags=None,
h_op=constants.SANDESH_OPER_GET)
vif_get.sync()
self.assertEqual(494949, vif_get.get_vif_nh_id())
# Add NH
encap_nh = EncapNextHop(
encap_oif_id=vif.idx(),
encap="de ad be ef 01 02 de ad be ef 00 01 08 00",
nh_idx=490496,
nh_family=constants.AF_BRIDGE)
encap_nh.sync()
# Get the same NH back
nh_get = EncapNextHop(
encap_oif_id=vif.idx(),
encap=None,
nh_idx=490496,
nh_family=constants.AF_BRIDGE,
h_op=constants.SANDESH_OPER_GET)
nh_get.sync()
self.assertEqual(490496, nh_get.get_nh_idx())
self.assertEqual(constants.AF_BRIDGE, nh_get.get_nh_family())
self.assertEqual(constants.NH_TYPE_ENCAP, nh_get.get_nh_type())
    # test case: add and delete a flow whose nexthops have nh id > 65k
def test2_nh32(self):
# Add vif - 10.1.1.1
vif1 = VirtualVif(
name="tap_1",
ipv4_str="10.1.1.1",
mac_str="de:ad:be:ef:00:02",
idx=1,
nh_idx=494949,
flags=None)
vif1.sync()
# Add 2nd vif - 10.1.1.2
vif2 = VirtualVif(
name="tap_2",
ipv4_str="10.1.1.2",
mac_str="ed:da:eb:fe:00:03",
nh_idx=474747,
flags=None,
idx=2)
vif2.sync()
# Add NH
encap_nh = EncapNextHop(
encap_oif_id=vif2.idx(),
encap="de ad be ef 01 02 de ad be ef 00 01 08 00",
nh_idx=474747,
nh_family=constants.AF_BRIDGE)
encap_nh.sync()
# Add route which points to the NH
rt = BridgeRoute(
vrf=0,
mac_str="de:ad:be:ef:02:02",
nh_idx=474747)
rt.sync()
# Add flow
flow = InetFlow(
sip='1.1.1.1',
dip='2.2.2.2',
sport=31,
dport=31,
proto=17,
action=2,
src_nh_idx=494949,
flow_nh_idx=594949)
flow.sync(resp_required=True)
flow.delete()
| [
"[email protected]"
] | |
37b53d420486df7030ee6727a9bb4e5d031b98de | d5463d1efdf17941ca3fd79ef106a3db9be48fbc | /booktest/admin.py | 61b22a340409278043c70630021506ed840b0214 | [] | no_license | holyzhang1314/test11 | 8d81a408f67bbd010c15429eecfe70ce82ab60c8 | 24f927a87d4e96a0a5632e73408c9c5abce5c8ab | refs/heads/master | 2020-03-19T00:13:35.110758 | 2018-05-30T15:39:18 | 2018-05-30T15:39:18 | 135,464,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | from django.contrib import admin
from booktest.models import BookInfo,HeroInfo
# Register your models here.
class BookInfoAdmin(admin.ModelAdmin):
    list_display = ['id', 'btitle', 'bpub_date']
class HeroInfoAdmin(admin.ModelAdmin):
    list_display = ['id', 'hname', 'hgender', 'hcomment']
admin.site.register(BookInfo, BookInfoAdmin)
admin.site.register(HeroInfo, HeroInfoAdmin)
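# Equivalent decorator-style registration (a sketch using Django's
# admin.register; behaviour matches the explicit calls above):
#   @admin.register(BookInfo)
#   class BookInfoAdmin(admin.ModelAdmin):
#       list_display = ['id', 'btitle', 'bpub_date']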
| [
"[email protected]"
] | |
fa3a1c7dc5841b9a1f6b5f0dc876c827ea15a115 | 9e84a433007ed4f6b2f9fc40f17a6fc5deb8603c | /frontbackend/Polaris/migrations_bak/0032_auto_20190514_1130.py | 5143de7c52d0c09775f5fb48da2b2e4d6aa5af9b | [] | no_license | wellwang1993/glbs | 8a654bcd2b5e06a823112b6f07f324753f8a1034 | a96cd8a949bfae06026c2b9f9fa2ec0230997932 | refs/heads/master | 2020-05-16T04:56:07.019399 | 2019-07-26T11:22:21 | 2019-07-26T11:22:21 | 182,794,117 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 425 | py | # Generated by Django 2.1.2 on 2019-05-14 11:30
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('Polaris', '0031_tb_fact_dnszone_info'),
]
operations = [
migrations.RemoveField(
model_name='tb_fact_view_info',
name='view_type',
),
migrations.DeleteModel(
name='tb_fact_view_info',
),
]
| [
"root@localhost"
] | root@localhost |
141cdf073eba87447e04afe683e7d304141d170d | 10920b11a22a20f9a7f63157818327f3c4e41888 | /Final_Project_BIR/Robot_arm/demo_002.py | 1a17f31ef610acdfaaa212131cad6479e3b77a34 | [] | no_license | dsall/computerv | e331b3d025c8cec0119b789107d1fef18d08f02a | 40671d618c31ad9d9b20fc902a218a8e281098bc | refs/heads/master | 2021-09-15T09:33:08.495580 | 2018-05-29T23:41:42 | 2018-05-29T23:41:42 | 135,363,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | import numpy
from arm import Arm
from board import Board
a = Arm.connect()
b = Board.connect(verbose=True)
while True:
    pot = b.get_pot()    # potentiometer reading, assumed normalised to [0, 1]
    pho = b.get_photo()  # photoresistor reading, assumed on the same 0-1 scale
fx = round(numpy.interp(pot, [0, 1], [190, 210]))
fy = round(numpy.interp(pot, [0, 1], [-200, 200]))
fz = round(numpy.interp(pot, [0, 0.5, 1], [50, 300, 50]))
print(pot, fx, fy, fz)
a.goto(fx, fy, fz, wait=True)
    if pho > 0.8: a.grab(False)  # open the gripper above the light threshold
    if pho < 0.8: a.grab(True)   # close it below; pho == 0.8 leaves the state unchanged
| [
"[email protected]"
] | |
c6aabf9bb04b6c5f151ac01e844bdefdc7b49cb2 | 6a2a4f97009e31e53340f1b4408e775f3051e498 | /Iniciante/p2139.py | 4fe3e0111f40176ba65920fe08df23ba24da9d0c | [] | no_license | rafacasa/OnlineJudgePythonCodes | 34c31f325cccb325f074492b40591ad880175816 | 030c18f9020898fdc4f672f9cc17723236e1271d | refs/heads/master | 2023-07-15T12:09:45.534873 | 2023-06-27T00:24:03 | 2023-06-27T00:24:03 | 250,595,994 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 609 | py | from datetime import datetime
while True:
try:
mes, dia = map(int, input().split())
data = datetime(day=dia, month=mes, year=2016)
natal = datetime(day=25, month=12, year=2016)
if data > natal:
print('Ja passou!')
continue
if data == natal:
print('E natal!')
continue
diferenca = natal - data
dias = diferenca.days
if dias == 1:
print('E vespera de natal!')
else:
print('Faltam {:d} dias para o natal!'.format(dias))
except EOFError:
break
| [
"[email protected]"
] | |
f85638ba486b3870c34ad06f582151b51107b7b2 | 56470dbd199578f73f9c5b676d19b4774960a68d | /src/CNVRepeat/main.py | af7442c4e6be8f0a2d80549918df51ca5b995614 | [
"MIT"
] | permissive | bioCKO/CNVRepeat | a646fa729db6f2b8cca718b0a1dc78b5b848b149 | e6b9b90599bf973f523a879ad66f836f82a45bf2 | refs/heads/master | 2021-03-12T01:26:51.315608 | 2017-09-15T22:56:20 | 2017-09-15T22:56:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,856 | py | #the package use modules from grocsvs (https://github.com/grocsvs/grocsvs) as basis for designing the pipeline.
from __future__ import print_function
import argparse
import collections
import json
import logging
import sys
import os
from CNVRepeat import log
from CNVRepeat import options as opts
from CNVRepeat import pipeline
from CNVRepeat import analysis
def load_config(config_path):
try:
config = json.load(open(config_path))
except ValueError as err:
print("Error parsing configuration file '{}': '{}'\n Check that this is a properly formatted JSON file!".format(config_path, err))
sys.exit(1)
options = opts.Options.deserialize(config, config_path)
return options
def run(options):
analysis_steps = prepare_analysis(options)
runner = pipeline.Runner(options)
print("Running")
for analysis_name, analysis_step in analysis_steps.items():
print ('Running analysis: "{}"'.format(analysis_name))
runner.run_stage(analysis_step, analysis_name)
def prepare_analysis(options):
analysis_steps = collections.OrderedDict()
if options.method == 'single_copy_exon':
if not os.path.exists(options.bed) or not os.path.splitext(options.bed)[1] == '.bed':
analysis_steps["Single Copy Exon"] = analysis.single_copy_exon.SingleCopyExonStep
analysis_steps["Genome Coverage Estimator"] = analysis.estimate_genome_coverage_bed.EstimateGenomeCoverageStep
analysis_steps["Genome Coverage Merger"] = analysis.estimate_genome_coverage_bed.CombineGenomeCoverageStep
    elif options.method == 'random_region':
if not os.path.exists(options.bed) or not os.path.splitext(options.bed)[1] == '.bed':
analysis_steps["Random Region"] = analysis.single_copy_exon.RandomRegionStep
analysis_steps["Genome Coverage Estimator"] = analysis.estimate_genome_coverage_bed.EstimateGenomeCoverageStep
analysis_steps["Genome Coverage Merger"] = analysis.estimate_genome_coverage_bed.CombineGenomeCoverageStep
elif options.method == 'goleft':
analysis_steps["Genome Coverage Estimator Goleft"] = analysis.estimate_genome_coverage_bed.EstimateGenomeCoverageGoleftStep
analysis_steps["Repaet Coverage Estimator"] = analysis.estimate_repeat_coverage.EstimateRepeatCoverageStep
analysis_steps["Repeat Copy Number"] = analysis.estimate_repeat_copy_number.EstimateRepeatCopyNumberStep
return analysis_steps
def main():
parser = argparse.ArgumentParser(description="CNVRepeat: estimate copy number of repeat sequence in the genome")
parser.add_argument("--config", help="Path to configuration.json file")
parser.add_argument("--local", action="store_true", help="run job locally in multiprocess mode")
parser.add_argument("--scheduler", help="run job using scheduler, SLURM, SGE, PBS/Torque")
parser.add_argument("--cpu", default=1, help="number of cpu")
parser.add_argument("--method", default='goleft', help="method for estimation of genome coverage: goleft, single_copy_exon, random_region")
parser.add_argument("--random_dna_length", default=1000, help="length of DNA for random selection of method random_region")
parser.add_argument("--random_dna_number", default=100000, help="number of DNA for random selection of method random_region")
parser.add_argument("--debug", action="store_true", help="run in debug mode")
args = parser.parse_args()
if len(sys.argv) <= 1:
parser.print_help()
sys.exit(1)
options = load_config(args.config)
options.debug = args.debug
options.method = args.method
options.random_dna_length = args.random_dna_length
options.random_dna_number = args.random_dna_number
log.log_command(options, sys.argv)
run(options)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
95de5df7a22b6a56135887f66ed343865118bf9f | ea2b40a2b2209db1c363833e33d77086e1a4b023 | /tests/robust/test_min_param_perturbation.py | aeed86f73b1fcb5e15fcf33cf1dd469ca4ea4944 | [
"BSD-3-Clause"
] | permissive | kopalgarg/captum | 3ecb8de09c2a0e0efa487c67638abb0bb7870d1f | 67a3ddcb627f008cf0c23df7b10bc50d75324efe | refs/heads/master | 2023-06-15T18:00:54.449011 | 2021-07-04T21:45:58 | 2021-07-04T21:46:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,092 | py | #!/usr/bin/env python3
from typing import List
import torch
from torch import Tensor
from captum.robust import MinParamPerturbation
from tests.helpers.basic import BaseTest, assertTensorAlmostEqual
from tests.helpers.basic_models import BasicModel, BasicModel_MultiLayer
def inp_subtract(inp: Tensor, ind: int = 0, add_arg: int = 0) -> Tensor:
inp_repeat = 1.0 * inp
inp_repeat[0][ind] -= add_arg
return inp_repeat
def add_char(inp: List[str], ind: int = 0, char_val: int = 0) -> List[str]:
list_copy = list(inp)
list_copy[ind] = chr(122 - char_val) + list_copy[ind]
return list_copy
def add_char_batch(inp: List[List[str]], ind: int, char_val: int) -> List[List[str]]:
return [add_char(elem, ind, char_val) for elem in inp]
def text_preproc_fn(inp: List[str]) -> Tensor:
return torch.tensor([float(ord(elem[0])) for elem in inp]).unsqueeze(0)
def batch_text_preproc_fn(inp: List[List[str]]) -> Tensor:
return torch.cat([text_preproc_fn(elem) for elem in inp])
def alt_correct_fn(model_out: Tensor, target: int, threshold: float) -> bool:
if all(model_out[:, target] > threshold):
return True
return False
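# The helpers above are wired into MinParamPerturbation roughly as follows
# (a sketch; the tests below show the exact arguments):
#   minimal_pert = MinParamPerturbation(forward_func=model, attack=add_char,
#                                       arg_name="char_val", arg_min=0,
#                                       arg_max=26, arg_step=1,
#                                       preproc_fn=text_preproc_fn,
#                                       apply_before_preproc=True)
#   target_inp, pert = minimal_pert.evaluate(text_inp, target=1,
#                                            attack_kwargs={"ind": 1})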
class Test(BaseTest):
def test_minimal_pert_basic_linear(self) -> None:
model = BasicModel()
inp = torch.tensor([[2.0, -9.0, 9.0, 1.0, -3.0]])
minimal_pert = MinParamPerturbation(
forward_func=lambda x: model(x)
+ torch.tensor([[0.000001, 0.0, 0.0, 0.0, 0.0]]),
attack=inp_subtract,
arg_name="add_arg",
arg_min=0.0,
arg_max=1000.0,
arg_step=1.0,
)
target_inp, pert = minimal_pert.evaluate(
inp, target=0, attack_kwargs={"ind": 0}
)
self.assertAlmostEqual(pert, 2.0)
assertTensorAlmostEqual(
self, target_inp, torch.tensor([[0.0, -9.0, 9.0, 1.0, -3.0]])
)
def test_minimal_pert_basic_binary(self) -> None:
model = BasicModel()
inp = torch.tensor([[2.0, -9.0, 9.0, 1.0, -3.0]])
minimal_pert = MinParamPerturbation(
forward_func=lambda x: model(x)
+ torch.tensor([[0.000001, 0.0, 0.0, 0.0, 0.0]]),
attack=inp_subtract,
arg_name="add_arg",
arg_min=0.0,
arg_max=1000.0,
arg_step=1.0,
mode="binary",
)
target_inp, pert = minimal_pert.evaluate(
inp,
target=0,
attack_kwargs={"ind": 0},
perturbations_per_eval=10,
)
self.assertAlmostEqual(pert, 2.0)
assertTensorAlmostEqual(
self, target_inp, torch.tensor([[0.0, -9.0, 9.0, 1.0, -3.0]])
)
def test_minimal_pert_preproc(self) -> None:
model = BasicModel_MultiLayer()
text_inp = ["abc", "zyd", "ghi"]
minimal_pert = MinParamPerturbation(
forward_func=model,
attack=add_char,
arg_name="char_val",
arg_min=0,
arg_max=26,
arg_step=1,
preproc_fn=text_preproc_fn,
apply_before_preproc=True,
)
target_inp, pert = minimal_pert.evaluate(
text_inp, target=1, attack_kwargs={"ind": 1}
)
self.assertEqual(pert, None)
self.assertEqual(target_inp, None)
def test_minimal_pert_alt_correct(self) -> None:
model = BasicModel_MultiLayer()
text_inp = ["abc", "zyd", "ghi"]
minimal_pert = MinParamPerturbation(
forward_func=model,
attack=add_char,
arg_name="char_val",
arg_min=0,
arg_max=26,
arg_step=1,
preproc_fn=text_preproc_fn,
apply_before_preproc=True,
correct_fn=alt_correct_fn,
num_attempts=5,
)
expected_list = ["abc", "ezyd", "ghi"]
target_inp, pert = minimal_pert.evaluate(
text_inp,
target=1,
attack_kwargs={"ind": 1},
correct_fn_kwargs={"threshold": 1200},
perturbations_per_eval=5,
)
self.assertEqual(pert, 21)
self.assertListEqual(target_inp, expected_list)
target_inp_single, pert_single = minimal_pert.evaluate(
text_inp,
target=1,
attack_kwargs={"ind": 1},
correct_fn_kwargs={"threshold": 1200},
)
self.assertEqual(pert_single, 21)
self.assertListEqual(target_inp_single, expected_list)
def test_minimal_pert_additional_forward_args(self) -> None:
model = BasicModel_MultiLayer()
text_inp = [["abc", "zyd", "ghi"], ["abc", "uyd", "ghi"]]
additional_forward_args = torch.ones((2, 3)) * -97
model = BasicModel_MultiLayer()
minimal_pert = MinParamPerturbation(
forward_func=model,
attack=add_char_batch,
arg_name="char_val",
arg_min=0,
arg_max=26,
arg_step=1,
preproc_fn=batch_text_preproc_fn,
apply_before_preproc=True,
correct_fn=alt_correct_fn,
)
expected_list = [["abc", "uzyd", "ghi"], ["abc", "uuyd", "ghi"]]
target_inp, pert = minimal_pert.evaluate(
text_inp,
target=1,
attack_kwargs={"ind": 1},
correct_fn_kwargs={"threshold": 100},
perturbations_per_eval=15,
additional_forward_args=(additional_forward_args,),
)
self.assertEqual(pert, 5)
self.assertListEqual(target_inp, expected_list)
target_inp_single, pert_single = minimal_pert.evaluate(
text_inp,
target=1,
attack_kwargs={"ind": 1},
correct_fn_kwargs={"threshold": 100},
additional_forward_args=(additional_forward_args,),
)
self.assertEqual(pert_single, 5)
self.assertListEqual(target_inp_single, expected_list)
def test_minimal_pert_tuple_test(self) -> None:
model = BasicModel_MultiLayer()
text_inp = (
[["abc", "zyd", "ghi"], ["abc", "uyd", "ghi"]],
torch.ones((2, 3)) * -97,
)
model = BasicModel_MultiLayer()
minimal_pert = MinParamPerturbation(
forward_func=lambda x: model(*x),
attack=lambda x, ind, char_val: (add_char_batch(x[0], ind, char_val), x[1]),
arg_name="char_val",
arg_min=0,
arg_max=26,
arg_step=1,
preproc_fn=lambda x: (batch_text_preproc_fn(x[0]), x[1]),
apply_before_preproc=True,
correct_fn=alt_correct_fn,
)
expected_list = [["abc", "uzyd", "ghi"], ["abc", "uuyd", "ghi"]]
target_inp, pert = minimal_pert.evaluate(
text_inp,
target=1,
attack_kwargs={"ind": 1},
correct_fn_kwargs={"threshold": 100},
perturbations_per_eval=15,
)
self.assertEqual(pert, 5)
self.assertListEqual(target_inp[0], expected_list)
| [
"[email protected]"
] | |
b7ab6b965d93d7f35c0afff80685ffb4a1e42305 | f115984d89ee91e1fefa7bd0546f60db251dfee6 | /herg-real-data/fit-full2vclinleak.py | 4acedcf938ceadc4884a65115c703c0a6b52626b | [
"BSD-3-Clause"
] | permissive | CardiacModelling/VoltageClampModel | f483fc3ad2129f75e377df210b9b91b1cdcb7565 | f30271da75e3c70526e53fb51dc12b317ab3b714 | refs/heads/master | 2023-07-05T10:07:59.771334 | 2021-03-03T11:05:35 | 2021-03-03T11:05:35 | 227,666,074 | 3 | 0 | BSD-3-Clause | 2021-03-03T11:04:46 | 2019-12-12T17:58:13 | Python | UTF-8 | Python | false | false | 8,270 | py | #!/usr/bin/env python2
from __future__ import print_function
import sys
sys.path.append('../lib/')
import os
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import pints
import model as m
from parameters import full2vc
from parameters import full2vc_fix, full2vc_fix_typical_values
from parameters import full2vc_typical_values
import parametertransform
from priors import IKrLogPrior, VoltageClampLogPrior
from protocols import leak_staircase as protocol_def
"""
Run fit for single experiment data
"""
try:
file_name = sys.argv[1]
cell = sys.argv[2]
except IndexError:
print('Usage: python %s [str:file_name]' % os.path.basename(__file__)
+ ' [str:cell_id] --optional [N_repeats]')
sys.exit()
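# Example invocation (the cell id format is a placeholder):
#   python fit-full2vclinleak.py herg25oc1 A01 3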
file_dir = '../data'
file_list = [
'herg25oc1',
'herg27oc1',
'herg27oc2',
'herg30oc1',
'herg30oc2',
'herg33oc1',
'herg33oc2',
'herg37oc3',
]
temperatures = [25.0, 27.0, 27.0, 30.0, 30.0, 33.0, 33.0, 37.0]
useFilterCap = False
if file_name not in file_list:
raise ValueError('Input `file_name` must be in `file_list`')
temperature = temperatures[file_list.index(file_name)]
savedir = './out/' + file_name + '-full2vclinleak'
if not os.path.isdir(savedir):
os.makedirs(savedir)
data_file_name = file_name + '-staircaseramp-' + cell + '.csv'
time_file_name = file_name + '-staircaseramp-times.csv'
print('Fitting to ', data_file_name)
print('Temperature: ', temperature)
saveas = data_file_name[:-4]
# Fitting seed: fixed here for reproducibility; uncomment the next line to
# draw a fresh random seed instead.
# fit_seed = np.random.randint(0, 2**30)
fit_seed = 542811797
print('Fit seed: ', fit_seed)
np.random.seed(fit_seed)
# Set parameter transformation
transform_to_ikr = parametertransform.log_transform_to_ikr
transform_from_ikr = parametertransform.log_transform_from_ikr
transform_to_vc = parametertransform.log_transform_to_vc
transform_from_vc = parametertransform.log_transform_from_vc
transform_to_leak = parametertransform.donothing
transform_from_leak = parametertransform.donothing
n_ikr_parameters = IKrLogPrior(None, None).n_parameters()
n_vc_parameters = VoltageClampLogPrior(None, None).n_parameters()
transform_to_ikr_vc = parametertransform.ComposeTransformation(
transform_to_ikr, transform_to_vc, n_ikr_parameters)
transform_from_ikr_vc = parametertransform.ComposeTransformation(
transform_from_ikr, transform_from_vc, n_ikr_parameters)
transform_to_model_param = parametertransform.ComposeTransformation(
transform_to_ikr_vc, transform_to_leak,
n_ikr_parameters + n_vc_parameters)
transform_from_model_param = parametertransform.ComposeTransformation(
transform_from_ikr_vc, transform_from_leak,
n_ikr_parameters + n_vc_parameters)
# Model
model = m.Model('../mmt-model-files/full2-voltage-clamp-ikr-linleak.mmt',
protocol_def=protocol_def,
temperature=273.15 + temperature, # K
transform=transform_to_model_param,
useFilterCap=useFilterCap) # ignore capacitive spike
model.set_parameters(full2vc + ['voltageclamp.gLeak'])
fix_p = {}
for i, j in zip(full2vc_fix, full2vc_fix_typical_values):
fix_p[i] = j
model.set_fix_parameters(fix_p)
# Load data
data = np.loadtxt(file_dir + '/' + data_file_name,
delimiter=',', skiprows=1) # headers
times = np.loadtxt(file_dir + '/' + time_file_name,
delimiter=',', skiprows=1) # headers
times = times * 1e3 # s -> ms
noise_sigma = np.std(data[:500])
print('Estimated noise level: ', noise_sigma)
if useFilterCap:
# Apply capacitance filter to data
data = data * model.cap_filter(times)
#
# Fit
#
# Create Pints stuffs
problem = pints.SingleOutputProblem(model, times, data)
loglikelihood = pints.GaussianKnownSigmaLogLikelihood(problem, noise_sigma)
ikr_prior = IKrLogPrior(transform_to_ikr, transform_from_ikr)
vc_prior = VoltageClampLogPrior(transform_to_vc, transform_from_vc)
ileak_prior = pints.UniformLogPrior([transform_from_leak(-1e3)],
[transform_from_leak(1e3)])
logprior = pints.ComposedLogPrior(ikr_prior, vc_prior, ileak_prior)
logposterior = pints.LogPosterior(loglikelihood, logprior)
# Check logposterior is working fine
priorparams = np.append(np.copy(full2vc_typical_values), 0)
transform_priorparams = transform_from_model_param(priorparams)
print('Score at prior parameters: ',
logposterior(transform_priorparams))
for _ in range(10):
assert(logposterior(transform_priorparams) ==\
logposterior(transform_priorparams))
# Run
try:
N = int(sys.argv[3])
except IndexError:
N = 3
params, logposteriors = [], []
for i in range(N):
for _ in range(100):
try:
if i == 0: # OK for real data
x0 = transform_priorparams
else:
# Randomly pick a starting point
x0 = logprior.sample()[0]
logposterior(x0)
except ValueError:
continue
break
print('Starting point: ', x0)
# Create optimiser
print('Starting logposterior: ', logposterior(x0))
opt = pints.OptimisationController(logposterior, x0.T, method=pints.CMAES)
opt.set_max_iterations(None)
opt.set_parallel(False)
# Run optimisation
try:
with np.errstate(all='ignore'):
# Tell numpy not to issue warnings
p, s = opt.run()
p = transform_to_model_param(p)
params.append(p)
logposteriors.append(s)
        print('Found solution vs prior parameters:')
for k, x in enumerate(p):
print(pints.strfloat(x) + ' ' + \
pints.strfloat(priorparams[k]))
except ValueError:
import traceback
traceback.print_exc()
#
# Done
#
# Order from best to worst
order = np.argsort(logposteriors)[::-1] # (use [::-1] for LL)
logposteriors = np.asarray(logposteriors)[order]
params = np.asarray(params)[order]
# Show results
bestn = min(3, N)
print('Best %d logposteriors:' % bestn)
for i in range(bestn):
print(logposteriors[i])
print('Mean & std of logposterior:')
print(np.mean(logposteriors))
print(np.std(logposteriors))
print('Worst logposterior:')
print(logposteriors[-1])
# Extract best 3
obtained_logposterior0 = logposteriors[0]
obtained_parameters0 = params[0]
obtained_logposterior1 = logposteriors[1]
obtained_parameters1 = params[1]
obtained_logposterior2 = logposteriors[2]
obtained_parameters2 = params[2]
# Show results
print('Found solution vs prior parameters:')
# Store output
with open('%s/%s-solution-%s.txt' % (savedir, saveas, fit_seed), 'w') as f:
for k, x in enumerate(obtained_parameters0):
print(pints.strfloat(x) + ' ' + pints.strfloat(priorparams[k]))
f.write(pints.strfloat(x) + '\n')
print('Found solution vs prior parameters:')
# Store output
with open('%s/%s-solution-%s-2.txt' % (savedir, saveas, fit_seed), 'w') as f:
for k, x in enumerate(obtained_parameters1):
print(pints.strfloat(x) + ' ' + pints.strfloat(priorparams[k]))
f.write(pints.strfloat(x) + '\n')
print('Found solution vs prior parameters:')
# Store output
with open('%s/%s-solution-%s-3.txt' % (savedir, saveas, fit_seed), 'w') as f:
for k, x in enumerate(obtained_parameters2):
print(pints.strfloat(x) + ' ' + pints.strfloat(priorparams[k]))
f.write(pints.strfloat(x) + '\n')
fig, axes = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
sol0 = problem.evaluate(transform_from_model_param(obtained_parameters0))
sol1 = problem.evaluate(transform_from_model_param(obtained_parameters1))
sol2 = problem.evaluate(transform_from_model_param(obtained_parameters2))
vol = model.voltage(times)
axes[0].plot(times, vol, c='#7f7f7f')
axes[0].set_ylabel('Voltage [mV]')
axes[1].plot(times, data, alpha=0.5, label='data')
axes[1].plot(times, sol0, label='found solution')
axes[1].plot(times, sol1, label='found solution')
axes[1].plot(times, sol2, label='found solution')
axes[1].legend()
axes[1].set_ylabel('Current [pA]')
axes[1].set_xlabel('Time [ms]')
plt.subplots_adjust(hspace=0)
plt.savefig('%s/%s-solution-%s.png' % (savedir, saveas, fit_seed), bbox_inches='tight')
plt.close()
| [
"[email protected]"
] | |
4517a2973ced5a7f7821ef8b9c39134321bf76b9 | 24e7e0dfaaeaca8f911b40fcc2937342a0f278fd | /venv/Lib/site-packages/pandas/tests/frame/methods/test_replace.py | 83dfd42ae2a6ee2db8dd44070761be1616e746d0 | [
"MIT"
] | permissive | BimiLevi/Covid19 | 90e234c639192d62bb87364ef96d6a46d8268fa0 | 5f07a9a4609383c02597373d76d6b6485d47936e | refs/heads/master | 2023-08-04T13:13:44.480700 | 2023-08-01T08:36:36 | 2023-08-01T08:36:36 | 288,455,446 | 1 | 0 | MIT | 2021-01-22T19:36:26 | 2020-08-18T12:53:43 | HTML | UTF-8 | Python | false | false | 58,773 | py | from datetime import datetime
from io import StringIO
import re
from typing import Dict, List, Union
import numpy as np
import pytest
import pandas as pd
from pandas import DataFrame, Index, Series, Timestamp, date_range
import pandas._testing as tm
@pytest.fixture
def mix_ab() -> Dict[str, List[Union[int, str]]]:
return {"a": list(range(4)), "b": list("ab..")}
@pytest.fixture
def mix_abc() -> Dict[str, List[Union[float, str]]]:
return {"a": list(range(4)), "b": list("ab.."), "c": ["a", "b", np.nan, "d"]}
class TestDataFrameReplace:
def test_replace_inplace(self, datetime_frame, float_string_frame):
datetime_frame["A"][:5] = np.nan
datetime_frame["A"][-5:] = np.nan
tsframe = datetime_frame.copy()
return_value = tsframe.replace(np.nan, 0, inplace=True)
assert return_value is None
tm.assert_frame_equal(tsframe, datetime_frame.fillna(0))
# mixed type
mf = float_string_frame
mf.iloc[5:20, mf.columns.get_loc("foo")] = np.nan
mf.iloc[-10:, mf.columns.get_loc("A")] = np.nan
result = float_string_frame.replace(np.nan, 0)
expected = float_string_frame.fillna(value=0)
tm.assert_frame_equal(result, expected)
tsframe = datetime_frame.copy()
return_value = tsframe.replace([np.nan], [0], inplace=True)
assert return_value is None
tm.assert_frame_equal(tsframe, datetime_frame.fillna(0))
def test_regex_replace_scalar(self, mix_ab):
obj = {"a": list("ab.."), "b": list("efgh")}
dfobj = DataFrame(obj)
dfmix = DataFrame(mix_ab)
# simplest cases
# regex -> value
# obj frame
res = dfobj.replace(r"\s*\.\s*", np.nan, regex=True)
tm.assert_frame_equal(dfobj, res.fillna("."))
# mixed
res = dfmix.replace(r"\s*\.\s*", np.nan, regex=True)
tm.assert_frame_equal(dfmix, res.fillna("."))
# regex -> regex
# obj frame
res = dfobj.replace(r"\s*(\.)\s*", r"\1\1\1", regex=True)
objc = obj.copy()
objc["a"] = ["a", "b", "...", "..."]
expec = DataFrame(objc)
tm.assert_frame_equal(res, expec)
# with mixed
res = dfmix.replace(r"\s*(\.)\s*", r"\1\1\1", regex=True)
mixc = mix_ab.copy()
mixc["b"] = ["a", "b", "...", "..."]
expec = DataFrame(mixc)
tm.assert_frame_equal(res, expec)
# everything with compiled regexs as well
res = dfobj.replace(re.compile(r"\s*\.\s*"), np.nan, regex=True)
tm.assert_frame_equal(dfobj, res.fillna("."))
# mixed
res = dfmix.replace(re.compile(r"\s*\.\s*"), np.nan, regex=True)
tm.assert_frame_equal(dfmix, res.fillna("."))
# regex -> regex
# obj frame
res = dfobj.replace(re.compile(r"\s*(\.)\s*"), r"\1\1\1")
objc = obj.copy()
objc["a"] = ["a", "b", "...", "..."]
expec = DataFrame(objc)
tm.assert_frame_equal(res, expec)
# with mixed
res = dfmix.replace(re.compile(r"\s*(\.)\s*"), r"\1\1\1")
mixc = mix_ab.copy()
mixc["b"] = ["a", "b", "...", "..."]
expec = DataFrame(mixc)
tm.assert_frame_equal(res, expec)
res = dfmix.replace(regex=re.compile(r"\s*(\.)\s*"), value=r"\1\1\1")
mixc = mix_ab.copy()
mixc["b"] = ["a", "b", "...", "..."]
expec = DataFrame(mixc)
tm.assert_frame_equal(res, expec)
res = dfmix.replace(regex=r"\s*(\.)\s*", value=r"\1\1\1")
mixc = mix_ab.copy()
mixc["b"] = ["a", "b", "...", "..."]
expec = DataFrame(mixc)
tm.assert_frame_equal(res, expec)
def test_regex_replace_scalar_inplace(self, mix_ab):
obj = {"a": list("ab.."), "b": list("efgh")}
dfobj = DataFrame(obj)
dfmix = DataFrame(mix_ab)
# simplest cases
# regex -> value
# obj frame
res = dfobj.copy()
return_value = res.replace(r"\s*\.\s*", np.nan, regex=True, inplace=True)
assert return_value is None
tm.assert_frame_equal(dfobj, res.fillna("."))
# mixed
res = dfmix.copy()
return_value = res.replace(r"\s*\.\s*", np.nan, regex=True, inplace=True)
assert return_value is None
tm.assert_frame_equal(dfmix, res.fillna("."))
# regex -> regex
# obj frame
res = dfobj.copy()
return_value = res.replace(r"\s*(\.)\s*", r"\1\1\1", regex=True, inplace=True)
assert return_value is None
objc = obj.copy()
objc["a"] = ["a", "b", "...", "..."]
expec = DataFrame(objc)
tm.assert_frame_equal(res, expec)
# with mixed
res = dfmix.copy()
return_value = res.replace(r"\s*(\.)\s*", r"\1\1\1", regex=True, inplace=True)
assert return_value is None
mixc = mix_ab.copy()
mixc["b"] = ["a", "b", "...", "..."]
expec = DataFrame(mixc)
tm.assert_frame_equal(res, expec)
# everything with compiled regexs as well
res = dfobj.copy()
return_value = res.replace(
re.compile(r"\s*\.\s*"), np.nan, regex=True, inplace=True
)
assert return_value is None
tm.assert_frame_equal(dfobj, res.fillna("."))
# mixed
res = dfmix.copy()
return_value = res.replace(
re.compile(r"\s*\.\s*"), np.nan, regex=True, inplace=True
)
assert return_value is None
tm.assert_frame_equal(dfmix, res.fillna("."))
# regex -> regex
# obj frame
res = dfobj.copy()
return_value = res.replace(
re.compile(r"\s*(\.)\s*"), r"\1\1\1", regex=True, inplace=True
)
assert return_value is None
objc = obj.copy()
objc["a"] = ["a", "b", "...", "..."]
expec = DataFrame(objc)
tm.assert_frame_equal(res, expec)
# with mixed
res = dfmix.copy()
return_value = res.replace(
re.compile(r"\s*(\.)\s*"), r"\1\1\1", regex=True, inplace=True
)
assert return_value is None
mixc = mix_ab.copy()
mixc["b"] = ["a", "b", "...", "..."]
expec = DataFrame(mixc)
tm.assert_frame_equal(res, expec)
res = dfobj.copy()
return_value = res.replace(regex=r"\s*\.\s*", value=np.nan, inplace=True)
assert return_value is None
tm.assert_frame_equal(dfobj, res.fillna("."))
# mixed
res = dfmix.copy()
return_value = res.replace(regex=r"\s*\.\s*", value=np.nan, inplace=True)
assert return_value is None
tm.assert_frame_equal(dfmix, res.fillna("."))
# regex -> regex
# obj frame
res = dfobj.copy()
return_value = res.replace(regex=r"\s*(\.)\s*", value=r"\1\1\1", inplace=True)
assert return_value is None
objc = obj.copy()
objc["a"] = ["a", "b", "...", "..."]
expec = DataFrame(objc)
tm.assert_frame_equal(res, expec)
# with mixed
res = dfmix.copy()
return_value = res.replace(regex=r"\s*(\.)\s*", value=r"\1\1\1", inplace=True)
assert return_value is None
mixc = mix_ab.copy()
mixc["b"] = ["a", "b", "...", "..."]
expec = DataFrame(mixc)
tm.assert_frame_equal(res, expec)
# everything with compiled regexs as well
res = dfobj.copy()
return_value = res.replace(
regex=re.compile(r"\s*\.\s*"), value=np.nan, inplace=True
)
assert return_value is None
tm.assert_frame_equal(dfobj, res.fillna("."))
# mixed
res = dfmix.copy()
return_value = res.replace(
regex=re.compile(r"\s*\.\s*"), value=np.nan, inplace=True
)
assert return_value is None
tm.assert_frame_equal(dfmix, res.fillna("."))
# regex -> regex
# obj frame
res = dfobj.copy()
return_value = res.replace(
regex=re.compile(r"\s*(\.)\s*"), value=r"\1\1\1", inplace=True
)
assert return_value is None
objc = obj.copy()
objc["a"] = ["a", "b", "...", "..."]
expec = DataFrame(objc)
tm.assert_frame_equal(res, expec)
# with mixed
res = dfmix.copy()
return_value = res.replace(
regex=re.compile(r"\s*(\.)\s*"), value=r"\1\1\1", inplace=True
)
assert return_value is None
mixc = mix_ab.copy()
mixc["b"] = ["a", "b", "...", "..."]
expec = DataFrame(mixc)
tm.assert_frame_equal(res, expec)
def test_regex_replace_list_obj(self):
obj = {"a": list("ab.."), "b": list("efgh"), "c": list("helo")}
dfobj = DataFrame(obj)
# lists of regexes and values
# list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
to_replace_res = [r"\s*\.\s*", r"e|f|g"]
values = [np.nan, "crap"]
res = dfobj.replace(to_replace_res, values, regex=True)
expec = DataFrame(
{
"a": ["a", "b", np.nan, np.nan],
"b": ["crap"] * 3 + ["h"],
"c": ["h", "crap", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
to_replace_res = [r"\s*(\.)\s*", r"(e|f|g)"]
values = [r"\1\1", r"\1_crap"]
res = dfobj.replace(to_replace_res, values, regex=True)
expec = DataFrame(
{
"a": ["a", "b", "..", ".."],
"b": ["e_crap", "f_crap", "g_crap", "h"],
"c": ["h", "e_crap", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
# or vN)]
to_replace_res = [r"\s*(\.)\s*", r"e"]
values = [r"\1\1", r"crap"]
res = dfobj.replace(to_replace_res, values, regex=True)
expec = DataFrame(
{
"a": ["a", "b", "..", ".."],
"b": ["crap", "f", "g", "h"],
"c": ["h", "crap", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
to_replace_res = [r"\s*(\.)\s*", r"e"]
values = [r"\1\1", r"crap"]
res = dfobj.replace(value=values, regex=to_replace_res)
expec = DataFrame(
{
"a": ["a", "b", "..", ".."],
"b": ["crap", "f", "g", "h"],
"c": ["h", "crap", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
def test_regex_replace_list_obj_inplace(self):
# same as above with inplace=True
# lists of regexes and values
obj = {"a": list("ab.."), "b": list("efgh"), "c": list("helo")}
dfobj = DataFrame(obj)
# lists of regexes and values
# list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
to_replace_res = [r"\s*\.\s*", r"e|f|g"]
values = [np.nan, "crap"]
res = dfobj.copy()
return_value = res.replace(to_replace_res, values, inplace=True, regex=True)
assert return_value is None
expec = DataFrame(
{
"a": ["a", "b", np.nan, np.nan],
"b": ["crap"] * 3 + ["h"],
"c": ["h", "crap", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
to_replace_res = [r"\s*(\.)\s*", r"(e|f|g)"]
values = [r"\1\1", r"\1_crap"]
res = dfobj.copy()
return_value = res.replace(to_replace_res, values, inplace=True, regex=True)
assert return_value is None
expec = DataFrame(
{
"a": ["a", "b", "..", ".."],
"b": ["e_crap", "f_crap", "g_crap", "h"],
"c": ["h", "e_crap", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
# or vN)]
to_replace_res = [r"\s*(\.)\s*", r"e"]
values = [r"\1\1", r"crap"]
res = dfobj.copy()
return_value = res.replace(to_replace_res, values, inplace=True, regex=True)
assert return_value is None
expec = DataFrame(
{
"a": ["a", "b", "..", ".."],
"b": ["crap", "f", "g", "h"],
"c": ["h", "crap", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
to_replace_res = [r"\s*(\.)\s*", r"e"]
values = [r"\1\1", r"crap"]
res = dfobj.copy()
return_value = res.replace(value=values, regex=to_replace_res, inplace=True)
assert return_value is None
expec = DataFrame(
{
"a": ["a", "b", "..", ".."],
"b": ["crap", "f", "g", "h"],
"c": ["h", "crap", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
def test_regex_replace_list_mixed(self, mix_ab):
# mixed frame to make sure this doesn't break things
dfmix = DataFrame(mix_ab)
# lists of regexes and values
# list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
to_replace_res = [r"\s*\.\s*", r"a"]
values = [np.nan, "crap"]
mix2 = {"a": list(range(4)), "b": list("ab.."), "c": list("halo")}
dfmix2 = DataFrame(mix2)
res = dfmix2.replace(to_replace_res, values, regex=True)
expec = DataFrame(
{
"a": mix2["a"],
"b": ["crap", "b", np.nan, np.nan],
"c": ["h", "crap", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
to_replace_res = [r"\s*(\.)\s*", r"(a|b)"]
values = [r"\1\1", r"\1_crap"]
res = dfmix.replace(to_replace_res, values, regex=True)
expec = DataFrame({"a": mix_ab["a"], "b": ["a_crap", "b_crap", "..", ".."]})
tm.assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
# or vN)]
to_replace_res = [r"\s*(\.)\s*", r"a", r"(b)"]
values = [r"\1\1", r"crap", r"\1_crap"]
res = dfmix.replace(to_replace_res, values, regex=True)
expec = DataFrame({"a": mix_ab["a"], "b": ["crap", "b_crap", "..", ".."]})
tm.assert_frame_equal(res, expec)
to_replace_res = [r"\s*(\.)\s*", r"a", r"(b)"]
values = [r"\1\1", r"crap", r"\1_crap"]
res = dfmix.replace(regex=to_replace_res, value=values)
expec = DataFrame({"a": mix_ab["a"], "b": ["crap", "b_crap", "..", ".."]})
tm.assert_frame_equal(res, expec)
def test_regex_replace_list_mixed_inplace(self, mix_ab):
dfmix = DataFrame(mix_ab)
# the same inplace
# lists of regexes and values
# list of [re1, re2, ..., reN] -> [v1, v2, ..., vN]
to_replace_res = [r"\s*\.\s*", r"a"]
values = [np.nan, "crap"]
res = dfmix.copy()
return_value = res.replace(to_replace_res, values, inplace=True, regex=True)
assert return_value is None
expec = DataFrame({"a": mix_ab["a"], "b": ["crap", "b", np.nan, np.nan]})
tm.assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [re1, re2, .., reN]
to_replace_res = [r"\s*(\.)\s*", r"(a|b)"]
values = [r"\1\1", r"\1_crap"]
res = dfmix.copy()
return_value = res.replace(to_replace_res, values, inplace=True, regex=True)
assert return_value is None
expec = DataFrame({"a": mix_ab["a"], "b": ["a_crap", "b_crap", "..", ".."]})
tm.assert_frame_equal(res, expec)
# list of [re1, re2, ..., reN] -> [(re1 or v1), (re2 or v2), ..., (reN
# or vN)]
to_replace_res = [r"\s*(\.)\s*", r"a", r"(b)"]
values = [r"\1\1", r"crap", r"\1_crap"]
res = dfmix.copy()
return_value = res.replace(to_replace_res, values, inplace=True, regex=True)
assert return_value is None
expec = DataFrame({"a": mix_ab["a"], "b": ["crap", "b_crap", "..", ".."]})
tm.assert_frame_equal(res, expec)
to_replace_res = [r"\s*(\.)\s*", r"a", r"(b)"]
values = [r"\1\1", r"crap", r"\1_crap"]
res = dfmix.copy()
return_value = res.replace(regex=to_replace_res, value=values, inplace=True)
assert return_value is None
expec = DataFrame({"a": mix_ab["a"], "b": ["crap", "b_crap", "..", ".."]})
tm.assert_frame_equal(res, expec)
def test_regex_replace_dict_mixed(self, mix_abc):
dfmix = DataFrame(mix_abc)
# dicts
# single dict {re1: v1}, search the whole frame
# need test for this...
# list of dicts {re1: v1, re2: v2, ..., re3: v3}, search the whole
# frame
res = dfmix.replace({"b": r"\s*\.\s*"}, {"b": np.nan}, regex=True)
res2 = dfmix.copy()
return_value = res2.replace(
{"b": r"\s*\.\s*"}, {"b": np.nan}, inplace=True, regex=True
)
assert return_value is None
expec = DataFrame(
{"a": mix_abc["a"], "b": ["a", "b", np.nan, np.nan], "c": mix_abc["c"]}
)
tm.assert_frame_equal(res, expec)
tm.assert_frame_equal(res2, expec)
# list of dicts {re1: re11, re2: re12, ..., reN: re1N}, search the
# whole frame
res = dfmix.replace({"b": r"\s*(\.)\s*"}, {"b": r"\1ty"}, regex=True)
res2 = dfmix.copy()
return_value = res2.replace(
{"b": r"\s*(\.)\s*"}, {"b": r"\1ty"}, inplace=True, regex=True
)
assert return_value is None
expec = DataFrame(
{"a": mix_abc["a"], "b": ["a", "b", ".ty", ".ty"], "c": mix_abc["c"]}
)
tm.assert_frame_equal(res, expec)
tm.assert_frame_equal(res2, expec)
res = dfmix.replace(regex={"b": r"\s*(\.)\s*"}, value={"b": r"\1ty"})
res2 = dfmix.copy()
return_value = res2.replace(
regex={"b": r"\s*(\.)\s*"}, value={"b": r"\1ty"}, inplace=True
)
assert return_value is None
expec = DataFrame(
{"a": mix_abc["a"], "b": ["a", "b", ".ty", ".ty"], "c": mix_abc["c"]}
)
tm.assert_frame_equal(res, expec)
tm.assert_frame_equal(res2, expec)
# scalar -> dict
# to_replace regex, {value: value}
expec = DataFrame(
{"a": mix_abc["a"], "b": [np.nan, "b", ".", "."], "c": mix_abc["c"]}
)
res = dfmix.replace("a", {"b": np.nan}, regex=True)
res2 = dfmix.copy()
return_value = res2.replace("a", {"b": np.nan}, regex=True, inplace=True)
assert return_value is None
tm.assert_frame_equal(res, expec)
tm.assert_frame_equal(res2, expec)
res = dfmix.replace("a", {"b": np.nan}, regex=True)
res2 = dfmix.copy()
return_value = res2.replace(regex="a", value={"b": np.nan}, inplace=True)
assert return_value is None
expec = DataFrame(
{"a": mix_abc["a"], "b": [np.nan, "b", ".", "."], "c": mix_abc["c"]}
)
tm.assert_frame_equal(res, expec)
tm.assert_frame_equal(res2, expec)
def test_regex_replace_dict_nested(self, mix_abc):
# nested dicts will not work until this is implemented for Series
dfmix = DataFrame(mix_abc)
res = dfmix.replace({"b": {r"\s*\.\s*": np.nan}}, regex=True)
res2 = dfmix.copy()
res4 = dfmix.copy()
return_value = res2.replace(
{"b": {r"\s*\.\s*": np.nan}}, inplace=True, regex=True
)
assert return_value is None
res3 = dfmix.replace(regex={"b": {r"\s*\.\s*": np.nan}})
return_value = res4.replace(regex={"b": {r"\s*\.\s*": np.nan}}, inplace=True)
assert return_value is None
expec = DataFrame(
{"a": mix_abc["a"], "b": ["a", "b", np.nan, np.nan], "c": mix_abc["c"]}
)
tm.assert_frame_equal(res, expec)
tm.assert_frame_equal(res2, expec)
tm.assert_frame_equal(res3, expec)
tm.assert_frame_equal(res4, expec)
def test_regex_replace_dict_nested_non_first_character(self):
# GH 25259
df = pd.DataFrame({"first": ["abc", "bca", "cab"]})
expected = pd.DataFrame({"first": [".bc", "bc.", "c.b"]})
result = df.replace({"a": "."}, regex=True)
tm.assert_frame_equal(result, expected)
def test_regex_replace_dict_nested_gh4115(self):
df = pd.DataFrame({"Type": ["Q", "T", "Q", "Q", "T"], "tmp": 2})
expected = DataFrame({"Type": [0, 1, 0, 0, 1], "tmp": 2})
result = df.replace({"Type": {"Q": 0, "T": 1}})
tm.assert_frame_equal(result, expected)
def test_regex_replace_list_to_scalar(self, mix_abc):
df = DataFrame(mix_abc)
expec = DataFrame(
{
"a": mix_abc["a"],
"b": np.array([np.nan] * 4),
"c": [np.nan, np.nan, np.nan, "d"],
}
)
res = df.replace([r"\s*\.\s*", "a|b"], np.nan, regex=True)
res2 = df.copy()
res3 = df.copy()
return_value = res2.replace(
[r"\s*\.\s*", "a|b"], np.nan, regex=True, inplace=True
)
assert return_value is None
return_value = res3.replace(
regex=[r"\s*\.\s*", "a|b"], value=np.nan, inplace=True
)
assert return_value is None
tm.assert_frame_equal(res, expec)
tm.assert_frame_equal(res2, expec)
tm.assert_frame_equal(res3, expec)
def test_regex_replace_str_to_numeric(self, mix_abc):
# what happens when you try to replace a numeric value with a regex?
df = DataFrame(mix_abc)
res = df.replace(r"\s*\.\s*", 0, regex=True)
res2 = df.copy()
return_value = res2.replace(r"\s*\.\s*", 0, inplace=True, regex=True)
assert return_value is None
res3 = df.copy()
return_value = res3.replace(regex=r"\s*\.\s*", value=0, inplace=True)
assert return_value is None
expec = DataFrame({"a": mix_abc["a"], "b": ["a", "b", 0, 0], "c": mix_abc["c"]})
tm.assert_frame_equal(res, expec)
tm.assert_frame_equal(res2, expec)
tm.assert_frame_equal(res3, expec)
def test_regex_replace_regex_list_to_numeric(self, mix_abc):
df = DataFrame(mix_abc)
res = df.replace([r"\s*\.\s*", "b"], 0, regex=True)
res2 = df.copy()
return_value = res2.replace([r"\s*\.\s*", "b"], 0, regex=True, inplace=True)
assert return_value is None
res3 = df.copy()
return_value = res3.replace(regex=[r"\s*\.\s*", "b"], value=0, inplace=True)
assert return_value is None
expec = DataFrame(
{"a": mix_abc["a"], "b": ["a", 0, 0, 0], "c": ["a", 0, np.nan, "d"]}
)
tm.assert_frame_equal(res, expec)
tm.assert_frame_equal(res2, expec)
tm.assert_frame_equal(res3, expec)
def test_regex_replace_series_of_regexes(self, mix_abc):
df = DataFrame(mix_abc)
s1 = Series({"b": r"\s*\.\s*"})
s2 = Series({"b": np.nan})
res = df.replace(s1, s2, regex=True)
res2 = df.copy()
return_value = res2.replace(s1, s2, inplace=True, regex=True)
assert return_value is None
res3 = df.copy()
return_value = res3.replace(regex=s1, value=s2, inplace=True)
assert return_value is None
expec = DataFrame(
{"a": mix_abc["a"], "b": ["a", "b", np.nan, np.nan], "c": mix_abc["c"]}
)
tm.assert_frame_equal(res, expec)
tm.assert_frame_equal(res2, expec)
tm.assert_frame_equal(res3, expec)
def test_regex_replace_numeric_to_object_conversion(self, mix_abc):
df = DataFrame(mix_abc)
expec = DataFrame({"a": ["a", 1, 2, 3], "b": mix_abc["b"], "c": mix_abc["c"]})
res = df.replace(0, "a")
tm.assert_frame_equal(res, expec)
assert res.a.dtype == np.object_
@pytest.mark.parametrize("metachar", ["[]", "()", r"\d", r"\w", r"\s"])
def test_replace_regex_metachar(self, metachar):
df = DataFrame({"a": [metachar, "else"]})
result = df.replace({"a": {metachar: "paren"}})
expected = DataFrame({"a": ["paren", "else"]})
tm.assert_frame_equal(result, expected)
def test_replace(self, datetime_frame):
datetime_frame["A"][:5] = np.nan
datetime_frame["A"][-5:] = np.nan
zero_filled = datetime_frame.replace(np.nan, -1e8)
tm.assert_frame_equal(zero_filled, datetime_frame.fillna(-1e8))
tm.assert_frame_equal(zero_filled.replace(-1e8, np.nan), datetime_frame)
datetime_frame["A"][:5] = np.nan
datetime_frame["A"][-5:] = np.nan
datetime_frame["B"][:5] = -1e8
# empty
df = DataFrame(index=["a", "b"])
tm.assert_frame_equal(df, df.replace(5, 7))
# GH 11698
# test for mixed data types.
df = pd.DataFrame(
[("-", pd.to_datetime("20150101")), ("a", pd.to_datetime("20150102"))]
)
df1 = df.replace("-", np.nan)
expected_df = pd.DataFrame(
[(np.nan, pd.to_datetime("20150101")), ("a", pd.to_datetime("20150102"))]
)
tm.assert_frame_equal(df1, expected_df)
def test_replace_list(self):
obj = {"a": list("ab.."), "b": list("efgh"), "c": list("helo")}
dfobj = DataFrame(obj)
# lists of regexes and values
# list of [v1, v2, ..., vN] -> [v1, v2, ..., vN]
to_replace_res = [r".", r"e"]
values = [np.nan, "crap"]
res = dfobj.replace(to_replace_res, values)
expec = DataFrame(
{
"a": ["a", "b", np.nan, np.nan],
"b": ["crap", "f", "g", "h"],
"c": ["h", "crap", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
# list of [v1, v2, ..., vN] -> [v1, v2, .., vN]
to_replace_res = [r".", r"f"]
values = [r"..", r"crap"]
res = dfobj.replace(to_replace_res, values)
expec = DataFrame(
{
"a": ["a", "b", "..", ".."],
"b": ["e", "crap", "g", "h"],
"c": ["h", "e", "l", "o"],
}
)
tm.assert_frame_equal(res, expec)
def test_replace_with_empty_list(self):
# GH 21977
s = pd.Series([["a", "b"], [], np.nan, [1]])
df = pd.DataFrame({"col": s})
expected = df
result = df.replace([], np.nan)
tm.assert_frame_equal(result, expected)
# GH 19266
with pytest.raises(ValueError, match="cannot assign mismatch"):
df.replace({np.nan: []})
with pytest.raises(ValueError, match="cannot assign mismatch"):
df.replace({np.nan: ["dummy", "alt"]})
def test_replace_series_dict(self):
# from GH 3064
df = DataFrame({"zero": {"a": 0.0, "b": 1}, "one": {"a": 2.0, "b": 0}})
result = df.replace(0, {"zero": 0.5, "one": 1.0})
expected = DataFrame({"zero": {"a": 0.5, "b": 1}, "one": {"a": 2.0, "b": 1.0}})
tm.assert_frame_equal(result, expected)
result = df.replace(0, df.mean())
tm.assert_frame_equal(result, expected)
# series to series/dict
df = DataFrame({"zero": {"a": 0.0, "b": 1}, "one": {"a": 2.0, "b": 0}})
s = Series({"zero": 0.0, "one": 2.0})
result = df.replace(s, {"zero": 0.5, "one": 1.0})
expected = DataFrame({"zero": {"a": 0.5, "b": 1}, "one": {"a": 1.0, "b": 0.0}})
tm.assert_frame_equal(result, expected)
result = df.replace(s, df.mean())
tm.assert_frame_equal(result, expected)
def test_replace_convert(self):
# gh 3907
df = DataFrame([["foo", "bar", "bah"], ["bar", "foo", "bah"]])
m = {"foo": 1, "bar": 2, "bah": 3}
rep = df.replace(m)
expec = Series([np.int64] * 3)
res = rep.dtypes
tm.assert_series_equal(expec, res)
def test_replace_mixed(self, float_string_frame):
mf = float_string_frame
mf.iloc[5:20, mf.columns.get_loc("foo")] = np.nan
mf.iloc[-10:, mf.columns.get_loc("A")] = np.nan
result = float_string_frame.replace(np.nan, -18)
expected = float_string_frame.fillna(value=-18)
tm.assert_frame_equal(result, expected)
tm.assert_frame_equal(result.replace(-18, np.nan), float_string_frame)
result = float_string_frame.replace(np.nan, -1e8)
expected = float_string_frame.fillna(value=-1e8)
tm.assert_frame_equal(result, expected)
tm.assert_frame_equal(result.replace(-1e8, np.nan), float_string_frame)
# int block upcasting
df = DataFrame(
{
"A": Series([1.0, 2.0], dtype="float64"),
"B": Series([0, 1], dtype="int64"),
}
)
expected = DataFrame(
{
"A": Series([1.0, 2.0], dtype="float64"),
"B": Series([0.5, 1], dtype="float64"),
}
)
result = df.replace(0, 0.5)
tm.assert_frame_equal(result, expected)
return_value = df.replace(0, 0.5, inplace=True)
assert return_value is None
tm.assert_frame_equal(df, expected)
# int block splitting
df = DataFrame(
{
"A": Series([1.0, 2.0], dtype="float64"),
"B": Series([0, 1], dtype="int64"),
"C": Series([1, 2], dtype="int64"),
}
)
expected = DataFrame(
{
"A": Series([1.0, 2.0], dtype="float64"),
"B": Series([0.5, 1], dtype="float64"),
"C": Series([1, 2], dtype="int64"),
}
)
result = df.replace(0, 0.5)
tm.assert_frame_equal(result, expected)
# to object block upcasting
df = DataFrame(
{
"A": Series([1.0, 2.0], dtype="float64"),
"B": Series([0, 1], dtype="int64"),
}
)
expected = DataFrame(
{
"A": Series([1, "foo"], dtype="object"),
"B": Series([0, 1], dtype="int64"),
}
)
result = df.replace(2, "foo")
tm.assert_frame_equal(result, expected)
expected = DataFrame(
{
"A": Series(["foo", "bar"], dtype="object"),
"B": Series([0, "foo"], dtype="object"),
}
)
result = df.replace([1, 2], ["foo", "bar"])
tm.assert_frame_equal(result, expected)
        # regression case: replace a scalar with a per-column dict of means
df = DataFrame(
{"A": Series([3, 0], dtype="int64"), "B": Series([0, 3], dtype="int64")}
)
result = df.replace(3, df.mean().to_dict())
expected = df.copy().astype("float64")
m = df.mean()
expected.iloc[0, 0] = m[0]
expected.iloc[1, 1] = m[1]
tm.assert_frame_equal(result, expected)
def test_replace_simple_nested_dict(self):
df = DataFrame({"col": range(1, 5)})
expected = DataFrame({"col": ["a", 2, 3, "b"]})
result = df.replace({"col": {1: "a", 4: "b"}})
tm.assert_frame_equal(expected, result)
# in this case, should be the same as the not nested version
result = df.replace({1: "a", 4: "b"})
tm.assert_frame_equal(expected, result)
def test_replace_simple_nested_dict_with_nonexistent_value(self):
df = DataFrame({"col": range(1, 5)})
expected = DataFrame({"col": ["a", 2, 3, "b"]})
result = df.replace({-1: "-", 1: "a", 4: "b"})
tm.assert_frame_equal(expected, result)
result = df.replace({"col": {-1: "-", 1: "a", 4: "b"}})
tm.assert_frame_equal(expected, result)
def test_replace_value_is_none(self, datetime_frame):
orig_value = datetime_frame.iloc[0, 0]
orig2 = datetime_frame.iloc[1, 0]
datetime_frame.iloc[0, 0] = np.nan
datetime_frame.iloc[1, 0] = 1
result = datetime_frame.replace(to_replace={np.nan: 0})
expected = datetime_frame.T.replace(to_replace={np.nan: 0}).T
tm.assert_frame_equal(result, expected)
result = datetime_frame.replace(to_replace={np.nan: 0, 1: -1e8})
tsframe = datetime_frame.copy()
tsframe.iloc[0, 0] = 0
tsframe.iloc[1, 0] = -1e8
expected = tsframe
tm.assert_frame_equal(expected, result)
datetime_frame.iloc[0, 0] = orig_value
datetime_frame.iloc[1, 0] = orig2
def test_replace_for_new_dtypes(self, datetime_frame):
# dtypes
tsframe = datetime_frame.copy().astype(np.float32)
tsframe["A"][:5] = np.nan
tsframe["A"][-5:] = np.nan
zero_filled = tsframe.replace(np.nan, -1e8)
tm.assert_frame_equal(zero_filled, tsframe.fillna(-1e8))
tm.assert_frame_equal(zero_filled.replace(-1e8, np.nan), tsframe)
tsframe["A"][:5] = np.nan
tsframe["A"][-5:] = np.nan
tsframe["B"][:5] = -1e8
b = tsframe["B"]
b[b == -1e8] = np.nan
tsframe["B"] = b
result = tsframe.fillna(method="bfill")
tm.assert_frame_equal(result, tsframe.fillna(method="bfill"))
@pytest.mark.parametrize(
"frame, to_replace, value, expected",
[
(DataFrame({"ints": [1, 2, 3]}), 1, 0, DataFrame({"ints": [0, 2, 3]})),
(
DataFrame({"ints": [1, 2, 3]}, dtype=np.int32),
1,
0,
DataFrame({"ints": [0, 2, 3]}, dtype=np.int32),
),
(
DataFrame({"ints": [1, 2, 3]}, dtype=np.int16),
1,
0,
DataFrame({"ints": [0, 2, 3]}, dtype=np.int16),
),
(
DataFrame({"bools": [True, False, True]}),
False,
True,
DataFrame({"bools": [True, True, True]}),
),
(
DataFrame({"complex": [1j, 2j, 3j]}),
1j,
0,
DataFrame({"complex": [0j, 2j, 3j]}),
),
(
DataFrame(
{
"datetime64": Index(
[
datetime(2018, 5, 28),
datetime(2018, 7, 28),
datetime(2018, 5, 28),
]
)
}
),
datetime(2018, 5, 28),
datetime(2018, 7, 28),
DataFrame({"datetime64": Index([datetime(2018, 7, 28)] * 3)}),
),
# GH 20380
(
DataFrame({"dt": [datetime(3017, 12, 20)], "str": ["foo"]}),
"foo",
"bar",
DataFrame({"dt": [datetime(3017, 12, 20)], "str": ["bar"]}),
),
(
DataFrame(
{
"A": date_range("20130101", periods=3, tz="US/Eastern"),
"B": [0, np.nan, 2],
}
),
Timestamp("20130102", tz="US/Eastern"),
Timestamp("20130104", tz="US/Eastern"),
DataFrame(
{
"A": [
Timestamp("20130101", tz="US/Eastern"),
Timestamp("20130104", tz="US/Eastern"),
Timestamp("20130103", tz="US/Eastern"),
],
"B": [0, np.nan, 2],
}
),
),
],
)
def test_replace_dtypes(self, frame, to_replace, value, expected):
result = getattr(frame, "replace")(to_replace, value)
tm.assert_frame_equal(result, expected)
def test_replace_input_formats_listlike(self):
# both dicts
to_rep = {"A": np.nan, "B": 0, "C": ""}
values = {"A": 0, "B": -1, "C": "missing"}
df = DataFrame(
{"A": [np.nan, 0, np.inf], "B": [0, 2, 5], "C": ["", "asdf", "fd"]}
)
filled = df.replace(to_rep, values)
expected = {k: v.replace(to_rep[k], values[k]) for k, v in df.items()}
tm.assert_frame_equal(filled, DataFrame(expected))
result = df.replace([0, 2, 5], [5, 2, 0])
expected = DataFrame(
{"A": [np.nan, 5, np.inf], "B": [5, 2, 0], "C": ["", "asdf", "fd"]}
)
tm.assert_frame_equal(result, expected)
# scalar to dict
values = {"A": 0, "B": -1, "C": "missing"}
df = DataFrame(
{"A": [np.nan, 0, np.nan], "B": [0, 2, 5], "C": ["", "asdf", "fd"]}
)
filled = df.replace(np.nan, values)
expected = {k: v.replace(np.nan, values[k]) for k, v in df.items()}
tm.assert_frame_equal(filled, DataFrame(expected))
# list to list
to_rep = [np.nan, 0, ""]
values = [-2, -1, "missing"]
result = df.replace(to_rep, values)
expected = df.copy()
for i in range(len(to_rep)):
return_value = expected.replace(to_rep[i], values[i], inplace=True)
assert return_value is None
tm.assert_frame_equal(result, expected)
msg = r"Replacement lists must match in length\. Expecting 3 got 2"
with pytest.raises(ValueError, match=msg):
df.replace(to_rep, values[1:])
def test_replace_input_formats_scalar(self):
df = DataFrame(
{"A": [np.nan, 0, np.inf], "B": [0, 2, 5], "C": ["", "asdf", "fd"]}
)
# dict to scalar
to_rep = {"A": np.nan, "B": 0, "C": ""}
filled = df.replace(to_rep, 0)
expected = {k: v.replace(to_rep[k], 0) for k, v in df.items()}
tm.assert_frame_equal(filled, DataFrame(expected))
msg = "value argument must be scalar, dict, or Series"
with pytest.raises(TypeError, match=msg):
df.replace(to_rep, [np.nan, 0, ""])
# list to scalar
to_rep = [np.nan, 0, ""]
result = df.replace(to_rep, -1)
expected = df.copy()
for i in range(len(to_rep)):
return_value = expected.replace(to_rep[i], -1, inplace=True)
assert return_value is None
tm.assert_frame_equal(result, expected)
def test_replace_limit(self):
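# placeholder body: behavior of replace(..., limit=...) is not covered here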
pass
def test_replace_dict_no_regex(self):
answer = Series(
{
0: "Strongly Agree",
1: "Agree",
2: "Neutral",
3: "Disagree",
4: "Strongly Disagree",
}
)
weights = {
"Agree": 4,
"Disagree": 2,
"Neutral": 3,
"Strongly Agree": 5,
"Strongly Disagree": 1,
}
expected = Series({0: 5, 1: 4, 2: 3, 3: 2, 4: 1})
result = answer.replace(weights)
tm.assert_series_equal(result, expected)
def test_replace_series_no_regex(self):
answer = Series(
{
0: "Strongly Agree",
1: "Agree",
2: "Neutral",
3: "Disagree",
4: "Strongly Disagree",
}
)
weights = Series(
{
"Agree": 4,
"Disagree": 2,
"Neutral": 3,
"Strongly Agree": 5,
"Strongly Disagree": 1,
}
)
expected = Series({0: 5, 1: 4, 2: 3, 3: 2, 4: 1})
result = answer.replace(weights)
tm.assert_series_equal(result, expected)
def test_replace_dict_tuple_list_ordering_remains_the_same(self):
df = DataFrame(dict(A=[np.nan, 1]))
res1 = df.replace(to_replace={np.nan: 0, 1: -1e8})
res2 = df.replace(to_replace=(1, np.nan), value=[-1e8, 0])
res3 = df.replace(to_replace=[1, np.nan], value=[-1e8, 0])
expected = DataFrame({"A": [0, -1e8]})
tm.assert_frame_equal(res1, res2)
tm.assert_frame_equal(res2, res3)
tm.assert_frame_equal(res3, expected)
def test_replace_doesnt_replace_without_regex(self):
raw = """fol T_opp T_Dir T_Enh
0 1 0 0 vo
1 2 vr 0 0
2 2 0 0 0
3 3 0 bt 0"""
df = pd.read_csv(StringIO(raw), sep=r"\s+")
res = df.replace({r"\D": 1})
tm.assert_frame_equal(df, res)
def test_replace_bool_with_string(self):
df = DataFrame({"a": [True, False], "b": list("ab")})
result = df.replace(True, "a")
expected = DataFrame({"a": ["a", False], "b": df.b})
tm.assert_frame_equal(result, expected)
def test_replace_pure_bool_with_string_no_op(self):
df = DataFrame(np.random.rand(2, 2) > 0.5)
result = df.replace("asdf", "fdsa")
tm.assert_frame_equal(df, result)
def test_replace_bool_with_bool(self):
df = DataFrame(np.random.rand(2, 2) > 0.5)
result = df.replace(False, True)
expected = DataFrame(np.ones((2, 2), dtype=bool))
tm.assert_frame_equal(result, expected)
def test_replace_with_dict_with_bool_keys(self):
df = DataFrame({0: [True, False], 1: [False, True]})
with pytest.raises(TypeError, match="Cannot compare types .+"):
df.replace({"asdf": "asdb", True: "yes"})
def test_replace_truthy(self):
df = DataFrame({"a": [True, True]})
r = df.replace([np.inf, -np.inf], np.nan)
e = df
tm.assert_frame_equal(r, e)
def test_nested_dict_overlapping_keys_replace_int(self):
# GH 27660 keep behaviour consistent for simple dictionary and
# nested dictionary replacement
df = DataFrame({"a": list(range(1, 5))})
result = df.replace({"a": dict(zip(range(1, 5), range(2, 6)))})
expected = df.replace(dict(zip(range(1, 5), range(2, 6))))
tm.assert_frame_equal(result, expected)
def test_nested_dict_overlapping_keys_replace_str(self):
# GH 27660
a = np.arange(1, 5)
astr = a.astype(str)
bstr = np.arange(2, 6).astype(str)
df = DataFrame({"a": astr})
result = df.replace(dict(zip(astr, bstr)))
expected = df.replace({"a": dict(zip(astr, bstr))})
tm.assert_frame_equal(result, expected)
def test_replace_swapping_bug(self):
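# regression guard: the {True: "Y", False: "N"} map must apply in one pass, not chain one replacement into the next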
df = pd.DataFrame({"a": [True, False, True]})
res = df.replace({"a": {True: "Y", False: "N"}})
expect = pd.DataFrame({"a": ["Y", "N", "Y"]})
tm.assert_frame_equal(res, expect)
df = pd.DataFrame({"a": [0, 1, 0]})
res = df.replace({"a": {0: "Y", 1: "N"}})
expect = pd.DataFrame({"a": ["Y", "N", "Y"]})
tm.assert_frame_equal(res, expect)
def test_replace_period(self):
d = {
"fname": {
"out_augmented_AUG_2011.json": pd.Period(year=2011, month=8, freq="M"),
"out_augmented_JAN_2011.json": pd.Period(year=2011, month=1, freq="M"),
"out_augmented_MAY_2012.json": pd.Period(year=2012, month=5, freq="M"),
"out_augmented_SUBSIDY_WEEK.json": pd.Period(
year=2011, month=4, freq="M"
),
"out_augmented_AUG_2012.json": pd.Period(year=2012, month=8, freq="M"),
"out_augmented_MAY_2011.json": pd.Period(year=2011, month=5, freq="M"),
"out_augmented_SEP_2013.json": pd.Period(year=2013, month=9, freq="M"),
}
}
df = pd.DataFrame(
[
"out_augmented_AUG_2012.json",
"out_augmented_SEP_2013.json",
"out_augmented_SUBSIDY_WEEK.json",
"out_augmented_MAY_2012.json",
"out_augmented_MAY_2011.json",
"out_augmented_AUG_2011.json",
"out_augmented_JAN_2011.json",
],
columns=["fname"],
)
assert set(df.fname.values) == set(d["fname"].keys())
# We don't support converting object -> specialized EA in
# replace yet.
expected = DataFrame(
{"fname": [d["fname"][k] for k in df.fname.values]}, dtype=object
)
result = df.replace(d)
tm.assert_frame_equal(result, expected)
def test_replace_datetime(self):
d = {
"fname": {
"out_augmented_AUG_2011.json": pd.Timestamp("2011-08"),
"out_augmented_JAN_2011.json": pd.Timestamp("2011-01"),
"out_augmented_MAY_2012.json": pd.Timestamp("2012-05"),
"out_augmented_SUBSIDY_WEEK.json": pd.Timestamp("2011-04"),
"out_augmented_AUG_2012.json": pd.Timestamp("2012-08"),
"out_augmented_MAY_2011.json": pd.Timestamp("2011-05"),
"out_augmented_SEP_2013.json": pd.Timestamp("2013-09"),
}
}
df = pd.DataFrame(
[
"out_augmented_AUG_2012.json",
"out_augmented_SEP_2013.json",
"out_augmented_SUBSIDY_WEEK.json",
"out_augmented_MAY_2012.json",
"out_augmented_MAY_2011.json",
"out_augmented_AUG_2011.json",
"out_augmented_JAN_2011.json",
],
columns=["fname"],
)
assert set(df.fname.values) == set(d["fname"].keys())
expected = DataFrame({"fname": [d["fname"][k] for k in df.fname.values]})
result = df.replace(d)
tm.assert_frame_equal(result, expected)
def test_replace_datetimetz(self):
# GH 11326
# behaving poorly when presented with a datetime64[ns, tz]
df = DataFrame(
{
"A": date_range("20130101", periods=3, tz="US/Eastern"),
"B": [0, np.nan, 2],
}
)
result = df.replace(np.nan, 1)
expected = DataFrame(
{
"A": date_range("20130101", periods=3, tz="US/Eastern"),
"B": Series([0, 1, 2], dtype="float64"),
}
)
tm.assert_frame_equal(result, expected)
result = df.fillna(1)
tm.assert_frame_equal(result, expected)
result = df.replace(0, np.nan)
expected = DataFrame(
{
"A": date_range("20130101", periods=3, tz="US/Eastern"),
"B": [np.nan, np.nan, 2],
}
)
tm.assert_frame_equal(result, expected)
result = df.replace(
Timestamp("20130102", tz="US/Eastern"),
Timestamp("20130104", tz="US/Eastern"),
)
expected = DataFrame(
{
"A": [
Timestamp("20130101", tz="US/Eastern"),
Timestamp("20130104", tz="US/Eastern"),
Timestamp("20130103", tz="US/Eastern"),
],
"B": [0, np.nan, 2],
}
)
tm.assert_frame_equal(result, expected)
result = df.copy()
result.iloc[1, 0] = np.nan
result = result.replace({"A": pd.NaT}, Timestamp("20130104", tz="US/Eastern"))
tm.assert_frame_equal(result, expected)
# coerce to object
result = df.copy()
result.iloc[1, 0] = np.nan
result = result.replace({"A": pd.NaT}, Timestamp("20130104", tz="US/Pacific"))
expected = DataFrame(
{
"A": [
Timestamp("20130101", tz="US/Eastern"),
Timestamp("20130104", tz="US/Pacific"),
Timestamp("20130103", tz="US/Eastern"),
],
"B": [0, np.nan, 2],
}
)
tm.assert_frame_equal(result, expected)
result = df.copy()
result.iloc[1, 0] = np.nan
result = result.replace({"A": np.nan}, Timestamp("20130104"))
expected = DataFrame(
{
"A": [
Timestamp("20130101", tz="US/Eastern"),
Timestamp("20130104"),
Timestamp("20130103", tz="US/Eastern"),
],
"B": [0, np.nan, 2],
}
)
tm.assert_frame_equal(result, expected)
def test_replace_with_empty_dictlike(self, mix_abc):
# GH 15289
df = DataFrame(mix_abc)
tm.assert_frame_equal(df, df.replace({}))
tm.assert_frame_equal(df, df.replace(Series([], dtype=object)))
tm.assert_frame_equal(df, df.replace({"b": {}}))
tm.assert_frame_equal(df, df.replace(Series({"b": {}})))
@pytest.mark.parametrize(
"to_replace, method, expected",
[
(0, "bfill", {"A": [1, 1, 2], "B": [5, np.nan, 7], "C": ["a", "b", "c"]}),
(
np.nan,
"bfill",
{"A": [0, 1, 2], "B": [5.0, 7.0, 7.0], "C": ["a", "b", "c"]},
),
("d", "ffill", {"A": [0, 1, 2], "B": [5, np.nan, 7], "C": ["a", "b", "c"]}),
(
[0, 2],
"bfill",
{"A": [1, 1, 2], "B": [5, np.nan, 7], "C": ["a", "b", "c"]},
),
(
[1, 2],
"pad",
{"A": [0, 0, 0], "B": [5, np.nan, 7], "C": ["a", "b", "c"]},
),
(
(1, 2),
"bfill",
{"A": [0, 2, 2], "B": [5, np.nan, 7], "C": ["a", "b", "c"]},
),
(
["b", "c"],
"ffill",
{"A": [0, 1, 2], "B": [5, np.nan, 7], "C": ["a", "a", "a"]},
),
],
)
def test_replace_method(self, to_replace, method, expected):
# GH 19632
df = DataFrame({"A": [0, 1, 2], "B": [5, np.nan, 7], "C": ["a", "b", "c"]})
result = df.replace(to_replace=to_replace, value=None, method=method)
expected = DataFrame(expected)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"replace_dict, final_data",
[({"a": 1, "b": 1}, [[3, 3], [2, 2]]), ({"a": 1, "b": 2}, [[3, 1], [2, 3]])],
)
def test_categorical_replace_with_dict(self, replace_dict, final_data):
# GH 26988
df = DataFrame([[1, 1], [2, 2]], columns=["a", "b"], dtype="category")
final_data = np.array(final_data)
a = pd.Categorical(final_data[:, 0], categories=[3, 2])
excat = [3, 2] if replace_dict["b"] == 1 else [1, 3]
b = pd.Categorical(final_data[:, 1], categories=excat)
expected = DataFrame({"a": a, "b": b})
result = df.replace(replace_dict, 3)
tm.assert_frame_equal(result, expected)
msg = (
r"Attributes of DataFrame.iloc\[:, 0\] \(column name=\"a\"\) are "
"different"
)
with pytest.raises(AssertionError, match=msg):
# ensure non-inplace call does not affect original
tm.assert_frame_equal(df, expected)
return_value = df.replace(replace_dict, 3, inplace=True)
assert return_value is None
tm.assert_frame_equal(df, expected)
@pytest.mark.parametrize(
"df, to_replace, exp",
[
(
{"col1": [1, 2, 3], "col2": [4, 5, 6]},
{4: 5, 5: 6, 6: 7},
{"col1": [1, 2, 3], "col2": [5, 6, 7]},
),
(
{"col1": [1, 2, 3], "col2": ["4", "5", "6"]},
{"4": "5", "5": "6", "6": "7"},
{"col1": [1, 2, 3], "col2": ["5", "6", "7"]},
),
],
)
def test_replace_commutative(self, df, to_replace, exp):
# GH 16051
# DataFrame.replace() overwrites when values are non-numeric
# also added to data frame whilst issue was for series
df = pd.DataFrame(df)
expected = pd.DataFrame(exp)
result = df.replace(to_replace)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"replacer",
[
pd.Timestamp("20170827"),
np.int8(1),
np.int16(1),
np.float32(1),
np.float64(1),
],
)
def test_replace_replacer_dtype(self, replacer):
# GH26632
df = pd.DataFrame(["a"])
result = df.replace({"a": replacer, "b": replacer})
expected = pd.DataFrame([replacer])
tm.assert_frame_equal(result, expected)
def test_replace_after_convert_dtypes(self):
# GH31517
df = pd.DataFrame({"grp": [1, 2, 3, 4, 5]}, dtype="Int64")
result = df.replace(1, 10)
expected = pd.DataFrame({"grp": [10, 2, 3, 4, 5]}, dtype="Int64")
tm.assert_frame_equal(result, expected)
def test_replace_invalid_to_replace(self):
# GH 18634
# API: replace() should raise an exception if invalid argument is given
df = pd.DataFrame({"one": ["a", "b ", "c"], "two": ["d ", "e ", "f "]})
msg = (
r"Expecting 'to_replace' to be either a scalar, array-like, "
r"dict or None, got invalid type.*"
)
with pytest.raises(TypeError, match=msg):
df.replace(lambda x: x.strip())
@pytest.mark.parametrize("dtype", ["float", "float64", "int64", "Int64", "boolean"])
@pytest.mark.parametrize("value", [np.nan, pd.NA])
def test_replace_no_replacement_dtypes(self, dtype, value):
# https://github.com/pandas-dev/pandas/issues/32988
df = pd.DataFrame(np.eye(2), dtype=dtype)
result = df.replace(to_replace=[None, -np.inf, np.inf], value=value)
tm.assert_frame_equal(result, df)
@pytest.mark.parametrize("replacement", [np.nan, 5])
def test_replace_with_duplicate_columns(self, replacement):
# GH 24798
result = pd.DataFrame({"A": [1, 2, 3], "A1": [4, 5, 6], "B": [7, 8, 9]})
result.columns = list("AAB")
expected = pd.DataFrame(
{"A": [1, 2, 3], "A1": [4, 5, 6], "B": [replacement, 8, 9]}
)
expected.columns = list("AAB")
result["B"] = result["B"].replace(7, replacement)
tm.assert_frame_equal(result, expected)
@pytest.mark.xfail(
reason="replace() changes dtype from period to object, see GH34871", strict=True
)
def test_replace_period_ignore_float(self):
"""
Regression test for GH#34871: if df.replace(1.0, 0.0) is called on a df
with a Period column the old, faulty behavior is to raise TypeError.
"""
df = pd.DataFrame({"Per": [pd.Period("2020-01")] * 3})
result = df.replace(1.0, 0.0)
expected = pd.DataFrame({"Per": [pd.Period("2020-01")] * 3})
tm.assert_frame_equal(expected, result)
def test_replace_value_category_type(self):
"""
Test for #23305: to ensure category dtypes are maintained
after replace with direct values
"""
# create input data
input_dict = {
"col1": [1, 2, 3, 4],
"col2": ["a", "b", "c", "d"],
"col3": [1.5, 2.5, 3.5, 4.5],
"col4": ["cat1", "cat2", "cat3", "cat4"],
"col5": ["obj1", "obj2", "obj3", "obj4"],
}
# explicitly cast columns as category and order them
input_df = pd.DataFrame(data=input_dict).astype(
{"col2": "category", "col4": "category"}
)
input_df["col2"] = input_df["col2"].cat.reorder_categories(
["a", "b", "c", "d"], ordered=True
)
input_df["col4"] = input_df["col4"].cat.reorder_categories(
["cat1", "cat2", "cat3", "cat4"], ordered=True
)
# create expected dataframe
expected_dict = {
"col1": [1, 2, 3, 4],
"col2": ["a", "b", "c", "z"],
"col3": [1.5, 2.5, 3.5, 4.5],
"col4": ["cat1", "catX", "cat3", "cat4"],
"col5": ["obj9", "obj2", "obj3", "obj4"],
}
# explicitly cast columns as category and order them
expected = pd.DataFrame(data=expected_dict).astype(
{"col2": "category", "col4": "category"}
)
expected["col2"] = expected["col2"].cat.reorder_categories(
["a", "b", "c", "z"], ordered=True
)
expected["col4"] = expected["col4"].cat.reorder_categories(
["cat1", "catX", "cat3", "cat4"], ordered=True
)
# replace values in input dataframe
input_df = input_df.replace("d", "z")
input_df = input_df.replace("obj1", "obj9")
result = input_df.replace("cat2", "catX")
tm.assert_frame_equal(result, expected)
@pytest.mark.xfail(
reason="category dtype gets changed to object type after replace, see #35268",
strict=True,
)
def test_replace_dict_category_type(self, input_category_df, expected_category_df):
"""
Test to ensure category dtypes are maintained
after replace with dict values
"""
# create input dataframe
input_dict = {"col1": ["a"], "col2": ["obj1"], "col3": ["cat1"]}
# explicitly cast columns as category
input_df = pd.DataFrame(data=input_dict).astype(
{"col1": "category", "col2": "category", "col3": "category"}
)
# create expected dataframe
expected_dict = {"col1": ["z"], "col2": ["obj9"], "col3": ["catX"]}
# explicitly cast columns as category
expected = pd.DataFrame(data=expected_dict).astype(
{"col1": "category", "col2": "category", "col3": "category"}
)
# replace values in input dataframe using a dict
result = input_df.replace({"a": "z", "obj1": "obj9", "cat1": "catX"})
tm.assert_frame_equal(result, expected)
def test_replace_with_compiled_regex(self):
# https://github.com/pandas-dev/pandas/issues/35680
df = pd.DataFrame(["a", "b", "c"])
regex = re.compile("^a$")
result = df.replace({regex: "z"}, regex=True)
expected = pd.DataFrame(["z", "b", "c"])
tm.assert_frame_equal(result, expected)
def test_replace_intervals(self):
# https://github.com/pandas-dev/pandas/issues/35931
df = pd.DataFrame({"a": [pd.Interval(0, 1), pd.Interval(0, 1)]})
result = df.replace({"a": {pd.Interval(0, 1): "x"}})
expected = pd.DataFrame({"a": ["x", "x"]})
tm.assert_frame_equal(result, expected)
| [
"[email protected]"
] | |
17ab9fad59ece529f0f67bb4d297210e473db480 | c2f92d75d235ff5ed7b213c02c4a0657545ba02f | /newchama_web/2/newchama/repository/migrations/0020_auto__add_field_keyword_is_delete.py | 14c0317f746a4890034163cc993cdb2a38deb7d8 | [] | no_license | cash2one/tstpthon | fab6112691eb15a8a26bd168af3f179913e0c4e0 | fc5c42c024065c7b42bea2b9de1e3874a794a30d | refs/heads/master | 2021-01-20T01:52:06.519021 | 2017-04-14T09:50:55 | 2017-04-14T09:50:55 | 89,338,193 | 0 | 1 | null | 2017-04-25T08:46:06 | 2017-04-25T08:46:06 | null | UTF-8 | Python | false | false | 10,751 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Keyword.is_delete'
db.add_column('repository_keyword', 'is_delete',
self.gf('django.db.models.fields.IntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Keyword.is_delete'
db.delete_column('repository_keyword', 'is_delete')
models = {
u'area.city': {
'Meta': {'object_name': 'City'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name_cn': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'province': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['area.Province']"})
},
u'area.continent': {
'Meta': {'object_name': 'Continent'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name_cn': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'area.country': {
'Meta': {'object_name': 'Country'},
'continent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['area.Continent']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name_cn': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'area.province': {
'Meta': {'object_name': 'Province'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['area.Country']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name_cn': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'industry.industry': {
'Meta': {'object_name': 'Industry'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'father': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['industry.Industry']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name_cn': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'repository.accountingfirm': {
'Meta': {'object_name': 'AccountingFirm'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name_cn': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'short_name_cn': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'short_name_en': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
},
u'repository.investmentcompany': {
'Meta': {'object_name': 'InvestmentCompany'},
'address_cn': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'address_en': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'capital_type': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['area.City']", 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['area.Country']", 'null': 'True', 'blank': 'True'}),
'found_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'intro_cn': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'intro_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name_cn': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'province': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['area.Province']", 'null': 'True', 'blank': 'True'}),
'short_name_cn': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'short_name_en': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'stock_exchange': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['repository.StockExchange']", 'null': 'True', 'blank': 'True'}),
'stock_symbol': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'stock_symbol_no_pre': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'repository.investmenthistory': {
'Meta': {'object_name': 'InvestmentHistory'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['repository.InvestmentCompany']"}),
'content': ('django.db.models.fields.TextField', [], {}),
'happen_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'repository.keyword': {
'Meta': {'object_name': 'Keyword'},
'add_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'count_preference': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'count_project': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_delete': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True'}),
'update_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now': 'True', 'blank': 'True'})
},
u'repository.listedcompany': {
'Meta': {'object_name': 'ListedCompany'},
'address_cn': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'address_en': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['area.City']", 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['area.Country']", 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'found_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'industry': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['industry.Industry']", 'null': 'True', 'blank': 'True'}),
'intro_cn': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'intro_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'name_cn': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'province': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['area.Province']", 'null': 'True', 'blank': 'True'}),
'short_name_cn': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'short_name_en': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'stock_exchange': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['repository.StockExchange']"}),
'stock_name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'stock_symbol': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'stock_symbol_no_pre': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'tel': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'repository.stockexchange': {
'Meta': {'object_name': 'StockExchange'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name_cn': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'short_name_cn': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'short_name_en': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'})
}
}
complete_apps = ['repository'] | [
"[email protected]"
] | |
151f7484ec7c130b4403dd08c90830b52368805d | 4202a7c678e0ec25ab2065c4c2804b0296f94480 | /MOMI_FCS2/momi_priorset_8_stoch_asym.py | 9d14d73e818b29893f66b4c9d5195a512cecadac | [] | no_license | kaiyaprovost/whole_genome_pipeline | f1c479536560c5b8c68fe3a5ba0917140fbb0793 | 8e605d855c9f0cd6e11e1b73a97260e0d4aa3fae | refs/heads/master | 2023-04-22T20:51:01.344297 | 2023-04-06T19:12:11 | 2023-04-06T19:12:11 | 237,044,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,387 | py | import momi ## momi2 analysis
import logging ## create log file
import numpy as np
import datetime
print("-----\n-----\n-----")
now = datetime.datetime.now()
print("start logging\n")
logging.basicConfig(level=logging.INFO,
filename="momi_log_priorset_8_stoch_asym.txt")
print("load sfs\n")
sfspath = "/home/kprovost/nas1/momi2/cardcard16_sfs_filtered_changelength_monomorphic.txt"
## this is a two-population sfs with monomorphic sites included in "length"
sfs = momi.Sfs.load(sfspath)
print("Avg pairwise heterozygosity", sfs.avg_pairwise_hets[:5])
print("populations", sfs.populations)
print("percent missing data per population", sfs.p_missing)
## set up two-population model with parameters
## use Pure Isolation model as base for all of the models
## because only looking at this particular split, elected to change ranges to get
print("\nPRIORS")
print("MUT RATE: 2.21e-9")
mutrate=2.21e-9
print("GEN TIME: 1")
gentime=1
print("ANCESTRAL NE: 300,000")
ancne=300000
print("DIV TIME RANGE: 500,000 to 1,000,000")
divtimelow=500000
divtimehigh=1000000
print("NE RANGE: 500,000 to 2,000,000")
nelow=500000
nehigh=2000000
print("MIGRATION RANGE: 0 to 0.1")
migratelow=0
migratehigh=0.1
print("MIGRATION DATE RANGE: 25,000 to 250,000\n\n")
migtimelow=25000
migtimehigh=250000
print("begin setting up models\n")
##### PURE ISOLATION MODEL #####
print("\nPure Isolation model (base model)")
pure_isolation_model = momi.DemographicModel(N_e=ancne,muts_per_gen=mutrate,gen_time=gentime) ## N_e sets the model's reference/ancestral effective size
pure_isolation_model.set_data(sfs)
## set up divergence times
pure_isolation_model.add_time_param("tdiv_sc",lower=divtimelow,upper=divtimehigh)
## set up effective population size
pure_isolation_model.add_size_param("ne_s",lower=nelow,upper=nehigh) ## this is from Brian's paper on cardinals
pure_isolation_model.add_size_param("ne_c",lower=nelow,upper=nehigh)
## set up populations and phylogeny
pure_isolation_model.add_leaf("Son",N="ne_s")
pure_isolation_model.add_leaf("Chi",N="ne_c")
pure_isolation_model.move_lineages("Son", "Chi", t="tdiv_sc")
## randomize parameters and check them
#pure_isolation_model.set_params(randomize=True)
#print(pure_isolation_model.get_params())
## set up the rest of the models
##### ASYMMETRIC MIGRATION #####
print("\nAsymmetric model (c2s as base)")
asym_model = pure_isolation_model.copy() ## copy isol
asym_model.add_pulse_param("mig_s2c",lower=migratelow,upper=migratehigh)
asym_model.add_pulse_param("mig_c2s",lower=migratelow,upper=migratehigh)
asym_model.add_time_param("tmig_asym",lower=migtimelow,upper=migtimehigh,upper_constraints=["tdiv_sc"])
asym_model.move_lineages("Chi","Son",t="tmig_asym",p="mig_s2c")
asym_model.move_lineages("Son","Chi",t="tmig_asym",p="mig_c2s")
## randomize and check parameters
asym_model.set_params(randomize=True)
print(asym_model.get_params())
## optimize each model once
print("#####")
models = [asym_model]
model_names = ["ASYM"]
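## only the asymmetric-migration model is fit in this run; the AIC block below compares its stochastic fit with the full L-BFGS-B fit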
AICs = []
count = 0
for model in models:
now = datetime.datetime.now()
name = str(model_names[count])
print("Stochastic optimizing "+name+" model: "+str(now))
model.stochastic_optimize(num_iters=10, n_minibatches=5, save_to_checkpoint="momi_checkpoint_priorset8_stoch_asym.txt", svrg_epoch=-1)
now = datetime.datetime.now()
print("Finished stochastic optimizing "+name+": "+str(now))
print(model.get_params())
print("Starting AIC likelihood for stochastic "+name)
lik = model.log_likelihood()
nparams = len(model.get_params())
aic = 2*nparams - 2*lik
print("AIC {}".format(aic))
AICs.append(aic)
count += 1
print("-----")
count = 0
for model in models:
now = datetime.datetime.now()
name = str(model_names[count]) ## re-derive the name here; without this the loop reused the stale value left over from the previous loop
print("Fully optimizing "+name+" model: "+str(now))
model.optimize(method="L-BFGS-B")
now = datetime.datetime.now()
print("Finished fully optimizing "+name+": "+str(now))
print(model.get_params())
print("Starting AIC likelihood for full "+name)
lik = model.log_likelihood()
nparams = len(model.get_params())
aic = 2*nparams - 2*lik
print("AIC {}".format(aic))
AICs.append(aic)
count += 1
print("-----")
minv = np.min(AICs)
delta_aic = np.array(AICs) - minv
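## exp(-0.5*delta_AIC) is each fit's relative likelihood; divide by the sum to get normalized Akaike weights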
print("Delta AIC per model: ", delta_aic)
print("AIC weight per model: ", np.exp(-0.5 * delta_aic))
## TODO: add searching multiple times
## TODO: add bootstrapping
| [
"[email protected]"
] | |
7eee6af617335a2d6c8c407680e67b2cc2e81dea | 55f6a9b8f90ae308a90739fd8f77f4e7cd10ff19 | /spacy/tests/lang/sk/test_tokenizer.py | 247847284ad16613d04423f16cb12d3f7d98d573 | [
"MIT"
] | permissive | explosion/spaCy | cce07ee403aa398de7ba8941a2c11d22aea68021 | 3e4264899c3b12f8eabc5cd700146177a34824d0 | refs/heads/master | 2023-08-31T07:18:13.598768 | 2023-08-30T09:58:14 | 2023-08-30T09:58:14 | 21,467,110 | 26,348 | 4,983 | MIT | 2023-09-13T17:56:22 | 2014-07-03T15:15:40 | Python | UTF-8 | Python | false | false | 453 | py | import pytest
SK_BASIC_TOKENIZATION_TESTS = [
(
"Kedy sa narodil Andrej Kiska?",
["Kedy", "sa", "narodil", "Andrej", "Kiska", "?"],
),
]
@pytest.mark.parametrize("text,expected_tokens", SK_BASIC_TOKENIZATION_TESTS)
def test_sk_tokenizer_basic(sk_tokenizer, text, expected_tokens):
tokens = sk_tokenizer(text)
token_list = [token.text for token in tokens if not token.is_space]
assert expected_tokens == token_list
| [
"[email protected]"
] | |
f16f5035356d1ce85a7038be04e25800a6524e58 | 12d05a7f65e6ca8ffa701670ed1bec209af77a51 | /capsule_biblosa/models/biblosa/nn_utils/integration_func.py | aa4c5160c77aa2c81ae964a4097d76df63f6a1d5 | [] | no_license | Bobby-Han/text-classification | ec8015e6bb438fceb8d7b61117519e2d6469c57d | 2fa6d3ed4f3b9288ff7fb385c9cced44daf522ca | refs/heads/master | 2023-02-24T13:27:36.761439 | 2021-02-01T06:27:53 | 2021-02-01T06:27:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,921 | py | # coding: utf-8
from models.biblosa.nn_utils.general import exp_mask_for_high_rank, mask_for_high_rank
from models.biblosa.nn_utils.nn import linear, get_logits, softsel, dropout, bn_dense_layer
from models.biblosa.nn_utils.rnn_cell import SwitchableDropoutWrapper
from models.biblosa.nn_utils.rnn import bidirectional_dynamic_rnn
import tensorflow as tf
from models.biblosa.nn_utils.general import get_last_state, add_reg_without_bias
def traditional_attention(rep_tensor, rep_mask, scope=None,
keep_prob=1., is_train=None, wd=0., activation='elu',
tensor_dict=None, name=None):
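"""Plain self-attention pooling (summary inferred from the code): each timestep
of rep_tensor [bs, sl, vec] gets a single masked logit, and the softmax-weighted
sum over the sequence is returned with shape [bs, vec]."""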
bs, sl, vec = tf.shape(rep_tensor)[0], tf.shape(rep_tensor)[1], tf.shape(rep_tensor)[2]
ivec = rep_tensor.get_shape()[2]
with tf.variable_scope(scope or 'traditional_attention'):
rep_tensor_map = bn_dense_layer(rep_tensor, ivec, True, 0., 'bn_dense_map', activation,
False, wd, keep_prob, is_train)
rep_tensor_logits = get_logits([rep_tensor_map], None, False, scope='self_attn_logits',
mask=rep_mask, input_keep_prob=keep_prob, is_train=is_train) # bs,sl
attn_res = softsel(rep_tensor, rep_tensor_logits, rep_mask) # bs,vec
# save attn
if tensor_dict is not None and name is not None:
tensor_dict[name] = tf.nn.softmax(rep_tensor_logits)
return attn_res
def multi_dimensional_attention(rep_tensor, rep_mask, scope=None,
keep_prob=1., is_train=None, wd=0., activation='elu',
tensor_dict=None, name=None):
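"""Feature-wise ('multi-dimensional') attention (summary inferred from the code):
a separate softmax over timesteps is computed for every channel, so each of the
vec output features is pooled with its own attention distribution -> [bs, vec]."""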
bs, sl, vec = tf.shape(rep_tensor)[0], tf.shape(rep_tensor)[1], tf.shape(rep_tensor)[2]
ivec = rep_tensor.get_shape()[2]
with tf.variable_scope(scope or 'multi_dimensional_attention'):
map1 = bn_dense_layer(rep_tensor, ivec, True, 0., 'bn_dense_map1', activation,
False, wd, keep_prob, is_train)
map2 = bn_dense_layer(map1, ivec, True, 0., 'bn_dense_map2', 'linear',
False, wd, keep_prob, is_train)
map2_masked = exp_mask_for_high_rank(map2, rep_mask)
soft = tf.nn.softmax(map2_masked, 1) # bs,sl,vec
attn_output = tf.reduce_sum(soft * rep_tensor, 1) # bs, vec
# save attn
if tensor_dict is not None and name is not None:
tensor_dict[name] = soft
return attn_output
def directional_attention_with_dense(rep_tensor, rep_mask, direction=None, scope=None,
keep_prob=1., is_train=None, wd=0., activation='elu',
tensor_dict=None, name=None):
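"""Masked token2token self-attention (summary inferred from the code): a
forward/backward/diagonal mask limits which positions may attend to which,
scaled-tanh logits score every (head, dependent) pair per channel, and a
learned fusion gate blends the attended summary back into rep_map."""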
def scaled_tanh(x, scale=5.):
return scale * tf.nn.tanh(1./scale * x)
bs, sl, vec = tf.shape(rep_tensor)[0], tf.shape(rep_tensor)[1], tf.shape(rep_tensor)[2]
ivec = rep_tensor.get_shape()[2]
with tf.variable_scope(scope or 'directional_attention_%s' % direction or 'diag'):
# mask generation
sl_indices = tf.range(sl, dtype=tf.int32)
sl_col, sl_row = tf.meshgrid(sl_indices, sl_indices)
if direction is None:
direct_mask = tf.cast(tf.diag(- tf.ones([sl], tf.int32)) + 1, tf.bool)
else:
if direction == 'forward':
direct_mask = tf.greater(sl_row, sl_col)
else:
direct_mask = tf.greater(sl_col, sl_row)
direct_mask_tile = tf.tile(tf.expand_dims(direct_mask, 0), [bs, 1, 1]) # bs,sl,sl
rep_mask_tile = tf.tile(tf.expand_dims(rep_mask, 1), [1, sl, 1]) # bs,sl,sl
attn_mask = tf.logical_and(direct_mask_tile, rep_mask_tile) # bs,sl,sl
# non-linear
rep_map = bn_dense_layer(rep_tensor, ivec, True, 0., 'bn_dense_map', activation,
False, wd, keep_prob, is_train)
rep_map_tile = tf.tile(tf.expand_dims(rep_map, 1), [1, sl, 1, 1]) # bs,sl,sl,vec
rep_map_dp = dropout(rep_map, keep_prob, is_train)
# attention
with tf.variable_scope('attention'): # bs,sl,sl,vec
f_bias = tf.get_variable('f_bias',[ivec], tf.float32, tf.constant_initializer(0.)) # float32: a float64 bias cannot be added to the float32 maps below
dependent = linear(rep_map_dp, ivec, False, scope='linear_dependent') # bs,sl,vec
dependent_etd = tf.expand_dims(dependent, 1) # bs,1,sl,vec
head = linear(rep_map_dp, ivec, False, scope='linear_head') # bs,sl,vec
head_etd = tf.expand_dims(head, 2) # bs,sl,1,vec
logits = scaled_tanh(dependent_etd + head_etd + f_bias, 5.0) # bs,sl,sl,vec
logits_masked = exp_mask_for_high_rank(logits, attn_mask)
attn_score = tf.nn.softmax(logits_masked, 2) # bs,sl,sl,vec
attn_score = mask_for_high_rank(attn_score, attn_mask)
attn_result = tf.reduce_sum(attn_score * rep_map_tile, 2) # bs,sl,vec
with tf.variable_scope('output'):
o_bias = tf.get_variable('o_bias',[ivec], tf.float32, tf.constant_initializer(0.)) # float32 to match rep_map and attn_result
# input gate
fusion_gate = tf.nn.sigmoid(
linear(rep_map, ivec, True, 0., 'linear_fusion_i', False, wd, keep_prob, is_train) +
linear(attn_result, ivec, True, 0., 'linear_fusion_a', False, wd, keep_prob, is_train) +
o_bias)
output = fusion_gate * rep_map + (1-fusion_gate) * attn_result
output = mask_for_high_rank(output, rep_mask)
# save attn
if tensor_dict is not None and name is not None:
tensor_dict[name + '_dependent'] = dependent
tensor_dict[name + '_head'] = head
tensor_dict[name] = attn_score
tensor_dict[name + '_gate'] = fusion_gate
return output
# -------------- rnn --------------
def contextual_bi_rnn(tensor_rep, mask_rep, hn, cell_type, only_final=False,
wd=0., keep_prob=1.,is_train=None, scope=None):
"""
fusing contextual information using bi-direction rnn
:param tensor_rep: [..., sl, vec]
:param mask_rep: [..., sl]
:param hn:
:param cell_type: 'gru', 'lstm', basic_lstm' and 'basic_rnn'
:param only_final: True or False
:param wd:
:param keep_prob:
:param is_train:
:param scope:
:return:
"""
with tf.variable_scope(scope or 'contextual_bi_rnn'): # correct
reuse = None if not tf.get_variable_scope().reuse else True
#print(reuse)
if cell_type == 'gru':
cell_fw = tf.contrib.rnn.GRUCell(hn, reuse=reuse)
cell_bw = tf.contrib.rnn.GRUCell(hn, reuse=reuse)
elif cell_type == 'lstm':
cell_fw = tf.contrib.rnn.LSTMCell(hn, reuse=reuse)
cell_bw = tf.contrib.rnn.LSTMCell(hn, reuse=reuse)
elif cell_type == 'basic_lstm':
cell_fw = tf.contrib.rnn.BasicLSTMCell(hn, reuse=reuse)
cell_bw = tf.contrib.rnn.BasicLSTMCell(hn, reuse=reuse)
elif cell_type == 'basic_rnn':
cell_fw = tf.contrib.rnn.BasicRNNCell(hn, reuse=reuse)
cell_bw = tf.contrib.rnn.BasicRNNCell(hn, reuse=reuse)
else:
raise AttributeError('no cell type \'%s\'' % cell_type)
cell_dp_fw = SwitchableDropoutWrapper(cell_fw,is_train,keep_prob)
cell_dp_bw = SwitchableDropoutWrapper(cell_bw,is_train,keep_prob)
tensor_len = tf.reduce_sum(tf.cast(mask_rep, tf.int32), -1) # [bs]
(outputs_fw, output_bw), _=bidirectional_dynamic_rnn(
cell_dp_fw, cell_dp_bw, tensor_rep, tensor_len,
dtype=tf.float32)
rnn_outputs = tf.concat([outputs_fw,output_bw],-1) # [...,sl,2hn]
if wd > 0:
add_reg_without_bias()
if not only_final:
return rnn_outputs # [....,sl, 2hn]
else:
return get_last_state(rnn_outputs, mask_rep) # [...., 2hn]
# -------------- emb mat--------------
def generate_embedding_mat(dict_size, emb_len, init_mat=None, extra_mat=None,
extra_trainable=False, scope=None):
"""
generate embedding matrix for looking up
:param dict_size: indices 0 and 1 corresponding to empty and unknown token
:param emb_len:
:param init_mat: init mat matching for [dict_size, emb_len]
:param extra_mat: extra tensor [extra_dict_size, emb_len]
:param extra_trainable:
:param scope:
:return: if extra_mat is None, return[dict_size+extra_dict_size,emb_len], else [dict_size,emb_len]
"""
with tf.variable_scope(scope or 'gene_emb_mat'):
emb_mat_ept_and_unk = tf.constant(value=0, dtype=tf.float32, shape=[2, emb_len])
if init_mat is None:
emb_mat_other = tf.get_variable('emb_mat',[dict_size - 2, emb_len], tf.float32)
else:
emb_mat_other = tf.get_variable("emb_mat",[dict_size - 2, emb_len], tf.float32,
initializer=tf.constant_initializer(init_mat[2:], dtype=tf.float32,
verify_shape=True))
emb_mat = tf.concat([emb_mat_ept_and_unk, emb_mat_other], 0)
if extra_mat is not None:
if extra_trainable:
extra_mat_var = tf.get_variable("extra_emb_mat",extra_mat.shape, tf.float32,
initializer=tf.constant_initializer(extra_mat,
dtype=tf.float32,
verify_shape=True))
return tf.concat([emb_mat, extra_mat_var], 0)
else:
#with tf.device('/cpu:0'):
extra_mat_con = tf.constant(extra_mat, dtype=tf.float32)
return tf.concat([emb_mat, extra_mat_con], 0)
else:
return emb_mat
| [
"[email protected]"
] | |
92b74d7f6090a1d0b0e6e8984b12d6284c13a25c | 2063a057c3ff7b78d93e7bcc041e65eaa05bd86d | /pyglet_utils/platformer/player.py | efcec024a90d637cc3827c79f01b82d2cf7ee8d0 | [] | no_license | cm107/pyglet_utils | c45025237bc21bec6f8e0f0d6007428ed6cde559 | c20dc6a7658a1f7de4784c3b6ab69bffe9f33a79 | refs/heads/master | 2022-11-19T20:07:21.326211 | 2020-07-24T15:09:14 | 2020-07-24T15:09:14 | 262,519,884 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,291 | py | from typing import List, cast
from .resources import PlayerImages
from .game_obj import GameObject
from .frame import Frame
from .render import RenderBox
from .grid import Grid, GridObject
from ..lib.exception_handler import Error
from pyglet.graphics import Batch
from pyglet.shapes import Circle, Rectangle
from common_utils.check_utils import check_value
class ArrowKeyBuffer:
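"""Press-ordered record of held left/right arrow keys; the newest entry
(buffer[-1]) decides which way the player resumes walking after landing."""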
def __init__(self, buffer: List[str]=None):
self.buffer = buffer if buffer is not None else []
self._valid_directions = ['left', 'right']
def press(self, direction: str):
check_value(direction, valid_value_list=self._valid_directions)
self.buffer.append(direction)
def release(self, direction: str):
check_value(direction, valid_value_list=self._valid_directions)
while direction in self.buffer:
idx = self.buffer.index(direction)
del self.buffer[idx]
@property
def is_pressed(self) -> bool:
return len(self.buffer) > 0
@property
def is_released(self) -> bool:
return len(self.buffer) == 0
class Player(GameObject):
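"""Controllable platformer character: tracks facing/walking/jumping state,
swaps sprites to match, and resolves grid collisions inside move()."""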
def __init__(self, x: int, y: int, frame: Frame, grid: Grid, renderbox: RenderBox, name: str='Player1', batch: Batch=None, debug: bool=False):
# Player Sprite Select Related
self.player_res_list = [PlayerImages.p1, PlayerImages.p2, PlayerImages.p3]
self.player_select = 0
self.player_res = self.player_res_list[self.player_select]
# Initialize Base Class
super().__init__(
x=x, y=y, res=self.player_res.jump_right, frame=frame, grid=grid, renderbox=renderbox, name=name,
batch=batch, usage='dynamic',
is_anchor_x_centered=True
)
# Movement Related
self.BASE_WALKING_SPEED = 200
self.BASE_JUMPING_SPEED = 300
self.vx = 0.0
self.vy = 0.0
self.facing = 'right'
self.status = 'jumping'
self.arrow_key_buffer = ArrowKeyBuffer()
# Grid Related
self.up_contact_obj_list = cast(List[GridObject], [])
self.down_contact_obj_list = cast(List[GridObject], [])
self.left_contact_obj_list = cast(List[GridObject], [])
self.right_contact_obj_list = cast(List[GridObject], [])
# Debug
self.debug = debug
self.ref_point = Circle(x=self.camera_x, y=self.camera_y, radius=5, color=(255,0,0))
self.ref_rect = Rectangle(x=self.camera_x, y=self.camera_y, width=self.width, height=self.height, color=(0,0,255))
self.ref_rect.anchor_x = self.ref_rect.width // 2
@property
def x(self) -> int:
return super().x
@x.setter
def x(self, x: int):
GameObject.x.fset(self, x) # 'super().x = x' raises AttributeError; invoke the parent property's setter explicitly
if self.debug:
self.ref_point.x = self.camera_x
self.ref_rect.x = self.camera_x
@property
def y(self) -> int:
return super().y
@y.setter
def y(self, y: int):
GameObject.y.fset(self, y) # mirror of the x setter fix above
if self.debug:
self.ref_point.y = self.camera_y
self.ref_rect.y = self.camera_y
def change_player(self, idx: int):
self.player_res = self.player_res_list[idx]
self.player_select = idx
self.update_sprite()
def toggle_player(self):
self.change_player((self.player_select+1) % len(self.player_res_list))
def toggle_debug(self):
self.debug = not self.debug
self.grid.show_contacts = not self.grid.show_contacts
def change_sprite(self, image):
self.sprite.image = image
if self.debug:
self.ref_rect.width = self.sprite.width
self.ref_rect.height = self.sprite.height
self.ref_rect.anchor_x = self.ref_rect.width // 2
def update_sprite(self):
if self.status == 'idle':
if self.facing == 'right':
self.change_sprite(self.player_res.idle_right.img)
elif self.facing == 'left':
self.change_sprite(self.player_res.idle_left.img)
else:
raise Exception
elif self.status == 'jumping':
if self.facing == 'right':
self.change_sprite(self.player_res.jump_right.img)
elif self.facing == 'left':
self.change_sprite(self.player_res.jump_left.img)
else:
raise Exception
elif self.status == 'walking':
if self.facing == 'right':
self.change_sprite(self.player_res.walk_right_anim.animation)
elif self.facing == 'left':
self.change_sprite(self.player_res.walk_left_anim.animation)
else:
raise Exception
else:
raise Exception
@property
def is_idle(self) -> bool:
return self.status == 'idle'
@property
def is_walking(self) -> bool:
return self.status == 'walking'
@property
def is_jumping(self) -> bool:
return self.status == 'jumping'
def face(self, direction: str):
if direction == 'right':
self.facing = 'right'
self.update_sprite()
elif direction == 'left':
self.facing = 'left'
self.update_sprite()
else:
raise Exception
def start_walking(self, direction: str):
if direction == 'right':
self.vx = self.BASE_WALKING_SPEED
self.facing = 'right'
self.status = 'walking'
self.update_sprite()
elif direction == 'left':
self.vx = -self.BASE_WALKING_SPEED
self.facing = 'left'
self.status = 'walking'
self.update_sprite()
else:
raise Exception
def stop_walking(self):
self.status = 'idle'
self.vx = 0
self.update_sprite()
def start_jumping(self):
self.status = 'jumping'
self.vy = self.BASE_JUMPING_SPEED
self.update_sprite()
def start_falling(self):
self.status = 'jumping'
self.update_sprite()
def stop_jumping(self):
self.status = 'idle'
self.vy = 0
self.update_sprite()
def draw(self):
if self.debug:
self.ref_point.draw()
self.ref_rect.draw()
super().draw()
def move(self, dx: int, dy: int):
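# Axis-separated collision handling (summary of the logic below): propose the X
# move, test the player's would-be grid cells against every other rendered
# object, snap flush against any contact, then repeat for Y; the grid is
# shifted by the opposite delta so the camera-fixed player stays on screen.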
player_grid_obj = self.grid.contained_obj_list.get_obj_from_name(self.name)
other_renderable_objects = self.renderbox.get_all_renderable_objects(exclude_names=[self.name])
other_renderable_names = [other_renderable_object.name for other_renderable_object in other_renderable_objects]
other_grid_objects = self.grid.contained_obj_list.get_objects(include_names=other_renderable_names)
all_grid_objects = self.grid.contained_obj_list.get_objects()
self.up_contact_obj_list = []
self.down_contact_obj_list = []
self.left_contact_obj_list = []
self.right_contact_obj_list = []
self.grid.reset_contacts()
# Move X
proposed_player_occupied_spaces = player_grid_obj.get_occupied_spaces(dx=dx)
collision = False
for proposed_player_occupied_space in proposed_player_occupied_spaces:
for other_grid_object in other_grid_objects:
if proposed_player_occupied_space in other_grid_object.occupied_spaces:
other_grid_object.is_in_contact = True
if dx > 0:
self.right_contact_obj_list.append(other_grid_object)
elif dx < 0:
self.left_contact_obj_list.append(other_grid_object)
else:
raise Error(f'Player got stuck in object in x direction.')
collision = True
if not collision:
self.set_x(x=self.x+dx, fix_camera=True)
self.grid.move(dx=-dx)
else:
if len(self.left_contact_obj_list) > 0:
dx_adjustment = self.left_contact_obj_list[0].x_right + 1 - self.x_left
self.set_x(x=self.x+dx_adjustment, fix_camera=True)
self.grid.move(dx=-dx_adjustment)
elif len(self.right_contact_obj_list) > 0:
dx_adjustment = self.right_contact_obj_list[0].x_left - self.x_right
self.set_x(x=self.x+dx_adjustment, fix_camera=True)
self.grid.move(dx=-dx_adjustment)
else:
raise Exception
# Move Y
proposed_player_occupied_spaces = player_grid_obj.get_occupied_spaces(dy=dy)
collision = False
for proposed_player_occupied_space in proposed_player_occupied_spaces:
for other_grid_object in other_grid_objects:
if proposed_player_occupied_space in other_grid_object.occupied_spaces:
other_grid_object.is_in_contact = True
if dy > 0:
self.up_contact_obj_list.append(other_grid_object)
elif dy < 0:
self.down_contact_obj_list.append(other_grid_object)
else:
raise Error(f'Player got stuck in object in y direction.')
collision = True
if not collision:
self.set_y(y=self.y+dy, fix_camera=True)
self.grid.move(dy=-dy)
self.start_falling()
else:
self.vy = 0
if len(self.down_contact_obj_list) > 0:
dy_adjustment = self.down_contact_obj_list[0].y_top - self.y_bottom
self.set_y(y=self.y+dy_adjustment, fix_camera=True)
self.grid.move(dy=-dy_adjustment)
if self.is_jumping:
self.stop_jumping()
if self.arrow_key_buffer.is_pressed:
self.start_walking(direction=self.arrow_key_buffer.buffer[-1])
else:
self.vx = 0
elif len(self.up_contact_obj_list) > 0:
dy_adjustment = self.up_contact_obj_list[0].y_bottom - self.y_top
self.set_y(y=self.y+dy_adjustment, fix_camera=True)
self.grid.move(dy=-dy_adjustment)
else:
raise Exception | [
"[email protected]"
] | |
25a66089f805038eee267a2ad15f97dabe903290 | ccdbe6e17022aae05e6bee60b37fd92a0b44a6d8 | /python/kivyapp/clock.py | b938a8ed87cc8a63c2595f40e017430675c55b3f | [
"Apache-2.0"
] | permissive | bdastur/notes | e580c45ef38abd2b104dce3ec6898031e9c79f27 | 74341d8de88c8817c557af02c6e8bd470e56151f | refs/heads/master | 2023-08-15T19:59:54.631621 | 2023-07-24T15:27:08 | 2023-07-24T15:27:08 | 92,065,482 | 4 | 1 | Apache-2.0 | 2023-08-27T19:00:14 | 2017-05-22T14:52:34 | JavaScript | UTF-8 | Python | false | false | 958 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from kivy.app import App
from kivy.uix.label import Label
from kivy.clock import Clock
from kivy.uix.gridlayout import GridLayout
class MyClock(GridLayout):
def __init__(self, **kwargs):
super(MyClock, self).__init__(**kwargs)
self.cols = 1
self.myLabel = Label(text="Clock: ")
self.add_widget(self.myLabel)
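# one-shot refresh 3 seconds after construction; ClockApp.build() adds a repeating 5-second update on top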
Clock.schedule_once(self.clockCallback, 3)
def clockCallback(self, dt): # Kivy passes the elapsed time (dt) to clock callbacks
now = datetime.datetime.now()
print("Now: ", datetime.datetime.strftime(now, "%Y-%m-%d %H:%M:%S"))
self.myLabel.text = "Clock: %s" % datetime.datetime.strftime(now, "%Y-%m-%d %H:%M:%S")
class ClockApp(App):
def build(self):
myclock = MyClock()
Clock.schedule_interval(myclock.clockCallback, 5)
return myclock
def main():
ClockApp().run()
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
2adcff104db81c7a2defe60c9f677882631fe561 | 88ae8695987ada722184307301e221e1ba3cc2fa | /chrome/browser/ash/arc/PRESUBMIT.py | 9b7c9a6ae4bf8c95ebee88233e1d5518e78f4fe7 | [
"BSD-3-Clause"
] | permissive | iridium-browser/iridium-browser | 71d9c5ff76e014e6900b825f67389ab0ccd01329 | 5ee297f53dc7f8e70183031cff62f37b0f19d25f | refs/heads/master | 2023-08-03T16:44:16.844552 | 2023-07-20T15:17:00 | 2023-07-23T16:09:30 | 220,016,632 | 341 | 40 | BSD-3-Clause | 2021-08-13T13:54:45 | 2019-11-06T14:32:31 | null | UTF-8 | Python | false | false | 535 | py | # Copyright 2017 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
USE_PYTHON3 = True
def CheckChangeOnUpload(input_api, output_api):
# Apply the same PRESUBMIT for components/arc.
presubmit_path = (
input_api.change.RepositoryRoot() + '/components/arc/PRESUBMIT.py')
presubmit_content = input_api.ReadFile(presubmit_path)
global_vars = {}
exec(presubmit_content, global_vars)
return global_vars['CheckChangeOnUpload'](input_api, output_api)
| [
"[email protected]"
] | |
c92082561955960009d752e4ffe94a930beeedb3 | eeaf323a92254190195ecbb61c03171aae8f28ee | /accounts/migrations/0002_usercompanies_company_name.py | decf6fe96a53b78ea09b7794c2145c537e0264d5 | [] | no_license | abdullakn/job-portal1 | 44295db5c1169494454fa407ad5716f119e6017b | c5d299acae4262eb9d02317f3358aaa6d4314b13 | refs/heads/master | 2023-07-18T07:45:54.610627 | 2021-09-04T11:37:36 | 2021-09-04T11:37:36 | 403,040,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | # Generated by Django 3.2.5 on 2021-07-21 18:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='usercompanies',
name='company_name',
field=models.CharField(blank=True, max_length=200, null=True),
),
]
| [
"[email protected]"
] | |
7b7d347149ddda0cbc9c618459c6c1fa0741bc89 | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/virtual-wan/azext_vwan/vendored_sdks/v2018_08_01/v2018_08_01/aio/operations/_load_balancer_frontend_ip_configurations_operations.py | 006f819fc5baa437f0da30dd80a00996d1cbf621 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 8,921 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerFrontendIPConfigurationsOperations:
"""LoadBalancerFrontendIPConfigurationsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs
) -> AsyncIterable["_models.LoadBalancerFrontendIPConfigurationListResult"]:
"""Gets all the load balancer frontend IP configurations.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LoadBalancerFrontendIPConfigurationListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_08_01.models.LoadBalancerFrontendIPConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancerFrontendIPConfigurationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('LoadBalancerFrontendIPConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/frontendIPConfigurations'} # type: ignore
async def get(
self,
resource_group_name: str,
load_balancer_name: str,
frontend_ip_configuration_name: str,
**kwargs
) -> "_models.FrontendIPConfiguration":
"""Gets load balancer frontend IP configuration.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param frontend_ip_configuration_name: The name of the frontend IP configuration.
:type frontend_ip_configuration_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: FrontendIPConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_08_01.models.FrontendIPConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.FrontendIPConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'frontendIPConfigurationName': self._serialize.url("frontend_ip_configuration_name", frontend_ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('FrontendIPConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/frontendIPConfigurations/{frontendIPConfigurationName}'} # type: ignore
| [
"[email protected]"
] | |
821d5e38bb6bc7cd626ca8d67a825d1a4dc3d9fd | eb8b5cde971573668800146b3632e43ed6e493d2 | /python/oneflow/nn/modules/random_ops.py | fb7503bc898688f8a370e6cae07c381d2f619987 | [
"Apache-2.0"
] | permissive | big-data-ai/oneflow | 16f167f7fb7fca2ce527d6e3383c577a90829e8a | b1c67df42fb9c5ab1335008441b0273272d7128d | refs/heads/master | 2023-07-08T21:21:41.136387 | 2021-08-21T11:31:14 | 2021-08-21T11:31:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,176 | py | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import Optional, Union
import oneflow as flow
from oneflow.nn.module import Module
from oneflow.nn.modules.utils import _single
def bernoulli(input, *, generator=None, out=None):
"""This operator returns a Tensor with binaray random numbers (0 / 1) from a Bernoulli distribution.
Args:
        input (Tensor): the input tensor of probability values for the Bernoulli distribution
        generator (optional): a pseudorandom number generator for sampling
        out (Tensor, optional): the output tensor.
Shape:
- Input: :math:`(*)`. Input can be of any shape
- Output: :math:`(*)`. Output is of the same shape as input
For example:
.. code-block:: python
>>> import numpy as np
>>> import oneflow as flow
>>> arr = np.array(
... [
... [1.0, 1.0, 1.0],
... [1.0, 1.0, 1.0],
... [1.0, 1.0, 1.0],
... ]
... )
>>> x = flow.Tensor(arr)
>>> y = flow.bernoulli(x)
>>> y
tensor([[1., 1., 1.],
[1., 1., 1.],
[1., 1., 1.]], dtype=oneflow.float32)
"""
return flow.F.bernoulli(input, flow.float32, generator)
def _rand_op_common_process(
size, device=None, generator=None, placement=None, sbp=None
):
    assert size is not None, "size must not be None!"
    assert isinstance(
        size, (int, tuple, list, flow.Size)
    ), "size should be an int or a tuple/list of ints!"
if isinstance(device, str):
device = flow.device(device)
size = _single(size)
processed_sbp = sbp
if generator is None:
generator = flow.Generator()
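    # Consistent (global) tensors require an sbp list whose length matches the
    # placement hierarchy; purely local tensors must not pass sbp at all.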
if placement is not None:
assert isinstance(sbp, (flow.sbp.sbp, tuple, list)), "sbp: %s" % sbp
if isinstance(processed_sbp, flow.sbp.sbp):
processed_sbp = (processed_sbp,)
else:
for elem in sbp:
assert isinstance(elem, flow.sbp.sbp), "sbp: %s" % sbp
assert len(processed_sbp) == len(placement.hierarchy)
else:
assert sbp is None, "sbp: %s" % sbp
return size, device, generator, placement, processed_sbp
class Rand(Module):
def __init__(
self,
size,
generator=None,
dtype=None,
layout=None,
device=None,
placement=None,
sbp=None,
requires_grad=False,
) -> None:
super().__init__()
self.requires_grad = requires_grad
(
self.size,
self.device,
self.generator,
self.placement,
self.sbp,
) = _rand_op_common_process(size, device, generator, placement, sbp)
self.dtype = dtype
def forward(self):
if self.placement is not None:
res = flow.F.consistent_rand(
self.size, self.placement, self.sbp, self.dtype, self.generator
)
else:
res = flow.F.rand(self.size, self.dtype, self.device, self.generator)
res.requires_grad = self.requires_grad
return res
def rand_op(
*size,
out=None,
generator=None,
dtype: Optional[flow.dtype] = None,
layout=None,
device: Union[flow.device, str, None] = None,
placement: flow.placement = None,
sbp: flow._oneflow_internal.sbp.sbp = None,
requires_grad: bool = False
):
"""
Returns a tensor filled with random numbers from a uniform distribution on the interval [0, 1)
The shape of the tensor is defined by the variable argument ``size``.
Args:
size (int... or flow.Size): Defining the shape of the output tensor.
Can be a variable number of arguments or a collection like a list or tuple or flow.Size.
out (optional): The output tensor.
dtype (flow.dtype, optional): The desired data type of returned tensor. Default: ``flow.float32``.
layout (optional): The desired layout of returned Tensor.
        generator (flow.Generator, optional): a pseudorandom number generator for sampling
device (flow.device, optional): The desired device of returned local tensor. If None, uses the
current device.
placement (flow.placement, optional): The desired device of returned consistent tensor. If None, will
construct local tensor.
        sbp (flow.sbp, optional): The desired sbp of the returned consistent tensor. The number of sbp
            entries must match the length of the placement hierarchy.
requires_grad (bool, optional): If autograd should record operations on the returned tensor. Default: False.
For example:
.. code-block:: python
>>> import oneflow as flow
>>> x = flow.rand(3,3)
>>> x.shape
flow.Size([3, 3])
>>> x.is_consistent
False
>>> placement = flow.placement("cpu", {0: [0]})
>>> sbp = flow.sbp.broadcast
>>> x = flow.rand(3, 3, placement=placement, sbp=sbp)
>>> x.is_consistent
True
"""
assert out is None, "out not supported yet"
assert layout is None, "layout not supported yet"
if generator is None:
generator = flow.default_generator()
return Rand(size, generator, dtype, layout, device, placement, sbp, requires_grad)()
class RandN(Module):
def __init__(
self,
size,
generator=None,
dtype=None,
layout=None,
device=None,
placement=None,
sbp=None,
requires_grad=False,
) -> None:
super().__init__()
self.requires_grad = requires_grad
(
self.size,
self.device,
self.generator,
self.placement,
self.sbp,
) = _rand_op_common_process(size, device, generator, placement, sbp)
self.dtype = dtype
def forward(self):
if self.placement is not None:
res = flow.F.consistent_randn(
self.size, self.placement, self.sbp, self.dtype, self.generator
)
else:
res = flow.F.randn(self.size, self.dtype, self.device, self.generator)
res.requires_grad = self.requires_grad
return res
def randn_op(
*size,
out=None,
generator=None,
dtype: Optional[flow.dtype] = None,
layout=None,
device: Union[flow.device, str, None] = None,
placement: flow.placement = None,
sbp: flow._oneflow_internal.sbp.sbp = None,
requires_grad: bool = False
):
"""
Returns a tensor filled with random numbers from a normal distribution with mean 0 and variance 1 (also called the standard normal distribution).
The shape of the tensor is defined by the variable argument ``size``.
Args:
size (int... or flow.Size): Defining the shape of the output tensor.
Can be a variable number of arguments or a collection like a list or tuple or flow.Size.
out (optional): The output tensor.
dtype (flow.dtype, optional): The desired data type of returned tensor. Default: ``flow.float32``.
layout (optional): The desired layout of returned Tensor.
        generator (flow.Generator, optional): a pseudorandom number generator for sampling
device (flow.device, optional): The desired device of returned local tensor. If None, uses the
current device.
placement (flow.placement, optional): The desired device of returned consistent tensor. If None, will
construct local tensor.
        sbp (flow.sbp, optional): The desired sbp of the returned consistent tensor. The number of sbp
            entries must match the length of the placement hierarchy.
requires_grad (bool, optional): If autograd should record operations on the returned tensor. Default: False.
For example:
.. code-block:: python
>>> import oneflow as flow
>>> x = flow.randn(3,3)
>>> x.shape
flow.Size([3, 3])
>>> x.is_consistent
False
>>> placement = flow.placement("cpu", {0:[0]})
>>> sbp = flow.sbp.broadcast
>>> x = flow.randn(3,3,placement=placement,sbp=sbp)
>>> x.is_consistent
True
"""
assert out is None, "out not supported yet"
assert layout is None, "layout not supported yet"
if generator is None:
generator = flow.default_generator()
return RandN(
size, generator, dtype, layout, device, placement, sbp, requires_grad
)()
class Randperm(Module):
def __init__(
self,
n,
generator: flow.Generator = None,
dtype: flow.dtype = flow.int32,
layout=None,
device: Union[flow.device, str, None] = None,
placement: flow.placement = None,
sbp: flow._oneflow_internal.sbp.sbp = None,
requires_grad: bool = False,
pin_memory: bool = False,
) -> None:
super().__init__()
assert n >= 0
self.n = n
self.requires_grad = requires_grad
(
self.size,
self.device,
self.generator,
self.placement,
self.sbp,
) = _rand_op_common_process(1, device, generator, placement, sbp)
self.dtype = dtype
def forward(self, out=None):
if self.placement is not None:
res = flow.F.consistent_randperm(
self.n, self.placement, self.sbp, self.generator
)
else:
res = flow.F.randperm(self.n, self.device, self.generator)
res.requires_grad = self.requires_grad
return res.to(dtype=self.dtype)
def randperm(
n: flow.int32,
generator: flow.Generator = None,
out=None,
dtype: flow.dtype = flow.int32,
layout=None,
device: Union[flow.device, str, None] = None,
placement: flow.placement = None,
sbp: flow._oneflow_internal.sbp.sbp = None,
requires_grad: bool = False,
pin_memory: bool = False,
):
r"""
Returns a random permutation of integers from ``0`` to ``n - 1``.
Args:
n (int): the upper bound (exclusive)
Keyword args:
        generator (:class:`oneflow.Generator`, optional): a pseudorandom number generator for sampling
        out (Tensor, optional): output Tensor, not supported yet.
        dtype (:class:`oneflow.dtype`, optional): the desired data type of returned tensor.
            Default: ``oneflow.int32``.
        layout: layout is not supported yet.
        device: the desired device of returned tensor. Default: cpu.
        placement (:class:`flow.placement`, optional): the desired placement of the returned consistent tensor.
            If None, will construct a local tensor.
        sbp (:class:`flow.sbp`, optional): the desired sbp of the returned consistent tensor. The number of sbp
            entries must match the length of the placement hierarchy.
        requires_grad (bool, optional): If autograd should record operations on the returned tensor. Default: False.
        pin_memory (bool, optional): pin_memory is not supported yet.
Example:
.. code-block:: python
>>> import oneflow as flow
>>> generator = flow.Generator()
>>> generator.manual_seed(0)
>>> flow.randperm(5, generator=generator)
tensor([2, 4, 3, 0, 1], dtype=oneflow.int32)
"""
assert out is None, "out not supported yet"
assert layout is None, "layout not supported yet"
if generator is None:
generator = flow.default_generator()
return Randperm(
n, generator, dtype, layout, device, placement, sbp, requires_grad, pin_memory
)(out)
if __name__ == "__main__":
import doctest
doctest.testmod(raise_on_error=True)
| [
"[email protected]"
] | |
16bd10b11e4d259ea0624fbef79cfa7bedb25b02 | 78c4ccb183a99ebaabcdc3a3a69f029e4aee0f5c | /AlgorithmStudy/백준/무지성 랜덤풀이/9월/9.19/1057 토너먼트.py | 3ebf3c81fb50fc00a0a769e5763627af7126e159 | [] | no_license | cladren123/study | ef2c45bc489fa658dbc9360fb0b0de53250500e5 | 241326e618f1f3bb1568d588bf6f53b78920587a | refs/heads/master | 2023-09-02T02:21:24.560967 | 2021-11-05T12:20:06 | 2021-11-05T12:20:06 | 368,753,950 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 333 | py | """
문제유형 :
수학
브루트포스
1
1 3
1 2 3 4
두 수를 2로 나눈 몫을 서로 같아질때까지 빼다보면 라운드 수가 나온다.
"""
import sys
input = sys.stdin.readline
n, p1, p2 = map(int, input().split())
count = 0
while p1 != p2 :
p1 -= p1//2
p2 -= p2//2
count += 1
print(count)
| [
"[email protected]"
] | |
0051f0b263c771d0d796d609592be3b693a8b0bf | 446571f13b3c1604cdfbcee8fdc2f956568d7c8d | /geeksforgeeks/arrays/zero_flip.py | dfa147be96fa075eaaa1e5a24d01659937c19f41 | [] | no_license | ssarangi/algorithms | 2e8f0a4be6bf0f4a3d75b224ed993e1fb0ca0229 | e151307f2706214cf8cefa6231290aeb2e5cfd82 | refs/heads/master | 2021-01-17T02:28:23.291588 | 2018-01-06T18:35:13 | 2018-01-06T18:35:13 | 51,458,833 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,125 | py | def flip_zeros(arr, m):
    # Sliding window: find the longest stretch of `arr` containing at most
    # `m` zeros (those zeros are the ones flipped to ones).
    # Returns (best window length, indices of the zeros flipped inside it).
    left = 0
    zero_positions = []  # indices of the zeros inside the current window
    max_till_now = 0
    best_positions = None
    for right, value in enumerate(arr):
        if value == 0:
            zero_positions.append(right)
        if len(zero_positions) > m:
            # Too many zeros: slide `left` just past the oldest zero.
            left = zero_positions.pop(0) + 1
        if right - left + 1 > max_till_now:
            max_till_now = right - left + 1
            best_positions = list(zero_positions)
    return max_till_now, best_positions
def main():
arr = [1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1]
m = 2
print(flip_zeros(arr, m))
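    # expected: (8, [5, 7])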
arr = [1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1]
m = 1
print(flip_zeros(arr, m))
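    # expected: (5, [7])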
arr = [0, 0, 0, 1]
m = 4
print(flip_zeros(arr, m))
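    # expected: (4, [0, 1, 2])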
if __name__ == "__main__":
main() | [
"[email protected]"
] | |
6ca7f5d24c91b078fc989d351c41a011332afca9 | 099deeb2c308bdc00a2c423743e4b2aacdac866c | /week7/tuple/youngyun.py | 890eaa5146674579d96aa1f65f6d9eff1b6cb958 | [] | no_license | Joonsun-Hwang/coding-test-study-lamda | 76fed2f18a3220f6731775984425dff49b4379eb | 0632ec9dd60024203ed10ebeab07aa7da4782806 | refs/heads/main | 2023-05-01T21:31:48.174944 | 2021-05-05T09:48:23 | 2021-05-05T09:48:23 | 329,205,708 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 317 | py | import re
def solution(s):
s = eval(s[1:-1])
s = list(s)
if len(s) > 1:
s.sort(key=lambda x: len(x))
else:
return list(s)
answer = []
for sg in s:
for ssg in sg:
if ssg not in answer:
answer.append(ssg)
return answer
| [
"[email protected]"
] | |
11b31f35dc668112d0124a59dd723b8cb872acea | 535a4d3c3f57f5f67d36be3d7d54fdbf9fc30a92 | /やってみよう_必修編/chapter09/9_10_imported_restaurant.py | 5ef1486dd7e1b597ed37100f39228f9f6ac49553 | [] | no_license | kyuugi/saitan-python | 4d28c6ecacb4d0b846292ab94f54814dde1cbab0 | 8196a91a9aac5011bc29782381b93f143b0ae25a | refs/heads/master | 2022-12-06T05:49:13.363864 | 2020-09-01T23:19:32 | 2020-09-01T23:19:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | from restaurant import Restaurant
restaurant = Restaurant('malaychan', '東南アジア料理')
restaurant.describe_restaurant()
restaurant.open_restaurant()
| [
"[email protected]"
] | |
be9c7f1407518d5042bb9f9141452c9827f5bc14 | 693c76bf548ad67232dba7951be51274a1d6e7d0 | /CodeChef/forgotten language.py | eb4eaa2796a22da44e9b55467ed31e12f3533067 | [] | no_license | Kartavya-verma/Competitive-Pratice | 7958e3034e5e766d6e1186fee2f23562fac70a9b | 8c684996410b376263082a7421d4a14a85cf624b | refs/heads/master | 2022-12-22T10:38:37.981529 | 2020-09-14T12:22:50 | 2020-09-14T12:22:50 | 295,405,048 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 288 | py | for i in range(int(input())):
n, k = map(int, input().split())
a = input().split()
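    # a holds the N forgotten-language words; flag a word YES if it occurs
    # in any of the K modern phrases read below.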
res = ['NO']*(n)
for j in range(k):
s = input().split()
s = s[1:]
for j in s:
if j in a:
res[a.index(j)] = 'YES'
print(' '.join(res)) | [
"[email protected]"
] | |
915c97efaf9ef352a967f2d1bed523eda5004a13 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02994/s580686016.py | a70a698f4cc8e26a206d2c2f0cdf4a1574d17b2c | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | n,k=map(int,input().split());a=[k+i for i in range(n)]
sumA=sum(a);min_num=float('inf');result=0
for i in a:
if min_num > abs(sumA-(sumA-i)):
min_num=abs(sumA-(sumA-i))
result=sumA-i
print(result) | [
"[email protected]"
] | |
48943f3f567ebfe249a08dc5ebe90c5e9841dd43 | d68ca034018d66f73024223d4b2266b3c3c901d7 | /prev/myworks/onelifefitness/chainxy/spiders/onelifefitness.py | 446dae58b98362a5967083b62453bedc1b6f9e84 | [] | no_license | GoodyIT/scrapy | caff30d26660f778008ad50532e364ab36aba4c2 | 5ae80cf83dc62c4e1bd2bfa11049ca39a3ca3488 | refs/heads/master | 2021-01-20T10:10:18.365194 | 2017-07-22T13:09:07 | 2017-07-22T13:09:07 | 90,330,210 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,546 | py | import scrapy
import json
import re
import csv
import requests
from scrapy.spiders import Spider
from scrapy.http import FormRequest
from scrapy.http import Request
from scrapy.selector import HtmlXPathSelector
from chainxy.items import ChainItem
import pdb
import usaddress
class Onelifefitness(scrapy.Spider):
name = "onelifefitness"
domain = "https://www.onelifefitness.com/"
start_urls = ["https://www.onelifefitness.com"]
store_id = []
def parse(self, response):
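        # The top nav's second menu entry holds the "Locations" dropdown; the
        # Kansas City club (onelifekc) lives on its own domain with a different
        # page layout, so it gets a dedicated callback.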
parents = response.xpath('.//div[@id="hs_menu_wrapper_module_14652775546295339"]//ul/li[2]')
for parent in parents:
if parent.xpath('.//a/text()').extract_first().find('Locations') != -1:
branch_list = parent.xpath('.//ul[contains(@class, "hs-menu-children-wrapper")]/li/a/@href').extract()
for branch in branch_list:
branch = branch.replace('https://www.onelifefitness.com', '')
if branch.find('onelifekc') == -1:
request = scrapy.Request(url="https://www.onelifefitness.com%s" % branch, callback=self.parse_clubs)
else:
request = scrapy.Request(url=branch, callback=self.parse_kensas)
yield request
def parse_kensas(self, response):
item = ChainItem()
item['store_number'] = ''
item['coming_soon'] = "0"
item['store_name'] = response.xpath('.//a[@class="standard-logo"]/img/@alt').extract_first()
address = response.xpath('.//address/a[1]/text()').extract()
address = [tp.strip().replace('\n', '') for tp in address if tp.strip() != ""]
addr = usaddress.parse(" ".join(address))
city = state = zip_code = street = ''
for temp in addr:
if temp[1] == 'PlaceName':
city += temp[0].replace(',','') + ' '
elif temp[1] == 'StateName':
state = temp[0].replace(',','')
elif temp[1] == 'ZipCode':
zip_code = temp[0].replace(',','')
else:
street += temp[0].replace(',','') + ' '
item['address'] = street
item['country'] = 'United States'
item['city'] = city
item['state'] = state
item['zip_code'] = zip_code
item['phone_number'] = response.xpath('.//address/a/text()').extract_first()
item['latitude'] = ''
item['longitude'] = ''
item['store_hours'] = ""
item['other_fields'] = ""
yield item
def parse_clubs(self, response):
club_list = response.xpath('.//ul[contains(@class, "gym_locations")]/li')
for club in club_list:
request = scrapy.Request(url="https://www.onelifefitness.com%s" % club.xpath('.//a/@href').extract_first(), callback=self.parse_store)
request.meta['lat'] = club.xpath('.//@data-lat').extract_first()
request.meta['lng'] = club.xpath('.//@data-ln').extract_first()
yield request
def parse_store(self, response):
# try:
item = ChainItem()
item['store_number'] = ''
item['store_name'] = response.xpath('.//div[@class="banner-header"]/h1/text()').extract_first()
item['address'] = response.xpath('.//span[@id="hs_cos_wrapper_module_14684752951104419"]/p/span[1]/text()').extract_first()
address = ''
# if item['store_name'].find('Windermere Gym') != -1:
# pdb.set_trace()
if item['address'] == None:
item['address'] = response.xpath('.//span[@id="hs_cos_wrapper_module_14684752951104419"]/p[1]/text()').extract_first()
if item['address'] == None:
item['address'] = response.xpath('.//span[@id="hs_cos_wrapper_module_14684752951104419"]/text()').extract_first()
address = response.xpath('.//span[@id="hs_cos_wrapper_module_14684752951104419"]/text()').extract()[1]
else:
address = response.xpath('.//span[@id="hs_cos_wrapper_module_14684752951104419"]/p/text()').extract()[1]
else:
address = response.xpath('.//span[@id="hs_cos_wrapper_module_14684752951104419"]/p/span[2]/text()').extract_first()
if len(address.split(',')) == 2:
item['city'] = address.split(',')[0].strip()
item['state'] = address.split(',')[1].strip().split(' ')[0].strip()
item['zip_code'] = address.split(',')[1].strip().split(' ')[1].strip()
elif len(address.split(',')) == 3:
item['city'] = address.split(',')[0].strip()
item['state'] = address.split(',')[1].strip()
item['zip_code'] = address.split(',')[2].strip()
else:
item['city'] = address.split(' ')[0].strip()
item['state'] = address.split(' ')[1].strip()
item['zip_code'] = address.split(' ')[2].strip()
item['address2'] = ''
item['country'] = 'United States'
item['coming_soon'] = "0"
item['latitude'] = response.meta['lat']
item['longitude'] = response.meta['lng']
item['other_fields'] = ""
phone = response.xpath('.//span[@id="hs_cos_wrapper_module_14684754122179512"]/p/text()').extract_first()
if phone == None:
phone = response.xpath('.//span[@id="hs_cos_wrapper_module_14684754122179512"]/p/a/text()').extract_first()
if phone == None:
item['phone_number'] = ''
elif phone.find('Coming Soon') == -1:
item['phone_number'] = self.validate(phone)
else:
item['coming_soon'] = "1"
item['store_hours'] = ""
hours = response.xpath('.//span[@id="hs_cos_wrapper_module_14684754134419869"]/p/text()').extract_first()
if hours != None and hours.find('Coming Soon') != -1:
item['coming_soon'] = "1"
else:
try:
item['store_hours'] = "; ".join(response.xpath('.//span[@id="hs_cos_wrapper_module_14684754134419869"]/p/text()').extract()).strip()
item['store_hours'] = item['store_hours'][2:].strip()
except:
item['store_hours'] = ""
# if item['store_name'].find('Crabapple Gym') != -1:
# pdb.set_trace()
item['store_hours'] = self.validate(item['store_hours'])
# except:
# pdb.set_trace()
yield item
def validate(self, value):
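        # Python 2 idiom: work on the UTF-8 byte string and strip
        # non-breaking spaces (U+00A0, encoded as '\xc2\xa0').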
return value.encode('utf8').replace('\xc2\xa0', ' ')
| [
"[email protected]"
] | |
68fbb29c364a4f6b17cf269f611afac8fb2b7027 | 956f13e160b8381d3f8bbbb4b97bf66981ce0490 | /index/migrations/0012_auto_20181119_1617.py | b89c05a472c13c4b6b1a52e677e55b53234dfbd9 | [] | no_license | yzp0111/zyogo | e910d0ad029fb1a3f95beb8422336fee474c635a | b4807f4418d8cb1d195097f87e4e74637346cb6d | refs/heads/master | 2020-04-07T23:46:44.946618 | 2018-11-23T11:43:50 | 2018-11-23T11:43:50 | 158,824,828 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 798 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-11-19 08:17
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('index', '0011_auto_20181119_0852'),
]
operations = [
migrations.AlterField(
model_name='goodsinfo',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='index.UserInfo', verbose_name='卖家'),
),
migrations.AlterField(
model_name='windows',
name='goods',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='index.GoodsInfo', verbose_name='商品'),
),
]
| [
"[email protected]"
] | |
cde2edf4a10c79aa2209df50159f375a1a6b8a53 | 3d37f595a8aaaa7c5723ddbd6758ecac5147dce2 | /factorial-trailing-zeroes/factorial-trailing-zeroes.py | 7087830bf6b61d0261c4e9cb1b2af12cdc58875f | [] | no_license | baggy2797/Leetcode | ec218b155ebb972cd793253f25c3e18117216703 | 469c1541579401768f7a1da55d504a9e8656b21e | refs/heads/main | 2023-06-24T17:03:42.708935 | 2021-07-16T22:31:24 | 2021-07-16T22:31:24 | 342,979,700 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | class Solution:
def trailingZeroes(self, n: int) -> int:
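        # n! gains a trailing zero for every factor of 5 (factors of 2 are
        # plentiful), so count the multiplicity of 5 in each multiple of 5
        # up to n; equivalently, sum(n // 5**k for k >= 1).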
count = 0
for i in range(5,n+1,5):
temp = 5
while i% temp == 0:
count = count + 1
temp = temp * 5
return count
| [
"[email protected]"
] | |
e82c5392a6049ce180717cc3145908adaa3f3fc4 | 01fdd206c8c825b30870bdd3f6e75f0aa113b849 | /test/record/parser/test_response_whois_sgnic_sg_property_nameservers_schema_2.py | 8a8146c2727ea7d9dd262c2deba6226bf92c8635 | [
"MIT"
] | permissive | huyphan/pyyawhois | 0fbc5a7d64a53ae6e3393fdc1c7ff0d0ac5f22b5 | 77fb2f73a9c67989f1d41d98f37037406a69d136 | refs/heads/master | 2021-01-23T22:42:55.989651 | 2015-09-19T16:40:06 | 2015-09-19T16:40:06 | 23,335,785 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,408 | py |
# This file is autogenerated. Do not edit it manually.
# If you want change the content of this file, edit
#
# spec/fixtures/responses/whois.sgnic.sg/property_nameservers_schema_2
#
# and regenerate the tests with the following script
#
# $ scripts/generate_tests.py
#
from nose.tools import *
from dateutil.parser import parse as time_parse
import yawhois
class TestWhoisSgnicSgPropertyNameserversSchema2(object):
def setUp(self):
fixture_path = "spec/fixtures/responses/whois.sgnic.sg/property_nameservers_schema_2.txt"
host = "whois.sgnic.sg"
part = yawhois.record.Part(open(fixture_path, "r").read(), host)
self.record = yawhois.record.Record(None, [part])
def test_nameservers(self):
eq_(self.record.nameservers.__class__.__name__, 'list')
eq_(len(self.record.nameservers), 4)
eq_(self.record.nameservers[0].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[0].name, "ns1.google.com")
eq_(self.record.nameservers[1].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[1].name, "ns2.google.com")
eq_(self.record.nameservers[2].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[2].name, "ns3.google.com")
eq_(self.record.nameservers[3].__class__.__name__, 'Nameserver')
eq_(self.record.nameservers[3].name, "ns4.google.com")
| [
"[email protected]"
] | |
9111ff6d364693b213af14c932a89fef59ae75aa | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5658571765186560_0/Python/Verum/D-OminousOmino.py | fc2ac8e040f5e876b4f9b9ae8a8a363707c04c6b | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,118 | py | #input_filename = "D-test.txt"
#output_filename = "D-test-out.txt"
input_filename = "D-small-attempt0.in"
output_filename = "D-small-attempt0.out"
#input_filename = "D-large.in"
#output_filename = "D-large.out"
def solve(x,r,c):
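    # True -> Gabriel can always tile the r x c board with x-ominoes whatever
    # piece Richard picks; False -> Richard wins.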
r, c = min(r,c), max(r,c)
if (r*c) % x != 0:
return False
elif x == 3 and r < 2:
return False
elif x == 4 and r < 3:
return False
elif x == 5 and r < 3:
return False
elif x == 5 and r == 3 and c < 10:
return False
elif x == 6 and r < 4:
return False
elif x > 6:
return False
else:
return True
with open(input_filename, "r") as ifile:
with open(output_filename, "w") as ofile:
T = int(ifile.readline())
for case in range(1, T+1):
x, r, c = map(int, ifile.readline().split())
print("\nCase %d" % case)
print("Task: %s" % str( (x,r,c) ))
result = solve(x,r,c)
result = "GABRIEL" if result else "RICHARD"
ofile.write("Case #%d: %s\n" % (case, result))
print("Solve: %s" % result)
| [
"[email protected]"
] | |
33fa2896293d944a73e801b87353e72d965ebd79 | 385c027fc4b9c09706a4d880bdb8aa5897d0ebca | /tests/greedy/test_transforms.py | b8189adbd88900a9f717953dc14be5efddba0e93 | [
"MIT"
] | permissive | rodluger/starry | 076d46324473a6ac634781a3382021d02a5f4fdd | b72dff08588532f96bd072f2f1005e227d8e4ed8 | refs/heads/master | 2023-05-23T16:37:07.835744 | 2022-07-14T15:38:11 | 2022-07-14T15:38:11 | 120,621,593 | 131 | 31 | MIT | 2021-11-16T16:48:10 | 2018-02-07T13:54:20 | Python | UTF-8 | Python | false | false | 639 | py | import numpy as np
import starry
def test_latlon_grid():
# Just check that these don't cause errors
map = starry.Map(10)
lat, lon = map.get_latlon_grid(projection="rect")
lat, lon = map.get_latlon_grid(projection="ortho")
lat, lon = map.get_latlon_grid(projection="moll")
def test_pixel_transforms():
map = starry.Map(10)
lat, lon, Y2P, P2Y, Dx, Dy = map.get_pixel_transforms()
# Check that the back-and-forth transform is the identity (ish)
assert np.max(np.abs(P2Y @ Y2P - np.eye(map.Ny))) < 1e-6
# Just check that the derivatives are finite
assert not np.isnan(np.sum(Dx) + np.sum(Dy))
| [
"[email protected]"
] | |
ee4c3fe497c236a93beff8da80c41af918eabd5c | 1cd37c59751344c0f89fe9102e25121e50f4cdfa | /python/ZvvHbb13TeVmacros/launchFakeMET.py | 8255ef80ffa75f80954f890b1646c569a6eb13c5 | [] | no_license | jmduarte/Xbb | 7bd2f919c320e4cda1306c0282f1d4c0df220b05 | 2f23c163b81c0d4d3f979369b86690ddfb7920fd | refs/heads/master | 2021-01-12T16:57:59.009289 | 2016-06-15T09:43:41 | 2016-06-15T09:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,485 | py | import sys
from doFakeMET import *
#from doFakeMETStupid import *
try:
fileNames = sys.argv[1]
outName = sys.argv[2]
print
print "Launching doFakeMET with:"
print "fileNames:", fileNames
print "outName:", outName
print
except:
print
print "example:"
print "python launchFakeMET.py tree_100_*.root newFile.root"
    print sys.argv
    print
    sys.exit(1)
print "fileNames: ",fileNames
from os import walk
dirpath_ = ""
dirnames_ = []
files_ = ""
# filenames = []
inputFiles = []
folder_prefix = ''
exit = False
for (dirpath_, dirnames_, files_) in walk(fileNames):
for filename_ in files_:
        print filename_
if 'root' in filename_ and 'tree' in filename_ and not 'failed' in dirpath_:
exit = True
if exit: break
if exit: break
print dirpath_
path = dirpath_+'/'+ files_[0]
path = path.split("tree_")[0]
path = path + "tree_*.root"
inputs = []
for file_ in files_:
inputs.append((dirpath_+'/'+ file_,outName+'/'+file_))
quick = False
function = None
expoRatio = None
if quick:
firstFile = inputs[0][1]
gROOT.ProcessLine(".x "+firstFile.replace(".root","_fit.C"))
function = gDirectory.Get("histo")
function = copy.copy(function)
gROOT.ProcessLine(".x "+firstFile.replace(".root","_fit4.C"))
expoRatio = f4.Get("c1").GetPrimitive("expoRatio")
expoRatio = copy.copy(expoRatio)
for (inpt,outpt) in inputs:
function,expoRatio = doFile(inpt,outpt,function,expoRatio)
# print inpt,outpt
| [
"[email protected]"
] |