hexsha
stringlengths 40
40
| size
int64 4
1.02M
| ext
stringclasses 8
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
209
| max_stars_repo_name
stringlengths 5
121
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
209
| max_issues_repo_name
stringlengths 5
121
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
209
| max_forks_repo_name
stringlengths 5
121
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 4
1.02M
| avg_line_length
float64 1.07
66.1k
| max_line_length
int64 4
266k
| alphanum_fraction
float64 0.01
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cbe6a49cfbcddcf0f1360867b73aef43e61a651e
| 487 |
py
|
Python
|
tests/demoproject/demo/sample/urls.py
|
roman-karpovich/etools-validator
|
e5050a675d506f3d18a4703e8b73425d93919f0c
|
[
"Apache-2.0"
] | null | null | null |
tests/demoproject/demo/sample/urls.py
|
roman-karpovich/etools-validator
|
e5050a675d506f3d18a4703e8b73425d93919f0c
|
[
"Apache-2.0"
] | null | null | null |
tests/demoproject/demo/sample/urls.py
|
roman-karpovich/etools-validator
|
e5050a675d506f3d18a4703e8b73425d93919f0c
|
[
"Apache-2.0"
] | null | null | null |
from django.conf.urls import url
from .views import DemoCreateView, DemoUpdateNonSerializedView, DemoUpdateView
# URL configuration for the demo "sample" application.
app_name = "sample"

urlpatterns = (
    # Create a new demo object.
    url(r'^create/$', view=DemoCreateView.as_view(), name='create'),
    # Update the demo object identified by its primary key.
    url(r'^update/(?P<pk>\d+)/$', view=DemoUpdateView.as_view(), name='update'),
    # Update variant handled by the non-serialized view.
    url(
        r'^update-non-serialized/(?P<pk>\d+)/$',
        view=DemoUpdateNonSerializedView.as_view(),
        name='update-non-serialized',
    ),
)
| 24.35 | 78 | 0.62423 |
68083934a20c88153061cf67729f6e5ae836bc8c
| 20,510 |
py
|
Python
|
cms/apps/pages/tests/test_models.py
|
cresset-group/cms
|
727b81e40dd1196e85c240e728a7824121163d4d
|
[
"BSD-3-Clause"
] | 13 |
2015-03-13T21:32:16.000Z
|
2020-08-07T08:09:02.000Z
|
cms/apps/pages/tests/test_models.py
|
cresset-group/cms
|
727b81e40dd1196e85c240e728a7824121163d4d
|
[
"BSD-3-Clause"
] | 131 |
2015-04-04T11:27:14.000Z
|
2020-10-16T13:39:16.000Z
|
cms/apps/pages/tests/test_models.py
|
cresset-group/cms
|
727b81e40dd1196e85c240e728a7824121163d4d
|
[
"BSD-3-Clause"
] | 16 |
2015-06-05T12:56:28.000Z
|
2021-01-06T15:15:53.000Z
|
'''Tests for the pages app.'''
from datetime import timedelta
from django.contrib.contenttypes.models import ContentType
from django.core.management import call_command
from django.db import models
from django.test import TestCase
from django.utils.timezone import now
from reversion import create_revision
from watson import search
from ....models.managers import publication_manager
from ...testing_models.models import (TestSection, TestPageContent,
TestPageContentWithSections)
from ..models import (ContentBase, Page, PageSearchAdapter, PageSitemap,
filter_indexable_pages)
class TestPage(TestCase):
    """Tests for ``Page`` tree navigation, search and publication logic."""

    def _create_page(self, title, parent=None, slug=None):
        """Create a ``Page`` with an attached ``TestPageContent``.

        ``parent`` and ``slug`` are forwarded only when supplied, so the
        model's own defaults apply for callers that omit them.
        """
        kwargs = {
            'title': title,
            'content_type': self._content_type,
        }
        if parent is not None:
            kwargs['parent'] = parent
        if slug is not None:
            kwargs['slug'] = slug
        page = Page.objects.create(**kwargs)
        TestPageContent.objects.create(page=page)
        return page

    def setUp(self):
        # Register the watson search tables, then build a linear four-level
        # tree: Homepage -> Section -> Subsection -> Subsubsection.
        call_command('installwatson')
        with search.update_index():
            self._content_type = ContentType.objects.get_for_model(TestPageContent)
            self.homepage = self._create_page('Homepage', slug='homepage')
            self.section = self._create_page('Section', parent=self.homepage)
            self.subsection = self._create_page('Subsection', parent=self.section)
            self.subsubsection = self._create_page(
                'Subsubsection', parent=self.subsection)

    def test_children(self):
        """``children`` and ``navigation`` walk the tree in the same order."""
        homepage = Page.objects.get_homepage()
        subsection = homepage.children[0].children[0]
        self.assertEqual(subsection.title, 'Subsection')
        subsection = homepage.navigation[0].navigation[0]
        self.assertEqual(subsection.title, 'Subsection')
        subsubsection = subsection.children[0]
        self.assertEqual(subsubsection.title, 'Subsubsection')

    def test_page_reverse(self):
        """``Page.reverse`` resolves view names against the page's URL conf."""
        url = self.homepage.reverse('detail', kwargs={
            'slug': self.homepage.slug
        })
        self.assertEqual(url, '/homepage/')
        url = self.homepage.reverse('index')
        self.assertEqual(url, '/')

    def test_filter_indexable_pages(self):
        pages = Page.objects.all()
        self.assertEqual(len(pages), 4)
        pages = filter_indexable_pages(Page.objects.all())
        self.assertEqual(len(pages), 4)
        # Turn off indexing on the homepage; it should drop out of the set.
        self.homepage.robots_index = False
        self.homepage.save()
        pages = filter_indexable_pages(Page.objects.all())
        self.assertEqual(len(pages), 3)

    def test_pagesitemap_items(self):
        sitemap = PageSitemap()
        self.assertEqual(len(sitemap.items()), 4)
        # Turn off indexing on the homepage; the sitemap should shrink.
        self.homepage.robots_index = False
        self.homepage.save()
        self.assertEqual(len(sitemap.items()), 3)

    def test_contentbase_unicode(self):
        """Content objects stringify to their page's title."""
        self.assertEqual(str(self.homepage.content), 'Homepage')
        self.assertEqual(str(self.section.content), 'Section')
        self.assertEqual(str(self.subsection.content), 'Subsection')
        self.assertEqual(str(self.subsubsection.content), 'Subsubsection')

    def test_pagesearchadapter_get_live_queryset(self):
        """Offline pages disappear from search in published mode."""
        self.assertEqual(len(search.search('Homepage', models=(Page,))), 1)
        with publication_manager.select_published(True):
            self.assertEqual(len(search.search('Homepage', models=(Page,))), 1)
            self.homepage.is_online = False
            self.homepage.save()
            self.assertEqual(len(search.search('Homepage', models=(Page,))), 0)

    def test_page_get_absolute_url(self):
        with search.update_index():
            Page.objects.all().delete()
            content_type = ContentType.objects.get_for_model(TestPageContent)
            new_page = Page(
                content_type=content_type,
                parent=None,
                left=None,
                right=None,
            )
            new_page.save()
            TestPageContent.objects.create(
                page=new_page,
            )
        # Both the in-memory instance and a fresh copy from the database
        # should resolve to the site root.
        self.assertEqual(new_page.get_absolute_url(), '/')
        new_page = Page.objects.get(pk=new_page.pk)
        self.assertEqual(new_page.get_absolute_url(), '/')

    def test_last_modified(self):
        # We have no versions yet, so the placeholder is returned.
        self.assertEqual(self.homepage.last_modified(), '-')
        # Create an initial revision.
        with create_revision():
            self.homepage.save()
        # With a version in the db, last_modified should not be empty.
        self.assertNotEqual(self.homepage.last_modified(), '-')

    def test_publication(self):
        # Push every page's publication date into the future so nothing is
        # visible in published mode.
        future = now() + timedelta(days=10)
        for page in (self.homepage, self.section, self.subsection,
                     self.subsubsection):
            page.publication_date = future
            page.save()
        with publication_manager.select_published(True):
            self.assertEqual(Page.objects.count(), 0)
        with publication_manager.select_published(False):
            self.assertEqual(Page.objects.count(), 4)
        # We need to generate an exception within the published block to
        # check that the manager unwinds its state correctly.
        with self.assertRaises(TypeError), \
                publication_manager.select_published(True):
            assert 1 / 'a'
class TestSectionPage(TestCase):
    """Checks the search adapter against content that renders sections."""

    def setUp(self):
        # Build a single homepage backed by section-aware page content.
        with search.update_index():
            sections_type = ContentType.objects.get_for_model(
                TestPageContentWithSections)
            self.homepage = Page.objects.create(
                title='Homepage',
                slug='homepage',
                content_type=sections_type,
            )
            TestPageContentWithSections.objects.create(page=self.homepage)

    def test_pagesearchadapter_get_content(self):
        """The adapter flattens slug, title and section text into one string."""
        adapter = PageSearchAdapter(Page)
        self.assertEqual(
            adapter.get_content(self.homepage),
            ' homepage Homepage testing',
        )
class TestPageComplex(TestCase):
    """Nested-set (left/right bound) bookkeeping tests on a branching tree.

    Page structure::

                                 Homepage
                                    |
             +----------------------+-----------------------+
             |                      |                       |
       Tree 1 - Page 1        Tree 2 - Page 1         Tree 3 - Page 1
             |                                              |
      +------+------+                                +------+------+
      |             |                                |             |
  Tree 1 - Page 2  Tree 1 - Page 3           Tree 3 - Page 2  Tree 3 - Page 3
                                                     |
                                              +------+------+
                                              |             |
                                       Tree 3 - Page 4  Tree 3 - Page 5
    """

    # Nested-set bounds of the tree built in setUp, keyed by slug:
    # slug -> (left, right).
    INITIAL_BOUNDS = {
        'Homepage': (1, 20),
        'Tree_1___Page_1': (2, 7),
        'Tree_1___Page_2': (3, 4),
        'Tree_1___Page_3': (5, 6),
        'Tree_2___Page_1': (8, 9),
        'Tree_3___Page_1': (10, 19),
        'Tree_3___Page_2': (11, 16),
        'Tree_3___Page_3': (17, 18),
        'Tree_3___Page_4': (12, 13),
        'Tree_3___Page_5': (14, 15),
    }

    def setUp(self):
        # Declarative description of the tree shown in the class docstring.
        structure = {
            'title': 'Homepage',
            'children': [
                {
                    'title': 'Tree 1 - Page 1',
                    'children': [
                        {'title': 'Tree 1 - Page 2'},
                        {'title': 'Tree 1 - Page 3'},
                    ],
                },
                {'title': 'Tree 2 - Page 1'},
                {
                    'title': 'Tree 3 - Page 1',
                    'children': [
                        {
                            'title': 'Tree 3 - Page 2',
                            'children': [
                                {'title': 'Tree 3 - Page 4'},
                                {'title': 'Tree 3 - Page 5'},
                            ],
                        },
                        {'title': 'Tree 3 - Page 3'},
                    ],
                },
            ],
        }
        content_type = ContentType.objects.get_for_model(TestPageContent)
        self.page_ids = {}
        self.pages = {}

        def _add_page(page, parent=None):
            # Slugs double as dict keys: spaces and dashes become underscores,
            # e.g. 'Tree 1 - Page 1' -> 'Tree_1___Page_1'.
            slug = page['title'].replace(' ', '_').replace('-', '_')
            page_obj = Page.objects.create(
                title=page['title'],
                slug=slug,
                content_type=content_type,
                parent=parent,
            )
            TestPageContent.objects.create(
                page=page_obj,
            )
            self.page_ids[slug] = page_obj.pk
            for child in page.get('children') or []:
                _add_page(child, page_obj)

        with search.update_index():
            _add_page(structure)
        self._rebuild_page_dict()

    def _rebuild_page_dict(self):
        """Reload ``self.pages`` from the database by stored primary key."""
        self.pages = {}
        for slug, pk in self.page_ids.items():
            try:
                self.pages[slug] = Page.objects.get(pk=pk)
            except Page.DoesNotExist:
                # Handle tests involving deletions.
                pass

    def _assert_bounds(self, expected):
        """Assert each page's nested-set ``left``/``right`` match ``expected``.

        ``expected`` maps a page slug to its ``(left, right)`` pair.
        """
        for slug, (left, right) in expected.items():
            self.assertEqual(self.pages[slug].left, left)
            self.assertEqual(self.pages[slug].right, right)

    def test_page_excise_branch(self):
        # Excising a branch which hasn't been deleted should have no effect.
        self._assert_bounds(self.INITIAL_BOUNDS)
        self.pages['Homepage']._excise_branch()
        self._assert_bounds(self.INITIAL_BOUNDS)

    def test_page_save__create_with_sides(self):
        """A page created with explicit left/right values still saves."""
        with search.update_index():
            content_type = ContentType.objects.get_for_model(TestPageContent)
            # Create a page with a manual left and right defined.
            page_obj = Page.objects.create(
                title='Foo',
                content_type=content_type,
                parent=self.pages['Tree_1___Page_1'],
                left=7,
                right=8,
            )
            TestPageContent.objects.create(
                page=page_obj,
            )
            self.assertEqual(page_obj.title, 'Foo')

    def test_page_save__move_branch_left(self):
        self._assert_bounds(self.INITIAL_BOUNDS)
        # Reparent the Tree 3 branch under Tree 1 - Page 1 (moving it left).
        self.pages['Tree_3___Page_1'].parent = self.pages['Tree_1___Page_1']
        self.pages['Tree_3___Page_1'].save()
        # Rebuild page dict.
        self._rebuild_page_dict()
        self._assert_bounds({
            'Homepage': (1, 20),
            'Tree_1___Page_1': (2, 17),
            'Tree_1___Page_2': (3, 4),
            'Tree_1___Page_3': (5, 6),
            'Tree_2___Page_1': (18, 19),
            'Tree_3___Page_1': (7, 16),
            'Tree_3___Page_2': (8, 13),
            'Tree_3___Page_3': (14, 15),
            'Tree_3___Page_4': (9, 10),
            'Tree_3___Page_5': (11, 12),
        })

    def test_page_save__move_branch_right(self):
        self._assert_bounds(self.INITIAL_BOUNDS)
        # Reparent the Tree 1 branch under Tree 3 - Page 1 (moving it right).
        self.pages['Tree_1___Page_1'].parent = self.pages['Tree_3___Page_1']
        self.pages['Tree_1___Page_1'].save()
        # Rebuild page dict.
        self._rebuild_page_dict()
        self._assert_bounds({
            'Homepage': (1, 20),
            'Tree_1___Page_1': (13, 18),
            'Tree_1___Page_2': (14, 15),
            'Tree_1___Page_3': (16, 17),
            'Tree_2___Page_1': (2, 3),
            'Tree_3___Page_1': (4, 19),
            'Tree_3___Page_2': (5, 10),
            'Tree_3___Page_3': (11, 12),
            'Tree_3___Page_4': (6, 7),
            'Tree_3___Page_5': (8, 9),
        })

    def test_page_delete(self):
        self.pages['Tree_3___Page_5'].content.delete()
        self.pages['Tree_3___Page_5'].delete()
        # Rebuild page dict.
        self._rebuild_page_dict()
        # The remaining pages' bounds close over the gap left by the delete.
        self._assert_bounds({
            'Homepage': (1, 18),
            'Tree_1___Page_1': (2, 7),
            'Tree_1___Page_2': (3, 4),
            'Tree_1___Page_3': (5, 6),
            'Tree_2___Page_1': (8, 9),
            'Tree_3___Page_1': (10, 17),
            'Tree_3___Page_2': (11, 14),
            'Tree_3___Page_3': (15, 16),
            'Tree_3___Page_4': (12, 13),
        })
        # The deleted page should no longer be tracked at all.
        with self.assertRaises(KeyError):
            self.pages['Tree_3___Page_5']
| 40.775348 | 89 | 0.595027 |
0ee8d852f5df28381578238f6269c0fabd266e10
| 44 |
py
|
Python
|
src/thenewboston/utils/exceptions.py
|
achalpatel/thenewboston-python
|
4044ce07cb5e0d1f92b4332bbd8c6ac8f33bcdb9
|
[
"MIT"
] | 122 |
2020-07-12T23:08:49.000Z
|
2021-12-18T16:14:10.000Z
|
src/thenewboston/utils/exceptions.py
|
achalpatel/thenewboston-python
|
4044ce07cb5e0d1f92b4332bbd8c6ac8f33bcdb9
|
[
"MIT"
] | 47 |
2020-07-15T02:18:09.000Z
|
2021-09-22T19:51:59.000Z
|
src/thenewboston/utils/exceptions.py
|
achalpatel/thenewboston-python
|
4044ce07cb5e0d1f92b4332bbd8c6ac8f33bcdb9
|
[
"MIT"
] | 52 |
2020-07-13T10:49:52.000Z
|
2021-10-30T03:34:55.000Z
|
class NetworkException(Exception):
    """Raised for network-related failures; behaves exactly like Exception."""
| 14.666667 | 34 | 0.772727 |
2ed242d7fd9a6986808747e038f01b474fdf650f
| 1,053 |
py
|
Python
|
the-simple-graphql-service/python/setup.py
|
mttfarmer/serverless
|
e7b28421708cd4643ce61d28940357c3664ed2a8
|
[
"MIT"
] | 1,627 |
2020-01-12T10:35:06.000Z
|
2022-03-31T13:12:51.000Z
|
the-simple-graphql-service/python/setup.py
|
mttfarmer/serverless
|
e7b28421708cd4643ce61d28940357c3664ed2a8
|
[
"MIT"
] | 58 |
2020-02-09T06:57:35.000Z
|
2022-02-26T23:25:10.000Z
|
the-simple-graphql-service/python/setup.py
|
mttfarmer/serverless
|
e7b28421708cd4643ce61d28940357c3664ed2a8
|
[
"MIT"
] | 240 |
2020-01-19T09:09:44.000Z
|
2022-03-30T19:12:17.000Z
|
import setuptools
# Long description for PyPI comes straight from the project README.
with open("README.md") as readme:
    long_description = readme.read()

_CLASSIFIERS = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: JavaScript",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Topic :: Software Development :: Code Generators",
    "Topic :: Utilities",
    "Typing :: Typed",
]

setuptools.setup(
    name="the_simple_graphql_service",
    version="0.0.1",
    description="An empty CDK Python app",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="author",
    package_dir={"": "the_simple_graphql_service"},
    packages=setuptools.find_packages(where="the_simple_graphql_service"),
    install_requires=[],
    python_requires=">=3.6",
    classifiers=_CLASSIFIERS,
)
| 23.4 | 74 | 0.62773 |
9afbabbfd39a01e73de45b97fa2f81a6711e3ea4
| 12,340 |
py
|
Python
|
kubernetes/client/models/v1beta1_stateful_set_status.py
|
Scalr/kubernetes-client-python
|
07442bdb76f0876ec96c0b0da6f9c4b06d7e5e38
|
[
"Apache-2.0"
] | 3 |
2019-05-19T05:05:37.000Z
|
2020-03-20T04:56:20.000Z
|
kubernetes/client/models/v1beta1_stateful_set_status.py
|
Scalr/kubernetes-client-python
|
07442bdb76f0876ec96c0b0da6f9c4b06d7e5e38
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/client/models/v1beta1_stateful_set_status.py
|
Scalr/kubernetes-client-python
|
07442bdb76f0876ec96c0b0da6f9c4b06d7e5e38
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1StatefulSetStatus(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'collision_count': 'int',
'conditions': 'list[V1beta1StatefulSetCondition]',
'current_replicas': 'int',
'current_revision': 'str',
'observed_generation': 'int',
'ready_replicas': 'int',
'replicas': 'int',
'update_revision': 'str',
'updated_replicas': 'int'
}
attribute_map = {
'collision_count': 'collisionCount',
'conditions': 'conditions',
'current_replicas': 'currentReplicas',
'current_revision': 'currentRevision',
'observed_generation': 'observedGeneration',
'ready_replicas': 'readyReplicas',
'replicas': 'replicas',
'update_revision': 'updateRevision',
'updated_replicas': 'updatedReplicas'
}
def __init__(self, collision_count=None, conditions=None, current_replicas=None, current_revision=None, observed_generation=None, ready_replicas=None, replicas=None, update_revision=None, updated_replicas=None):
"""
V1beta1StatefulSetStatus - a model defined in Swagger
"""
self._collision_count = None
self._conditions = None
self._current_replicas = None
self._current_revision = None
self._observed_generation = None
self._ready_replicas = None
self._replicas = None
self._update_revision = None
self._updated_replicas = None
self.discriminator = None
if collision_count is not None:
self.collision_count = collision_count
if conditions is not None:
self.conditions = conditions
if current_replicas is not None:
self.current_replicas = current_replicas
if current_revision is not None:
self.current_revision = current_revision
if observed_generation is not None:
self.observed_generation = observed_generation
if ready_replicas is not None:
self.ready_replicas = ready_replicas
self.replicas = replicas
if update_revision is not None:
self.update_revision = update_revision
if updated_replicas is not None:
self.updated_replicas = updated_replicas
@property
def collision_count(self):
"""
Gets the collision_count of this V1beta1StatefulSetStatus.
collisionCount is the count of hash collisions for the StatefulSet. The StatefulSet controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ControllerRevision.
:return: The collision_count of this V1beta1StatefulSetStatus.
:rtype: int
"""
return self._collision_count
@collision_count.setter
def collision_count(self, collision_count):
"""
Sets the collision_count of this V1beta1StatefulSetStatus.
collisionCount is the count of hash collisions for the StatefulSet. The StatefulSet controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ControllerRevision.
:param collision_count: The collision_count of this V1beta1StatefulSetStatus.
:type: int
"""
self._collision_count = collision_count
@property
def conditions(self):
"""
Gets the conditions of this V1beta1StatefulSetStatus.
Represents the latest available observations of a statefulset's current state.
:return: The conditions of this V1beta1StatefulSetStatus.
:rtype: list[V1beta1StatefulSetCondition]
"""
return self._conditions
@conditions.setter
def conditions(self, conditions):
"""
Sets the conditions of this V1beta1StatefulSetStatus.
Represents the latest available observations of a statefulset's current state.
:param conditions: The conditions of this V1beta1StatefulSetStatus.
:type: list[V1beta1StatefulSetCondition]
"""
self._conditions = conditions
@property
def current_replicas(self):
"""
Gets the current_replicas of this V1beta1StatefulSetStatus.
currentReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by currentRevision.
:return: The current_replicas of this V1beta1StatefulSetStatus.
:rtype: int
"""
return self._current_replicas
@current_replicas.setter
def current_replicas(self, current_replicas):
"""
Sets the current_replicas of this V1beta1StatefulSetStatus.
currentReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by currentRevision.
:param current_replicas: The current_replicas of this V1beta1StatefulSetStatus.
:type: int
"""
self._current_replicas = current_replicas
@property
def current_revision(self):
"""
Gets the current_revision of this V1beta1StatefulSetStatus.
currentRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [0,currentReplicas).
:return: The current_revision of this V1beta1StatefulSetStatus.
:rtype: str
"""
return self._current_revision
@current_revision.setter
def current_revision(self, current_revision):
"""
Sets the current_revision of this V1beta1StatefulSetStatus.
currentRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [0,currentReplicas).
:param current_revision: The current_revision of this V1beta1StatefulSetStatus.
:type: str
"""
self._current_revision = current_revision
@property
def observed_generation(self):
"""
Gets the observed_generation of this V1beta1StatefulSetStatus.
observedGeneration is the most recent generation observed for this StatefulSet. It corresponds to the StatefulSet's generation, which is updated on mutation by the API Server.
:return: The observed_generation of this V1beta1StatefulSetStatus.
:rtype: int
"""
return self._observed_generation
@observed_generation.setter
def observed_generation(self, observed_generation):
"""
Sets the observed_generation of this V1beta1StatefulSetStatus.
observedGeneration is the most recent generation observed for this StatefulSet. It corresponds to the StatefulSet's generation, which is updated on mutation by the API Server.
:param observed_generation: The observed_generation of this V1beta1StatefulSetStatus.
:type: int
"""
self._observed_generation = observed_generation
@property
def ready_replicas(self):
"""
Gets the ready_replicas of this V1beta1StatefulSetStatus.
readyReplicas is the number of Pods created by the StatefulSet controller that have a Ready Condition.
:return: The ready_replicas of this V1beta1StatefulSetStatus.
:rtype: int
"""
return self._ready_replicas
@ready_replicas.setter
def ready_replicas(self, ready_replicas):
"""
Sets the ready_replicas of this V1beta1StatefulSetStatus.
readyReplicas is the number of Pods created by the StatefulSet controller that have a Ready Condition.
:param ready_replicas: The ready_replicas of this V1beta1StatefulSetStatus.
:type: int
"""
self._ready_replicas = ready_replicas
@property
def replicas(self):
"""
Gets the replicas of this V1beta1StatefulSetStatus.
replicas is the number of Pods created by the StatefulSet controller.
:return: The replicas of this V1beta1StatefulSetStatus.
:rtype: int
"""
return self._replicas
@replicas.setter
def replicas(self, replicas):
"""
Sets the replicas of this V1beta1StatefulSetStatus.
replicas is the number of Pods created by the StatefulSet controller.
:param replicas: The replicas of this V1beta1StatefulSetStatus.
:type: int
"""
if replicas is None:
raise ValueError("Invalid value for `replicas`, must not be `None`")
self._replicas = replicas
@property
def update_revision(self):
"""
Gets the update_revision of this V1beta1StatefulSetStatus.
updateRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [replicas-updatedReplicas,replicas)
:return: The update_revision of this V1beta1StatefulSetStatus.
:rtype: str
"""
return self._update_revision
@update_revision.setter
def update_revision(self, update_revision):
"""
Sets the update_revision of this V1beta1StatefulSetStatus.
updateRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [replicas-updatedReplicas,replicas)
:param update_revision: The update_revision of this V1beta1StatefulSetStatus.
:type: str
"""
self._update_revision = update_revision
@property
def updated_replicas(self):
"""
Gets the updated_replicas of this V1beta1StatefulSetStatus.
updatedReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by updateRevision.
:return: The updated_replicas of this V1beta1StatefulSetStatus.
:rtype: int
"""
return self._updated_replicas
@updated_replicas.setter
def updated_replicas(self, updated_replicas):
"""
Sets the updated_replicas of this V1beta1StatefulSetStatus.
updatedReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by updateRevision.
:param updated_replicas: The updated_replicas of this V1beta1StatefulSetStatus.
:type: int
"""
self._updated_replicas = updated_replicas
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta1StatefulSetStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
    """Return True if both objects are not equal."""
    are_equal = self == other
    return not are_equal
| 35.056818 | 221 | 0.662399 |
37c2c7b16406620f524488084218c718b5573277
| 41 |
py
|
Python
|
03Aula09-09/ex05.py
|
danicon/Curso-IPE
|
3b9e2a9d187492d6561a512363bd06156286df6a
|
[
"MIT"
] | 2 |
2020-09-09T12:50:57.000Z
|
2020-09-09T12:56:02.000Z
|
03Aula09-09/ex05.py
|
danicon/Curso-IPE
|
3b9e2a9d187492d6561a512363bd06156286df6a
|
[
"MIT"
] | null | null | null |
03Aula09-09/ex05.py
|
danicon/Curso-IPE
|
3b9e2a9d187492d6561a512363bd06156286df6a
|
[
"MIT"
] | null | null | null |
# Print every even number from 0 to 100 inclusive, one per line.
c = 0
while True:
    if c > 100:
        break
    print(c)
    c += 2
| 10.25 | 15 | 0.463415 |
cd1e315f835702b48458327613fa47204be4eda3
| 31 |
py
|
Python
|
fenfen.py
|
chenfenfen/test
|
30f3dadb6c968de56f320dbbde73621269892429
|
[
"Apache-2.0"
] | null | null | null |
fenfen.py
|
chenfenfen/test
|
30f3dadb6c968de56f320dbbde73621269892429
|
[
"Apache-2.0"
] | null | null | null |
fenfen.py
|
chenfenfen/test
|
30f3dadb6c968de56f320dbbde73621269892429
|
[
"Apache-2.0"
] | null | null | null |
# Parenthesized call form: the bare `print` statement is a SyntaxError on
# Python 3; with a single argument this form behaves identically on Python 2.
print("hello fenfen.comne on!")
| 15.5 | 30 | 0.741935 |
1263317970bb680fa2c5c4637b527640302db975
| 6,897 |
py
|
Python
|
kadet/__init__.py
|
kapicorp/kadet
|
aca4d155b6e6e58e34fd363995dc582cfe183b3c
|
[
"Apache-2.0"
] | 11 |
2021-02-11T21:23:13.000Z
|
2022-02-04T05:19:26.000Z
|
kadet/__init__.py
|
kapicorp/kadet
|
aca4d155b6e6e58e34fd363995dc582cfe183b3c
|
[
"Apache-2.0"
] | 11 |
2021-02-11T21:26:11.000Z
|
2021-12-06T11:12:05.000Z
|
kadet/__init__.py
|
kapicorp/kadet
|
aca4d155b6e6e58e34fd363995dc582cfe183b3c
|
[
"Apache-2.0"
] | 3 |
2021-02-25T09:42:04.000Z
|
2021-06-06T20:15:55.000Z
|
# SPDX-FileCopyrightText: 2021 The Kadet Authors <[email protected]>
#
# SPDX-License-Identifier: Apache-2.0
import hashlib
import json
from collections import defaultdict
import yaml
from typeguard import check_type
class Dict(defaultdict):
    """Attribute-accessible defaultdict whose missing keys auto-create Dicts."""

    def __getattr__(self, name):
        """Resolve attribute *name* as the dictionary item of the same name.

        Parameters
        ----------
        name :
            name of attribute to get
        """
        return self[name]

    def __setattr__(self, name, value):
        """Store attribute *name* as a dictionary item.

        Plain dict values are wrapped in Dict so nested items keep
        supporting attribute-style access.

        Parameters
        ----------
        name :
            name of attribute to set
        value :
            value of attribute to set
        """
        if type(value) is dict:
            value = Dict(from_dict=value)
        return self.__setitem__(name, value)

    def __repr__(self):
        """Render exactly like a plain dict."""
        return dict.__repr__(self)

    def __init__(self, from_dict=None):
        """Initialise, optionally seeding content from *from_dict*.

        Missing keys default to a fresh empty Dict.

        Parameters
        ----------
        from_dict :
            dictionary to load from
        """
        super().__init__(Dict)
        if from_dict:
            check_type(from_dict, from_dict, dict)
            self.update(from_dict)

    def dump(self):
        """Return a plain-dict view of this object."""
        return dict(self)
class BaseObj(object):
    """Base for objects that build a dict/list representation in self.root.

    Subclasses override new() (declare required/optional kwargs via
    need()/optional()) and body() (populate self.root); dump() then
    renders self.root as plain dict/list structures.
    """

    def __init__(self, **kwargs):
        """Return a BaseObj.

        kwargs will be saved into self.kwargs; values in self.root are
        returned as dict/list via self.dump().
        """
        self.root = Dict()
        self.kwargs = Dict(kwargs)
        self.new()
        self.body()

    def __str__(self):
        """Return str() of the dumped representation."""
        return str(self.dump())

    def __repr__(self):
        """Show class name, object id and dumped content for debugging."""
        return f"<{self.__class__.__name__} at {hex(id(self))} {self.dump()}>"

    @classmethod
    def from_json(cls, file_path):
        """Return a BaseObj initialised with json content from file_path."""
        with open(file_path) as fp:
            json_obj = json.load(fp)
            return cls.from_dict(json_obj)

    @classmethod
    def from_yaml(cls, file_path):
        """Return a BaseObj initialised with yaml content from file_path."""
        with open(file_path) as fp:
            yaml_obj = yaml.safe_load(fp)
            return cls.from_dict(yaml_obj)

    @classmethod
    def from_yaml_multidoc(cls, file_path):
        """Return list generator of BaseObj initialised with file_path data.

        The yield loop stays inside the `with` because safe_load_all
        reads the file lazily as the generator is consumed.
        """
        with open(file_path) as fp:
            yaml_objs = yaml.safe_load_all(fp)
            for yaml_obj in yaml_objs:
                yield cls.from_dict(yaml_obj)

    @classmethod
    def from_dict(cls, dict_value):
        """Return a BaseObj whose root is initialised from dict_value."""
        bobj = cls()
        bobj.root = Dict(from_dict=dict_value)
        return bobj

    def root_file(self, file_path):
        """Update self.root with YAML/JSON content in file_path.

        Raises ValueError if file_path does not end with .yaml, .yml or
        .json.
        """
        with open(file_path) as fp:
            if file_path.endswith(".yaml") or file_path.endswith(".yml"):
                yaml_obj = yaml.safe_load(fp)
                _copy = dict(self.root)
                _copy.update(yaml_obj)
                self.root = Dict(_copy)
            elif file_path.endswith(".json"):
                json_obj = json.load(fp)
                _copy = dict(self.root)
                _copy.update(json_obj)
                self.root = Dict(_copy)
            else:
                # XXX in Kapitan this is CompileError
                raise ValueError(
                    "file_path is neither JSON or YAML: {}".format(file_path)
                )

    def need(self, key, msg="key and value needed", istype=None):
        """Require that key is in self.kwargs.

        Error with msg if key not set. Raises TypeError if key value
        does not match type passed in istype.
        """
        err_msg = '{}: "{}": {}'.format(self.__class__.__name__, key, msg)
        if key not in self.kwargs:
            raise ValueError(err_msg)  # XXX in Kapitan this is CompileError
        elif istype is not None:
            check_type(key, self.kwargs[key], istype)

    def optional(self, key, default=None, istype=None):
        """Set self.kwargs key as optional.

        Use default value if set. Raise TypeError if key value does not
        match type passed in istype.
        """
        if key in self.kwargs and istype is not None:
            check_type(key, self.kwargs[key], istype)

        if key not in self.kwargs:
            if default is not None and istype is not None:
                check_type(key, default, istype)
            # BUGFIX: a non-None default with no istype was previously
            # dropped silently (the key stayed unset); always record it.
            self.kwargs[key] = default

    def new(self):
        """Initialise need()ed keys for a new BaseObj."""
        pass

    def new_with(self, **kwargs):
        """Merge kwargs into self.kwargs and re-run the parent's new().

        NOTE(review): super(type(self), self) resolves against the
        runtime type; this can recurse on deeper inheritance chains —
        confirm intended before changing.
        """
        self.kwargs.update(kwargs)
        super(type(self), self).new()

    def body(self):
        """Set values/logic for self.root."""
        pass

    def _dump(self, obj):
        """Recursively update obj should it contain other BaseObj values."""
        if isinstance(obj, BaseObj):
            if isinstance(obj.root, list):
                obj.root = [self._dump(item) for item in obj.root]
                # root is just a list, return itself
                return obj.root
            else:
                # Update all dict/Dict root items
                for k, v in obj.root.items():
                    obj.root[k] = self._dump(v)

                # return and dump leaf depending on instance type
                if isinstance(obj.root, Dict):
                    # root is Dict, dump as dict
                    return obj.root.dump()

                if isinstance(obj.root, dict):
                    # root is just a dict, return itself
                    return obj.root

                # BaseObj needs to return dump()
                else:
                    return obj.root.dump()

        elif isinstance(obj, Dict):
            return obj.dump()

        elif isinstance(obj, list):
            obj = [self._dump(item) for item in obj]
            # list has no .dump, return itself
            return obj

        elif isinstance(obj, dict):
            for k, v in obj.items():
                obj[k] = self._dump(v)
            # dict has no .dump, return itself
            return obj

        # anything else, return itself
        return obj

    def dump(self):
        """Return object dict/list."""
        return self._dump(self)

    def sha256(self):
        """Return sha256 hexdigest for self.root."""
        return hashlib.sha256(str(self.dump()).encode()).hexdigest()
| 30.117904 | 82 | 0.54531 |
867141fb2cb34d11363e191eab72431238116590
| 1,229 |
py
|
Python
|
src/__init__.py
|
alphagov-mirror/tagging-suggester
|
1d5a6d54cefbf03efb32f67ae779eedd2f3d0071
|
[
"FTL"
] | 2 |
2019-07-18T10:59:11.000Z
|
2020-01-01T16:19:30.000Z
|
src/__init__.py
|
alphagov-mirror/tagging-suggester
|
1d5a6d54cefbf03efb32f67ae779eedd2f3d0071
|
[
"FTL"
] | 1 |
2019-11-07T12:36:59.000Z
|
2019-11-07T12:36:59.000Z
|
src/__init__.py
|
alphagov-mirror/tagging-suggester
|
1d5a6d54cefbf03efb32f67ae779eedd2f3d0071
|
[
"FTL"
] | 3 |
2019-08-29T14:02:20.000Z
|
2021-04-10T20:25:48.000Z
|
from flask import Flask, request, jsonify, g
from src.models.tagging_suggester import *
import nltk
from datetime import datetime
import src.utils.app_config as app_config
from models import Request
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# Flask application instance for the tagging suggester service.
app = Flask(__name__)
# Built once at import time and shared by all request handlers below.
tagging_suggester = TaggingSuggester()
@app.before_request
def before_request():
    """Open a per-request SQLAlchemy session and stash it on flask.g."""
    engine = create_engine(app_config.database_url(), strategy='threadlocal')
    session_factory = sessionmaker(bind=engine)
    g.db_session = session_factory()
@app.route('/create', methods=['POST'])
def tagging_suggestion():
    """Predict tag suggestions for the posted text and log the request."""
    payload = request.get_json()
    request_record = Request(
        created_at=datetime.now().isoformat(),
        edition_id=payload['edition_id'],
        branch_predictor_probabilities="",
        api_version="1.0",
    )
    result, request_record = tagging_suggester.predict(
        payload['text'], request_record
    )
    g.db_session.add(request_record)
    g.db_session.commit()
    return jsonify({"suggestions": result})
@app.after_request
def after_request(response):
    """Close the per-request DB session, if one was opened, and pass the
    response through unchanged.

    Uses getattr because flask.g raises AttributeError for attributes
    that were never set — e.g. when an error prevented before_request
    from running — which would otherwise mask the original failure.
    """
    db_session = getattr(g, 'db_session', None)
    if db_session is not None:
        db_session.close()
    return response
if __name__=="__main__":
    # Fetch the NLTK 'punkt' tokenizer data before serving requests.
    nltk.download('punkt')
    # Development server only; use a WSGI server in production.
    app.run(debug=True)
| 27.931818 | 84 | 0.724166 |
2852b402b9bc402d90adf3313050dd0237197553
| 2,899 |
py
|
Python
|
accounts/migrations/0009_auto_20190809_1659.py
|
vftens/Django-CRM
|
fd02e42b2e9525abcc0e14ee924e5bdf569117bb
|
[
"MIT"
] | null | null | null |
accounts/migrations/0009_auto_20190809_1659.py
|
vftens/Django-CRM
|
fd02e42b2e9525abcc0e14ee924e5bdf569117bb
|
[
"MIT"
] | null | null | null |
accounts/migrations/0009_auto_20190809_1659.py
|
vftens/Django-CRM
|
fd02e42b2e9525abcc0e14ee924e5bdf569117bb
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.3 on 2019-08-09 11:29
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Reshape the accounts Email model and add the EmailLog model.

    Renames sent_at/sender, replaces the single recipient field with a
    recipients many-to-many, adds scheduling/rendering fields, and
    introduces EmailLog linking an Email to a Contact with a sent flag.
    """

    dependencies = [
        ("contacts", "0003_merge_20190214_1427"),
        ("accounts", "0008_account_assigned_to"),
    ]

    operations = [
        # Rename the timestamp and sender columns to the new naming scheme.
        migrations.RenameField(
            model_name="email", old_name="sent_at", new_name="created_on",
        ),
        migrations.RenameField(
            model_name="email", old_name="sender", new_name="from_account",
        ),
        # The single recipient is superseded by the recipients M2M added below.
        migrations.RemoveField(model_name="email", name="recipient",),
        migrations.AddField(
            model_name="email",
            name="from_email",
            # NOTE(review): timezone.now as an EmailField default looks like the
            # auto-inserted one-off default for a non-nullable column
            # (preserve_default=False discards it after the migration).
            field=models.EmailField(default=django.utils.timezone.now, max_length=254),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name="email",
            name="recipients",
            field=models.ManyToManyField(
                related_name="recieved_email", to="contacts.Contact"
            ),
        ),
        migrations.AddField(
            model_name="email",
            name="rendered_message_body",
            field=models.TextField(null=True),
        ),
        # Fields supporting scheduled sending of emails.
        migrations.AddField(
            model_name="email",
            name="scheduled_date_time",
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name="email",
            name="scheduled_later",
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name="email",
            name="timezone",
            field=models.CharField(default="UTC", max_length=100),
        ),
        # Per-contact delivery tracking for each Email.
        migrations.CreateModel(
            name="EmailLog",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("is_sent", models.BooleanField(default=False)),
                (
                    "contact",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="contact_email_log",
                        to="contacts.Contact",
                    ),
                ),
                (
                    "email",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="email_log",
                        to="accounts.Email",
                    ),
                ),
            ],
        ),
    ]
2cdc0510a1034e518bffdc0424f5a2fb6c37cb6d
| 2,977 |
py
|
Python
|
core/SymbolTable.py
|
BhanuPrakashNani/jackCompiler
|
20fd60546ab97b34cc95aa81c6f6eea2b30c2832
|
[
"MIT"
] | null | null | null |
core/SymbolTable.py
|
BhanuPrakashNani/jackCompiler
|
20fd60546ab97b34cc95aa81c6f6eea2b30c2832
|
[
"MIT"
] | null | null | null |
core/SymbolTable.py
|
BhanuPrakashNani/jackCompiler
|
20fd60546ab97b34cc95aa81c6f6eea2b30c2832
|
[
"MIT"
] | 3 |
2019-10-07T19:53:05.000Z
|
2021-10-04T09:51:05.000Z
|
""" SYMBOL TABLE"""
class SymbolTable:
    """Jack compiler symbol table with a class-level (global) scope and
    per-subroutine local scopes.

    Entries are (type, kind, index) tuples; each kind keeps its own
    running index counter.
    """

    def __init__(self):
        self.global_scope = {}
        self.local_scope = {}
        self.current_scope = self.global_scope
        self.arg_counter = 0
        self.var_counter = 0
        self.static_counter = 0
        self.field_counter = 0
        self.if_counter = 0
        self.while_counter = 0

    def startSubroutine(self, name):
        """Reset the local symbol table (and subroutine counters) for *name*."""
        self.local_scope[name] = {}
        self.arg_counter = 0
        self.var_counter = 0
        self.if_counter = 0
        self.while_counter = 0

    def define(self, name, type, kind):
        """Add a new entry to the symbol table.

        field/static entries have class-level scope; var/arg entries
        have subroutine-level scope. Unknown kinds are ignored.
        """
        counter_attrs = {
            "field": "field_counter",
            "static": "static_counter",
            "var": "var_counter",
            "arg": "arg_counter",
        }
        if kind not in counter_attrs:
            return
        scope = (
            self.global_scope
            if kind in ("field", "static")
            else self.current_scope
        )
        attr = counter_attrs[kind]
        index = getattr(self, attr)
        scope[name] = (type, kind, index)
        setattr(self, attr, index + 1)

    def varCount(self, kind):
        """Return the number of *kind* entries in the current scope."""
        return sum(1 for entry in self.current_scope.values() if entry[1] == kind)

    def globalCount(self, kind):
        """Return the number of *kind* entries in the class-level scope."""
        return sum(1 for entry in self.global_scope.values() if entry[1] == kind)

    def _lookup(self, name, position):
        """Return element *position* of name's entry, searching the current
        scope first, then the global scope; "NONE" when undefined."""
        if name in self.current_scope:
            return self.current_scope[name][position]
        if name in self.global_scope:
            return self.global_scope[name][position]
        return "NONE"

    def kindOf(self, name):
        """Return the kind of *name*, or "NONE" when it is undefined."""
        return self._lookup(name, 1)

    def typeOf(self, name):
        """Return the type of *name*, or "NONE" when it is undefined."""
        return self._lookup(name, 0)

    def indexOf(self, name):
        """Return the index of *name*, or "NONE" when it is undefined."""
        return self._lookup(name, 2)

    def setScope(self, name):
        """Switch the current scope: "global" selects the class scope,
        any other value selects the subroutine scope of that name."""
        if name == "global":
            self.current_scope = self.global_scope
        else:
            self.current_scope = self.local_scope[name]
def main():
    """Smoke test: define one field symbol and print its kind."""
    table = SymbolTable()
    table.define("manikishan", "int", "field")
    print(table.kindOf("manikishan"))


if __name__ == "__main__":
    main()
| 27.82243 | 86 | 0.562983 |
921b3b83e45b8dde4efb169af42fc09e0c6cce3f
| 72,990 |
py
|
Python
|
openstack_dashboard/api/neutron.py
|
prankul88/horizon
|
07faca470166341f9010f66cee8195be84435cea
|
[
"Apache-2.0"
] | 1 |
2018-10-09T05:54:57.000Z
|
2018-10-09T05:54:57.000Z
|
openstack_dashboard/api/neutron.py
|
prankul88/horizon
|
07faca470166341f9010f66cee8195be84435cea
|
[
"Apache-2.0"
] | null | null | null |
openstack_dashboard/api/neutron.py
|
prankul88/horizon
|
07faca470166341f9010f66cee8195be84435cea
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Cisco Systems, Inc.
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import collections
import copy
import logging
import netaddr
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from neutronclient.common import exceptions as neutron_exc
from neutronclient.v2_0 import client as neutron_client
from novaclient import exceptions as nova_exc
import six
from horizon import exceptions
from horizon import messages
from horizon.utils.memoized import memoized
from horizon.utils.memoized import memoized_with_request
from openstack_dashboard.api import base
from openstack_dashboard.api import nova
from openstack_dashboard.contrib.developer.profiler import api as profiler
from openstack_dashboard import policy
LOG = logging.getLogger(__name__)
IP_VERSION_DICT = {4: 'IPv4', 6: 'IPv6'}
OFF_STATE = 'OFF'
ON_STATE = 'ON'
ROUTER_INTERFACE_OWNERS = (
'network:router_interface',
'network:router_interface_distributed',
'network:ha_router_replicated_interface'
)
VNIC_TYPES = [
('normal', _('Normal')),
('direct', _('Direct')),
('direct-physical', _('Direct Physical')),
('macvtap', _('MacVTap')),
('baremetal', _('Bare Metal')),
('virtio-forwarder', _('Virtio Forwarder')),
]
class NeutronAPIDictWrapper(base.APIDictWrapper):
def __init__(self, apidict):
if 'admin_state_up' in apidict:
if apidict['admin_state_up']:
apidict['admin_state'] = 'UP'
else:
apidict['admin_state'] = 'DOWN'
# Django cannot handle a key name with ':', so use '__'.
apidict.update({
key.replace(':', '__'): value
for key, value in apidict.items()
if ':' in key
})
super(NeutronAPIDictWrapper, self).__init__(apidict)
def set_id_as_name_if_empty(self, length=8):
try:
if not self._apidict['name'].strip():
id = self._apidict['id']
if length:
id = id[:length]
self._apidict['name'] = '(%s)' % id
except KeyError:
pass
def items(self):
return self._apidict.items()
@property
def name_or_id(self):
return (self._apidict.get('name').strip() or
'(%s)' % self._apidict['id'][:13])
class Agent(NeutronAPIDictWrapper):
"""Wrapper for neutron agents."""
class Network(NeutronAPIDictWrapper):
"""Wrapper for neutron Networks."""
class Subnet(NeutronAPIDictWrapper):
"""Wrapper for neutron subnets."""
def __init__(self, apidict):
apidict['ipver_str'] = get_ipver_str(apidict['ip_version'])
super(Subnet, self).__init__(apidict)
AUTO_ALLOCATE_ID = '__auto_allocate__'
class PreAutoAllocateNetwork(Network):
def __init__(self, request):
tenant_id = request.user.tenant_id
auto_allocated_subnet = Subnet({
'name': 'auto_allocated_subnet',
'id': AUTO_ALLOCATE_ID,
'network_id': 'auto',
'tenant_id': tenant_id,
# The following two fields are fake so that Subnet class
# and the network topology view work without errors.
'ip_version': 4,
'cidr': '0.0.0.0/0',
})
auto_allocated_network = {
'name': 'auto_allocated_network',
'description': 'Network to be allocated automatically',
'id': AUTO_ALLOCATE_ID,
'status': 'ACTIVE',
'admin_state_up': True,
'shared': False,
'router:external': False,
'subnets': [auto_allocated_subnet],
'tenant_id': tenant_id,
}
super(PreAutoAllocateNetwork, self).__init__(auto_allocated_network)
class Trunk(NeutronAPIDictWrapper):
"""Wrapper for neutron trunks."""
@property
def subport_count(self):
return len(self._apidict.get('sub_ports', []))
def to_dict(self):
trunk_dict = super(Trunk, self).to_dict()
trunk_dict['name_or_id'] = self.name_or_id
trunk_dict['subport_count'] = self.subport_count
return trunk_dict
class SubnetPool(NeutronAPIDictWrapper):
"""Wrapper for neutron subnetpools."""
class Port(NeutronAPIDictWrapper):
"""Wrapper for neutron ports."""
def __init__(self, apidict):
if 'mac_learning_enabled' in apidict:
apidict['mac_state'] = \
ON_STATE if apidict['mac_learning_enabled'] else OFF_STATE
pairs = apidict.get('allowed_address_pairs')
if pairs:
apidict = copy.deepcopy(apidict)
wrapped_pairs = [PortAllowedAddressPair(pair) for pair in pairs]
apidict['allowed_address_pairs'] = wrapped_pairs
super(Port, self).__init__(apidict)
class PortTrunkParent(Port):
"""Neutron ports that are trunk parents.
There's no need to add extra attributes for a trunk parent, because it
already has 'trunk_details'. See also class PortTrunkSubport.
"""
class PortTrunkSubport(Port):
"""Neutron ports that are trunk subports.
The Neutron API expresses port subtyping information in a surprisingly
complex way. When you see a port with attribute 'trunk_details' you know
it's a trunk parent. But when you see a port without the 'trunk_details'
attribute you can't tell if it's a trunk subport or a regular one without
looking beyond the port's attributes. You must go and check if trunks
(and/or trunk_details of trunk parent ports) refer to this port.
Since this behavior is awfully complex we hide this from the rest of
horizon by introducing types PortTrunkParent and PortTrunkSubport.
"""
def __init__(self, apidict, trunk_subport_info):
for field in ['trunk_id', 'segmentation_type', 'segmentation_id']:
apidict[field] = trunk_subport_info[field]
super(PortTrunkSubport, self).__init__(apidict)
class PortAllowedAddressPair(NeutronAPIDictWrapper):
"""Wrapper for neutron port allowed address pairs."""
def __init__(self, addr_pair):
super(PortAllowedAddressPair, self).__init__(addr_pair)
# Horizon references id property for table operations
self.id = addr_pair['ip_address']
class Router(NeutronAPIDictWrapper):
"""Wrapper for neutron routers."""
class RouterStaticRoute(NeutronAPIDictWrapper):
"""Wrapper for neutron routes extra route."""
def __init__(self, route):
super(RouterStaticRoute, self).__init__(route)
# Horizon references id property for table operations
self.id = route['nexthop'] + ":" + route['destination']
class SecurityGroup(NeutronAPIDictWrapper):
# Required attributes: id, name, description, tenant_id, rules
def __init__(self, sg, sg_dict=None):
if sg_dict is None:
sg_dict = {sg['id']: sg['name']}
sg['rules'] = [SecurityGroupRule(rule, sg_dict)
for rule in sg['security_group_rules']]
super(SecurityGroup, self).__init__(sg)
def to_dict(self):
return {k: self._apidict[k] for k in self._apidict if k != 'rules'}
@six.python_2_unicode_compatible
class SecurityGroupRule(NeutronAPIDictWrapper):
# Required attributes:
# id, parent_group_id
# ip_protocol, from_port, to_port, ip_range, group
# ethertype, direction (Neutron specific)
def _get_secgroup_name(self, sg_id, sg_dict):
if sg_id:
if sg_dict is None:
sg_dict = {}
# If sg name not found in sg_dict,
# first two parts of UUID is used as sg name.
return sg_dict.get(sg_id, sg_id[:13])
else:
return u''
def __init__(self, sgr, sg_dict=None):
# In Neutron, if both remote_ip_prefix and remote_group_id are None,
# it means all remote IP range is allowed, i.e., 0.0.0.0/0 or ::/0.
if not sgr['remote_ip_prefix'] and not sgr['remote_group_id']:
if sgr['ethertype'] == 'IPv6':
sgr['remote_ip_prefix'] = '::/0'
else:
sgr['remote_ip_prefix'] = '0.0.0.0/0'
rule = {
'id': sgr['id'],
'parent_group_id': sgr['security_group_id'],
'direction': sgr['direction'],
'ethertype': sgr['ethertype'],
'ip_protocol': sgr['protocol'],
'from_port': sgr['port_range_min'],
'to_port': sgr['port_range_max'],
'description': sgr.get('description', '')
}
cidr = sgr['remote_ip_prefix']
rule['ip_range'] = {'cidr': cidr} if cidr else {}
group = self._get_secgroup_name(sgr['remote_group_id'], sg_dict)
rule['group'] = {'name': group} if group else {}
super(SecurityGroupRule, self).__init__(rule)
def __str__(self):
if 'name' in self.group:
remote = self.group['name']
elif 'cidr' in self.ip_range:
remote = self.ip_range['cidr']
else:
remote = 'ANY'
direction = 'to' if self.direction == 'egress' else 'from'
if self.from_port:
if self.from_port == self.to_port:
proto_port = ("%s/%s" %
(self.from_port, self.ip_protocol.lower()))
else:
proto_port = ("%s-%s/%s" %
(self.from_port, self.to_port,
self.ip_protocol.lower()))
elif self.ip_protocol:
try:
ip_proto = int(self.ip_protocol)
proto_port = "ip_proto=%d" % ip_proto
except Exception:
# well-defined IP protocol name like TCP, UDP, ICMP.
proto_port = self.ip_protocol
else:
proto_port = ''
return (_('ALLOW %(ethertype)s %(proto_port)s '
'%(direction)s %(remote)s') %
{'ethertype': self.ethertype,
'proto_port': proto_port,
'remote': remote,
'direction': direction})
class SecurityGroupManager(object):
"""Manager class to implement Security Group methods
SecurityGroup object returned from methods in this class
must contains the following attributes:
* id: ID of Security Group (int for Nova, uuid for Neutron)
* name
* description
* tenant_id
* rules: A list of SecurityGroupRule objects
SecurityGroupRule object should have the following attributes
(The attribute names and their formats are borrowed from nova
security group implementation):
* id
* direction
* ethertype
* parent_group_id: security group the rule belongs to
* ip_protocol
* from_port: lower limit of allowed port range (inclusive)
* to_port: upper limit of allowed port range (inclusive)
* ip_range: remote IP CIDR (source for ingress, dest for egress).
The value should be a format of "{'cidr': <cidr>}"
* group: remote security group. The value should be a format of
"{'name': <secgroup_name>}"
"""
backend = 'neutron'
def __init__(self, request):
self.request = request
self.client = neutronclient(request)
def _list(self, **filters):
secgroups = self.client.list_security_groups(**filters)
return [SecurityGroup(sg) for sg in secgroups.get('security_groups')]
@profiler.trace
def list(self, **params):
"""Fetches a list all security groups.
:returns: List of SecurityGroup objects
"""
# This is to ensure tenant_id key is not populated
# if tenant_id=None is specified.
tenant_id = params.pop('tenant_id', self.request.user.tenant_id)
if tenant_id:
params['tenant_id'] = tenant_id
return self._list(**params)
def _sg_name_dict(self, sg_id, rules):
"""Create a mapping dict from secgroup id to its name."""
related_ids = set([sg_id])
related_ids |= set(filter(None, [r['remote_group_id'] for r in rules]))
related_sgs = self.client.list_security_groups(id=related_ids,
fields=['id', 'name'])
related_sgs = related_sgs.get('security_groups')
return dict((sg['id'], sg['name']) for sg in related_sgs)
@profiler.trace
def get(self, sg_id):
"""Fetches the security group.
:returns: SecurityGroup object corresponding to sg_id
"""
secgroup = self.client.show_security_group(sg_id).get('security_group')
sg_dict = self._sg_name_dict(sg_id, secgroup['security_group_rules'])
return SecurityGroup(secgroup, sg_dict)
@profiler.trace
def create(self, name, desc):
"""Create a new security group.
:returns: SecurityGroup object created
"""
body = {'security_group': {'name': name,
'description': desc,
'tenant_id': self.request.user.project_id}}
secgroup = self.client.create_security_group(body)
return SecurityGroup(secgroup.get('security_group'))
@profiler.trace
def update(self, sg_id, name, desc):
body = {'security_group': {'name': name,
'description': desc}}
secgroup = self.client.update_security_group(sg_id, body)
return SecurityGroup(secgroup.get('security_group'))
@profiler.trace
def delete(self, sg_id):
"""Delete the specified security group."""
self.client.delete_security_group(sg_id)
@profiler.trace
def rule_create(self, parent_group_id,
direction=None, ethertype=None,
ip_protocol=None, from_port=None, to_port=None,
cidr=None, group_id=None, description=None):
"""Create a new security group rule.
:param parent_group_id: security group id a rule is created to
:param direction: ``ingress`` or ``egress``
:param ethertype: ``IPv4`` or ``IPv6``
:param ip_protocol: tcp, udp, icmp
:param from_port: L4 port range min
:param to_port: L4 port range max
:param cidr: Remote IP CIDR
:param group_id: ID of Source Security Group
:returns: SecurityGroupRule object
"""
if not cidr:
cidr = None
if from_port < 0:
from_port = None
if to_port < 0:
to_port = None
if isinstance(ip_protocol, int) and ip_protocol < 0:
ip_protocol = None
params = {'security_group_id': parent_group_id,
'direction': direction,
'ethertype': ethertype,
'protocol': ip_protocol,
'port_range_min': from_port,
'port_range_max': to_port,
'remote_ip_prefix': cidr,
'remote_group_id': group_id}
if description is not None:
params['description'] = description
body = {'security_group_rule': params}
try:
rule = self.client.create_security_group_rule(body)
except neutron_exc.OverQuotaClient:
raise exceptions.Conflict(
_('Security group rule quota exceeded.'))
except neutron_exc.Conflict:
raise exceptions.Conflict(
_('Security group rule already exists.'))
rule = rule.get('security_group_rule')
sg_dict = self._sg_name_dict(parent_group_id, [rule])
return SecurityGroupRule(rule, sg_dict)
@profiler.trace
def rule_delete(self, sgr_id):
"""Delete the specified security group rule."""
self.client.delete_security_group_rule(sgr_id)
@profiler.trace
def list_by_instance(self, instance_id):
"""Gets security groups of an instance.
:returns: List of SecurityGroup objects associated with the instance
"""
ports = port_list(self.request, device_id=instance_id)
sg_ids = []
for p in ports:
sg_ids += p.security_groups
return self._list(id=set(sg_ids)) if sg_ids else []
@profiler.trace
def update_instance_security_group(self, instance_id,
new_security_group_ids):
"""Update security groups of a specified instance."""
ports = port_list(self.request, device_id=instance_id)
for p in ports:
params = {'security_groups': new_security_group_ids}
port_update(self.request, p.id, **params)
class FloatingIp(base.APIDictWrapper):
_attrs = ['id', 'ip', 'fixed_ip', 'port_id', 'instance_id',
'instance_type', 'pool', 'dns_domain', 'dns_name']
def __init__(self, fip):
fip['ip'] = fip['floating_ip_address']
fip['fixed_ip'] = fip['fixed_ip_address']
fip['pool'] = fip['floating_network_id']
super(FloatingIp, self).__init__(fip)
class FloatingIpPool(base.APIDictWrapper):
pass
class FloatingIpTarget(base.APIDictWrapper):
"""Representation of floating IP association target.
The following parameter needs to be passed when instantiating the class:
:param port: ``Port`` object which represents a neutron port.
:param ip_address: IP address of the ``port``. It must be one of
IP address of a given port.
:param label: String displayed in the floating IP association form.
IP address will be appended to a specified label.
"""
def __init__(self, port, ip_address, label):
name = '%s: %s' % (label, ip_address) if label else ip_address
target = {'name': name,
'id': '%s_%s' % (port.id, ip_address),
'port_id': port.id,
'instance_id': port.device_id}
super(FloatingIpTarget, self).__init__(target)
class FloatingIpManager(object):
"""Manager class to implement Floating IP methods
The FloatingIP object returned from methods in this class
must contains the following attributes:
* id: ID of Floating IP
* ip: Floating IP address
* pool: ID of Floating IP pool from which the address is allocated
* fixed_ip: Fixed IP address of a VIF associated with the address
* port_id: ID of a VIF associated with the address
(instance_id when Nova floating IP is used)
* instance_id: Instance ID of an associated with the Floating IP
"""
device_owner_map = {
'compute:': 'compute',
'neutron:LOADBALANCER': 'loadbalancer',
}
def __init__(self, request):
self.request = request
self.client = neutronclient(request)
@profiler.trace
def list_pools(self):
"""Fetches a list of all floating IP pools.
:returns: List of FloatingIpPool objects
"""
search_opts = {'router:external': True}
return [FloatingIpPool(pool) for pool
in self.client.list_networks(**search_opts).get('networks')]
def _get_instance_type_from_device_owner(self, device_owner):
for key, value in self.device_owner_map.items():
if device_owner.startswith(key):
return value
return device_owner
def _set_instance_info(self, fip, port=None):
if fip['port_id']:
if not port:
port = port_get(self.request, fip['port_id'])
fip['instance_id'] = port.device_id
fip['instance_type'] = self._get_instance_type_from_device_owner(
port.device_owner)
else:
fip['instance_id'] = None
fip['instance_type'] = None
@profiler.trace
def list(self, all_tenants=False, **search_opts):
"""Fetches a list of all floating IPs.
:returns: List of FloatingIp object
"""
if not all_tenants:
tenant_id = self.request.user.tenant_id
# In Neutron, list_floatingips returns Floating IPs from
# all tenants when the API is called with admin role, so
# we need to filter them with tenant_id.
search_opts['tenant_id'] = tenant_id
port_search_opts = {'tenant_id': tenant_id}
else:
port_search_opts = {}
fips = self.client.list_floatingips(**search_opts)
fips = fips.get('floatingips')
# Get port list to add instance_id to floating IP list
# instance_id is stored in device_id attribute
ports = port_list(self.request, **port_search_opts)
port_dict = collections.OrderedDict([(p['id'], p) for p in ports])
for fip in fips:
self._set_instance_info(fip, port_dict.get(fip['port_id']))
return [FloatingIp(fip) for fip in fips]
@profiler.trace
def get(self, floating_ip_id):
"""Fetches the floating IP.
:returns: FloatingIp object corresponding to floating_ip_id
"""
fip = self.client.show_floatingip(floating_ip_id).get('floatingip')
self._set_instance_info(fip)
return FloatingIp(fip)
@profiler.trace
def allocate(self, pool, tenant_id=None, **params):
"""Allocates a floating IP to the tenant.
You must provide a pool name or id for which you would like to
allocate a floating IP.
:returns: FloatingIp object corresponding to an allocated floating IP
"""
if not tenant_id:
tenant_id = self.request.user.project_id
create_dict = {'floating_network_id': pool,
'tenant_id': tenant_id}
if 'subnet_id' in params:
create_dict['subnet_id'] = params['subnet_id']
if 'floating_ip_address' in params:
create_dict['floating_ip_address'] = params['floating_ip_address']
if 'description' in params:
create_dict['description'] = params['description']
if 'dns_domain' in params:
create_dict['dns_domain'] = params['dns_domain']
if 'dns_name' in params:
create_dict['dns_name'] = params['dns_name']
fip = self.client.create_floatingip(
{'floatingip': create_dict}).get('floatingip')
self._set_instance_info(fip)
return FloatingIp(fip)
    @profiler.trace
    def release(self, floating_ip_id):
        """Releases a floating IP specified.

        :param floating_ip_id: ID of the floating IP to delete
        """
        self.client.delete_floatingip(floating_ip_id)
@profiler.trace
def associate(self, floating_ip_id, port_id):
"""Associates the floating IP to the port.
``port_id`` represents a VNIC of an instance.
``port_id`` argument is different from a normal neutron port ID.
A value passed as ``port_id`` must be one of target_id returned by
``list_targets``, ``get_target_by_instance`` or
``list_targets_by_instance`` method.
"""
# NOTE: In Neutron Horizon floating IP support, port_id is
# "<port_id>_<ip_address>" format to identify multiple ports.
pid, ip_address = port_id.split('_', 1)
update_dict = {'port_id': pid,
'fixed_ip_address': ip_address}
self.client.update_floatingip(floating_ip_id,
{'floatingip': update_dict})
    @profiler.trace
    def disassociate(self, floating_ip_id):
        """Disassociates the floating IP specified."""
        # Clearing port_id detaches the floating IP; the allocation itself
        # is kept (release() is the call that frees it).
        update_dict = {'port_id': None}
        self.client.update_floatingip(floating_ip_id,
                                      {'floatingip': update_dict})
    def _get_reachable_subnets(self, ports, fetch_router_ports=False):
        """Return the set of subnet IDs reachable from external networks.

        A subnet counts as reachable when it is plugged into a router
        whose gateway sits on an external network, or when it belongs to
        a shared network.  When the ``enable_fip_topology_check`` setting
        is disabled, every subnet seen on *ports* is treated as reachable.

        :param ports: ports used to derive candidate subnets (and, unless
            fetch_router_ports is set, router interface ports).
        :param fetch_router_ports: if True, query router interface ports
            from Neutron instead of filtering them out of *ports*.
        """
        if not is_enabled_by_config('enable_fip_topology_check', True):
            # All subnets are reachable from external network
            return set(
                p.fixed_ips[0]['subnet_id'] for p in ports if p.fixed_ips
            )
        # Retrieve subnet list reachable from external network
        ext_net_ids = [ext_net.id for ext_net in self.list_pools()]
        gw_routers = [r.id for r in router_list(self.request)
                      if (r.external_gateway_info and
                          r.external_gateway_info.get('network_id')
                          in ext_net_ids)]
        if fetch_router_ports:
            router_ports = port_list(self.request,
                                     device_owner=ROUTER_INTERFACE_OWNERS)
        else:
            router_ports = [p for p in ports
                            if p.device_owner in ROUTER_INTERFACE_OWNERS]
        reachable_subnets = set([p.fixed_ips[0]['subnet_id']
                                 for p in router_ports
                                 if p.device_id in gw_routers])
        # we have to include any shared subnets as well because we may not
        # have permission to see the router interface to infer connectivity
        shared = set([s.id for n in network_list(self.request, shared=True)
                      for s in n.subnets])
        return reachable_subnets | shared
    @profiler.trace
    def list_targets(self):
        """Returns a list of association targets of instance VIFs.

        Each association target is represented as FloatingIpTarget object.
        FloatingIpTarget is a APIResourceWrapper/APIDictWrapper and
        'id' and 'name' attributes must be defined in each object.
        FloatingIpTarget.id can be passed as port_id in associate().
        FloatingIpTarget.name is displayed in Floating Ip Association Form.
        """
        tenant_id = self.request.user.tenant_id
        ports = port_list(self.request, tenant_id=tenant_id)
        servers, has_more = nova.server_list(self.request, detailed=False)
        server_dict = collections.OrderedDict(
            [(s.id, s.name) for s in servers])
        reachable_subnets = self._get_reachable_subnets(ports)
        # One target is produced per (port, IPv4 fixed IP) pair whose
        # subnet is reachable from an external network.
        targets = []
        for p in ports:
            # Remove network ports from Floating IP targets
            if p.device_owner.startswith('network:'):
                continue
            server_name = server_dict.get(p.device_id)
            for ip in p.fixed_ips:
                if ip['subnet_id'] not in reachable_subnets:
                    continue
                # Floating IPs can only target IPv4 addresses.
                if netaddr.IPAddress(ip['ip_address']).version != 4:
                    continue
                targets.append(FloatingIpTarget(p, ip['ip_address'],
                                                server_name))
        return targets
def _target_ports_by_instance(self, instance_id):
if not instance_id:
return None
search_opts = {'device_id': instance_id}
return port_list(self.request, **search_opts)
    @profiler.trace
    def list_targets_by_instance(self, instance_id, target_list=None):
        """Returns a list of FloatingIpTarget objects of FIP association.

        :param instance_id: ID of target VM instance
        :param target_list: (optional) a list returned by list_targets().
            If specified, looking up is done against the specified list
            to save extra API calls to a back-end. Otherwise target list
            is retrieved from a back-end inside the method.
        """
        if target_list is not None:
            # We assume that target_list was returned by list_targets()
            # so we can assume checks for subnet reachability and IP version
            # have been done already. We skip all checks here.
            return [target for target in target_list
                    if target['instance_id'] == instance_id]
        else:
            ports = self._target_ports_by_instance(instance_id)
            reachable_subnets = self._get_reachable_subnets(
                ports, fetch_router_ports=True)
            name = self._get_server_name(instance_id)
            targets = []
            for p in ports:
                for ip in p.fixed_ips:
                    if ip['subnet_id'] not in reachable_subnets:
                        continue
                    # Floating IPs can only target IPv4 addresses.
                    if netaddr.IPAddress(ip['ip_address']).version != 4:
                        continue
                    targets.append(FloatingIpTarget(p, ip['ip_address'], name))
            return targets
def _get_server_name(self, server_id):
try:
server = nova.server_get(self.request, server_id)
return server.name
except nova_exc.NotFound:
return ''
    def is_simple_associate_supported(self) -> bool:
        """Returns True if the default floating IP pool is enabled."""
        # NOTE: There are two reason that simple association support
        # needs more considerations. (1) Neutron does not support the
        # default floating IP pool at the moment. It can be avoided
        # in case where only one floating IP pool exists.
        # (2) Neutron floating IP is associated with each VIF and
        # we need to check whether such VIF is only one for an instance
        # to enable simple association support.
        return False
def is_supported(self):
"""Returns True if floating IP feature is supported."""
network_config = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {})
return network_config.get('enable_router', True)
def get_ipver_str(ip_version):
    """Convert an ip version number to a human-friendly string.

    Returns '' for versions missing from IP_VERSION_DICT.
    """
    return IP_VERSION_DICT.get(ip_version, '')
def get_auth_params_from_request(request):
    """Extract (token id, neutron endpoint, identity endpoint) from a request.

    Used as the cache-key extractor for ``memoized_with_request`` on
    ``neutronclient`` below.
    """
    return (
        request.user.token.id,
        base.url_for(request, 'network'),
        base.url_for(request, 'identity')
    )
@memoized_with_request(get_auth_params_from_request)
def neutronclient(request_auth_params):
    """Build a neutron client for the given (token, neutron URL, auth URL).

    Memoized per auth-parameter triple via ``memoized_with_request`` so
    repeated calls with the same request reuse one client instance.
    """
    token_id, neutron_url, auth_url = request_auth_params
    # SSL behaviour follows the dashboard-wide settings.
    insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
    cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
    c = neutron_client.Client(token=token_id,
                              auth_url=auth_url,
                              endpoint_url=neutron_url,
                              insecure=insecure, ca_cert=cacert)
    return c
@profiler.trace
def list_resources_with_long_filters(list_method,
                                     filter_attr, filter_values, **params):
    """List neutron resources with handling RequestURITooLong exception.

    If filter parameters are long, list resources API request leads to
    414 error (URL is too long). For such case, this method split
    list parameters specified by a list_field argument into chunks
    and call the specified list_method repeatedly.

    :param list_method: Method used to retrieve resource list.
    :param filter_attr: attribute name to be filtered. The value corresponding
        to this attribute is specified by "filter_values".
        If you want to specify more attributes for a filter condition,
        pass them as keyword arguments like "attr2=values2".
    :param filter_values: values of "filter_attr" to be filtered.
        If filter_values are too long and the total URI length exceed the
        maximum length supported by the neutron server, filter_values will
        be split into sub lists if filter_values is a list.
    :param params: parameters to pass a specified listing API call
        without any changes. You can specify more filter conditions
        in addition to a pair of filter_attr and filter_values.
    """
    try:
        params[filter_attr] = filter_values
        return list_method(**params)
    except neutron_exc.RequestURITooLong as uri_len_exc:
        # The URI is too long because of too many filter values.
        # Use the excess attribute of the exception to know how many
        # filter values can be inserted into a single request.

        # We consider only the filter condition from (filter_attr,
        # filter_values) and do not consider other filter conditions
        # which may be specified in **params.
        if not isinstance(filter_values, (list, tuple, set, frozenset)):
            filter_values = [filter_values]

        # Length of each query filter is:
        # <key>=<value>& (e.g., id=<uuid>)
        # The length will be key_len + value_maxlen + 2
        all_filter_len = sum(len(filter_attr) + len(val) + 2
                             for val in filter_values)
        allowed_filter_len = all_filter_len - uri_len_exc.excess

        val_maxlen = max(len(val) for val in filter_values)
        filter_maxlen = len(filter_attr) + val_maxlen + 2
        chunk_size = allowed_filter_len // filter_maxlen
        # BUGFIX: chunk_size can be computed as 0 (e.g. when even a single
        # filter value exceeds the allowed length), and range() raises
        # "range() arg 3 must not be zero" for a zero step. Fall back to
        # one value per request as the best effort.
        chunk_size = max(chunk_size, 1)

        resources = []
        for i in range(0, len(filter_values), chunk_size):
            params[filter_attr] = filter_values[i:i + chunk_size]
            resources.extend(list_method(**params))
        return resources
@profiler.trace
def trunk_show(request, trunk_id):
    """Fetch a single trunk by ID and wrap it in a Trunk object."""
    LOG.debug("trunk_show(): trunk_id=%s", trunk_id)
    trunk = neutronclient(request).show_trunk(trunk_id).get('trunk')
    return Trunk(trunk)
@profiler.trace
def trunk_list(request, **params):
    """List trunks matching the given filter params."""
    LOG.debug("trunk_list(): params=%s", params)
    trunks = neutronclient(request).list_trunks(**params).get('trunks')
    return [Trunk(t) for t in trunks]
@profiler.trace
def trunk_create(request, **params):
    """Create a trunk; defaults project_id to the request user's project."""
    LOG.debug("trunk_create(): params=%s", params)
    if 'project_id' not in params:
        params['project_id'] = request.user.project_id
    body = {'trunk': params}
    trunk = neutronclient(request).create_trunk(body=body).get('trunk')
    return Trunk(trunk)
@profiler.trace
def trunk_delete(request, trunk_id):
    """Delete the trunk identified by trunk_id."""
    LOG.debug("trunk_delete(): trunk_id=%s", trunk_id)
    neutronclient(request).delete_trunk(trunk_id)
def _prepare_body_update_trunk(prop_diff):
"""Prepare body for PUT /v2.0/trunks/TRUNK_ID."""
return {'trunk': prop_diff}
def _prepare_body_remove_subports(subports):
"""Prepare body for PUT /v2.0/trunks/TRUNK_ID/remove_subports."""
return {'sub_ports': [{'port_id': sp['port_id']} for sp in subports]}
def _prepare_body_add_subports(subports):
"""Prepare body for PUT /v2.0/trunks/TRUNK_ID/add_subports."""
return {'sub_ports': subports}
@profiler.trace
def trunk_update(request, trunk_id, old_trunk, new_trunk):
    """Handle update to a trunk in (at most) three neutron calls.

    The JavaScript side should know only about the old and new state of a
    trunk. However it should not know anything about how the old and new are
    meant to be diffed and sent to neutron. We handle that here.

    This code was adapted from Heat, see: https://review.openstack.org/442496

    Call #1) Update all changed properties but 'sub_ports'.
        PUT /v2.0/trunks/TRUNK_ID
        openstack network trunk set

    Call #2) Delete subports not needed anymore.
        PUT /v2.0/trunks/TRUNK_ID/remove_subports
        openstack network trunk unset --subport

    Call #3) Create new subports.
        PUT /v2.0/trunks/TRUNK_ID/add_subports
        openstack network trunk set --subport

    A single neutron port cannot be two subports at the same time (ie.
    have two segmentation (type, ID)s on the same trunk or to belong to
    two trunks). Therefore we have to delete old subports before creating
    new ones to avoid conflicts.

    :param trunk_id: ID of the trunk being updated.
    :param old_trunk: dict describing the trunk's current state.
    :param new_trunk: dict describing the desired state.
    :returns: Trunk object reflecting the last neutron response.
    """
    LOG.debug("trunk_update(): trunk_id=%s", trunk_id)

    # NOTE(bence romsics): We want to do set operations on the subports,
    # however we receive subports represented as dicts. In Python
    # mutable objects like dicts are not hashable so they cannot be
    # inserted into sets. So we convert subport dicts to (immutable)
    # frozensets in order to do the set operations.
    def dict2frozenset(d):
        """Convert a dict to a frozenset.

        Create an immutable equivalent of a dict, so it's hashable
        therefore can be used as an element of a set or a key of another
        dictionary.
        """
        return frozenset(d.items())

    # cf. neutron_lib/api/definitions/trunk.py
    updatable_props = ('admin_state_up', 'description', 'name')
    prop_diff = {
        k: new_trunk[k]
        for k in updatable_props
        if old_trunk[k] != new_trunk[k]}

    subports_old = {dict2frozenset(d): d
                    for d in old_trunk.get('sub_ports', [])}
    subports_new = {dict2frozenset(d): d
                    for d in new_trunk.get('sub_ports', [])}

    old_set = set(subports_old.keys())
    new_set = set(subports_new.keys())

    # Subports present only in the old state are deleted; subports
    # present only in the new state are created.
    delete = old_set - new_set
    create = new_set - old_set

    dicts_delete = [subports_old[fs] for fs in delete]
    dicts_create = [subports_new[fs] for fs in create]

    trunk = old_trunk
    if prop_diff:
        LOG.debug('trunk_update(): update properties of trunk %s: %s',
                  trunk_id, prop_diff)
        body = _prepare_body_update_trunk(prop_diff)
        trunk = neutronclient(request).update_trunk(
            trunk_id, body=body).get('trunk')

    if dicts_delete:
        LOG.debug('trunk_update(): delete subports of trunk %s: %s',
                  trunk_id, dicts_delete)
        body = _prepare_body_remove_subports(dicts_delete)
        trunk = neutronclient(request).trunk_remove_subports(
            trunk_id, body=body)

    if dicts_create:
        LOG.debug('trunk_update(): create subports of trunk %s: %s',
                  trunk_id, dicts_create)
        body = _prepare_body_add_subports(dicts_create)
        trunk = neutronclient(request).trunk_add_subports(
            trunk_id, body=body)

    return Trunk(trunk)
@profiler.trace
def network_list(request, **params):
    """List networks with their ``subnets`` attribute expanded.

    The subnet IDs Neutron returns on each network are replaced with the
    corresponding subnet bodies (when known) so callers get full subnet
    details without extra API calls.
    """
    LOG.debug("network_list(): params=%s", params)
    networks = neutronclient(request).list_networks(**params).get('networks')
    # Get subnet list to expand subnet info in network list.
    subnets = subnet_list(request)
    subnet_dict = dict([(s['id'], s) for s in subnets])
    # Expand subnet list from subnet_id to values.
    for n in networks:
        # Due to potential timing issues, we can't assume the subnet_dict data
        # is in sync with the network data.
        n['subnets'] = [subnet_dict[s] for s in n.get('subnets', []) if
                        s in subnet_dict]
    return [Network(n) for n in networks]
def _is_auto_allocated_network_supported(request):
    """Return True when both neutron and nova support auto-allocated nets.

    Any failure while probing either service is reported to the user and
    treated as "not supported".
    """
    try:
        neutron_auto_supported = is_service_enabled(
            request, 'enable_auto_allocated_network',
            'auto-allocated-topology', default=False)
    except Exception:
        exceptions.handle(request, _('Failed to check if neutron supports '
                                     '"auto_allocated_network".'))
        neutron_auto_supported = False
    if not neutron_auto_supported:
        return False
    try:
        # server_create needs to support both features,
        # so we need to pass both features here.
        nova_auto_supported = nova.is_feature_available(
            request, ("instance_description",
                      "auto_allocated_network"))
    except Exception:
        exceptions.handle(request, _('Failed to check if nova supports '
                                     '"auto_allocated_network".'))
        nova_auto_supported = False
    return nova_auto_supported
@profiler.trace
def network_list_for_tenant(request, tenant_id, include_external=False,
                            include_pre_auto_allocate=False,
                            **params):
    """Return a network list available for the tenant.

    The list contains networks owned by the tenant and public networks.
    If requested_networks specified, it searches requested_networks only.

    :param include_external: when True, also append external networks not
        already present in the result.
    :param include_pre_auto_allocate: when True and no networks are found,
        append a placeholder for the auto-allocated network if supported.
    """
    LOG.debug("network_list_for_tenant(): tenant_id=%(tenant_id)s, "
              "params=%(params)s", {'tenant_id': tenant_id, 'params': params})
    networks = []
    # 'shared' is handled separately; strip it from the pass-through params.
    shared = params.get('shared')
    if shared is not None:
        del params['shared']
    if shared in (None, False):
        # If a user has admin role, network list returned by Neutron API
        # contains networks that do not belong to that tenant.
        # So we need to specify tenant_id when calling network_list().
        networks += network_list(request, tenant_id=tenant_id,
                                 shared=False, **params)
    if shared in (None, True):
        # In the current Neutron API, there is no way to retrieve
        # both owner networks and public networks in a single API call.
        networks += network_list(request, shared=True, **params)
    # Hack for auto allocated network
    if include_pre_auto_allocate and not networks:
        if _is_auto_allocated_network_supported(request):
            networks.append(PreAutoAllocateNetwork(request))
    params['router:external'] = params.get('router:external', True)
    if params['router:external'] and include_external:
        if shared is not None:
            params['shared'] = shared
        fetched_net_ids = [n.id for n in networks]
        # Retrieves external networks when router:external is not specified
        # in (filtering) params or router:external=True filter is specified.
        # When router:external=False is specified there is no need to query
        # networking API because apparently nothing will match the filter.
        ext_nets = network_list(request, **params)
        networks += [n for n in ext_nets if
                     n.id not in fetched_net_ids]
    return networks
@profiler.trace
def network_get(request, network_id, expand_subnet=True, **params):
    """Fetch a network, optionally expanding its subnet IDs into objects."""
    LOG.debug("network_get(): netid=%(network_id)s, params=%(params)s",
              {'network_id': network_id, 'params': params})
    network = neutronclient(request).show_network(network_id,
                                                  **params).get('network')
    if expand_subnet:
        # NOTE(amotoki): There are some cases where a user has no permission
        # to get subnet details, but the condition is complicated. We first
        # try to fetch subnet details. If successful, the subnet details are
        # set to network['subnets'] as a list of "Subnet" object.
        # If NotFound exception is returned by neutron, network['subnets'] is
        # left untouched and a list of subnet IDs are stored.
        # Neutron returns NotFound exception if a request user has enough
        # permission to access a requested resource, so we catch only
        # NotFound exception here.
        try:
            # Since the number of subnets per network must be small,
            # call subnet_get() for each subnet instead of calling
            # subnet_list() once.
            network['subnets'] = [subnet_get(request, sid)
                                  for sid in network['subnets']]
        except neutron_exc.NotFound:
            pass
    return Network(network)
@profiler.trace
def network_create(request, **kwargs):
    """Create a network object.

    :param request: request context
    :param tenant_id: (optional) tenant id of the network created
    :param name: (optional) name of the network created
    :returns: Network object
    """
    LOG.debug("network_create(): kwargs = %s", kwargs)
    # Default the owner to the requesting project when not supplied.
    if 'tenant_id' not in kwargs:
        kwargs['tenant_id'] = request.user.project_id
    response = neutronclient(request).create_network(
        body={'network': kwargs})
    return Network(response.get('network'))
@profiler.trace
def network_update(request, network_id, **kwargs):
    """Update the given network's attributes from kwargs."""
    LOG.debug("network_update(): netid=%(network_id)s, params=%(params)s",
              {'network_id': network_id, 'params': kwargs})
    body = {'network': kwargs}
    network = neutronclient(request).update_network(network_id,
                                                    body=body).get('network')
    return Network(network)
@profiler.trace
def network_delete(request, network_id):
    """Delete the network identified by network_id."""
    LOG.debug("network_delete(): netid=%s", network_id)
    neutronclient(request).delete_network(network_id)
@profiler.trace
@memoized
def subnet_list(request, **params):
    """List subnets; memoized per (request, params) for the request cycle."""
    LOG.debug("subnet_list(): params=%s", params)
    subnets = neutronclient(request).list_subnets(**params).get('subnets')
    return [Subnet(s) for s in subnets]
@profiler.trace
def subnet_get(request, subnet_id, **params):
    """Fetch a single subnet by ID."""
    LOG.debug("subnet_get(): subnetid=%(subnet_id)s, params=%(params)s",
              {'subnet_id': subnet_id, 'params': params})
    subnet = neutronclient(request).show_subnet(subnet_id,
                                                **params).get('subnet')
    return Subnet(subnet)
@profiler.trace
def subnet_create(request, network_id, **kwargs):
    """Create a subnet on a specified network.

    :param request: request context
    :param network_id: network id a subnet is created on
    :param cidr: (optional) subnet IP address range
    :param ip_version: (optional) IP version (4 or 6)
    :param gateway_ip: (optional) IP address of gateway
    :param tenant_id: (optional) tenant id of the subnet created
    :param name: (optional) name of the subnet created
    :param subnetpool_id: (optional) subnetpool to allocate prefix from
    :param prefixlen: (optional) length of prefix to allocate
    :returns: Subnet object

    Although both cidr+ip_version and subnetpool_id+prefixlen are listed as
    optional you MUST pass along one of the combinations to get a successful
    result.
    """
    LOG.debug("subnet_create(): netid=%(network_id)s, kwargs=%(kwargs)s",
              {'network_id': network_id, 'kwargs': kwargs})
    body = {'subnet': {'network_id': network_id}}
    if 'tenant_id' not in kwargs:
        kwargs['tenant_id'] = request.user.project_id
    body['subnet'].update(kwargs)
    subnet = neutronclient(request).create_subnet(body=body).get('subnet')
    return Subnet(subnet)
@profiler.trace
def subnet_update(request, subnet_id, **kwargs):
    """Update the given subnet's attributes from kwargs."""
    LOG.debug("subnet_update(): subnetid=%(subnet_id)s, kwargs=%(kwargs)s",
              {'subnet_id': subnet_id, 'kwargs': kwargs})
    body = {'subnet': kwargs}
    subnet = neutronclient(request).update_subnet(subnet_id,
                                                  body=body).get('subnet')
    return Subnet(subnet)
@profiler.trace
def subnet_delete(request, subnet_id):
    """Delete the subnet identified by subnet_id."""
    LOG.debug("subnet_delete(): subnetid=%s", subnet_id)
    neutronclient(request).delete_subnet(subnet_id)
@profiler.trace
def subnetpool_list(request, **params):
    """List subnet pools matching the given filter params."""
    LOG.debug("subnetpool_list(): params=%s", params)
    subnetpools = \
        neutronclient(request).list_subnetpools(**params).get('subnetpools')
    return [SubnetPool(s) for s in subnetpools]
@profiler.trace
def subnetpool_get(request, subnetpool_id, **params):
    """Fetch a single subnet pool by ID."""
    LOG.debug("subnetpool_get(): subnetpoolid=%(subnetpool_id)s, "
              "params=%(params)s", {'subnetpool_id': subnetpool_id,
                                    'params': params})
    subnetpool = \
        neutronclient(request).show_subnetpool(subnetpool_id,
                                               **params).get('subnetpool')
    return SubnetPool(subnetpool)
@profiler.trace
def subnetpool_create(request, name, prefixes, **kwargs):
    """Create a subnetpool.

    ip_version is auto-detected in back-end.

    Parameters:
    request -- Request context
    name -- Name for subnetpool
    prefixes -- List of prefixes for pool

    Keyword Arguments (optional):
    min_prefixlen -- Minimum prefix length for allocations from pool
    max_prefixlen -- Maximum prefix length for allocations from pool
    default_prefixlen -- Default prefix length for allocations from pool
    default_quota -- Default quota for allocations from pool
    shared -- Subnetpool should be shared (Admin-only)
    tenant_id -- Owner of subnetpool

    Returns:
    SubnetPool object
    """
    LOG.debug("subnetpool_create(): name=%(name)s, prefixes=%(prefixes)s, "
              "kwargs=%(kwargs)s", {'name': name, 'prefixes': prefixes,
                                    'kwargs': kwargs})
    body = {'subnetpool':
            {'name': name,
             'prefixes': prefixes,
             }
            }
    # Default the owner to the requesting project when not supplied.
    if 'tenant_id' not in kwargs:
        kwargs['tenant_id'] = request.user.project_id
    body['subnetpool'].update(kwargs)
    subnetpool = \
        neutronclient(request).create_subnetpool(body=body).get('subnetpool')
    return SubnetPool(subnetpool)
@profiler.trace
def subnetpool_update(request, subnetpool_id, **kwargs):
    """Update the given subnet pool's attributes from kwargs."""
    LOG.debug("subnetpool_update(): subnetpoolid=%(subnetpool_id)s, "
              "kwargs=%(kwargs)s", {'subnetpool_id': subnetpool_id,
                                    'kwargs': kwargs})
    body = {'subnetpool': kwargs}
    subnetpool = \
        neutronclient(request).update_subnetpool(subnetpool_id,
                                                 body=body).get('subnetpool')
    return SubnetPool(subnetpool)
@profiler.trace
def subnetpool_delete(request, subnetpool_id):
    """Delete the subnet pool identified by subnetpool_id."""
    LOG.debug("subnetpool_delete(): subnetpoolid=%s", subnetpool_id)
    return neutronclient(request).delete_subnetpool(subnetpool_id)
@profiler.trace
@memoized
def port_list(request, **params):
    """List ports; memoized per (request, params) for the request cycle."""
    LOG.debug("port_list(): params=%s", params)
    ports = neutronclient(request).list_ports(**params).get('ports')
    return [Port(p) for p in ports]
@profiler.trace
@memoized
def port_list_with_trunk_types(request, **params):
    """List neutron Ports for this tenant with possible TrunkPort indicated

    :param request: request context

    NOTE Performing two API calls is not atomic, but this is not worse
         than the original idea when we call port_list repeatedly for
         each network to perform identification run-time. We should
         handle the inconsistencies caused by non-atomic API requests
         gracefully.
    """
    LOG.debug("port_list_with_trunk_types(): params=%s", params)
    # When trunk feature is disabled in neutron, we have no need to fetch
    # trunk information and port_list() is enough.
    if not is_extension_supported(request, 'trunk'):
        return port_list(request, **params)
    ports = neutronclient(request).list_ports(**params)['ports']
    # Propagate the tenant filter (when given) to the trunk listing too.
    trunk_filters = {}
    if 'tenant_id' in params:
        trunk_filters['tenant_id'] = params['tenant_id']
    trunks = neutronclient(request).list_trunks(**trunk_filters)['trunks']
    parent_ports = set([t['port_id'] for t in trunks])
    # Create a dict map for child ports (port ID to trunk info)
    child_ports = dict([(s['port_id'],
                         {'trunk_id': t['id'],
                          'segmentation_type': s['segmentation_type'],
                          'segmentation_id': s['segmentation_id']})
                        for t in trunks
                        for s in t['sub_ports']])
    # Classify each port as trunk parent, trunk subport, or plain port.
    def _get_port_info(port):
        if port['id'] in parent_ports:
            return PortTrunkParent(port)
        elif port['id'] in child_ports:
            return PortTrunkSubport(port, child_ports[port['id']])
        else:
            return Port(port)
    return [_get_port_info(p) for p in ports]
@profiler.trace
def port_get(request, port_id, **params):
    """Fetch a single port by ID."""
    LOG.debug("port_get(): portid=%(port_id)s, params=%(params)s",
              {'port_id': port_id, 'params': params})
    port = neutronclient(request).show_port(port_id, **params).get('port')
    return Port(port)
def unescape_port_kwargs(**kwargs):
    """Convert double-underscore escaped keys back to colon form.

    Neutron attributes such as ``binding:vnic_type`` cannot be written as
    Python keyword arguments, so callers escape ':' as '__'; this undoes
    that escaping (``binding__vnic_type`` -> ``binding:vnic_type``).

    :returns: the same dict, mutated in place.
    """
    # BUGFIX: iterate over a snapshot of the keys. The loop both inserts
    # and pops entries; mutating a dict while iterating it directly is
    # not allowed in Python 3 and can raise RuntimeError or silently
    # skip/repeat keys.
    for key in list(kwargs):
        if '__' in key:
            kwargs[':'.join(key.split('__'))] = kwargs.pop(key)
    return kwargs
@profiler.trace
def port_create(request, network_id, **kwargs):
    """Create a port on a specified network.

    :param request: request context
    :param network_id: network id a subnet is created on
    :param device_id: (optional) device id attached to the port
    :param tenant_id: (optional) tenant id of the port created
    :param name: (optional) name of the port created
    :returns: Port object
    """
    LOG.debug("port_create(): netid=%(network_id)s, kwargs=%(kwargs)s",
              {'network_id': network_id, 'kwargs': kwargs})
    # Keys escaped as "attr__sub" become "attr:sub" for the Neutron API.
    kwargs = unescape_port_kwargs(**kwargs)
    body = {'port': {'network_id': network_id}}
    if 'tenant_id' not in kwargs:
        kwargs['tenant_id'] = request.user.project_id
    body['port'].update(kwargs)
    port = neutronclient(request).create_port(body=body).get('port')
    return Port(port)
@profiler.trace
def port_delete(request, port_id):
    """Delete the port identified by port_id."""
    LOG.debug("port_delete(): portid=%s", port_id)
    neutronclient(request).delete_port(port_id)
@profiler.trace
def port_update(request, port_id, **kwargs):
    """Update the given port's attributes from (escaped) kwargs."""
    LOG.debug("port_update(): portid=%(port_id)s, kwargs=%(kwargs)s",
              {'port_id': port_id, 'kwargs': kwargs})
    # Keys escaped as "attr__sub" become "attr:sub" for the Neutron API.
    kwargs = unescape_port_kwargs(**kwargs)
    body = {'port': kwargs}
    port = neutronclient(request).update_port(port_id, body=body).get('port')
    return Port(port)
@profiler.trace
def router_create(request, **kwargs):
    """Create a router, owned by the requesting project by default."""
    LOG.debug("router_create():, kwargs=%s", kwargs)
    if 'tenant_id' not in kwargs:
        kwargs['tenant_id'] = request.user.project_id
    response = neutronclient(request).create_router(
        body={'router': dict(kwargs)})
    return Router(response.get('router'))
@profiler.trace
def router_update(request, r_id, **kwargs):
    """Update the given router's attributes from kwargs."""
    LOG.debug("router_update(): router_id=%(r_id)s, kwargs=%(kwargs)s",
              {'r_id': r_id, 'kwargs': kwargs})
    response = neutronclient(request).update_router(
        r_id, body={'router': dict(kwargs)})
    return Router(response['router'])
@profiler.trace
def router_get(request, router_id, **params):
    """Fetch a single router by ID."""
    router = neutronclient(request).show_router(router_id,
                                                **params).get('router')
    return Router(router)
@profiler.trace
def router_list(request, **params):
    """List routers matching the given filter params."""
    routers = neutronclient(request).list_routers(**params).get('routers')
    return [Router(r) for r in routers]
@profiler.trace
def router_list_on_l3_agent(request, l3_agent_id, **params):
    """List routers scheduled on the given L3 agent."""
    routers = neutronclient(request).\
        list_routers_on_l3_agent(l3_agent_id,
                                 **params).get('routers')
    return [Router(r) for r in routers]
@profiler.trace
def router_delete(request, router_id):
    """Delete the router identified by router_id."""
    neutronclient(request).delete_router(router_id)
@profiler.trace
def router_add_interface(request, router_id, subnet_id=None, port_id=None):
    """Attach a subnet or an existing port to a router.

    Exactly one of subnet_id / port_id is expected by the Neutron API.
    """
    body = {}
    if subnet_id:
        body['subnet_id'] = subnet_id
    if port_id:
        body['port_id'] = port_id
    client = neutronclient(request)
    return client.add_interface_router(router_id, body)
@profiler.trace
def router_remove_interface(request, router_id, subnet_id=None, port_id=None):
    """Detach a subnet or port from a router."""
    body = {}
    if subnet_id:
        body['subnet_id'] = subnet_id
    if port_id:
        body['port_id'] = port_id
    neutronclient(request).remove_interface_router(router_id, body)
@profiler.trace
def router_add_gateway(request, router_id, network_id, enable_snat=None):
    """Set the router's external gateway to the given network.

    ``enable_snat`` is only sent when explicitly specified, leaving the
    backend default otherwise.
    """
    body = {'network_id': network_id}
    if enable_snat is not None:
        body['enable_snat'] = enable_snat
    neutronclient(request).add_gateway_router(router_id, body)
@profiler.trace
def router_remove_gateway(request, router_id):
    """Clear the router's external gateway."""
    neutronclient(request).remove_gateway_router(router_id)
@profiler.trace
def router_static_route_list(request, router_id=None):
    """Return the router's static routes as RouterStaticRoute objects.

    Returns [] when the router object has no ``routes`` attribute (e.g.
    the extraroute extension is not enabled).
    """
    router = router_get(request, router_id)
    try:
        routes = [RouterStaticRoute(r) for r in router.routes]
    except AttributeError:
        LOG.debug("router_static_route_list(): router_id=%(router_id)s, "
                  "router=%(router)s", {'router_id': router_id,
                                        'router': router})
        return []
    return routes
@profiler.trace
def router_static_route_remove(request, router_id, route_ids):
    """Remove the given static routes from a router.

    The route table is replaced wholesale: current routes are re-read,
    those whose IDs are in *route_ids* are dropped, and the remainder is
    written back via router_update().
    """
    currentroutes = router_static_route_list(request, router_id=router_id)
    kept = [{'nexthop': route.nexthop,
             'destination': route.destination}
            for route in currentroutes
            if route.id not in route_ids]
    return router_update(request, router_id, routes=kept)
@profiler.trace
def router_static_route_add(request, router_id, newroute):
    """Prepend *newroute* to the router's existing static routes."""
    currentroutes = router_static_route_list(request, router_id=router_id)
    routes = [newroute]
    routes.extend({'nexthop': route.nexthop,
                   'destination': route.destination}
                  for route in currentroutes)
    return router_update(request, router_id, routes=routes)
@profiler.trace
def tenant_quota_get(request, tenant_id):
    """Return the tenant's neutron quotas as a QuotaSet."""
    return base.QuotaSet(neutronclient(request).show_quota(tenant_id)['quota'])
@profiler.trace
def tenant_quota_update(request, tenant_id, **kwargs):
    """Update the tenant's neutron quotas from kwargs."""
    quotas = {'quota': kwargs}
    return neutronclient(request).update_quota(tenant_id, quotas)
@profiler.trace
def tenant_quota_detail_get(request, tenant_id=None):
    """Return detailed quota usage; defaults to the request user's tenant."""
    tenant_id = tenant_id or request.user.tenant_id
    response = neutronclient(request).get('/quotas/%s/details' % tenant_id)
    return response['quota']
@profiler.trace
def default_quota_get(request, tenant_id=None):
    """Return the default neutron quotas as a QuotaSet."""
    tenant_id = tenant_id or request.user.tenant_id
    response = neutronclient(request).show_quota_default(tenant_id)
    return base.QuotaSet(response['quota'])
@profiler.trace
def agent_list(request, **params):
    """List neutron agents matching the given filter params."""
    agents = neutronclient(request).list_agents(**params)
    return [Agent(a) for a in agents['agents']]
@profiler.trace
def list_dhcp_agent_hosting_networks(request, network, **params):
    """List DHCP agents hosting the given network."""
    agents = neutronclient(request).list_dhcp_agent_hosting_networks(network,
                                                                     **params)
    return [Agent(a) for a in agents['agents']]
@profiler.trace
def list_l3_agent_hosting_router(request, router, **params):
    """List L3 agents hosting the given router."""
    agents = neutronclient(request).list_l3_agent_hosting_routers(router,
                                                                  **params)
    return [Agent(a) for a in agents['agents']]
@profiler.trace
def show_network_ip_availability(request, network_id):
    """Return IP availability details for the given network (raw dict)."""
    ip_availability = neutronclient(request).show_network_ip_availability(
        network_id)
    return ip_availability
@profiler.trace
def add_network_to_dhcp_agent(request, dhcp_agent, network_id):
    """Schedule the network onto the given DHCP agent."""
    body = {'network_id': network_id}
    return neutronclient(request).add_network_to_dhcp_agent(dhcp_agent, body)
@profiler.trace
def remove_network_from_dhcp_agent(request, dhcp_agent, network_id):
    """Unschedule the network from the given DHCP agent."""
    return neutronclient(request).remove_network_from_dhcp_agent(dhcp_agent,
                                                                 network_id)
@profiler.trace
def provider_list(request):
    """Return the list of neutron service providers (raw dicts)."""
    providers = neutronclient(request).list_service_providers()
    return providers['service_providers']
def floating_ip_pools_list(request):
return FloatingIpManager(request).list_pools()
@memoized
def tenant_floating_ip_list(request, all_tenants=False, **search_opts):
return FloatingIpManager(request).list(all_tenants=all_tenants,
**search_opts)
def tenant_floating_ip_get(request, floating_ip_id):
return FloatingIpManager(request).get(floating_ip_id)
def tenant_floating_ip_allocate(request, pool=None, tenant_id=None, **params):
return FloatingIpManager(request).allocate(pool, tenant_id, **params)
def tenant_floating_ip_release(request, floating_ip_id):
    """Release (delete) the given floating IP."""
    manager = FloatingIpManager(request)
    return manager.release(floating_ip_id)
def floating_ip_associate(request, floating_ip_id, port_id):
    """Associate a floating IP with the given port."""
    manager = FloatingIpManager(request)
    return manager.associate(floating_ip_id, port_id)
def floating_ip_disassociate(request, floating_ip_id):
    """Detach the given floating IP from its port."""
    manager = FloatingIpManager(request)
    return manager.disassociate(floating_ip_id)
def floating_ip_target_list(request):
    """List ports that a floating IP can be associated with."""
    manager = FloatingIpManager(request)
    return manager.list_targets()
def floating_ip_target_list_by_instance(request, instance_id, cache=None):
    """List association targets for one instance (optionally cached)."""
    manager = FloatingIpManager(request)
    return manager.list_targets_by_instance(instance_id, cache)
def floating_ip_simple_associate_supported(request):
    """True when one-step floating IP association is supported."""
    manager = FloatingIpManager(request)
    return manager.is_simple_associate_supported()
def floating_ip_supported(request):
    """True when the floating IP feature is available."""
    manager = FloatingIpManager(request)
    return manager.is_supported()
@memoized
def security_group_list(request, **params):
    """List security groups, optionally filtered by **params."""
    manager = SecurityGroupManager(request)
    return manager.list(**params)
def security_group_get(request, sg_id):
    """Fetch a single security group by its id."""
    manager = SecurityGroupManager(request)
    return manager.get(sg_id)
def security_group_create(request, name, desc):
    """Create a security group with the given name and description."""
    manager = SecurityGroupManager(request)
    return manager.create(name, desc)
def security_group_delete(request, sg_id):
    """Delete the given security group."""
    manager = SecurityGroupManager(request)
    return manager.delete(sg_id)
def security_group_update(request, sg_id, name, desc):
    """Update name/description of the given security group."""
    manager = SecurityGroupManager(request)
    return manager.update(sg_id, name, desc)
def security_group_rule_create(request, parent_group_id,
                               direction, ethertype,
                               ip_protocol, from_port, to_port,
                               cidr, group_id, description=None):
    """Create a rule on the security group *parent_group_id*."""
    manager = SecurityGroupManager(request)
    return manager.rule_create(
        parent_group_id, direction, ethertype, ip_protocol,
        from_port, to_port, cidr, group_id, description)
def security_group_rule_delete(request, sgr_id):
    """Delete the given security group rule."""
    manager = SecurityGroupManager(request)
    return manager.rule_delete(sgr_id)
def server_security_groups(request, instance_id):
    """List the security groups attached to one instance."""
    manager = SecurityGroupManager(request)
    return manager.list_by_instance(instance_id)
def server_update_security_groups(request, instance_id,
                                  new_security_group_ids):
    """Replace the security groups attached to one instance."""
    manager = SecurityGroupManager(request)
    return manager.update_instance_security_group(
        instance_id, new_security_group_ids)
# TODO(pkarikh) need to uncomment when osprofiler will have no
# issues with unicode in:
# openstack_dashboard/test/test_data/nova_data.py#L470 data
# @profiler.trace
def servers_update_addresses(request, servers, all_tenants=False):
    """Retrieve servers networking information from Neutron if enabled.
    Should be used when up to date networking information is required,
    and Nova's networking info caching mechanism is not fast enough.
    Mutates each server in *servers* in place (sets ``server.addresses``);
    returns nothing.
    """
    # NOTE(e0ne): we don't need to call neutron if we have no instances
    if not servers:
        return
    # Get all (filtered for relevant servers) information from Neutron
    try:
        # NOTE(e0ne): we need tuple here to work with @memoized decorator.
        # @memoized works with hashable arguments only.
        ports = list_resources_with_long_filters(
            port_list, 'device_id',
            tuple([instance.id for instance in servers]),
            request=request)
        fips = FloatingIpManager(request)
        if fips.is_supported():
            floating_ips = list_resources_with_long_filters(
                fips.list, 'port_id', tuple([port.id for port in ports]),
                all_tenants=all_tenants)
        else:
            floating_ips = []
        # NOTE(e0ne): we need frozenset here to work with @memoized decorator.
        # @memoized works with hashable arguments only
        networks = list_resources_with_long_filters(
            network_list, 'id', frozenset([port.network_id for port in ports]),
            request=request)
    except Exception as e:
        # Best-effort: surface the error to the UI and leave Nova's
        # cached addresses untouched.
        LOG.error('Unable to connect to Neutron: %s', e)
        error_message = _('Unable to connect to Neutron.')
        messages.error(request, error_message)
        return
    # Map instance to its ports
    instances_ports = collections.defaultdict(list)
    for port in ports:
        instances_ports[port.device_id].append(port)
    # Map port to its floating ips
    ports_floating_ips = collections.defaultdict(list)
    for fip in floating_ips:
        ports_floating_ips[fip.port_id].append(fip)
    # Map network id to its name
    network_names = dict(((network.id, network.name) for network in networks))
    for server in servers:
        try:
            addresses = _server_get_addresses(
                request,
                server,
                instances_ports,
                ports_floating_ips,
                network_names)
        except Exception as e:
            # A failure for one server must not abort the whole batch.
            LOG.error(six.text_type(e))
        else:
            server.addresses = addresses
def _server_get_addresses(request, server, ports, floating_ips, network_names):
    """Build a Nova-style ``addresses`` mapping for one server.
    :param ports: dict mapping device (server) id -> list of ports
    :param floating_ips: dict mapping port id -> list of floating IPs
    :param network_names: dict mapping network id -> network name
    :returns: dict mapping network name -> list of address dicts in the
        Nova ``OS-EXT-IPS`` format
    """
    def _format_address(mac, ip, type):
        # Translate one (mac, ip) pair into a Nova-compatible entry;
        # re-raises after reporting if the IP cannot be parsed.
        try:
            version = netaddr.IPAddress(ip).version
        except Exception as e:
            LOG.error('Unable to parse IP address %(ip)s: %(exc)s',
                      {'ip': ip, 'exc': e})
            error_message = _('Unable to parse IP address %s.') % ip
            messages.error(request, error_message)
            raise
        return {u'OS-EXT-IPS-MAC:mac_addr': mac,
                u'version': version,
                u'addr': ip,
                u'OS-EXT-IPS:type': type}
    addresses = collections.defaultdict(list)
    instance_ports = ports.get(server.id, [])
    for port in instance_ports:
        network_name = network_names.get(port.network_id)
        if network_name is not None:
            # Fixed IPs first, then the floating IPs attached to the port.
            for fixed_ip in port.fixed_ips:
                addresses[network_name].append(
                    _format_address(port.mac_address,
                                    fixed_ip['ip_address'],
                                    u'fixed'))
            port_fips = floating_ips.get(port.id, [])
            for fip in port_fips:
                addresses[network_name].append(
                    _format_address(port.mac_address,
                                    fip.floating_ip_address,
                                    u'floating'))
    return dict(addresses)
@profiler.trace
@memoized_with_request(neutronclient)
def list_extensions(neutron_api):
    """List neutron extensions.
    :param neutron_api: neutron API client (resolved from the django
        request by the memoized_with_request decorator)
    """
    try:
        response = neutron_api.list_extensions()
    except exceptions.ServiceCatalogException:
        # No neutron endpoint in the service catalog: report nothing.
        return {}
    return tuple(response.get('extensions', ()))
@profiler.trace
def is_extension_supported(request, extension_alias):
    """Check if a specified extension is supported.
    :param request: django request object
    :param extension_alias: neutron extension alias
    """
    return any(extension['alias'] == extension_alias
               for extension in list_extensions(request))
def is_enabled_by_config(name, default=True):
    """Look up a flag in the OPENSTACK_NEUTRON_NETWORK setting."""
    config = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {})
    return config.get(name, default)
@memoized
def is_service_enabled(request, config_name, ext_name, default=True):
    """True when enabled in config AND the neutron extension is loaded."""
    enabled = is_enabled_by_config(config_name, default)
    return enabled and is_extension_supported(request, ext_name)
@memoized
def is_quotas_extension_supported(request):
    """True when quota support is enabled and the extension is loaded."""
    enabled = is_enabled_by_config('enable_quotas', False)
    return enabled and is_extension_supported(request, 'quotas')
@memoized
def is_router_enabled(request):
    """True when routers are enabled and the extension is loaded."""
    enabled = is_enabled_by_config('enable_router')
    return enabled and is_extension_supported(request, 'router')
# FEATURE_MAP is used to define:
# - related neutron extension name (key: "extension")
# - corresponding dashboard config (key: "config")
# - RBAC policies (key: "policies")
# If a key is not contained, the corresponding permission check is skipped.
FEATURE_MAP = {
    'dvr': {
        'extension': 'dvr',
        'config': {
            'name': 'enable_distributed_router',
            'default': False,
        },
        'policies': {
            'get': 'get_router:distributed',
            'create': 'create_router:distributed',
            'update': 'update_router:distributed',
        }
    },
    'l3-ha': {
        'extension': 'l3-ha',
        'config': {'name': 'enable_ha_router',
                   'default': False},
        'policies': {
            'get': 'get_router:ha',
            'create': 'create_router:ha',
            'update': 'update_router:ha',
        }
    },
    'ext-gw-mode': {
        'extension': 'ext-gw-mode',
        'policies': {
            'create_router_enable_snat':
                'create_router:external_gateway_info:enable_snat',
            'update_router_enable_snat':
                'update_router:external_gateway_info:enable_snat',
        }
    },
}
def get_feature_permission(request, feature, operation=None):
    """Check if a feature-specific field can be displayed.
    This method checks a permission for a feature-specific field.
    Such a field is usually provided through a Neutron extension.
    :param request: Request Object
    :param feature: feature name defined in FEATURE_MAP
    :param operation (optional): Operation type. The valid value should be
        defined in FEATURE_MAP[feature]['policies']
        It must be specified if FEATURE_MAP[feature] has 'policies'.
    :raises ValueError: if *feature* is unknown, or *operation* is missing
        or invalid for a feature that declares policies
    """
    network_config = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {})
    feature_info = FEATURE_MAP.get(feature)
    if not feature_info:
        # Bug fix: the message previously contained a literal, never
        # interpolated '%(feature)s' placeholder.
        raise ValueError("The requested feature '%(feature)s' is unknown. "
                         "Please make sure to specify a feature defined "
                         "in FEATURE_MAP." % {'feature': feature})
    # Check dashboard settings
    feature_config = feature_info.get('config')
    if feature_config:
        if not network_config.get(feature_config['name'],
                                  feature_config['default']):
            return False
    # Check policy
    feature_policies = feature_info.get('policies')
    if feature_policies:
        policy_name = feature_policies.get(operation)
        if not policy_name:
            raise ValueError("The 'operation' parameter for "
                             "get_feature_permission '%(feature)s' "
                             "is invalid. It should be one of %(allowed)s"
                             % {'feature': feature,
                                'allowed': ' '.join(feature_policies.keys())})
        role = (('network', policy_name),)
        if not policy.check(role, request):
            return False
    # Check if a required extension is enabled
    feature_extension = feature_info.get('extension')
    if feature_extension:
        try:
            return is_extension_supported(request, feature_extension)
        except Exception:
            # Bug fix: the previous log message was garbled
            # ("Failed to check ... extension is not supported").
            LOG.info("Failed to check whether Neutron supports the "
                     "'%s' extension", feature_extension)
            return False
    # If all checks are passed, now a given feature is allowed.
    return True
class QoSPolicy(NeutronAPIDictWrapper):
    """Wrapper for neutron QoS Policy."""
    def to_dict(self):
        # Expose the raw API attribute dict backing this wrapper.
        return self._apidict
def policy_create(request, **kwargs):
    """Create a QoS Policy.
    :param request: request context
    :param name: name of the policy
    :param description: description of policy
    :param shared: boolean (true or false)
    :return: QoSPolicy object
    """
    response = neutronclient(request).create_qos_policy(
        body={'policy': kwargs})
    return QoSPolicy(response.get('policy'))
def policy_list(request, **kwargs):
    """List of QoS Policies."""
    response = neutronclient(request).list_qos_policies(**kwargs)
    return [QoSPolicy(item) for item in response.get('policies')]
@profiler.trace
def policy_get(request, policy_id, **kwargs):
    """Get QoS policy for a given policy id."""
    response = neutronclient(request).show_qos_policy(policy_id, **kwargs)
    return QoSPolicy(response.get('policy'))
@profiler.trace
def policy_delete(request, policy_id):
    """Delete QoS policy for a given policy id."""
    client = neutronclient(request)
    client.delete_qos_policy(policy_id)
@profiler.trace
def list_availability_zones(request, resource=None, state=None):
    """List availability zones, optionally filtered, sorted by name."""
    zones = neutronclient(request).list_availability_zones().get(
        'availability_zones')
    if resource:
        zones = [zone for zone in zones if zone['resource'] == resource]
    if state:
        zones = [zone for zone in zones if zone['state'] == state]
    return sorted(zones, key=lambda zone: zone['name'])
| 36.73377 | 79 | 0.643636 |
9194376178f47e389f359ab76519b33f1556d5c1
| 731 |
py
|
Python
|
semana06/funciones-04.py
|
haroldtr/python-p32021
|
774552481a8f941b193bc9f1f4af93cd37786be6
|
[
"MIT"
] | 1 |
2021-09-30T15:00:26.000Z
|
2021-09-30T15:00:26.000Z
|
semana06/funciones-04.py
|
haroldtr/python-p32021
|
774552481a8f941b193bc9f1f4af93cd37786be6
|
[
"MIT"
] | null | null | null |
semana06/funciones-04.py
|
haroldtr/python-p32021
|
774552481a8f941b193bc9f1f4af93cd37786be6
|
[
"MIT"
] | null | null | null |
# Demo: a function receiving several parameters and passing
# formatted arguments to print.
def datosEstudiantes(matricula, nombre, apellido, genero, edad, carrera):
    """Print a formatted summary of one student's record."""
    lines = [
        'La matricula del estudiante es: \t %s' % matricula,
        'El nombre del estudiante es: \t %s' % nombre,
        'El apellido del estudiante es: \t %s' % apellido,
        'El genero del estudiante es: \t %s' % genero,
        'La edad del estudiante es: \t %s' % edad,
        'La carrera del estudiante es: \t %s' % carrera,
        'El estudiante %s que esta cursando la carrera de %s tiene %s años'
        % (nombre, carrera, edad),
    ]
    for line in lines:
        print(line)
# Prompt the user for a student id, then print a demo record for it.
vMatricula = input("Ingresa la matricula")
datosEstudiantes(vMatricula, 'David Jose', 'Tejada', 'Masculino', 18, 'ISC')
| 40.611111 | 79 | 0.682627 |
8b8a2f06b49828e269824fe6cc8c018fdfdb59ad
| 37,273 |
py
|
Python
|
vta/python/vta/ir_pass.py
|
Orion34C/incubator-tvm
|
27a02844cb52e883a4a66da68a527590d76f7d01
|
[
"Apache-2.0"
] | 1 |
2021-03-20T02:03:00.000Z
|
2021-03-20T02:03:00.000Z
|
vta/python/vta/ir_pass.py
|
Orion34C/incubator-tvm
|
27a02844cb52e883a4a66da68a527590d76f7d01
|
[
"Apache-2.0"
] | null | null | null |
vta/python/vta/ir_pass.py
|
Orion34C/incubator-tvm
|
27a02844cb52e883a4a66da68a527590d76f7d01
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Additional IR Pass for VTA"""
# pylint: disable=len-as-condition, no-else-return
import tvm
from topi import util
from .environment import get_env
def _match_pragma(stmt, key):
"""Internal helper to match stmt to pragma stmt.
Parameters
----------
stmt : Stmt
The AttrStmt
key : str
The pragma key
"""
return ((stmt.attr_key == "pragma_" + key) or
(stmt.attr_key == "pragma_scope" and stmt.value.value == key))
def fold_uop_loop(stmt_in):
    """Detect and fold uop loop.
    VTA supports a uop programming model
    that recognizes loop structure.
    This pass detects the loop structure
    and extracts that into a uop loop AST.
    Parameters
    ----------
    stmt_in : Stmt
        Input statement
    Returns
    -------
    stmt_out : Stmt
        Output statement.
    """
    env = get_env()
    def _fold_outermost_loop(body):
        # Returns (begin, new_body, end); (None, body, None) when there is
        # no outermost For loop to fold.
        stmt = body
        while not isinstance(stmt, tvm.tir.For):
            if isinstance(stmt, (tvm.tir.ProducerConsumer,)):
                stmt = stmt.body
            else:
                return None, body, None
        loop_var = stmt.loop_var
        # Per-operand (dst, src, wgt) loop-invariant strides of the
        # VTAUopPush addresses with respect to loop_var.
        gemm_offsets = [None, None, None]
        fail = [False]
        def _post_order(op):
            assert isinstance(op, tvm.tir.Call)
            base_args = 2
            if op.name == "VTAUopPush":
                args = []
                args += op.args[:base_args]
                for i in range(3):
                    # Decompose the address as stride * loop_var + base.
                    m = tvm.arith.DetectLinearEquation(
                        op.args[i + base_args], [loop_var])
                    if not m:
                        fail[0] = True
                        return op
                    if gemm_offsets[i] is not None:
                        # Every push in the loop must share the same stride.
                        if not tvm.ir_pass.Equal(m[0], gemm_offsets[i]):
                            fail[0] = True
                            return op
                        args.append(m[1])
                    else:
                        gemm_offsets[i] = m[0]
                        args.append(m[1])
                args += op.args[base_args+3:]
                return tvm.call_extern("int32", "VTAUopPush", *args)
            if op.name not in ("VTATLSCommandHandle", "tvm_thread_context"):
                raise RuntimeError("unexpected op %s" % op)
            return op
        ret = tvm.ir_pass.IRTransform(
            stmt.body, None, _post_order, ["Call"])
        if not fail[0] and all(x is not None for x in gemm_offsets):
            # The folded body must not reference loop_var anywhere else.
            def _visit(op):
                if op.same_as(loop_var):
                    fail[0] = True
            tvm.ir_pass.PostOrderVisit(ret, _visit)
            if not fail[0]:
                begin = tvm.call_extern(
                    "int32", "VTAUopLoopBegin", stmt.extent, *gemm_offsets)
                end = tvm.call_extern("int32", "VTAUopLoopEnd")
                return [begin, ret, end]
        raise ValueError("Failed to fold the GEMM instructions..")
    def _do_fold(stmt):
        if (stmt.attr_key == "coproc_uop_scope" and
                isinstance(stmt.value, tvm.tir.StringImm) and
                stmt.value.value == env.dev.vta_push_uop.value):
            body = stmt.body
            begins = []
            ends = []
            # Try to fold up to two nested loops (hardware limit).
            try:
                begin, body, end = _fold_outermost_loop(body)
                if begin is not None:
                    begins.append(begin)
                if end is not None:
                    ends.append(end)
                begin, body, end = _fold_outermost_loop(body)
                if begin is not None:
                    begins.append(begin)
                if end is not None:
                    ends.append(end)
            except ValueError:
                pass
            if body == stmt.body:
                return stmt
            ends = list(reversed(ends))
            body = tvm.tir.stmt_seq(*(begins + [body] + ends))
            return tvm.tir.AttrStmt(
                stmt.node, stmt.attr_key, stmt.value, body)
        return None
    out = tvm.ir_pass.IRTransform(
        stmt_in, _do_fold, None, ["AttrStmt"])
    return out
def cpu_access_rewrite(stmt_in):
    """Detect CPU access to VTA buffer and get address correctly.
    VTA's buffer is an opaque handle that does not
    correspond to an address in CPU.
    This pass detects CPU access and rewrites it to use the pointer
    returned by VTABufferCPUPtr for CPU access.
    Parameters
    ----------
    stmt_in : Stmt
        Input statement
    Returns
    -------
    stmt_out : Stmt
        Transformed statement
    """
    env = get_env()
    # Maps each VTA buffer var to the CPU-pointer var that replaces it.
    rw_info = {}
    def _post_order(op):
        if isinstance(op, tvm.tir.Allocate):
            buffer_var = op.buffer_var
            if not buffer_var in rw_info:
                return None
            # Bind the CPU pointer right under the allocation so every
            # rewritten Load/Store in the body sees it.
            new_var = rw_info[buffer_var]
            let_stmt = tvm.tir.LetStmt(
                new_var, tvm.call_extern(
                    "handle", "VTABufferCPUPtr",
                    env.dev.command_handle,
                    buffer_var), op.body)
            alloc = tvm.tir.Allocate(
                buffer_var, op.dtype, op.extents,
                op.condition, let_stmt)
            del rw_info[buffer_var]
            return alloc
        if isinstance(op, tvm.tir.Load):
            buffer_var = op.buffer_var
            if not buffer_var in rw_info:
                rw_info[buffer_var] = tvm.var(
                    buffer_var.name + "_ptr", "handle")
            new_var = rw_info[buffer_var]
            return tvm.tir.Load(op.dtype, new_var, op.index)
        if isinstance(op, tvm.tir.Store):
            buffer_var = op.buffer_var
            if not buffer_var in rw_info:
                rw_info[buffer_var] = tvm.var(
                    buffer_var.name + "_ptr", "handle")
            new_var = rw_info[buffer_var]
            return tvm.tir.Store(new_var, op.value, op.index)
        raise RuntimeError("not reached")
    stmt = tvm.ir_pass.IRTransform(
        stmt_in, None, _post_order, ["Allocate", "Load", "Store"])
    # Buffers accessed but not allocated in this statement: bind their
    # CPU pointers at the outermost level.
    for buffer_var, new_var in rw_info.items():
        stmt = tvm.tir.LetStmt(
            new_var, tvm.call_extern(
                "handle", "VTABufferCPUPtr",
                env.dev.command_handle,
                buffer_var), stmt)
    return stmt
def lift_alloc_to_scope_begin(stmt_in):
    """Lift allocate to beginning of the current scope.
    Parameters
    ----------
    stmt_in : Stmt
        Input statement
    Returns
    -------
    stmt_out : Stmt
        Transformed statement
    """
    # Stack of pending lifted statements, one list per open scope
    # (For loops and virtual_thread attrs open new scopes).
    lift_stmt = [[]]
    def _merge_block(slist, body):
        # Re-wrap *body* with the collected scope-opening statements,
        # innermost first.
        for op in slist:
            if op.body == body:
                body = op
            elif isinstance(op, tvm.tir.Allocate):
                body = tvm.tir.Allocate(
                    op.buffer_var, op.dtype,
                    op.extents, op.condition, body)
            elif isinstance(op, tvm.tir.AttrStmt):
                body = tvm.tir.AttrStmt(
                    op.node, op.attr_key, op.value, body)
            elif isinstance(op, tvm.tir.For):
                body = tvm.tir.For(
                    op.loop_var, op.min, op.extent, op.for_type,
                    op.device_api, body)
            else:
                raise RuntimeError("unexpected op")
        del slist[:]
        return body
    def _pre_order(op):
        if isinstance(op, tvm.tir.For):
            lift_stmt.append([])
        elif isinstance(op, tvm.tir.AttrStmt):
            if op.attr_key == "virtual_thread":
                lift_stmt.append([])
    def _post_order(op):
        if isinstance(op, tvm.tir.Allocate):
            # Detach the allocate; it is re-attached at scope begin.
            lift_stmt[-1].append(op)
            return op.body
        if isinstance(op, tvm.tir.AttrStmt):
            if op.attr_key == "storage_scope":
                lift_stmt[-1].append(op)
                return op.body
            if op.attr_key == "virtual_thread":
                return _merge_block(lift_stmt.pop() + [op], op.body)
            return op
        if isinstance(op, tvm.tir.For):
            return _merge_block(lift_stmt.pop() + [op], op.body)
        raise RuntimeError("not reached")
    stmt = tvm.ir_pass.IRTransform(
        stmt_in, _pre_order, _post_order, ["Allocate", "AttrStmt", "For"])
    assert len(lift_stmt) == 1
    return _merge_block(lift_stmt[0], stmt)
def inject_skip_copy(stmt_in):
    """Pass to inject skip copy stmt, used for debug purpose.
    Parameters
    ----------
    stmt_in : Stmt
        Input statement
    Returns
    -------
    stmt_out : Stmt
        Transformed statement
    """
    def _transform(stmt):
        # Replace the annotated DMA copy with a no-op for debugging.
        if _match_pragma(stmt, "skip_dma_copy"):
            return tvm.tir.Evaluate(0)
        return None
    return tvm.ir_pass.IRTransform(stmt_in, _transform, None, ["AttrStmt"])
def inject_coproc_sync(stmt_in):
    """Pass to inject coprocessor synchronization, used in debug.
    Parameters
    ----------
    stmt_in : Stmt
        Input statement
    Returns
    -------
    stmt_out : Stmt
        Transformed statement
    """
    # NOTE(review): written to but never read here — presumably a leftover
    # debug flag; confirm before removing.
    success = [False]
    def _do_fold(stmt):
        if _match_pragma(stmt, "coproc_sync"):
            # Append an explicit vta.coproc_sync after the annotated body.
            success[0] = True
            sync = tvm.tir.Call(
                "int32", "vta.coproc_sync", [], tvm.tir.Call.Intrinsic, None, 0)
            return tvm.tir.SeqStmt([stmt.body, tvm.tir.Evaluate(sync)])
        if _match_pragma(stmt, "trim_loop"):
            # Debug aid: truncate the annotated loop to 2 iterations.
            op = stmt.body
            assert isinstance(op, tvm.tir.For)
            return tvm.tir.For(
                op.loop_var, op.min, 2, op.for_type,
                op.device_api, op.body)
        return None
    stmt = tvm.ir_pass.IRTransform(
        stmt_in, None, _do_fold, ["AttrStmt"])
    stmt = tvm.ir_pass.CoProcSync(stmt)
    return stmt
def inject_dma_intrin(stmt_in):
    """Pass to inject DMA copy intrinsics.
    Parameters
    ----------
    stmt_in : Stmt
        Input statement
    Returns
    -------
    stmt_out : Stmt
        Transformed statement
    """
    env = get_env()
    idxd = tvm.indexdiv
    idxm = tvm.indexmod
    def _check_compact(buf):
        # Verify that the buffer is dense (row-major with no gaps);
        # raises RuntimeError otherwise.
        ndim = len(buf.shape)
        size = tvm.const(1, buf.shape[0].dtype)
        for i in reversed(range(ndim)):
            if not util.equal_const_int(size - buf.strides[i], 0):
                raise RuntimeError(
                    "Cannot prove compact: shape=%s, strides=%s" % (buf.shape, buf.strides))
            size = size * buf.shape[i]
    def _fold_buffer_dim(buf, scope, elem_block):
        # Collapse contiguous trailing dimensions so the innermost folded
        # dimension has exactly elem_block elements; returns (shape, strides).
        ndim = len(buf.shape)
        x_size = 1
        base = 0
        for i in range(1, ndim + 1):
            if not util.equal_const_int(buf.strides[ndim - i] - x_size, 0):
                raise RuntimeError("scope %s needs to have block=%d" % (scope, elem_block))
            x_size = x_size * buf.shape[ndim - i]
            if util.equal_const_int(x_size - elem_block, 0):
                base = i + 1
                break
        if base == 0:
            raise RuntimeError("scope %s need to have block=%d, shape=%s" % (
                scope, elem_block, buf.shape))
        shape = [elem_block]
        strides = [1]
        if base < ndim + 1 and not util.equal_const_int(buf.strides[ndim - base], elem_block):
            shape.append(1)
            strides.append(elem_block)
        while base < ndim + 1:
            # Greedily merge outer dimensions that stay contiguous.
            x_size = 1
            x_stride = buf.strides[ndim - base]
            next_base = base
            if not util.equal_const_int(idxm(x_stride, elem_block), 0):
                raise RuntimeError(
                    "scope %s need to have block=%d, shape=%s, strides=%s" % (
                        scope, elem_block, buf.shape, buf.strides))
            for i in range(base, ndim + 1):
                k = ndim - i
                if not util.equal_const_int(x_size * x_stride - buf.strides[k], 0):
                    break
                x_size = x_size * buf.shape[k]
                next_base = i + 1
            shape.append(tvm.ir_pass.Simplify(x_size))
            strides.append(x_stride)
            assert next_base != base
            base = next_base
        strides = list(reversed(strides))
        shape = list(reversed(shape))
        return shape, strides
    def _get_2d_pattern(buf, elem_width, elem_bytes, dtype, scope, allow_fold):
        # Match the buffer layout to the hardware 2D access pattern and
        # return (x_size, y_size, x_stride, element offset), all measured
        # in elem_block units.
        elem_block = elem_bytes * 8 // elem_width
        if buf.dtype != dtype:
            raise RuntimeError("Expect buffer type to be %s instead of %s" %
                               (dtype, buf.dtype))
        shape, strides = buf.shape, buf.strides
        if not util.equal_const_int(idxm(buf.elem_offset, elem_block), 0):
            raise RuntimeError("scope %s need to have block=%d" % (scope, elem_block))
        if allow_fold:
            shape, strides = _fold_buffer_dim(buf, scope, elem_block)
        else:
            shape = list(x for x in shape)
            strides = list(x for x in strides)
        def raise_error():
            """Internal function to raise error """
            raise RuntimeError(
                ("Scope[%s]: cannot detect 2d pattern with elem_block=%d:" +
                 " shape=%s, strides=%s") % (scope, elem_block, buf.shape, buf.strides))
        ndim = len(shape)
        # Check if the inner-tensor is already flat
        flat = util.equal_const_int(shape[-1], elem_block)
        if flat:
            if not util.equal_const_int(strides[-1], 1):
                raise_error()
            if ndim == 1:
                x_size = 1
                x_stride = 1
                y_size = 1
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
            if not util.equal_const_int(strides[-2] - elem_block, 0):
                raise_error()
            if ndim == 2:
                x_size = shape[-2]
                x_stride = shape[-2]
                y_size = 1
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
            if not util.equal_const_int(idxm(strides[-3], elem_block), 0):
                raise_error()
            if ndim == 3:
                x_size = shape[-2]
                x_stride = idxd(strides[-3], elem_block)
                y_size = shape[-3]
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
        else:
            # Inner two dimensions together must form one elem_block.
            if not util.equal_const_int(strides[-1], 1):
                raise_error()
            if not util.equal_const_int(strides[-2] - shape[-1], 0):
                raise_error()
            if not util.equal_const_int(shape[-1] * shape[-2], elem_block):
                raise_error()
            if ndim == 2:
                x_size = 1
                x_stride = 1
                y_size = 1
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
            if not util.equal_const_int(strides[-3], elem_block):
                raise_error()
            if ndim == 3:
                x_size = shape[-3]
                x_stride = shape[-3]
                y_size = 1
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
            if not util.equal_const_int(idxm(strides[-4], elem_block), 0):
                raise_error()
            if ndim == 4:
                x_size = shape[-3]
                x_stride = idxd(strides[-4], elem_block)
                y_size = shape[-4]
                return x_size, y_size, x_stride, idxd(buf.elem_offset, elem_block)
        raise_error()
    def _inject_copy(src, dst, pad_before, pad_after, pad_value):
        # FIXME: pad_value is ignored...
        _ = pad_value
        if dst.scope == "global":
            # Store
            if pad_before or pad_after:
                raise RuntimeError("Do not support copy into DRAM with pad")
            if src.scope == env.acc_scope:
                elem_width = env.OUT_WIDTH
                elem_bytes = env.OUT_ELEM_BYTES
                mem_type = env.dev.MEM_ID_OUT
                data_type = "int%d" % env.OUT_WIDTH
                task_qid = env.dev.QID_STORE_OUT
            else:
                raise RuntimeError("Do not support copy %s->dram" % (src.scope))
            _check_compact(src)
            x_size, y_size, x_stride, offset = _get_2d_pattern(
                dst, elem_width, elem_bytes, data_type, src.scope, allow_fold=True)
            irb = tvm.ir_builder.create()
            irb.scope_attr(env.dev.vta_axis, "coproc_scope",
                           env.dev.get_task_qid(task_qid))
            irb.emit(tvm.call_extern(
                "int32", "VTAStoreBuffer2D",
                env.dev.command_handle,
                src.access_ptr("r", "int32"),
                mem_type, dst.data, offset, x_size, y_size, x_stride))
            return irb.get()
        elif src.scope == "global":
            # Load: select element geometry and queue by destination scope.
            if dst.scope == env.acc_scope:
                elem_width = env.ACC_WIDTH
                elem_bytes = env.ACC_ELEM_BYTES
                mem_type = env.dev.MEM_ID_ACC
                data_type = "int%d" % env.ACC_WIDTH
                task_qid = env.dev.QID_LOAD_OUT
            elif dst.scope == env.inp_scope:
                elem_width = env.INP_WIDTH
                elem_bytes = env.INP_ELEM_BYTES
                mem_type = env.dev.MEM_ID_INP
                data_type = "int%d" % env.INP_WIDTH
                task_qid = env.dev.QID_LOAD_INP
            elif dst.scope == env.wgt_scope:
                elem_width = env.WGT_WIDTH
                elem_bytes = env.WGT_ELEM_BYTES
                mem_type = env.dev.MEM_ID_WGT
                data_type = "int%d" % env.WGT_WIDTH
                task_qid = env.dev.QID_LOAD_WGT
            else:
                raise RuntimeError("Do not support copy dram->%s" % (dst.scope))
            # collect pad statistics
            if pad_before:
                assert pad_after
                ndim = len(pad_before)
                if ndim <= 2 or ndim > 5:
                    raise ValueError("Limitation of 2D pad load forbid ndim=%d" % ndim)
                if ndim == 5:
                    # This case occurs when batch size N > 1
                    y_pad_before = pad_before[1]
                    x_pad_before = pad_before[2]
                    y_pad_after = pad_after[1]
                    x_pad_after = pad_after[2]
                    for dim in range(3, ndim):
                        if not util.equal_const_int(pad_before[dim], 0):
                            raise ValueError("Do not support pad on the innermost block")
                        if not util.equal_const_int(pad_after[dim], 0):
                            raise ValueError("Do not support pad on the innermost block")
                else:
                    y_pad_before = pad_before[0]
                    x_pad_before = pad_before[1]
                    y_pad_after = pad_after[0]
                    x_pad_after = pad_after[1]
                    for dim in range(2, ndim):
                        if not util.equal_const_int(pad_before[dim], 0):
                            raise ValueError("Do not support pad on the innermost block")
                        if not util.equal_const_int(pad_after[dim], 0):
                            raise ValueError("Do not support pad on the innermost block")
                allow_fold = False
            else:
                x_pad_before = 0
                y_pad_before = 0
                x_pad_after = 0
                y_pad_after = 0
                allow_fold = True
            _check_compact(dst)
            x_size, y_size, x_stride, offset = _get_2d_pattern(
                src, elem_width, elem_bytes, data_type,
                dst.scope, allow_fold=allow_fold)
            irb = tvm.ir_builder.create()
            irb.scope_attr(env.dev.vta_axis, "coproc_scope",
                           env.dev.get_task_qid(task_qid))
            irb.emit(tvm.call_extern(
                "int32", "VTALoadBuffer2D",
                env.dev.command_handle,
                src.data, offset, x_size, y_size, x_stride,
                x_pad_before, y_pad_before,
                x_pad_after, y_pad_after,
                dst.access_ptr("r", "int32"), mem_type))
            return irb.get()
        else:
            raise RuntimeError("Do not support copy %s->%s" % (src.scope, dst.scope))
    return tvm.ir_pass.InjectCopyIntrin(stmt_in, "dma_copy", _inject_copy)
def _get_gemm_intrin_buffer():
    """Declare the (weight, input, output) buffer layouts of the GEMM core.
    Returns a (wgt_layout, inp_layout, out_layout) tuple of tvm buffers
    whose shapes match one hardware GEMM tile for the current env.
    """
    env = get_env()
    # A "lane" is one scalar element inside a packed vector element.
    wgt_lanes = env.WGT_ELEM_BITS // env.WGT_WIDTH
    assert wgt_lanes == env.BLOCK_OUT * env.BLOCK_IN
    wgt_shape = (env.BLOCK_OUT, env.BLOCK_IN)
    assert wgt_shape[0] * wgt_shape[1] == wgt_lanes
    inp_lanes = env.INP_ELEM_BITS // env.INP_WIDTH
    assert inp_lanes == env.BATCH * env.BLOCK_IN
    inp_shape = (env.BATCH, env.BLOCK_IN)
    assert inp_shape[0] * inp_shape[1] == inp_lanes
    out_lanes = env.ACC_ELEM_BITS // env.ACC_WIDTH
    assert out_lanes == env.BATCH * env.BLOCK_OUT
    out_shape = (env.BATCH, env.BLOCK_OUT)
    assert out_shape[0] * out_shape[1] == out_lanes
    wgt = tvm.placeholder((wgt_shape[0], wgt_shape[1]),
                          dtype="int%d" % env.WGT_WIDTH,
                          name=env.wgt_scope)
    inp = tvm.placeholder((inp_shape[0], inp_shape[1]),
                          dtype="int%d" % env.INP_WIDTH,
                          name=env.inp_scope)
    k = tvm.reduce_axis((0, wgt_shape[1]), name="k")
    out_dtype = "int%d" % env.ACC_WIDTH
    # One tile of the matrix multiply: out[i, j] = sum_k inp[i, k] * wgt[j, k]
    out = tvm.compute((out_shape[0], out_shape[1]),
                      lambda i, j: tvm.sum(inp[i, k].astype(out_dtype) *
                                           wgt[j, k].astype(out_dtype),
                                           axis=[k]),
                      name="out")
    wgt_layout = tvm.decl_buffer(
        wgt.shape, wgt.dtype, env.wgt_scope,
        scope=env.wgt_scope, offset_factor=wgt_lanes, data_alignment=wgt_lanes)
    inp_layout = tvm.decl_buffer(
        inp.shape, inp.dtype, env.inp_scope,
        scope=env.inp_scope, offset_factor=inp_lanes, data_alignment=inp_lanes)
    out_layout = tvm.decl_buffer(
        out.shape, out.dtype, env.acc_scope,
        scope=env.acc_scope, offset_factor=out_lanes, data_alignment=out_lanes)
    return wgt_layout, inp_layout, out_layout
def inject_conv2d_transpose_skip(stmt_in):
    """Pass to skip 0-weights in conv2d transpose with stride > 1.
    Parameters
    ----------
    stmt_in : Stmt
        Input statement
    Returns
    -------
    stmt_out : Stmt
        Transformed statement
    """
    env = get_env()
    dwgt, dinp, dout = _get_gemm_intrin_buffer()
    # Accumulated per-visit: Call nodes and Select conditions found inside
    # the annotated statement.
    calls = []
    selects = []
    def _find_basics(op):
        if isinstance(op, tvm.tir.Call):
            calls.append(op)
        elif isinstance(op, tvm.tir.Select):
            selects.append(op)
    def _do_fold(op):
        if _match_pragma(op, "conv2d_transpose_gemm"):
            is_init = ".init" in str(op)
            tvm.ir_pass.PostOrderVisit(op, _find_basics)
            if is_init:
                # create inner most block
                irb = tvm.ir_builder.create()
                dev = env.dev
                irb.scope_attr(dev.vta_axis, "coproc_scope", dev.get_task_qid(dev.QID_COMPUTE))
                irb.scope_attr(dev.vta_axis, "coproc_uop_scope", dev.vta_push_uop)
                # Reset-mode micro-op (second arg 1 = reset accumulator).
                irb.emit(tvm.call_extern("int32", "VTAUopPush",
                                         0, 1,
                                         dout.access_ptr("rw", "int32"),
                                         0, 0,
                                         0, 0, 0))
                inner = irb.get()
                args = op.body.body.args
                res_tensor = op.body.body.func.output(0)
                tpl = (args[0], 1, args[1], 1, args[2], 1, args[3], 1, 0, 1, 0, env.BLOCK_OUT)
                inner = tvm.tir.AttrStmt(
                    [dout, res_tensor], 'buffer_bind_scope',
                    tvm.call_intrin('handle', 'tvm_tuple', *tpl), inner)
                return inner
            else:
                # The last three calls are the conv output, the (padded)
                # data access and the kernel access.
                conv_call, data_call, kernel_call = calls[-3:]
                pad_data_tensor = data_call.func.output(0)
                kernel_tensor = kernel_call.func.output(0)
                res_tensor = conv_call.func.output(0)
                if selects:
                    condition = selects[0].condition
                else:
                    condition = tvm.const(1, 'int')
                # create inner most block
                irb = tvm.ir_builder.create()
                # Guard the GEMM micro-op: skip it where the stride
                # pattern guarantees zero weights.
                with irb.if_scope(condition):
                    dev = env.dev
                    irb.scope_attr(dev.vta_axis, "coproc_scope", dev.get_task_qid(dev.QID_COMPUTE))
                    irb.scope_attr(dev.vta_axis, "coproc_uop_scope", dev.vta_push_uop)
                    irb.emit(tvm.call_extern("int32", "VTAUopPush",
                                             0, 0,
                                             dout.access_ptr("rw", "int32"),
                                             dinp.access_ptr("r", "int32"),
                                             dwgt.access_ptr("r", "int32"),
                                             0, 0, 0))
                inner = irb.get()
                args = conv_call.args
                tpl = (args[0], 1, args[1], 1, args[2], 1, args[3],
                       1, 0, 1, 0, env.BLOCK_OUT)
                inner = tvm.tir.AttrStmt(
                    [dout, res_tensor], 'buffer_bind_scope',
                    tvm.call_intrin('handle', 'tvm_tuple', *tpl), inner)
                args = kernel_call.args
                tpl = (args[0], 1, args[1], 1, args[2], 1, args[3],
                       1, 0, env.BLOCK_OUT, 0, env.BLOCK_IN)
                inner = tvm.tir.AttrStmt(
                    [dwgt, kernel_tensor], 'buffer_bind_scope',
                    tvm.call_intrin('handle', 'tvm_tuple', *tpl), inner)
                args = data_call.args
                tpl = (args[0], 1, args[1], 1, args[2], 1, args[3],
                       1, 0, 1, 0, env.BLOCK_IN)
                inner = tvm.tir.AttrStmt(
                    [dinp, pad_data_tensor], 'buffer_bind_scope',
                    tvm.call_intrin('handle', 'tvm_tuple', *tpl), inner)
                return inner
        return None
    ret = tvm.ir_pass.IRTransform(
        stmt_in, _do_fold, None, ["AttrStmt"])
    return ret
def annotate_alu_coproc_scope(stmt_in):
    """Pass to insert ALU instruction.
    Parameters
    ----------
    stmt_in : Stmt
        Input statement
    Returns
    -------
    stmt_out : Stmt
        Transformed statement
    """
    env = get_env()
    def _annotate(stmt):
        # Debug aid: drop ALU blocks entirely when marked skip_alu.
        if _match_pragma(stmt, "skip_alu"):
            return tvm.tir.Evaluate(0)
        if _match_pragma(stmt, "alu"):
            # Wrap the statement in the COMPUTE-queue coprocessor scope and
            # mark it so micro-op generation targets the ALU.
            builder = tvm.ir_builder.create()
            builder.scope_attr(env.dev.vta_axis, "coproc_scope",
                               env.dev.get_task_qid(env.dev.QID_COMPUTE))
            builder.scope_attr(env.dev.vta_axis, "coproc_uop_scope",
                               tvm.tir.StringImm("VTAPushALUOp"))
            builder.emit(stmt)
            return builder.get()
        return stmt
    return tvm.ir_pass.IRTransform(stmt_in, None, _annotate, ["AttrStmt"])
def inject_alu_intrin(stmt_in):
    """Pass to inject ALU micro-ops.

    Rewrites every loop nest tagged with the "alu" pragma into a sequence of
    VTAUopLoopBegin / VTAUopPush / VTAUopLoopEnd extern calls.  The store
    index of the innermost body is decomposed into linear coefficients over
    the loop variables, the tensorized (BATCH x BLOCK_OUT) dimensions are
    stripped, and adjacent loops whose strides compose are flattened into a
    single hardware loop.

    Parameters
    ----------
    stmt_in : Stmt
        Input statement

    Returns
    -------
    stmt_out : Stmt
        Transformed statement
    """
    env = get_env()
    idxm = tvm.indexmod
    def _do_fold(stmt):
        def _equal(x, y):
            # Symbolic equality check: x == y iff Simplify(x - y) == 0.
            return tvm.ir_pass.Equal(tvm.ir_pass.Simplify(x - y), 0)
        def _flatten_loop(src_coeff, dst_coeff, extents):
            # Fuse adjacent loops whose strides compose perfectly: when
            # coeff[i] == coeff[i+1] * extent[i+1] for both src and dst, the
            # two loops collapse into one loop whose extent is the product.
            # Coefficient lists are outermost-first with the trailing element
            # being the constant offset, which is preserved untouched.
            src_coeff = list(src_coeff)
            dst_coeff = list(dst_coeff)
            extents = list(extents)
            rev_src_coeff = [src_coeff.pop()]
            rev_dst_coeff = [dst_coeff.pop()]
            rev_extents = []
            assert src_coeff
            vsrc = src_coeff.pop()
            vdst = dst_coeff.pop()
            vext = extents.pop()
            # Walk from the innermost loop outwards, greedily fusing.
            while src_coeff:
                next_src = src_coeff.pop()
                next_dst = dst_coeff.pop()
                next_ext = extents.pop()
                if _equal(next_src, vsrc * vext) and _equal(next_dst, vdst * vext):
                    vext = tvm.ir_pass.Simplify(vext * next_ext)
                else:
                    rev_src_coeff.append(vsrc)
                    rev_dst_coeff.append(vdst)
                    rev_extents.append(vext)
                    vsrc = next_src
                    vdst = next_dst
                    vext = next_ext
            rev_src_coeff.append(vsrc)
            rev_dst_coeff.append(vdst)
            rev_extents.append(vext)
            # Results were accumulated innermost-first; restore original order.
            rev_src_coeff.reverse()
            rev_dst_coeff.reverse()
            rev_extents.reverse()
            return rev_src_coeff, rev_dst_coeff, rev_extents
        if _match_pragma(stmt, "alu"):
            # Get to the innermost loop body
            loop_body = stmt.body
            nest_size = 0
            while isinstance(loop_body, tvm.tir.For):
                loop_body = loop_body.body
                nest_size += 1
            # Get the src/dst arguments (the innermost body is a buffer store)
            dst_var = loop_body.buffer_var
            dst_idx = loop_body.index
            # Derive loop variables and extents, outermost-first
            tmp_body = stmt.body
            indices = []
            extents = []
            for _ in range(nest_size):
                indices.append(tmp_body.loop_var)
                extents.append(tmp_body.extent)
                tmp_body = tmp_body.body
            # Derive opcode from the stored expression
            if isinstance(loop_body.value, tvm.tir.Add):
                alu_opcode = env.dev.ALU_OPCODE_ADD
                lhs = loop_body.value.a
                rhs = loop_body.value.b
            elif isinstance(loop_body.value, tvm.tir.Sub):
                alu_opcode = env.dev.ALU_OPCODE_SUB
                lhs = loop_body.value.a
                rhs = loop_body.value.b
            elif isinstance(loop_body.value, tvm.tir.Mul):
                alu_opcode = env.dev.ALU_OPCODE_MUL
                lhs = loop_body.value.a
                rhs = loop_body.value.b
            elif isinstance(loop_body.value, tvm.tir.Min):
                alu_opcode = env.dev.ALU_OPCODE_MIN
                lhs = loop_body.value.a
                rhs = loop_body.value.b
            elif isinstance(loop_body.value, tvm.tir.Max):
                alu_opcode = env.dev.ALU_OPCODE_MAX
                lhs = loop_body.value.a
                rhs = loop_body.value.b
            elif isinstance(loop_body.value, tvm.tir.Call):
                if loop_body.value.name == 'shift_left':
                    # shift_left is encoded as SHR with a negated shift amount
                    # (presumably only a SHR opcode exists in the ISA — see
                    # VTA reference; TODO confirm).
                    alu_opcode = env.dev.ALU_OPCODE_SHR
                    lhs = loop_body.value.args[0]
                    rhs = tvm.ir_pass.Simplify(-loop_body.value.args[1])
                elif loop_body.value.name == 'shift_right':
                    alu_opcode = env.dev.ALU_OPCODE_SHR
                    lhs = loop_body.value.args[0]
                    rhs = loop_body.value.args[1]
                else:
                    raise RuntimeError(
                        "Function call not recognized %s" % (loop_body.value.name))
            elif isinstance(loop_body.value, tvm.tir.Load):
                # A plain copy is expressed as a shift-right by 0 (identity).
                alu_opcode = env.dev.ALU_OPCODE_SHR
                lhs = loop_body.value
                rhs = tvm.const(0, "int32")
            else:
                raise RuntimeError(
                    "Expression not recognized %s, %s, %s" % (
                        type(loop_body.value), str(loop_body.value), str(stmt)))
            # Derive array index coefficients
            dst_coeff = tvm.arith.DetectLinearEquation(dst_idx, indices)
            # Check if lhs/rhs is immediate; the non-immediate side must then
            # alias the destination buffer (in-place computation).
            use_imm = False
            imm_val = None
            if isinstance(rhs, tvm.tir.IntImm):
                assert lhs.buffer_var.same_as(dst_var)
                src_coeff = tvm.arith.DetectLinearEquation(lhs.index, indices)
                use_imm = True
                imm_val = rhs
            if isinstance(lhs, tvm.tir.IntImm):
                assert rhs.buffer_var.same_as(dst_var)
                src_coeff = tvm.arith.DetectLinearEquation(rhs.index, indices)
                use_imm = True
                imm_val = lhs
            if imm_val is None:
                # Neither operand is an immediate: both must be loads from the
                # destination buffer.
                imm_val = 0
                assert lhs.buffer_var.same_as(dst_var) and rhs.buffer_var.same_as(dst_var)
                src_lhs_coeff = tvm.arith.DetectLinearEquation(lhs.index, indices)
                src_rhs_coeff = tvm.arith.DetectLinearEquation(rhs.index, indices)
                # Determine which side has the same coefficients
                lhs_equal = True
                rhs_equal = True
                for i, coef in enumerate(dst_coeff):
                    if not tvm.ir_pass.Equal(coef, src_lhs_coeff[i]):
                        lhs_equal = False
                    if not tvm.ir_pass.Equal(coef, src_rhs_coeff[i]):
                        rhs_equal = False
                # Make sure at least one of the source is identical to the
                # destination (in-place computation)
                assert lhs_equal or rhs_equal
                # Assign the source coefficients: the side that differs from
                # the destination is the "source" operand of the micro-op.
                if lhs_equal:
                    src_coeff = src_rhs_coeff
                else:
                    src_coeff = src_lhs_coeff
            # Ensure that we have the proper tensor dimensions in the
            # innermost loop (pattern match)
            src_coeff = list(src_coeff)
            dst_coeff = list(dst_coeff)
            extents = list(extents)
            assert len(src_coeff) > 1
            assert len(dst_coeff) > 1
            assert len(extents) != 0
            # The constant offset (last coefficient) must be aligned to a
            # whole (BATCH * BLOCK_OUT) tensor tile ...
            assert tvm.ir_pass.Equal(
                tvm.ir_pass.Simplify(
                    idxm(src_coeff[-1], env.BATCH * env.BLOCK_OUT)), 0)
            assert tvm.ir_pass.Equal(
                tvm.ir_pass.Simplify(
                    idxm(dst_coeff[-1], env.BATCH * env.BLOCK_OUT)), 0)
            # ... and the innermost loop must walk lanes contiguously.
            assert tvm.ir_pass.Equal(src_coeff[-2], 1)
            assert tvm.ir_pass.Equal(dst_coeff[-2], 1)
            if env.BATCH > 1:
                # With batching, the next loop level must stride by BLOCK_OUT.
                assert len(src_coeff) > 2
                assert len(dst_coeff) > 2
                assert len(extents) > 1
                assert tvm.ir_pass.Equal(src_coeff[-3], env.BLOCK_OUT)
                assert tvm.ir_pass.Equal(dst_coeff[-3], env.BLOCK_OUT)
            # Apply tensorization of the loop coefficients: drop the loop
            # level(s) covered by the hardware tensor tile, keep the offset.
            src_offset = src_coeff[-1]
            dst_offset = dst_coeff[-1]
            if env.BATCH == 1:
                src_coeff = src_coeff[:-2]
                dst_coeff = dst_coeff[:-2]
                extents = extents[:-1]
            else:
                src_coeff = src_coeff[:-3]
                dst_coeff = dst_coeff[:-3]
                extents = extents[:-2]
            src_coeff.append(src_offset)
            dst_coeff.append(dst_offset)
            # Express the remaining strides/offsets in units of whole tiles.
            src_coeff = [
                tvm.ir_pass.Simplify(c // (env.BATCH * env.BLOCK_OUT)) for c in src_coeff]
            dst_coeff = [
                tvm.ir_pass.Simplify(c // (env.BATCH * env.BLOCK_OUT)) for c in dst_coeff]
            # Flatten the outer loops
            if extents:
                src_coeff, dst_coeff, extents = _flatten_loop(src_coeff, dst_coeff, extents)
            # Insert ALU micro-ops
            irb = tvm.ir_builder.create()
            for idx, extent in enumerate(extents):
                irb.emit(tvm.call_extern(
                    "int32", "VTAUopLoopBegin",
                    extent, dst_coeff[idx], src_coeff[idx], 0))
            use_imm = int(use_imm)
            irb.emit(tvm.call_extern(
                "int32", "VTAUopPush",
                1, 0,
                dst_coeff[len(dst_coeff)-1],
                src_coeff[len(src_coeff)-1],
                0,
                alu_opcode, use_imm, imm_val))
            # Close one loop scope per emitted VTAUopLoopBegin.
            for extent in extents:
                irb.emit(tvm.call_extern(
                    "int32", "VTAUopLoopEnd"))
            return irb.get()
        return stmt
    stmt_out = tvm.ir_pass.IRTransform(
        stmt_in, None, _do_fold, ["AttrStmt"])
    return stmt_out
def debug_print(stmt):
    """A debug pass that prints the stmt and returns it unchanged.

    Parameters
    ----------
    stmt : Stmt
        The input statement

    Returns
    -------
    stmt : Stmt
        The input statement, returned unmodified (printing is the only
        side effect)
    """
    # pylint: disable=superfluous-parens
    print(stmt)
    return stmt
| 37.573589 | 99 | 0.526655 |
01473ae689b7ca5a3509a4b7c982b3c3a5588470
| 732 |
py
|
Python
|
cride/users/admin.py
|
eocode/Rider-App
|
9629f76f97c605a3f40486a4d93707afbaf22563
|
[
"MIT"
] | 9 |
2020-05-10T05:56:40.000Z
|
2022-01-24T08:49:27.000Z
|
cride/users/admin.py
|
eocode/Rider-App
|
9629f76f97c605a3f40486a4d93707afbaf22563
|
[
"MIT"
] | 6 |
2020-04-10T20:26:38.000Z
|
2021-06-10T20:02:11.000Z
|
cride/users/admin.py
|
eocode/Rider-App
|
9629f76f97c605a3f40486a4d93707afbaf22563
|
[
"MIT"
] | 5 |
2020-04-24T11:38:25.000Z
|
2021-01-02T09:41:04.000Z
|
"""User models admin"""
# Django
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
# Models
from cride.users.models import User, Profile
class CustomUserAdmin(UserAdmin):
    """Admin configuration for the custom User model."""

    # Columns shown on the user change-list page.
    list_display = (
        'email',
        'username',
        'first_name',
        'last_name',
        'is_staff',
        'is_client',
    )
    # Sidebar filters available on the change-list page.
    list_filter = (
        'is_client',
        'is_staff',
        'created',
        'modified',
    )
@admin.register(Profile)
class ProfileAdmin(admin.ModelAdmin):
    """Admin configuration for the Profile model."""

    # Columns shown on the profile change-list page.
    list_display = ('user', 'reputation', 'rides_taken', 'rides_offered')
    # Search profiles by the related user's identifying fields.
    # Bug fix: 'user__fist_name' -> 'user__first_name'; the misspelled
    # lookup references a nonexistent field and would raise FieldError
    # when an admin search is performed.
    search_fields = ('user__username', 'user__email', 'user__first_name', 'user__last_name')
    list_filter = ('reputation',)
admin.site.register(User, CustomUserAdmin)
| 27.111111 | 92 | 0.715847 |
46f9e2a0900834092205379ea19ea8a1f8ce58ca
| 44,516 |
py
|
Python
|
gluon/gluoncv2/model_provider.py
|
oliviaweng/imgclsmob
|
80fffbb46f986614b162c725b21f3d208597ac77
|
[
"MIT"
] | null | null | null |
gluon/gluoncv2/model_provider.py
|
oliviaweng/imgclsmob
|
80fffbb46f986614b162c725b21f3d208597ac77
|
[
"MIT"
] | null | null | null |
gluon/gluoncv2/model_provider.py
|
oliviaweng/imgclsmob
|
80fffbb46f986614b162c725b21f3d208597ac77
|
[
"MIT"
] | null | null | null |
from .models.alexnet import *
from .models.zfnet import *
from .models.vgg import *
from .models.bninception import *
from .models.resnet import *
from .models.preresnet import *
from .models.resnext import *
from .models.seresnet import *
from .models.sepreresnet import *
from .models.seresnext import *
from .models.senet import *
from .models.resnesta import *
from .models.ibnresnet import *
from .models.ibnbresnet import *
from .models.ibnresnext import *
from .models.ibndensenet import *
from .models.airnet import *
from .models.airnext import *
from .models.bamresnet import *
from .models.cbamresnet import *
from .models.resattnet import *
from .models.sknet import *
from .models.diaresnet import *
from .models.diapreresnet import *
from .models.pyramidnet import *
from .models.diracnetv2 import *
from .models.sharesnet import *
from .models.crunet import *
from .models.crunetb import *
from .models.densenet import *
from .models.condensenet import *
from .models.sparsenet import *
from .models.peleenet import *
from .models.wrn import *
from .models.drn import *
from .models.dpn import *
from .models.darknet import *
from .models.darknet53 import *
from .models.channelnet import *
from .models.isqrtcovresnet import *
from .models.irevnet import *
from .models.bagnet import *
from .models.dla import *
from .models.msdnet import *
from .models.fishnet import *
from .models.espnetv2 import *
from .models.hrnet import *
from .models.vovnet import *
from .models.selecsls import *
from .models.hardnet import *
from .models.xdensenet import *
from .models.squeezenet import *
from .models.squeezenext import *
from .models.shufflenet import *
from .models.shufflenetv2 import *
from .models.shufflenetv2b import *
from .models.menet import *
from .models.mobilenet import *
from .models.mobilenetb import *
from .models.fdmobilenet import *
from .models.mobilenetv2 import *
from .models.mobilenetv3 import *
from .models.igcv3 import *
from .models.ghostnet import *
from .models.mnasnet import *
from .models.darts import *
from .models.proxylessnas import *
from .models.fbnet import *
from .models.xception import *
from .models.inceptionv3 import *
from .models.inceptionv4 import *
from .models.inceptionresnetv2 import *
from .models.polynet import *
from .models.nasnet import *
from .models.pnasnet import *
from .models.spnasnet import *
from .models.efficientnet import *
from .models.efficientnetedge import *
from .models.mixnet import *
from .models.nin_cifar import *
from .models.resnet_cifar import *
from .models.preresnet_cifar import *
from .models.resnext_cifar import *
from .models.seresnet_cifar import *
from .models.sepreresnet_cifar import *
from .models.pyramidnet_cifar import *
from .models.densenet_cifar import *
from .models.xdensenet_cifar import *
from .models.wrn_cifar import *
from .models.wrn1bit_cifar import *
from .models.ror_cifar import *
from .models.rir_cifar import *
from .models.resdropresnet_cifar import *
from .models.shakeshakeresnet_cifar import *
from .models.shakedropresnet_cifar import *
from .models.fractalnet_cifar import *
from .models.diaresnet_cifar import *
from .models.diapreresnet_cifar import *
from .models.octresnet import *
from .models.octresnet_cifar import *
from .models.res2net import *
from .models.resneta import *
from .models.resnetd import *
from .models.fastseresnet import *
from .models.resnet_cub import *
from .models.seresnet_cub import *
from .models.mobilenet_cub import *
from .models.proxylessnas_cub import *
from .models.ntsnet_cub import *
from .models.fcn8sd import *
from .models.pspnet import *
from .models.deeplabv3 import *
from .models.icnet import *
from .models.sinet import *
from .models.bisenet import *
from .models.danet import *
from .models.superpointnet import *
from .models.alphapose_coco import *
from .models.simplepose_coco import *
from .models.simpleposemobile_coco import *
from .models.lwopenpose_cmupan import *
from .models.ibppose_coco import *
from .models.centernet import *
from .models.lffd import *
from .models.visemenet import *
from .models.voca import *
from .models.nvpattexp import *
# from .models.others.oth_simple_pose_resnet import *
# from .models.others.oth_mobile_pose import *
# from .models.others.oth_alpha_pose import *
# from .models.others.oth_icnet import *
# from .models.others.oth_centernet import *
# from .models.others.oth_resnest import *
from .models.others.oth_danet import *
__all__ = ['get_model']
_models = {
'alexnet': alexnet,
'alexnetb': alexnetb,
'zfnet': zfnet,
'zfnetb': zfnetb,
'vgg11': vgg11,
'vgg13': vgg13,
'vgg16': vgg16,
'vgg19': vgg19,
'bn_vgg11': bn_vgg11,
'bn_vgg13': bn_vgg13,
'bn_vgg16': bn_vgg16,
'bn_vgg19': bn_vgg19,
'bn_vgg11b': bn_vgg11b,
'bn_vgg13b': bn_vgg13b,
'bn_vgg16b': bn_vgg16b,
'bn_vgg19b': bn_vgg19b,
'bninception': bninception,
'resnet10': resnet10,
'resnet12': resnet12,
'resnet14': resnet14,
'resnetbc14b': resnetbc14b,
'resnet16': resnet16,
'resnet18_wd4': resnet18_wd4,
'resnet18_wd2': resnet18_wd2,
'resnet18_w3d4': resnet18_w3d4,
'resnet18': resnet18,
'resnet26': resnet26,
'resnetbc26b': resnetbc26b,
'resnet34': resnet34,
'resnetbc38b': resnetbc38b,
'resnet50': resnet50,
'resnet50b': resnet50b,
'resnet101': resnet101,
'resnet101b': resnet101b,
'resnet152': resnet152,
'resnet152b': resnet152b,
'resnet200': resnet200,
'resnet200b': resnet200b,
'preresnet10': preresnet10,
'preresnet12': preresnet12,
'preresnet14': preresnet14,
'preresnetbc14b': preresnetbc14b,
'preresnet16': preresnet16,
'preresnet18_wd4': preresnet18_wd4,
'preresnet18_wd2': preresnet18_wd2,
'preresnet18_w3d4': preresnet18_w3d4,
'preresnet18': preresnet18,
'preresnet26': preresnet26,
'preresnetbc26b': preresnetbc26b,
'preresnet34': preresnet34,
'preresnetbc38b': preresnetbc38b,
'preresnet50': preresnet50,
'preresnet50b': preresnet50b,
'preresnet101': preresnet101,
'preresnet101b': preresnet101b,
'preresnet152': preresnet152,
'preresnet152b': preresnet152b,
'preresnet200': preresnet200,
'preresnet200b': preresnet200b,
'preresnet269b': preresnet269b,
'resnext14_16x4d': resnext14_16x4d,
'resnext14_32x2d': resnext14_32x2d,
'resnext14_32x4d': resnext14_32x4d,
'resnext26_16x4d': resnext26_16x4d,
'resnext26_32x2d': resnext26_32x2d,
'resnext26_32x4d': resnext26_32x4d,
'resnext38_32x4d': resnext38_32x4d,
'resnext50_32x4d': resnext50_32x4d,
'resnext101_32x4d': resnext101_32x4d,
'resnext101_64x4d': resnext101_64x4d,
'seresnet10': seresnet10,
'seresnet12': seresnet12,
'seresnet14': seresnet14,
'seresnet16': seresnet16,
'seresnet18': seresnet18,
'seresnet26': seresnet26,
'seresnetbc26b': seresnetbc26b,
'seresnet34': seresnet34,
'seresnetbc38b': seresnetbc38b,
'seresnet50': seresnet50,
'seresnet50b': seresnet50b,
'seresnet101': seresnet101,
'seresnet101b': seresnet101b,
'seresnet152': seresnet152,
'seresnet152b': seresnet152b,
'seresnet200': seresnet200,
'seresnet200b': seresnet200b,
'sepreresnet10': sepreresnet10,
'sepreresnet12': sepreresnet12,
'sepreresnet14': sepreresnet14,
'sepreresnet16': sepreresnet16,
'sepreresnet18': sepreresnet18,
'sepreresnet26': sepreresnet26,
'sepreresnetbc26b': sepreresnetbc26b,
'sepreresnet34': sepreresnet34,
'sepreresnetbc38b': sepreresnetbc38b,
'sepreresnet50': sepreresnet50,
'sepreresnet50b': sepreresnet50b,
'sepreresnet101': sepreresnet101,
'sepreresnet101b': sepreresnet101b,
'sepreresnet152': sepreresnet152,
'sepreresnet152b': sepreresnet152b,
'sepreresnet200': sepreresnet200,
'sepreresnet200b': sepreresnet200b,
'seresnext50_32x4d': seresnext50_32x4d,
'seresnext101_32x4d': seresnext101_32x4d,
'seresnext101_64x4d': seresnext101_64x4d,
'senet16': senet16,
'senet28': senet28,
'senet40': senet40,
'senet52': senet52,
'senet103': senet103,
'senet154': senet154,
'resnestabc14': resnestabc14,
'resnesta18': resnesta18,
'resnestabc26': resnestabc26,
'resnesta50': resnesta50,
'resnesta101': resnesta101,
'resnesta152': resnesta152,
'resnesta200': resnesta200,
'resnesta269': resnesta269,
'ibn_resnet50': ibn_resnet50,
'ibn_resnet101': ibn_resnet101,
'ibn_resnet152': ibn_resnet152,
'ibnb_resnet50': ibnb_resnet50,
'ibnb_resnet101': ibnb_resnet101,
'ibnb_resnet152': ibnb_resnet152,
'ibn_resnext50_32x4d': ibn_resnext50_32x4d,
'ibn_resnext101_32x4d': ibn_resnext101_32x4d,
'ibn_resnext101_64x4d': ibn_resnext101_64x4d,
'ibn_densenet121': ibn_densenet121,
'ibn_densenet161': ibn_densenet161,
'ibn_densenet169': ibn_densenet169,
'ibn_densenet201': ibn_densenet201,
'airnet50_1x64d_r2': airnet50_1x64d_r2,
'airnet50_1x64d_r16': airnet50_1x64d_r16,
'airnet101_1x64d_r2': airnet101_1x64d_r2,
'airnext50_32x4d_r2': airnext50_32x4d_r2,
'airnext101_32x4d_r2': airnext101_32x4d_r2,
'airnext101_32x4d_r16': airnext101_32x4d_r16,
'bam_resnet18': bam_resnet18,
'bam_resnet34': bam_resnet34,
'bam_resnet50': bam_resnet50,
'bam_resnet101': bam_resnet101,
'bam_resnet152': bam_resnet152,
'cbam_resnet18': cbam_resnet18,
'cbam_resnet34': cbam_resnet34,
'cbam_resnet50': cbam_resnet50,
'cbam_resnet101': cbam_resnet101,
'cbam_resnet152': cbam_resnet152,
'resattnet56': resattnet56,
'resattnet92': resattnet92,
'resattnet128': resattnet128,
'resattnet164': resattnet164,
'resattnet200': resattnet200,
'resattnet236': resattnet236,
'resattnet452': resattnet452,
'sknet50': sknet50,
'sknet101': sknet101,
'sknet152': sknet152,
'diaresnet10': diaresnet10,
'diaresnet12': diaresnet12,
'diaresnet14': diaresnet14,
'diaresnetbc14b': diaresnetbc14b,
'diaresnet16': diaresnet16,
'diaresnet18': diaresnet18,
'diaresnet26': diaresnet26,
'diaresnetbc26b': diaresnetbc26b,
'diaresnet34': diaresnet34,
'diaresnetbc38b': diaresnetbc38b,
'diaresnet50': diaresnet50,
'diaresnet50b': diaresnet50b,
'diaresnet101': diaresnet101,
'diaresnet101b': diaresnet101b,
'diaresnet152': diaresnet152,
'diaresnet152b': diaresnet152b,
'diaresnet200': diaresnet200,
'diaresnet200b': diaresnet200b,
'diapreresnet10': diapreresnet10,
'diapreresnet12': diapreresnet12,
'diapreresnet14': diapreresnet14,
'diapreresnetbc14b': diapreresnetbc14b,
'diapreresnet16': diapreresnet16,
'diapreresnet18': diapreresnet18,
'diapreresnet26': diapreresnet26,
'diapreresnetbc26b': diapreresnetbc26b,
'diapreresnet34': diapreresnet34,
'diapreresnetbc38b': diapreresnetbc38b,
'diapreresnet50': diapreresnet50,
'diapreresnet50b': diapreresnet50b,
'diapreresnet101': diapreresnet101,
'diapreresnet101b': diapreresnet101b,
'diapreresnet152': diapreresnet152,
'diapreresnet152b': diapreresnet152b,
'diapreresnet200': diapreresnet200,
'diapreresnet200b': diapreresnet200b,
'diapreresnet269b': diapreresnet269b,
'pyramidnet101_a360': pyramidnet101_a360,
'diracnet18v2': diracnet18v2,
'diracnet34v2': diracnet34v2,
'sharesnet18': sharesnet18,
'sharesnet34': sharesnet34,
'sharesnet50': sharesnet50,
'sharesnet50b': sharesnet50b,
'sharesnet101': sharesnet101,
'sharesnet101b': sharesnet101b,
'sharesnet152': sharesnet152,
'sharesnet152b': sharesnet152b,
'crunet56': crunet56,
'crunet116': crunet116,
'crunet56b': crunet56b,
'crunet116b': crunet116b,
'densenet121': densenet121,
'densenet161': densenet161,
'densenet169': densenet169,
'densenet201': densenet201,
'condensenet74_c4_g4': condensenet74_c4_g4,
'condensenet74_c8_g8': condensenet74_c8_g8,
'sparsenet121': sparsenet121,
'sparsenet161': sparsenet161,
'sparsenet169': sparsenet169,
'sparsenet201': sparsenet201,
'sparsenet264': sparsenet264,
'peleenet': peleenet,
'wrn50_2': wrn50_2,
'drnc26': drnc26,
'drnc42': drnc42,
'drnc58': drnc58,
'drnd22': drnd22,
'drnd38': drnd38,
'drnd54': drnd54,
'drnd105': drnd105,
'dpn68': dpn68,
'dpn68b': dpn68b,
'dpn98': dpn98,
'dpn107': dpn107,
'dpn131': dpn131,
'darknet_ref': darknet_ref,
'darknet_tiny': darknet_tiny,
'darknet19': darknet19,
'darknet53': darknet53,
'channelnet': channelnet,
'irevnet301': irevnet301,
'bagnet9': bagnet9,
'bagnet17': bagnet17,
'bagnet33': bagnet33,
'dla34': dla34,
'dla46c': dla46c,
'dla46xc': dla46xc,
'dla60': dla60,
'dla60x': dla60x,
'dla60xc': dla60xc,
'dla102': dla102,
'dla102x': dla102x,
'dla102x2': dla102x2,
'dla169': dla169,
'msdnet22': msdnet22,
'fishnet99': fishnet99,
'fishnet150': fishnet150,
'espnetv2_wd2': espnetv2_wd2,
'espnetv2_w1': espnetv2_w1,
'espnetv2_w5d4': espnetv2_w5d4,
'espnetv2_w3d2': espnetv2_w3d2,
'espnetv2_w2': espnetv2_w2,
'hrnet_w18_small_v1': hrnet_w18_small_v1,
'hrnet_w18_small_v2': hrnet_w18_small_v2,
'hrnetv2_w18': hrnetv2_w18,
'hrnetv2_w30': hrnetv2_w30,
'hrnetv2_w32': hrnetv2_w32,
'hrnetv2_w40': hrnetv2_w40,
'hrnetv2_w44': hrnetv2_w44,
'hrnetv2_w48': hrnetv2_w48,
'hrnetv2_w64': hrnetv2_w64,
'vovnet27s': vovnet27s,
'vovnet39': vovnet39,
'vovnet57': vovnet57,
'selecsls42': selecsls42,
'selecsls42b': selecsls42b,
'selecsls60': selecsls60,
'selecsls60b': selecsls60b,
'selecsls84': selecsls84,
'hardnet39ds': hardnet39ds,
'hardnet68ds': hardnet68ds,
'hardnet68': hardnet68,
'hardnet85': hardnet85,
'xdensenet121_2': xdensenet121_2,
'xdensenet161_2': xdensenet161_2,
'xdensenet169_2': xdensenet169_2,
'xdensenet201_2': xdensenet201_2,
'squeezenet_v1_0': squeezenet_v1_0,
'squeezenet_v1_1': squeezenet_v1_1,
'squeezeresnet_v1_0': squeezeresnet_v1_0,
'squeezeresnet_v1_1': squeezeresnet_v1_1,
'sqnxt23_w1': sqnxt23_w1,
'sqnxt23_w3d2': sqnxt23_w3d2,
'sqnxt23_w2': sqnxt23_w2,
'sqnxt23v5_w1': sqnxt23v5_w1,
'sqnxt23v5_w3d2': sqnxt23v5_w3d2,
'sqnxt23v5_w2': sqnxt23v5_w2,
'shufflenet_g1_w1': shufflenet_g1_w1,
'shufflenet_g2_w1': shufflenet_g2_w1,
'shufflenet_g3_w1': shufflenet_g3_w1,
'shufflenet_g4_w1': shufflenet_g4_w1,
'shufflenet_g8_w1': shufflenet_g8_w1,
'shufflenet_g1_w3d4': shufflenet_g1_w3d4,
'shufflenet_g3_w3d4': shufflenet_g3_w3d4,
'shufflenet_g1_wd2': shufflenet_g1_wd2,
'shufflenet_g3_wd2': shufflenet_g3_wd2,
'shufflenet_g1_wd4': shufflenet_g1_wd4,
'shufflenet_g3_wd4': shufflenet_g3_wd4,
'shufflenetv2_wd2': shufflenetv2_wd2,
'shufflenetv2_w1': shufflenetv2_w1,
'shufflenetv2_w3d2': shufflenetv2_w3d2,
'shufflenetv2_w2': shufflenetv2_w2,
'shufflenetv2b_wd2': shufflenetv2b_wd2,
'shufflenetv2b_w1': shufflenetv2b_w1,
'shufflenetv2b_w3d2': shufflenetv2b_w3d2,
'shufflenetv2b_w2': shufflenetv2b_w2,
'menet108_8x1_g3': menet108_8x1_g3,
'menet128_8x1_g4': menet128_8x1_g4,
'menet160_8x1_g8': menet160_8x1_g8,
'menet228_12x1_g3': menet228_12x1_g3,
'menet256_12x1_g4': menet256_12x1_g4,
'menet348_12x1_g3': menet348_12x1_g3,
'menet352_12x1_g8': menet352_12x1_g8,
'menet456_24x1_g3': menet456_24x1_g3,
'mobilenet_w1': mobilenet_w1,
'mobilenet_w3d4': mobilenet_w3d4,
'mobilenet_wd2': mobilenet_wd2,
'mobilenet_wd4': mobilenet_wd4,
'mobilenetb_w1': mobilenetb_w1,
'mobilenetb_w3d4': mobilenetb_w3d4,
'mobilenetb_wd2': mobilenetb_wd2,
'mobilenetb_wd4': mobilenetb_wd4,
'fdmobilenet_w1': fdmobilenet_w1,
'fdmobilenet_w3d4': fdmobilenet_w3d4,
'fdmobilenet_wd2': fdmobilenet_wd2,
'fdmobilenet_wd4': fdmobilenet_wd4,
'mobilenetv2_w1': mobilenetv2_w1,
'mobilenetv2_w3d4': mobilenetv2_w3d4,
'mobilenetv2_wd2': mobilenetv2_wd2,
'mobilenetv2_wd4': mobilenetv2_wd4,
'mobilenetv2b_w1': mobilenetv2b_w1,
'mobilenetv2b_w3d4': mobilenetv2b_w3d4,
'mobilenetv2b_wd2': mobilenetv2b_wd2,
'mobilenetv2b_wd4': mobilenetv2b_wd4,
'mobilenetv3_small_w7d20': mobilenetv3_small_w7d20,
'mobilenetv3_small_wd2': mobilenetv3_small_wd2,
'mobilenetv3_small_w3d4': mobilenetv3_small_w3d4,
'mobilenetv3_small_w1': mobilenetv3_small_w1,
'mobilenetv3_small_w5d4': mobilenetv3_small_w5d4,
'mobilenetv3_large_w7d20': mobilenetv3_large_w7d20,
'mobilenetv3_large_wd2': mobilenetv3_large_wd2,
'mobilenetv3_large_w3d4': mobilenetv3_large_w3d4,
'mobilenetv3_large_w1': mobilenetv3_large_w1,
'mobilenetv3_large_w5d4': mobilenetv3_large_w5d4,
'igcv3_w1': igcv3_w1,
'igcv3_w3d4': igcv3_w3d4,
'igcv3_wd2': igcv3_wd2,
'igcv3_wd4': igcv3_wd4,
'ghostnet': ghostnet,
'mnasnet_b1': mnasnet_b1,
'mnasnet_a1': mnasnet_a1,
'mnasnet_small': mnasnet_small,
'darts': darts,
'proxylessnas_cpu': proxylessnas_cpu,
'proxylessnas_gpu': proxylessnas_gpu,
'proxylessnas_mobile': proxylessnas_mobile,
'proxylessnas_mobile14': proxylessnas_mobile14,
'fbnet_cb': fbnet_cb,
'xception': xception,
'inceptionv3': inceptionv3,
'inceptionv4': inceptionv4,
'inceptionresnetv2': inceptionresnetv2,
'polynet': polynet,
'nasnet_4a1056': nasnet_4a1056,
'nasnet_6a4032': nasnet_6a4032,
'pnasnet5large': pnasnet5large,
'spnasnet': spnasnet,
'efficientnet_b0': efficientnet_b0,
'efficientnet_b1': efficientnet_b1,
'efficientnet_b2': efficientnet_b2,
'efficientnet_b3': efficientnet_b3,
'efficientnet_b4': efficientnet_b4,
'efficientnet_b5': efficientnet_b5,
'efficientnet_b6': efficientnet_b6,
'efficientnet_b7': efficientnet_b7,
'efficientnet_b8': efficientnet_b8,
'efficientnet_b0b': efficientnet_b0b,
'efficientnet_b1b': efficientnet_b1b,
'efficientnet_b2b': efficientnet_b2b,
'efficientnet_b3b': efficientnet_b3b,
'efficientnet_b4b': efficientnet_b4b,
'efficientnet_b5b': efficientnet_b5b,
'efficientnet_b6b': efficientnet_b6b,
'efficientnet_b7b': efficientnet_b7b,
'efficientnet_b0c': efficientnet_b0c,
'efficientnet_b1c': efficientnet_b1c,
'efficientnet_b2c': efficientnet_b2c,
'efficientnet_b3c': efficientnet_b3c,
'efficientnet_b4c': efficientnet_b4c,
'efficientnet_b5c': efficientnet_b5c,
'efficientnet_b6c': efficientnet_b6c,
'efficientnet_b7c': efficientnet_b7c,
'efficientnet_b8c': efficientnet_b8c,
'efficientnet_edge_small_b': efficientnet_edge_small_b,
'efficientnet_edge_medium_b': efficientnet_edge_medium_b,
'efficientnet_edge_large_b': efficientnet_edge_large_b,
'mixnet_s': mixnet_s,
'mixnet_m': mixnet_m,
'mixnet_l': mixnet_l,
'nin_cifar10': nin_cifar10,
'nin_cifar100': nin_cifar100,
'nin_svhn': nin_svhn,
'nonresnet20_cifar10': nonresnet20_cifar10, # LIV
'nonresnet56_cifar10': nonresnet56_cifar10, # LIV
'nonresnet110_cifar10': nonresnet110_cifar10, # LIV
'nonresnet20_cifar100': nonresnet20_cifar100, # LIV
'nonresnet20_svhn': nonresnet20_svhn, # LIV
'resnet20_cifar10': resnet20_cifar10,
'resnet20_cifar100': resnet20_cifar100,
'resnet20_svhn': resnet20_svhn,
'resnet56_cifar10': resnet56_cifar10,
'resnet56_cifar100': resnet56_cifar100,
'resnet56_svhn': resnet56_svhn,
'resnet110_cifar10': resnet110_cifar10,
'resnet110_cifar100': resnet110_cifar100,
'resnet110_svhn': resnet110_svhn,
'resnet164bn_cifar10': resnet164bn_cifar10,
'resnet164bn_cifar100': resnet164bn_cifar100,
'resnet164bn_svhn': resnet164bn_svhn,
'resnet272bn_cifar10': resnet272bn_cifar10,
'resnet272bn_cifar100': resnet272bn_cifar100,
'resnet272bn_svhn': resnet272bn_svhn,
'resnet542bn_cifar10': resnet542bn_cifar10,
'resnet542bn_cifar100': resnet542bn_cifar100,
'resnet542bn_svhn': resnet542bn_svhn,
'resnet1001_cifar10': resnet1001_cifar10,
'resnet1001_cifar100': resnet1001_cifar100,
'resnet1001_svhn': resnet1001_svhn,
'resnet1202_cifar10': resnet1202_cifar10,
'resnet1202_cifar100': resnet1202_cifar100,
'resnet1202_svhn': resnet1202_svhn,
'preresnet20_cifar10': preresnet20_cifar10,
'preresnet20_cifar100': preresnet20_cifar100,
'preresnet20_svhn': preresnet20_svhn,
'preresnet56_cifar10': preresnet56_cifar10,
'preresnet56_cifar100': preresnet56_cifar100,
'preresnet56_svhn': preresnet56_svhn,
'preresnet110_cifar10': preresnet110_cifar10,
'preresnet110_cifar100': preresnet110_cifar100,
'preresnet110_svhn': preresnet110_svhn,
'preresnet164bn_cifar10': preresnet164bn_cifar10,
'preresnet164bn_cifar100': preresnet164bn_cifar100,
'preresnet164bn_svhn': preresnet164bn_svhn,
'preresnet272bn_cifar10': preresnet272bn_cifar10,
'preresnet272bn_cifar100': preresnet272bn_cifar100,
'preresnet272bn_svhn': preresnet272bn_svhn,
'preresnet542bn_cifar10': preresnet542bn_cifar10,
'preresnet542bn_cifar100': preresnet542bn_cifar100,
'preresnet542bn_svhn': preresnet542bn_svhn,
'preresnet1001_cifar10': preresnet1001_cifar10,
'preresnet1001_cifar100': preresnet1001_cifar100,
'preresnet1001_svhn': preresnet1001_svhn,
'preresnet1202_cifar10': preresnet1202_cifar10,
'preresnet1202_cifar100': preresnet1202_cifar100,
'preresnet1202_svhn': preresnet1202_svhn,
'resnext20_1x64d_cifar10': resnext20_1x64d_cifar10,
'resnext20_1x64d_cifar100': resnext20_1x64d_cifar100,
'resnext20_1x64d_svhn': resnext20_1x64d_svhn,
'resnext20_2x32d_cifar10': resnext20_2x32d_cifar10,
'resnext20_2x32d_cifar100': resnext20_2x32d_cifar100,
'resnext20_2x32d_svhn': resnext20_2x32d_svhn,
'resnext20_2x64d_cifar10': resnext20_2x64d_cifar10,
'resnext20_2x64d_cifar100': resnext20_2x64d_cifar100,
'resnext20_2x64d_svhn': resnext20_2x64d_svhn,
'resnext20_4x16d_cifar10': resnext20_4x16d_cifar10,
'resnext20_4x16d_cifar100': resnext20_4x16d_cifar100,
'resnext20_4x16d_svhn': resnext20_4x16d_svhn,
'resnext20_4x32d_cifar10': resnext20_4x32d_cifar10,
'resnext20_4x32d_cifar100': resnext20_4x32d_cifar100,
'resnext20_4x32d_svhn': resnext20_4x32d_svhn,
'resnext20_8x8d_cifar10': resnext20_8x8d_cifar10,
'resnext20_8x8d_cifar100': resnext20_8x8d_cifar100,
'resnext20_8x8d_svhn': resnext20_8x8d_svhn,
'resnext20_8x16d_cifar10': resnext20_8x16d_cifar10,
'resnext20_8x16d_cifar100': resnext20_8x16d_cifar100,
'resnext20_8x16d_svhn': resnext20_8x16d_svhn,
'resnext20_16x4d_cifar10': resnext20_16x4d_cifar10,
'resnext20_16x4d_cifar100': resnext20_16x4d_cifar100,
'resnext20_16x4d_svhn': resnext20_16x4d_svhn,
'resnext20_16x8d_cifar10': resnext20_16x8d_cifar10,
'resnext20_16x8d_cifar100': resnext20_16x8d_cifar100,
'resnext20_16x8d_svhn': resnext20_16x8d_svhn,
'resnext20_32x2d_cifar10': resnext20_32x2d_cifar10,
'resnext20_32x2d_cifar100': resnext20_32x2d_cifar100,
'resnext20_32x2d_svhn': resnext20_32x2d_svhn,
'resnext20_32x4d_cifar10': resnext20_32x4d_cifar10,
'resnext20_32x4d_cifar100': resnext20_32x4d_cifar100,
'resnext20_32x4d_svhn': resnext20_32x4d_svhn,
'resnext20_64x1d_cifar10': resnext20_64x1d_cifar10,
'resnext20_64x1d_cifar100': resnext20_64x1d_cifar100,
'resnext20_64x1d_svhn': resnext20_64x1d_svhn,
'resnext20_64x2d_cifar10': resnext20_64x2d_cifar10,
'resnext20_64x2d_cifar100': resnext20_64x2d_cifar100,
'resnext20_64x2d_svhn': resnext20_64x2d_svhn,
'resnext29_32x4d_cifar10': resnext29_32x4d_cifar10,
'resnext29_32x4d_cifar100': resnext29_32x4d_cifar100,
'resnext29_32x4d_svhn': resnext29_32x4d_svhn,
'resnext29_16x64d_cifar10': resnext29_16x64d_cifar10,
'resnext29_16x64d_cifar100': resnext29_16x64d_cifar100,
'resnext29_16x64d_svhn': resnext29_16x64d_svhn,
'resnext56_1x64d_cifar10': resnext56_1x64d_cifar10,
'resnext56_1x64d_cifar100': resnext56_1x64d_cifar100,
'resnext56_1x64d_svhn': resnext56_1x64d_svhn,
'resnext56_2x32d_cifar10': resnext56_2x32d_cifar10,
'resnext56_2x32d_cifar100': resnext56_2x32d_cifar100,
'resnext56_2x32d_svhn': resnext56_2x32d_svhn,
'resnext56_4x16d_cifar10': resnext56_4x16d_cifar10,
'resnext56_4x16d_cifar100': resnext56_4x16d_cifar100,
'resnext56_4x16d_svhn': resnext56_4x16d_svhn,
'resnext56_8x8d_cifar10': resnext56_8x8d_cifar10,
'resnext56_8x8d_cifar100': resnext56_8x8d_cifar100,
'resnext56_8x8d_svhn': resnext56_8x8d_svhn,
'resnext56_16x4d_cifar10': resnext56_16x4d_cifar10,
'resnext56_16x4d_cifar100': resnext56_16x4d_cifar100,
'resnext56_16x4d_svhn': resnext56_16x4d_svhn,
'resnext56_32x2d_cifar10': resnext56_32x2d_cifar10,
'resnext56_32x2d_cifar100': resnext56_32x2d_cifar100,
'resnext56_32x2d_svhn': resnext56_32x2d_svhn,
'resnext56_64x1d_cifar10': resnext56_64x1d_cifar10,
'resnext56_64x1d_cifar100': resnext56_64x1d_cifar100,
'resnext56_64x1d_svhn': resnext56_64x1d_svhn,
'resnext272_1x64d_cifar10': resnext272_1x64d_cifar10,
'resnext272_1x64d_cifar100': resnext272_1x64d_cifar100,
'resnext272_1x64d_svhn': resnext272_1x64d_svhn,
'resnext272_2x32d_cifar10': resnext272_2x32d_cifar10,
'resnext272_2x32d_cifar100': resnext272_2x32d_cifar100,
'resnext272_2x32d_svhn': resnext272_2x32d_svhn,
'seresnet20_cifar10': seresnet20_cifar10,
'seresnet20_cifar100': seresnet20_cifar100,
'seresnet20_svhn': seresnet20_svhn,
'seresnet56_cifar10': seresnet56_cifar10,
'seresnet56_cifar100': seresnet56_cifar100,
'seresnet56_svhn': seresnet56_svhn,
'seresnet110_cifar10': seresnet110_cifar10,
'seresnet110_cifar100': seresnet110_cifar100,
'seresnet110_svhn': seresnet110_svhn,
'seresnet164bn_cifar10': seresnet164bn_cifar10,
'seresnet164bn_cifar100': seresnet164bn_cifar100,
'seresnet164bn_svhn': seresnet164bn_svhn,
'seresnet272bn_cifar10': seresnet272bn_cifar10,
'seresnet272bn_cifar100': seresnet272bn_cifar100,
'seresnet272bn_svhn': seresnet272bn_svhn,
'seresnet542bn_cifar10': seresnet542bn_cifar10,
'seresnet542bn_cifar100': seresnet542bn_cifar100,
'seresnet542bn_svhn': seresnet542bn_svhn,
'seresnet1001_cifar10': seresnet1001_cifar10,
'seresnet1001_cifar100': seresnet1001_cifar100,
'seresnet1001_svhn': seresnet1001_svhn,
'seresnet1202_cifar10': seresnet1202_cifar10,
'seresnet1202_cifar100': seresnet1202_cifar100,
'seresnet1202_svhn': seresnet1202_svhn,
'sepreresnet20_cifar10': sepreresnet20_cifar10,
'sepreresnet20_cifar100': sepreresnet20_cifar100,
'sepreresnet20_svhn': sepreresnet20_svhn,
'sepreresnet56_cifar10': sepreresnet56_cifar10,
'sepreresnet56_cifar100': sepreresnet56_cifar100,
'sepreresnet56_svhn': sepreresnet56_svhn,
'sepreresnet110_cifar10': sepreresnet110_cifar10,
'sepreresnet110_cifar100': sepreresnet110_cifar100,
'sepreresnet110_svhn': sepreresnet110_svhn,
'sepreresnet164bn_cifar10': sepreresnet164bn_cifar10,
'sepreresnet164bn_cifar100': sepreresnet164bn_cifar100,
'sepreresnet164bn_svhn': sepreresnet164bn_svhn,
'sepreresnet272bn_cifar10': sepreresnet272bn_cifar10,
'sepreresnet272bn_cifar100': sepreresnet272bn_cifar100,
'sepreresnet272bn_svhn': sepreresnet272bn_svhn,
'sepreresnet542bn_cifar10': sepreresnet542bn_cifar10,
'sepreresnet542bn_cifar100': sepreresnet542bn_cifar100,
'sepreresnet542bn_svhn': sepreresnet542bn_svhn,
'sepreresnet1001_cifar10': sepreresnet1001_cifar10,
'sepreresnet1001_cifar100': sepreresnet1001_cifar100,
'sepreresnet1001_svhn': sepreresnet1001_svhn,
'sepreresnet1202_cifar10': sepreresnet1202_cifar10,
'sepreresnet1202_cifar100': sepreresnet1202_cifar100,
'sepreresnet1202_svhn': sepreresnet1202_svhn,
'pyramidnet110_a48_cifar10': pyramidnet110_a48_cifar10,
'pyramidnet110_a48_cifar100': pyramidnet110_a48_cifar100,
'pyramidnet110_a48_svhn': pyramidnet110_a48_svhn,
'pyramidnet110_a84_cifar10': pyramidnet110_a84_cifar10,
'pyramidnet110_a84_cifar100': pyramidnet110_a84_cifar100,
'pyramidnet110_a84_svhn': pyramidnet110_a84_svhn,
'pyramidnet110_a270_cifar10': pyramidnet110_a270_cifar10,
'pyramidnet110_a270_cifar100': pyramidnet110_a270_cifar100,
'pyramidnet110_a270_svhn': pyramidnet110_a270_svhn,
'pyramidnet164_a270_bn_cifar10': pyramidnet164_a270_bn_cifar10,
'pyramidnet164_a270_bn_cifar100': pyramidnet164_a270_bn_cifar100,
'pyramidnet164_a270_bn_svhn': pyramidnet164_a270_bn_svhn,
'pyramidnet200_a240_bn_cifar10': pyramidnet200_a240_bn_cifar10,
'pyramidnet200_a240_bn_cifar100': pyramidnet200_a240_bn_cifar100,
'pyramidnet200_a240_bn_svhn': pyramidnet200_a240_bn_svhn,
'pyramidnet236_a220_bn_cifar10': pyramidnet236_a220_bn_cifar10,
'pyramidnet236_a220_bn_cifar100': pyramidnet236_a220_bn_cifar100,
'pyramidnet236_a220_bn_svhn': pyramidnet236_a220_bn_svhn,
'pyramidnet272_a200_bn_cifar10': pyramidnet272_a200_bn_cifar10,
'pyramidnet272_a200_bn_cifar100': pyramidnet272_a200_bn_cifar100,
'pyramidnet272_a200_bn_svhn': pyramidnet272_a200_bn_svhn,
'densenet40_k12_cifar10': densenet40_k12_cifar10,
'densenet40_k12_cifar100': densenet40_k12_cifar100,
'densenet40_k12_svhn': densenet40_k12_svhn,
'densenet40_k12_bc_cifar10': densenet40_k12_bc_cifar10,
'densenet40_k12_bc_cifar100': densenet40_k12_bc_cifar100,
'densenet40_k12_bc_svhn': densenet40_k12_bc_svhn,
'densenet40_k24_bc_cifar10': densenet40_k24_bc_cifar10,
'densenet40_k24_bc_cifar100': densenet40_k24_bc_cifar100,
'densenet40_k24_bc_svhn': densenet40_k24_bc_svhn,
'densenet40_k36_bc_cifar10': densenet40_k36_bc_cifar10,
'densenet40_k36_bc_cifar100': densenet40_k36_bc_cifar100,
'densenet40_k36_bc_svhn': densenet40_k36_bc_svhn,
'densenet100_k12_cifar10': densenet100_k12_cifar10,
'densenet100_k12_cifar100': densenet100_k12_cifar100,
'densenet100_k12_svhn': densenet100_k12_svhn,
'densenet100_k24_cifar10': densenet100_k24_cifar10,
'densenet100_k24_cifar100': densenet100_k24_cifar100,
'densenet100_k24_svhn': densenet100_k24_svhn,
'densenet100_k12_bc_cifar10': densenet100_k12_bc_cifar10,
'densenet100_k12_bc_cifar100': densenet100_k12_bc_cifar100,
'densenet100_k12_bc_svhn': densenet100_k12_bc_svhn,
'densenet190_k40_bc_cifar10': densenet190_k40_bc_cifar10,
'densenet190_k40_bc_cifar100': densenet190_k40_bc_cifar100,
'densenet190_k40_bc_svhn': densenet190_k40_bc_svhn,
'densenet250_k24_bc_cifar10': densenet250_k24_bc_cifar10,
'densenet250_k24_bc_cifar100': densenet250_k24_bc_cifar100,
'densenet250_k24_bc_svhn': densenet250_k24_bc_svhn,
'xdensenet40_2_k24_bc_cifar10': xdensenet40_2_k24_bc_cifar10,
'xdensenet40_2_k24_bc_cifar100': xdensenet40_2_k24_bc_cifar100,
'xdensenet40_2_k24_bc_svhn': xdensenet40_2_k24_bc_svhn,
'xdensenet40_2_k36_bc_cifar10': xdensenet40_2_k36_bc_cifar10,
'xdensenet40_2_k36_bc_cifar100': xdensenet40_2_k36_bc_cifar100,
'xdensenet40_2_k36_bc_svhn': xdensenet40_2_k36_bc_svhn,
'wrn16_10_cifar10': wrn16_10_cifar10,
'wrn16_10_cifar100': wrn16_10_cifar100,
'wrn16_10_svhn': wrn16_10_svhn,
'wrn28_10_cifar10': wrn28_10_cifar10,
'wrn28_10_cifar100': wrn28_10_cifar100,
'wrn28_10_svhn': wrn28_10_svhn,
'wrn40_8_cifar10': wrn40_8_cifar10,
'wrn40_8_cifar100': wrn40_8_cifar100,
'wrn40_8_svhn': wrn40_8_svhn,
'wrn20_10_1bit_cifar10': wrn20_10_1bit_cifar10,
'wrn20_10_1bit_cifar100': wrn20_10_1bit_cifar100,
'wrn20_10_1bit_svhn': wrn20_10_1bit_svhn,
'wrn20_10_32bit_cifar10': wrn20_10_32bit_cifar10,
'wrn20_10_32bit_cifar100': wrn20_10_32bit_cifar100,
'wrn20_10_32bit_svhn': wrn20_10_32bit_svhn,
'ror3_56_cifar10': ror3_56_cifar10,
'ror3_56_cifar100': ror3_56_cifar100,
'ror3_56_svhn': ror3_56_svhn,
'ror3_110_cifar10': ror3_110_cifar10,
'ror3_110_cifar100': ror3_110_cifar100,
'ror3_110_svhn': ror3_110_svhn,
'ror3_164_cifar10': ror3_164_cifar10,
'ror3_164_cifar100': ror3_164_cifar100,
'ror3_164_svhn': ror3_164_svhn,
'rir_cifar10': rir_cifar10,
'rir_cifar100': rir_cifar100,
'rir_svhn': rir_svhn,
'resdropresnet20_cifar10': resdropresnet20_cifar10,
'resdropresnet20_cifar100': resdropresnet20_cifar100,
'resdropresnet20_svhn': resdropresnet20_svhn,
'shakeshakeresnet20_2x16d_cifar10': shakeshakeresnet20_2x16d_cifar10,
'shakeshakeresnet20_2x16d_cifar100': shakeshakeresnet20_2x16d_cifar100,
'shakeshakeresnet20_2x16d_svhn': shakeshakeresnet20_2x16d_svhn,
'shakeshakeresnet26_2x32d_cifar10': shakeshakeresnet26_2x32d_cifar10,
'shakeshakeresnet26_2x32d_cifar100': shakeshakeresnet26_2x32d_cifar100,
'shakeshakeresnet26_2x32d_svhn': shakeshakeresnet26_2x32d_svhn,
'shakedropresnet20_cifar10': shakedropresnet20_cifar10,
'shakedropresnet20_cifar100': shakedropresnet20_cifar100,
'shakedropresnet20_svhn': shakedropresnet20_svhn,
'fractalnet_cifar10': fractalnet_cifar10,
'fractalnet_cifar100': fractalnet_cifar100,
'diaresnet20_cifar10': diaresnet20_cifar10,
'diaresnet20_cifar100': diaresnet20_cifar100,
'diaresnet20_svhn': diaresnet20_svhn,
'diaresnet56_cifar10': diaresnet56_cifar10,
'diaresnet56_cifar100': diaresnet56_cifar100,
'diaresnet56_svhn': diaresnet56_svhn,
'diaresnet110_cifar10': diaresnet110_cifar10,
'diaresnet110_cifar100': diaresnet110_cifar100,
'diaresnet110_svhn': diaresnet110_svhn,
'diaresnet164bn_cifar10': diaresnet164bn_cifar10,
'diaresnet164bn_cifar100': diaresnet164bn_cifar100,
'diaresnet164bn_svhn': diaresnet164bn_svhn,
'diaresnet1001_cifar10': diaresnet1001_cifar10,
'diaresnet1001_cifar100': diaresnet1001_cifar100,
'diaresnet1001_svhn': diaresnet1001_svhn,
'diaresnet1202_cifar10': diaresnet1202_cifar10,
'diaresnet1202_cifar100': diaresnet1202_cifar100,
'diaresnet1202_svhn': diaresnet1202_svhn,
'diapreresnet20_cifar10': diapreresnet20_cifar10,
'diapreresnet20_cifar100': diapreresnet20_cifar100,
'diapreresnet20_svhn': diapreresnet20_svhn,
'diapreresnet56_cifar10': diapreresnet56_cifar10,
'diapreresnet56_cifar100': diapreresnet56_cifar100,
'diapreresnet56_svhn': diapreresnet56_svhn,
'diapreresnet110_cifar10': diapreresnet110_cifar10,
'diapreresnet110_cifar100': diapreresnet110_cifar100,
'diapreresnet110_svhn': diapreresnet110_svhn,
'diapreresnet164bn_cifar10': diapreresnet164bn_cifar10,
'diapreresnet164bn_cifar100': diapreresnet164bn_cifar100,
'diapreresnet164bn_svhn': diapreresnet164bn_svhn,
'diapreresnet1001_cifar10': diapreresnet1001_cifar10,
'diapreresnet1001_cifar100': diapreresnet1001_cifar100,
'diapreresnet1001_svhn': diapreresnet1001_svhn,
'diapreresnet1202_cifar10': diapreresnet1202_cifar10,
'diapreresnet1202_cifar100': diapreresnet1202_cifar100,
'diapreresnet1202_svhn': diapreresnet1202_svhn,
'isqrtcovresnet18': isqrtcovresnet18,
'isqrtcovresnet34': isqrtcovresnet34,
'isqrtcovresnet50': isqrtcovresnet50,
'isqrtcovresnet50b': isqrtcovresnet50b,
'isqrtcovresnet101': isqrtcovresnet101,
'isqrtcovresnet101b': isqrtcovresnet101b,
'resneta18': resneta18,
'resneta50b': resneta50b,
'resneta101b': resneta101b,
'resneta152b': resneta152b,
'resnetd50b': resnetd50b,
'resnetd101b': resnetd101b,
'resnetd152b': resnetd152b,
'fastseresnet101b': fastseresnet101b,
'octresnet10_ad2': octresnet10_ad2,
'octresnet50b_ad2': octresnet50b_ad2,
'octresnet20_ad2_cifar10': octresnet20_ad2_cifar10,
'octresnet20_ad2_cifar100': octresnet20_ad2_cifar100,
'octresnet20_ad2_svhn': octresnet20_ad2_svhn,
'octresnet56_ad2_cifar10': octresnet56_ad2_cifar10,
'octresnet56_ad2_cifar100': octresnet56_ad2_cifar100,
'octresnet56_ad2_svhn': octresnet56_ad2_svhn,
'res2net50_w14_s8': res2net50_w14_s8,
'res2net50_w26_s8': res2net50_w26_s8,
'resnet10_cub': resnet10_cub,
'resnet12_cub': resnet12_cub,
'resnet14_cub': resnet14_cub,
'resnetbc14b_cub': resnetbc14b_cub,
'resnet16_cub': resnet16_cub,
'resnet18_cub': resnet18_cub,
'resnet26_cub': resnet26_cub,
'resnetbc26b_cub': resnetbc26b_cub,
'resnet34_cub': resnet34_cub,
'resnetbc38b_cub': resnetbc38b_cub,
'resnet50_cub': resnet50_cub,
'resnet50b_cub': resnet50b_cub,
'resnet101_cub': resnet101_cub,
'resnet101b_cub': resnet101b_cub,
'resnet152_cub': resnet152_cub,
'resnet152b_cub': resnet152b_cub,
'resnet200_cub': resnet200_cub,
'resnet200b_cub': resnet200b_cub,
'seresnet10_cub': seresnet10_cub,
'seresnet12_cub': seresnet12_cub,
'seresnet14_cub': seresnet14_cub,
'seresnetbc14b_cub': seresnetbc14b_cub,
'seresnet16_cub': seresnet16_cub,
'seresnet18_cub': seresnet18_cub,
'seresnet26_cub': seresnet26_cub,
'seresnetbc26b_cub': seresnetbc26b_cub,
'seresnet34_cub': seresnet34_cub,
'seresnetbc38b_cub': seresnetbc38b_cub,
'seresnet50_cub': seresnet50_cub,
'seresnet50b_cub': seresnet50b_cub,
'seresnet101_cub': seresnet101_cub,
'seresnet101b_cub': seresnet101b_cub,
'seresnet152_cub': seresnet152_cub,
'seresnet152b_cub': seresnet152b_cub,
'seresnet200_cub': seresnet200_cub,
'seresnet200b_cub': seresnet200b_cub,
'mobilenet_w1_cub': mobilenet_w1_cub,
'mobilenet_w3d4_cub': mobilenet_w3d4_cub,
'mobilenet_wd2_cub': mobilenet_wd2_cub,
'mobilenet_wd4_cub': mobilenet_wd4_cub,
'fdmobilenet_w1_cub': fdmobilenet_w1_cub,
'fdmobilenet_w3d4_cub': fdmobilenet_w3d4_cub,
'fdmobilenet_wd2_cub': fdmobilenet_wd2_cub,
'fdmobilenet_wd4_cub': fdmobilenet_wd4_cub,
'proxylessnas_cpu_cub': proxylessnas_cpu_cub,
'proxylessnas_gpu_cub': proxylessnas_gpu_cub,
'proxylessnas_mobile_cub': proxylessnas_mobile_cub,
'proxylessnas_mobile14_cub': proxylessnas_mobile14_cub,
'ntsnet_cub': ntsnet_cub,
'fcn8sd_resnetd50b_voc': fcn8sd_resnetd50b_voc,
'fcn8sd_resnetd101b_voc': fcn8sd_resnetd101b_voc,
'fcn8sd_resnetd50b_coco': fcn8sd_resnetd50b_coco,
'fcn8sd_resnetd101b_coco': fcn8sd_resnetd101b_coco,
'fcn8sd_resnetd50b_ade20k': fcn8sd_resnetd50b_ade20k,
'fcn8sd_resnetd101b_ade20k': fcn8sd_resnetd101b_ade20k,
'fcn8sd_resnetd50b_cityscapes': fcn8sd_resnetd50b_cityscapes,
'fcn8sd_resnetd101b_cityscapes': fcn8sd_resnetd101b_cityscapes,
'pspnet_resnetd50b_voc': pspnet_resnetd50b_voc,
'pspnet_resnetd101b_voc': pspnet_resnetd101b_voc,
'pspnet_resnetd50b_coco': pspnet_resnetd50b_coco,
'pspnet_resnetd101b_coco': pspnet_resnetd101b_coco,
'pspnet_resnetd50b_ade20k': pspnet_resnetd50b_ade20k,
'pspnet_resnetd101b_ade20k': pspnet_resnetd101b_ade20k,
'pspnet_resnetd50b_cityscapes': pspnet_resnetd50b_cityscapes,
'pspnet_resnetd101b_cityscapes': pspnet_resnetd101b_cityscapes,
'deeplabv3_resnetd50b_voc': deeplabv3_resnetd50b_voc,
'deeplabv3_resnetd101b_voc': deeplabv3_resnetd101b_voc,
'deeplabv3_resnetd152b_voc': deeplabv3_resnetd152b_voc,
'deeplabv3_resnetd50b_coco': deeplabv3_resnetd50b_coco,
'deeplabv3_resnetd101b_coco': deeplabv3_resnetd101b_coco,
'deeplabv3_resnetd152b_coco': deeplabv3_resnetd152b_coco,
'deeplabv3_resnetd50b_ade20k': deeplabv3_resnetd50b_ade20k,
'deeplabv3_resnetd101b_ade20k': deeplabv3_resnetd101b_ade20k,
'deeplabv3_resnetd50b_cityscapes': deeplabv3_resnetd50b_cityscapes,
'deeplabv3_resnetd101b_cityscapes': deeplabv3_resnetd101b_cityscapes,
'icnet_resnetd50b_cityscapes': icnet_resnetd50b_cityscapes,
'sinet_cityscapes': sinet_cityscapes,
'bisenet_resnet18_celebamaskhq': bisenet_resnet18_celebamaskhq,
'superpointnet': superpointnet,
'alphapose_fastseresnet101b_coco': alphapose_fastseresnet101b_coco,
'simplepose_resnet18_coco': simplepose_resnet18_coco,
'simplepose_resnet50b_coco': simplepose_resnet50b_coco,
'simplepose_resnet101b_coco': simplepose_resnet101b_coco,
'simplepose_resnet152b_coco': simplepose_resnet152b_coco,
'simplepose_resneta50b_coco': simplepose_resneta50b_coco,
'simplepose_resneta101b_coco': simplepose_resneta101b_coco,
'simplepose_resneta152b_coco': simplepose_resneta152b_coco,
'simplepose_mobile_resnet18_coco': simplepose_mobile_resnet18_coco,
'simplepose_mobile_resnet50b_coco': simplepose_mobile_resnet50b_coco,
'simplepose_mobile_mobilenet_w1_coco': simplepose_mobile_mobilenet_w1_coco,
'simplepose_mobile_mobilenetv2b_w1_coco': simplepose_mobile_mobilenetv2b_w1_coco,
'simplepose_mobile_mobilenetv3_small_w1_coco': simplepose_mobile_mobilenetv3_small_w1_coco,
'simplepose_mobile_mobilenetv3_large_w1_coco': simplepose_mobile_mobilenetv3_large_w1_coco,
'lwopenpose2d_mobilenet_cmupan_coco': lwopenpose2d_mobilenet_cmupan_coco,
'lwopenpose3d_mobilenet_cmupan_coco': lwopenpose3d_mobilenet_cmupan_coco,
'ibppose_coco': ibppose_coco,
'centernet_resnet18_voc': centernet_resnet18_voc,
'centernet_resnet18_coco': centernet_resnet18_coco,
'centernet_resnet50b_voc': centernet_resnet50b_voc,
'centernet_resnet50b_coco': centernet_resnet50b_coco,
'centernet_resnet101b_voc': centernet_resnet101b_voc,
'centernet_resnet101b_coco': centernet_resnet101b_coco,
'lffd20x5s320v2_widerface': lffd20x5s320v2_widerface,
'lffd25x8s560v1_widerface': lffd25x8s560v1_widerface,
'visemenet20': visemenet20,
'voca8flame': voca8flame,
'nvpattexp116bazel76': nvpattexp116bazel76,
# 'oth_simple_pose_resnet18_v1b': oth_simple_pose_resnet18_v1b,
# 'oth_simple_pose_resnet50_v1b': oth_simple_pose_resnet50_v1b,
# 'oth_simple_pose_resnet101_v1b': oth_simple_pose_resnet101_v1b,
# 'oth_simple_pose_resnet152_v1b': oth_simple_pose_resnet152_v1b,
# 'oth_simple_pose_resnet50_v1d': oth_simple_pose_resnet50_v1d,
# 'oth_simple_pose_resnet101_v1d': oth_simple_pose_resnet101_v1d,
# 'oth_simple_pose_resnet152_v1d': oth_simple_pose_resnet152_v1d,
#
# 'oth_mobile_pose_resnet18_v1b': oth_mobile_pose_resnet18_v1b,
# 'oth_mobile_pose_resnet50_v1b': oth_mobile_pose_resnet50_v1b,
# 'oth_mobile_pose_mobilenet1_0': oth_mobile_pose_mobilenet1_0,
# 'oth_mobile_pose_mobilenetv2_1_0': oth_mobile_pose_mobilenetv2_1_0,
# 'oth_mobile_pose_mobilenetv3_small': oth_mobile_pose_mobilenetv3_small,
# 'oth_mobile_pose_mobilenetv3_large': oth_mobile_pose_mobilenetv3_large,
#
# 'oth_alpha_pose_resnet101_v1b_coco': oth_alpha_pose_resnet101_v1b_coco,
# 'oth_resnet50_v1d': oth_resnet50_v1d,
# 'oth_resnet101_v1d': oth_resnet101_v1d,
# 'oth_resnet152_v1d': oth_resnet152_v1d,
# 'oth_mobilenet_v2_1_0': oth_mobilenet_v2_1_0,
# 'oth_mobilenet_v2_0_75': oth_mobilenet_v2_0_75,
# 'oth_mobilenet_v2_0_5': oth_mobilenet_v2_0_5,
# 'oth_mobilenet_v2_0_25': oth_mobilenet_v2_0_25,
# 'oth_icnet_resnet50_citys': oth_icnet_resnet50_citys,
# 'center_net_resnet18_v1b_voc': center_net_resnet18_v1b_voc,
# 'center_net_resnet18_v1b_coco': center_net_resnet18_v1b_coco,
# 'center_net_resnet50_v1b_voc': center_net_resnet50_v1b_voc,
# 'center_net_resnet50_v1b_coco': center_net_resnet50_v1b_coco,
# 'center_net_resnet101_v1b_voc': center_net_resnet101_v1b_voc,
# 'center_net_resnet101_v1b_coco': center_net_resnet101_v1b_coco,
# 'oth_resnest14': oth_resnest14,
# 'oth_resnest26': oth_resnest26,
# 'oth_resnest50': oth_resnest50,
# 'oth_resnest101': oth_resnest101,
# 'oth_resnest200': oth_resnest200,
# 'oth_resnest269': oth_resnest269,
'danet_resnetd50b_cityscapes': danet_resnetd50b_cityscapes,
'danet_resnetd101b_cityscapes': danet_resnetd101b_cityscapes,
'oth_danet_resnet50_citys': oth_danet_resnet50_citys,
'oth_danet_resnet101_citys': oth_danet_resnet101_citys,
}
def get_model(name, **kwargs):
    """
    Get supported model.

    Parameters
    ----------
    name : str
        Name of model (case-insensitive).
    **kwargs : dict
        Keyword arguments forwarded to the model constructor.

    Returns
    -------
    The instantiated network.

    Raises
    ------
    ValueError
        If `name` is not a key of the `_models` registry.
    """
    model_name = name.lower()
    if model_name in _models:
        return _models[model_name](**kwargs)
    raise ValueError("Unsupported model: {}".format(model_name))
| 37.408403 | 95 | 0.769678 |
069494ee6eb57825203331b3f8750153e4ece365
| 2,575 |
py
|
Python
|
babeval/agreement_across_RC/score.py
|
vilavivida/Babeval
|
04d3d13635b352479779bb64d79cf4a7febe098c
|
[
"MIT"
] | 1 |
2020-04-30T21:54:32.000Z
|
2020-04-30T21:54:32.000Z
|
babeval/agreement_across_RC/score.py
|
vilavivida/Babeval
|
04d3d13635b352479779bb64d79cf4a7febe098c
|
[
"MIT"
] | null | null | null |
babeval/agreement_across_RC/score.py
|
vilavivida/Babeval
|
04d3d13635b352479779bb64d79cf4a7febe098c
|
[
"MIT"
] | null | null | null |
from pathlib import Path
from typing import List
# Copula forms the model may produce for the masked slot, split by number.
copulas_singular = ["is", "'s", "was"]
copulas_plural = ["are", "'re", "were"]
# The two sentence templates this task distinguishes (see categorize_by_template).
templates = [
    'object-relative',
    'subject-relative',
]
# Scoring category labels; the embedded newlines are part of the keys
# (presumably so the labels wrap nicely on plot axes — confirm with caller).
prediction_categories = (
    "non-start\nword-piece\nor\n[UNK]",
    "correct\ncopula",
    "false\ncopula",
    "other",
)
# load word lists
nouns_singular = (Path(__file__).parent / 'word_lists' / 'nouns_singular_annotator2.txt').open().read().split("\n")
nouns_plural = (Path(__file__).parent / 'word_lists' / 'nouns_plural_annotator2.txt').open().read().split("\n")
# check for list overlap
for w in nouns_singular:
    assert w not in nouns_plural
for w in nouns_plural:
    assert w not in nouns_singular
# Extra tokens treated as singular; '[NAME]' is a proper-name placeholder.
nouns_singular += ['one', '[NAME]']
# Sets give O(1) membership tests during scoring.
nouns_plural = set(nouns_plural)
nouns_singular = set(nouns_singular)
def categorize_by_template(sentences_in, sentences_out: List[List[str]]):
    """Group output sentences by sentence template.

    A sentence whose 5th token is 'like'/'likes' belongs to the first
    template ('object-relative'); every other sentence belongs to the
    second ('subject-relative'). Also records, per template, the position
    of the '[MASK]' token taken from the first matching input sentence.

    Returns a pair (template -> output sentences, template -> mask index).
    """
    grouped_out = {}
    mask_positions = {}
    for sent_in, sent_out in zip(sentences_in, sentences_out):
        template = templates[0] if sent_in[4] in {'like', 'likes'} else templates[1]
        grouped_out.setdefault(template, []).append(sent_out)
        # Remember the mask position from the first sentence of each template.
        if template not in mask_positions:
            mask_positions[template] = sent_in.index('[MASK]')
    return grouped_out, mask_positions
def categorize_predictions(sentences_out: List[List[str]], mask_index: int):
    """Tally each predicted word at `mask_index` into a scoring category.

    Categories: word-piece/[UNK] predictions, number-matching ("correct")
    copulas, number-mismatching ("false") copulas, and everything else.
    The targeted noun is the 2nd token of the sentence.

    Returns a dict mapping each label in `prediction_categories` to a count.
    """
    counts = {category: 0 for category in prediction_categories}
    for sentence in sentences_out:
        predicted = sentence[mask_index]
        noun = sentence[1]
        if predicted.startswith('##') or predicted == "[UNK]":
            # Sub-word continuation or unknown token.
            counts["non-start\nword-piece\nor\n[UNK]"] += 1
        elif (noun in nouns_plural and predicted in copulas_plural) or \
                (noun in nouns_singular and predicted in copulas_singular):
            # Copula agrees with the noun's grammatical number.
            counts["correct\ncopula"] += 1
        elif (noun in nouns_plural and predicted in copulas_singular) or \
                (noun in nouns_singular and predicted in copulas_plural):
            # Copula with the wrong number.
            counts["false\ncopula"] += 1
        else:
            counts["other"] += 1
    return counts
def print_stats(sentences):
    """Stub statistics printer; currently just signals completion."""
    status = 'Done'
    print(status)
| 30.294118 | 115 | 0.655534 |
cc598a9851b132640d3b946cb3e811023381c744
| 3,080 |
py
|
Python
|
homeassistant/components/traccar/__init__.py
|
tbarbette/core
|
8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c
|
[
"Apache-2.0"
] | 1 |
2020-12-18T12:23:04.000Z
|
2020-12-18T12:23:04.000Z
|
homeassistant/components/traccar/__init__.py
|
tbarbette/core
|
8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c
|
[
"Apache-2.0"
] | 60 |
2020-08-03T07:32:56.000Z
|
2022-03-31T06:02:07.000Z
|
homeassistant/components/traccar/__init__.py
|
tbarbette/core
|
8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c
|
[
"Apache-2.0"
] | 4 |
2017-01-10T04:17:33.000Z
|
2021-09-02T16:37:24.000Z
|
"""Support for Traccar."""
from aiohttp import web
import voluptuous as vol
from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER
from homeassistant.const import (
ATTR_ID,
CONF_WEBHOOK_ID,
HTTP_OK,
HTTP_UNPROCESSABLE_ENTITY,
)
from homeassistant.helpers import config_entry_flow
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import (
ATTR_ACCURACY,
ATTR_ALTITUDE,
ATTR_BATTERY,
ATTR_BEARING,
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_SPEED,
ATTR_TIMESTAMP,
DOMAIN,
)
# Dispatcher signal name used to push location updates to the
# device_tracker platform.
TRACKER_UPDATE = f"{DOMAIN}_tracker_update"
# NOTE(review): HTTP_OK (200) is reused as the numeric default accuracy —
# an odd constant choice but it only provides the value 200; confirm intent
# before changing.
DEFAULT_ACCURACY = HTTP_OK
# Sentinel meaning "battery level not reported".
DEFAULT_BATTERY = -1
def _id(value: str) -> str:
"""Coerce id by removing '-'."""
return value.replace("-", "")
# Validation schema for the webhook's query parameters. id/latitude/longitude
# are mandatory; accuracy and battery fall back to the module defaults, the
# remaining fields stay absent unless supplied by the client.
WEBHOOK_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_ID): vol.All(cv.string, _id),
        vol.Required(ATTR_LATITUDE): cv.latitude,
        vol.Required(ATTR_LONGITUDE): cv.longitude,
        vol.Optional(ATTR_ACCURACY, default=DEFAULT_ACCURACY): vol.Coerce(float),
        vol.Optional(ATTR_ALTITUDE): vol.Coerce(float),
        vol.Optional(ATTR_BATTERY, default=DEFAULT_BATTERY): vol.Coerce(float),
        vol.Optional(ATTR_BEARING): vol.Coerce(float),
        vol.Optional(ATTR_SPEED): vol.Coerce(float),
        vol.Optional(ATTR_TIMESTAMP): vol.Coerce(int),
    }
)
async def async_setup(hass, hass_config):
    """Set up the Traccar component by initializing its shared state."""
    shared_state = {"devices": set(), "unsub_device_tracker": {}}
    hass.data[DOMAIN] = shared_state
    return True
async def handle_webhook(hass, webhook_id, request):
    """Handle incoming webhook with Traccar request."""
    # Validate the query parameters; reject the request on schema errors.
    try:
        data = WEBHOOK_SCHEMA(dict(request.query))
    except vol.MultipleInvalid as error:
        return web.Response(text=error.error_message, status=HTTP_UNPROCESSABLE_ENTITY)

    device = data[ATTR_ID]
    # Optional attributes forwarded as-is (None when not supplied).
    attrs = {key: data.get(key) for key in (ATTR_ALTITUDE, ATTR_BEARING, ATTR_SPEED)}
    # Notify the device_tracker platform of the new position.
    async_dispatcher_send(
        hass,
        TRACKER_UPDATE,
        device,
        data[ATTR_LATITUDE],
        data[ATTR_LONGITUDE],
        data[ATTR_BATTERY],
        data[ATTR_ACCURACY],
        attrs,
    )
    return web.Response(text=f"Setting location for {device}", status=HTTP_OK)
async def async_setup_entry(hass, entry):
    """Configure based on config entry."""
    # Register the webhook endpoint, then forward setup to device_tracker.
    webhook_id = entry.data[CONF_WEBHOOK_ID]
    hass.components.webhook.async_register(DOMAIN, "Traccar", webhook_id, handle_webhook)
    hass.async_create_task(
        hass.config_entries.async_forward_entry_setup(entry, DEVICE_TRACKER)
    )
    return True
async def async_unload_entry(hass, entry):
    """Unload a config entry."""
    hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
    # Pop and invoke the stored unsubscribe callback for this entry.
    unsub = hass.data[DOMAIN]["unsub_device_tracker"].pop(entry.entry_id)
    unsub()
    await hass.config_entries.async_forward_entry_unload(entry, DEVICE_TRACKER)
    return True
# Reuse the generic webhook-based config-entry removal handler.
async_remove_entry = config_entry_flow.webhook_async_remove_entry
| 27.747748 | 87 | 0.705844 |
4fb886dbab49f960ed432e0530fa691be8612534
| 57,549 |
py
|
Python
|
temp/slurm_script_disordered.py
|
cabb99/CoarseGrainedActin
|
037dfddec2b985e529620e1b83d1cc48bd930b93
|
[
"MIT"
] | 1 |
2021-03-02T22:45:04.000Z
|
2021-03-02T22:45:04.000Z
|
temp/slurm_script_disordered.py
|
cabb99/CoarseGrainedActin
|
037dfddec2b985e529620e1b83d1cc48bd930b93
|
[
"MIT"
] | 1 |
2021-09-17T18:21:39.000Z
|
2021-09-17T18:21:39.000Z
|
temp/slurm_script_disordered.py
|
cabb99/CoarseGrainedActin
|
037dfddec2b985e529620e1b83d1cc48bd930b93
|
[
"MIT"
] | null | null | null |
#!/home/cab22/miniconda3/bin/python
#SBATCH --account=commons
#SBATCH --export=All
#SBATCH --partition=commons
#SBATCH --time=24:00:00
#SBATCH --ntasks=1
#SBATCH --threads-per-core=1
#SBATCH --cpus-per-task=2
#SBATCH --gres=gpu:1
#SBATCH --time=24:00:00
#SBATCH --export=ALL
#SBATCH --array=0-15
#SBATCH --mem=16G
import os
import subprocess
import itertools
import numpy as np
import warnings
import pandas
import time
import argparse
class SlurmJobArray():
    """ Selects a single condition from an array of parameters using the
    SLURM_ARRAY_TASK_ID environment variable. The parameters need to be
    supplied as a dictionary. If the task is not in a slurm environment,
    the test parameters will supersede the parameters, and the job_id
    will be taken as ``test_id``. Example:

        parameters={"epsilon":[100],
                    "aligned":[True,False],
                    "actinLen":[20,40,60,80,100,120,140,160,180,200,220,240,260,280,300],
                    "repetition":range(5),
                    "temperature":[300],
                    "system2D":[False],
                    "simulation_platform":["OpenCL"]}
        test_parameters={"simulation_platform":"CPU"}
        sjob=SlurmJobArray("ActinSimv6", parameters, test_parameters)

    :var test_run: Boolean: This simulation is a test
    :var job_id: SLURM_ARRAY_TASK_ID
    :var all_parameters: Parameters used to initialize the job
    :var parameter: Parameters for this particular job
    :var name: The name (and relative path) of the output
    """

    def __init__(self, name, parameters, test_parameters=None, test_id=0):
        """
        Args:
            name (str): Base name used to build the output file name.
            parameters (dict): Maps each parameter name to the sequence of
                values to sweep; the cartesian product of those sequences
                defines the job array.
            test_parameters (dict): Overrides applied when running outside
                SLURM. Defaults to no overrides.
            test_id (int): Job id assumed when SLURM_ARRAY_TASK_ID is absent.
        """
        self.all_parameters = parameters
        # Avoid the shared-mutable-default pitfall of `test_parameters={}`.
        self.test_parameters = {} if test_parameters is None else test_parameters

        # Collect all SLURM_* environment variables for bookkeeping.
        self.slurm_variables = {}
        for key in os.environ:
            if len(key.split("_")) > 1 and key.split("_")[0] == 'SLURM':
                self.slurm_variables.update({key: os.environ[key]})

        # Check if there is a job id; otherwise fall back to a test run.
        self.test_run = False
        try:
            self.job_id = int(self.slurm_variables["SLURM_ARRAY_TASK_ID"])
        except KeyError:
            self.test_run = True
            warnings.warn("Test Run: SLURM_ARRAY_TASK_ID not in environment variables")
            self.job_id = test_id

        keys = parameters.keys()
        self.all_conditions = list(itertools.product(*[parameters[k] for k in keys]))
        self.parameter = dict(zip(keys, self.all_conditions[self.job_id]))

        # The name only includes enough information to differentiate the
        # simulations: parameters with a single candidate value are omitted.
        self.name = f"{name}_{self.job_id:03d}_" + '_'.join(
            [f"{a[0]}_{self[a]}" for a in self.parameter if len(self.all_parameters[a]) > 1])

    def __getitem__(self, name):
        """Return a parameter value; in a test run the test parameters take
        precedence over the sweep parameters."""
        if self.test_run:
            try:
                return self.test_parameters[name]
            except KeyError:
                return self.parameter[name]
        else:
            return self.parameter[name]

    def __getattr__(self, name: str):
        """ The keys of the parameters can be called as attributes
        """
        if name in self.__dict__:
            return object.__getattribute__(self, name)
        elif name in self.parameter:
            return self[name]
        else:
            return object.__getattribute__(self, name)

    def __repr__(self):
        return str(self.parameter)

    def keys(self):
        """Return the parameter names of this job.

        Bug fix: the previous implementation referenced the non-existent
        attribute ``self.parameters`` and always raised AttributeError.
        """
        return self.parameter.keys()

    def print_parameters(self):
        """Print the number of conditions and this job's parameter values."""
        print(f"Number of conditions: {len(self.all_conditions)}")
        print("Running Conditions")
        for k in self.parameter.keys():
            print(f"{k} :", f"{self[k]}")
        print()

    def print_slurm_variables(self):
        """Print every SLURM_* environment variable seen at startup."""
        print("Slurm Variables")
        for key in self.slurm_variables:
            print(key, ":", self.slurm_variables[key])
        print()

    def write_csv(self, out=""):
        """Write this job's parameters, slurm variables and run metadata to a
        CSV file (``<name>.param`` when *out* is empty)."""
        s = pandas.concat([pandas.Series(self.parameter), pandas.Series(self.slurm_variables)])
        s['test_run'] = self.test_run
        s['date'] = time.strftime("%Y_%m_%d")
        s['name'] = self.name
        s['job_id'] = self.job_id
        if out == '':
            s.to_csv(self.name + '.param')
        else:
            s.to_csv(out)
################
# Coarse Actin #
################
#!/usr/bin/python3
"""
Coarse Actin simulations using a custom
"""
import openmm
import openmm.app
from simtk import unit
import numpy as np
import pandas
import sklearn.decomposition
import configparser
import prody
import scipy.spatial.distance as sdist
import os
import sys
__author__ = 'Carlos Bueno'
__version__ = '0.2'
#__location__ = os.path.realpath(
#    os.path.join(os.getcwd(), os.path.dirname(__file__)))
#__location__="/scratch/cab22/Bundling/Persistence_length/Persistence_length"
# Working directory for simulation input/output (relative to CWD).
__location__='.'
# Unit-conversion factors between the file's internal units and OpenMM units.
_ef = 1 * unit.kilocalorie / unit.kilojoule  # energy scaling factor
_df = 1 * unit.angstrom / unit.nanometer  # distance scaling factor
_af = 1 * unit.degree / unit.radian  # angle scaling factor
def parseConfigTable(config_section):
    """Parse a configuration-file section as a table.

    The section's 'name' entry supplies the column names; every entry whose
    key starts with 'row' supplies one data row. Tokens are coerced to int,
    then float, and fall back to str. Inline '#' comments are stripped.

    Returns a pandas.DataFrame of the parsed rows.
    """

    def _parse_entry(section, key):
        """Strip inline comments and coerce each whitespace token."""
        tokens = section.get(key).split('#')[0].split()
        values = []
        for token in tokens:
            token = token.strip()
            for cast in (int, float):
                try:
                    values.append(cast(token))
                    break
                except ValueError:
                    continue
            else:
                # Neither int nor float: keep the raw string.
                values.append(token)
        return values

    rows = []
    for key in config_section:
        if key == 'name':
            columns = _parse_entry(config_section, key)
        elif len(key) > 3 and key[:3] == 'row':
            rows.append(_parse_entry(config_section, key))
        else:
            print(f'Unexpected row {_parse_entry(config_section, key)}')
    return pandas.DataFrame(rows, columns=columns)
# Random rotation matrix (Arvo's method: random z-rotation followed by a
# Householder reflection, negated to restore det = +1).
def random_rotation():
    """Generate a 3D random rotation matrix.

    Returns:
        np.matrix: A 3D rotation matrix.
    """
    u1, u2, u3 = np.random.rand(3)
    theta = 2 * np.pi * u1
    rot_z = np.matrix([[np.cos(theta), np.sin(theta), 0],
                       [-np.sin(theta), np.cos(theta), 0],
                       [0, 0, 1]])
    phi = 2 * np.pi * u2
    axis = np.matrix([[np.cos(phi) * np.sqrt(u3)],
                      [np.sin(phi) * np.sqrt(u3)],
                      [np.sqrt(1 - u3)]])
    householder = np.eye(3) - 2 * axis * axis.T
    return -householder * rot_z
# Optimal rotation matrix.
# The longest coordinate is X, then Y, then Z.
def optimal_rotation(coords):
    """Rotation aligning the principal axes of *coords* with X, Y, Z
    (largest spread on X), computed via PCA on the centered coordinates."""
    centered = coords.copy()
    centered -= centered.mean(axis=0)
    pca = sklearn.decomposition.PCA()
    pca.fit(centered)
    rot = pca.components_[[0, 1, 2]]
    # PCA components may form a rotoinversion (det = -1); flip the sign to
    # obtain a proper rotation matrix.
    if np.linalg.det(rot) < 0:
        rot = -rot
    return rot
class SystemData:
def __init__(self, atoms, bonds=None, angles=None, dihedrals=None, impropers=None):
self.atoms = atoms
self.atoms.index = np.arange(1, len(self.atoms) + 1)
self.masses = atoms[['type', 'mass']].drop_duplicates()
self.masses.index = np.arange(1, len(self.masses) + 1)
self.n_atoms = len(self.atoms)
self.n_atomtypes = len(self.masses)
if bonds is not None:
self.bonds = bonds
self.bonds.index = np.arange(1, len(self.bonds) + 1)
self.bondtypes = bonds[['type', 'x0', 'k']].drop_duplicates()
self.bondtypes.index = np.arange(1, len(self.bondtypes) + 1)
self.n_bonds = len(self.bonds)
self.n_bondtypes = len(self.bondtypes)
else:
self.bonds = pandas.DataFrame()
self.bondtypes = pandas.DataFrame()
self.n_bonds = 0
self.n_bondtypes = 0
if angles is not None:
self.angles = angles
self.angles.index = np.arange(1, len(self.angles) + 1)
self.angletypes = angles[['type', 'x0', 'k']].drop_duplicates()
self.angletypes.index = np.arange(1, len(self.angletypes) + 1)
self.n_angles = len(self.angles)
self.n_angletypes = len(self.angletypes)
else:
self.angles = pandas.DataFrame()
self.angletypes = pandas.DataFrame()
self.n_angles = 0
self.n_angletypes = 0
if dihedrals is not None:
self.dihedrals = dihedrals
self.dihedrals.index = np.arange(1, len(self.dihedrals) + 1)
self.dihedraltypes = dihedrals[['type', 'x0', 'k']].drop_duplicates()
self.dihedraltypes.index = np.arange(1, len(self.dihedraltypes) + 1)
self.n_dihedrals = len(self.dihedrals)
self.n_dihedraltypes = len(self.dihedraltypes)
else:
self.dihedrals = pandas.DataFrame()
self.dihedraltypes = pandas.DataFrame()
self.n_dihedrals = 0
self.n_dihedraltypes = 0
if impropers is not None:
self.impropers = impropers
self.impropers.index = np.arange(1, len(self.impropers) + 1)
self.impropertypes = impropers[['type', 'x0', 'k']].drop_duplicates()
self.impropertypes.index = np.arange(1, len(self.impropertypes) + 1)
self.n_impropers = len(self.impropers)
self.n_impropertypes = len(self.impropertypes)
else:
self.impropers = pandas.DataFrame()
self.impropertypes = pandas.DataFrame()
self.n_impropers = 0
self.n_impropertypes = 0
# self.n_bonds=len(self.bonds)
# self.n_bondtypes=len(self.bondtypes)
# self.xmin,self.xmax=atoms['x'].min(),atoms['x'].max()
# self.ymin,self.ymax=atoms['y'].min(),atoms['y'].max()
# self.zmin,self.zmax=atoms['z'].min(),atoms['z'].max()
    def write_data(self, file_name='actin.data', box_size=1000):
        """Write the system as a LAMMPS data file.

        Writes the header counts, the box bounds (anchored at the origin) and
        the Masses and Atoms sections, followed by Bonds/Angles/Dihedrals/
        Impropers sections for whichever interaction tables are non-empty.

        Parameters:
            file_name: path of the output data file.
            box_size: box side length; bounds run from 0 to box_size.
        """
        # Box bounds are fixed to [0, box_size] in each dimension,
        # independent of the actual coordinate range.
        self.xmin, self.xmax = 0, box_size
        self.ymin, self.ymax = 0, box_size
        self.zmin, self.zmax = 0, box_size
        with open(file_name, 'w+') as f:
            f.write('LAMMPS data file generated with python\n\n')
            # Header: entity counts
            f.write('\t%i atoms\n' % self.n_atoms)
            f.write('\t%i bonds\n' % self.n_bonds)
            f.write('\t%i angles\n' % self.n_angles)
            f.write('\t%i dihedrals\n' % self.n_dihedrals)
            f.write('\t%i impropers\n' % self.n_impropers)
            f.write('\n')
            # Header: type counts
            f.write('\t%i atom types\n' % self.n_atomtypes)
            f.write('\t%i bond types\n' % self.n_bondtypes)
            f.write('\t%i angle types\n' % self.n_angletypes)
            f.write('\t%i dihedral types\n' % self.n_dihedraltypes)
            f.write('\t%i improper types\n' % self.n_impropertypes)
            f.write('\n')
            f.write('\t %f %f xlo xhi\n' % (self.xmin, self.xmax))
            f.write('\t %f %f ylo yhi\n' % (self.ymin, self.ymax))
            f.write('\t %f %f zlo zhi\n' % (self.zmin, self.zmax))
            f.write('\n')
            f.write('Masses\n\n')
            for i, m in self.masses.iterrows():
                f.write('\t%i\t%f\n' % (i, m.mass))
            f.write('\n')
            # Atoms section: atom-id, molecule-id, type, charge, x, y, z
            f.write('Atoms\n\n')
            for i, a in self.atoms.iterrows():
                f.write('\t%i\t%i\t%i\t%f\t%f\t%f\t%f\n' % (i, a.molecule, a.type, a.q, a.x, a.y, a.z))
            f.write('\n')
            if self.n_bonds > 0:
                f.write('Bonds\n\n')
                for i, b in self.bonds.iterrows():
                    f.write('\t%i\t%i\t%i\t%i\n' % (i, b.type, b.i, b.j))
                f.write('\n')
            if self.n_angles > 0:
                # The three angle members are stored in columns i, j, l
                f.write('Angles\n\n')
                for i, b in self.angles.iterrows():
                    f.write('\t%i\t%i\t%i\t%i\t%i\n' % (i, b.type, b.i, b.j, b.l))
                f.write('\n')
            if self.n_dihedrals > 0:
                # The four dihedral members are stored in columns i, j, l, m
                f.write('Dihedrals\n\n')
                for i, b in self.dihedrals.iterrows():
                    f.write('\t%i\t%i\t%i\t%i\t%i\t%i\n' % (i, b.type, b.i, b.j, b.l, b.m))
                f.write('\n')
            if self.n_impropers > 0:
                f.write('Impropers\n\n')
                for i, b in self.impropers.iterrows():
                    f.write('\t%i\t%i\t%i\t%i\t%i\t%i\n' % (i, b.type, b.i, b.j, b.l, b.m))
                f.write('\n')
    def write_pdb(self, file_name='actin.pdb'):
        """Write the system in PDB format.

        Coordinates are divided by 10 before writing (model units -> PDB
        units; presumably a 0.1-scale conversion — TODO confirm) and
        truncated to fit the fixed 8-column coordinate fields.

        Parameters:
            file_name: path of the output PDB file.
        """
        import string
        # Chain-id alphabet (uppercase without 'X', lowercase, digits, 'X'),
        # repeated so any molecule number maps to a character.
        cc = (string.ascii_uppercase.replace('X','') + string.ascii_lowercase + '1234567890'+'X')*1000
        # NOTE(review): cc_d is built but no longer used below — the chainID
        # column of self.atoms is written directly instead.
        cc_d = dict(zip(range(1, len(cc) + 1), cc))
        pdb_line = '%-6s%5i %-4s%1s%3s %1s%4i%1s   %8s%8s%8s%6.2f%6.2f          %2s%2s\n'
        pdb_atoms = self.atoms.copy()
        pdb_atoms['serial'] = np.arange(1, len(self.atoms) + 1)
        # pdb_atoms['name'] = self.atoms['type'].replace({1:'A1',2:'A2',3:'A3',4:'A4',5:'A5',6:'C1',7:'C2'})
        pdb_atoms['altLoc'] = ''
        # pdb_atoms['resName'] = self.atoms['molecule_name'].replace({'actin':'ACT','camkii':'CAM'})
        pdb_atoms['resName'] = self.atoms['resname']
        #pdb_atoms['chainID'] = self.atoms['molecule'].replace(cc_d)
        pdb_atoms['chainID'] = self.atoms['chainID']
        # assert False
        # pdb_atoms['resSeq'] = 0
        pdb_atoms['iCode'] = ''
        # pdb_atoms['x'] =
        # pdb_atoms['y'] =
        # pdb_atoms['z'] =
        pdb_atoms['occupancy'] = 0
        pdb_atoms['tempFactor'] = 0
        # Map coarse-grained bead types to PDB element symbols
        pdb_atoms['element'] = self.atoms['type'].replace(
            {1: 'C', 2: 'O', 3: 'N', 4: 'P', 5: 'H', 6: 'H', 7: 'H', 8: 'Mg', 9: 'Fe', 10: 'C'})
        pdb_atoms['charge'] = 0  # self.atoms['q'].astype(int)
        with open(file_name, 'w+') as f:
            chain = 'NoChain'
            resSeq = 0
            for i, a in pdb_atoms.iterrows():
                # Per-chain residue counter: restart at 1 on every new chain.
                if a['chainID'] != chain:
                    resSeq = 1
                    chain = a['chainID']
                else:
                    resSeq += 1
                # NOTE(review): resSeq is tracked here but the record below
                # writes a['resid'] as the residue number — confirm intent.
                f.write(pdb_line % ('ATOM',
                                    int(a['serial']),
                                    a['name'].center(4),
                                    a['altLoc'],
                                    a['resName'],
                                    a['chainID'],
                                    a['resid'],
                                    a['iCode'],
                                    ('%8.3f' % (a['x'] / 10))[:8],
                                    ('%8.3f' % (a['y'] / 10))[:8],
                                    ('%8.3f' % (a['z'] / 10))[:8],
                                    a['occupancy'],
                                    a['tempFactor'],
                                    a['element'],
                                    a['charge']))
    def write_psf(self, file_name='actin.psf'):
        """Write the system in PSF format. Not implemented yet."""
        pass
def write_gro(self, file_name='actin.gro', box_size=1000):
gro_line = "%5d%-5s%5s%5d%8s%8s%8s%8s%8s%8s\n"
pdb_atoms = self.atoms.copy()
pdb_atoms['resName'] = self.atoms[
'resname'] # self.atoms['molecule_name'].replace({'actin':'ACT','camkii':'CAM'})
# pdb_atoms['name'] = self.atoms['type'].replace({1:'Aa',2:'Ab',3:'Ca',4:'Cb',5:'Da',6:'Db'})
pdb_atoms['serial'] = np.arange(1, len(self.atoms) + 1)
pdb_atoms['chainID'] = self.atoms['molecule']
self.xmin, self.xmax = 0, box_size
self.ymin, self.ymax = 0, box_size
self.zmin, self.zmax = 0, box_size
resSeq = 0
with open(file_name, 'w+') as f:
f.write('Generated Model\n')
f.write('%5i\n' % len(pdb_atoms))
chain = 'NoChain'
resSeq = 0
for i, a in pdb_atoms.iterrows():
if a['molecule'] != chain:
resSeq = 1
chain = a['molecule']
else:
resSeq += 1
f.write(gro_line % (a['molecule'],
a['resName'],
a['name'],
int(a['serial']),
('%8.3f' % (a['x'] / 10))[:8],
('%8.3f' % (a['y'] / 10))[:8],
('%8.3f' % (a['z'] / 10))[:8],
'', '', ''))
f.write((' ' + ' '.join(['%8.3f'] * 3) + '\n') % (self.xmax, self.ymax, self.zmax))
def print_coeff(self):
if self.n_bonds > 0:
self.bondtypes = self.bondtypes.sort_values('type')
for i, b in self.bondtypes.iterrows():
print('bond_coeff', int(b.type), b.k, '%.4f' % b['x0'])
if self.n_angles > 0:
for i, b in self.angletypes.iterrows():
print('angle_coeff', int(b.type), b.k, '%.4f' % b['x0'])
if self.n_dihedrals > 0:
for i, b in self.dihedraltypes.iterrows():
print('dihedral_coeff', int(b.type), b.k, '%.4f' % b['x0'])
if self.n_impropers > 0:
for i, b in self.impropertypes.iterrows():
print('improper_coeff', int(b.type), b.k, '%.4f' % b['x0'])
class CoarseActin:
    """Coarse-grained actin / CaMKII model.

    The object can be built either by randomly placing actin filaments and
    CaMKII holoenzymes in a periodic box (`from_parameters`) or by reading an
    existing coarse-grained topology (`from_topology`). After construction it
    holds an OpenMM `system` with bonded, repulsive, binding and optional
    constraint forces set up.
    """

    @classmethod
    def from_parameters(cls,
                        box_size=10000,
                        n_actins=10,
                        n_camkiis=200,
                        min_dist=200,
                        align_actins=False,
                        bundle=False,
                        system2D=False,
                        model='Binding-Qian2',
                        sname='actin',
                        actinLenMin=50,
                        actinLenMax=100):
        """Generate a random actin/CaMKII system and return the OpenMM model.

        Parameters:
            box_size: side of the cubic periodic box (model units).
            n_actins, n_camkiis: number of filaments / CaMKII particles.
            min_dist: minimum distance allowed between a new molecule and the
                binding sites ('A2'/'Cc' beads) already placed.
            align_actins, bundle, system2D: placement constraints.
            model: virtual-site parameter set used for the binding sites.
            sname: basename for the generated .pdb/.gro files.
            actinLenMin, actinLenMax: filament length range (monomers).
        """
        self = cls()
        # Reference F-actin: average each of the four subdomains of three
        # consecutive monomers (chains D, E, F) of PDB structure 3j8i.
        pdb = prody.parsePDB(f'{__location__}/3j8i.pdb')
        mean = np.array([])
        for chain in 'DEF':
            selection = pdb.select('chain %s' % chain)
            D1 = pdb.select('chain %s and (resid 1 to 32 or resid 70 to 144 or resid 338 to 375)' % chain)
            D2 = pdb.select('chain %s and (resid 33 to 69)' % chain)
            D3 = pdb.select('chain %s and (resid 145 to 180 or resid 270 to 337)' % chain)
            D4 = pdb.select('chain %s and (resid 181 to 269)' % chain)
            m1 = D1.getCoords().mean(axis=0)
            m2 = D2.getCoords().mean(axis=0)
            m3 = D3.getCoords().mean(axis=0)
            m4 = D4.getCoords().mean(axis=0)
            mean = np.concatenate([mean, m1, m2, m3, m4], axis=0)
        mean = mean.reshape(-1, 3)
        actin = pandas.DataFrame(mean, columns=['x', 'y', 'z'])
        name = ['A1', 'A2', 'A3', 'A4'] * 3
        resid = [i for j in range(3) for i in [j] * 4]
        actin.index = zip(resid, name)
        # Add the virtual binding sites defined for the chosen model on the
        # middle monomer (j == 2).
        vs = self.virtual_sites_definition
        for j in [2]:
            for i, s in vs[vs['molecule'] == model].iterrows():
                w12 = s['w12']
                w13 = s['w13']
                wcross = s['wcross']
                a = actin.loc[[(j, s['p1'])]].squeeze() / 10
                b = actin.loc[[(j, s['p2'])]].squeeze() / 10
                c = actin.loc[[(j, s['p3'])]].squeeze() / 10
                r12 = b - a
                r13 = c - a
                rcross = np.cross(r12, r13)
                r = (a + w12 * r12 + w13 * r13 + wcross * rcross) * 10
                r.name = (j, s['site'])
                # NOTE(review): DataFrame.append was removed in pandas 2.0;
                # this method requires pandas < 2.
                actin = actin.append(r)
        actin_reference = actin.sort_index()
        # Build each filament by repeatedly superposing the reference trimer
        # onto the growing end of the filament.
        factin = []
        for i in range(n_actins):
            # Sample the filament length (monomers); assumes nactins >= 4 so
            # the elongation loop below runs at least once — TODO confirm.
            nactins = actinLenMin + int((actinLenMax - actinLenMin) * np.random.random())
            names = ['A1', 'A2', 'A3', 'A4'] * 2 + ['A1', 'A2', 'A3', 'A4', 'A5', 'A6', 'A7'] * (nactins - 2)
            resnames = ['ACD'] * (4 * 2) + ['ACT'] * (7 * (nactins - 2))
            resids = [1] * 4 + [2] * 4 + [i + 3 for j in range(nactins - 2) for i in [j] * 7]
            Factin = prody.AtomGroup()
            Factina = prody.AtomGroup()
            Factinb = prody.AtomGroup()
            Factin.setCoords(actin_reference)
            Factina.setCoords(actin_reference[4:-3])
            Factinb.setCoords(actin_reference[:-4 - 3])
            # NOTE(review): this inner loop shadows the outer loop index i.
            for i in range(nactins - 3):
                a, t = prody.superpose(Factina, Factinb)
                temp0 = Factin.getCoords()
                test = prody.applyTransformation(t, Factin)
                temp = np.concatenate([test.getCoords(), temp0[-4 - 3:]])
                Factin = prody.AtomGroup()
                Factina = prody.AtomGroup()
                Factinb = prody.AtomGroup()
                Factin.setCoords(temp)
                Factina.setCoords(temp[4:12])
                Factinb.setCoords(temp[0:8])
            Factin = prody.AtomGroup()
            Factin.setCoords(temp[:])
            n = len(Factin)
            Factin.setNames(names)
            Factin.setResnames(resnames)
            Factin.setResnums(resids)
            prody.writePDB('Factin.pdb', Factin)
            print(nactins, (n - 8) / 7. + 2)
            atoms = pandas.DataFrame(Factin.getCoords(), columns=['x', 'y', 'z'])
            atoms['q'] = -11
            atoms['molecule'] = 1
            atoms['type'] = [1, 2, 3, 4] * 2 + [1, 2, 3, 4, 5, 6, 7] * (nactins - 2)
            atoms['name'] = names
            atoms['resid'] = resids
            atoms['resname'] = resnames
            atoms.head()
            factin += [atoms.copy()]
        # CaMKII template read from the configuration file
        camkii = self.template
        # Place molecules in the box, rejecting positions that fall closer
        # than min_dist to the binding sites already placed.
        actins = []
        camkiis = []
        for i in range(n_actins):
            d = 0
            while d < min_dist:
                f = factin[i][['x', 'y', 'z']].copy()
                f = f - f.mean()
                if align_actins:
                    rot = optimal_rotation(f)
                else:
                    rot = random_rotation()
                f = pandas.DataFrame(np.dot(rot, f[['x', 'y', 'z']].T).T, columns=f.columns)
                f = f - f.mean()
                f += [box_size / 2. for j in range(3)]
                a, b, c = [box_size * np.random.random() for j in range(3)]
                if bundle:
                    a = 0
                if system2D:
                    c = 0
                f += [a, b, c]
                # Wrap centers of mass back inside the box
                f -= (f.mean() > box_size) * box_size
                f2 = factin[i].copy()
                f2[['x', 'y', 'z']] = f[['x', 'y', 'z']]
                f2['molecule'] = i + 1
                f2['molecule_name'] = 'actin'
                f2['resname'] = factin[i]['resname']
                try:
                    d = sdist.cdist(f2[['x', 'y', 'z']], s[s['name'].isin(['A2', 'Cc'])][['x', 'y', 'z']]).min()
                except (KeyError, NameError):
                    # Fixed: on the very first placement `s` does not exist
                    # yet, which raises NameError (the original only caught
                    # KeyError). Accept the position in that case.
                    d = min_dist + 100
                # NOTE(review): appended inside the while loop, so rejected
                # placements are also kept — confirm this is intentional.
                actins += [f2]
            s = pandas.concat(actins)
            print("Actins in system")
            print(f"Total number of particles: {len(s)}")
        for i in range(n_camkiis):
            d = 0
            while d < min_dist:
                f = camkii[['x', 'y', 'z']].copy()
                f = f - f.mean()
                f = pandas.DataFrame(np.dot(random_rotation(), f[['x', 'y', 'z']].T).T, columns=f.columns)
                f = f - f.mean()
                f += [box_size / 2. for j in range(3)]
                a, b, c = [box_size * np.random.random() for j in range(3)]
                if system2D:
                    c = box_size / 10 * np.random.random()
                f += [a, b, c]
                f -= (f.mean() > box_size) * box_size
                f2 = camkii.copy()
                f2[['x', 'y', 'z']] = f[['x', 'y', 'z']]
                f2['molecule'] = n_actins + i + 1
                f2['molecule_name'] = 'camkii'
                f2['resid'] = i + 1
                f2['resname'] = 'CAM'
                d = sdist.cdist(f2[['x', 'y', 'z']], s[s['name'].isin(['A2', 'Cc'])][['x', 'y', 'z']]).min()
                # NOTE(review): appended inside the while loop (see above).
                camkiis += [f2]
            s = pandas.concat(actins + camkiis, sort=True)
            print(f"CAMKII {i}")
        print("CAMKIIs in system")
        print(f"Total number of particles: {len(s)}")
        s.index = np.arange(1, len(s) + 1)
        s['mass'] = np.nan
        # Write the system to disk and build the OpenMM model from the PDB
        ss = SystemData(s.sort_values(['molecule', 'resid', 'name']))
        #ss.write_data(f'{sname}.data')
        ss.write_pdb(f'{sname}.pdb')
        ss.write_gro(f'{sname}.gro')
        ss.print_coeff()
        return self.from_topology(topology_file=f'{sname}.pdb', PlaneConstraint=system2D, periodic_box=box_size)

    @classmethod
    def from_topology(cls, topology_file='actin.pdb', periodic_box=10000, PlaneConstraint=False):
        """Build the OpenMM model from an existing coarse-grained PDB file.

        Parameters:
            topology_file: coarse-grained PDB to load.
            periodic_box: box side in model units (converted with the 0.1
                factor used throughout this class).
            PlaneConstraint: forwarded to setForces.
        """
        self = cls()
        self.periodic_box = [periodic_box * 0.1] * 3
        self.forcefield = openmm.app.ForceField(f'{__location__}/ff.xml')
        self.top = openmm.app.PDBFile(topology_file)
        self.system = self.forcefield.createSystem(self.top.topology)
        self.system.setDefaultPeriodicBoxVectors(*np.diag(self.periodic_box))
        self.atom_list = self.parseTop()
        self.BuildVirtualSites()
        self.ComputeTopology()
        self.setForces(PlaneConstraint=PlaneConstraint)
        return self

    # Parse topology data
    def parseConfigurationFile(self, configuration_file=f'{__location__}/actinff.conf'):
        """Reads the configuration file for the forcefield.

        Populates the template, bond/angle/dihedral/repulsion and virtual-site
        definition tables, then restricts the bonded definitions to the
        'Actin-ADP' and 'CaMKII' molecules.
        """
        self.configuration_file = configuration_file
        print(configuration_file)
        config = configparser.ConfigParser()
        config.read(configuration_file)
        self.template = parseConfigTable(config['Template'])
        self.bond_definition = parseConfigTable(config['Bonds'])
        # Bond type label: '1' + i-name + (1+s) + j-name
        self.bond_definition['type'] = '1' + self.bond_definition['i'].astype(str) + '-' + \
                                       (1 + self.bond_definition['s']).astype(str) + \
                                       self.bond_definition['j'].astype(str)
        self.angle_definition = parseConfigTable(config['Angles'])
        self.dihedral_definition = parseConfigTable(config['Dihedrals'])
        self.repulsion_definition = parseConfigTable(config['Repulsion'])
        self.virtual_sites_definition = parseConfigTable(config['Virtual sites'])
        self.bond_definition = self.bond_definition[self.bond_definition.molecule.isin(['Actin-ADP', 'CaMKII'])]
        self.angle_definition = self.angle_definition[self.angle_definition.molecule.isin(['Actin-ADP', 'CaMKII'])]
        self.dihedral_definition = self.dihedral_definition[
            self.dihedral_definition.molecule.isin(['Actin-ADP', 'CaMKII'])]

    def __init__(self):
        """Initialize the model by loading the forcefield configuration."""
        self.parseConfigurationFile()

    # Parse topology data
    def parseTop(self):
        """ Converts the information from the topology to a table"""
        cols = ['atom_index', 'atom_id', 'atom_name',
                'residue_index', 'residue_id', 'residue_name',
                'chain_index', 'chain_id']
        data = []
        for residue in self.top.topology.residues():
            for atom in residue.atoms():
                data += [[atom.index, atom.id, atom.name,
                          residue.index, residue.id, residue.name,
                          residue.chain.index, residue.chain.id]]
        atom_list = pandas.DataFrame(data, columns=cols)
        atom_list.index = atom_list['atom_index']
        return atom_list

    def BuildVirtualSites(self):
        """ Sets the parameters for the virtual sites"""
        virtual_sites_definition = self.virtual_sites_definition.copy()
        virtual_sites_definition.index = [tuple(b) for a, b in
                                          virtual_sites_definition[['molecule', 'site']].iterrows()]
        # Actin binding sites parameters
        w1 = np.array(virtual_sites_definition.loc[[('Voth-Qian2020', 'A5')], ['w12', 'w13', 'wcross']].squeeze())
        w2 = np.array(virtual_sites_definition.loc[[('Voth-Qian2020', 'A6')], ['w12', 'w13', 'wcross']].squeeze())
        w3 = np.array(virtual_sites_definition.loc[[('Voth-Qian2020', 'A7')], ['w12', 'w13', 'wcross']].squeeze())
        # CAMKII virtual sites
        cw1 = np.array(virtual_sites_definition.loc[[('CaMKII', 'C1')], ['w12', 'w13', 'wcross']].squeeze())
        cw2 = np.array(virtual_sites_definition.loc[[('CaMKII', 'C2')], ['w12', 'w13', 'wcross']].squeeze())
        cw3 = np.array(virtual_sites_definition.loc[[('CaMKII', 'C6')], ['w12', 'w13', 'wcross']].squeeze())
        cw4 = np.array(virtual_sites_definition.loc[[('CaMKII', 'C7')], ['w12', 'w13', 'wcross']].squeeze())
        # Attach the virtual sites residue by residue
        for _, res in self.atom_list.groupby(['chain_index', 'residue_id']):
            unique_names = res['residue_name'].unique()
            assert len(unique_names) == 1, f'Residue {_} has mixed names: {unique_names}'
            resname = unique_names[0]
            ix = dict(list(zip(res['atom_name'], res['atom_index'])))
            if resname == 'ACT':
                # Virtual site positions
                a5 = openmm.OutOfPlaneSite(ix['A2'], ix['A1'], ix['A3'], w1[0], w1[1], w1[2])
                a6 = openmm.OutOfPlaneSite(ix['A2'], ix['A1'], ix['A3'], w2[0], w2[1], w2[2])
                a7 = openmm.OutOfPlaneSite(ix['A2'], ix['A1'], ix['A3'], w3[0], w3[1], w3[2])
                # Set up virtual sites
                self.system.setVirtualSite(ix['A5'], a5)
                self.system.setVirtualSite(ix['A6'], a6)
                self.system.setVirtualSite(ix['A7'], a7)
            if resname == 'CAM':
                # Parent sites
                c1 = ix['Cx1']
                c2 = ix['Cx2']
                c3 = ix['Cx3']
                # Virtual site positions: 12 sites from cyclic permutations of
                # the three parents, plus the centroid site Cc.
                c01 = openmm.OutOfPlaneSite(c1, c2, c3, cw1[0], cw1[1], cw1[2])
                c02 = openmm.OutOfPlaneSite(c1, c2, c3, cw2[0], cw2[1], cw2[2])
                c03 = openmm.OutOfPlaneSite(c2, c3, c1, cw1[0], cw1[1], cw1[2])
                c04 = openmm.OutOfPlaneSite(c2, c3, c1, cw2[0], cw2[1], cw2[2])
                c05 = openmm.OutOfPlaneSite(c3, c1, c2, cw1[0], cw1[1], cw1[2])
                c06 = openmm.OutOfPlaneSite(c3, c1, c2, cw2[0], cw2[1], cw2[2])
                c07 = openmm.OutOfPlaneSite(c1, c2, c3, cw3[0], cw3[1], cw3[2])
                c08 = openmm.OutOfPlaneSite(c1, c2, c3, cw4[0], cw4[1], cw4[2])
                c09 = openmm.OutOfPlaneSite(c2, c3, c1, cw3[0], cw3[1], cw3[2])
                c10 = openmm.OutOfPlaneSite(c2, c3, c1, cw4[0], cw4[1], cw4[2])
                c11 = openmm.OutOfPlaneSite(c3, c1, c2, cw3[0], cw3[1], cw3[2])
                c12 = openmm.OutOfPlaneSite(c3, c1, c2, cw4[0], cw4[1], cw4[2])
                cc = openmm.ThreeParticleAverageSite(c1, c2, c3, 1 / 3., 1 / 3., 1 / 3.)
                # Set up virtual positions
                self.system.setVirtualSite(ix['C01'], c01)
                self.system.setVirtualSite(ix['C02'], c02)
                self.system.setVirtualSite(ix['C03'], c03)
                self.system.setVirtualSite(ix['C04'], c04)
                self.system.setVirtualSite(ix['C05'], c05)
                self.system.setVirtualSite(ix['C06'], c06)
                self.system.setVirtualSite(ix['C07'], c07)
                self.system.setVirtualSite(ix['C08'], c08)
                self.system.setVirtualSite(ix['C09'], c09)
                self.system.setVirtualSite(ix['C10'], c10)
                self.system.setVirtualSite(ix['C11'], c11)
                self.system.setVirtualSite(ix['C12'], c12)
                self.system.setVirtualSite(ix['Cc'], cc)
        self.atom_list['Virtual'] = [self.system.isVirtualSite(a) for a in range(len(self.atom_list))]

    def ComputeTopology(self):
        """Build the bond, angle and dihedral tables from the definitions.

        For every chain, atoms are grouped by name and the definition rows are
        expanded into concrete index tuples; the 's' column shifts partner
        lists along the chain.
        """
        bonds = []
        angles = []
        dihedrals = []
        for _, c in self.atom_list.groupby('chain_index'):
            ix = {}
            for name, aa in c.groupby('atom_name'):
                ix.update({name: list(aa.index)})
            for SB, B in zip([bonds, angles, dihedrals],
                             [self.bond_definition, self.angle_definition, self.dihedral_definition]):
                for _, b in B.iterrows():
                    temp = pandas.DataFrame(columns=B.columns)
                    if 's' not in b:
                        b['s'] = 0
                    if b['i'] not in ix.keys():
                        continue
                    # Shifted pairing: i-list offset by s, j-list trimmed
                    i1 = ix[b['i']][b['s']:]
                    i2 = ix[b['j']][:-b['s']] if b['s'] != 0 else ix[b['j']]
                    assert (len(i1) == len(i2))
                    temp['i'] = i1
                    temp['j'] = i2
                    if 'k' in b:
                        i3 = ix[b['k']]
                        assert (len(i1) == len(i3))
                        temp['k'] = i3
                    if 'l' in b:
                        i4 = ix[b['l']]
                        assert (len(i1) == len(i4))
                        temp['l'] = i4
                    # Copy the remaining definition columns (type, K, r0, ...)
                    for col in temp:
                        if col not in ['i', 'j', 'k', 'l']:
                            temp[col] = b[col]
                    SB += [temp]
        bonds = pandas.concat(bonds, sort=False)
        bonds.sort_values(['i', 'j'], inplace=True)
        angles = pandas.concat(angles, sort=False)
        angles.sort_values(['i', 'j', 'k'], inplace=True)
        dihedrals = pandas.concat(dihedrals, sort=False)
        dihedrals.sort_values(['i', 'j', 'k', 'l'], inplace=True)
        self.bonds = bonds.reset_index(drop=True)
        self.angles = angles.reset_index(drop=True)
        self.dihedrals = dihedrals.reset_index(drop=True)

    def Bond_diff(self, coord):
        """Mean deviation of current bond lengths from r0, grouped by type."""
        import scipy.spatial.distance as sdist
        real_dist = sdist.squareform(sdist.pdist(coord.getPositions(asNumpy=True)))
        for i, b in self.bonds.iterrows():
            self.bonds.at[i, 'xr'] = real_dist[b['i'], b['j']] * 10
        self.bonds['diff'] = ((self.bonds['xr'] - self.bonds['r0']) ** 2) ** .5
        return self.bonds.groupby('type').mean().sort_values('diff', ascending=False)

    def clearForces(self):
        """ Removes all forces from the system """
        # Always remove index 0: force indices shift down after each removal.
        for _ in range(self.system.getNumForces()):
            self.system.removeForce(0)

    def setForces(self, PlaneConstraint=False, CaMKII_Force='multigaussian', BundleConstraint=False):
        """ Adds the forces to the system.

        Parameters:
            PlaneConstraint: add a weak harmonic restraint pulling every
                particle towards the box mid-plane in z (a zero-energy dummy
                force is added otherwise, keeping the force count stable).
            CaMKII_Force: binding potential flavour: 'multigaussian',
                'doublegaussian' or 'singlegaussian'.
            BundleConstraint: restrain the two ends of each filament to stay
                laterally aligned.
        """
        self.clearForces()
        # Harmonic Bonds (r0/10 and K*4.184*100 — presumably A->nm and
        # kcal/mol/A^2 -> kJ/mol/nm^2 conversions; confirm against the ff)
        harmonic_bond = openmm.HarmonicBondForce()
        harmonic_bond.setUsesPeriodicBoundaryConditions(True)
        for i, b in self.bonds.iterrows():
            harmonic_bond.addBond(int(b['i']), int(b['j']), b['r0'] / 10., b['K'] * 4.184 * 100)
        self.system.addForce(harmonic_bond)
        # Harmonic angles
        harmonic_angle = openmm.HarmonicAngleForce()
        harmonic_angle.setUsesPeriodicBoundaryConditions(True)
        for i, b in self.angles.iterrows():
            harmonic_angle.addAngle(int(b['i']), int(b['j']), int(b['k']), b['t0'] / 180 * np.pi, b['K'] * 4.184)
        self.system.addForce(harmonic_angle)
        # Harmonic torsions
        harmonic_torsion = openmm.PeriodicTorsionForce()
        harmonic_torsion.setUsesPeriodicBoundaryConditions(True)
        for i, b in self.dihedrals.iterrows():
            harmonic_torsion.addTorsion(int(b['i']), int(b['j']), int(b['k']), int(b['l']), b['period'],
                                        b['t0'] / 180 * np.pi, b['K'] * 4.184)
        self.system.addForce(harmonic_torsion)
        # Repulsion: one truncated-LJ-like custom force per pair definition
        for i, r in self.repulsion_definition.iterrows():
            rf = openmm.CustomNonbondedForce('(epsilon{0}*((sigma{0}/r)^12-2*(sigma{0}/r)^6)+epsilon{0})*step(sigma{0}-r)'.format(i))
            rf.setNonbondedMethod(rf.CutoffPeriodic)
            rf.addGlobalParameter('epsilon{0}'.format(i), r['epsilon'])
            rf.addGlobalParameter('sigma{0}'.format(i), r['sigma'])
            rf.setCutoffDistance(10)
            rf.setUseLongRangeCorrection(False)
            for _, a in self.atom_list.iterrows():
                rf.addParticle()
            sel1 = self.atom_list[self.atom_list['atom_name'] == r['i']]
            sel2 = self.atom_list[self.atom_list['atom_name'] == r['j']]
            rf.addInteractionGroup(sel1.index, sel2.index)
            rf.createExclusionsFromBonds(self.bonds[['i', 'j']].values.tolist(), 2)
            self.system.addForce(rf)
        # Donors: the 12 CaMKII binding-site beads and the centroid Cc
        Cm = [self.atom_list[self.atom_list['atom_name'] == 'C01'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C02'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C03'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C04'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C05'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C06'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C07'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C08'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C09'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C10'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C11'].index,
              self.atom_list[self.atom_list['atom_name'] == 'C12'].index]
        Cc = self.atom_list[self.atom_list['atom_name'] == 'Cc'].index
        # Opposite-face pairs of binding sites
        comb = [(0, 6), (1, 7), (2, 8), (3, 9), (4, 10), (5, 11)]
        if CaMKII_Force == 'multigaussian':
            for i, j in comb:
                gaussian = openmm.CustomHbondForce("-g_eps*g1;"
                                                   "g1=(exp(-dd/w1)+exp(-dd/w2))/2;"
                                                   "dd=(dist1^2+dist2^2+dist3^2)/3;"
                                                   "dist1= distance(a1,d1);"
                                                   "dist2= min(distance(a2,d2),distance(a2,d3));"
                                                   "dist3= min(distance(a3,d2),distance(a3,d3));")
                gaussian.setNonbondedMethod(gaussian.CutoffPeriodic)
                gaussian.addGlobalParameter('g_eps', 100)  # Energy minimum
                gaussian.addGlobalParameter('w1', 5.0)  # well1 width
                gaussian.addGlobalParameter('w2', 0.5)  # well2 width
                gaussian.setCutoffDistance(12)
                # Aceptors
                A1 = self.atom_list[self.atom_list['atom_name'] == 'A5'].index
                A2 = self.atom_list[self.atom_list['atom_name'] == 'A6'].index
                A3 = self.atom_list[self.atom_list['atom_name'] == 'A7'].index
                assert len(A1) == len(A2) == len(A3)
                for a1, a2, a3 in zip(A1, A2, A3):
                    gaussian.addAcceptor(a1, a2, a3)
                # Donors
                for d1, d2, d3 in zip(Cc, Cm[i], Cm[j]):
                    gaussian.addDonor(d1, d2, d3)
                self.system.addForce(gaussian)
        elif CaMKII_Force == 'doublegaussian':
            for i, j in comb:
                gaussian = openmm.CustomHbondForce("-g_eps*g1;"
                                                   "g1=(exp(-dd/w1)+exp(-dd/w2))/2;"
                                                   "dd=(dist2^2+dist3^2)/2;"
                                                   "dist2= min(distance(a2,d2),distance(a2,d3));"
                                                   "dist3= min(distance(a3,d2),distance(a3,d3));")
                gaussian.setNonbondedMethod(gaussian.CutoffPeriodic)
                gaussian.addGlobalParameter('g_eps', 100)  # Energy minimum
                gaussian.addGlobalParameter('w1', 5.0)  # well1 width
                gaussian.addGlobalParameter('w2', 0.5)  # well2 width
                gaussian.setCutoffDistance(12)
                # Aceptors
                A1 = self.atom_list[self.atom_list['atom_name'] == 'A5'].index
                A2 = self.atom_list[self.atom_list['atom_name'] == 'A6'].index
                A3 = self.atom_list[self.atom_list['atom_name'] == 'A7'].index
                assert len(A1) == len(A2) == len(A3)
                for a1, a2, a3 in zip(A1, A2, A3):
                    gaussian.addAcceptor(a1, a2, a3)
                # Donors
                for d1, d2, d3 in zip(Cc, Cm[i], Cm[j]):
                    gaussian.addDonor(d1, d2, d3)
                self.system.addForce(gaussian)
        if CaMKII_Force == 'singlegaussian':
            gaussian = openmm.CustomHbondForce("-g_eps*g1;"
                                               "g1=(exp(-dd/w1)+exp(-dd/w2))/2;"
                                               "dd= distance(a1,d1);")
            gaussian.setNonbondedMethod(gaussian.CutoffPeriodic)
            gaussian.addGlobalParameter('g_eps', 100)  # Energy minimum
            gaussian.addGlobalParameter('w1', 5.0)  # well1 width
            gaussian.addGlobalParameter('w2', 0.5)  # well2 width
            gaussian.setCutoffDistance(12)
            # Aceptors
            A1 = self.atom_list[self.atom_list['atom_name'] == 'A5'].index
            A2 = self.atom_list[self.atom_list['atom_name'] == 'A6'].index
            A3 = self.atom_list[self.atom_list['atom_name'] == 'A7'].index
            assert len(A1) == len(A2) == len(A3)
            for a1, a2, a3 in zip(A1, A2, A3):
                gaussian.addAcceptor(a1, -1, -1)
            # Donors
            for d1 in Cc:
                gaussian.addDonor(d1, -1, -1)
            self.system.addForce(gaussian)
        if PlaneConstraint:
            print(self.periodic_box)
            midz = self.periodic_box[-1] / 2 / 10
            print(midz)
            plane_constraint = openmm.CustomExternalForce('kp*(z-mid)^2')
            plane_constraint.addGlobalParameter('mid', midz)
            plane_constraint.addGlobalParameter('kp', 0.001)
            for i in self.atom_list.index:
                plane_constraint.addParticle(i, [])
            self.system.addForce(plane_constraint)
        else:
            # Zero-energy dummy so the number of forces is the same
            plane_constraint = openmm.CustomExternalForce('kp*0')
            plane_constraint.addGlobalParameter('kp', 0.001)
            for i in self.atom_list.index:
                plane_constraint.addParticle(i, [])
            self.system.addForce(plane_constraint)
        if BundleConstraint:
            print('Bundle Constraint added')
            bundle_constraint = openmm.CustomCentroidBondForce(2, 'kp_bundle*(distance(g1,g2)^2-(x1-x2)^2)')
            bundle_constraint.addGlobalParameter('kp_bundle', 0.01)
            bundle_constraint.setUsesPeriodicBoundaryConditions(True)
            cc = 0
            for c, chain in self.atom_list.groupby('chain_index'):
                if 'ACT' in chain.residue_name.unique():
                    # Fixed: the original f-string was missing the braces
                    # around c and printed the literal text 'chain c'.
                    print(f'Setting up Bundle constraint for chain {c}')
                    bundle_constraint.addGroup(
                        list(chain[chain['atom_name'].isin(['A1', 'A2', 'A3', 'A4'])].index[:16]))
                    print(len(list(chain[chain['atom_name'].isin(['A1', 'A2', 'A3', 'A4'])].index[:16])))
                    bundle_constraint.addGroup(
                        list(chain[chain['atom_name'].isin(['A1', 'A2', 'A3', 'A4'])].index[-16:]))
                    print(len(list(chain[chain['atom_name'].isin(['A1', 'A2', 'A3', 'A4'])].index[-16:])))
                    print([cc, cc + 1])
                    bundle_constraint.addBond([cc, cc + 1])
                    cc += 2
            self.system.addForce(bundle_constraint)
            print(self.system.getNumForces())
        else:
            print('Bundled constrain not added')
            # Zero-energy dummy (note the leading 0*) keeping force parity
            bundle_constraint = openmm.CustomCentroidBondForce(2, '0*kp_bundle*(distance(g1,g2)^2-(x1-x2)^2)')
            bundle_constraint.addGlobalParameter('kp_bundle', 0.01)
            bundle_constraint.setUsesPeriodicBoundaryConditions(True)
            cc = 0
            for c, chain in self.atom_list.groupby('chain_index'):
                if 'ACT' in chain.residue_name.unique():
                    bundle_constraint.addGroup(
                        list(chain[chain['atom_name'].isin(['A1', 'A2', 'A3', 'A4'])].index[16:]))
                    bundle_constraint.addGroup(
                        list(chain[chain['atom_name'].isin(['A1', 'A2', 'A3', 'A4'])].index[-16:]))
                    bundle_constraint.addBond([cc, cc + 1])
                    cc += 2
            self.system.addForce(bundle_constraint)
            print(self.system.getNumForces())
# Tests
def test_basic_MD():
    """Smoke-test placeholder: creating a system should raise no error.

    TODO: instantiate a minimal system and run a short MD without exceptions.
    """
    pass
# Actin flexibility
def test_actin_persistence_length():
    """Determines the persistence length of a filament in a simulation.

    Placeholder: simulate a long filament, then measure the persistence
    length from the decay of orientational correlations along the contour.
    """
class HexGrid():
    """Hexagonal grid of tiles in cube coordinates (a, b, c with a+b+c=0).

    A grid of a given radius contains the origin plus `radius - 1` concentric
    hexagonal rings. `coords` maps the cube coordinates to 2D cartesian
    positions using the `vecs` basis.
    """
    # Cube-coordinate steps that walk the six edges of a hexagonal ring
    deltas = [[1, 0, -1], [0, 1, -1], [-1, 1, 0], [-1, 0, 1], [0, -1, 1], [1, -1, 0]]
    # Basis vectors mapping cube coordinates to 2D cartesian positions
    a0 = 0
    a1 = np.pi / 3
    a2 = -np.pi / 3
    vecs = np.array([[np.sqrt(3) * np.cos(a0), np.sin(a0) / np.sqrt(3)],
                     [np.sqrt(3) * np.cos(a1), np.sin(a1) / np.sqrt(3)],
                     [np.sqrt(3) * np.cos(a2), np.sin(a2) / np.sqrt(3)]])

    def __init__(self, radius):
        """Create all tiles within `radius` rings of the origin."""
        self.radius = radius
        self.tiles = {(0, 0, 0): "X"}
        for r in range(radius):
            # Start at the "south" corner of ring r and walk its six edges
            a = 0
            b = -r
            c = +r
            for j in range(6):
                num_of_hexas_in_edge = r
                for i in range(num_of_hexas_in_edge):
                    a = a + self.deltas[j][0]
                    b = b + self.deltas[j][1]
                    c = c + self.deltas[j][2]
                    self.tiles[a, b, c] = "X"

    def coords(self):
        """Return the (n_tiles, 2) cartesian coordinates of all tiles.

        Bug fix: the original implementation read the tiles from the
        module-level variable `hg` instead of `self`, so it failed (or gave
        wrong results) for any other instance.
        """
        tiles = np.array([t for t in self.tiles.keys()])
        coords = np.dot(tiles, self.vecs)
        return coords

    def show(self):
        """Print an ASCII rendering of the grid on a fixed 60x20 canvas."""
        canvas = []
        for y in range(20):
            canvas.append([])
            for x in range(60):
                canvas[y].append(".")
        for (a, b, c), tile in self.tiles.items():
            canvas[self.radius - 1 - b][a - c + (2 * (self.radius - 1))] = self.tiles[a, b, c]
        mapString = ""
        for y in range(len(canvas)):
            for x in range(len(canvas[y])):
                mapString += canvas[y][x]
            mapString += "\n"
        print(mapString)
if __name__=='__main__':
print(__name__)
###################################
#Setting Conditions for simulation#
###################################
parameters={"epsilon":[100],
"aligned":[False],
"actinLen":[500],
"layers":[2],
# "repetition":range(3),
"disorder":[.5,.75],
"temperature":[300],
"system2D":[False],
"frequency":[1000],
"run_time":[20],
"CaMKII_Force":['multigaussian','doublegaussian','singlegaussian'],
"simulation_platform":["OpenCL"]}
test_parameters={"simulation_platform":"CUDA",
"frequency":1000,
"run_time":1,
"CaMKII_Force":'doublegaussian'
}
job_id=0
if len(sys.argv)>1:
try:
job_id=int(sys.argv[1])
except TypeError:
pass
sjob=SlurmJobArray("ActinBundle", parameters, test_parameters,job_id)
sjob.print_parameters()
sjob.print_slurm_variables()
sjob.write_csv()
print ("name :", sjob.name)
##############
# Parameters #
##############
aligned=sjob["aligned"]
system2D=sjob["system2D"]
actinLen=sjob["actinLen"]
Sname=sjob.name
simulation_platform=sjob["simulation_platform"]
###################
# Build the model #
###################
#Set the points in the actin network
import string
import random
bound_actin_template=pandas.read_csv("CaMKII_bound_with_actin.csv",index_col=0)
def add_bound_actin(full_model, length=100,
twist=2.89942054, shift=-28.21600347,
rotation=np.array([[1.,0.,0.],
[0.,1.,0.],
[0.,0.,1.]]),
translation=np.array([5000,5000,5000])):
q = np.array([[np.cos(twist), -np.sin(twist), 0, 0],
[np.sin(twist), np.cos(twist), 0, 0],
[0, 0, 1, shift],
[0, 0, 0, 1]])
rot = q[:3, :3].T
trans = q[:3, 3]
#Create the points
point=bound_actin_template[['x','y','z']]
points = []
for i in range(length):
points += [point]
point = np.dot(point, rot) + trans
points = np.concatenate(points)
#Create the model
model = pandas.DataFrame(points, columns=['x', 'y', 'z'])
model["resid"] = [j+i for i in range(length) for j in bound_actin_template["resid"]]
model["name"] = [j for i in range(length) for j in bound_actin_template["name"]]
model["type"] = [j for i in range(length) for j in bound_actin_template["type"]]
model["resname"]=[j for i in range(length) for j in bound_actin_template["resname"]]
#Remove two binding points
model=model[~((model['resid']>length-1) & (model['name'].isin(['A5','A6','A7']+['Cc']+[f'C{i+1:02}' for i in range(12)]+[f'Cx{i+1}' for i in range(3)])))]
#Remove all CaMKII except resid 50
#model=model[~((model['resid']!=50) & (model['resname'].isin(['CAM'])))]
model.loc[model[model['resid']==model['resid'].max()].index,'resname']='ACD'
model.loc[model[model['resid']==model['resid'].min()].index,'resname']='ACD'
for chain_name in string.ascii_uppercase+string.ascii_lowercase:
#print(chain_name)
if chain_name in full_model['chainID'].values:
model.loc[model['resname'].isin(['ACT','ACD']),'chainID']=chain_name
continue
model.loc[model['resname'].isin(['ACT','ACD']),'chainID']=chain_name
break
for chain_name in string.ascii_uppercase+string.ascii_lowercase:
#print(chain_name,'A' in model['chainID'])
if chain_name in full_model['chainID'].values or chain_name in model['chainID'].values:
model.loc[model['resname'].isin(['CAM']),'chainID']=chain_name
continue
model.loc[model['resname'].isin(['CAM']),'chainID']=chain_name
break
#model["name"] = [j for i in range(1000) for j in ['A1', 'A2', 'A3', 'A4']]
#Center the model
model[['x', 'y', 'z']] -= model[['x', 'y', 'z']].mean()
#Move the model
model[['x', 'y', 'z']]=np.dot(model[['x', 'y', 'z']], rotation) + translation
full_model=pandas.concat([full_model,model])
full_model.index=range(len(full_model))
return full_model
# --- Assemble the full multi-filament model on a hexagonal grid ---
full_model=pandas.DataFrame(columns=['chainID'])
if sjob["layers"]==1:
    # Single layer: only the first two hex-grid positions are used
    hg=HexGrid(2)
    coords=hg.coords()[:2]
    d=59.499*2
else:
    hg=HexGrid(sjob["layers"])
    coords=hg.coords()
    d=59.499*2
# d: spacing between grid positions (presumably filament center-to-center
# distance in model units — TODO confirm)
for c in coords:
    # Random axial offset scaled by the "disorder" parameter
    height=(random.random()-0.5)*39*28.21600347*sjob["disorder"]
    print(c[0],c[1],height)
    full_model=add_bound_actin(full_model, length=sjob["actinLen"], translation=np.array([5000+d*c[0],5000+d*c[1],5000+height]))
#Remove the CaMKII that are not overlapping
# Pairwise distances between all 'Cc' (CaMKII center) beads; keep only CaMKII
# whose center is within 35 units of another center (i.e. overlapping pairs).
sel=full_model[full_model['name']=='Cc']
i=sel.index
d=sdist.pdist(sel[['x','y','z']])
d=pandas.Series(d,itertools.combinations(i,2))
sel2=sel.loc[[a for a,b in d[d<35].index]]
print(len(sel2))
# chain_resid: composite key (chainID + resid) used to match whole residues
full_model.loc[:,'chain_resid']=full_model[['chainID','resid',]].apply(lambda x:''.join([str(a) for a in x]),axis=1)
print(len(full_model[full_model['resname'].isin(['ACT','ACD'])]))
print(len(full_model[full_model['chain_resid'].isin(sel2[['chainID','resid',]].apply(lambda x:''.join([str(a) for a in x]),axis=1))]))
full_model=full_model[full_model['resname'].isin(['ACT','ACD']) |
full_model['chain_resid'].isin(sel2[['chainID','resid',]].apply(lambda x:''.join([str(a) for a in x]),axis=1))]
print(len(full_model))
#Remove the CaMKII that are colliding
# Same distance test, but now the *second* member of each close pair is removed
sel=full_model[full_model['name']=='Cc']
i=sel.index
d=sdist.pdist(sel[['x','y','z']])
d=pandas.Series(d,itertools.combinations(i,2))
sel2=sel.loc[[b for a,b in d[d<35].index]]
print(len(sel2))
full_model.loc[:,'chain_resid']=full_model[['chainID','resid',]].apply(lambda x:''.join([str(a) for a in x]),axis=1)
print(len(full_model[full_model['resname'].isin(['ACT','ACD'])]))
print(len(full_model[full_model['chain_resid'].isin(sel2[['chainID','resid',]].apply(lambda x:''.join([str(a) for a in x]),axis=1))]))
full_model=full_model[~full_model['chain_resid'].isin(sel2[['chainID','resid',]].apply(lambda x:''.join([str(a) for a in x]),axis=1))]
# Uniform placeholder particle properties before writing topology files
full_model['mass']=1
full_model['molecule']=1
full_model['q']=0
ss = SystemData(full_model.sort_values(['chainID', 'resid', 'name']))
ss.write_data()
ss.write_pdb(f'{Sname}.pdb')
ss.write_gro(f'{Sname}.gro')
ss.print_coeff()
##############
# Simulation #
##############
import sys
sys.path.insert(0,'.')
import openmm
import openmm.app
from simtk.unit import *
import time
from sys import stdout
time.ctime()
platform = openmm.Platform.getPlatformByName(simulation_platform)
#Create system
# Rebuild the coarse-grained system from the PDB written above
s=CoarseActin.from_topology(f'{Sname}.pdb',)
print("System initialized")
s.setForces(BundleConstraint=aligned,PlaneConstraint=system2D,
            CaMKII_Force=sjob['CaMKII_Force'])
top=openmm.app.PDBFile(f'{Sname}.pdb')
coord=openmm.app.GromacsGroFile(f'{Sname}.gro')
#Set up simulation
temperature=sjob["temperature"]*kelvin
# Langevin integrator: very small friction (1e-4/ps), 1 ps timestep
integrator = openmm.LangevinIntegrator(temperature, .0001/picosecond, 1*picoseconds)
simulation = openmm.app.Simulation(top.topology, s.system, integrator,platform)
simulation.context.setPositions(coord.positions)
#Modify parameters
simulation.context.setParameter("g_eps", sjob["epsilon"])
frequency=sjob["frequency"]
#Add reporters
simulation.reporters.append(openmm.app.DCDReporter(f'{Sname}.dcd', frequency),)
simulation.reporters.append(openmm.app.StateDataReporter(stdout, frequency, step=True,time=True,potentialEnergy=True, temperature=True,separator='\t',))
simulation.reporters.append(openmm.app.StateDataReporter(f'{Sname}.log', frequency, step=True,time=True,totalEnergy=True, kineticEnergy=True,potentialEnergy=True, temperature=True))
#Print initial energy
state = simulation.context.getState(getEnergy=True)
energy=state.getPotentialEnergy().value_in_unit(kilojoule_per_mole)
print (f'Initial energy: {energy} KJ/mol')
#Run
simulation.minimizeEnergy()
# NOTE(review): temperature is already in kelvin units; multiplying by kelvin
# again here looks suspicious — confirm against OpenMM unit handling.
simulation.context.setVelocitiesToTemperature(temperature*kelvin)
time0=time.ctime()
time_0=time.time()
#simulation.step(100000)
#Turn off nematic parameter
#simulation.context.setParameter('kp_bundle',0)
simulation.runForClockTime(sjob["run_time"])
#Save checkpoint
chk=f'{Sname}.chk'
simulation.saveCheckpoint(chk)
#simulation.step(100000000)
| 43.302483 | 306 | 0.52451 |
c6fd86b37c60429ab9c00cbfaf2c978192a8bd7e
| 416 |
py
|
Python
|
backend/fai_rest_todo/wsgi.py
|
vanessa/fai-rest-todo
|
c0f564a7dad71a3ad3356d561650437a86d52cd4
|
[
"MIT"
] | 2 |
2020-10-01T14:55:04.000Z
|
2021-08-14T05:38:08.000Z
|
backend/fai_rest_todo/wsgi.py
|
vanessa/fai-rest-todo
|
c0f564a7dad71a3ad3356d561650437a86d52cd4
|
[
"MIT"
] | 11 |
2020-02-12T18:47:26.000Z
|
2022-02-26T22:52:21.000Z
|
backend/fai_rest_todo/wsgi.py
|
vanessa/fai-rest-todo
|
c0f564a7dad71a3ad3356d561650437a86d52cd4
|
[
"MIT"
] | null | null | null |
"""
WSGI config for fai_rest_todo project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fai_rest_todo.settings.production")
application = get_wsgi_application()
| 23.111111 | 84 | 0.793269 |
f5bf707793a783a272ff1953c8262bad98369208
| 3,007 |
py
|
Python
|
trunk/bin/agntestheader.py
|
svalenti/agnkey
|
d44831a8ae72de0b2692da047994f67545185503
|
[
"MIT"
] | null | null | null |
trunk/bin/agntestheader.py
|
svalenti/agnkey
|
d44831a8ae72de0b2692da047994f67545185503
|
[
"MIT"
] | null | null | null |
trunk/bin/agntestheader.py
|
svalenti/agnkey
|
d44831a8ae72de0b2692da047994f67545185503
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import agnkey
import os,glob,shutil,sys
if len(sys.argv)<=1:
listfits=glob.glob('*fits')
for img in listfits:
print img
img=raw_input('Which image do you want to test ['+str(listfits[0])+'] ? ')
if not img: img=listfits[0]
else:
img=sys.argv[1]
hdr=agnkey.util.readhdr(img)
_imagetype=agnkey.util.readkey3(hdr,'type')
_object=agnkey.util.readkey3(hdr,'object')
_JD=agnkey.util.readkey3(hdr,'JD')
_airmass=agnkey.util.readkey3(hdr,'airmass')
_filter=agnkey.util.readkey3(hdr,'filter')
_grism=agnkey.util.readkey3(hdr,'grism')
_exptime=agnkey.util.readkey3(hdr,'exptime')
_date=agnkey.util.readkey3(hdr,'date-obs')
_gain=agnkey.util.readkey3(hdr,'gain')
_ron=agnkey.util.readkey3(hdr,'ron')
_lampid=agnkey.util.readkey3(hdr,'lampid')
_RA=agnkey.util.readkey3(hdr,'RA')
_DEC=agnkey.util.readkey3(hdr,'DEC')
_ccdmax=agnkey.util.readkey3(hdr,'datamax')
_ccdmin=agnkey.util.readkey3(hdr,'datamin')
_cenwav=agnkey.util.readkey3(hdr,'cenw')
_slitw=agnkey.util.readkey3(hdr,'slit')
_UT=agnkey.util.readkey3(hdr,'ut')
_xdimen=agnkey.util.readkey3(hdr,'NAXIS1')
_ydimen=agnkey.util.readkey3(hdr,'NAXIS2')
_instrument=agnkey.util.readkey3(hdr,'instrume')
_obsmode=agnkey.util.readkey3(hdr,'obsmode')
if not _gain: _gain='########'
if not _ron: _ron='########'
if not _instrument: _instrument=='########'
if not _ydimen: _ydimen='########'
if not _xdimen: _xdimen='########'
if not _filter: _filter='########'
if not _RA: _RA='########'
if not _grism: _grism='########'
if not _slitw: _slitw='########'
if not _lampid: _lampid='#######'
if not _date: _date='#######'
if not _cenwav: _cenwav='#######'
if not _UT: _UT='#######'
if not _ccdmin:_ccdmin='#######'
if not _ccdmax:_ccdmax='#######'
if not _obsmode:_obsmode='#######'
if not _object:_object='#######'
_system='#######'
print '####################################################################'
print 'IMG OBJECT IMAGETYPE EXPTIME FILTER GRISM '
print str(img)+'\t'+str(_object)+'\t'+str(_imagetype)+'\t'+str(_exptime)+'\t'+str(_filter)+'\t'+str(_grism)
print '####################################################################'
print 'AIRMASS JD DATE XDIM YDIM GAIN RON '
print str(_airmass)+'\t'+str(_JD)+'\t'+str(_date)+'\t'+str(_xdimen)+'\t'+str(_ydimen)+'\t'+str(_gain)+'\t'+str(_ron)
print '####################################################################'
print 'LAMP_ID slitw RA DEC CCDMIN CCDMAX CENWAV '
print str(_lampid)+'\t'+str(_slitw)+'\t'+str(_RA)+'\t'+str(_DEC)+'\t'+str(_ccdmin)+'\t'+str(_ccdmax)+'\t'+str(_cenwav)
print '####################################################################'
print ' UT xdimension ydimension instrument SYSTEM OBSMODE '
print str(_UT)+'\t'+str(_xdimen)+'\t'+str(_ydimen)+'\t'+str(_instrument)+'\t'+str(_system)+'\t'+str(_obsmode)
print '####################################################################'
| 40.635135 | 118 | 0.567343 |
2403dd54c5ead196478b7e7b045fdd2f8232e8df
| 1,427 |
py
|
Python
|
BluePrint/apps/forms.py
|
CodeMath/jinrockets
|
6bb26e9ca66ba951ab2d34bf1ffe79b2c605963f
|
[
"MIT"
] | null | null | null |
BluePrint/apps/forms.py
|
CodeMath/jinrockets
|
6bb26e9ca66ba951ab2d34bf1ffe79b2c605963f
|
[
"MIT"
] | null | null | null |
BluePrint/apps/forms.py
|
CodeMath/jinrockets
|
6bb26e9ca66ba951ab2d34bf1ffe79b2c605963f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from flask.ext.wtf import Form
from wtforms import StringField, PasswordField
from wtforms import validators
from wtforms.fields.html5 import EmailField
class JoinForm(Form):
    # Sign-up form: e-mail plus password with confirmation.
    # NOTE(review): lowercase ``validators.data_required`` / ``length`` are
    # deprecated aliases in newer WTForms (DataRequired / Length) — confirm
    # the pinned WTForms version supports them.
    email = EmailField(
        u'이메일',
        [validators.data_required(u'이메일을 입력하시기 바랍니다.'), validators.Email(u'이메일 형식이 아닙니다.')],
        description={'placeholder': u'Enter your e-mail'}
    )
    # EqualTo links this field to confirm_password below.
    password = PasswordField(
        u'패스워드',
        [validators.data_required(u'패스워드를 입력하시기 바랍니다.'),
         validators.EqualTo('confirm_password', message=u'패스워드가 일치하지 않습니다.')],
        description={'placeholder': u'enter your password.'}
    )
    confirm_password = PasswordField(
        u'패스워드 확인',
        [validators.data_required(u'패스워드를 한번 더 입력하세요.')],
        description={'placeholder': u'confirm your password.'}
    )
class MajorForm(Form):
    # Major (degree program) review form: name, one-line comment, and
    # whether a double major is taken.
    major=StringField(
        u'학과명',
        [validators.data_required(u'학과명을 쓰세요.'),
         validators.length(max=15)],
        description={'placeholder':u'enter your major'}
    )
    comments=StringField(
        u'한줄 평가',
        [validators.data_required(u'한 줄 평가하기!'),
         validators.length(max=45)],
        description={'placeholder': u'write a comment with length 45'}
    )
    extra_major=StringField(
        u'복전유무',
        [validators.data_required(u'복전유무')],
        description={'placeholder': u'confirm your extra major'}
    )
65d57a58c273f9c8f2295254e67148619aa7c15e
| 3,806 |
py
|
Python
|
argo/workflows/client/models/v1alpha1_metrics.py
|
argentumcode/argo-client-python
|
31c1519056379d3f046d4b522f37af87243fdbb4
|
[
"Apache-2.0"
] | null | null | null |
argo/workflows/client/models/v1alpha1_metrics.py
|
argentumcode/argo-client-python
|
31c1519056379d3f046d4b522f37af87243fdbb4
|
[
"Apache-2.0"
] | null | null | null |
argo/workflows/client/models/v1alpha1_metrics.py
|
argentumcode/argo-client-python
|
31c1519056379d3f046d4b522f37af87243fdbb4
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Argo Server API
You can get examples of requests and responses by using the CLI with `--gloglevel=9`, e.g. `argo list --gloglevel=9` # noqa: E501
The version of the OpenAPI document: v3.0.4
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from argo.workflows.client.configuration import Configuration
# Generated model: regenerate from the OpenAPI spec instead of editing by hand.
class V1alpha1Metrics(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'prometheus': 'list[V1alpha1Prometheus]'
    }
    attribute_map = {
        'prometheus': 'prometheus'
    }
    def __init__(self, prometheus=None, local_vars_configuration=None):  # noqa: E501
        """V1alpha1Metrics - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        self._prometheus = None
        self.discriminator = None
        # Assignment goes through the property setter, which validates non-None.
        self.prometheus = prometheus
    @property
    def prometheus(self):
        """Gets the prometheus of this V1alpha1Metrics.  # noqa: E501
        Prometheus is a list of prometheus metrics to be emitted  # noqa: E501
        :return: The prometheus of this V1alpha1Metrics.  # noqa: E501
        :rtype: list[V1alpha1Prometheus]
        """
        return self._prometheus
    @prometheus.setter
    def prometheus(self, prometheus):
        """Sets the prometheus of this V1alpha1Metrics.
        Prometheus is a list of prometheus metrics to be emitted  # noqa: E501
        :param prometheus: The prometheus of this V1alpha1Metrics.  # noqa: E501
        :type: list[V1alpha1Prometheus]
        """
        # Required field: reject None when client-side validation is on.
        if self.local_vars_configuration.client_side_validation and prometheus is None:  # noqa: E501
            raise ValueError("Invalid value for `prometheus`, must not be `None`")  # noqa: E501
        self._prometheus = prometheus
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, V1alpha1Metrics):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1alpha1Metrics):
            return True
        return self.to_dict() != other.to_dict()
57f53479b87409e1dad8be51f5ac33511aae7590
| 225 |
py
|
Python
|
modules/mixers/vdn.py
|
gingkg/pymarl
|
b5a72b3ab6c89b4a492f5853c02c1ce3f9189ea4
|
[
"MIT"
] | 3 |
2021-04-11T07:34:11.000Z
|
2022-03-23T08:43:37.000Z
|
modules/mixers/vdn.py
|
gingkg/pymarl
|
b5a72b3ab6c89b4a492f5853c02c1ce3f9189ea4
|
[
"MIT"
] | null | null | null |
modules/mixers/vdn.py
|
gingkg/pymarl
|
b5a72b3ab6c89b4a492f5853c02c1ce3f9189ea4
|
[
"MIT"
] | 1 |
2021-05-28T11:26:20.000Z
|
2021-05-28T11:26:20.000Z
|
import torch
import torch.nn as nn
class VDNMixer(nn.Module):
    """Value-Decomposition Network mixer: the joint Q-value is simply the
    sum of the individual agents' Q-values."""
    def __init__(self):
        super(VDNMixer, self).__init__()
    def forward(self, agent_qs, batch):
        # Reduce over the agent dimension (dim 2), keeping it as size 1.
        return agent_qs.sum(dim=2, keepdim=True)
| 22.5 | 55 | 0.68 |
a921b681dec82a3adbc461fbf329fd0f05113ec1
| 20,375 |
py
|
Python
|
util.py
|
siralbert/tdapp
|
d8829d10c975c106f7cfd997f1bc729056cef16e
|
[
"MIT"
] | null | null | null |
util.py
|
siralbert/tdapp
|
d8829d10c975c106f7cfd997f1bc729056cef16e
|
[
"MIT"
] | null | null | null |
util.py
|
siralbert/tdapp
|
d8829d10c975c106f7cfd997f1bc729056cef16e
|
[
"MIT"
] | 1 |
2021-06-21T16:58:44.000Z
|
2021-06-21T16:58:44.000Z
|
"""Utilities."""
import asyncio
import logging
import math
import signal
import sys
import time
from dataclasses import fields, is_dataclass
from datetime import date, datetime, time as time_, timedelta, timezone
from typing import AsyncIterator, Awaitable, Callable, Iterator, List, Union
import eventkit as ev
globalErrorEvent = ev.Event()
"""
Event to emit global exceptions.
"""
# Unix epoch as a timezone-aware UTC datetime.
EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc)
# Sentinel values marking "unset" numeric fields (presumably matching the
# broker API's UNSET_INTEGER/UNSET_DOUBLE conventions — TODO confirm).
UNSET_INTEGER = 2 ** 31 - 1
UNSET_DOUBLE = sys.float_info.max
def debug(**kwargs):
    """Print the keyword arguments as ``key: value`` pairs on one line.

    The special keyword ``repeat=True`` is not printed itself; when it is the
    last keyword processed, the line ends with a carriage return instead of a
    newline so the next call overwrites it (progress-bar style).
    """
    #print("Progress: {}%".format(var), end=" ", flush=True)
    rp = "\n"  # fixed: default line ending; was unbound (NameError) for debug()
    for key, value in kwargs.items():
        if key == 'repeat' and value == True:
            rp = "\r"
        else:
            rp = "\n"
        if key != 'repeat':
            print("%s: %s" %(str(key), str(value)), end=" ")
    print("",end=rp, flush=False);
# return ToS option symbol Ex] .ADP191206C250 instead of (ADP_120619C250).
def tdapi_to_tos(option_symbol):
    """Convert a TD Ameritrade option symbol to ThinkOrSwim format.

    Example: ``ADP_120619C250`` -> ``.ADP191206C250`` (underscore dropped,
    leading dot added, MMDDYY expiry reordered to YYMMDD).
    """
    import re  # fixed: ``re`` is not imported at module level in this file
    tos_option_symbol = re.sub(r'_', '', option_symbol)
    tos_option_symbol = '.' + tos_option_symbol
    # Move the two-digit year in front of MMDD: MMDDYY(C|P) -> YYMMDD(C|P)
    tos_option_symbol = re.sub(r'(\d{4})(\d\d)(C|P)', r'\2\1\3', tos_option_symbol)
    return(tos_option_symbol)
# return IB contract from tdapi symbol
def tdapi_to_IB(option_symbol):
    """Split a TD Ameritrade option symbol into IB contract fields.

    Example: ``ADP_120619C250`` -> ``('ADP', '20191206', '250', 'C')``
    i.e. (ticker, expiry YYYYMMDD, strike, right).

    NOTE(review): the split pattern only matches a trailing ``C<strike>``,
    so put options ('P') do not appear to be handled — confirm.
    """
    import re  # fixed: ``re`` is not imported at module level in this file
    # parts: [ticker, '_', None, MMDDYY, 'C<strike>', 'C<strike>', '']
    parts = re.split(r'(_|(C\d+\.?\d$))', option_symbol)
    strike_re = re.compile(r'(\d+\.?\d)$')
    strike = re.search(strike_re, option_symbol).group(1)
    # Convert the MMDDYY expiry to IB's YYYYMMDD inline: the module-level
    # helper dateTD_to_dateIB mistakenly used ``datetime.datetime`` although
    # only the ``datetime`` class is imported here.
    dateIB = datetime.strptime(parts[3], "%m%d%y").strftime("%Y%m%d")
    return(parts[0], dateIB, strike, parts[4][0])
def getFriday_(dt=None):
    """Return the Friday of *dt*'s week, or — when *dt* is itself a Friday —
    the following week's Friday.

    Args:
        dt: Reference date; defaults to today.  (Fixed: the original default
            ``dt=date.today()`` was evaluated once at import time, so a
            long-running process would keep using a stale date.)
    """
    if dt is None:
        dt = date.today()
    # weekday(): 0 = Monday, ..., 4 = Friday
    days_ahead = (4 - dt.weekday()) % 7
    if days_ahead == 0:
        # Already Friday: roll over to next week's Friday
        days_ahead = 7
    return dt + timedelta(days=days_ahead)  # YYYY-MM-DD format
def getFriday(dt=None):
    """Return the Friday of *dt*'s week, or the next week's Friday when *dt*
    is itself a Friday.

    NOTE(review): this duplicates :func:`getFriday_`; callers appear to pick
    one or the other around an expiry-date off-by-one — consider merging.

    Args:
        dt: Reference date; defaults to today.  (Fixed: the original default
            ``dt=date.today()`` was evaluated once at import time.)
    """
    if dt is None:
        dt = date.today()
    # weekday(): 0 = Monday, ..., 4 = Friday
    days_ahead = (4 - dt.weekday()) % 7
    if days_ahead == 0:
        days_ahead = 7
    return dt + timedelta(days=days_ahead)  # YYYY-MM-DD format
def getOptionSymbol(symbol, right='C'):
    # Pick a deep-in-the-money (delta > 0.95) option symbol for the coming
    # Friday expiry, rolling forward week by week until one exists.
    # NOTE(review): ``c`` is not defined in this module (presumably an API
    # client with an ``optionsDF`` method — confirm); ``re`` is not imported
    # at module level; and ``datetime.datetime`` fails here because only the
    # ``datetime`` class is imported (``from datetime import datetime``).
    # The ``right`` parameter is currently unused.
    df=c.optionsDF(symbol)
    df['expirationDate'] = df.expirationDate.apply(lambda x: datetime.datetime.strftime(x,"%m%d%y"))
    df2=df[['symbol','delta','expirationDate']]
    print()
    # Fix for the +1 expiry_date bug
    r = re.compile(r'(\d+)(C|P)')
    # 2019-12-14 != 121319
    if df2.iloc[0]['expirationDate'] != re.search(r,df2.iloc[0]['symbol']).group(1):
        expiry_date=datetime.datetime.strftime(getFriday_() + timedelta(days=1),"%m%d%y")
        # BUG doesn't rollover to next Friday if sold on same day
        print("getFriday_(): " + expiry_date)
    else:
        expiry_date=datetime.datetime.strftime(getFriday(),"%m%d%y")
        print("getFriday(): " + expiry_date)
    df2.loc[:, ('delta')]=df2['delta'].astype(float)
    # filter according to specific expirationDate # BUG use 0.9 for some option chains
    row = df2[(df2['expirationDate'] == expiry_date) & (df2['delta'] > 0.95)].tail(1)
    if row.empty:
        # Roll the expiry forward a week at a time until a matching row exists.
        while row.empty:
            expiry_date = datetime.datetime.strptime(expiry_date, "%m%d%y")
            expiry_date = expiry_date + timedelta(days = 7)
            expiry_date = datetime.datetime.strftime(expiry_date,"%m%d%y")
            row = df2[(df2['expirationDate'] == expiry_date) & (df2['delta'] > 0.95)].tail(1)
    return str(row.iloc[0]['symbol'])
def dateTD_to_dateIB(dateTD):
    """Convert a TD Ameritrade expiry string (MMDDYY) to IB format (YYYYMMDD).

    Fixed: the original used ``datetime.datetime.strptime``, which raises
    AttributeError under this module's ``from datetime import datetime``
    import — ``datetime`` is already the class here.
    """
    return datetime.strptime(dateTD, "%m%d%y").strftime("%Y%m%d")
def placeIBTrade(ticker, expiry_date, strike, right, limit, exchange='SMART'):
    # Connect to a local TWS/IB Gateway, build an option contract and submit
    # a limit order after an interactive confirmation prompt.
    # NOTE(review): relies on names not defined in this module — ``IB``,
    # ``Option``, ``LimitOrder`` (presumably ib_insync), ``args`` (an argparse
    # namespace) and ``Fore``/``Style`` (colorama) — confirm the intended
    # imports; as written this raises NameError.
    # TODO: check if limit entered has two decimal places
    print(ticker + '\t' + expiry_date + '\t' + strike + '\t' + right + '\t' + str(limit) + '\t' + exchange + '\n')
    ib = IB()
    ib.connect('127.0.0.1', 7497, clientId=8)
    contract = Option(ticker, expiry_date, strike, right, exchange)
    ib.qualifyContracts(contract)
    """
    print("I have {} students: {} and {}".format(var1,var2,var3))
    To access arguments using position
    print("I have {0} students: {1} and {2}".format(var1,var2,var3))
    You can change the positional argument sequence and accordingly it would take the values from str.format()
    print("I have {2} students: {1} and {0}".format(var3,var2,var1))
    """
    # Optional price adjustment supplied on the command line.
    if args.adjust:
        limit = limit + float(args.adjust)
        limit = round(limit,2)
    print(f"Revised Price: {Fore.YELLOW}" + str(limit) + f"{Style.RESET_ALL}")
    # NOTE(review): bare ``print`` below is a no-op expression, not a call.
    print
    limitOrder = LimitOrder(args.order_type, args.size, limit)
    # order.conditions = [TimeCondition(isMore=True, time='20180501 13:35:00')] # sample time condition
    # Will this work for the underlying price?
    # order.conditions = [PriceCondition(isLess=True, price='20180501 13:35:00')]
    input("Press Enter to submit order...")
    limitTrade = ib.placeOrder(contract, limitOrder)
    print("\nAbove order entered into queue . . .\n")
    # TODO: Check Order Status
    ib.disconnect()
def df(objs, labels: List[str] = None):
    """
    Create pandas DataFrame from the sequence of same-type objects.

    Args:
        labels: If supplied, retain only the given labels and drop the rest.

    Returns:
        A DataFrame, or ``None`` when ``objs`` is empty.
    """
    import pandas as pd
    from .objects import DynamicObject
    if objs:
        objs = list(objs)
        obj = objs[0]
        # Choose the record extraction strategy from the first element's type.
        if is_dataclass(obj):
            df = pd.DataFrame.from_records(dataclassAsTuple(o) for o in objs)
            df.columns = [field.name for field in fields(obj)]
        elif isinstance(obj, DynamicObject):
            df = pd.DataFrame.from_records(o.__dict__ for o in objs)
        else:
            df = pd.DataFrame.from_records(objs)
        if isinstance(obj, tuple):
            _fields = getattr(obj, '_fields', None)
            if _fields:
                # assume it's a namedtuple
                df.columns = _fields
    else:
        df = None
    # fixed: guard against df is None — previously df([], labels=[...])
    # raised TypeError when iterating the columns of None
    if labels and df is not None:
        exclude = [label for label in df if label not in labels]
        df = df.drop(exclude, axis=1)
    return df
def dataclassAsDict(obj) -> dict:
    """
    Return dataclass values as ``dict``.
    This is a non-recursive variant of ``dataclasses.asdict``.

    Raises:
        TypeError: If ``obj`` is not a dataclass instance.
    """
    if not is_dataclass(obj):
        raise TypeError(f'Object {obj} is not a dataclass')
    result = {}
    for f in fields(obj):
        result[f.name] = getattr(obj, f.name)
    return result
def dataclassAsTuple(obj) -> tuple:
    """
    Return dataclass values as ``tuple``.
    This is a non-recursive variant of ``dataclasses.astuple``.

    Raises:
        TypeError: If ``obj`` is not a dataclass instance.
    """
    if not is_dataclass(obj):
        raise TypeError(f'Object {obj} is not a dataclass')
    values = [getattr(obj, f.name) for f in fields(obj)]
    return tuple(values)
def dataclassNonDefaults(obj) -> dict:
    """
    For a ``dataclass`` instance get the fields that are different from the
    default values and return as ``dict``.

    NaN values and empty lists are treated as defaults and excluded.

    Raises:
        TypeError: If ``obj`` is not a dataclass instance.
    """
    if not is_dataclass(obj):
        raise TypeError(f'Object {obj} is not a dataclass')
    nonDefaults = {}
    for f in fields(obj):
        value = getattr(obj, f.name)
        if value == f.default:
            continue
        if value != value:
            # NaN is never equal to itself; skip it like a default
            continue
        if isinstance(value, list) and value == []:
            continue
        nonDefaults[f.name] = value
    return nonDefaults
def dataclassUpdate(obj, *srcObjs, **kwargs) -> object:
    """
    Update fields of the given ``dataclass`` object from zero or more
    ``dataclass`` source objects and/or from keyword arguments.

    Returns:
        The (mutated) ``obj`` itself, for chaining.

    Raises:
        TypeError: If ``obj`` or any source object is not a dataclass.
    """
    if not is_dataclass(obj):
        raise TypeError(f'Object {obj} is not a dataclass')
    for src in srcObjs:
        if not is_dataclass(src):
            raise TypeError(f'Object {src} is not a dataclass')
        for f in fields(src):
            obj.__dict__[f.name] = getattr(src, f.name)
    obj.__dict__.update(**kwargs)
    return obj
def dataclassRepr(obj) -> str:
    """
    Provide a culled representation of the given ``dataclass`` instance,
    showing only the fields with a non-default value.
    """
    nonDefaults = dataclassNonDefaults(obj)
    args = ', '.join(f'{name}={value!r}' for name, value in nonDefaults.items())
    return f'{obj.__class__.__qualname__}({args})'
def isnamedtupleinstance(x):
    """From https://stackoverflow.com/a/2166841/6067848"""
    # A namedtuple subclasses exactly tuple and carries a _fields tuple of
    # field-name strings.
    bases = type(x).__bases__
    if len(bases) != 1 or bases[0] is not tuple:
        return False
    fieldNames = getattr(type(x), '_fields', None)
    if not isinstance(fieldNames, tuple):
        return False
    for name in fieldNames:
        if type(name) is not str:
            return False
    return True
def tree(obj):
    """
    Convert object to a tree of lists, dicts and simple values.
    The result can be serialized to JSON.
    """
    def _isNamedTuple(x):
        # Inlined namedtuple detection (same logic as isnamedtupleinstance).
        t = type(x)
        return (
            len(t.__bases__) == 1
            and t.__bases__[0] is tuple
            and isinstance(getattr(t, '_fields', None), tuple)
            and all(type(n) is str for n in t._fields))
    if isinstance(obj, (bool, int, float, str, bytes)):
        return obj
    if isinstance(obj, (date, time_)):
        return obj.isoformat()
    if isinstance(obj, dict):
        return {k: tree(v) for k, v in obj.items()}
    if _isNamedTuple(obj):
        return {f: tree(getattr(obj, f)) for f in obj._fields}
    if isinstance(obj, (list, tuple, set)):
        return [tree(i) for i in obj]
    if is_dataclass(obj):
        return {obj.__class__.__qualname__: tree(dataclassNonDefaults(obj))}
    return str(obj)
def barplot(bars, title='', upColor='blue', downColor='red'):
    """
    Create candlestick plot for the given bars. The bars can be given as
    a DataFrame or as a list of bar objects.
    """
    import pandas as pd
    import matplotlib.pyplot as plt
    from matplotlib.lines import Line2D
    from matplotlib.patches import Rectangle
    # Normalize input to a list of (open, high, low, close) tuples.
    if isinstance(bars, pd.DataFrame):
        ohlcTups = [
            tuple(v) for v in bars[['open', 'high', 'low', 'close']].values]
    elif bars and hasattr(bars[0], 'open_'):
        ohlcTups = [(b.open_, b.high, b.low, b.close) for b in bars]
    else:
        ohlcTups = [(b.open, b.high, b.low, b.close) for b in bars]
    fig, ax = plt.subplots()
    ax.set_title(title)
    ax.grid(True)
    fig.set_size_inches(10, 6)
    for n, (open_, high, low, close) in enumerate(ohlcTups):
        # Up-bars and down-bars get different colors; the body spans
        # open..close, the wicks extend to low/high.
        if close >= open_:
            color = upColor
            bodyHi, bodyLo = close, open_
        else:
            color = downColor
            bodyHi, bodyLo = open_, close
        # Lower wick
        line = Line2D(
            xdata=(n, n),
            ydata=(low, bodyLo),
            color=color,
            linewidth=1)
        ax.add_line(line)
        # Upper wick
        line = Line2D(
            xdata=(n, n),
            ydata=(high, bodyHi),
            color=color,
            linewidth=1)
        ax.add_line(line)
        # Candle body
        rect = Rectangle(
            xy=(n - 0.3, bodyLo),
            width=0.6,
            height=bodyHi - bodyLo,
            edgecolor=color,
            facecolor=color,
            alpha=0.4,
            antialiased=True
        )
        ax.add_patch(rect)
    ax.autoscale_view()
    return fig
def allowCtrlC():
    """Allow Control-C to end program."""
    # Restore the default SIGINT handler (raises KeyboardInterrupt).
    signal.signal(signal.SIGINT, signal.SIG_DFL)
def logToFile(path, level=logging.INFO):
    """Create a log handler that logs to the given file."""
    # Configures the *root* logger, so all module loggers propagate here.
    logger = logging.getLogger()
    logger.setLevel(level)
    formatter = logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s %(message)s')
    handler = logging.FileHandler(path)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
def logToConsole(level=logging.INFO):
    """Create a log handler that logs to the console."""
    logger = logging.getLogger()
    logger.setLevel(level)
    formatter = logging.Formatter(
        '%(asctime)s %(name)s %(levelname)s %(message)s')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    # Drop any existing StreamHandlers first so repeated calls don't
    # produce duplicate console lines.
    logger.handlers = [
        h for h in logger.handlers
        if type(h) is not logging.StreamHandler]
    logger.addHandler(handler)
def isNan(x: float) -> bool:
    """Not a number test."""
    # NaN is the only float value that compares unequal to itself.
    return not (x == x)
def formatSI(n: float) -> str:
    """Format the integer or float n to 3 significant digits + SI prefix."""
    s = ''
    if n < 0:
        n = -n
        s += '-'
    if type(n) is int and n < 1000:
        # NOTE(review): this assignment overwrites the '-' sign collected
        # above for small negative ints — confirm whether that is intended.
        s = str(n) + ' '
    elif n < 1e-22:
        # Below the smallest supported prefix (yocto): show as zero.
        s = '0.00 '
    else:
        assert n < 9.99e26
        log = int(math.floor(math.log10(n)))
        # i selects the SI prefix (multiple of 3); j the digits before the point.
        i, j = divmod(log, 3)
        for _try in range(2):
            templ = '%.{}f'.format(2 - j)
            val = templ % (n * 10 ** (-3 * i))
            if val != '1000':
                break
            # Rounding overflowed into the next prefix; retry once.
            i += 1
            j = 0
        s += val + ' '
        if i != 0:
            # Prefix table centered on '' at index 8: yocto..yotta.
            s += 'yzafpnum kMGTPEZY'[i + 8]
    return s
class timeit:
    """Context manager for timing."""
    def __init__(self, title='Run'):
        # title: label printed in front of the elapsed time.
        self.title = title
    def __enter__(self):
        self.t0 = time.time()
    def __exit__(self, *_args):
        # Elapsed wall-clock time, SI-formatted (e.g. '1.23 m' for ms).
        print(self.title + ' took ' + formatSI(time.time() - self.t0) + 's')
def run(*awaitables: Awaitable, timeout: float = None):
    """
    By default run the event loop forever.
    When awaitables (like Tasks, Futures or coroutines) are given then
    run the event loop until each has completed and return their results.
    An optional timeout (in seconds) can be given that will raise
    asyncio.TimeoutError if the awaitables are not ready within the
    timeout period.
    """
    loop = asyncio.get_event_loop()
    if not awaitables:
        if loop.is_running():
            return
        loop.run_forever()
        result = None
        # After run_forever returns, cancel whatever tasks are still pending.
        all_tasks = (
            asyncio.all_tasks(loop) # type: ignore
            if sys.version_info >= (3, 7) else asyncio.Task.all_tasks())
        if all_tasks:
            # cancel pending tasks
            f = asyncio.gather(*all_tasks)
            f.cancel()
            try:
                loop.run_until_complete(f)
            except asyncio.CancelledError:
                pass
    else:
        if len(awaitables) == 1:
            future = awaitables[0]
        else:
            future = asyncio.gather(*awaitables)
        if timeout:
            future = asyncio.wait_for(future, timeout)
        task = asyncio.ensure_future(future)
        def onError(_):
            # A globally-emitted error cancels the running task.
            task.cancel()
        globalErrorEvent.connect(onError)
        try:
            result = loop.run_until_complete(task)
        except asyncio.CancelledError as e:
            # Re-raise the global error (if any) instead of the cancellation.
            raise globalErrorEvent.value() or e
        finally:
            globalErrorEvent.disconnect(onError)
    return result
def _fillDate(time: Union[time_, datetime]) -> datetime:
    """Combine a bare time-of-day with today's date; pass datetimes through."""
    if isinstance(time, time_):
        return datetime.combine(date.today(), time)
    return time
def schedule(
        time: Union[time_, datetime], callback: Callable, *args):
    """
    Schedule the callback to be run at the given time with
    the given arguments.
    This will return the Event Handle.
    Args:
        time: Time to run callback. If given as :py:class:`datetime.time`
            then use today as date.
        callback: Callable scheduled to run.
        args: Arguments for to call callback with.
    """
    dt = _fillDate(time)
    now = datetime.now(dt.tzinfo)
    # Delay in seconds until the target moment (negative -> fires immediately).
    delay = (dt - now).total_seconds()
    loop = asyncio.get_event_loop()
    return loop.call_later(delay, callback, *args)
def sleep(secs: float = 0.02) -> bool:
    """
    Wait for the given amount of seconds while everything still keeps
    processing in the background. Never use time.sleep().
    Args:
        secs (float): Time in seconds to wait.
    """
    # Drives the event loop via run(), so pending tasks make progress.
    run(asyncio.sleep(secs))
    return True
def timeRange(
        start: Union[time_, datetime],
        end: Union[time_, datetime],
        step: float) -> Iterator[datetime]:
    """
    Iterator that waits periodically until certain time points are
    reached while yielding those time points.
    Args:
        start: Start time, can be specified as datetime.datetime,
            or as datetime.time in which case today is used as the date
        end: End time, can be specified as datetime.datetime,
            or as datetime.time in which case today is used as the date
        step (float): The number of seconds of each period
    """
    assert step > 0
    delta = timedelta(seconds=step)
    t = _fillDate(start)
    tz = timezone.utc if t.tzinfo else None
    now = datetime.now(tz)
    # Skip any periods that are already in the past.
    while t < now:
        t += delta
    while t <= _fillDate(end):
        waitUntil(t)
        yield t
        t += delta
def waitUntil(t: Union[time_, datetime]) -> bool:
    """
    Wait until the given time t is reached.
    Args:
        t: The time t can be specified as datetime.datetime,
            or as datetime.time in which case today is used as the date.
    """
    now = datetime.now(t.tzinfo)
    secs = (_fillDate(t) - now).total_seconds()
    # Runs the event loop while waiting, so background tasks keep going.
    run(asyncio.sleep(secs))
    return True
async def timeRangeAsync(
        start: Union[time_, datetime],
        end: Union[time_, datetime],
        step: float) -> AsyncIterator[datetime]:
    """Async version of :meth:`timeRange`."""
    assert step > 0
    delta = timedelta(seconds=step)
    t = _fillDate(start)
    tz = timezone.utc if t.tzinfo else None
    now = datetime.now(tz)
    # Skip any periods that are already in the past.
    while t < now:
        t += delta
    while t <= _fillDate(end):
        await waitUntilAsync(t)
        yield t
        t += delta
async def waitUntilAsync(t: Union[time_, datetime]) -> bool:
    """Async version of :meth:`waitUntil`."""
    now = datetime.now(t.tzinfo)
    secs = (_fillDate(t) - now).total_seconds()
    await asyncio.sleep(secs)
    return True
def patchAsyncio():
    """Patch asyncio to allow nested event loops."""
    # Third-party nest_asyncio must be installed for this to work.
    import nest_asyncio
    nest_asyncio.apply()
def startLoop():
    """Use nested asyncio event loop for Jupyter notebooks."""
    patchAsyncio()
def useQt(qtLib: str = 'PyQt5', period: float = 0.01):
    """
    Run combined Qt5/asyncio event loop.
    Args:
        qtLib: Name of Qt library to use, can be 'PyQt5' or 'PySide2'.
        period: Period in seconds to poll Qt.
    """
    def qt_step():
        # Re-schedule itself, then pump Qt events for one slice.
        loop.call_later(period, qt_step)
        if not stack:
            # Lazily create one (event loop, timer) pair and reuse it.
            qloop = QEventLoop()
            timer = QTimer()
            timer.timeout.connect(qloop.quit)
            stack.append((qloop, timer))
        qloop, timer = stack.pop()
        timer.start(0)
        qloop.exec_()
        timer.stop()
        stack.append((qloop, timer))
        qApp.processEvents()
    if qtLib not in ('PyQt5', 'PySide2'):
        raise RuntimeError(f'Unknown Qt library: {qtLib}')
    if qtLib == 'PyQt5':
        from PyQt5.Qt import QApplication, QTimer, QEventLoop
    else:
        from PySide2.QtWidgets import QApplication
        from PySide2.QtCore import QTimer, QEventLoop
    global qApp
    # Reuse an existing QApplication if one is already running.
    qApp = QApplication.instance() or QApplication(sys.argv) # type: ignore
    loop = asyncio.get_event_loop()
    stack: list = []
    qt_step()
def formatIBDatetime(dt: Union[date, datetime, str, None]) -> str:
    """Format date or datetime to string that IB uses.

    ``None``/empty -> ``''``; datetimes -> ``'YYYYmmdd HH:MM:SS'`` in local
    time; plain dates -> end of day; strings are passed through unchanged.
    """
    if not dt:
        return ''
    if isinstance(dt, datetime):
        if dt.tzinfo:
            # convert to local system timezone
            dt = dt.astimezone()
        return dt.strftime('%Y%m%d %H:%M:%S')
    if isinstance(dt, date):
        # Bare dates mean "end of that day" (datetime is checked first,
        # since datetime subclasses date).
        return dt.strftime('%Y%m%d 23:59:59')
    return dt
def parseIBDatetime(s: str) -> Union[date, datetime]:
    """Parse string in IB date or datetime format to datetime.

    Accepts ``'YYYYmmdd'`` (returns a date), an all-digit Unix timestamp
    (returns an aware UTC datetime), or ``'YYYYmmdd HH:MM:SS'`` /
    ``'YYYY-mm-dd HH:MM:SS.0'`` (returns a naive datetime).
    """
    if len(s) == 8:
        # YYYYmmdd
        return date(int(s[0:4]), int(s[4:6]), int(s[6:8]))
    if s.isdigit():
        # Seconds since the Unix epoch
        return datetime.fromtimestamp(int(s), timezone.utc)
    # Normalize both accepted datetime spellings, dropping any fraction.
    normalized = s.replace(' ', '').replace('-', '')[:16]
    return datetime.strptime(normalized, '%Y%m%d%H:%M:%S')
| 31.442901 | 114 | 0.604368 |
9e0d109d9ad635cadba7a8c46b1379e6bebb48ff
| 3,432 |
py
|
Python
|
100DaysofX/01-Code/003_Python_Init/003_session_init.py
|
tobias-fyi/challenges
|
4b4d2a8c5e24a51e33d78ab4191ebb843b788aca
|
[
"MIT"
] | null | null | null |
100DaysofX/01-Code/003_Python_Init/003_session_init.py
|
tobias-fyi/challenges
|
4b4d2a8c5e24a51e33d78ab4191ebb843b788aca
|
[
"MIT"
] | null | null | null |
100DaysofX/01-Code/003_Python_Init/003_session_init.py
|
tobias-fyi/challenges
|
4b4d2a8c5e24a51e33d78ab4191ebb843b788aca
|
[
"MIT"
] | null | null | null |
#! /anaconda3/envs/tobias_fyi/bin/python
# 100DaysofCode - Day 3
# 003_session_init.py
# create directory + journal for daily session
import os
import sys
import datetime
import subprocess
import time
def justify_center(content, width, symbol):
'''Centers string in symbol - width chars wide'''
text = content
lines = text.split('\n')
for i in range(len(lines)):
lines[i] = lines[i].center(width, symbol)
text = '\n'.join(lines)
return text
def table_printer(array, title, left_width, right_width):
    """Print `array` table-of-contents style: a centered, dash-padded title
    followed by one row per element (dot-filled index, right-aligned value)."""
    total_width = left_width + right_width
    print(f'{title}'.center(total_width, '-'))
    for index, item in enumerate(array):
        row = str(index).ljust(left_width, '.') + str(item).rjust(right_width)
        print(row)
def dir_picker():
    # TODO: unimplemented placeholder — presumably intended to hold the
    # directory-listing/selection logic that is currently duplicated inline
    # in the script body below (see the "convert this into a function" TODOs).
    pass
# set up all the time + date variables
c_time = time.strftime("%H:%M", time.localtime(time.time()))
today = datetime.date.today()
c_year = str(today.year)
c_month = str(today.month).zfill(2)
c_day = str(today.day).zfill(2)
c_date = str(today)
# Challenge start date; day_num is the zero-padded day count since then.
start_date = [2019, 3, 4]
start_date = datetime.date(start_date[0], start_date[1], start_date[2])
day_num = str((today - start_date).days).zfill(3)
# aesthetic informatics — widths and characters for the terminal UI
v_width = 33
p_icon = 'º'
s_icon = '-'
spacer = ' '
ps_spacer = f'{s_icon*2}{p_icon}{s_icon*2}'
# visual separators - horizontal lines
sep = justify_center(p_icon, v_width, s_icon)
sep_sm = justify_center(p_icon, 17, s_icon)
sep_space = justify_center(p_icon, v_width, spacer)
sep_ps = justify_center(ps_spacer, v_width, spacer)
p_paths = {
    'p_all': '/Users/Tobias/Documents/Projects/Challenges/'
}  # dict to hold paths
term_commands = {
    'code': ['code'],
    'code_ws': ['code'],
}  # dict to hold terminal commands
# list challenges (directories)
# TODO: convert this into a function
os.chdir(p_paths['p_all'])
p_list = os.listdir(os.getcwd())
p_list.sort()
# NOTE(review): removing items from p_list while iterating over it skips the
# element following each removed file, so some files (.DS_Store, README, …)
# can survive the filter. A list comprehension would be correct here.
for proj in p_list:
    if os.path.isfile(os.path.join(p_paths['p_all'], proj)):
        # removes files from list - .DS_STORE, README, etc.
        p_list.remove(proj)
print(sep)
table_printer(p_list, 'Choose your challenge', 8, 25)
print(sep)
print(sep_ps)
try:  # find selected directory + add path to dict for later
    p_index = int(input())
    p_root = p_list[p_index]
    p_paths['p_root'] = os.path.join(p_paths['p_all'], p_root)
    os.chdir(p_paths['p_root'])
    p_path = os.getcwd()
    print()
# NOTE(review): bad input raises ValueError (non-numeric) or IndexError
# (out of range); neither is caught by FileNotFoundError, so invalid
# choices still crash the script.
except FileNotFoundError:
    print()
    print('Challenge not found...')
print(sep)
print(os.getcwd())
# Second selection level: pick a sub-challenge inside the chosen directory.
# TODO: convert this into a function
c_list = os.listdir(os.getcwd())
c_list.sort()
# NOTE(review): same remove-while-iterating issue as above.
for challenge in c_list:
    if os.path.isfile(os.path.join(p_paths['p_root'], challenge)):
        # removes files from list - .DS_STORE, README, etc.
        c_list.remove(challenge)
print(sep)
table_printer(c_list, 'Choose again', 8, 25)
print(sep)
print(sep_ps)
try:  # find selected directory + add path to dict for later
    c_index = int(input())
    c_root = c_list[c_index]
    p_paths['c_root'] = os.path.join(p_paths['p_root'], c_root)
    os.chdir(p_paths['c_root'])
    p_path = os.getcwd()
    print()
except FileNotFoundError:
    print()
    print('Challenge not found...')
print(sep)
print(os.getcwd())
# Remaining features are unimplemented; several variables above (c_time,
# day_num, term_commands, sep_sm, sep_space) are placeholders for them.
# TODO: check for existing dir + creates one if not - nav into it
# TODO: prompts for session goal and any tasks
# TODO: creates journal entry with challenge + day# + date + goal
# TODO: open that journal entry in VSCode
| 25.61194 | 72 | 0.681527 |
1ac55f62ae7027d206c0220c83d61f84286fa32a
| 1,800 |
py
|
Python
|
stock_market_insights/optimize_strategy.py
|
AlbertRtk/stock_market
|
b0f15fe90858f651e72c0520d38becbf7531b1ac
|
[
"MIT"
] | 1 |
2022-03-29T12:23:46.000Z
|
2022-03-29T12:23:46.000Z
|
stock_market_insights/optimize_strategy.py
|
AlbertRtk/stock_market
|
b0f15fe90858f651e72c0520d38becbf7531b1ac
|
[
"MIT"
] | null | null | null |
stock_market_insights/optimize_strategy.py
|
AlbertRtk/stock_market
|
b0f15fe90858f651e72c0520d38becbf7531b1ac
|
[
"MIT"
] | null | null | null |
from simulator import Simulator
from marketools import Stock, Wallet, store_data, StockQuotes
from stock_index import wig20_2019, mwig40
from tqdm import tqdm
from strategies import EmaVolStrategy
# Skip the online freshness check so repeated backtests reuse cached quotes.
StockQuotes.check_for_update = False
# Make sure quote data is stored locally before any simulation starts.
store_data()
# === SIMULATOR CONFIG =========================================================
# Universe under test: WIG20 (2019 composition) extended with mWIG40 tickers.
# NOTE(review): update() mutates the imported wig20_2019 object in place.
TRADED_TICKERS = wig20_2019
TRADED_TICKERS.update(mwig40)
# ==============================================================================
def test_for_years(strategy, traded_stock, start_year, end_year, step_year):
    """Backtest `strategy` for each year in range(start_year, end_year,
    step_year) and append the yearly gains to the results file.

    Every year starts from a fresh wallet of 10000 with the same commission
    scheme; a year's gain is the final wallet state minus that capital.
    """
    yearly_gains = []
    for year in range(start_year, end_year, step_year):
        # Trading calendar for the year, taken from the WIG index quotes.
        first_day = f'{year}-01-01'
        last_day = f'{year}-12-31'
        trading_days = Stock('WIG').ohlc[first_day:last_day].index
        wallet = Wallet(commission_rate=0.0038, min_commission=3.0)
        wallet.money = 10000
        simulation = Simulator(trading_days, traded_stock, wallet)
        outcome = simulation.run(strategy)
        final_state = float(outcome.tail(1)['Wallet state'])
        yearly_gains.append(final_state - 10000)
    save_test_results(yearly_gains)
def save_test_results(results):
    """Append one line to the optimization log: each gain followed by a tab,
    then the total gain and a newline."""
    line = ''.join(f'{gain}\t' for gain in results)
    line += f'{sum(results)}\n'
    with open('EMAVolStrategy_optimization_SL.txt', 'a') as f:
        f.write(line)
if __name__ == '__main__':
    # Preload quote objects once; each Stock() may hit the local data store.
    print('Preparing data...')
    stocks_data = dict()
    for tck in tqdm(TRADED_TICKERS):
        stocks_data[tck] = Stock(tck)
    print()
    # Grid-search the stop-loss parameter over 0.026 .. 0.035 in 0.001 steps;
    # each value is prefixed to its result line in the log file, and
    # test_for_years() appends the per-year gains for 2015-2019.
    my_strategy = EmaVolStrategy()
    for sl_ in range(26, 36, 1):
        sl = sl_ / 1000
        my_strategy.stop_loss = sl
        with open('EMAVolStrategy_optimization_SL.txt', 'a') as f:
            f.write(f'{sl}\t')
        test_for_years(my_strategy, stocks_data, 2015, 2020, 1)
e7c744d1fec644a9c9272ce43f4177c622bc1c0f
| 6,515 |
py
|
Python
|
PathPlanning/QuinticPolynomialsPlanner/quintic_polynomials_planner.py
|
Gjacquenot/PythonRobotics
|
dc1f423b7566d3f4a13c55156bb74ca80a89398b
|
[
"MIT"
] | null | null | null |
PathPlanning/QuinticPolynomialsPlanner/quintic_polynomials_planner.py
|
Gjacquenot/PythonRobotics
|
dc1f423b7566d3f4a13c55156bb74ca80a89398b
|
[
"MIT"
] | null | null | null |
PathPlanning/QuinticPolynomialsPlanner/quintic_polynomials_planner.py
|
Gjacquenot/PythonRobotics
|
dc1f423b7566d3f4a13c55156bb74ca80a89398b
|
[
"MIT"
] | null | null | null |
"""
Quintic Polynomials Planner
author: Atsushi Sakai (@Atsushi_twi)
Ref:
- [Local Path planning And Motion Control For Agv In Positioning](http://ieeexplore.ieee.org/document/637936/)
"""
import math
import matplotlib.pyplot as plt
import numpy as np
# parameter
MAX_T = 100.0  # maximum time to the goal [s]
MIN_T = 5.0  # minimum time to the goal[s]
# Toggle matplotlib visualisation of the planned trajectory.
show_animation = True
class QuinticPolynomial:
    """Quintic polynomial x(t) fitted to boundary conditions.

    The six coefficients a0..a5 are chosen so position, velocity and
    acceleration match (xs, vxs, axs) at t=0 and (xe, vxe, axe) at t=T.
    """

    def __init__(self, xs, vxs, axs, xe, vxe, axe, T):
        # The t=0 conditions fix the first three coefficients directly.
        self.a0 = xs
        self.a1 = vxs
        self.a2 = axs / 2.0
        # The t=T conditions yield a 3x3 linear system for a3, a4, a5.
        lhs = np.array([[T**3, T**4, T**5],
                        [3 * T ** 2, 4 * T ** 3, 5 * T ** 4],
                        [6 * T, 12 * T ** 2, 20 * T ** 3]])
        rhs = np.array([xe - self.a0 - self.a1 * T - self.a2 * T**2,
                        vxe - self.a1 - 2 * self.a2 * T,
                        axe - 2 * self.a2])
        self.a3, self.a4, self.a5 = np.linalg.solve(lhs, rhs)

    def calc_point(self, t):
        """Position x(t)."""
        return (self.a0 + self.a1 * t + self.a2 * t**2
                + self.a3 * t**3 + self.a4 * t**4 + self.a5 * t**5)

    def calc_first_derivative(self, t):
        """Velocity x'(t)."""
        return (self.a1 + 2 * self.a2 * t
                + 3 * self.a3 * t**2 + 4 * self.a4 * t**3 + 5 * self.a5 * t**4)

    def calc_second_derivative(self, t):
        """Acceleration x''(t)."""
        return 2 * self.a2 + 6 * self.a3 * t + 12 * self.a4 * t**2 + 20 * self.a5 * t**3

    def calc_third_derivative(self, t):
        """Jerk x'''(t)."""
        return 6 * self.a3 + 24 * self.a4 * t + 60 * self.a5 * t**2
def quintic_polynomials_planner(sx, sy, syaw, sv, sa, gx, gy, gyaw, gv, ga, max_accel, max_jerk, dt):
    """
    quintic polynomial planner

    input
        sx: start x position [m]
        sy: start y position [m]
        syaw: start yaw angle [rad]
        sv: start speed [m/s]
        sa: start accel [m/ss]
        gx: goal x position [m]
        gy: goal y position [m]
        gyaw: goal yaw angle [rad]
        gv: goal speed [m/s]
        ga: goal accel [m/ss]
        max_accel: maximum accel [m/ss]
        max_jerk: maximum jerk [m/sss]
        dt: time tick [s]

    return
        time: time result
        rx: x position result list
        ry: y position result list
        ryaw: yaw angle result list
        rv: velocity result list
        ra: accel result list
        rj: jerk result list
    """
    # Project start/goal speed and accel onto the x/y axes using the yaws.
    vxs = sv * math.cos(syaw)
    vys = sv * math.sin(syaw)
    vxg = gv * math.cos(gyaw)
    vyg = gv * math.sin(gyaw)
    axs = sa * math.cos(syaw)
    ays = sa * math.sin(syaw)
    axg = ga * math.cos(gyaw)
    ayg = ga * math.sin(gyaw)
    time, rx, ry, ryaw, rv, ra, rj = [], [], [], [], [], [], []
    # Search over increasing horizons T; the first horizon whose sampled
    # profile respects both the accel and jerk limits is kept.
    for T in np.arange(MIN_T, MAX_T, MIN_T):
        # One independent quintic per axis for this candidate horizon.
        xqp = QuinticPolynomial(sx, vxs, axs, gx, vxg, axg, T)
        yqp = QuinticPolynomial(sy, vys, ays, gy, vyg, ayg, T)
        # Reset the sampled profile for this candidate.
        time, rx, ry, ryaw, rv, ra, rj = [], [], [], [], [], [], []
        for t in np.arange(0.0, T + dt, dt):
            time.append(t)
            rx.append(xqp.calc_point(t))
            ry.append(yqp.calc_point(t))
            vx = xqp.calc_first_derivative(t)
            vy = yqp.calc_first_derivative(t)
            v = np.hypot(vx, vy)
            yaw = math.atan2(vy, vx)
            rv.append(v)
            ryaw.append(yaw)
            ax = xqp.calc_second_derivative(t)
            ay = yqp.calc_second_derivative(t)
            a = np.hypot(ax, ay)
            # hypot drops the sign: mark samples where speed is falling as
            # negative acceleration.
            if len(rv) >= 2 and rv[-1] - rv[-2] < 0.0:
                a *= -1
            ra.append(a)
            jx = xqp.calc_third_derivative(t)
            jy = yqp.calc_third_derivative(t)
            j = np.hypot(jx, jy)
            # Same sign heuristic for jerk, based on the accel trend.
            if len(ra) >= 2 and ra[-1] - ra[-2] < 0.0:
                j *= -1
            rj.append(j)
        if max([abs(i) for i in ra]) <= max_accel and max([abs(i) for i in rj]) <= max_jerk:
            print("find path!!")
            break
    if show_animation:  # pragma: no cover
        # Replay the trajectory, drawing the start, goal and current pose.
        for i, _ in enumerate(time):
            plt.cla()
            # for stopping simulation with the esc key.
            plt.gcf().canvas.mpl_connect('key_release_event',
                                         lambda event: [exit(0) if event.key == 'escape' else None])
            plt.grid(True)
            plt.axis("equal")
            plot_arrow(sx, sy, syaw)
            plot_arrow(gx, gy, gyaw)
            plot_arrow(rx[i], ry[i], ryaw[i])
            plt.title("Time[s]:" + str(time[i])[0:4] +
                      " v[m/s]:" + str(rv[i])[0:4] +
                      " a[m/ss]:" + str(ra[i])[0:4] +
                      " jerk[m/sss]:" + str(rj[i])[0:4],
                      )
            plt.pause(0.001)
    return time, rx, ry, ryaw, rv, ra, rj
def plot_arrow(x, y, yaw, length=1.0, width=0.5, fc="r", ec="k"):  # pragma: no cover
    """Draw a pose arrow at (x, y) pointing along `yaw` [rad].

    Sequence inputs are handled by drawing one arrow per pose (with the
    default length/width/colors).
    """
    if isinstance(x, float):
        dx = length * math.cos(yaw)
        dy = length * math.sin(yaw)
        plt.arrow(x, y, dx, dy,
                  fc=fc, ec=ec, head_width=width, head_length=width)
        plt.plot(x, y)
    else:
        for pose in zip(x, y, yaw):
            plot_arrow(*pose)
def main():
    """Demo: plan a quintic-polynomial trajectory between two poses and plot
    the path plus yaw/speed/accel/jerk profiles over time."""
    print(__file__ + " start!!")
    sx = 10.0  # start x position [m]
    sy = 10.0  # start y position [m]
    syaw = np.deg2rad(10.0)  # start yaw angle [rad]
    sv = 1.0  # start speed [m/s]
    sa = 0.1  # start accel [m/ss]
    gx = 30.0  # goal x position [m]
    gy = -10.0  # goal y position [m]
    gyaw = np.deg2rad(20.0)  # goal yaw angle [rad]
    gv = 1.0  # goal speed [m/s]
    ga = 0.1  # goal accel [m/ss]
    max_accel = 1.0  # max accel [m/ss]
    max_jerk = 0.5  # max jerk [m/sss]
    dt = 0.1  # time tick [s]
    time, x, y, yaw, v, a, j = quintic_polynomials_planner(
        sx, sy, syaw, sv, sa, gx, gy, gyaw, gv, ga, max_accel, max_jerk, dt)
    if show_animation:  # pragma: no cover
        # Path in the x/y plane, then one time-series figure per quantity.
        plt.plot(x, y, "-r")
        plt.subplots()
        plt.plot(time, [np.rad2deg(i) for i in yaw], "-r")
        plt.xlabel("Time[s]")
        plt.ylabel("Yaw[deg]")
        plt.grid(True)
        plt.subplots()
        plt.plot(time, v, "-r")
        plt.xlabel("Time[s]")
        plt.ylabel("Speed[m/s]")
        plt.grid(True)
        plt.subplots()
        plt.plot(time, a, "-r")
        plt.xlabel("Time[s]")
        plt.ylabel("accel[m/ss]")
        plt.grid(True)
        plt.subplots()
        plt.plot(time, j, "-r")
        plt.xlabel("Time[s]")
        plt.ylabel("jerk[m/sss]")
        plt.grid(True)
        plt.show()
| 28.203463 | 110 | 0.494858 |
9502ad38ab056a6b9219a7255be650a35dab79e8
| 10,175 |
py
|
Python
|
trac/db/tests/sqlite_test.py
|
cboos/trac
|
c0d42829d719dd82fde489611344ced97597aebd
|
[
"BSD-3-Clause"
] | null | null | null |
trac/db/tests/sqlite_test.py
|
cboos/trac
|
c0d42829d719dd82fde489611344ced97597aebd
|
[
"BSD-3-Clause"
] | null | null | null |
trac/db/tests/sqlite_test.py
|
cboos/trac
|
c0d42829d719dd82fde489611344ced97597aebd
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2018 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
import io
import os
import sys
import unittest
from trac.config import ConfigurationError
from trac.db.api import DatabaseManager
from trac.db.schema import Column, Index, Table
from trac.env import Environment
from trac.test import EnvironmentStub, MockRequest, get_dburi, mkdtemp, rmtree
from trac.util import translation
class DatabaseFileTestCase(unittest.TestCase):
    """Error reporting when the SQLite trac.db file is missing or unusable.

    stdout/stderr are redirected to os.devnull for the whole class so the
    expected environment errors don't pollute the test output.
    Note: this module targets Trac's Python 2 codebase (`unicode`).
    """
    stdout = None
    stderr = None
    devnull = None

    @classmethod
    def setUpClass(cls):
        # Silence environment-creation and error chatter for every test.
        cls.stdout = sys.stdout
        cls.stderr = sys.stderr
        cls.devnull = io.open(os.devnull, 'wb')
        sys.stdout = sys.stderr = cls.devnull

    @classmethod
    def tearDownClass(cls):
        cls.devnull.close()
        sys.stdout = cls.stdout
        sys.stderr = cls.stderr

    def setUp(self):
        self.env_path = mkdtemp()
        self.db_path = os.path.join(self.env_path, 'db', 'trac.db')
        self.env = None

    def tearDown(self):
        if self.env:
            self.env.shutdown()
        rmtree(self.env_path)

    def _create_env(self):
        # Create a throwaway environment and release it immediately; the
        # tests then reopen (or sabotage) it on their own terms.
        env = Environment(self.env_path, create=True)
        env.shutdown()

    def _db_query(self, env):
        # Any query will do — it just has to touch the database file.
        env.db_query("SELECT name FROM system")

    def test_missing_tracdb(self):
        """A deleted trac.db must surface as a ConfigurationError."""
        self._create_env()
        os.remove(self.db_path)
        self.env = Environment(self.env_path)
        try:
            self._db_query(self.env)
            self.fail('ConfigurationError not raised')
        except ConfigurationError as e:
            self.assertIn('Database "', unicode(e))
            self.assertIn('" not found.', unicode(e))

    def test_no_permissions(self):
        """A read-only trac.db must surface as a ConfigurationError."""
        self._create_env()
        os.chmod(self.db_path, 0o444)
        self.env = Environment(self.env_path)
        try:
            self._db_query(self.env)
            self.fail('ConfigurationError not raised')
        except ConfigurationError as e:
            self.assertIn('requires read _and_ write permissions', unicode(e))

    if os.name == 'posix' and os.getuid() == 0:
        del test_no_permissions  # For root, os.access() always returns True

    def test_error_with_lazy_translation(self):
        """The error must render even with lazy i18n translation active."""
        self._create_env()
        os.remove(self.db_path)
        self.env = Environment(self.env_path)
        req = MockRequest(self.env, authname='trac_auth=1234567890')
        translation.make_activable(lambda: req.locale, self.env.path)
        try:
            self._db_query(self.env)
            self.fail('ConfigurationError not raised')
        except ConfigurationError as e:
            message = unicode(e)
            self.assertIn('Database "', message)
            self.assertIn('" not found.', message)
        finally:
            translation.deactivate()
class SQLiteConnectionTestCase(unittest.TestCase):
    """Schema-manipulation tests for the SQLite backend.

    Creates two fixture tables — one with a simple key and one with a
    composite key — and verifies that dropping columns preserves or removes
    the proper indices. Inspection goes straight to SQLite's PRAGMA
    commands. Python 2 API is used (`dict.iteritems`).
    """

    def setUp(self):
        self.env = EnvironmentStub()
        self.schema = [
            Table('test_simple', key='id')[
                Column('id', auto_increment=True),
                Column('username'),
                Column('email'),
                Column('enabled', type='int'),
                Column('extra'),
                Index(['username'], unique=True),
                Index(['email'], unique=False),
            ],
            Table('test_composite', key=['id', 'name'])[
                Column('id', type='int'),
                Column('name'),
                Column('value'),
                Column('enabled', type='int'),
                Index(['name', 'value'], unique=False),
                Index(['name', 'enabled'], unique=True),
            ],
        ]
        self.dbm = DatabaseManager(self.env)
        # Drop first so a leftover schema from a failed run can't interfere.
        self.dbm.drop_tables(self.schema)
        self.dbm.create_tables(self.schema)
        self.dbm.insert_into_tables([
            ('test_simple',
             ('username', 'email', 'enabled'),
             [('joe', '[email protected]', 1), (u'joé', '[email protected]', 0)]),
            ('test_composite',
             ('id', 'name', 'value', 'enabled'),
             [(1, 'foo', '42', 1),
              (1, 'bar', '42', 1),
              (2, 'foo', '43', 0),
              (2, 'bar', '43', 0)]),
        ])

    def tearDown(self):
        DatabaseManager(self.env).drop_tables(self.schema)
        self.env.reset_db()

    def _table_info(self, table):
        # One dict per column, as reported by SQLite's PRAGMA table_info.
        names = ('column', 'type', 'notnull', 'default', 'pk')
        with self.env.db_query as db:
            cursor = db.cursor()
            cursor.execute("PRAGMA table_info(%s)" % db.quote(table))
            return [dict(zip(names, row[1:6])) for row in cursor]

    def _index_info(self, table):
        # Map of index name -> {'unique': flag, 'columns': [names]}.
        with self.env.db_query as db:
            cursor = db.cursor()
            cursor.execute("PRAGMA index_list(%s)" % db.quote(table))
            results = {row[1]: {'unique': row[2]} for row in cursor}
            for index, info in results.iteritems():
                cursor.execute("PRAGMA index_info(%s)" % db.quote(index))
                info['columns'] = [row[2] for row in cursor]
        return results

    def _drop_column(self, table, column):
        with self.env.db_transaction as db:
            db.drop_column(table, column)

    def _query(self, stmt, *args):
        return self.env.db_query(stmt, args)

    def test_remove_simple_keys(self):
        """Dropping columns one by one removes only the affected indices."""
        coldef = {
            'id': {'column': 'id', 'type': 'integer', 'notnull': 0,
                   'default': None, 'pk': 1},
            'username': {'column': 'username', 'type': 'text',
                         'notnull': 0, 'default': None, 'pk': 0},
            'email': {'column': 'email', 'type': 'text', 'notnull': 0,
                      'default': None, 'pk': 0},
            'enabled': {'column': 'enabled', 'type': 'integer',
                        'notnull': 0, 'default': None, 'pk': 0},
            'extra': {'column': 'extra', 'type': 'text',
                      'notnull': 0, 'default': None, 'pk': 0},
        }
        columns_0 = self._table_info('test_simple')
        self.assertEqual([coldef['id'], coldef['username'], coldef['email'],
                          coldef['enabled'], coldef['extra']], columns_0)
        indices_0 = self._index_info('test_simple')
        self.assertEqual(['test_simple_email_idx', 'test_simple_username_idx'],
                         sorted(indices_0))
        # Unindexed column: indices must be untouched.
        self._drop_column('test_simple', 'extra')
        columns_1 = self._table_info('test_simple')
        indices_1 = self._index_info('test_simple')
        self.assertEqual([coldef['id'], coldef['username'], coldef['email'],
                          coldef['enabled']], columns_1)
        self.assertEqual(indices_1, indices_0)
        # Primary key column: indices must still be untouched.
        self._drop_column('test_simple', 'id')
        columns_2 = self._table_info('test_simple')
        indices_2 = self._index_info('test_simple')
        self.assertEqual([coldef['username'], coldef['email'],
                          coldef['enabled']], columns_2)
        self.assertEqual(indices_2, indices_0)
        # Indexed column: its index must disappear with it.
        self._drop_column('test_simple', 'username')
        columns_3 = self._table_info('test_simple')
        indices_3 = self._index_info('test_simple')
        self.assertEqual([coldef['email'], coldef['enabled']], columns_3)
        self.assertEqual(['test_simple_email_idx'], sorted(indices_3))
        self._drop_column('test_simple', 'email')
        columns_4 = self._table_info('test_simple')
        indices_4 = self._index_info('test_simple')
        self.assertEqual([coldef['enabled']], columns_4)
        self.assertEqual({}, indices_4)

    def test_remove_composite_keys(self):
        """Dropping composite-key columns removes only the indices that
        reference them, and the remaining data stays intact."""
        indices_0 = self._index_info('test_composite')
        self.assertEqual(['sqlite_autoindex_test_composite_1',
                          'test_composite_name_enabled_idx',
                          'test_composite_name_value_idx'],
                         sorted(indices_0))
        self.assertEqual({'unique': 1, 'columns': ['id', 'name']},
                         indices_0['sqlite_autoindex_test_composite_1'])
        self.assertEqual({'unique': 0, 'columns': ['name', 'value']},
                         indices_0['test_composite_name_value_idx'])
        self.assertEqual({'unique': 1, 'columns': ['name', 'enabled']},
                         indices_0['test_composite_name_enabled_idx'])
        # Dropping `id` kills the autoindex on (id, name) but keeps the rest.
        self._drop_column('test_composite', 'id')
        indices_1 = self._index_info('test_composite')
        self.assertEqual(['test_composite_name_enabled_idx',
                          'test_composite_name_value_idx'],
                         sorted(indices_1))
        self.assertEqual(indices_0['test_composite_name_value_idx'],
                         indices_1['test_composite_name_value_idx'])
        self.assertEqual(indices_0['test_composite_name_enabled_idx'],
                         indices_1['test_composite_name_enabled_idx'])
        rows = self._query("""SELECT * FROM test_composite
                              ORDER BY name, value, enabled""")
        self.assertEqual([('bar', '42', 1), ('bar', '43', 0),
                          ('foo', '42', 1), ('foo', '43', 0)], rows)
        # Dropping `name` removes both remaining indices.
        self._drop_column('test_composite', 'name')
        self.assertEqual({}, self._index_info('test_composite'))
        rows = self._query("""SELECT * FROM test_composite
                              ORDER BY value, enabled""")
        self.assertEqual([('42', 1), ('42', 1), ('43', 0), ('43', 0)], rows)
def test_suite():
    """Aggregate this module's test cases into a single suite.

    The SQLite connection tests are included only when the configured test
    database URI points at SQLite.
    """
    cases = [DatabaseFileTestCase]
    if get_dburi().startswith('sqlite:'):
        cases.append(SQLiteConnectionTestCase)
    suite = unittest.TestSuite()
    for case in cases:
        suite.addTest(unittest.makeSuite(case))
    return suite
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| 39.134615 | 79 | 0.577199 |
648d0a3816ade1cd8284f3da44ac57564a48a50a
| 4,223 |
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/amazon/aws/plugins/module_utils/batch.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 22 |
2021-07-16T08:11:22.000Z
|
2022-03-31T07:15:34.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/amazon/aws/plugins/module_utils/batch.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12 |
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/amazon/aws/plugins/module_utils/batch.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 39 |
2021-07-05T02:31:42.000Z
|
2022-03-31T02:46:03.000Z
|
# Copyright (c) 2017 Ansible Project
#
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
This module adds shared support for Batch modules.
"""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
try:
from botocore.exceptions import ClientError
except ImportError:
pass
from ansible.module_utils.common.dict_transformations import snake_dict_to_camel_dict
from .ec2 import boto3_conn
from .ec2 import get_aws_connection_info
class AWSConnection(object):
    """
    Create the connection object and client objects as required.

    Deprecated: emits an Ansible deprecation warning on construction — new
    code should use AnsibleAWSModule.client() instead.
    """

    def __init__(self, ansible_obj, resources, boto3=True):
        ansible_obj.deprecate("The 'AWSConnection' class is deprecated, please use 'AnsibleAWSModule.client()'",
                              date='2022-06-01', collection_name='amazon.aws')
        self.region, self.endpoint, aws_connect_kwargs = get_aws_connection_info(ansible_obj, boto3=boto3)
        self.resource_client = dict()
        if not resources:
            resources = ['batch']
        # An IAM client is always created too (used below for the account id).
        # NOTE(review): this appends to the caller's list in place.
        resources.append('iam')
        for resource in resources:
            aws_connect_kwargs.update(dict(region=self.region,
                                           endpoint=self.endpoint,
                                           conn_type='client',
                                           resource=resource
                                           ))
            self.resource_client[resource] = boto3_conn(ansible_obj, **aws_connect_kwargs)
        # if region is not provided, then get default profile/session region
        if not self.region:
            self.region = self.resource_client['batch'].meta.region_name
        # set account ID (parsed from the current user's ARN; empty on failure)
        try:
            self.account_id = self.resource_client['iam'].get_user()['User']['Arn'].split(':')[4]
        except (ClientError, ValueError, KeyError, IndexError):
            self.account_id = ''

    def client(self, resource='batch'):
        # Return the boto3 client created for `resource` in __init__.
        return self.resource_client[resource]
def cc(key):
    """
    Changes a python snake_case key into its camelCase equivalent. For
    example, 'compute_environment_name' becomes 'computeEnvironmentName'.

    :param key: snake_case string
    :return: camelCase string
    """
    head, *tail = key.split('_')
    camel = head
    for part in tail:
        camel += part.capitalize()
    return camel
def set_api_params(module, module_params):
    """
    Filter the Ansible module parameters down to those accepted by the API
    call and convert the keys to camelCase, as boto3 expects.

    :param module: AnsibleModule instance providing ``params``
    :param module_params: parameter names (snake_case) accepted by the API
    :return: dict of camelCase keys, with unset (None) values omitted
    """
    filtered = {
        key: value
        for key, value in dict(module.params).items()
        if key in module_params and value is not None
    }
    return snake_dict_to_camel_dict(filtered)
| 39.46729 | 112 | 0.692872 |
35e83c9d2e9afef792beb830936df30f6ac73275
| 1,206 |
py
|
Python
|
app/recipe/serializers.py
|
jenn0pal/recipe-app-api
|
16da3a3920ef9be003f06ba7eebdb6acfd092bed
|
[
"MIT"
] | null | null | null |
app/recipe/serializers.py
|
jenn0pal/recipe-app-api
|
16da3a3920ef9be003f06ba7eebdb6acfd092bed
|
[
"MIT"
] | null | null | null |
app/recipe/serializers.py
|
jenn0pal/recipe-app-api
|
16da3a3920ef9be003f06ba7eebdb6acfd092bed
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from core.models import Tag, Ingredient, Recipe
class TagSerializer(serializers.ModelSerializer):
    """Serializer for tag objects"""

    class Meta:
        model = Tag
        fields = ('id', 'name',)
        # The primary key is assigned by the database, never by clients.
        read_only_fields = ('id',)
class IngredientSerializer(serializers.ModelSerializer):
    """Serializer for ingredient objects"""

    class Meta:
        model = Ingredient
        fields = ('id', 'name')
        # The primary key is assigned by the database, never by clients.
        read_only_fields = ('id',)
class RecipeSerializer(serializers.ModelSerializer):
    """Serializer for recipe objects"""

    # Related objects are exposed as lists of primary keys here; the detail
    # serializer below overrides these with nested representations.
    ingredients = serializers.PrimaryKeyRelatedField(
        many=True,
        queryset=Ingredient.objects.all()
    )
    tags = serializers.PrimaryKeyRelatedField(
        many=True,
        queryset=Tag.objects.all()
    )

    class Meta:
        model = Recipe
        fields = ('id', 'title', 'ingredients', 'tags', 'time_minutes',
                  'price', 'link')
        read_only_fields = ('id',)
class RecipeDetailSerializer(RecipeSerializer):
    """Serialize a recipe detail"""

    # Override the primary-key fields from RecipeSerializer with nested,
    # read-only representations for the detail view.
    ingredients = IngredientSerializer(many=True, read_only=True)
    tags = TagSerializer(many=True, read_only=True)
| 25.659574 | 71 | 0.650083 |
f032e106a624a09bdbfa4632ea0e6c36ce47e5db
| 6,720 |
py
|
Python
|
vitrage/datasources/aodh/transformer.py
|
openstack/vitrage
|
95b33dbf39b040e23915882a2879c87aec239ca9
|
[
"Apache-2.0"
] | 89 |
2015-09-30T21:42:17.000Z
|
2022-03-28T16:31:19.000Z
|
vitrage/datasources/aodh/transformer.py
|
openstack/vitrage
|
95b33dbf39b040e23915882a2879c87aec239ca9
|
[
"Apache-2.0"
] | 4 |
2015-12-13T13:06:53.000Z
|
2016-01-03T19:51:28.000Z
|
vitrage/datasources/aodh/transformer.py
|
openstack/vitrage
|
95b33dbf39b040e23915882a2879c87aec239ca9
|
[
"Apache-2.0"
] | 43 |
2015-11-04T15:54:27.000Z
|
2021-12-10T14:24:03.000Z
|
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from vitrage.common.constants import DatasourceProperties as DSProps
from vitrage.common.constants import EdgeLabel
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import GraphAction
from vitrage.common.constants import VertexProperties as VProps
from vitrage.datasources.alarm_transformer_base import AlarmTransformerBase
from vitrage.datasources.aodh import AODH_DATASOURCE
from vitrage.datasources.aodh.properties import AodhEventType
from vitrage.datasources.aodh.properties import AodhProperties as AodhProps
from vitrage.datasources.aodh.properties import AodhState
from vitrage.datasources import transformer_base as tbase
from vitrage.datasources.transformer_base import Neighbor
from vitrage.datasources.transformer_base import TransformerBase
from vitrage.evaluator.actions.evaluator_event_transformer \
import VITRAGE_DATASOURCE
import vitrage.graph.utils as graph_utils
from vitrage.utils import datetime as datetime_utils
class AodhTransformer(AlarmTransformerBase):
    """Transform aodh alarm events into Vitrage alarm vertices.

    Alarms that aodh created on Vitrage's behalf (they carry a vitrage_id)
    are merged into the existing graph vertex instead of creating a new one.
    """

    # Event types which need to refer them differently
    GRAPH_ACTION_MAPPING = {
        AodhEventType.DELETION: GraphAction.DELETE_ENTITY,
    }

    def _create_snapshot_entity_vertex(self, entity_event):
        if _is_vitrage_alarm(entity_event):
            return self._create_merge_alarm_vertex(entity_event)
        return self._create_vertex(entity_event)

    def _create_update_entity_vertex(self, entity_event):
        if _is_vitrage_alarm(entity_event):
            return self._create_merge_alarm_vertex(entity_event)
        return self._create_vertex(entity_event)

    def _create_vertex(self, entity_event):
        """Build a new ALARM vertex from a plain (non-vitrage) aodh event."""
        metadata = {
            VProps.NAME: entity_event[AodhProps.NAME],
            VProps.SEVERITY: entity_event[AodhProps.SEVERITY],
            AodhProps.DESCRIPTION: entity_event[AodhProps.DESCRIPTION],
            AodhProps.ENABLED: entity_event[AodhProps.ENABLED],
            VProps.PROJECT_ID: entity_event.get(AodhProps.PROJECT_ID, None),
            AodhProps.REPEAT_ACTIONS: entity_event[AodhProps.REPEAT_ACTIONS],
            VProps.RESOURCE_ID: entity_event[AodhProps.RESOURCE_ID],
            'alarm_type': entity_event[AodhProps.TYPE]
        }
        # Alarm-type-specific metadata: event alarms carry an event type,
        # threshold alarms a state timestamp.
        # TODO(annarez): convert EVENT_TYPE to tuple
        if entity_event[AodhProps.TYPE] == AodhProps.EVENT:
            metadata[AodhProps.EVENT_TYPE] = entity_event[AodhProps.EVENT_TYPE]
        elif entity_event[AodhProps.TYPE] == AodhProps.THRESHOLD:
            metadata[AodhProps.STATE_TIMESTAMP] = \
                entity_event[AodhProps.STATE_TIMESTAMP]
        vitrage_sample_timestamp = entity_event[DSProps.SAMPLE_DATE]
        update_timestamp = self._format_update_timestamp(
            AodhTransformer._timestamp(entity_event), vitrage_sample_timestamp)
        return graph_utils.create_vertex(
            self._create_entity_key(entity_event),
            vitrage_category=EntityCategory.ALARM,
            vitrage_type=entity_event[DSProps.ENTITY_TYPE],
            vitrage_sample_timestamp=vitrage_sample_timestamp,
            entity_id=entity_event[AodhProps.ALARM_ID],
            entity_state=self._get_alarm_state(entity_event),
            update_timestamp=update_timestamp,
            metadata=metadata)

    def _create_snapshot_neighbors(self, entity_event):
        return self._create_aodh_neighbors(entity_event)

    def _create_update_neighbors(self, entity_event):
        return self._create_aodh_neighbors(entity_event)

    def _create_aodh_neighbors(self, entity_event):
        """Connect the alarm to each affected resource vertex with an ON edge."""
        graph_neighbors = entity_event.get(self.QUERY_RESULT, [])
        result = []
        for vertex in graph_neighbors:
            edge = graph_utils.create_edge(
                source_id=TransformerBase.uuid_from_deprecated_vitrage_id(
                    self._create_entity_key(entity_event)),
                target_id=vertex.vertex_id,
                relationship_type=EdgeLabel.ON)
            result.append(Neighbor(vertex, edge))
        return result

    def _create_merge_alarm_vertex(self, entity_event):
        """Handle an alarm that already has a vitrage_id

        This is a deduced alarm created in aodh by vitrage, so it already
        exists in the graph.
        This function will update the exiting vertex (and not create a new one)
        """
        metadata = {
            AodhProps.DESCRIPTION: entity_event[AodhProps.DESCRIPTION],
            VProps.PROJECT_ID: entity_event[AodhProps.PROJECT_ID],
        }
        vitrage_sample_timestamp = entity_event[DSProps.SAMPLE_DATE]
        update_timestamp = self._format_update_timestamp(
            AodhTransformer._timestamp(entity_event), vitrage_sample_timestamp)
        return graph_utils.create_vertex(
            self._create_entity_key(entity_event),
            vitrage_category=EntityCategory.ALARM,
            vitrage_type=VITRAGE_DATASOURCE,
            vitrage_sample_timestamp=vitrage_sample_timestamp,
            entity_id=entity_event.get(AodhProps.ALARM_ID),
            update_timestamp=update_timestamp,
            metadata=metadata)

    def _ok_status(self, entity_event):
        # Anything other than the ALARM state counts as "ok".
        return entity_event[AodhProps.STATE] != AodhState.ALARM

    def _create_entity_key(self, entity_event):
        # Vitrage-originated alarms reuse their existing graph key.
        if _is_vitrage_alarm(entity_event):
            return entity_event.get(AodhProps.VITRAGE_ID)
        entity_type = entity_event[DSProps.ENTITY_TYPE]
        alarm_id = entity_event[AodhProps.ALARM_ID]
        return tbase.build_key((EntityCategory.ALARM, entity_type, alarm_id))

    @staticmethod
    def _timestamp(entity_event):
        # Normalize aodh's timestamp format to the transformer-wide format.
        return datetime_utils.change_time_str_format(
            entity_event[AodhProps.TIMESTAMP],
            AodhProps.TIME_FORMAT,
            tbase.TIMESTAMP_FORMAT)

    @staticmethod
    def get_enrich_query(event):
        # Query for the resource the alarm points at, if any.
        affected_resource_id = event.get(AodhProps.RESOURCE_ID, None)
        if not affected_resource_id:
            return None
        return {VProps.ID: affected_resource_id}

    def get_vitrage_type(self):
        return AODH_DATASOURCE
def _is_vitrage_alarm(entity_event):
    """Return True when the event describes an alarm created by vitrage itself."""
    vitrage_id = entity_event.get(AodhProps.VITRAGE_ID)
    return vitrage_id is not None
| 42.264151 | 79 | 0.728869 |
c0e88f704b81e26063b8124733978f7c30007df0
| 3,299 |
py
|
Python
|
app/grandchallenge/algorithms/urls.py
|
nlessmann/grand-challenge.org
|
36abf6ccb40e2fc3fd3ff00e81deabd76f7e1ef8
|
[
"Apache-2.0"
] | null | null | null |
app/grandchallenge/algorithms/urls.py
|
nlessmann/grand-challenge.org
|
36abf6ccb40e2fc3fd3ff00e81deabd76f7e1ef8
|
[
"Apache-2.0"
] | null | null | null |
app/grandchallenge/algorithms/urls.py
|
nlessmann/grand-challenge.org
|
36abf6ccb40e2fc3fd3ff00e81deabd76f7e1ef8
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import path
from grandchallenge.algorithms.views import (
AlgorithmAddRepo,
AlgorithmCreate,
AlgorithmDescriptionUpdate,
AlgorithmDetail,
AlgorithmExecutionSessionCreate,
AlgorithmExecutionSessionDetail,
AlgorithmExperimentCreate,
AlgorithmImageCreate,
AlgorithmImageDetail,
AlgorithmImageUpdate,
AlgorithmList,
AlgorithmPermissionRequestCreate,
AlgorithmPermissionRequestList,
AlgorithmPermissionRequestUpdate,
AlgorithmUpdate,
ComponentInterfaceList,
EditorsUpdate,
JobDetail,
JobExperimentDetail,
JobUpdate,
JobViewersUpdate,
JobsList,
UsersUpdate,
)
app_name = "algorithms"

# URL table for the algorithms app. Routes are grouped by the resource they
# act on; <slug> always identifies the algorithm, <uuid:pk> its sub-objects.
urlpatterns = [
    # Algorithm listing / creation and shared interfaces
    path("", AlgorithmList.as_view(), name="list"),
    path("create/", AlgorithmCreate.as_view(), name="create"),
    path(
        "interfaces/",
        ComponentInterfaceList.as_view(),
        name="component-interface-list",
    ),
    # Single algorithm detail and metadata updates
    path("<slug>/", AlgorithmDetail.as_view(), name="detail"),
    path("<slug>/update/", AlgorithmUpdate.as_view(), name="update"),
    path(
        "<slug>/description-update/",
        AlgorithmDescriptionUpdate.as_view(),
        name="description-update",
    ),
    path("<slug>/add-repo/", AlgorithmAddRepo.as_view(), name="add-repo"),
    # Container images attached to an algorithm
    path(
        "<slug>/images/create/",
        AlgorithmImageCreate.as_view(),
        name="image-create",
    ),
    path(
        "<slug>/images/<uuid:pk>/",
        AlgorithmImageDetail.as_view(),
        name="image-detail",
    ),
    path(
        "<slug>/images/<uuid:pk>/update/",
        AlgorithmImageUpdate.as_view(),
        name="image-update",
    ),
    # Execution sessions ("experiments")
    path(
        "<slug>/experiments/create/",
        AlgorithmExecutionSessionCreate.as_view(),
        name="execution-session-create",
    ),
    path(
        "<slug>/experiments/create/flex/",
        AlgorithmExperimentCreate.as_view(),
        name="execution-session-create-new",
    ),
    path(
        "<slug>/experiments/<uuid:pk>/",
        AlgorithmExecutionSessionDetail.as_view(),
        name="execution-session-detail",
    ),
    # Jobs produced by running the algorithm
    path("<slug>/jobs/", JobsList.as_view(), name="job-list"),
    path("<slug>/jobs/<uuid:pk>/", JobDetail.as_view(), name="job-detail",),
    path(
        "<slug>/jobs/<uuid:pk>/update/",
        JobUpdate.as_view(),
        name="job-update",
    ),
    path(
        "<slug>/jobs/<uuid:pk>/experiment/",
        JobExperimentDetail.as_view(),
        name="job-experiment-detail",
    ),
    path(
        "<slug>/jobs/<uuid:pk>/viewers/update/",
        JobViewersUpdate.as_view(),
        name="job-viewers-update",
    ),
    # Access control: editors, users, and permission requests
    path(
        "<slug>/editors/update/",
        EditorsUpdate.as_view(),
        name="editors-update",
    ),
    path("<slug>/users/update/", UsersUpdate.as_view(), name="users-update"),
    path(
        "<slug>/permission-requests/",
        AlgorithmPermissionRequestList.as_view(),
        name="permission-request-list",
    ),
    path(
        "<slug>/permission-requests/create/",
        AlgorithmPermissionRequestCreate.as_view(),
        name="permission-request-create",
    ),
    path(
        "<slug>/permission-requests/<int:pk>/update/",
        AlgorithmPermissionRequestUpdate.as_view(),
        name="permission-request-update",
    ),
]
| 28.439655 | 77 | 0.619885 |
0378330ba170494a79d7d67d06733d4f49f5caa9
| 5,870 |
py
|
Python
|
packages/api-server/api_server/models/rmf_api/task_state.py
|
mayman99/rmf-web
|
5670bd943567c6a866ec6345c972e6fb84d73476
|
[
"Apache-2.0"
] | 23 |
2021-04-13T23:01:12.000Z
|
2022-03-21T02:15:24.000Z
|
packages/api-server/api_server/models/rmf_api/task_state.py
|
mayman99/rmf-web
|
5670bd943567c6a866ec6345c972e6fb84d73476
|
[
"Apache-2.0"
] | 326 |
2021-03-10T17:32:17.000Z
|
2022-03-30T04:42:14.000Z
|
packages/api-server/api_server/models/rmf_api/task_state.py
|
mayman99/rmf-web
|
5670bd943567c6a866ec6345c972e6fb84d73476
|
[
"Apache-2.0"
] | 13 |
2021-04-10T10:33:36.000Z
|
2022-02-22T15:39:58.000Z
|
# generated by datamodel-codegen:
# filename: task_state.json
from __future__ import annotations
from enum import Enum
from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel, Field, conint
class Cancellation(BaseModel):
unix_millis_request_time: int = Field(
..., description="The time that the cancellation request arrived"
)
labels: List[str] = Field(..., description="Labels to describe the cancel request")
class Killed(BaseModel):
unix_millis_request_time: int = Field(
..., description="The time that the cancellation request arrived"
)
labels: List[str] = Field(..., description="Labels to describe the kill request")
class Booking(BaseModel):
id: str = Field(..., description="The unique identifier for this task")
unix_millis_earliest_start_time: Optional[int] = None
priority: Optional[Union[Dict[str, Any], str]] = Field(
None, description="Priority information about this task"
)
labels: Optional[List[str]] = Field(
None, description="Information about how and why this task was booked"
)
class Category(BaseModel):
__root__: str = Field(..., description="The category of this task or phase")
class Detail(BaseModel):
__root__: Union[Dict[str, Any], List, str] = Field(
..., description="Detailed information about a task, phase, or event"
)
class EstimateMillis(BaseModel):
__root__: conint(ge=0) = Field(
...,
description="An estimate, in milliseconds, of how long the subject will take to complete",
)
class Id(BaseModel):
__root__: conint(ge=0)
class ResumedBy(BaseModel):
unix_millis_request_time: Optional[int] = Field(
None, description="The time that the resume request arrived"
)
labels: List[str] = Field(..., description="Labels to describe the resume request")
class Interruption(BaseModel):
unix_millis_request_time: int = Field(
..., description="The time that the interruption request arrived"
)
labels: List[str] = Field(
..., description="Labels to describe the purpose of the interruption"
)
resumed_by: Optional[ResumedBy] = Field(
None,
description="Information about the resume request that ended this interruption. This field will be missing if the interruption is still active.",
)
class Status1(Enum):
    """Lifecycle status token for a task, phase, or event.

    NOTE(review): generated by datamodel-codegen from task_state.json;
    change the schema and regenerate rather than editing by hand.
    """
    uninitialized = "uninitialized"
    blocked = "blocked"
    error = "error"
    failed = "failed"
    standby = "standby"
    underway = "underway"
    delayed = "delayed"
    skipped = "skipped"
    canceled = "canceled"
    killed = "killed"
    completed = "completed"
class EventState(BaseModel):
id: Id
status: Optional[Status1] = Field(
None, description="A simple token representing how the task is proceeding"
)
name: Optional[str] = Field(None, description="The brief name of the event")
detail: Optional[Detail] = Field(
None, description="Detailed information about the event"
)
deps: Optional[List[conint(ge=0)]] = Field(
None,
description="This event may depend on other events. This array contains the IDs of those other event dependencies.",
)
class Undo(BaseModel):
unix_millis_request_time: int = Field(
..., description="The time that the undo skip request arrived"
)
labels: List[str] = Field(
..., description="Labels to describe the undo skip request"
)
class SkipPhaseRequest(BaseModel):
unix_millis_request_time: int = Field(
..., description="The time that the skip request arrived"
)
labels: List[str] = Field(
..., description="Labels to describe the purpose of the skip request"
)
undo: Optional[Undo] = Field(
None,
description="Information about an undo skip request that applied to this request",
)
class Phase(BaseModel):
id: Id
category: Optional[Category] = None
detail: Optional[Detail] = None
estimate_millis: Optional[EstimateMillis] = None
final_event_id: Optional[Id] = None
events: Optional[Dict[str, EventState]] = Field(
None,
description="A dictionary of events for this phase. The keys (property names) are the event IDs, which are integers.",
)
skip_requests: Optional[Dict[str, SkipPhaseRequest]] = Field(
None, description="Information about any skip requests that have been received"
)
class TaskState(BaseModel):
    """Full state of an RMF task (top-level model of task_state.json).

    NOTE(review): generated by datamodel-codegen; edit the JSON schema and
    regenerate rather than hand-modifying this class.
    """
    booking: Booking
    category: Optional[Category] = None
    detail: Optional[Detail] = None
    unix_millis_start_time: Optional[int] = None
    unix_millis_finish_time: Optional[int] = None
    estimate_millis: Optional[EstimateMillis] = None
    phases: Optional[Dict[str, Phase]] = Field(
        None,
        description="A dictionary of the states of the phases of the task. The keys (property names) are phase IDs, which are integers.",
    )
    completed: Optional[List[Id]] = Field(
        None, description="An array of the IDs of completed phases of this task"
    )
    active: Optional[Id] = Field(
        None, description="The ID of the active phase for this task"
    )
    pending: Optional[List[Id]] = Field(
        None, description="An array of the pending phases of this task"
    )
    interruptions: Optional[Dict[str, Interruption]] = Field(
        None,
        description="A dictionary of interruptions that have been applied to this task. The keys (property names) are the unique token of the interruption request.",
    )
    cancellation: Optional[Cancellation] = Field(
        None,
        description="If the task was cancelled, this will describe information about the request.",
    )
    killed: Optional[Killed] = Field(
        None,
        description="If the task was killed, this will describe information about the request.",
    )
| 33.352273 | 165 | 0.677683 |
5730abc79786d8c29543e4b7e75cea203d273b71
| 1,636 |
py
|
Python
|
lib/exabgp/bgp/message/update/attribute/community/communities.py
|
bopopescu/exabgp_priv
|
25aac31f2b1166350aa40d9f48447173998246b6
|
[
"BSD-3-Clause"
] | 2 |
2017-03-20T22:54:40.000Z
|
2021-02-24T02:05:38.000Z
|
lib/exabgp/bgp/message/update/attribute/community/communities.py
|
bopopescu/exabgp_priv
|
25aac31f2b1166350aa40d9f48447173998246b6
|
[
"BSD-3-Clause"
] | null | null | null |
lib/exabgp/bgp/message/update/attribute/community/communities.py
|
bopopescu/exabgp_priv
|
25aac31f2b1166350aa40d9f48447173998246b6
|
[
"BSD-3-Clause"
] | 1 |
2020-07-23T16:54:49.000Z
|
2020-07-23T16:54:49.000Z
|
# encoding: utf-8
"""
community.py
Created by Thomas Mangin on 2009-11-05.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
# ============================================================== Communities (8)
# http://www.iana.org/assignments/bgp-extended-communities
from exabgp.bgp.message.update.attribute.attribute import Attribute
from exabgp.bgp.message.update.attribute.community.community import Community
from exabgp.bgp.message.notification import Notify
class Communities (Attribute):
    """BGP COMMUNITY path attribute (type code 8): a list of Community values.

    NOTE(review): Python 2 code — pack()/unpack() operate on byte strings
    via str and ord(); do not port pieces to Python 3 in isolation.
    """
    ID = Attribute.CODE.COMMUNITY
    # Communities are optional transitive (RFC 1997)
    FLAG = Attribute.Flag.TRANSITIVE | Attribute.Flag.OPTIONAL

    # __slots__ = ['communities']

    def __init__ (self, communities=None):
        # Must be None as = param is only evaluated once
        if communities:
            self.communities = communities
        else:
            self.communities = []

    def add (self, data):
        # Append one Community (returns None, mirroring list.append)
        return self.communities.append(data)

    def pack (self, negotiated=None):
        # Wire encoding: attribute header plus 4 bytes per community.
        # An empty list encodes to nothing at all (attribute omitted).
        if len(self.communities):
            return self._attribute(''.join([c.pack() for c in self.communities]))
        return ''

    def __str__ (self):
        # Single community prints bare; several print space-separated in brackets.
        l = len(self.communities)
        if l > 1:
            return "[ %s ]" % " ".join(str(community) for community in self.communities)
        if l == 1:
            return str(self.communities[0])
        return ""

    def json (self):
        return "[ %s ]" % ", ".join(community.json() for community in self.communities)

    @staticmethod
    def unpack (data, negotiated):
        # Parse consecutive 4-byte community values; a trailing fragment
        # shorter than 4 bytes is a malformed attribute -> NOTIFICATION 3/1.
        communities = Communities()
        while data:
            if data and len(data) < 4:
                raise Notify(3,1,'could not decode community %s' % str([hex(ord(_)) for _ in data]))
            communities.add(Community.unpack(data[:4],negotiated))
            data = data[4:]
        return communities
| 27.728814 | 88 | 0.68154 |
7ea2833097a9b8fc9e3ada82e219eba4609a46aa
| 3,283 |
py
|
Python
|
python/dulwich/tests/test_utils.py
|
ckod3/vfxpipe
|
f420537345a5da943fd57ae98a6d1c894ea2c1bb
|
[
"Unlicense"
] | 63 |
2015-03-18T17:47:52.000Z
|
2022-03-07T03:26:23.000Z
|
python/dulwich/tests/test_utils.py
|
ckod3/vfxpipe
|
f420537345a5da943fd57ae98a6d1c894ea2c1bb
|
[
"Unlicense"
] | 3 |
2015-04-06T14:13:14.000Z
|
2020-05-25T21:54:54.000Z
|
python/dulwich/tests/test_utils.py
|
ckod3/vfxpipe
|
f420537345a5da943fd57ae98a6d1c894ea2c1bb
|
[
"Unlicense"
] | 27 |
2015-03-20T21:42:17.000Z
|
2022-01-17T10:55:26.000Z
|
# test_utils.py -- Tests for git test utilities.
# Copyright (C) 2010 Google, Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
"""Tests for git test utilities."""
from dulwich.object_store import (
MemoryObjectStore,
)
from dulwich.objects import (
Blob,
)
from dulwich.tests import (
TestCase,
)
from dulwich.tests.utils import (
make_object,
build_commit_graph,
)
class BuildCommitGraphTest(TestCase):
    """Tests for dulwich.tests.utils.build_commit_graph."""

    def setUp(self):
        super(BuildCommitGraphTest, self).setUp()
        # Fresh in-memory store per test; nothing touches disk.
        self.store = MemoryObjectStore()

    def test_linear(self):
        # Two commits in a line: parentage chains, the (empty) tree is
        # shared, and commit times strictly increase.
        c1, c2 = build_commit_graph(self.store, [[1], [2, 1]])
        for obj_id in [c1.id, c2.id, c1.tree, c2.tree]:
            self.assertTrue(obj_id in self.store)
        self.assertEqual([], c1.parents)
        self.assertEqual([c1.id], c2.parents)
        self.assertEqual(c1.tree, c2.tree)
        self.assertEqual([], list(self.store[c1.tree].iteritems()))
        self.assertTrue(c2.commit_time > c1.commit_time)

    def test_merge(self):
        # A merge commit lists both parents and is newer than each of them.
        c1, c2, c3, c4 = build_commit_graph(self.store,
                                            [[1], [2, 1], [3, 1], [4, 2, 3]])
        self.assertEqual([c2.id, c3.id], c4.parents)
        self.assertTrue(c4.commit_time > c2.commit_time)
        self.assertTrue(c4.commit_time > c3.commit_time)

    def test_missing_parent(self):
        # Referencing commit 2 before it is defined must fail.
        self.assertRaises(ValueError, build_commit_graph, self.store,
                          [[1], [3, 2], [2, 1]])

    def test_trees(self):
        # Per-commit tree contents (and optional mode) are honored.
        a1 = make_object(Blob, data='aaa1')
        a2 = make_object(Blob, data='aaa2')
        c1, c2 = build_commit_graph(self.store, [[1], [2, 1]],
                                    trees={1: [('a', a1)],
                                           2: [('a', a2, 0o100644)]})
        self.assertEqual((0o100644, a1.id), self.store[c1.tree]['a'])
        self.assertEqual((0o100644, a2.id), self.store[c2.tree]['a'])

    def test_attrs(self):
        # Explicit attrs override the generated defaults ("Commit N").
        c1, c2 = build_commit_graph(self.store, [[1], [2, 1]],
                                    attrs={1: {'message': 'Hooray!'}})
        self.assertEqual('Hooray!', c1.message)
        self.assertEqual('Commit 2', c2.message)

    def test_commit_time(self):
        # Given commit times are kept; later commits continue past the max.
        c1, c2, c3 = build_commit_graph(self.store, [[1], [2, 1], [3, 2]],
                                        attrs={1: {'commit_time': 124},
                                               2: {'commit_time': 123}})
        self.assertEqual(124, c1.commit_time)
        self.assertEqual(123, c2.commit_time)
        self.assertTrue(c2.commit_time < c1.commit_time < c3.commit_time)
| 38.174419 | 77 | 0.600975 |
0e6c0d0a24ca79980a6be0694895cfa167d77bf0
| 9,740 |
py
|
Python
|
ambari-server/src/main/resources/stacks/ADH/1.4/services/KAFKA/package/scripts/kafka.py
|
Arenadata/ambari
|
4628267441121779113d98936dcdf5d9be60553c
|
[
"Apache-2.0"
] | 5 |
2017-07-20T11:15:10.000Z
|
2020-04-16T15:42:55.000Z
|
ambari-server/src/main/resources/stacks/ADH/1.4/services/KAFKA/package/scripts/kafka.py
|
Arenadata/ambari
|
4628267441121779113d98936dcdf5d9be60553c
|
[
"Apache-2.0"
] | 3 |
2017-08-04T14:02:17.000Z
|
2018-06-06T14:47:25.000Z
|
ambari-server/src/main/resources/stacks/ADH/1.4/services/KAFKA/package/scripts/kafka.py
|
Arenadata/ambari
|
4628267441121779113d98936dcdf5d9be60553c
|
[
"Apache-2.0"
] | 12 |
2017-05-17T09:48:01.000Z
|
2021-08-05T19:01:25.000Z
|
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import collections
import os
#from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
from resource_management.libraries.resources.properties_file import PropertiesFile
from resource_management.libraries.resources.template_config import TemplateConfig
from resource_management.core.resources.system import Directory, Execute, File, Link
from resource_management.core.source import StaticFile, Template, InlineTemplate
from resource_management.libraries.functions import format
from resource_management.core.logger import Logger
def kafka(upgrade_type=None):
    """Write the Kafka broker configuration and supporting files.

    Builds server.properties from the Ambari-provided config, creates the
    data/log/pid directories with correct ownership, and installs env,
    log4j, JAAS and ulimit files. Python 2 module (octal literals, filter()
    returning a list).
    """
    import params
    ensure_base_directories()

    kafka_server_config = mutable_config_dict(params.config['configurations']['kafka-broker'])
    # This still has an issue of hostnames being alphabetically out-of-order for broker.id in HDP-2.2.
    # Starting in HDP 2.3, Kafka handles the generation of broker.id so Ambari doesn't have to.
    effective_version = None
    # effective_version = params.hdp_stack_version if upgrade_type is None else format_hdp_stack_version(params.version)
    Logger.info(format("Effective stack version: {effective_version}"))

    # NOTE(review): effective_version is pinned to None above and
    # compare_versions is not imported (see the commented import at the top
    # of the file), so both version-gated branches below currently fall
    # through to the final else — confirm this is intentional for this stack.
    if effective_version is not None and effective_version != "" and compare_versions(effective_version, '2.2.0.0') >= 0 and compare_versions(effective_version, '2.3.0.0') < 0:
        if len(params.kafka_hosts) > 0 and params.hostname in params.kafka_hosts:
            # broker.id is this host's index in the sorted host list.
            brokerid = str(sorted(params.kafka_hosts).index(params.hostname))
            kafka_server_config['broker.id'] = brokerid
            Logger.info(format("Calculating broker.id as {brokerid}"))

    # listeners and advertised.listeners are only added in 2.3.0.0 onwards.
    if effective_version is not None and effective_version != "" and compare_versions(effective_version, '2.3.0.0') >= 0:
        # Substitute the real hostname into the listener templates.
        listeners = kafka_server_config['listeners'].replace("localhost", params.hostname)
        Logger.info(format("Kafka listeners: {listeners}"))
        if params.security_enabled and params.kafka_kerberos_enabled:
            Logger.info("Kafka kerberos security is enabled.")
            if "SASL" not in listeners:
                listeners = listeners.replace("PLAINTEXT", "PLAINTEXTSASL")
            kafka_server_config['listeners'] = listeners
            kafka_server_config['advertised.listeners'] = listeners
            Logger.info(format("Kafka advertised listeners: {listeners}"))
        else:
            kafka_server_config['listeners'] = listeners
            if 'advertised.listeners' in kafka_server_config:
                advertised_listeners = kafka_server_config['advertised.listeners'].replace("localhost", params.hostname)
                kafka_server_config['advertised.listeners'] = advertised_listeners
                Logger.info(format("Kafka advertised listeners: {advertised_listeners}"))
    else:
        # Pre-2.3 brokers use host.name instead of listeners.
        kafka_server_config['host.name'] = params.hostname

    if params.has_metric_collector:
        kafka_server_config['kafka.timeline.metrics.host'] = params.metric_collector_host
        kafka_server_config['kafka.timeline.metrics.port'] = params.metric_collector_port

    # log.dirs may be a comma-separated list; drop empty entries.
    kafka_data_dir = kafka_server_config['log.dirs']
    kafka_data_dirs = filter(None, kafka_data_dir.split(","))
    Directory(kafka_data_dirs[:],  # Todo: remove list copy when AMBARI-14373 is fixed
              mode=0755,
              cd_access='a',
              owner=params.kafka_user,
              group=params.user_group,
              create_parents=True)
    set_dir_ownership(kafka_data_dirs)

    PropertiesFile("server.properties",
                   dir=params.conf_dir,
                   properties=kafka_server_config,
                   owner=params.kafka_user,
                   group=params.user_group,
                   )

    File(format("{conf_dir}/kafka-env.sh"),
         owner=params.kafka_user,
         content=InlineTemplate(params.kafka_env_sh_template)
         )

    if (params.log4j_props != None):
        File(format("{conf_dir}/tools-log4j.properties"),
             mode=0644,
             group=params.user_group,
             owner=params.kafka_user,
             content=params.log4j_props
             )

    if params.security_enabled and params.kafka_kerberos_enabled:
        TemplateConfig(format("{conf_dir}/kafka_jaas.conf"),
                       owner=params.kafka_user)
        TemplateConfig(format("{conf_dir}/kafka_client_jaas.conf"),
                       owner=params.kafka_user)

    # On some OS this folder could be not exists, so we will create it before pushing there files
    Directory(params.limits_conf_dir,
              create_parents=True,
              owner='root',
              group='root'
              )

    File(os.path.join(params.limits_conf_dir, 'kafka.conf'),
         owner='root',
         group='root',
         mode=0644,
         content=Template("kafka.conf.j2")
         )

    # The bash launcher hardcodes its pid/log dirs; point them at ours.
    setup_symlink(params.kafka_managed_pid_dir, params.kafka_pid_dir)
    setup_symlink(params.kafka_managed_log_dir, params.kafka_log_dir)
def mutable_config_dict(kafka_broker_config):
    """Return a plain mutable shallow copy of the broker configuration.

    Ambari hands the configuration over as a read-only mapping; broker.id,
    listeners, etc. are patched into the copy afterwards.
    """
    # dict() copies any mapping in one call and works on both Python 2
    # and 3, unlike the original hand-rolled loop over iteritems()
    # (py2-only) it replaces. Behavior is identical: a shallow copy.
    return dict(kafka_broker_config)
# Used to workaround the hardcoded pid/log dir used on the kafka bash process launcher
def setup_symlink(kafka_managed_dir, kafka_ambari_managed_dir):
    """Make the launcher's hardcoded dir a symlink to the Ambari-managed dir.

    If the two paths coincide, any stale symlink is replaced by a real
    directory instead. Pre-existing contents are backed up and restored.
    """
    import params
    backup_folder_path = None
    backup_folder_suffix = "_tmp"
    if kafka_ambari_managed_dir != kafka_managed_dir:
        if os.path.exists(kafka_managed_dir) and not os.path.islink(kafka_managed_dir):
            # Backup existing data before delete if config is changed repeatedly to/from
            # default location at any point in time, as there may be relevant contents (historic logs)
            backup_folder_path = backup_dir_contents(kafka_managed_dir, backup_folder_suffix)
            Directory(kafka_managed_dir,
                      action="delete",
                      create_parents=True)
        elif os.path.islink(kafka_managed_dir) and os.path.realpath(kafka_managed_dir) != kafka_ambari_managed_dir:
            # Symlink exists but points elsewhere: drop it so it can be recreated.
            Link(kafka_managed_dir,
                 action="delete")

        if not os.path.islink(kafka_managed_dir):
            Link(kafka_managed_dir,
                 to=kafka_ambari_managed_dir)
    elif os.path.islink(kafka_managed_dir): # If config is changed and coincides with the kafka managed dir, remove the symlink and physically create the folder
        Link(kafka_managed_dir,
             action="delete")
        Directory(kafka_managed_dir,
                  mode=0755,
                  cd_access='a',
                  owner=params.kafka_user,
                  group=params.user_group,
                  create_parents=True)
        set_dir_ownership(kafka_managed_dir)

    if backup_folder_path:
        # Restore backed up files to current relevant dirs if needed - will be triggered only when changing to/from default path;
        for file in os.listdir(backup_folder_path):
            File(os.path.join(kafka_managed_dir,file),
                 owner=params.kafka_user,
                 content = StaticFile(os.path.join(backup_folder_path,file)))

        # Clean up backed up folder
        Directory(backup_folder_path,
                  action="delete",
                  create_parents=True)
# Uses agent temp dir to store backup files
def backup_dir_contents(dir_path, backup_folder_suffix):
    """Copy the top-level files of dir_path into a temp backup dir.

    Returns the backup directory path. Only regular top-level entries are
    copied (no recursion), matching how setup_symlink restores them.
    """
    import params
    backup_destination_path = params.tmp_dir + os.path.normpath(dir_path)+backup_folder_suffix
    Directory(backup_destination_path,
              mode=0755,
              cd_access='a',
              owner=params.kafka_user,
              group=params.user_group,
              create_parents=True
              )
    set_dir_ownership(backup_destination_path)
    # Safely copy top-level contents to backup folder
    for file in os.listdir(dir_path):
        File(os.path.join(backup_destination_path, file),
             owner=params.kafka_user,
             content = StaticFile(os.path.join(dir_path,file)))

    return backup_destination_path
def ensure_base_directories():
    """
    Make basic Kafka directories, and make sure that their ownership is correct
    """
    import params
    base_dirs = [params.kafka_log_dir, params.kafka_pid_dir, params.conf_dir]
    Directory(base_dirs[:],  # Todo: remove list copy when AMBARI-14373 is fixed
              mode=0755,
              cd_access='a',
              owner=params.kafka_user,
              group=params.user_group,
              create_parents=True
              )
    set_dir_ownership(base_dirs)
def set_dir_ownership(targets):
    """Recursively chown the given path(s) to the Kafka user and group.

    Accepts a single path or any iterable of paths.
    """
    import params
    if isinstance(targets, collections.Iterable):
        directories = targets
    else:  # If target is a single object, convert it to list
        directories = [targets]
    for directory in directories:
        # If path is empty or a single slash,
        # may corrupt filesystem permissions
        if len(directory) > 1:
            Execute(('chown', '-R', format("{kafka_user}:{user_group}"), directory),
                    sudo=True)
        else:
            Logger.warning("Permissions for the folder \"%s\" were not updated due to "
                           "empty path passed: " % directory)
| 40.583333 | 176 | 0.708624 |
58e0dabd3bc4ae1c37cbe52e9b52c29aa210ee84
| 899 |
py
|
Python
|
02_crowsnest/crowsnest.py
|
overtune/tiny_python_projects
|
b0a30a2c5b7e1e68e83597e055fa8d6ee2d9edca
|
[
"MIT"
] | null | null | null |
02_crowsnest/crowsnest.py
|
overtune/tiny_python_projects
|
b0a30a2c5b7e1e68e83597e055fa8d6ee2d9edca
|
[
"MIT"
] | null | null | null |
02_crowsnest/crowsnest.py
|
overtune/tiny_python_projects
|
b0a30a2c5b7e1e68e83597e055fa8d6ee2d9edca
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""
Author : Johan Runesson <[email protected]>
Date : 2020-08-03
Purpose: Crow\'s Nest -- choose the correct article
"""
import argparse
# --------------------------------------------------
def get_args():
    """Get command-line arguments."""
    parser = argparse.ArgumentParser(
        description='Crow\'s Nest -- choose the correct article',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument('word', metavar='word', help='A word')

    return parser.parse_args()


# --------------------------------------------------
def main():
    """Greet the captain with the grammatically correct article."""
    args = get_args()
    word = args.word
    # Guard against an empty word: without it, word[0] raises IndexError.
    article = 'an' if word and word[0].lower() in 'aeiou' else 'a'

    print(f'Ahoy, Captain, {article} {word} off the larboard bow!')


# --------------------------------------------------
if __name__ == '__main__':
    main()
| 23.657895 | 67 | 0.538376 |
62fa3b2ca382ca60c1ae7fa2f5601921fa435cc9
| 3,899 |
py
|
Python
|
a3c.py
|
smartsystems4u/MORL
|
feab5412f1e8c9523437da00de0320a5d018b491
|
[
"MIT"
] | 3 |
2021-07-07T14:51:30.000Z
|
2022-03-24T04:19:28.000Z
|
a3c.py
|
smartsystems4u/MORL
|
feab5412f1e8c9523437da00de0320a5d018b491
|
[
"MIT"
] | 2 |
2021-11-22T13:59:59.000Z
|
2021-12-20T08:26:56.000Z
|
a3c.py
|
smartsystems4u/MORL
|
feab5412f1e8c9523437da00de0320a5d018b491
|
[
"MIT"
] | 1 |
2020-08-27T01:22:43.000Z
|
2020-08-27T01:22:43.000Z
|
import gym
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.distributions import Categorical
# import torch.multiprocessing as mp
import multiprocessing as mp
import time
# Hyperparameters
n_train_processes = 5       # number of parallel training workers (plus 1 test process)
learning_rate = 0.0002
update_interval = 5         # environment steps between local->global syncs
gamma = 0.98                # discount factor
max_train_ep = 300          # episodes per training worker
max_test_ep = 400           # episodes for the evaluation process
class ActorCritic(nn.Module):
    """Small shared-trunk actor-critic network for CartPole (4 obs, 2 actions)."""

    def __init__(self):
        super(ActorCritic, self).__init__()
        # One shared hidden layer feeds both heads. Attribute names are part
        # of the state_dict keys shared between workers, so they stay fixed.
        self.fc1 = nn.Linear(4, 256)
        self.fc_pi = nn.Linear(256, 2)   # policy head
        self.fc_v = nn.Linear(256, 1)    # value head

    def pi(self, x, softmax_dim=0):
        """Return the action probabilities (softmax over the policy head)."""
        hidden = F.relu(self.fc1(x))
        logits = self.fc_pi(hidden)
        return F.softmax(logits, dim=softmax_dim)

    def v(self, x):
        """Return the state-value estimate."""
        hidden = F.relu(self.fc1(x))
        return self.fc_v(hidden)
def train(global_model, rank):
    """A3C worker: roll out CartPole episodes and push gradients to global_model.

    Each worker keeps a local copy of the network, collects up to
    update_interval steps, computes n-step TD targets, and applies the
    local gradients directly to the shared global parameters.
    """
    local_model = ActorCritic()
    local_model.load_state_dict(global_model.state_dict())

    # Note: the optimizer updates the *global* model's parameters.
    optimizer = optim.Adam(global_model.parameters(), lr=learning_rate)

    env = gym.make('CartPole-v1')

    for n_epi in range(max_train_ep):
        done = False
        s = env.reset()
        while not done:
            s_lst, a_lst, r_lst = [], [], []
            # Collect a short rollout (at most update_interval steps).
            for t in range(update_interval):
                prob = local_model.pi(torch.from_numpy(s).float())
                m = Categorical(prob)
                a = m.sample().item()
                s_prime, r, done, info = env.step(a)

                s_lst.append(s)
                a_lst.append([a])
                r_lst.append(r/100.0)  # reward scaled down for stability

                s = s_prime
                if done:
                    break

            # Bootstrap from the value of the last state unless terminal.
            s_final = torch.tensor(s_prime, dtype=torch.float)
            R = 0.0 if done else local_model.v(s_final).item()
            td_target_lst = []
            for reward in r_lst[::-1]:
                R = gamma * R + reward
                td_target_lst.append([R])
            td_target_lst.reverse()

            s_batch, a_batch, td_target = torch.tensor(s_lst, dtype=torch.float), torch.tensor(a_lst), \
                torch.tensor(td_target_lst)
            advantage = td_target - local_model.v(s_batch)

            pi = local_model.pi(s_batch, softmax_dim=1)
            pi_a = pi.gather(1, a_batch)
            # Policy-gradient loss plus value-function regression loss.
            loss = -torch.log(pi_a) * advantage.detach() + \
                F.smooth_l1_loss(local_model.v(s_batch), td_target.detach())

            optimizer.zero_grad()
            loss.mean().backward()
            # Copy local gradients onto the shared global parameters,
            # then step the optimizer and re-sync the local copy.
            for global_param, local_param in zip(global_model.parameters(), local_model.parameters()):
                global_param._grad = local_param.grad
            optimizer.step()
            local_model.load_state_dict(global_model.state_dict())

    env.close()
    print("Training process {} reached maximum episode.".format(rank))
def test(global_model):
    """Evaluation process: greedily-sampled episodes on the shared model.

    Runs alongside the training workers and prints the average score
    every print_interval episodes. Does not update any parameters.
    """
    env = gym.make('CartPole-v1')
    score = 0.0
    print_interval = 20

    for n_epi in range(max_test_ep):
        done = False
        s = env.reset()
        while not done:
            prob = global_model.pi(torch.from_numpy(s).float())
            a = Categorical(prob).sample().item()
            s_prime, r, done, info = env.step(a)
            s = s_prime
            score += r

        if n_epi % print_interval == 0 and n_epi != 0:
            print("# of episode :{}, avg score : {:.1f}".format(
                n_epi, score/print_interval))
            score = 0.0
            # Throttle evaluation so training workers get CPU time.
            time.sleep(1)
    env.close()
if __name__ == '__main__':
    global_model = ActorCritic()
    # Put the parameters in shared memory so every worker process
    # reads and writes the same tensors.
    global_model.share_memory()

    processes = []
    for rank in range(n_train_processes + 1):  # + 1 for test process
        if rank == 0:
            # rank 0 is the evaluation-only process.
            p = mp.Process(target=test, args=(global_model,))
        else:
            p = mp.Process(target=train, args=(global_model, rank,))
        p.start()
        processes.append(p)
    for p in processes:
        p.join()
| 29.992308 | 104 | 0.571172 |
e3878721cd28cbc9587ea975b424d76f88d7f36e
| 22,036 |
py
|
Python
|
Lab6/game.py
|
bellerjb/ISCS375
|
e50cc4547c8dfd8771c1ae04ecdcc7dbaa232698
|
[
"BSD-3-Clause"
] | null | null | null |
Lab6/game.py
|
bellerjb/ISCS375
|
e50cc4547c8dfd8771c1ae04ecdcc7dbaa232698
|
[
"BSD-3-Clause"
] | null | null | null |
Lab6/game.py
|
bellerjb/ISCS375
|
e50cc4547c8dfd8771c1ae04ecdcc7dbaa232698
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python
from flask import Flask, redirect, render_template, request
from random import randint
app = Flask(__name__)
# NOTE(review): hard-coded secret key checked into source — load it from an
# environment variable or config file instead.
app.secret_key = '23psHVWbxBYHYoeKYQp2'

# --- Module-global game state (single player, single session) ---
health = 20                                   # player hull points
power = 0                                     # charge; UI shows power//2
# Weapon name -> cooldown turns remaining (0 = ready, negative = disabled)
weapons = {'missile' : 2, 'burst' : 1}#, 'ion' : 3, 'flak' : 9}
money = 20
location = 1                                  # current node in the travel map
state = 0
stopped = 0                                   # turns the player is ion-disabled
# Enemy ship: hull, disable counter, weapon cooldowns, and kill reward
enemy = {'health' : 10, 'stopped' : 0, 'weapons' : {'missile' : 3, 'burst' : 2, 'ion' : -1, 'flak' : -1}, 'reward' : 20}
# Map of location -> {button label: destination id}
travel = {1 : {' UP ' : '2', ' DOWN ' : '4'},
2 : {' NEXT ' : '3', ' BACK ' : '1'},
3 : {' NEXT ' : '7', ' BACK ' : '2'},
4 : {' UP ' : '5', ' DOWN ' : '6', ' BACK ' : '1'},
5 : {' NEXT ' : '7', ' BACK ' : '4'},
6 : {' NEXT ' : '8', ' BACK ' : '4'},
7 : {' NEXT ' : '9', ' BACK TOP ' : '3', ' BACK BOT ' : '5'},
8 : {' NEXT ' : '9', ' BACK ' : '6'},
9 : {' NEXT ' : '10', ' BACK TOP ' : '7', ' BACK BOT ' : '8'},
10 : {' END ' : '10'}}
# Tracks which locations' encounters have been completed
done = {2 : False, 3 : False, 4 : False, 5 : False, 6 : False, 7 : False, 8 : False, 9 : False}
# JS snippet that hides the popup message box
CLOSEPOPPUP = 'javascript:void(document.getElementById("message").style.display="none");'
def renderGame(title, content, options):
    # Render the main game template with the full player/ship state plus
    # the popup's title, body text, and button options.
    return render_template('game/game.html', title=title, location=location, symbol='*', health=health, power=power//2, destinations=travel[location],weapons=weapons , money=money, content=content, options=options)
def incrementCooldown():
    """Advance one combat tick: recharge weapons and power, tick down disables.

    Cooldown of 0 means "ready to fire"; negative values mean "not installed"
    and are left untouched.
    """
    global power
    global stopped
    for name, cooldown in weapons.items():
        if cooldown > 0:
            weapons[name] = cooldown - 1
    for name, cooldown in enemy['weapons'].items():
        if cooldown > 0:
            enemy['weapons'][name] = cooldown - 1
    if power < 20:
        power += 1
    if stopped > 0:
        stopped -= 1
    if enemy['stopped'] > 0:
        enemy['stopped'] -= 1
def initBattle(enabled):
    """Reset all owned weapons for battle (enabled) or disarm them all.

    When enabled, each known weapon starts on its full reload time; when
    disabled, every weapon is set to -1 ("not usable").
    """
    # NOTE(review): the original compares with == True, so only the exact
    # boolean True arms the weapons; preserved as-is.
    if (enabled == True):
        reload_times = {'missile': 2, 'burst': 1, 'ion': 3, 'flak': 9}
        for name in weapons:
            if name in reload_times:
                weapons[name] = reload_times[name]
    else:
        for name in weapons:
            weapons[name] = -1
def battle(action):
    """Resolve one combat round and render the battle report popup.

    First applies the player's chosen weapon (unless ion-disabled), then
    gives the enemy a 50% chance to fire each ready weapon, then checks
    for victory.

    action: weapon key ('missile', 'burst', 'ion', 'flak'); any other
    value (e.g. 'pass') simply advances the turn.
    """
    global health
    global state
    global stopped
    global location
    global money
    text = ''
    title = ''
    options = {'Close' : CLOSEPOPPUP}
    # --- Player's turn: skipped while disabled by an enemy ion hit. ---
    if (stopped == 0):
        if (action == 'missile'):
            text = text + 'Your pegasus missile landed and dealt two damage to the enemy hull. '
            enemy['health'] = enemy['health'] - 2
            weapons['missile'] = 2
        if (action == 'burst'):
            roll = randint(0, 3)
            text = text + 'Your burst lasers dealt ' + {0:'zero',1:'one',2:'two',3:'three'}[roll] + ' damage to the enemy hull. '
            enemy['health'] = enemy['health'] - roll
            weapons['burst'] = 1
        if (action == 'ion'):
            roll = randint(1, 3)
            if (roll > 1):
                text = text + 'Your heavy ion blaster hit and disabled the enemy for a bit. '
                enemy['stopped'] = roll
            else:
                text = text + 'Your heavy ion blaster missed. '
            weapons['ion'] = 3
        if (action == 'flak'):
            roll = randint(1, 2)
            text = text + 'Your flak gun dealt ' + {1:'seven',2:'fourteen'}[roll] + ' damage to the enemy hull. '
            enemy['health'] = enemy['health'] - (7 * roll)
            weapons['flak'] = 9
    else:
        text = text + 'You are still disabled from the enemy ion blast. '
    # --- Enemy's turn: each ready weapon gets a coin-flip chance to fire. ---
    if (enemy['stopped'] == 0):
        if (enemy['weapons']['missile'] == 0 and randint(0, 1) == 1):
            # BUGFIX: the hit always deals two damage but the message said 'one'.
            text = text + 'The enemy pegasus missile landed and dealt two damage to your hull. '
            enemy['weapons']['missile'] = 3
            health = health - 2
        elif (enemy['weapons']['burst'] == 0 and randint(0, 1) == 1):
            roll = randint(0, 2)
            text = text + 'The enemy lasers dealt ' + {0:'zero',1:'one',2:'two'}[roll] + ' damage to your hull. '
            health = health - roll
            enemy['weapons']['burst'] = 2
        elif (enemy['weapons']['ion'] == 0 and randint(0, 1) == 1):
            roll = randint(0, 2)
            if (roll > 0):
                # BUGFIX: grammar ('disabled the you' -> 'disabled you').
                text = text + 'The enemy heavy ion blaster hit and disabled you for a bit. '
            else:
                text = text + 'The enemy heavy ion blaster fired, but missed. '
            stopped = roll
            enemy['weapons']['ion'] = 4
        elif (enemy['weapons']['flak'] == 0 and randint(0, 1) == 1):
            roll = randint(1, 2)
            text = text + 'The enemy flak gun dealt ' + {1:'four',2:'eight'}[roll] + ' damage to your hull. '
            health = health - (4 * roll)
            # BUGFIX: reset the *flak* cooldown. The old code set
            # enemy['weapons']['missile'] = 10 here, which left flak at 0
            # (able to refire every round) and locked missiles for 10 turns.
            enemy['weapons']['flak'] = 10
    else:
        text = text + 'The enemy is still disabled from the ion blast. '
    # --- Victory check: end the fight, mark the sector cleared, pay out. ---
    if (enemy['health'] < 1):
        state = 2
        done[location] = True
        money = money + enemy['reward']
    return renderGame(title, text, options)
def read():
    """Render the story screen for the current `location` and `state`.

    Each sector is a branch: it (re)initializes weapons via initBattle,
    sets `power`, spawns an `enemy` when a fight starts (state -> 1), and
    returns the rendered page. Per-sector state values roughly mean:
    0 = first visit, 2 = event just resolved, 3 = sector already cleared,
    4 = follow-up step (confirm a purchase/reward). Battles themselves are
    handled in battle(), not here.
    """
    global power
    global state
    global enemy
    global health
    global money
    if (location == 1):
        # Sector 1: intro text; no combat, full power.
        initBattle(False)
        power = 20
        title = 'Slower Than Light!'
        content = 'You load into a game that seems strangely familiar to <a href="https://www.gog.com/game/faster_than_light">another game</a> but you quickly realise that that would be silly, because no one would rip-off <a href="http://store.steampowered.com/app/212680/FTL_Faster_Than_Light/">another game</a> so shamelessly without attribution. If there was a fantastic <a href="https://itunes.apple.com/us/app/ftl-faster-than-light/id833951143?mt=8">indie game</a> like this, (which there <a href="https://www.humblebundle.com/store/ftl-faster-than-light">totally isn\'t</a>) it would definitely be more interesting than this anyways. This web game, which looks quite similar to the authors previous works, throws you directly into the action by telling you that you are piloting a ship with critical information gathered by spies that would help the resistance a lot. With the Galactic Federation hot on your tail, you must reach home-base quickly. Jump to the next point.'
        options = {'Embark' : CLOSEPOPPUP}
        return renderGame(title, content, options)
    elif (location == 2):
        # Sector 2: scout drone fight ($20 reward).
        if (state == 2):
            initBattle(False)
            power = 20
            title = 'Victory! | Slower Than Light!'
            content = 'Your final shot flies into the drone. A small puff of gas shoots out of its exhaust before the drone explodes into a massive cloud of metal and gas. You grab $20 out of the wreckage. This sector should be clear. Our journey has only just started and we have many more trials ahead of us.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        elif (state == 3):
            initBattle(False)
            power = 20
            title = 'Sector 2 | Slower Than Light!'
            content = 'The sector is completely empty. No life can be found. Some of the debris from the scout is still floating around.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        else:
            # First visit: spawn the drone and enter battle (state 1).
            initBattle(True)
            power = 0
            enemy = {'health' : 10, 'stopped' : 0, 'weapons' : {'missile' : 3, 'burst' : 2, 'ion' : -1, 'flak' : -1}, 'reward' : 20}
            title = 'Scout Drone | Slower Than Light!'
            content = 'After that confusing introduction and totally not cliche "start with forked paths" deal out of the way, you scan around the system you just juped too. Your scanners quickly notice a Federation drone scout about three AU away. You can\'t let the Federation find you. Destroy that ship.'
            options = {'Ready the Guns!' : '?action=pass'}
            state = 1
            return renderGame(title, content, options)
    elif (location == 3):
        # Sector 3: Federation fighter fight ($15 reward).
        if (state == 2):
            initBattle(False)
            power = 20
            title = 'Victory! | Slower Than Light!'
            content = 'The hull to the fighter suddenly rips open and the ship implodes on itself. You feel tempted to search the wreckage for any intel or weapons, but out of fear for more ships warping in, you grab $15 and get going.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        elif (state == 3):
            initBattle(False)
            power = 20
            title = 'Sector 3 | Slower Than Light!'
            content = 'The sector remains quiet. You hurry past the planet that the fighter warped to out of fear than more might be coming.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        else:
            initBattle(True)
            power = 0
            enemy = {'health' : 8, 'stopped' : 0, 'weapons' : {'missile' : 2, 'burst' : 1, 'ion' : 3, 'flak' : -1}, 'reward' : 15}
            title = 'Federation Fighter | Slower Than Light!'
            content = 'Your day just keeps getting worse. An advanced Federation Fighter just warped into this system. You try to evade it, but it quickly locates you and sets it\'s guns. The Federation is clearly looking for you.'
            options = {'Ready the Guns!' : '?action=pass'}
            state = 1
            return renderGame(title, content, options)
    elif (location == 4):
        # Sector 4: shield-test event; state 2 -> 4 confirms and pays $20.
        if (state == 2):
            initBattle(False)
            power = 0
            title = 'Weapons Test | Slower Than Light!'
            content = 'You fire the missile, and sure enough it is destroyed by the sheild. The crew of the ship thanks you and sends over a small reward.'
            options = {'Next' : 'do?state=4'}
            return renderGame(title, content, options)
        elif (state == 3):
            initBattle(False)
            power = 20
            title = 'Sector 4 | Slower Than Light!'
            content = 'Your ship pauses for a bit as your crew admires the wonderful sheilds on the research vessel.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        elif (state == 4):
            # The reward is only granted in state 4 so it can't be re-earned.
            initBattle(False)
            power = 20
            title = 'Sector 4 | Slower Than Light!'
            content = 'Your ship pauses for a bit as your crew admires the wonderful sheilds on the research vessel.'
            options = {'Close' : CLOSEPOPPUP}
            money = money + 20
            done[location] = True
            return renderGame(title, content, options)
        else:
            initBattle(False)
            power = 20
            title = 'Distress Signal | Slower Than Light!'
            content = 'You heasitantly approach this next system to inquire about the distress signal. The ship notices you and phones in with a strange request: shoot a missile at their ship. Aparently they are testing a new kind of sheild that is resistant to missiles.'
            options = {'Fire the Missile' : 'do?state=2', 'Leave' : CLOSEPOPPUP}
            return renderGame(title, content, options)
    elif (location == 5):
        # Sector 5: Trader-Bot shop; $60 buys the Flak Gun II.
        if (state == 3):
            initBattle(False)
            power = 20
            title = 'Sector 5 | Slower Than Light!'
            content = 'You dial into Trader-Bot, but get redirected. I guess it is closed for now.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        elif (state == 2):
            initBattle(False)
            power = 0
            title = ''
            content = ''
            if (money >= 60) :
                title = 'TRANSACTION SUCCESSFUL | Slower Than Light!'
                content = 'You wire the $60 to Trader-Bot, and a small delivery drone flies over to your ship to deliver your new <b>Flak Gun II</b>. Your engineers quickly scramble to install it and it fits perfectly. It was smaller than you imagined, however.'
                money = money - 60
                weapons['flak'] = 9
                done[5] = True
            else:
                title = 'TRANSACTION FAILED | Slower Than Light!'
                content = 'You attempt to wire $60 to Trader-Bot, but you quickly realize that you do not have that. Come back later when you can afford the <b>Flak Gun II</b>.'
            options = {'Next' : '5'}
            return renderGame(title, content, options)
        else:
            initBattle(False)
            power = 20
            title = 'Shop | Slower Than Light!'
            content = 'This next system is the domain of the famous Trader-Bot. You phone in to the bot to see what is in store. Trader-Bot offers a new class of weapon: the <b>Flak Weapon</b>. This weapon is capable of massive damage, but it takes a while to charge up. Trader-Bot wants $60 for it.'
            options = {'Buy the Flak Gun II' : 'do?state=2', 'Leave' : CLOSEPOPPUP}
            return renderGame(title, content, options)
    elif (location == 6):
        # Sector 6: black-market shop; $45 buys the Heavy Ion Cannon.
        if (state == 3):
            initBattle(False)
            power = 20
            title = 'Sector 6 | Slower Than Light!'
            content = 'The mysterious trading vessel is gone now.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        elif (state == 2):
            initBattle(False)
            power = 0
            title = ''
            content = ''
            if (money >= 45) :
                title = 'Transaction Successful| Slower Than Light!'
                content = 'You wire the $45 to the mysterious ship, and a beam request pings. You accept it, and a small, four legged creature scurries off the pad, drops your new <b>Heavy Ion Cannon</b> and quickly beams back. Your engineers install it.'
                money = money - 45
                weapons['ion'] = 3
                done[6] = True
            else:
                title = 'Transaction Failed | Slower Than Light!'
                content = 'You attempt to wire $45 to the mysterious ship, but you realize that you do not have that much. You tell the ship that you might come back when you have enough to buy the <b>Heavy Ion Cannon</b>.'
            options = {'Next' : '6'}
            return renderGame(title, content, options)
        else:
            initBattle(False)
            power = 20
            title = 'Blackmarket | Slower Than Light!'
            content = 'As you warp into the system you get a mysterious transmission. Another ship nearby is offering to sell you confidential Federation weapons for pretty cheap. You don\'t know if you can trust them, but a <b>Heavy Ion Cannon</b> for only $45 is a great deal.'
            options = {'Buy the Heavy Ion Cannon' : 'do?state=2', 'Leave' : CLOSEPOPPUP}
            return renderGame(title, content, options)
    elif (location == 7):
        # Sector 7: scam event; costs up to $15 (capped at current money).
        if (state == 2):
            initBattle(False)
            power = 0
            title = 'Helping Hand | Slower Than Light!'
            loss = 15
            if (money < loss):
                loss = money
            content = 'You send a crewmember over, but as soon as he confirms that he is on their ship his transmission is cut off. The ship suddently jumps away. He was carrying ${}'.format(loss)
            options = {'Next' : 'do?state=4'}
            return renderGame(title, content, options)
        elif (state == 3):
            initBattle(False)
            power = 20
            title = 'Sector 7 | Slower Than Light!'
            content = 'The air smells like lies. You still feel angry even though $15 isn\'t that much.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        elif (state == 4):
            # The money is actually deducted here, in the confirmation step.
            initBattle(False)
            power = 20
            title = 'Sector 7 | Slower Than Light!'
            content = 'The air smells like lies. You still feel angry even though $15 isn\'t that much.'
            options = {'Close' : CLOSEPOPPUP}
            loss = 15
            if (money < loss):
                loss = money
            money = money - loss
            done[7] = True
            return renderGame(title, content, options)
        else:
            initBattle(False)
            power = 20
            title = 'Distress Signal | Slower Than Light!'
            content = 'A nearby ship is signalling for help. They claim that there is a creature onboard hiding in their air-system. Their claim reminds you of the plot of a certain movie and you fear that this situation might be similar.'
            options = {'Send a Crewmember Over' : 'do?state=2', 'Leave' : CLOSEPOPPUP}
            return renderGame(title, content, options)
    elif (location == 8):
        # Sector 8: elite fighter fight ($30 reward).
        if (state == 2):
            initBattle(False)
            power = 20
            title = 'Victory! | Slower Than Light!'
            content = 'The fighter explodes in a beautiful blast. You loot $30 out of the debris. This ship was really strong, so you know that they are close on you. Thankfully you are almost in the safe area.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        elif (state == 3):
            initBattle(False)
            power = 20
            title = 'Sector 8 | Slower Than Light!'
            content = 'A good battle was fought here today. You feel a sense of pride.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        else:
            initBattle(True)
            power = 0
            enemy = {'health' : 15, 'stopped' : 0, 'weapons' : {'missile' : 1, 'burst' : 0, 'ion' : 3, 'flak' : -1}, 'reward' : 30}
            title = 'Elite Federation Fighter | Slower Than Light!'
            content = 'Your sensors suddenly shut off. After a brief period, the turn back on only to reveal an Elite Federation Fighter charging up its missiles. You prepare to attack.'
            options = {'Ready the Guns!' : '?action=pass'}
            state = 1
            return renderGame(title, content, options)
    elif (location == 9):
        # Sector 9: free repair (up to +10 hull, capped at 20), then redirect.
        if (state == 3):
            new = 10;
            if (health > 10):
                new = 20 - health
            health = health + new
            done[location] = True
            # NOTE(review): relative redirect; other redirects here use
            # '/~student23/Lab6/...' — confirm this path resolves the same way.
            return redirect('~student23/Lab6/do?state=2')
        elif (state == 2):
            initBattle(False)
            power = 20
            title = 'Sector 9 | Slower Than Light!'
            content = 'You can almost see your target. Just one jump away.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        else:
            initBattle(False)
            power = 0
            title = 'Repairs | Slower Than Light!'
            content = 'You are only one jump away from your target. Suddenly, your sensors pick up a new ship approaching. A transmission comes in and the ship is friendly! The new ship offers to repair some of your hull.'
            options = {'Accept the Offer' : 'do?state=3', 'Leave' : CLOSEPOPPUP}
            return renderGame(title, content, options)
    elif (location == 10):
        # Sector 10: flagship boss fight, then the win screen.
        if (state == 2):
            initBattle(False)
            power = 0
            title = 'Victory! | Slower Than Light!'
            content = 'The Flagship is ripped apart, piece by piece. The fireball sends a sense of satisfaction and joy across your face. Your quest is over.'
            options = {'Next' : 'do?state=3'}
            return renderGame(title, content, options)
        elif (state == 3):
            initBattle(False)
            power = 0
            title = 'Winner! | Slower Than Light!'
            content = 'Congratulations! You won! Thank you for playing my game. I hope you enjoyed it, and if you haven\'t played FTL: Faster Than Light yet, I <i>highly</i> recommend it.'
            options = {'Close' : CLOSEPOPPUP}
            return renderGame(title, content, options)
        else:
            initBattle(True)
            power = 0
            enemy = {'health' : 25, 'stopped' : 0, 'weapons' : {'missile' : 3, 'burst' : 2, 'ion' : 4, 'flak' : 10}, 'reward' : 100}
            title = 'Federation Flagship | Slower Than Light!'
            content = 'If that free heal wasn\'t obvious enoughi, you are going to be in for a big fight. The Federation Flagship has arrived! I am a kind game creator, so this will not be the insane boss battle of the source game, but I will still make it tough for you. Good luck, Commander!'
            options = {'Ready the Guns!' : '?action=pass'}
            state = 1
            return renderGame(title, content, options)
    else:
        # Unknown sector: should be unreachable via the travel graph.
        initBattle(False)
        power = 0
        return renderGame('I AM ERROR', 'I AM ERROR', {})
@app.route("/")
def game():
global health
global power
if (health <= 0):
power = 0
title = 'Game Over | Slower Than Light!'
content = 'You have died!<script>void(document.getElementById("ship").style.display = "none");</script>'
options = {'Close' : CLOSEPOPPUP}
return renderGame(title, content, options)
incrementCooldown()
if (state == 1):
return battle(request.args.get('action'))
else:
return read()
@app.route("/<int:newLoc>")
def jump(newLoc):
global power
global location
global state
global stopped
power = 0
location = newLoc
state = 0
stopped = 0
if (location in done and done[location]):
state = 3
return redirect('/~student23/Lab6/')
@app.route("/do")
def states():
global state
incrementCooldown()
state = int(request.args.get('state'))
return redirect('/~student23/Lab6/')
# Development-server entry point; debug=True enables the reloader/debugger
# (not suitable for production deployment).
app.run(host='0.0.0.0', port=5123, debug=True)
| 50.774194 | 979 | 0.568252 |
a61562df8ef47edd9533d55fc44879eedf138531
| 61,468 |
py
|
Python
|
python/fbprophet/forecaster.py
|
otaviocals/prophet
|
1a0c32d12d2d0860472e56e67cf3b912695eac59
|
[
"MIT"
] | null | null | null |
python/fbprophet/forecaster.py
|
otaviocals/prophet
|
1a0c32d12d2d0860472e56e67cf3b912695eac59
|
[
"MIT"
] | null | null | null |
python/fbprophet/forecaster.py
|
otaviocals/prophet
|
1a0c32d12d2d0860472e56e67cf3b912695eac59
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import, division, print_function
import logging
from collections import OrderedDict, defaultdict
from datetime import timedelta
import numpy as np
import modin.pandas as pd
import pandas as pandas_legacy
import pystan # noqa F401
pd.to_datetime_legacy = pd.to_datetime
pd.to_datetime = lambda df : pd.to_datetime_legacy([x for x in list(df)])
from fbprophet.diagnostics import prophet_copy
from fbprophet.make_holidays import get_holiday_names, make_holidays_df
from fbprophet.models import prophet_stan_model
from fbprophet.plot import (plot, plot_components, plot_forecast_component,
plot_seasonality, plot_weekly, plot_yearly,
seasonality_plot_df)
logger = logging.getLogger('fbprophet')
logger.addHandler(logging.NullHandler())
if len(logger.handlers) == 1:
logging.basicConfig(level=logging.INFO)
class Prophet(object):
"""Prophet forecaster.
Parameters
----------
growth: String 'linear' or 'logistic' to specify a linear or logistic
trend.
changepoints: List of dates at which to include potential changepoints. If
not specified, potential changepoints are selected automatically.
n_changepoints: Number of potential changepoints to include. Not used
if input `changepoints` is supplied. If `changepoints` is not supplied,
then n_changepoints potential changepoints are selected uniformly from
the first `changepoint_range` proportion of the history.
changepoint_range: Proportion of history in which trend changepoints will
be estimated. Defaults to 0.8 for the first 80%. Not used if
`changepoints` is specified.
Not used if input `changepoints` is supplied.
yearly_seasonality: Fit yearly seasonality.
Can be 'auto', True, False, or a number of Fourier terms to generate.
weekly_seasonality: Fit weekly seasonality.
Can be 'auto', True, False, or a number of Fourier terms to generate.
daily_seasonality: Fit daily seasonality.
Can be 'auto', True, False, or a number of Fourier terms to generate.
holidays: pd.DataFrame with columns holiday (string) and ds (date type)
and optionally columns lower_window and upper_window which specify a
range of days around the date to be included as holidays.
lower_window=-2 will include 2 days prior to the date as holidays. Also
optionally can have a column prior_scale specifying the prior scale for
that holiday.
seasonality_mode: 'additive' (default) or 'multiplicative'.
seasonality_prior_scale: Parameter modulating the strength of the
seasonality model. Larger values allow the model to fit larger seasonal
fluctuations, smaller values dampen the seasonality. Can be specified
for individual seasonalities using add_seasonality.
holidays_prior_scale: Parameter modulating the strength of the holiday
components model, unless overridden in the holidays input.
changepoint_prior_scale: Parameter modulating the flexibility of the
automatic changepoint selection. Large values will allow many
changepoints, small values will allow few changepoints.
mcmc_samples: Integer, if greater than 0, will do full Bayesian inference
with the specified number of MCMC samples. If 0, will do MAP
estimation.
interval_width: Float, width of the uncertainty intervals provided
for the forecast. If mcmc_samples=0, this will be only the uncertainty
in the trend using the MAP estimate of the extrapolated generative
model. If mcmc.samples>0, this will be integrated over all model
parameters, which will include uncertainty in seasonality.
    uncertainty_samples: Number of simulated draws used to estimate
        uncertainty intervals. Setting this value to 0 or False will disable
        uncertainty estimation and speed up the calculation.
"""
def __init__(
self,
growth='linear',
changepoints=None,
n_changepoints=25,
changepoint_range=0.8,
yearly_seasonality='auto',
weekly_seasonality='auto',
daily_seasonality='auto',
holidays=None,
seasonality_mode='additive',
seasonality_prior_scale=10.0,
holidays_prior_scale=10.0,
changepoint_prior_scale=0.05,
mcmc_samples=0,
interval_width=0.80,
uncertainty_samples=1000,
):
self.growth = growth
self.changepoints = pd.to_datetime(changepoints)
if self.changepoints is not None:
self.n_changepoints = len(self.changepoints)
self.specified_changepoints = True
else:
self.n_changepoints = n_changepoints
self.specified_changepoints = False
self.changepoint_range = changepoint_range
self.yearly_seasonality = yearly_seasonality
self.weekly_seasonality = weekly_seasonality
self.daily_seasonality = daily_seasonality
self.holidays = holidays
self.seasonality_mode = seasonality_mode
self.seasonality_prior_scale = float(seasonality_prior_scale)
self.changepoint_prior_scale = float(changepoint_prior_scale)
self.holidays_prior_scale = float(holidays_prior_scale)
self.mcmc_samples = mcmc_samples
self.interval_width = interval_width
self.uncertainty_samples = uncertainty_samples
# Set during fitting or by other methods
self.start = None
self.y_scale = None
self.logistic_floor = False
self.t_scale = None
self.changepoints_t = None
self.seasonalities = OrderedDict({})
self.extra_regressors = OrderedDict({})
self.country_holidays = None
self.stan_fit = None
self.params = {}
self.history = None
self.history_dates = None
self.train_component_cols = None
self.component_modes = None
self.train_holiday_names = None
self.validate_inputs()
def validate_inputs(self):
"""Validates the inputs to Prophet."""
if self.growth not in ('linear', 'logistic'):
raise ValueError(
"Parameter 'growth' should be 'linear' or 'logistic'.")
if ((self.changepoint_range < 0) or (self.changepoint_range > 1)):
raise ValueError("Parameter 'changepoint_range' must be in [0, 1]")
if self.holidays is not None:
if not (
isinstance(self.holidays, pd.DataFrame)
and 'ds' in self.holidays # noqa W503
and 'holiday' in self.holidays # noqa W503
):
raise ValueError("holidays must be a DataFrame with 'ds' and "
"'holiday' columns.")
self.holidays['ds'] = pd.to_datetime(self.holidays['ds'])
has_lower = 'lower_window' in self.holidays
has_upper = 'upper_window' in self.holidays
if has_lower + has_upper == 1:
raise ValueError('Holidays must have both lower_window and ' +
'upper_window, or neither')
if has_lower:
if self.holidays['lower_window'].max() > 0:
raise ValueError('Holiday lower_window should be <= 0')
if self.holidays['upper_window'].min() < 0:
raise ValueError('Holiday upper_window should be >= 0')
for h in self.holidays['holiday'].unique():
self.validate_column_name(h, check_holidays=False)
if self.seasonality_mode not in ['additive', 'multiplicative']:
raise ValueError(
"seasonality_mode must be 'additive' or 'multiplicative'"
)
def validate_column_name(self, name, check_holidays=True,
check_seasonalities=True, check_regressors=True):
"""Validates the name of a seasonality, holiday, or regressor.
Parameters
----------
name: string
check_holidays: bool check if name already used for holiday
check_seasonalities: bool check if name already used for seasonality
check_regressors: bool check if name already used for regressor
"""
if '_delim_' in name:
raise ValueError('Name cannot contain "_delim_"')
reserved_names = [
'trend', 'additive_terms', 'daily', 'weekly', 'yearly',
'holidays', 'zeros', 'extra_regressors_additive', 'yhat',
'extra_regressors_multiplicative', 'multiplicative_terms',
]
rn_l = [n + '_lower' for n in reserved_names]
rn_u = [n + '_upper' for n in reserved_names]
reserved_names.extend(rn_l)
reserved_names.extend(rn_u)
reserved_names.extend([
'ds', 'y', 'cap', 'floor', 'y_scaled', 'cap_scaled'])
if name in reserved_names:
raise ValueError('Name "{}" is reserved.'.format(name))
if (check_holidays and self.holidays is not None and
name in self.holidays['holiday'].unique()):
raise ValueError(
'Name "{}" already used for a holiday.'.format(name))
if (check_holidays and self.country_holidays is not None and
name in get_holiday_names(self.country_holidays)):
raise ValueError(
'Name "{}" is a holiday name in {}.'.format(name, self.country_holidays))
if check_seasonalities and name in self.seasonalities:
raise ValueError(
'Name "{}" already used for a seasonality.'.format(name))
if check_regressors and name in self.extra_regressors:
raise ValueError(
'Name "{}" already used for an added regressor.'.format(name))
    def setup_dataframe(self, df, initialize_scales=False):
        """Prepare dataframe for fitting or predicting.

        Adds a time index and scales y. Creates auxiliary columns 't', 't_ix',
        'y_scaled', and 'cap_scaled'. These columns are used during both
        fitting and predicting.

        Parameters
        ----------
        df: pd.DataFrame with columns ds, y, and cap if logistic growth. Any
            specified additional regressors must also be present.
        initialize_scales: Boolean set scaling factors in self from df.

        Returns
        -------
        pd.DataFrame prepared for fitting or predicting.
        """
        # --- Validate/coerce the input columns ---
        if 'y' in df:
            df['y'] = pd.to_numeric(df['y'])
            if np.isinf(df['y'].values).any():
                raise ValueError('Found infinity in column y.')
        if df['ds'].dtype == np.int64:
            # Integer timestamps are treated as strings (e.g. 20190101),
            # not as epoch values.
            df['ds'] = df['ds'].astype(str)
        # NOTE(review): listifying mirrors the module-level pd.to_datetime
        # monkeypatch (modin workaround) — confirm still required.
        df['ds'] = pd.to_datetime([x for x in list(df.ds)])
        if df['ds'].dt.tz is not None:
            raise ValueError(
                'Column ds has timezone specified, which is not supported. '
                'Remove timezone.'
            )
        if df['ds'].isnull().any():
            raise ValueError('Found NaN in column ds.')
        # Extra regressors must be present and numeric.
        for name in self.extra_regressors:
            if name not in df:
                raise ValueError(
                    'Regressor "{}" missing from dataframe'.format(name))
            df[name] = pd.to_numeric(df[name])
            if df[name].isnull().any():
                raise ValueError('Found NaN in column ' + name)
        # Conditional seasonalities need their boolean condition columns.
        # NOTE(review): assumes every seasonality dict has a 'condition_name'
        # key (possibly None), as set by add_seasonality — confirm.
        for props in self.seasonalities.values():
            condition_name = props['condition_name']
            if condition_name is not None:
                if condition_name not in df:
                    raise ValueError(
                        'Condition "{}" missing from dataframe'.format(condition_name))
                if not df[condition_name].isin([True, False]).all():
                    raise ValueError('Found non-boolean in column ' + condition_name)
                df[condition_name] = df[condition_name].astype('bool')

        if df.index.name == 'ds':
            df.index.name = None
        df = df.sort_values('ds')
        df = df.reset_index(drop=True)

        # --- Derive scaled columns (order matters: scales before 't'/'y_scaled') ---
        self.initialize_scales(initialize_scales, df)

        if self.logistic_floor:
            if 'floor' not in df:
                raise ValueError("Expected column 'floor'.")
        else:
            df['floor'] = 0
        if self.growth == 'logistic':
            if 'cap' not in df:
                raise ValueError(
                    "Capacities must be supplied for logistic growth in "
                    "column 'cap'"
                )
            if (df['cap'] <= df['floor']).any():
                raise ValueError(
                    'cap must be greater than floor (which defaults to 0).'
                )
            df['cap_scaled'] = (df['cap'] - df['floor']) / self.y_scale

        # 't' is time scaled to [0, 1] over the training span.
        df['t'] = (df['ds'] - self.start) / self.t_scale
        if 'y' in df:
            df['y_scaled'] = (df['y'] - df['floor']) / self.y_scale
        # Standardize extra regressors using the stored mu/std.
        for name, props in self.extra_regressors.items():
            df[name] = ((df[name] - props['mu']) / props['std'])
        return df
def initialize_scales(self, initialize_scales, df):
"""Initialize model scales.
Sets model scaling factors using df.
Parameters
----------
initialize_scales: Boolean set the scales or not.
df: pd.DataFrame for setting scales.
"""
if not initialize_scales:
return
if self.growth == 'logistic' and 'floor' in df:
self.logistic_floor = True
floor = df['floor']
else:
floor = 0.
self.y_scale = (df['y'] - floor).abs().max()
if self.y_scale == 0:
self.y_scale = 1
self.start = df['ds'].min()
self.t_scale = df['ds'].max() - self.start
for name, props in self.extra_regressors.items():
standardize = props['standardize']
n_vals = len(df[name].unique())
if n_vals < 2:
standardize = False
if standardize == 'auto':
if set(df[name].unique()) == set([1, 0]):
# Don't standardize binary variables.
standardize = False
else:
standardize = True
if standardize:
mu = df[name].mean()
std = df[name].std()
self.extra_regressors[name]['mu'] = mu
self.extra_regressors[name]['std'] = std
def set_changepoints(self):
"""Set changepoints
Sets m$changepoints to the dates of changepoints. Either:
1) The changepoints were passed in explicitly.
A) They are empty.
B) They are not empty, and need validation.
2) We are generating a grid of them.
3) The user prefers no changepoints be used.
"""
if self.changepoints is not None:
if len(self.changepoints) == 0:
pass
else:
too_low = min(self.changepoints) < self.history['ds'].min()
too_high = max(self.changepoints) > self.history['ds'].max()
if too_low or too_high:
raise ValueError(
'Changepoints must fall within training data.')
else:
# Place potential changepoints evenly through first
# changepoint_range proportion of the history
hist_size = int(np.floor(
self.history.shape[0] * self.changepoint_range))
if self.n_changepoints + 1 > hist_size:
self.n_changepoints = hist_size - 1
logger.info(
'n_changepoints greater than number of observations.'
'Using {}.'.format(self.n_changepoints)
)
if self.n_changepoints > 0:
cp_indexes = (
np.linspace(0, hist_size - 1, self.n_changepoints + 1)
.round()
.astype(np.int)
)
self.changepoints = (
self.history.iloc[cp_indexes]['ds'].tail(-1)
)
else:
# set empty changepoints
self.changepoints = []
if len(self.changepoints) > 0:
self.changepoints_t = np.sort(np.array(
(self.changepoints - self.start) / self.t_scale))
else:
self.changepoints_t = np.array([0]) # dummy changepoint
@staticmethod
def fourier_series(dates, period, series_order):
"""Provides Fourier series components with the specified frequency
and order.
Parameters
----------
dates: pd.Series containing timestamps.
period: Number of days of the period.
series_order: Number of components.
Returns
-------
Matrix with seasonality features.
"""
# convert to days since epoch
t = np.array(
(dates - pd.datetime(1970, 1, 1))
.dt.total_seconds()
.astype(np.float)
) / (3600 * 24.)
return np.column_stack([
fun((2.0 * (i + 1) * np.pi * t / period))
for i in range(series_order)
for fun in (np.sin, np.cos)
])
@classmethod
def make_seasonality_features(cls, dates, period, series_order, prefix):
"""Data frame with seasonality features.
Parameters
----------
cls: Prophet class.
dates: pd.Series containing timestamps.
period: Number of days of the period.
series_order: Number of components.
prefix: Column name prefix.
Returns
-------
pd.DataFrame with seasonality features.
"""
features = cls.fourier_series(dates, period, series_order)
columns = [
'{}_delim_{}'.format(prefix, i + 1)
for i in range(features.shape[1])
]
return pd.DataFrame(features, columns=columns)
def construct_holiday_dataframe(self, dates):
"""Construct a dataframe of holiday dates.
Will combine self.holidays with the built-in country holidays
corresponding to input dates, if self.country_holidays is set.
Parameters
----------
dates: pd.Series containing timestamps used for computing seasonality.
Returns
-------
dataframe of holiday dates, in holiday dataframe format used in
initialization.
"""
all_holidays = pd.DataFrame()
if self.holidays is not None:
all_holidays = self.holidays.copy()
if self.country_holidays is not None:
year_list = list({x.year for x in dates})
country_holidays_df = make_holidays_df(
year_list=year_list, country=self.country_holidays
)
all_holidays = pd.concat((all_holidays, country_holidays_df), sort=False)
all_holidays.reset_index(drop=True, inplace=True)
# If the model has already been fit with a certain set of holidays,
# make sure we are using those same ones.
if self.train_holiday_names is not None:
# Remove holiday names didn't show up in fit
index_to_drop = all_holidays.index[
np.logical_not(
all_holidays.holiday.isin(self.train_holiday_names)
)
]
all_holidays = all_holidays.drop(index_to_drop)
# Add holiday names in fit but not in predict with ds as NA
holidays_to_add = pd.DataFrame({
'holiday': self.train_holiday_names[
np.logical_not(self.train_holiday_names.isin(all_holidays.holiday))
]
})
all_holidays = pd.concat((all_holidays, holidays_to_add), sort=False)
all_holidays.reset_index(drop=True, inplace=True)
return all_holidays
    def make_holiday_features(self, dates, holidays):
        """Construct a dataframe of holiday features.

        One indicator column is produced per (holiday, window offset) pair,
        named '<holiday>_delim_<sign><offset>'.

        Parameters
        ----------
        dates: pd.Series containing timestamps used for computing seasonality.
        holidays: pd.Dataframe containing holidays, as returned by
            construct_holiday_dataframe.

        Returns
        -------
        holiday_features: pd.DataFrame with a column for each holiday.
        prior_scale_list: List of prior scales for each holiday column.
        holiday_names: List of names of holidays
        """
        # Holds columns of our future matrix.
        expanded_holidays = defaultdict(lambda: np.zeros(dates.shape[0]))
        prior_scales = {}
        # Makes an index so we can perform `get_loc` below.
        # Strip to just dates.
        row_index = pd.DatetimeIndex(dates.apply(lambda x: x.date()))
        for _ix, row in holidays.iterrows():
            dt = row.ds.date()
            # Window columns may be missing or malformed; fall back to 0.
            try:
                lw = int(row.get('lower_window', 0))
                uw = int(row.get('upper_window', 0))
            except ValueError:
                lw = 0
                uw = 0
            # Per-row prior scale, defaulting to the model-level setting.
            ps = float(row.get('prior_scale', self.holidays_prior_scale))
            if np.isnan(ps):
                ps = float(self.holidays_prior_scale)
            # All rows of the same holiday must agree on the prior scale.
            if (
                row.holiday in prior_scales and prior_scales[row.holiday] != ps
            ):
                raise ValueError(
                    'Holiday {} does not have consistent prior scale '
                    'specification.'.format(row.holiday))
            if ps <= 0:
                raise ValueError('Prior scale must be > 0')
            prior_scales[row.holiday] = ps
            # Mark each date in [dt+lw, dt+uw] for this holiday.
            for offset in range(lw, uw + 1):
                occurrence = dt + timedelta(days=offset)
                try:
                    loc = row_index.get_loc(occurrence)
                except KeyError:
                    loc = None
                key = '{}_delim_{}{}'.format(
                    row.holiday,
                    '+' if offset >= 0 else '-',
                    abs(offset)
                )
                if loc is not None:
                    expanded_holidays[key][loc] = 1.
                else:
                    # Access key to generate value
                    expanded_holidays[key]
        holiday_features = pd.DataFrame(expanded_holidays)
        # Make sure column order is consistent
        holiday_features = holiday_features[sorted(holiday_features.columns.tolist())]
        prior_scale_list = [
            prior_scales[h.split('_delim_')[0]]
            for h in holiday_features.columns
        ]
        holiday_names = list(prior_scales.keys())
        # Store holiday names used in fit
        if self.train_holiday_names is None:
            self.train_holiday_names = pd.Series(holiday_names)
        return holiday_features, prior_scale_list, holiday_names
def add_regressor(
self, name, prior_scale=None, standardize='auto', mode=None
):
"""Add an additional regressor to be used for fitting and predicting.
The dataframe passed to `fit` and `predict` will have a column with the
specified name to be used as a regressor. When standardize='auto', the
regressor will be standardized unless it is binary. The regression
coefficient is given a prior with the specified scale parameter.
Decreasing the prior scale will add additional regularization. If no
prior scale is provided, self.holidays_prior_scale will be used.
Mode can be specified as either 'additive' or 'multiplicative'. If not
specified, self.seasonality_mode will be used. 'additive' means the
effect of the regressor will be added to the trend, 'multiplicative'
means it will multiply the trend.
Parameters
----------
name: string name of the regressor.
prior_scale: optional float scale for the normal prior. If not
provided, self.holidays_prior_scale will be used.
standardize: optional, specify whether this regressor will be
standardized prior to fitting. Can be 'auto' (standardize if not
binary), True, or False.
mode: optional, 'additive' or 'multiplicative'. Defaults to
self.seasonality_mode.
Returns
-------
The prophet object.
"""
if self.history is not None:
raise Exception(
"Regressors must be added prior to model fitting.")
self.validate_column_name(name, check_regressors=False)
if prior_scale is None:
prior_scale = float(self.holidays_prior_scale)
if mode is None:
mode = self.seasonality_mode
if prior_scale <= 0:
raise ValueError('Prior scale must be > 0')
if mode not in ['additive', 'multiplicative']:
raise ValueError("mode must be 'additive' or 'multiplicative'")
self.extra_regressors[name] = {
'prior_scale': prior_scale,
'standardize': standardize,
'mu': 0.,
'std': 1.,
'mode': mode,
}
return self
def add_seasonality(
self, name, period, fourier_order, prior_scale=None, mode=None, condition_name=None
):
"""Add a seasonal component with specified period, number of Fourier
components, and prior scale.
Increasing the number of Fourier components allows the seasonality to
change more quickly (at risk of overfitting). Default values for yearly
and weekly seasonalities are 10 and 3 respectively.
Increasing prior scale will allow this seasonality component more
flexibility, decreasing will dampen it. If not provided, will use the
seasonality_prior_scale provided on Prophet initialization (defaults
to 10).
Mode can be specified as either 'additive' or 'multiplicative'. If not
specified, self.seasonality_mode will be used (defaults to additive).
Additive means the seasonality will be added to the trend,
multiplicative means it will multiply the trend.
If condition_name is provided, the dataframe passed to `fit` and `predict`
should have a column with the specified condition_name containing booleans
which decides when to apply seasonality.
Parameters
----------
name: string name of the seasonality component.
period: float number of days in one period.
fourier_order: int number of Fourier components to use.
prior_scale: optional float prior scale for this component.
mode: optional 'additive' or 'multiplicative'
condition_name: string name of the seasonality condition.
Returns
-------
The prophet object.
"""
if self.history is not None:
raise Exception(
"Seasonality must be added prior to model fitting.")
if name not in ['daily', 'weekly', 'yearly']:
# Allow overwriting built-in seasonalities
self.validate_column_name(name, check_seasonalities=False)
if prior_scale is None:
ps = self.seasonality_prior_scale
else:
ps = float(prior_scale)
if ps <= 0:
raise ValueError('Prior scale must be > 0')
if fourier_order <= 0:
raise ValueError('Fourier Order must be > 0')
if mode is None:
mode = self.seasonality_mode
if mode not in ['additive', 'multiplicative']:
raise ValueError("mode must be 'additive' or 'multiplicative'")
if condition_name is not None:
self.validate_column_name(condition_name)
self.seasonalities[name] = {
'period': period,
'fourier_order': fourier_order,
'prior_scale': ps,
'mode': mode,
'condition_name': condition_name,
}
return self
def add_country_holidays(self, country_name):
"""Add in built-in holidays for the specified country.
These holidays will be included in addition to any specified on model
initialization.
Holidays will be calculated for arbitrary date ranges in the history
and future. See the online documentation for the list of countries with
built-in holidays.
Built-in country holidays can only be set for a single country.
Parameters
----------
country_name: Name of the country, like 'UnitedStates' or 'US'
Returns
-------
The prophet object.
"""
if self.history is not None:
raise Exception(
"Country holidays must be added prior to model fitting."
)
# Validate names.
for name in get_holiday_names(country_name):
# Allow merging with existing holidays
self.validate_column_name(name, check_holidays=False)
# Set the holidays.
if self.country_holidays is not None:
logger.warning(
'Changing country holidays from {} to {}'.format(
self.country_holidays, country_name
)
)
self.country_holidays = country_name
return self
def make_all_seasonality_features(self, df):
"""Dataframe with seasonality features.
Includes seasonality features, holiday features, and added regressors.
Parameters
----------
df: pd.DataFrame with dates for computing seasonality features and any
added regressors.
Returns
-------
pd.DataFrame with regression features.
list of prior scales for each column of the features dataframe.
Dataframe with indicators for which regression components correspond to
which columns.
Dictionary with keys 'additive' and 'multiplicative' listing the
component names for each mode of seasonality.
"""
seasonal_features = []
prior_scales = []
modes = {'additive': [], 'multiplicative': []}
# Seasonality features
for name, props in self.seasonalities.items():
features = self.make_seasonality_features(
df['ds'],
props['period'],
props['fourier_order'],
name,
)
if props['condition_name'] is not None:
features.loc[(features[~(df[props['condition_name']])])] = 0
seasonal_features.append(features)
prior_scales.extend(
[props['prior_scale']] * features.shape[1])
modes[props['mode']].append(name)
# Holiday features
holidays = self.construct_holiday_dataframe(df['ds'])
if len(holidays) > 0:
features, holiday_priors, holiday_names = (
self.make_holiday_features(df['ds'], holidays)
)
seasonal_features.append(features)
prior_scales.extend(holiday_priors)
modes[self.seasonality_mode].extend(holiday_names)
# Additional regressors
for name, props in self.extra_regressors.items():
seasonal_features.append(pd.DataFrame(df[name]))
prior_scales.append(props['prior_scale'])
modes[props['mode']].append(name)
# Dummy to prevent empty X
if len(seasonal_features) == 0:
seasonal_features.append(
pd.DataFrame({'zeros': np.zeros(df.shape[0])}))
prior_scales.append(1.)
seasonal_features = pd.concat(seasonal_features, axis=1)
component_cols, modes = self.regressor_column_matrix(
seasonal_features, modes
)
return seasonal_features, prior_scales, component_cols, modes
    def regressor_column_matrix(self, seasonal_features, modes):
        """Dataframe indicating which columns of the feature matrix correspond
        to which seasonality/regressor components.

        Includes combination components, like 'additive_terms'. These
        combination components will be added to the 'modes' input.

        Parameters
        ----------
        seasonal_features: Constructed seasonal features dataframe
        modes: Dictionary with keys 'additive' and 'multiplicative' listing the
            component names for each mode of seasonality.

        Returns
        -------
        component_cols: A binary indicator dataframe with columns seasonal
            components and rows columns in seasonal_features. Entry is 1 if
            that columns is used in that component.
        modes: Updated input with combination components.
        """
        # Feature columns are named '<component>_delim_<suffix>'; strip the
        # suffix to map each column index to its component name.
        components = pd.DataFrame({
            'col': np.arange(seasonal_features.shape[1]),
            'component': [
                x.split('_delim_')[0] for x in seasonal_features.columns
            ],
        })
        # Add total for holidays
        if self.train_holiday_names is not None:
            components = self.add_group_component(
                components, 'holidays', self.train_holiday_names.unique())
        # Add totals additive and multiplicative components, and regressors
        for mode in ['additive', 'multiplicative']:
            components = self.add_group_component(
                components, mode + '_terms', modes[mode]
            )
            regressors_by_mode = [
                r for r, props in self.extra_regressors.items()
                if props['mode'] == mode
            ]
            components = self.add_group_component(
                components, 'extra_regressors_' + mode, regressors_by_mode)
            # Add combination components to modes
            modes[mode].append(mode + '_terms')
            modes[mode].append('extra_regressors_' + mode)
        # After all of the additive/multiplicative groups have been added,
        modes[self.seasonality_mode].append('holidays')
        # Convert to a binary matrix: rows are feature columns, columns are
        # component names.
        component_cols = pd.crosstab(
            components['col'], components['component'],
        ).sort_index(level='col')
        # Add columns for additive and multiplicative terms, if missing
        for name in ['additive_terms', 'multiplicative_terms']:
            if name not in component_cols:
                component_cols[name] = 0
        # Remove the placeholder
        component_cols.drop('zeros', axis=1, inplace=True, errors='ignore')
        # Validation: every feature column belongs to exactly one of the two
        # combination components.
        if (
            max(component_cols['additive_terms']
                + component_cols['multiplicative_terms']) > 1
        ):
            raise Exception('A bug occurred in seasonal components.')
        # Compare to the training, if set.
        if self.train_component_cols is not None:
            component_cols = component_cols[self.train_component_cols.columns]
            if not component_cols.equals(self.train_component_cols):
                raise Exception('A bug occurred in constructing regressors.')
        return component_cols, modes
def add_group_component(self, components, name, group):
"""Adds a component with given name that contains all of the components
in group.
Parameters
----------
components: Dataframe with components.
name: Name of new group component.
group: List of components that form the group.
Returns
-------
Dataframe with components.
"""
new_comp = components[components['component'].isin(set(group))].copy()
group_cols = new_comp['col'].unique()
if len(group_cols) > 0:
new_comp = pd.DataFrame({'col': group_cols, 'component': name})
components = components.append(new_comp)
return components
def parse_seasonality_args(self, name, arg, auto_disable, default_order):
"""Get number of fourier components for built-in seasonalities.
Parameters
----------
name: string name of the seasonality component.
arg: 'auto', True, False, or number of fourier components as provided.
auto_disable: bool if seasonality should be disabled when 'auto'.
default_order: int default fourier order
Returns
-------
Number of fourier components, or 0 for disabled.
"""
if arg == 'auto':
fourier_order = 0
if name in self.seasonalities:
logger.info(
'Found custom seasonality named "{name}", '
'disabling built-in {name} seasonality.'.format(name=name)
)
elif auto_disable:
logger.info(
'Disabling {name} seasonality. Run prophet with '
'{name}_seasonality=True to override this.'.format(
name=name)
)
else:
fourier_order = default_order
elif arg is True:
fourier_order = default_order
elif arg is False:
fourier_order = 0
else:
fourier_order = int(arg)
return fourier_order
    def set_auto_seasonalities(self):
        """Set seasonalities that were left on auto.

        Turns on yearly seasonality if there is >=2 years of history.
        Turns on weekly seasonality if there is >=2 weeks of history, and the
        spacing between dates in the history is <7 days.
        Turns on daily seasonality if there is >=2 days of history, and the
        spacing between dates in the history is <1 day.
        """
        first = self.history['ds'].min()
        last = self.history['ds'].max()
        # Smallest nonzero spacing between consecutive dates; used to decide
        # whether the data is fine-grained enough for weekly/daily terms.
        dt = self.history['ds'].diff()
        min_dt = dt.iloc[dt.values.nonzero()[0]].min()
        # Yearly seasonality: needs at least two years (730 days) of history.
        yearly_disable = last - first < pd.Timedelta(days=730)
        fourier_order = self.parse_seasonality_args(
            'yearly', self.yearly_seasonality, yearly_disable, 10)
        if fourier_order > 0:
            self.seasonalities['yearly'] = {
                'period': 365.25,
                'fourier_order': fourier_order,
                'prior_scale': self.seasonality_prior_scale,
                'mode': self.seasonality_mode,
                'condition_name': None
            }
        # Weekly seasonality: needs >=2 weeks of history and sub-weekly data.
        weekly_disable = ((last - first < pd.Timedelta(weeks=2)) or
                          (min_dt >= pd.Timedelta(weeks=1)))
        fourier_order = self.parse_seasonality_args(
            'weekly', self.weekly_seasonality, weekly_disable, 3)
        if fourier_order > 0:
            self.seasonalities['weekly'] = {
                'period': 7,
                'fourier_order': fourier_order,
                'prior_scale': self.seasonality_prior_scale,
                'mode': self.seasonality_mode,
                'condition_name': None
            }
        # Daily seasonality: needs >=2 days of history and sub-daily data.
        daily_disable = ((last - first < pd.Timedelta(days=2)) or
                         (min_dt >= pd.Timedelta(days=1)))
        fourier_order = self.parse_seasonality_args(
            'daily', self.daily_seasonality, daily_disable, 4)
        if fourier_order > 0:
            self.seasonalities['daily'] = {
                'period': 1,
                'fourier_order': fourier_order,
                'prior_scale': self.seasonality_prior_scale,
                'mode': self.seasonality_mode,
                'condition_name': None
            }
@staticmethod
def linear_growth_init(df):
"""Initialize linear growth.
Provides a strong initialization for linear growth by calculating the
growth and offset parameters that pass the function through the first
and last points in the time series.
Parameters
----------
df: pd.DataFrame with columns ds (date), y_scaled (scaled time series),
and t (scaled time).
Returns
-------
A tuple (k, m) with the rate (k) and offset (m) of the linear growth
function.
"""
i0, i1 = df['ds'].idxmin(), df['ds'].idxmax()
T = df['t'].iloc[i1] - df['t'].iloc[i0]
k = (df['y_scaled'].iloc[i1] - df['y_scaled'].iloc[i0]) / T
m = df['y_scaled'].iloc[i0] - k * df['t'].iloc[i0]
return (k, m)
@staticmethod
def logistic_growth_init(df):
"""Initialize logistic growth.
Provides a strong initialization for logistic growth by calculating the
growth and offset parameters that pass the function through the first
and last points in the time series.
Parameters
----------
df: pd.DataFrame with columns ds (date), cap_scaled (scaled capacity),
y_scaled (scaled time series), and t (scaled time).
Returns
-------
A tuple (k, m) with the rate (k) and offset (m) of the logistic growth
function.
"""
i0, i1 = df['ds'].idxmin(), df['ds'].idxmax()
T = df['t'].iloc[i1] - df['t'].iloc[i0]
# Force valid values, in case y > cap or y < 0
C0 = df['cap_scaled'].iloc[i0]
C1 = df['cap_scaled'].iloc[i1]
y0 = max(0.01 * C0, min(0.99 * C0, df['y_scaled'].iloc[i0]))
y1 = max(0.01 * C1, min(0.99 * C1, df['y_scaled'].iloc[i1]))
r0 = C0 / y0
r1 = C1 / y1
if abs(r0 - r1) <= 0.01:
r0 = 1.05 * r0
L0 = np.log(r0 - 1)
L1 = np.log(r1 - 1)
# Initialize the offset
m = L0 * T / (L0 - L1)
# And the rate
k = (L0 - L1) / T
return (k, m)
    def fit(self, df, **kwargs):
        """Fit the Prophet model.

        This sets self.params to contain the fitted model parameters. It is a
        dictionary parameter names as keys and the following items:
            k (Mx1 array): M posterior samples of the initial slope.
            m (Mx1 array): The initial intercept.
            delta (MxN array): The slope change at each of N changepoints.
            beta (MxK matrix): Coefficients for K seasonality features.
            sigma_obs (Mx1 array): Noise level.
        Note that M=1 if MAP estimation.

        Parameters
        ----------
        df: pd.DataFrame containing the history. Must have columns ds (date
            type) and y, the time series. If self.growth is 'logistic', then
            df must also have a column cap that specifies the capacity at
            each ds.
        kwargs: Additional arguments passed to the optimizing or sampling
            functions in Stan.

        Returns
        -------
        The fitted Prophet object.
        """
        if self.history is not None:
            raise Exception('Prophet object can only be fit once. '
                            'Instantiate a new object.')
        if ('ds' not in df) or ('y' not in df):
            raise ValueError(
                "Dataframe must have columns 'ds' and 'y' with the dates and "
                "values respectively."
            )
        # Only non-NaN observations are used for fitting.
        history = df[df['y'].notnull()].copy()
        if history.shape[0] < 2:
            raise ValueError('Dataframe has less than 2 non-NaN rows.')
        self.history_dates = pd.to_datetime([x for x in list(df.ds)]).sort_values()
        history = self.setup_dataframe(history, initialize_scales=True)
        self.history = history
        self.set_auto_seasonalities()
        seasonal_features, prior_scales, component_cols, modes = (
            self.make_all_seasonality_features(history))
        # Remember the component layout so predict-time features can be
        # validated against it.
        self.train_component_cols = component_cols
        self.component_modes = modes
        self.set_changepoints()
        # Data block passed to the Stan model.
        dat = {
            'T': history.shape[0],
            'K': seasonal_features.shape[1],
            'S': len(self.changepoints_t),
            'y': history['y_scaled'],
            't': history['t'],
            't_change': self.changepoints_t,
            'X': seasonal_features,
            'sigmas': prior_scales,
            'tau': self.changepoint_prior_scale,
            'trend_indicator': int(self.growth == 'logistic'),
            's_a': component_cols['additive_terms'],
            's_m': component_cols['multiplicative_terms'],
        }
        if self.growth == 'linear':
            # cap is unused by the linear trend but the Stan model requires it.
            dat['cap'] = np.zeros(self.history.shape[0])
            kinit = self.linear_growth_init(history)
        else:
            dat['cap'] = history['cap_scaled']
            kinit = self.logistic_growth_init(history)
        model = prophet_stan_model

        def stan_init():
            # Initial parameter values for the Stan optimizer/sampler.
            return {
                'k': kinit[0],
                'm': kinit[1],
                'delta': np.zeros(len(self.changepoints_t)),
                'beta': np.zeros(seasonal_features.shape[1]),
                'sigma_obs': 1,
            }

        if (
            (history['y'].min() == history['y'].max())
            and self.growth == 'linear'
        ):
            # Nothing to fit: constant series, use the init values directly.
            self.params = stan_init()
            self.params['sigma_obs'] = 1e-9
            for par in self.params:
                self.params[par] = np.array([self.params[par]])
        elif self.mcmc_samples > 0:
            # Full MCMC sampling.
            args = dict(
                data=dat,
                init=stan_init,
                iter=self.mcmc_samples,
            )
            args.update(kwargs)
            self.stan_fit = model.sampling(**args)
            for par in self.stan_fit.model_pars:
                self.params[par] = self.stan_fit[par]
                # Shape vector parameters
                if par in ['delta', 'beta'] and len(self.params[par].shape) < 2:
                    self.params[par] = self.params[par].reshape((-1, 1))
        else:
            # MAP estimation via optimization.
            args = dict(
                data=dat,
                init=stan_init,
                algorithm='Newton' if dat['T'] < 100 else 'LBFGS',
                # NOTE(review): iter is a float (1e4); some pystan versions
                # expect an int — confirm against the installed pystan.
                iter=1e4,
            )
            args.update(kwargs)
            try:
                self.stan_fit = model.optimizing(**args)
            except RuntimeError:
                # Fall back on Newton
                logger.warning(
                    'Optimization terminated abnormally. Falling back to Newton.'
                )
                args['algorithm'] = 'Newton'
                self.stan_fit = model.optimizing(**args)
            for par in self.stan_fit:
                self.params[par] = self.stan_fit[par].reshape((1, -1))
        # If no changepoints were requested, replace delta with 0s
        if len(self.changepoints) == 0:
            # Fold delta into the base rate k
            self.params['k'] = self.params['k'] + self.params['delta'].reshape(-1)
            self.params['delta'] = np.zeros(self.params['delta'].shape).reshape((-1, 1))
        return self
def predict(self, df=None):
"""Predict using the prophet model.
Parameters
----------
df: pd.DataFrame with dates for predictions (column ds), and capacity
(column cap) if logistic growth. If not provided, predictions are
made on the history.
Returns
-------
A pd.DataFrame with the forecast components.
"""
if self.history is None:
raise Exception('Model must be fit before predictions can be made.')
if df is None:
df = self.history.copy()
else:
if df.shape[0] == 0:
raise ValueError('Dataframe has no rows.')
df = self.setup_dataframe(df.copy())
df['trend'] = self.predict_trend(df)
seasonal_components = self.predict_seasonal_components(df)
if self.uncertainty_samples:
intervals = self.predict_uncertainty(df)
else:
intervals = None
# Drop columns except ds, cap, floor, and trend
cols = ['ds', 'trend']
if 'cap' in df:
cols.append('cap')
if self.logistic_floor:
cols.append('floor')
# Add in forecast components
df2 = pd.concat((df[cols], intervals, seasonal_components), axis=1)
df2['yhat'] = (
df2['trend'] * (1 + df2['multiplicative_terms'])
+ df2['additive_terms']
)
return df2
@staticmethod
def piecewise_linear(t, deltas, k, m, changepoint_ts):
"""Evaluate the piecewise linear function.
Parameters
----------
t: np.array of times on which the function is evaluated.
deltas: np.array of rate changes at each changepoint.
k: Float initial rate.
m: Float initial offset.
changepoint_ts: np.array of changepoint times.
Returns
-------
Vector y(t).
"""
# Intercept changes
gammas = -changepoint_ts * deltas
# Get cumulative slope and intercept at each t
k_t = k * np.ones_like(t)
m_t = m * np.ones_like(t)
for s, t_s in enumerate(changepoint_ts):
indx = t >= t_s
k_t[indx] += deltas[s]
m_t[indx] += gammas[s]
return k_t * t + m_t
@staticmethod
def piecewise_logistic(t, cap, deltas, k, m, changepoint_ts):
"""Evaluate the piecewise logistic function.
Parameters
----------
t: np.array of times on which the function is evaluated.
cap: np.array of capacities at each t.
deltas: np.array of rate changes at each changepoint.
k: Float initial rate.
m: Float initial offset.
changepoint_ts: np.array of changepoint times.
Returns
-------
Vector y(t).
"""
# Compute offset changes
k_cum = np.concatenate((np.atleast_1d(k), np.cumsum(deltas) + k))
gammas = np.zeros(len(changepoint_ts))
for i, t_s in enumerate(changepoint_ts):
gammas[i] = (
(t_s - m - np.sum(gammas))
* (1 - k_cum[i] / k_cum[i + 1]) # noqa W503
)
# Get cumulative rate and offset at each t
k_t = k * np.ones_like(t)
m_t = m * np.ones_like(t)
for s, t_s in enumerate(changepoint_ts):
indx = t >= t_s
k_t[indx] += deltas[s]
m_t[indx] += gammas[s]
return cap / (1 + np.exp(-k_t * (t - m_t)))
def predict_trend(self, df):
"""Predict trend using the prophet model.
Parameters
----------
df: Prediction dataframe.
Returns
-------
Vector with trend on prediction dates.
"""
k = np.nanmean(self.params['k'])
m = np.nanmean(self.params['m'])
deltas = np.nanmean(self.params['delta'], axis=0)
t = np.array(df['t'])
if self.growth == 'linear':
trend = self.piecewise_linear(t, deltas, k, m, self.changepoints_t)
else:
cap = df['cap_scaled']
trend = self.piecewise_logistic(
t, cap, deltas, k, m, self.changepoints_t)
return trend * self.y_scale + df['floor']
    def predict_seasonal_components(self, df):
        """Predict seasonality components, holidays, and added regressors.

        Parameters
        ----------
        df: Prediction dataframe.

        Returns
        -------
        Dataframe with seasonal components.
        """
        seasonal_features, _, component_cols, _ = (
            self.make_all_seasonality_features(df)
        )
        if self.uncertainty_samples:
            # Percentile bounds for the requested interval width.
            lower_p = 100 * (1.0 - self.interval_width) / 2
            upper_p = 100 * (1.0 + self.interval_width) / 2
        X = seasonal_features.values
        data = {}
        for component in component_cols.columns:
            # Zero out the beta coefficients that do not belong to this
            # component, then project the features.
            beta_c = self.params['beta'] * component_cols[component].values
            comp = np.matmul(X, beta_c.transpose())
            # Additive components are rescaled back to the data units;
            # multiplicative ones stay as fractions of the trend.
            if component in self.component_modes['additive']:
                comp *= self.y_scale
            data[component] = np.nanmean(comp, axis=1)
            if self.uncertainty_samples:
                data[component + '_lower'] = np.nanpercentile(
                    comp, lower_p, axis=1,
                )
                data[component + '_upper'] = np.nanpercentile(
                    comp, upper_p, axis=1,
                )
        return pd.DataFrame(data)
    def sample_posterior_predictive(self, df):
        """Prophet posterior predictive samples.

        Parameters
        ----------
        df: Prediction dataframe.

        Returns
        -------
        Dictionary with posterior predictive samples for the forecast yhat and
        for the trend component.
        """
        # Draw enough simulations per posterior iteration to reach at least
        # self.uncertainty_samples total draws.
        n_iterations = self.params['k'].shape[0]
        samp_per_iter = max(1, int(np.ceil(
            self.uncertainty_samples / float(n_iterations)
        )))
        # Generate seasonality features once so we can re-use them.
        seasonal_features, _, component_cols, _ = (
            self.make_all_seasonality_features(df)
        )
        sim_values = {'yhat': [], 'trend': []}
        for i in range(n_iterations):
            for _j in range(samp_per_iter):
                sim = self.sample_model(
                    df=df,
                    seasonal_features=seasonal_features,
                    iteration=i,
                    s_a=component_cols['additive_terms'],
                    s_m=component_cols['multiplicative_terms'],
                )
                for key in sim_values:
                    sim_values[key].append(sim[key])
        # Stack draws into arrays of shape (n_dates, n_samples).
        for k, v in sim_values.items():
            sim_values[k] = np.column_stack(v)
        return sim_values
def predictive_samples(self, df):
"""Sample from the posterior predictive distribution.
Parameters
----------
df: Dataframe with dates for predictions (column ds), and capacity
(column cap) if logistic growth.
Returns
-------
Dictionary with keys "trend" and "yhat" containing
posterior predictive samples for that component.
"""
df = self.setup_dataframe(df.copy())
sim_values = self.sample_posterior_predictive(df)
return sim_values
def predict_uncertainty(self, df):
"""Prediction intervals for yhat and trend.
Parameters
----------
df: Prediction dataframe.
Returns
-------
Dataframe with uncertainty intervals.
"""
sim_values = self.sample_posterior_predictive(df)
lower_p = 100 * (1.0 - self.interval_width) / 2
upper_p = 100 * (1.0 + self.interval_width) / 2
series = {}
for key in ['yhat', 'trend']:
series['{}_lower'.format(key)] = np.nanpercentile(
sim_values[key], lower_p, axis=1)
series['{}_upper'.format(key)] = np.nanpercentile(
sim_values[key], upper_p, axis=1)
return pd.DataFrame(series)
def sample_model(self, df, seasonal_features, iteration, s_a, s_m):
"""Simulate observations from the extrapolated generative model.
Parameters
----------
df: Prediction dataframe.
seasonal_features: pd.DataFrame of seasonal features.
iteration: Int sampling iteration to use parameters from.
s_a: Indicator vector for additive components
s_m: Indicator vector for multiplicative components
Returns
-------
Dataframe with trend and yhat, each like df['t'].
"""
trend = self.sample_predictive_trend(df, iteration)
beta = self.params['beta'][iteration]
Xb_a = np.matmul(seasonal_features.values, beta * s_a.values) * self.y_scale
Xb_m = np.matmul(seasonal_features.values, beta * s_m.values)
sigma = self.params['sigma_obs'][iteration]
noise = np.random.normal(0, sigma, df.shape[0]) * self.y_scale
return pd.DataFrame({
'yhat': trend * (1 + Xb_m) + Xb_a + noise,
'trend': trend
})
    def sample_predictive_trend(self, df, iteration):
        """Simulate the trend using the extrapolated generative model.

        Parameters
        ----------
        df: Prediction dataframe.
        iteration: Int sampling iteration to use parameters from.

        Returns
        -------
        np.array of simulated trend over df['t'].
        """
        k = self.params['k'][iteration]
        m = self.params['m'][iteration]
        deltas = self.params['delta'][iteration]
        t = np.array(df['t'])
        T = t.max()
        # New changepoints from a Poisson process with rate S on [1, T].
        # t is scaled so that [0, 1] covers the history; T > 1 means the
        # prediction extends into the future.
        if T > 1:
            S = len(self.changepoints_t)
            n_changes = np.random.poisson(S * (T - 1))
        else:
            n_changes = 0
        if n_changes > 0:
            changepoint_ts_new = 1 + np.random.rand(n_changes) * (T - 1)
            changepoint_ts_new.sort()
        else:
            changepoint_ts_new = []
        # Get the empirical scale of the deltas, plus epsilon to avoid NaNs.
        lambda_ = np.mean(np.abs(deltas)) + 1e-8
        # Sample rate changes for the new changepoints from a Laplace prior
        # matching the historical delta scale.
        deltas_new = np.random.laplace(0, lambda_, n_changes)
        # Prepend the times and deltas from the history
        changepoint_ts = np.concatenate((self.changepoints_t,
                                         changepoint_ts_new))
        deltas = np.concatenate((deltas, deltas_new))
        if self.growth == 'linear':
            trend = self.piecewise_linear(t, deltas, k, m, changepoint_ts)
        else:
            cap = df['cap_scaled']
            trend = self.piecewise_logistic(t, cap, deltas, k, m,
                                            changepoint_ts)
        # Undo the y scaling and floor shift applied during setup.
        return trend * self.y_scale + df['floor']
def make_future_dataframe(self, periods, freq='D', include_history=True):
"""Simulate the trend using the extrapolated generative model.
Parameters
----------
periods: Int number of periods to forecast forward.
freq: Any valid frequency for pd.date_range, such as 'D' or 'M'.
include_history: Boolean to include the historical dates in the data
frame for predictions.
Returns
-------
pd.Dataframe that extends forward from the end of self.history for the
requested number of periods.
"""
if self.history_dates is None:
raise Exception('Model must be fit before this can be used.')
last_date = self.history_dates.max()
dates = pd.date_range(
start=last_date,
periods=periods + 1, # An extra in case we include start
freq=freq)
dates = dates[dates > last_date] # Drop start if equals last_date
dates = dates[:periods] # Return correct number of periods
if include_history:
dates = np.concatenate((np.array(self.history_dates), dates))
return pd.DataFrame({'ds': dates})
def plot(self, fcst, ax=None, uncertainty=True, plot_cap=True, xlabel='ds',
ylabel='y', figsize=(10, 6)):
"""Plot the Prophet forecast.
Parameters
----------
fcst: pd.DataFrame output of self.predict.
ax: Optional matplotlib axes on which to plot.
uncertainty: Optional boolean to plot uncertainty intervals.
plot_cap: Optional boolean indicating if the capacity should be shown
in the figure, if available.
xlabel: Optional label name on X-axis
ylabel: Optional label name on Y-axis
figsize: Optional tuple width, height in inches.
Returns
-------
A matplotlib figure.
"""
return plot(
m=self, fcst=fcst, ax=ax, uncertainty=uncertainty,
plot_cap=plot_cap, xlabel=xlabel, ylabel=ylabel,
figsize=figsize
)
def plot_components(self, fcst, uncertainty=True, plot_cap=True,
weekly_start=0, yearly_start=0, figsize=None):
"""Plot the Prophet forecast components.
Will plot whichever are available of: trend, holidays, weekly
seasonality, and yearly seasonality.
Parameters
----------
fcst: pd.DataFrame output of self.predict.
uncertainty: Optional boolean to plot uncertainty intervals.
plot_cap: Optional boolean indicating if the capacity should be shown
in the figure, if available.
weekly_start: Optional int specifying the start day of the weekly
seasonality plot. 0 (default) starts the week on Sunday. 1 shifts
by 1 day to Monday, and so on.
yearly_start: Optional int specifying the start day of the yearly
seasonality plot. 0 (default) starts the year on Jan 1. 1 shifts
by 1 day to Jan 2, and so on.
figsize: Optional tuple width, height in inches.
Returns
-------
A matplotlib figure.
"""
return plot_components(
m=self, fcst=fcst, uncertainty=uncertainty, plot_cap=plot_cap,
weekly_start=weekly_start, yearly_start=yearly_start,
figsize=figsize
)
| 39.176546 | 95 | 0.581099 |
aa06d7cb6b9a6b5a806e1bae3a677b534f642396
| 1,089 |
py
|
Python
|
dagflow/dag_repos/base_dag_repo.py
|
GodQ/autoflow
|
74954dafb9cdb16c29b9f3a7d081a3f3a12e808a
|
[
"Apache-2.0"
] | 1 |
2019-06-20T15:31:13.000Z
|
2019-06-20T15:31:13.000Z
|
dagflow/dag_repos/base_dag_repo.py
|
GodQ/dagflow
|
74954dafb9cdb16c29b9f3a7d081a3f3a12e808a
|
[
"Apache-2.0"
] | null | null | null |
dagflow/dag_repos/base_dag_repo.py
|
GodQ/dagflow
|
74954dafb9cdb16c29b9f3a7d081a3f3a12e808a
|
[
"Apache-2.0"
] | null | null | null |
__author__ = 'godq'


class BaseDagRepo:
    """Abstract interface for DAG repositories.

    Concrete repositories must override every method. The base
    implementations all raise NotImplementedError.

    BUGFIX: the original methods used `raise NotImplemented`, which raises a
    TypeError at call time because NotImplemented (a constant for rich
    comparisons) is not an exception; NotImplementedError is the correct
    exception class.
    """

    def add_dag(self, dag_name, content):
        raise NotImplementedError

    def update_dag(self, dag_name, content):
        raise NotImplementedError

    def delete_dag(self, dag_name):
        raise NotImplementedError

    def find_dag(self, dag_name):
        raise NotImplementedError

    def list_dags(self, detail=False):
        raise NotImplementedError

    def find_step_def(self, dag_name, step_name):
        raise NotImplementedError

    def add_dag_run(self, dag_name, dag_run_id=None):
        raise NotImplementedError

    def stop_dag_run(self, dag_name, dag_run_id):
        raise NotImplementedError

    def list_dag_runs(self, dag_name):
        raise NotImplementedError

    def find_dag_run(self, dag_name, dag_run_id):
        raise NotImplementedError

    def mark_dag_run_status(self, dag_name, dag_run_id, status):
        raise NotImplementedError

    def add_dag_run_event(self, dag_name, dag_run_id, event):
        raise NotImplementedError

    def find_dag_run_events(self, dag_name, run_id):
        raise NotImplementedError
| 25.928571 | 65 | 0.669421 |
63f425174882867db2db0eb1e4aabc7676b37099
| 1,756 |
py
|
Python
|
idb/common/local_targets_manager.py
|
amonshiz/idb
|
3a5ef5c76966657c617133dd1f5d58db99d15d89
|
[
"MIT"
] | null | null | null |
idb/common/local_targets_manager.py
|
amonshiz/idb
|
3a5ef5c76966657c617133dd1f5d58db99d15d89
|
[
"MIT"
] | null | null | null |
idb/common/local_targets_manager.py
|
amonshiz/idb
|
3a5ef5c76966657c617133dd1f5d58db99d15d89
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import json
import logging
import os
from typing import List
from idb.common.constants import IDB_LOCAL_TARGETS_FILE
from idb.common.format import target_description_from_dictionary
from idb.common.types import TargetDescription
# this is the new companion manager for direct_client mode
class LocalTargetsManager:
    """Tracks local companion targets recorded in a JSON file on disk."""

    def __init__(
        self, logger: logging.Logger, local_targets_file: str = IDB_LOCAL_TARGETS_FILE
    ) -> None:
        self.local_targets: List[TargetDescription] = []
        self.local_targets_file = local_targets_file
        self.logger = logger
        self.logger.info(f"idb local targets file stored at {self.local_targets_file}")

    def get_local_targets(self) -> List[TargetDescription]:
        # Re-read the file on every call so the cache reflects current disk state.
        self.local_targets = self._load()
        return self.local_targets

    def _load(self) -> List[TargetDescription]:
        # An absent or empty file means "no targets" rather than an error.
        path = self.local_targets_file
        if not os.path.exists(path) or os.path.getsize(path) == 0:
            return []
        with open(path, "r") as f:
            raw_targets = json.load(f)
        return [target_description_from_dictionary(entry) for entry in raw_targets]

    def is_local_target_available(self, target_udid: str) -> bool:
        # Refresh the cache, then test membership by udid.
        self.get_local_targets()
        return any(target.udid == target_udid for target in self.local_targets)

    def clear(self) -> None:
        # Truncate the file to an empty JSON list.
        with open(self.local_targets_file, "w") as f:
            json.dump([], f)
            f.flush()
d12a54f5ed45f307006a676d11714dc97fab10d4
| 2,947 |
py
|
Python
|
src/pudl/load.py
|
rohithdesikan/pudl
|
0009afc2b31031f6cd50e60aec096421f7a9d3c7
|
[
"MIT"
] | null | null | null |
src/pudl/load.py
|
rohithdesikan/pudl
|
0009afc2b31031f6cd50e60aec096421f7a9d3c7
|
[
"MIT"
] | null | null | null |
src/pudl/load.py
|
rohithdesikan/pudl
|
0009afc2b31031f6cd50e60aec096421f7a9d3c7
|
[
"MIT"
] | null | null | null |
"""Routines for loading PUDL data into various storage formats."""
import logging
import sys
from sqlite3 import Connection as SQLite3Connection
from sqlite3 import sqlite_version
from typing import Dict
import pandas as pd
import sqlalchemy as sa
from packaging import version
from sqlalchemy.exc import IntegrityError
from pudl.helpers import find_foreign_key_errors
from pudl.metadata.classes import Package
logger = logging.getLogger(__name__)
MINIMUM_SQLITE_VERSION = "3.32.0"
def dfs_to_sqlite(
    dfs: Dict[str, pd.DataFrame],
    engine: sa.engine.Engine,
    check_foreign_keys: bool = True,
    check_types: bool = True,
    check_values: bool = True,
) -> None:
    """Load a dictionary of dataframes into the PUDL SQLite DB.

    Drops and recreates all tables described by the keys of ``dfs``, then
    appends each dataframe into its table in foreign-key-safe order.

    Args:
        dfs: Dictionary mapping table names to dataframes.
        engine: PUDL DB connection engine.
        check_foreign_keys: if True, enforce foreign key constraints.
        check_types: if True, enforce column data types.
        check_values: if True, enforce value constraints.

    Raises:
        SystemExit: exits with status 1 if an ``IntegrityError`` occurs
            while appending a dataframe (foreign key errors are logged first).
    """
    # This magic makes SQLAlchemy tell SQLite to check foreign key constraints
    # whenever we insert data into the database, which it doesn't do by default.
    # NOTE(review): this registers a listener on the global Engine class each
    # time the function is called, so listeners accumulate across repeated
    # calls — confirm this is intentional.
    @sa.event.listens_for(sa.engine.Engine, "connect")
    def _set_sqlite_pragma(dbapi_connection, connection_record):
        if isinstance(dbapi_connection, SQLite3Connection):
            cursor = dbapi_connection.cursor()
            cursor.execute(
                f"PRAGMA foreign_keys={'ON' if check_foreign_keys else 'OFF'};"
            )
            cursor.close()
    # Old SQLite can't enforce the type constraints; fall back gracefully.
    bad_sqlite_version = version.parse(sqlite_version) < version.parse(
        MINIMUM_SQLITE_VERSION
    )
    if bad_sqlite_version and check_types:
        check_types = False
        logger.warning(
            f"Found SQLite {sqlite_version} which is less than "
            f"the minimum required version {MINIMUM_SQLITE_VERSION} "
            "As a result, data type constraint checking has been disabled."
        )
    # Generate a SQLAlchemy MetaData object from dataframe names:
    md = Package.from_resource_ids(resource_ids=tuple(sorted(dfs))).to_sql(
        check_types=check_types, check_values=check_values
    )
    # Delete any existing tables, and create them anew:
    md.drop_all(engine)
    md.create_all(engine)
    # Load any tables that exist in our dictionary of dataframes into the
    # corresponding table in the newly create database:
    for table in md.sorted_tables:
        logger.info(f"Loading {table.name} into PUDL SQLite DB.")
        try:
            dfs[table.name].to_sql(
                table.name,
                engine,
                if_exists="append",
                index=False,
                dtype={c.name: c.type for c in table.columns},
            )
        except IntegrityError as err:
            # Surface which foreign keys are broken before bailing out.
            logger.info(find_foreign_key_errors(dfs))
            logger.info(err)
            sys.exit(1)
| 34.670588 | 79 | 0.676281 |
3daf7e7a727265429ff2bfb6bcb22bbbeb7f241a
| 4,972 |
py
|
Python
|
tests/stub_net_date_time.py
|
Reveal-Energy-Services/orchid-python-api
|
21ed6058009f6b8793050a934238d2858a7fa0c9
|
[
"Apache-2.0"
] | null | null | null |
tests/stub_net_date_time.py
|
Reveal-Energy-Services/orchid-python-api
|
21ed6058009f6b8793050a934238d2858a7fa0c9
|
[
"Apache-2.0"
] | 28 |
2020-08-14T14:08:43.000Z
|
2022-02-07T14:11:38.000Z
|
tests/stub_net_date_time.py
|
Reveal-Energy-Services/orchid-python-api
|
21ed6058009f6b8793050a934238d2858a7fa0c9
|
[
"Apache-2.0"
] | 1 |
2021-12-01T21:20:07.000Z
|
2021-12-01T21:20:07.000Z
|
#
# This file is part of Orchid and related technologies.
#
# Copyright (c) 2017-2021 Reveal Energy Services. All Rights Reserved.
#
# LEGAL NOTICE:
# Orchid contains trade secrets and otherwise confidential information
# owned by Reveal Energy Services. Access to and use of this information is
# strictly limited and controlled by the Company. This file may not be copied,
# distributed, or otherwise disclosed outside of the Company's facilities
# except under appropriate precautions to maintain the confidentiality hereof,
# and may not be used in any way not expressly authorized by the Company.
#
import enum
import dataclasses as dc
import datetime as dt
from numbers import Real
import pendulum
import pendulum.tz as ptz
import toolz.curried as toolz
from orchid import (
measurement as om,
net_date_time as net_dt,
)
# noinspection PyUnresolvedReferences
from System import Int32, DateTime, DateTimeKind, DateTimeOffset, TimeSpan
@dc.dataclass
class TimePointDto:
    """A point in time used to build .NET and `pendulum` date-time stubs."""
    year: int
    month: int
    day: int
    hour: int
    minute: int
    second: int
    # Sub-second part as a `pint` quantity (defaults to zero microseconds).
    fractional: om.Quantity = 0 * om.registry.microseconds
    # Time-zone kind applied when converting to .NET / pendulum values.
    kind: net_dt.TimePointTimeZoneKind = net_dt.TimePointTimeZoneKind.UTC
@dc.dataclass
class TimeSpanDto:
    """A duration used to build .NET `TimeSpan` stubs."""
    hour: int
    minute: int
    second: int
    # Setting `is_negative` to `True` makes all components, hour, minute, and
    # second, negative in the resulting .NET `TimeSpan` (see
    # `make_net_time_span`). The original comment said `False`, which
    # contradicts that conversion function.
    is_negative: bool = False
class StubDateTimeKind(enum.IntEnum):
    """Stand-in for .NET's `DateTimeKind` enumeration.

    The original members ended with trailing commas (``UNSPECIFIED = 0,``),
    which makes each assigned value a 1-tuple and only works because `Enum`
    unpacks tuples into the mix-in constructor. The commas are removed so the
    integer values are assigned directly and unambiguously.
    """
    UNSPECIFIED = 0
    UTC = 1
    LOCAL = 2
    INVALID = -999999999  # most likely not a match to any DateTimeKind member.
def make_microseconds(magnitude: Real) -> om.Quantity:
    """
    Build a `pint` measurement of `magnitude` microseconds.

    Args:
        magnitude: The magnitude of the measurement

    Returns:
        The `pint` `Quantity`.
    """
    unit = om.registry.microseconds
    return magnitude * unit
def make_milliseconds(magnitude: Real) -> om.Quantity:
    """
    Build a `pint` measurement of `magnitude` milliseconds.

    Args:
        magnitude: The magnitude of the measurement

    Returns:
        The `pint` `Quantity`.
    """
    unit = om.registry.milliseconds
    return magnitude * unit
def make_net_date_time(time_point_dto: TimePointDto) -> DateTime:
    """
    Convert a `TimePointDto` into the equivalent .NET `DateTime`.

    Args:
        time_point_dto: The `TimePointDto` instance used to construct the .NET `DateTime` instance.

    Returns:
        The .NET `DateTime` equivalent to `time_point_dto`.
    """
    dto = time_point_dto
    # .NET DateTime takes whole milliseconds; round the fractional part.
    milliseconds = int(round(dto.fractional.to(om.registry.milliseconds).magnitude))
    return DateTime(dto.year, dto.month, dto.day,
                    dto.hour, dto.minute, dto.second,
                    milliseconds, dto.kind.value)
def make_net_date_time_offset(time_point_dto: TimePointDto) -> DateTimeOffset:
    """
    Convert a `TimePointDto` into the equivalent .NET `DateTimeOffset`.

    Args:
        time_point_dto: The `TimePointDto` instance used to construct the .NET `DateTimeOffset` instance.

    Returns:
        The .NET `DateTimeOffset` equivalent to `time_point_dto`.
    """
    # Wrap the converted DateTime in an offset value.
    return DateTimeOffset(make_net_date_time(time_point_dto))
def make_net_time_span(time_delta_dto: TimeSpanDto):
    """Convert a `TimeSpanDto` into a .NET `TimeSpan`.

    When `is_negative` is set, every component (hour, minute, second) is
    negated in the resulting `TimeSpan`.
    """
    sign = -1 if time_delta_dto.is_negative else 1
    return TimeSpan(sign * time_delta_dto.hour,
                    sign * time_delta_dto.minute,
                    sign * time_delta_dto.second)
def make_date_time(time_point_dto: TimePointDto) -> pendulum.DateTime:
    """
    Construct a `pendulum.DateTime` instance from a `TimePointDto` instance.

    This method is mostly for convenience.

    Args:
        time_point_dto: The instance from which to construct the `pendulum.Datetime` instance.

    Returns:
        The `pendulum.Datetime` instance equivalent to `time_point_dto`.
    """
    dto = time_point_dto
    # pendulum takes whole microseconds; truncate the fractional part's magnitude.
    microseconds = int(dto.fractional.to(om.registry.microseconds).magnitude)
    time_zone = _kind_to_tzinfo(dto.kind.value)
    return pendulum.datetime(dto.year, dto.month, dto.day,
                             dto.hour, dto.minute, dto.second,
                             microseconds, tz=time_zone)
def utc_time_zone() -> dt.tzinfo:
    """
    Return the single shared instance of the UTC time zone.

    Returns:
        The single instance of the UTC time zone.
    """
    return ptz.UTC
# Maps each `TimePointTimeZoneKind` to the tz argument passed to
# `pendulum.datetime`: a tzinfo for UTC / LOCAL, or '' for UNSPECIFIED.
_KIND_TO_TZINFO = {
    net_dt.TimePointTimeZoneKind.UTC: ptz.UTC,
    net_dt.TimePointTimeZoneKind.LOCAL: ptz.local_timezone(),
    net_dt.TimePointTimeZoneKind.UNSPECIFIED: '',
}
def _kind_to_tzinfo(to_convert: int) -> "dt.tzinfo | str":
    """Look up the pendulum tz value for an integer `TimePointTimeZoneKind`.

    Returns a tzinfo for the UTC / LOCAL kinds, or '' for UNSPECIFIED (see
    `_KIND_TO_TZINFO`); the original `-> str` annotation did not match the
    mapping's values.
    """
    return toolz.get(net_dt.TimePointTimeZoneKind(to_convert), _KIND_TO_TZINFO)
| 30.317073 | 110 | 0.708166 |
ad8d3baa3df8a90761598c296005bc0d6258d406
| 1,604 |
py
|
Python
|
kajiki/tests/test_doctype.py
|
timgates42/kajiki
|
049b61fcde318614bf68a89a90ac3bf3225f83e0
|
[
"MIT"
] | null | null | null |
kajiki/tests/test_doctype.py
|
timgates42/kajiki
|
049b61fcde318614bf68a89a90ac3bf3225f83e0
|
[
"MIT"
] | null | null | null |
kajiki/tests/test_doctype.py
|
timgates42/kajiki
|
049b61fcde318614bf68a89a90ac3bf3225f83e0
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from unittest import TestCase, main
from nine import str
from kajiki.doctype import DocumentTypeDeclaration, extract_dtd
XHTML1 = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" ' \
'"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">'
class TestDTD(TestCase):
    """Exercise DocumentTypeDeclaration lookup and DTD extraction."""

    def test_dtd(self):
        # The empty-string URI maps to the HTML5 declaration.
        html5 = DocumentTypeDeclaration.by_uri['']
        assert html5.name == 'html5'
        assert str(html5) == '<!DOCTYPE html>', str(html5)
        assert html5.rendering_mode == 'html5'
        # None maps to XHTML5, which renders as XML.
        xhtml5 = DocumentTypeDeclaration.by_uri[None]
        assert xhtml5.name == 'xhtml5'
        assert str(xhtml5) == '<!DOCTYPE html>', str(xhtml5)
        assert xhtml5.rendering_mode == 'xml'
        # The XHTML 1.0 Transitional URI maps to its full declaration.
        transitional = DocumentTypeDeclaration.by_uri[
            "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"]
        assert transitional.name == 'xhtml1transitional'
        assert str(transitional) == XHTML1
        assert transitional.rendering_mode == 'xml'

    def test_extract_dtd(self):
        body = '<div>Test template</div>'
        markup = XHTML1 + body
        extracted, pos, rest = extract_dtd(markup)  # The function being tested
        assert extracted == XHTML1
        assert pos == 0
        assert rest == body
        dtd = DocumentTypeDeclaration.matching(extracted)  # Another function
        assert dtd is DocumentTypeDeclaration.by_uri[
            "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"]
if __name__ == '__main__':
main()
| 35.644444 | 79 | 0.638404 |
83ce475370014fe945b7e8455f531edc28acf570
| 82,722 |
py
|
Python
|
tests/p4rt/test_l3.py
|
pins/sonic-swss-public
|
4a443eaa33a3d354d99daa3c340cdcff882fc061
|
[
"Apache-2.0"
] | null | null | null |
tests/p4rt/test_l3.py
|
pins/sonic-swss-public
|
4a443eaa33a3d354d99daa3c340cdcff882fc061
|
[
"Apache-2.0"
] | 3 |
2021-11-19T21:46:50.000Z
|
2021-11-19T22:14:08.000Z
|
tests/p4rt/test_l3.py
|
pins/sonic-swss-public
|
4a443eaa33a3d354d99daa3c340cdcff882fc061
|
[
"Apache-2.0"
] | 1 |
2021-11-19T19:42:07.000Z
|
2021-11-19T19:42:07.000Z
|
from swsscommon import swsscommon
import time
import pytest
import json
import util
import l3
import test_vrf
class TestP4RTL3(object):
    def _set_up(self, dvs):
        # Build one P4RT wrapper per table under test and point each at the
        # virtual switch's databases; later test methods read these attributes.
        self._p4rt_router_intf_obj = l3.P4RtRouterInterfaceWrapper()
        self._p4rt_neighbor_obj = l3.P4RtNeighborWrapper()
        self._p4rt_nexthop_obj = l3.P4RtNextHopWrapper()
        self._p4rt_route_obj = l3.P4RtRouteWrapper()
        self._p4rt_wcmp_group_obj = l3.P4RtWcmpGroupWrapper()
        self._vrf_obj = test_vrf.TestVrf()
        self._p4rt_router_intf_obj.set_up_databases(dvs)
        self._p4rt_neighbor_obj.set_up_databases(dvs)
        self._p4rt_nexthop_obj.set_up_databases(dvs)
        self._p4rt_route_obj.set_up_databases(dvs)
        self._p4rt_wcmp_group_obj.set_up_databases(dvs)
        # Subscribe to the response channel used to verify each operation.
        self.response_consumer = swsscommon.NotificationConsumer(
            self._p4rt_route_obj.appl_db, "APPL_DB_P4RT_RESPONSE_CHANNEL")
    def _set_vrf(self, dvs):
        # Create VRF.
        # Stores the VRF id and creation state on self so _clean_vrf can
        # remove it later.
        self._vrf_obj.setup_db(dvs)
        self.vrf_id = "b4-traffic"
        self.vrf_state = self._vrf_obj.vrf_create(dvs, self.vrf_id, [], {})
    def _clean_vrf(self, dvs):
        # Remove VRF.
        # Uses the id/state recorded by _set_vrf; call only after _set_vrf.
        self._vrf_obj.vrf_remove(dvs, self.vrf_id, self.vrf_state)
    def test_IPv4RouteWithNexthopAddUpdateDeletePass(self, dvs, testlog):
        """Verify add, update, and delete of an IPv4 route with a nexthop.

        Builds the dependency chain router interface -> neighbor -> nexthop
        -> route, checking APPL, APPL_STATE, and ASIC DB contents plus the
        P4RT key-to-OID mapping after every step; then updates the route to
        a drop action, and finally tears everything down in reverse order,
        verifying each removal.
        """
        # Initialize L3 objects and database connectors.
        self._set_up(dvs)
        self._set_vrf(dvs)
        # Set IP type for route object.
        self._p4rt_route_obj.set_ip_type("IPV4")
        # Maintain list of original Application and ASIC DB entries before
        # adding new route.
        db_list = ((self._p4rt_nexthop_obj.asic_db,
                    self._p4rt_nexthop_obj.ASIC_DB_TBL_NAME),)
        self._p4rt_nexthop_obj.get_original_redis_entries(db_list)
        db_list = ((self._p4rt_route_obj.appl_db,
                    "%s:%s" % (self._p4rt_route_obj.APP_DB_TBL_NAME,
                               self._p4rt_route_obj.TBL_NAME)),
                   (self._p4rt_route_obj.appl_state_db,
                    "%s:%s" % (self._p4rt_route_obj.APP_DB_TBL_NAME,
                               self._p4rt_route_obj.TBL_NAME)),
                   (self._p4rt_route_obj.asic_db,
                    self._p4rt_route_obj.ASIC_DB_TBL_NAME))
        self._p4rt_route_obj.get_original_redis_entries(db_list)
        # Fetch the original key to oid information from Redis DB.
        key_to_oid_helper = util.KeyToOidDBHelper(dvs)
        _, original_key_oid_info = key_to_oid_helper.get_db_info()
        # Create router interface.
        router_interface_id, router_intf_key, attr_list = (
            self._p4rt_router_intf_obj.create_router_interface()
        )
        util.verify_response(self.response_consumer, router_intf_key,
                             attr_list, "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count = 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create neighbor.
        neighbor_id, neighbor_key, attr_list = (
            self._p4rt_neighbor_obj.create_neighbor()
        )
        util.verify_response(self.response_consumer, neighbor_key, attr_list,
                             "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create nexthop.
        nexthop_id, nexthop_key, attr_list = (
            self._p4rt_nexthop_obj.create_next_hop()
        )
        util.verify_response(self.response_consumer, nexthop_key, attr_list,
                             "SWSS_RC_SUCCESS")
        # get nexthop_oid of newly created nexthop
        nexthop_oid = self._p4rt_nexthop_obj.get_newly_created_nexthop_oid()
        assert nexthop_oid is not None
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create route entry.
        route_key, attr_list = self._p4rt_route_obj.create_route(nexthop_id)
        util.verify_response(self.response_consumer, route_key, attr_list,
                             "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Query application database for route entries.
        route_entries = util.get_keys(
            self._p4rt_route_obj.appl_db,
            self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
        assert len(route_entries) == (
            self._p4rt_route_obj.get_original_appl_db_entries_count() + 1
        )
        # Query application database for newly created route key.
        (status, fvs) = util.get_key(self._p4rt_route_obj.appl_db,
                                     self._p4rt_route_obj.APP_DB_TBL_NAME,
                                     route_key)
        assert status == True
        util.verify_attr(fvs, attr_list)
        # Query application state database for route entries.
        state_route_entries = util.get_keys(
            self._p4rt_route_obj.appl_state_db,
            self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
        assert len(state_route_entries) == (
            self._p4rt_route_obj.get_original_appl_state_db_entries_count() + 1
        )
        # Query application state database for newly created route key.
        (status, fvs) = util.get_key(self._p4rt_route_obj.appl_state_db,
                                     self._p4rt_route_obj.APP_DB_TBL_NAME,
                                     route_key)
        assert status == True
        util.verify_attr(fvs, attr_list)
        # Query ASIC database for route entries.
        route_entries = util.get_keys(self._p4rt_route_obj.asic_db,
                                      self._p4rt_route_obj.ASIC_DB_TBL_NAME)
        assert len(route_entries) == (
            self._p4rt_route_obj.get_original_asic_db_entries_count() + 1
        )
        # Query ASIC database for newly created route key.
        asic_db_key = self._p4rt_route_obj.get_newly_created_asic_db_key()
        assert asic_db_key is not None
        (status, fvs) = util.get_key(self._p4rt_route_obj.asic_db,
                                     self._p4rt_route_obj.ASIC_DB_TBL_NAME,
                                     asic_db_key)
        assert status == True
        attr_list = [(self._p4rt_route_obj.SAI_ATTR_NEXTHOP_ID, nexthop_oid)]
        util.verify_attr(fvs, attr_list)
        # Update route entry.
        route_key, attr_list = self._p4rt_route_obj.create_route(action="drop")
        util.verify_response(self.response_consumer, route_key, attr_list,
                             "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count did not change in Redis DB.
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Query application database for route entries.
        route_entries = util.get_keys(
            self._p4rt_route_obj.appl_db,
            self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
        assert len(route_entries) == (
            self._p4rt_route_obj.get_original_appl_db_entries_count() + 1
        )
        # Query application database for the updated route key.
        (status, fvs) = util.get_key(self._p4rt_route_obj.appl_db,
                                     self._p4rt_route_obj.APP_DB_TBL_NAME,
                                     route_key)
        assert status == True
        attr_list_appl_db = [(self._p4rt_route_obj.ACTION_FIELD, "drop"),
                             (util.prepend_param_field(self._p4rt_route_obj.NEXTHOP_ID_FIELD), nexthop_id)]
        util.verify_attr(fvs, attr_list_appl_db)
        # Query application state database for route entries.
        state_route_entries = util.get_keys(
            self._p4rt_route_obj.appl_state_db,
            self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
        assert len(state_route_entries) == (
            self._p4rt_route_obj.get_original_appl_state_db_entries_count() + 1
        )
        # Query application state database for the updated route key.
        (status, fvs) = util.get_key(self._p4rt_route_obj.appl_state_db,
                                     self._p4rt_route_obj.APP_DB_TBL_NAME,
                                     route_key)
        assert status == True
        util.verify_attr(fvs, attr_list)
        # Query ASIC database for route entries.
        route_entries = util.get_keys(self._p4rt_route_obj.asic_db,
                                      self._p4rt_route_obj.ASIC_DB_TBL_NAME)
        assert len(route_entries) == (
            self._p4rt_route_obj.get_original_asic_db_entries_count() + 1
        )
        # Query ASIC database for the updated route key.
        asic_db_key = self._p4rt_route_obj.get_newly_created_asic_db_key()
        assert asic_db_key is not None
        (status, fvs) = util.get_key(self._p4rt_route_obj.asic_db,
                                     self._p4rt_route_obj.ASIC_DB_TBL_NAME,
                                     asic_db_key)
        assert status == True
        attr_list = [(self._p4rt_route_obj.SAI_ATTR_NEXTHOP_ID, "oid:0x0"),
                     (self._p4rt_route_obj.SAI_ATTR_PACKET_ACTION, self._p4rt_route_obj.SAI_ATTR_PACKET_ACTION_DROP)]
        util.verify_attr(fvs, attr_list)
        # Remove route entry.
        self._p4rt_route_obj.remove_app_db_entry(route_key)
        util.verify_response(
            self.response_consumer, route_key, [], "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count decremented by 1 in Redis DB.
        count -= 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Remove nexthop.
        self._p4rt_nexthop_obj.remove_app_db_entry(nexthop_key)
        util.verify_response(self.response_consumer, nexthop_key, [],
                             "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count decremented by 1 in Redis DB.
        count -= 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Remove neighbor.
        self._p4rt_neighbor_obj.remove_app_db_entry(neighbor_key)
        util.verify_response(self.response_consumer, neighbor_key, [],
                             "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count decremented by 1 in Redis DB.
        count -= 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Remove router interface.
        self._p4rt_router_intf_obj.remove_app_db_entry(router_intf_key)
        util.verify_response(
            self.response_consumer, router_intf_key, [], "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count is same as the original count.
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info)
        # Query application database for route entries.
        route_entries = util.get_keys(
            self._p4rt_route_obj.appl_db,
            self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
        assert len(route_entries) == (
            self._p4rt_route_obj.get_original_appl_db_entries_count()
        )
        # Verify that the route_key no longer exists in application database.
        # NOTE(review): 'fsv' below is a typo for 'fvs' but harmless — the
        # value is unused.
        (status, fsv) = util.get_key(self._p4rt_route_obj.appl_db,
                                     self._p4rt_route_obj.APP_DB_TBL_NAME,
                                     route_key)
        assert status == False
        # Query application state database for route entries.
        state_route_entries = util.get_keys(
            self._p4rt_route_obj.appl_state_db,
            self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
        assert len(state_route_entries) == (
            self._p4rt_route_obj.get_original_appl_state_db_entries_count()
        )
        # Verify that the route_key no longer exists in application state
        # database.
        (status, fsv) = util.get_key(self._p4rt_route_obj.appl_state_db,
                                     self._p4rt_route_obj.APP_DB_TBL_NAME,
                                     route_key)
        assert status == False
        # Query ASIC database for route entries.
        route_entries = util.get_keys(self._p4rt_route_obj.asic_db,
                                      self._p4rt_route_obj.ASIC_DB_TBL_NAME)
        assert len(route_entries) == (
            self._p4rt_route_obj.get_original_asic_db_entries_count()
        )
        # Verify that removed route no longer exists in ASIC database.
        (status, fvs) = util.get_key(self._p4rt_route_obj.asic_db,
                                     self._p4rt_route_obj.ASIC_DB_TBL_NAME,
                                     asic_db_key)
        assert status == False
        self._clean_vrf(dvs)
def test_IPv6WithWcmpRouteAddUpdateDeletePass(self, dvs, testlog):
# Initialize L3 objects and database connectors.
self._set_up(dvs)
self._set_vrf(dvs)
# Set IP type for route object.
self._p4rt_route_obj.set_ip_type("IPV6")
# Maintain list of original Application and ASIC DB entries before
# adding new route.
db_list = ((self._p4rt_route_obj.appl_db,
"%s:%s" % (self._p4rt_route_obj.APP_DB_TBL_NAME,
self._p4rt_route_obj.TBL_NAME)),
(self._p4rt_route_obj.appl_state_db,
"%s:%s" % (self._p4rt_route_obj.APP_DB_TBL_NAME,
self._p4rt_route_obj.TBL_NAME)),
(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME))
self._p4rt_route_obj.get_original_redis_entries(db_list)
db_list = ((self._p4rt_nexthop_obj.asic_db,
self._p4rt_nexthop_obj.ASIC_DB_TBL_NAME),)
self._p4rt_nexthop_obj.get_original_redis_entries(db_list)
db_list = ((self._p4rt_wcmp_group_obj.appl_db,
"%s:%s" % (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
self._p4rt_wcmp_group_obj.TBL_NAME)),
(self._p4rt_wcmp_group_obj.appl_state_db,
"%s:%s" % (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
self._p4rt_wcmp_group_obj.TBL_NAME)),
(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME),
(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME))
self._p4rt_wcmp_group_obj.get_original_redis_entries(db_list)
# Fetch the original key to oid information from Redis DB.
key_to_oid_helper = util.KeyToOidDBHelper(dvs)
_, original_key_oid_info = key_to_oid_helper.get_db_info()
# Create router interface.
router_interface_id, router_intf_key, attr_list = (
self._p4rt_router_intf_obj.create_router_interface()
)
util.verify_response(self.response_consumer, router_intf_key, attr_list,
"SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count incremented by 1 in Redis DB.
count = 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Create neighbor.
neighbor_id, neighbor_key, attr_list = (
self._p4rt_neighbor_obj.create_neighbor(ipv4=False)
)
util.verify_response(self.response_consumer, neighbor_key, attr_list,
"SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count incremented by 1 in Redis DB.
count += 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Create nexthop.
nexthop_id, nexthop_key, attr_list = (
self._p4rt_nexthop_obj.create_next_hop(ipv4=False)
)
util.verify_response(self.response_consumer, nexthop_key, attr_list,
"SWSS_RC_SUCCESS")
# Get the oid of the newly created nexthop.
nexthop_oid = self._p4rt_nexthop_obj.get_newly_created_nexthop_oid()
assert nexthop_oid is not None
# Verify that P4RT key to OID count incremented by 1 in Redis DB.
count += 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Create wcmp group.
wcmp_group_id, wcmp_group_key, attr_list = (
self._p4rt_wcmp_group_obj.create_wcmp_group()
)
util.verify_response(self.response_consumer, wcmp_group_key, attr_list,
"SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count incremented by 2 in Redis DB
# (1 each for WCMP group and member).
count += 2
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Query application database for wcmp group entries.
wcmp_group_entries = util.get_keys(
self._p4rt_wcmp_group_obj.appl_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
assert len(wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_appl_db_entries_count() + 1
)
# Query application database for newly created wcmp group key.
(status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
wcmp_group_key)
assert status == True
util.verify_attr(fvs, attr_list)
# Query application state database for wcmp group entries.
state_wcmp_group_entries = util.get_keys(
self._p4rt_wcmp_group_obj.appl_state_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
assert len(state_wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_appl_state_db_entries_count()
+ 1
)
# Query application state database for newly created wcmp group key.
(status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_state_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
wcmp_group_key)
assert status == True
util.verify_attr(fvs, attr_list)
# Query ASIC database for wcmp group entries.
wcmp_group_entries = util.get_keys(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME)
assert len(wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_asic_db_group_entries_count()
+ 1
)
# Query ASIC database for newly created wcmp group oid.
wcmp_group_oid = self._p4rt_wcmp_group_obj.get_newly_created_wcmp_group_oid()
assert wcmp_group_oid is not None
attr_list = [(self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_TYPE,
self._p4rt_wcmp_group_obj.SAI_NEXT_HOP_GROUP_TYPE_DYNAMIC_UNORDERED_ECMP)]
(status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME,
wcmp_group_oid)
assert status == True
util.verify_attr(fvs, attr_list)
# Query ASIC database for wcmp group member entries.
wcmp_group_member_entries = util.get_keys(
self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME)
assert len(wcmp_group_member_entries) == (
self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
+ 1
)
# Query ASIC database for newly crated wcmp group member key.
asic_db_group_member_key = (
self._p4rt_wcmp_group_obj.get_newly_created_wcmp_group_member_asic_db_key()
)
assert asic_db_group_member_key is not None
attr_list = [(self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_NEXTHOP_GROUP_ID,
wcmp_group_oid),
(self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_NEXTHOP_ID,
nexthop_oid),
(self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_WEIGHT,
str(self._p4rt_wcmp_group_obj.DEFAULT_WEIGHT))]
(status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME,
asic_db_group_member_key)
assert status == True
util.verify_attr(fvs, attr_list)
# Create route entry.
route_key, attr_list = self._p4rt_route_obj.create_route(
wcmp_group_id=wcmp_group_id, action="set_wcmp_group_id", dst="2001:db8::/32")
util.verify_response(self.response_consumer, route_key, attr_list,
"SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count incremented by 1 in Redis DB.
count += 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Query application database for route entries.
route_entries = util.get_keys(
self._p4rt_route_obj.appl_db,
self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
assert len(route_entries) == (
self._p4rt_route_obj.get_original_appl_db_entries_count() + 1
)
# Query application database for newly created route key.
(status, fvs) = util.get_key(self._p4rt_route_obj.appl_db,
self._p4rt_route_obj.APP_DB_TBL_NAME,
route_key)
assert status == True
util.verify_attr(fvs, attr_list)
# Query application state database for route entries.
state_route_entries = util.get_keys(
self._p4rt_route_obj.appl_state_db,
self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
assert len(state_route_entries) == (
self._p4rt_route_obj.get_original_appl_state_db_entries_count() + 1
)
# Query application state database for newly created route key.
(status, fvs) = util.get_key(self._p4rt_route_obj.appl_state_db,
self._p4rt_route_obj.APP_DB_TBL_NAME,
route_key)
assert status == True
util.verify_attr(fvs, attr_list)
# Query ASIC database for route entries.
route_entries = util.get_keys(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME)
assert len(route_entries) == (
self._p4rt_route_obj.get_original_asic_db_entries_count() + 1
)
# Query ASIC database for newly created route key.
asic_db_key = self._p4rt_route_obj.get_newly_created_asic_db_key()
assert asic_db_key is not None
(status, fvs) = util.get_key(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME,
asic_db_key)
assert status == True
attr_list = [
(self._p4rt_route_obj.SAI_ATTR_NEXTHOP_ID, wcmp_group_oid)]
util.verify_attr(fvs, attr_list)
# Update route entry.
route_key, attr_list = self._p4rt_route_obj.create_route(
action="drop", dst="2001:db8::/32")
util.verify_response(self.response_consumer, route_key, attr_list,
"SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count did not change in Redis DB.
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Query application database for route entries.
route_entries = util.get_keys(
self._p4rt_route_obj.appl_db,
self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
assert len(route_entries) == (
self._p4rt_route_obj.get_original_appl_db_entries_count() + 1
)
# Query application database for the updated route key.
(status, fvs) = util.get_key(self._p4rt_route_obj.appl_db,
self._p4rt_route_obj.APP_DB_TBL_NAME,
route_key)
assert status == True
attr_list_appl_db = [(self._p4rt_route_obj.ACTION_FIELD, "drop"),
(util.prepend_param_field(self._p4rt_route_obj.WCMP_GROUP_ID_FIELD), wcmp_group_id)]
util.verify_attr(fvs, attr_list_appl_db)
# Query application state database for route entries.
state_route_entries = util.get_keys(
self._p4rt_route_obj.appl_state_db,
self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
assert len(state_route_entries) == (
self._p4rt_route_obj.get_original_appl_state_db_entries_count() + 1
)
# Query application state database for the updated route key.
(status, fvs) = util.get_key(self._p4rt_route_obj.appl_state_db,
self._p4rt_route_obj.APP_DB_TBL_NAME,
route_key)
assert status == True
util.verify_attr(fvs, attr_list)
# Query ASIC database for route entries.
route_entries = util.get_keys(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME)
assert len(route_entries) == (
self._p4rt_route_obj.get_original_asic_db_entries_count() + 1
)
# Query ASIC database for the updated route key.
asic_db_key = self._p4rt_route_obj.get_newly_created_asic_db_key()
assert asic_db_key is not None
(status, fvs) = util.get_key(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME,
asic_db_key)
assert status == True
attr_list = [(self._p4rt_route_obj.SAI_ATTR_NEXTHOP_ID, "oid:0x0"),
(self._p4rt_route_obj.SAI_ATTR_PACKET_ACTION, self._p4rt_route_obj.SAI_ATTR_PACKET_ACTION_DROP)]
util.verify_attr(fvs, attr_list)
# Remove route entry.
self._p4rt_route_obj.remove_app_db_entry(route_key)
util.verify_response(
self.response_consumer, route_key, [], "SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count decremented by 1 in Redis DB.
count -= 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Remove wcmp group entry.
self._p4rt_wcmp_group_obj.remove_app_db_entry(wcmp_group_key)
util.verify_response(self.response_consumer, wcmp_group_key, [],
"SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count decremented by 2 in Redis DB
# (1 each for WCMP group and member).
count -= 2
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Remove nexthop.
self._p4rt_nexthop_obj.remove_app_db_entry(nexthop_key)
util.verify_response(self.response_consumer, nexthop_key, [],
"SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count decremented by 1 in Redis DB.
count -= 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Remove neighbor.
self._p4rt_neighbor_obj.remove_app_db_entry(neighbor_key)
util.verify_response(self.response_consumer, neighbor_key, [],
"SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count decremented by 1 in Redis DB.
count -= 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Remove router interface.
self._p4rt_router_intf_obj.remove_app_db_entry(router_intf_key)
util.verify_response(
self.response_consumer, router_intf_key, [], "SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count is same as original count.
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info)
# Query application database for route entries.
route_entries = util.get_keys(
self._p4rt_route_obj.appl_db,
self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
assert len(route_entries) == (
self._p4rt_route_obj.get_original_appl_db_entries_count()
)
# Verify that the route_key no longer exists in application database.
(status, fsv) = util.get_key(self._p4rt_route_obj.appl_db,
self._p4rt_route_obj.APP_DB_TBL_NAME,
route_key)
assert status == False
# Query application state database for route entries.
state_route_entries = util.get_keys(
self._p4rt_route_obj.appl_state_db,
self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
assert len(state_route_entries) == (
self._p4rt_route_obj.get_original_appl_state_db_entries_count()
)
# Verify that the route_key no longer exists in application state
# database.
(status, fsv) = util.get_key(self._p4rt_route_obj.appl_state_db,
self._p4rt_route_obj.APP_DB_TBL_NAME,
route_key)
assert status == False
# Query ASIC database for route entries.
route_entries = util.get_keys(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME)
assert len(route_entries) == (
self._p4rt_route_obj.get_original_asic_db_entries_count()
)
# Verify that removed route no longer exists in ASIC database.
(status, fvs) = util.get_key(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME,
asic_db_key)
assert status == False
# Query application database for wcmp group entries.
wcmp_group_entries = util.get_keys(
self._p4rt_wcmp_group_obj.appl_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
assert len(wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_appl_db_entries_count()
)
# Verify that the route_key no longer exists in application database.
(status, fsv) = util.get_key(self._p4rt_wcmp_group_obj.appl_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
wcmp_group_key)
assert status == False
# Query application state database for wcmp group entries.
state_wcmp_group_entries = util.get_keys(
self._p4rt_wcmp_group_obj.appl_state_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
assert len(state_wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_appl_state_db_entries_count()
)
# Verify that the wcmp_group_key no longer exists in application state
# database.
(status, fsv) = util.get_key(self._p4rt_wcmp_group_obj.appl_state_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
wcmp_group_key)
assert status == False
# Query ASIC database for wcmp group entries.
wcmp_group_entries = util.get_keys(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME)
assert len(wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_asic_db_group_entries_count()
)
# Verify that removed wcmp group no longer exists in ASIC database.
(status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME,
wcmp_group_oid)
assert status == False
# Query ASIC database for wcmp group member entries.
wcmp_group_member_entries = util.get_keys(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME)
assert len(wcmp_group_member_entries) == (
self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
)
# Verify that removed wcmp group member no longer exists in ASIC
# database.
(status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME,
asic_db_group_member_key)
assert status == False
self._clean_vrf(dvs)
def test_IPv4RouteAddWithInvalidNexthopFail(self, dvs, testlog):
marker = dvs.add_log_marker()
# Initialize L3 objects and database connectors.
self._set_up(dvs)
self._set_vrf(dvs)
# Set IP type for route object.
self._p4rt_route_obj.set_ip_type("IPV4")
# Maintain list of original Application and ASIC DB entries before
# adding new route.
db_list = ((self._p4rt_route_obj.appl_db,
"%s:%s" % (self._p4rt_route_obj.APP_DB_TBL_NAME,
self._p4rt_route_obj.TBL_NAME)),
(self._p4rt_route_obj.appl_state_db,
"%s:%s" % (self._p4rt_route_obj.APP_DB_TBL_NAME,
self._p4rt_route_obj.TBL_NAME)),
(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME))
self._p4rt_route_obj.get_original_redis_entries(db_list)
# Create route entry using invalid nexthop (expect failure).
route_key, attr_list = self._p4rt_route_obj.create_route()
err_log = "[OrchAgent] Nexthop ID '8' does not exist"
util.verify_response(self.response_consumer, route_key, attr_list,
"SWSS_RC_NOT_FOUND", err_log)
# Query application database for route entries.
route_entries = util.get_keys(
self._p4rt_route_obj.appl_db,
self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
assert len(route_entries) == (
self._p4rt_route_obj.get_original_appl_db_entries_count() + 1
)
# Query application database for newly created route key.
(status, fvs) = util.get_key(self._p4rt_route_obj.appl_db,
self._p4rt_route_obj.APP_DB_TBL_NAME,
route_key)
assert status == True
util.verify_attr(fvs, attr_list)
# Query application database for route entries (no new route entry
# expected).
state_route_entries = util.get_keys(
self._p4rt_route_obj.appl_state_db,
self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
assert len(state_route_entries) == (
self._p4rt_route_obj.get_original_appl_state_db_entries_count()
)
# Verify that the newly added route key does not exist in application
# state db.
(status, fvs) = util.get_key(self._p4rt_route_obj.appl_state_db,
self._p4rt_route_obj.APP_DB_TBL_NAME,
route_key)
assert status == False
# Query ASIC database for route entries (no new ASIC DB entry should be
# created for route entry).
route_entries = util.get_keys(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME)
assert len(route_entries) == (
self._p4rt_route_obj.get_original_asic_db_entries_count()
)
# Remove route entry (expect failure).
self._p4rt_route_obj.remove_app_db_entry(route_key)
err_log = "[OrchAgent] Route entry does not exist"
util.verify_response(
self.response_consumer, route_key, [], "SWSS_RC_NOT_FOUND", err_log)
self._clean_vrf(dvs)
def test_IPv6RouteAddWithInvalidWcmpFail(self, dvs, testlog):
marker = dvs.add_log_marker()
# Initialize L3 objects and database connectors.
self._set_up(dvs)
self._set_vrf(dvs)
# Set IP type for route object.
self._p4rt_route_obj.set_ip_type("IPV6")
# Maintain list of original Application and ASIC DB entries before
# adding new route.
db_list = ((self._p4rt_route_obj.appl_db,
"%s:%s" % (self._p4rt_route_obj.APP_DB_TBL_NAME,
self._p4rt_route_obj.TBL_NAME)),
(self._p4rt_route_obj.appl_state_db,
"%s:%s" % (self._p4rt_route_obj.APP_DB_TBL_NAME,
self._p4rt_route_obj.TBL_NAME)),
(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME))
self._p4rt_route_obj.get_original_redis_entries(db_list)
# Create route entry using invalid wcmp group (expect failure).
route_key, attr_list = self._p4rt_route_obj.create_route(
action="set_wcmp_group_id", wcmp_group_id="8")
err_log = "[OrchAgent] WCMP group '8' does not exist"
util.verify_response(self.response_consumer, route_key, attr_list,
"SWSS_RC_NOT_FOUND", err_log)
# Query application database for route entries
route_entries = util.get_keys(self._p4rt_route_obj.appl_db,
self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
assert len(route_entries) == (
self._p4rt_route_obj.get_original_appl_db_entries_count() + 1
)
# Query application database for newly created route key.
(status, fvs) = util.get_key(self._p4rt_route_obj.appl_db,
self._p4rt_route_obj.APP_DB_TBL_NAME,
route_key)
assert status == True
util.verify_attr(fvs, attr_list)
# Query application state database for route entries (no new APPL STATE DB
# entry should be created for route entry).
state_route_entries = util.get_keys(self._p4rt_route_obj.appl_state_db,
self._p4rt_route_obj.APP_DB_TBL_NAME + ":" + self._p4rt_route_obj.TBL_NAME)
assert len(state_route_entries) == (
self._p4rt_route_obj.get_original_appl_state_db_entries_count()
)
# Verify that newly created route key does not exist in application
# state db.
(status, fvs) = util.get_key(self._p4rt_route_obj.appl_state_db,
self._p4rt_route_obj.APP_DB_TBL_NAME,
route_key)
assert status == False
# Query ASIC database for route entries (no new ASIC DB entry should be
# created for route entry).
route_entries = util.get_keys(self._p4rt_route_obj.asic_db,
self._p4rt_route_obj.ASIC_DB_TBL_NAME)
assert len(route_entries) == (
self._p4rt_route_obj.get_original_asic_db_entries_count()
)
# Remove route entry (expect failure).
self._p4rt_route_obj.remove_app_db_entry(route_key)
err_log = "[OrchAgent] Route entry does not exist"
util.verify_response(
self.response_consumer, route_key, [], "SWSS_RC_NOT_FOUND", err_log)
self._clean_vrf(dvs)
    def test_PruneAndRestoreNextHop(self, dvs, testlog):
        """Verify WCMP group member pruning/restoration on watch-port flaps.

        Builds the full dependency chain (router interface -> neighbor ->
        nexthop -> single-member WCMP group with a watch_port), then:
          1. forces the watch port oper-down and checks the member is pruned
             from ASIC DB while the APPL STATE DB entry stays unchanged;
          2. brings the port back up and checks the member reappears in
             ASIC DB with the same attributes;
          3. tears everything down and checks the P4RT key-to-OID map in
             Redis returns to its original size at every step.
        """
        # Initialize L3 objects and database connectors.
        self._set_up(dvs)
        # NOTE(review): cdb appears unused in this test — confirm before
        # removing.
        cdb = swsscommon.DBConnector(4, dvs.redis_sock, 0)
        # Maintain original WCMP group entries for ASIC DB.
        db_list = ((self._p4rt_wcmp_group_obj.appl_db,
                    "%s:%s" % (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                               self._p4rt_wcmp_group_obj.TBL_NAME)),
                   (self._p4rt_wcmp_group_obj.appl_state_db,
                    "%s:%s" % (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                               self._p4rt_wcmp_group_obj.TBL_NAME)),
                   (self._p4rt_wcmp_group_obj.asic_db,
                    self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME),
                   (self._p4rt_wcmp_group_obj.asic_db,
                    self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME))
        self._p4rt_wcmp_group_obj.get_original_redis_entries(db_list)
        db_list = ((self._p4rt_nexthop_obj.asic_db,
                    self._p4rt_nexthop_obj.ASIC_DB_TBL_NAME),)
        self._p4rt_nexthop_obj.get_original_redis_entries(db_list)
        # Fetch the original key to oid information from Redis DB.
        key_to_oid_helper = util.KeyToOidDBHelper(dvs)
        _, original_key_oid_info = key_to_oid_helper.get_db_info()
        # Bring up port under test.
        port_name = "Ethernet0"
        if_name = "eth0"
        util.initialize_interface(dvs, port_name, "10.0.0.0/31")
        util.set_interface_status(dvs, if_name, "up")
        # Create router interface.
        router_interface_id, router_intf_key, attr_list = (
            self._p4rt_router_intf_obj.create_router_interface()
        )
        util.verify_response(
            self.response_consumer, router_intf_key, attr_list,
            "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count = 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create neighbor.
        neighbor_id, neighbor_key, attr_list = (
            self._p4rt_neighbor_obj.create_neighbor()
        )
        util.verify_response(
            self.response_consumer, neighbor_key, attr_list, "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create nexthop.
        nexthop_id, nexthop_key, attr_list = (
            self._p4rt_nexthop_obj.create_next_hop()
        )
        util.verify_response(
            self.response_consumer, nexthop_key, attr_list, "SWSS_RC_SUCCESS")
        # Get nexthop_oid of newly created nexthop.
        nexthop_oid = self._p4rt_nexthop_obj.get_newly_created_nexthop_oid()
        assert nexthop_oid is not None
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create wcmp group with one member.
        wcmp_group_id, wcmp_group_key, attr_list = (
            self._p4rt_wcmp_group_obj.create_wcmp_group(watch_port=port_name)
        )
        util.verify_response(
            self.response_consumer, wcmp_group_key, attr_list,
            "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 2 in Redis DB
        # (1 each for WCMP group and member).
        count += 2
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Query application database for wcmp group entries.
        wcmp_group_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.appl_db,
            self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
        assert len(wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_appl_db_entries_count() + 1
        )
        # Query application database for newly created wcmp group key.
        (status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_db,
                                     self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                                     wcmp_group_key)
        assert status == True
        util.verify_attr(fvs, attr_list)
        # Query application state database for wcmp group entries.
        state_wcmp_group_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.appl_state_db,
            self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
        assert len(state_wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_appl_state_db_entries_count()
            + 1
        )
        # Query application state database for newly created wcmp group key.
        (status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_state_db,
                                     self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                                     wcmp_group_key)
        assert status == True
        util.verify_attr(fvs, attr_list)
        # Query ASIC database for wcmp group entries.
        wcmp_group_entries = util.get_keys(self._p4rt_wcmp_group_obj.asic_db,
                                           self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME)
        assert len(wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_asic_db_group_entries_count()
            + 1
        )
        # Query ASIC database for newly created wcmp group oid.
        wcmp_group_oid = self._p4rt_wcmp_group_obj.get_newly_created_wcmp_group_oid()
        assert wcmp_group_oid is not None
        (status, fvs) = util.get_key(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME,
            wcmp_group_oid
        )
        assert status == True
        asic_attr_list = [
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_TYPE,
             (self._p4rt_wcmp_group_obj.
              SAI_NEXT_HOP_GROUP_TYPE_DYNAMIC_UNORDERED_ECMP))
        ]
        util.verify_attr(fvs, asic_attr_list)
        # Query ASIC database for newly created wcmp group member key.
        asic_db_group_member_key = self._p4rt_wcmp_group_obj.get_newly_created_wcmp_group_member_asic_db_key()
        assert asic_db_group_member_key is not None
        (status, fvs) = util.get_key(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME,
            asic_db_group_member_key
        )
        assert status == True
        asic_attr_list = [
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_NEXTHOP_GROUP_ID,
             wcmp_group_oid),
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_NEXTHOP_ID,
             nexthop_oid),
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_WEIGHT,
             str(self._p4rt_wcmp_group_obj.DEFAULT_WEIGHT))
        ]
        util.verify_attr(fvs, asic_attr_list)
        # Force oper-down for the associated port.
        # (Omitting the status argument sets the interface down, per this
        # file's usage of set_interface_status.)
        util.set_interface_status(dvs, if_name)
        # Check ASIC DB to verify that associated member for watch_port is
        # pruned.
        wcmp_group_member_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME
        )
        assert len(wcmp_group_member_entries) == (
            self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
        )
        # Check APPL STATE DB to verify no change.
        (status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_state_db,
                                     self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                                     wcmp_group_key)
        assert status == True
        util.verify_attr(fvs, attr_list)
        # Force oper-up for associated port.
        util.set_interface_status(dvs, if_name, "up")
        # Check pruned next hop member is restored in ASIC DB.
        wcmp_group_member_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME
        )
        assert len(wcmp_group_member_entries) == (
            self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
            + 1
        )
        asic_db_group_member_key = self._p4rt_wcmp_group_obj.get_newly_created_wcmp_group_member_asic_db_key()
        assert asic_db_group_member_key is not None
        (status, fvs) = util.get_key(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME,
            asic_db_group_member_key
        )
        assert status == True
        util.verify_attr(fvs, asic_attr_list)
        # Delete WCMP group member.
        self._p4rt_wcmp_group_obj.remove_app_db_entry(wcmp_group_key)
        # Verify that P4RT key to OID count decremented by 2 in Redis DB
        # (1 each for WCMP group and member).
        count -= 2
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Verify that APPL STATE DB is now updated.
        state_wcmp_group_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.appl_state_db,
            (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" +
             self._p4rt_wcmp_group_obj.TBL_NAME))
        assert len(state_wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_appl_state_db_entries_count()
        )
        # Delete next hop.
        self._p4rt_nexthop_obj.remove_app_db_entry(nexthop_key)
        # Verify that P4RT key to OID count decremented by 1 in Redis DB.
        count -= 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Delete neighbor.
        self._p4rt_neighbor_obj.remove_app_db_entry(neighbor_key)
        # Verify that P4RT key to OID count decremented by 1 in Redis DB.
        count -= 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Delete router interface.
        self._p4rt_router_intf_obj.remove_app_db_entry(router_intf_key)
        # Verify that P4RT key to OID count is same as the original count.
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info)
    def test_PruneNextHopOnWarmBoot(self, dvs, testlog):
        """Verify that a WCMP member on a down watch-port is pruned across a
        swss warm reboot.

        Builds the dependency chain (router interface -> neighbor -> nexthop
        -> single-member WCMP group with a watch_port on an oper-up port),
        verifies the member is programmed in ASIC DB, then takes the port
        oper-down and warm-reboots swss. After the reboot the member must be
        pruned from ASIC DB. Finally tears everything down and checks the
        P4RT key-to-OID map in Redis returns to its original size.
        """
        # Initialize L3 objects and database connectors.
        self._set_up(dvs)
        # NOTE(review): cdb appears unused in this test — confirm before
        # removing.
        cdb = swsscommon.DBConnector(4, dvs.redis_sock, 0)
        # Maintain original WCMP group entries for ASIC DB.
        db_list = ((self._p4rt_wcmp_group_obj.appl_db,
                    "%s:%s" % (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                               self._p4rt_wcmp_group_obj.TBL_NAME)),
                   (self._p4rt_wcmp_group_obj.appl_state_db,
                    "%s:%s" % (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                               self._p4rt_wcmp_group_obj.TBL_NAME)),
                   (self._p4rt_wcmp_group_obj.asic_db,
                    self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME),
                   (self._p4rt_wcmp_group_obj.asic_db,
                    self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME))
        self._p4rt_wcmp_group_obj.get_original_redis_entries(db_list)
        db_list = ((self._p4rt_nexthop_obj.asic_db,
                    self._p4rt_nexthop_obj.ASIC_DB_TBL_NAME),)
        self._p4rt_nexthop_obj.get_original_redis_entries(db_list)
        # Fetch the original key to oid information from Redis DB.
        key_to_oid_helper = util.KeyToOidDBHelper(dvs)
        _, original_key_oid_info = key_to_oid_helper.get_db_info()
        # Bring up port under test.
        port_name = "Ethernet0"
        if_name = "eth0"
        util.initialize_interface(dvs, port_name, "10.0.0.0/31")
        util.set_interface_status(dvs, if_name, "up")
        # Create router interface.
        router_interface_id, router_intf_key, attr_list = (
            self._p4rt_router_intf_obj.create_router_interface()
        )
        util.verify_response(
            self.response_consumer, router_intf_key, attr_list,
            "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count = 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create neighbor.
        neighbor_id, neighbor_key, attr_list = (
            self._p4rt_neighbor_obj.create_neighbor()
        )
        util.verify_response(
            self.response_consumer, neighbor_key, attr_list, "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create nexthop.
        nexthop_id, nexthop_key, attr_list = (
            self._p4rt_nexthop_obj.create_next_hop()
        )
        util.verify_response(
            self.response_consumer, nexthop_key, attr_list, "SWSS_RC_SUCCESS")
        # Get nexthop_oid of newly created nexthop.
        nexthop_oid = self._p4rt_nexthop_obj.get_newly_created_nexthop_oid()
        assert nexthop_oid is not None
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create wcmp group with one member.
        wcmp_group_id, wcmp_group_key, attr_list = (
            self._p4rt_wcmp_group_obj.create_wcmp_group(watch_port=port_name)
        )
        util.verify_response(
            self.response_consumer, wcmp_group_key, attr_list,
            "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 2 in Redis DB
        # (1 each for WCMP group and member).
        count += 2
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Query application database for wcmp group entries.
        wcmp_group_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.appl_db,
            self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
        assert len(wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_appl_db_entries_count() + 1
        )
        # Query application database for newly created wcmp group key.
        (status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_db,
                                     self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                                     wcmp_group_key)
        assert status == True
        util.verify_attr(fvs, attr_list)
        # Query application state database for wcmp group entries.
        state_wcmp_group_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.appl_state_db,
            self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
        assert len(state_wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_appl_state_db_entries_count()
            + 1
        )
        # Query application state database for newly created wcmp group key.
        (status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_state_db,
                                     self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                                     wcmp_group_key)
        assert status == True
        util.verify_attr(fvs, attr_list)
        # Query ASIC database for wcmp group entries.
        wcmp_group_entries = util.get_keys(self._p4rt_wcmp_group_obj.asic_db,
                                           self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME)
        assert len(wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_asic_db_group_entries_count()
            + 1
        )
        # Query ASIC database for newly created wcmp group oid.
        wcmp_group_oid = self._p4rt_wcmp_group_obj.get_newly_created_wcmp_group_oid()
        assert wcmp_group_oid is not None
        (status, fvs) = util.get_key(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME,
            wcmp_group_oid
        )
        assert status == True
        asic_attr_list = [
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_TYPE,
             (self._p4rt_wcmp_group_obj.
              SAI_NEXT_HOP_GROUP_TYPE_DYNAMIC_UNORDERED_ECMP))
        ]
        util.verify_attr(fvs, asic_attr_list)
        # Query ASIC database for wcmp group member entries.
        wcmp_group_member_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME
        )
        assert len(wcmp_group_member_entries) == (
            self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
            + 1
        )
        # Query ASIC database for newly created wcmp group member key.
        asic_db_group_member_key = self._p4rt_wcmp_group_obj.get_newly_created_wcmp_group_member_asic_db_key()
        assert asic_db_group_member_key is not None
        (status, fvs) = util.get_key(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME,
            asic_db_group_member_key
        )
        assert status == True
        asic_attr_list = [
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_NEXTHOP_GROUP_ID,
             wcmp_group_oid),
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_NEXTHOP_ID,
             nexthop_oid),
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_WEIGHT,
             str(self._p4rt_wcmp_group_obj.DEFAULT_WEIGHT))
        ]
        util.verify_attr(fvs, asic_attr_list)
        # Bring down the port.
        # (Omitting the status argument sets the interface down, per this
        # file's usage of set_interface_status.)
        util.set_interface_status(dvs, if_name)
        # Execute the warm reboot.
        dvs.runcmd("config warm_restart enable swss")
        dvs.stop_swss()
        dvs.start_swss()
        # Make sure the system is stable.
        dvs.check_swss_ready()
        # Verify that the associated next hop is pruned in ASIC DB.
        wcmp_group_member_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME
        )
        assert len(wcmp_group_member_entries) == (
            self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
        )
        # Delete WCMP group member.
        self._p4rt_wcmp_group_obj.remove_app_db_entry(wcmp_group_key)
        # Verify that P4RT key to OID count decremented by 2 in Redis DB
        # (1 each for WCMP group and member).
        count -= 2
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Verify that APPL STATE DB is updated.
        state_wcmp_group_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.appl_state_db,
            (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" +
             self._p4rt_wcmp_group_obj.TBL_NAME))
        assert len(state_wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_appl_state_db_entries_count()
        )
        # Delete next hop.
        self._p4rt_nexthop_obj.remove_app_db_entry(nexthop_key)
        # Verify that P4RT key to OID count decremented by 1 in Redis DB.
        count -= 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Delete neighbor.
        self._p4rt_neighbor_obj.remove_app_db_entry(neighbor_key)
        # Verify that P4RT key to OID count decremented by 1 in Redis DB.
        count -= 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Delete router interface.
        self._p4rt_router_intf_obj.remove_app_db_entry(router_intf_key)
        # Verify that P4RT key to OID count is same as the original count.
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info)
    def test_CreateWcmpMemberForOperUpWatchportOnly(self, dvs, testlog):
        """Verifies watchport-gated WCMP member creation.

        A WCMP group member whose watchport is operationally down must not be
        programmed in SAI; once the watchport comes up, the member is expected
        to appear in ASIC DB. The test then tears everything down and checks
        the P4RT key-to-OID bookkeeping at each step.
        """
        # Initialize L3 objects and database connectors.
        self._set_up(dvs)
        cdb = swsscommon.DBConnector(4, dvs.redis_sock, 0)
        # Maintain original WCMP group entries for ASIC DB.
        db_list = ((self._p4rt_wcmp_group_obj.appl_db,
                    "%s:%s" % (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                               self._p4rt_wcmp_group_obj.TBL_NAME)),
                   (self._p4rt_wcmp_group_obj.appl_state_db,
                    "%s:%s" % (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                               self._p4rt_wcmp_group_obj.TBL_NAME)),
                   (self._p4rt_wcmp_group_obj.asic_db,
                    self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME),
                   (self._p4rt_wcmp_group_obj.asic_db,
                    self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME))
        self._p4rt_wcmp_group_obj.get_original_redis_entries(db_list)
        db_list = ((self._p4rt_nexthop_obj.asic_db,
                    self._p4rt_nexthop_obj.ASIC_DB_TBL_NAME),)
        self._p4rt_nexthop_obj.get_original_redis_entries(db_list)
        # Fetch the original key to oid information from Redis DB.
        key_to_oid_helper = util.KeyToOidDBHelper(dvs)
        _, original_key_oid_info = key_to_oid_helper.get_db_info()
        # Force oper-down on port under test.
        port_name = "Ethernet0"
        if_name = "eth0"
        util.initialize_interface(dvs, port_name, "10.0.0.0/31")
        # No explicit status argument: brings the interface down.
        util.set_interface_status(dvs, if_name)
        # Create router interface.
        router_interface_id, router_intf_key, attr_list = (
            self._p4rt_router_intf_obj.create_router_interface()
        )
        util.verify_response(
            self.response_consumer, router_intf_key, attr_list,
            "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count = 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create neighbor.
        neighbor_id, neighbor_key, attr_list = (
            self._p4rt_neighbor_obj.create_neighbor()
        )
        util.verify_response(
            self.response_consumer, neighbor_key, attr_list, "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create nexthop.
        nexthop_id, nexthop_key, attr_list = (
            self._p4rt_nexthop_obj.create_next_hop()
        )
        util.verify_response(
            self.response_consumer, nexthop_key, attr_list, "SWSS_RC_SUCCESS")
        # Get nexthop_oid of newly created nexthop.
        nexthop_oid = self._p4rt_nexthop_obj.get_newly_created_nexthop_oid()
        assert nexthop_oid is not None
        # Verify that P4RT key to OID count incremented by 1 in Redis DB.
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Create wcmp group with one member.
        wcmp_group_id, wcmp_group_key, attr_list = (
            self._p4rt_wcmp_group_obj.create_wcmp_group(watch_port=port_name)
        )
        util.verify_response(
            self.response_consumer, wcmp_group_key, attr_list,
            "SWSS_RC_SUCCESS")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB
        # (WCMP group member is not created for operationally down watchport).
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Query application database for wcmp group entries.
        wcmp_group_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.appl_db,
            self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
        assert len(wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_appl_db_entries_count() + 1
        )
        # Query application database for newly created wcmp group key.
        (status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_db,
                                     self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                                     wcmp_group_key)
        assert status == True
        util.verify_attr(fvs, attr_list)
        # Query application state database for wcmp group entries.
        state_wcmp_group_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.appl_state_db,
            self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
        assert len(state_wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_appl_state_db_entries_count() + 1
        )
        # Query application state database for newly created wcmp group key.
        (status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_state_db,
                                     self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
                                     wcmp_group_key)
        assert status == True
        util.verify_attr(fvs, attr_list)
        # Query ASIC database for wcmp group entries.
        wcmp_group_entries = util.get_keys(self._p4rt_wcmp_group_obj.asic_db,
                                           self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME)
        assert len(wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_asic_db_group_entries_count()
            + 1
        )
        # Query ASIC database for newly created wcmp group oid.
        wcmp_group_oid = self._p4rt_wcmp_group_obj.get_newly_created_wcmp_group_oid()
        assert wcmp_group_oid is not None
        (status, fvs) = util.get_key(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME,
            wcmp_group_oid
        )
        assert status == True
        asic_attr_list = [
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_TYPE,
             (self._p4rt_wcmp_group_obj.
              SAI_NEXT_HOP_GROUP_TYPE_DYNAMIC_UNORDERED_ECMP))
        ]
        util.verify_attr(fvs, asic_attr_list)
        # Query ASIC database for wcmp group member entries (expect no entry).
        wcmp_group_member_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME
        )
        assert len(wcmp_group_member_entries) == (
            self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
        )
        # Bring up the port.
        util.set_interface_status(dvs, if_name, "up")
        # Verify that P4RT key to OID count incremented by 1 in Redis DB
        # (WCMP group member is now expected to be created in SAI due to
        # watchport now being operationally up)
        count += 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Verify that next hop member is now created in SAI.
        wcmp_group_member_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.asic_db,
            self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME
        )
        assert len(wcmp_group_member_entries) == (
            self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
            + 1
        )
        asic_db_group_member_key = self._p4rt_wcmp_group_obj.get_newly_created_wcmp_group_member_asic_db_key()
        assert asic_db_group_member_key is not None
        (status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.asic_db,
                                     (self._p4rt_wcmp_group_obj.
                                      ASIC_DB_GROUP_MEMBER_TBL_NAME),
                                     asic_db_group_member_key)
        assert status == True
        asic_attr_list = [
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_NEXTHOP_GROUP_ID,
             wcmp_group_oid),
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_NEXTHOP_ID,
             nexthop_oid),
            (self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_MEMBER_WEIGHT,
             str(self._p4rt_wcmp_group_obj.DEFAULT_WEIGHT))
        ]
        util.verify_attr(fvs, asic_attr_list)
        # Delete WCMP group member.
        self._p4rt_wcmp_group_obj.remove_app_db_entry(wcmp_group_key)
        # Verify that P4RT key to OID count decremented by 2 in Redis DB
        # (1 each for WCMP group and member).
        count -= 2
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Verify that APPL STATE DB is updated.
        state_wcmp_group_entries = util.get_keys(
            self._p4rt_wcmp_group_obj.appl_state_db,
            (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" +
             self._p4rt_wcmp_group_obj.TBL_NAME))
        assert len(state_wcmp_group_entries) == (
            self._p4rt_wcmp_group_obj.get_original_appl_state_db_entries_count()
        )
        # Delete next hop.
        self._p4rt_nexthop_obj.remove_app_db_entry(nexthop_key)
        # Verify that P4RT key to OID count decremented by 1 in Redis DB.
        count -= 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Delete neighbor.
        self._p4rt_neighbor_obj.remove_app_db_entry(neighbor_key)
        # Verify that P4RT key to OID count decremented by 1 in Redis DB.
        count -= 1
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info) + count
        # Delete router interface.
        self._p4rt_router_intf_obj.remove_app_db_entry(router_intf_key)
        # Verify that P4RT key to OID count is same as the original count.
        status, fvs = key_to_oid_helper.get_db_info()
        assert status == True
        assert len(fvs) == len(original_key_oid_info)
def test_RemovePrunedWcmpGroupMember(self, dvs, testlog):
# Initialize L3 objects and database connectors.
self._set_up(dvs)
cdb = swsscommon.DBConnector(4, dvs.redis_sock, 0)
# Maintain original WCMP group entries for ASIC DB.
db_list = ((self._p4rt_wcmp_group_obj.appl_db,
"%s:%s" % (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
self._p4rt_wcmp_group_obj.TBL_NAME)),
(self._p4rt_wcmp_group_obj.appl_state_db,
"%s:%s" % (self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
self._p4rt_wcmp_group_obj.TBL_NAME)),
(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME),
(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME))
self._p4rt_wcmp_group_obj.get_original_redis_entries(db_list)
db_list = ((self._p4rt_nexthop_obj.asic_db,
self._p4rt_nexthop_obj.ASIC_DB_TBL_NAME),)
self._p4rt_nexthop_obj.get_original_redis_entries(db_list)
# Fetch the original key to oid information from Redis DB.
key_to_oid_helper = util.KeyToOidDBHelper(dvs)
_, original_key_oid_info = key_to_oid_helper.get_db_info()
# Force oper-down on port under test.
port_name = "Ethernet0"
if_name = "eth0"
util.initialize_interface(dvs, port_name, "10.0.0.0/31")
util.set_interface_status(dvs, if_name)
# Create router interface.
router_interface_id, router_intf_key, attr_list = (
self._p4rt_router_intf_obj.create_router_interface()
)
util.verify_response(
self.response_consumer, router_intf_key, attr_list,
"SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count incremented by 1 in Redis DB.
count = 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Create neighbor.
neighbor_id, neighbor_key, attr_list = (
self._p4rt_neighbor_obj.create_neighbor()
)
util.verify_response(
self.response_consumer, neighbor_key, attr_list, "SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count incremented by 1 in Redis DB.
count += 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Create nexthop.
nexthop_id, nexthop_key, attr_list = (
self._p4rt_nexthop_obj.create_next_hop()
)
util.verify_response(
self.response_consumer, nexthop_key, attr_list, "SWSS_RC_SUCCESS")
# Get nexthop_oid of newly created nexthop.
nexthop_oid = self._p4rt_nexthop_obj.get_newly_created_nexthop_oid()
assert nexthop_oid is not None
# Verify that P4RT key to OID count incremented by 1 in Redis DB.
count += 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Create wcmp group with one member.
wcmp_group_id, wcmp_group_key, attr_list = (
self._p4rt_wcmp_group_obj.create_wcmp_group(watch_port=port_name)
)
util.verify_response(
self.response_consumer, wcmp_group_key, attr_list,
"SWSS_RC_SUCCESS")
# Verify that P4RT key to OID count incremented by 1 in Redis DB
# (WCMP group member is not created for operationally down watchport).
count += 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Query application database for wcmp group entries.
wcmp_group_entries = util.get_keys(
self._p4rt_wcmp_group_obj.appl_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
assert len(wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_appl_db_entries_count() + 1
)
# Query application database for newly created wcmp group key.
(status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
wcmp_group_key)
assert status == True
util.verify_attr(fvs, attr_list)
# Query application state database for wcmp group entries.
state_wcmp_group_entries = util.get_keys(
self._p4rt_wcmp_group_obj.appl_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" + self._p4rt_wcmp_group_obj.TBL_NAME)
assert len(state_wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_appl_state_db_entries_count() + 1
)
# Query application state database for newly created wcmp group key.
(status, fvs) = util.get_key(self._p4rt_wcmp_group_obj.appl_state_db,
self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME,
wcmp_group_key)
assert status == True
util.verify_attr(fvs, attr_list)
# Query ASIC database for wcmp group entries.
wcmp_group_entries = util.get_keys(self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME)
assert len(wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_asic_db_group_entries_count()
+ 1
)
# Query ASIC database for newly created wcmp group oid.
wcmp_group_oid = self._p4rt_wcmp_group_obj.get_newly_created_wcmp_group_oid()
assert wcmp_group_oid is not None
(status, fvs) = util.get_key(
self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME,
wcmp_group_oid
)
assert status == True
asic_attr_list = [
(self._p4rt_wcmp_group_obj.SAI_ATTR_GROUP_TYPE,
(self._p4rt_wcmp_group_obj.
SAI_NEXT_HOP_GROUP_TYPE_DYNAMIC_UNORDERED_ECMP))
]
util.verify_attr(fvs, asic_attr_list)
# Query ASIC database for wcmp group member entries.
wcmp_group_member_entries = util.get_keys(
self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME
)
assert len(wcmp_group_member_entries) == (
self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
+ 1
)
# Query ASIC database for wcmp group member entries (expect no entry).
wcmp_group_member_entries = util.get_keys(
self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME
)
assert len(
wcmp_group_member_entries) == self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
# Delete the pruned wcmp group member.
self._p4rt_wcmp_group_obj.remove_app_db_entry(wcmp_group_key)
# Verify that P4RT key to OID count decremented by 1 in Redis DB.
count -= 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Verify that APPL STATE DB is updated.
state_wcmp_group_entries = util.get_keys(
self._p4rt_wcmp_group_obj.appl_state_db,
(self._p4rt_wcmp_group_obj.APP_DB_TBL_NAME + ":" +
self._p4rt_wcmp_group_obj.TBL_NAME))
assert len(state_wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_appl_state_db_entries_count()
)
# Verify that ASIC DB is updated.
wcmp_group_entries = util.get_keys(
self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_TBL_NAME
)
assert len(wcmp_group_entries) == (
self._p4rt_wcmp_group_obj.get_original_asic_db_group_entries_count()
)
wcmp_group_member_entries = util.get_keys(
self._p4rt_wcmp_group_obj.asic_db,
self._p4rt_wcmp_group_obj.ASIC_DB_GROUP_MEMBER_TBL_NAME
)
assert len(wcmp_group_member_entries) == (
self._p4rt_wcmp_group_obj.get_original_asic_db_member_entries_count()
)
# Delete next hop.
self._p4rt_nexthop_obj.remove_app_db_entry(nexthop_key)
# Verify that P4RT key to OID count decremented by 1 in Redis DB.
count -= 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Delete neighbor.
self._p4rt_neighbor_obj.remove_app_db_entry(neighbor_key)
# Verify that P4RT key to OID count decremented by 1 in Redis DB.
count -= 1
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info) + count
# Delete router interface.
self._p4rt_router_intf_obj.remove_app_db_entry(router_intf_key)
# Verify that P4RT key to OID count is same as the original count.
status, fvs = key_to_oid_helper.get_db_info()
assert status == True
assert len(fvs) == len(original_key_oid_info)
| 44.787223 | 119 | 0.636155 |
b5844ca9b70f9902052cdaaedc3bb74b7aaaaad7
| 16,899 |
py
|
Python
|
tests/parsers/winreg_plugins/msie_zones.py
|
ir4n6/plaso
|
010f9cbdfc82e21ed6658657fd09a7b44115c464
|
[
"Apache-2.0"
] | null | null | null |
tests/parsers/winreg_plugins/msie_zones.py
|
ir4n6/plaso
|
010f9cbdfc82e21ed6658657fd09a7b44115c464
|
[
"Apache-2.0"
] | null | null | null |
tests/parsers/winreg_plugins/msie_zones.py
|
ir4n6/plaso
|
010f9cbdfc82e21ed6658657fd09a7b44115c464
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the MSIE Zone settings Windows Registry plugin."""
from __future__ import unicode_literals
import unittest
from plaso.formatters import winreg # pylint: disable=unused-import
from plaso.parsers.winreg_plugins import msie_zones
from tests import test_lib as shared_test_lib
from tests.parsers.winreg_plugins import test_lib
class MsieZoneSettingsPluginTest(test_lib.RegistryPluginTestCase):
  """Tests for Internet Settings Zones plugin.

  Covers both the per-user (NTUSER) and per-machine (SOFTWARE) hives, and
  both the Zones and Lockdown_Zones Registry keys. The expected message
  strings must match the plugin's formatter output byte-for-byte.
  """

  def testFilters(self):
    """Tests the FILTERS class attribute."""
    plugin = msie_zones.MsieZoneSettingsPlugin()

    # The plugin must match Zones and Lockdown_Zones under both HKCU and
    # HKLM Internet Settings keys.
    key_path = (
        'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
        'Internet Settings\\Lockdown_Zones')
    self._AssertFiltersOnKeyPath(plugin, key_path)

    key_path = (
        'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
        'Internet Settings\\Zones')
    self._AssertFiltersOnKeyPath(plugin, key_path)

    key_path = (
        'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\'
        'Internet Settings\\Lockdown_Zones')
    self._AssertFiltersOnKeyPath(plugin, key_path)

    key_path = (
        'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\'
        'Internet Settings\\Zones')
    self._AssertFiltersOnKeyPath(plugin, key_path)

    # An unrelated key must not match.
    self._AssertNotFiltersOnKeyPath(plugin, 'HKEY_LOCAL_MACHINE\\Bogus')

  @shared_test_lib.skipUnlessHasTestFile(['NTUSER-WIN7.DAT'])
  def testProcessNtuserLockdownZones(self):
    """Tests the Process function on a Lockdown_Zones key."""
    test_file_entry = self._GetTestFileEntry(['NTUSER-WIN7.DAT'])
    key_path = (
        'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
        'Internet Settings\\Lockdown_Zones')

    win_registry = self._GetWinRegistryFromFileEntry(test_file_entry)
    registry_key = win_registry.GetKeyByPath(key_path)

    plugin = msie_zones.MsieZoneSettingsPlugin()
    storage_writer = self._ParseKeyWithPlugin(
        registry_key, plugin, file_entry=test_file_entry)

    self.assertEqual(storage_writer.number_of_events, 6)

    events = list(storage_writer.GetEvents())

    # events[1] corresponds to the "0 (My Computer)" zone subkey.
    event = events[1]

    # This should just be the plugin name, as we're invoking it directly,
    # and not through the parser.
    self.assertEqual(event.parser, plugin.plugin_name)

    self.CheckTimestamp(event.timestamp, '2011-09-16 21:12:40.145514')

    regvalue_identifier = '[1200] Run ActiveX controls and plug-ins'
    expected_value = '3 (Not Allowed)'
    self._TestRegvalue(event, regvalue_identifier, expected_value)

    expected_message = (
        '[{0:s}\\0 (My Computer)] '
        '[1200] Run ActiveX controls and plug-ins: 3 (Not Allowed) '
        '[1400] Active scripting: 1 (Prompt User) '
        '[CurrentLevel]: 0 '
        '[Description]: Your computer '
        '[DisplayName]: Computer '
        '[Flags]: 33 '
        '[Icon]: shell32.dll#0016 '
        '[LowIcon]: inetcpl.cpl#005422 '
        '[PMDisplayName]: Computer '
        '[Protected Mode]').format(key_path)
    # The short message is the long message truncated to 77 characters plus
    # an ellipsis.
    expected_short_message = '{0:s}...'.format(expected_message[:77])
    self._TestGetMessageStrings(
        event, expected_message, expected_short_message)

  @shared_test_lib.skipUnlessHasTestFile(['NTUSER-WIN7.DAT'])
  def testProcessNtuserZones(self):
    """Tests the Process function on a Zones key."""
    test_file_entry = self._GetTestFileEntry(['NTUSER-WIN7.DAT'])
    key_path = (
        'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
        'Internet Settings\\Zones')

    win_registry = self._GetWinRegistryFromFileEntry(test_file_entry)
    registry_key = win_registry.GetKeyByPath(key_path)

    plugin = msie_zones.MsieZoneSettingsPlugin()
    storage_writer = self._ParseKeyWithPlugin(
        registry_key, plugin, file_entry=test_file_entry)

    self.assertEqual(storage_writer.number_of_events, 6)

    events = list(storage_writer.GetEvents())

    # events[1] corresponds to the "0 (My Computer)" zone subkey.
    event = events[1]

    # This should just be the plugin name, as we're invoking it directly,
    # and not through the parser.
    self.assertEqual(event.parser, plugin.plugin_name)

    self.CheckTimestamp(event.timestamp, '2011-09-16 21:12:40.145514')

    regvalue_identifier = '[1200] Run ActiveX controls and plug-ins'
    expected_value = '0 (Allow)'
    self._TestRegvalue(event, regvalue_identifier, expected_value)

    expected_message = (
        '[{0:s}\\0 (My Computer)] '
        '[1200] Run ActiveX controls and plug-ins: 0 (Allow) '
        '[1400] Active scripting: 0 (Allow) '
        '[2001] .NET: Run components signed with Authenticode: 3 (Not '
        'Allowed) '
        '[2004] .NET: Run components not signed with Authenticode: 3 (Not '
        'Allowed) '
        '[2007] UNKNOWN: 3 '
        '[CurrentLevel]: 0 '
        '[Description]: Your computer '
        '[DisplayName]: Computer '
        '[Flags]: 33 [Icon]: shell32.dll#0016 '
        '[LowIcon]: inetcpl.cpl#005422 '
        '[PMDisplayName]: Computer '
        '[Protected Mode]').format(key_path)
    # The short message is the long message truncated to 77 characters plus
    # an ellipsis.
    expected_short_message = '{0:s}...'.format(expected_message[:77])
    self._TestGetMessageStrings(
        event, expected_message, expected_short_message)

  @shared_test_lib.skipUnlessHasTestFile(['SOFTWARE'])
  def testProcessSoftwareLockdownZones(self):
    """Tests the Process function on a Lockdown_Zones key."""
    test_file_entry = self._GetTestFileEntry(['SOFTWARE'])
    key_path = (
        'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\'
        'Internet Settings\\Lockdown_Zones')

    win_registry = self._GetWinRegistryFromFileEntry(test_file_entry)
    registry_key = win_registry.GetKeyByPath(key_path)

    plugin = msie_zones.MsieZoneSettingsPlugin()
    storage_writer = self._ParseKeyWithPlugin(
        registry_key, plugin, file_entry=test_file_entry)

    self.assertEqual(storage_writer.number_of_events, 6)

    events = list(storage_writer.GetEvents())

    # events[1] corresponds to the "0 (My Computer)" zone subkey.
    event = events[1]

    # This should just be the plugin name, as we're invoking it directly,
    # and not through the parser.
    self.assertEqual(event.parser, plugin.plugin_name)

    self.CheckTimestamp(event.timestamp, '2011-08-28 21:32:44.937675')

    regvalue_identifier = '[1200] Run ActiveX controls and plug-ins'
    expected_value = '3 (Not Allowed)'
    self._TestRegvalue(event, regvalue_identifier, expected_value)

    expected_message = (
        '[{0:s}\\0 (My Computer)] '
        '[1001] Download signed ActiveX controls: 1 (Prompt User) '
        '[1004] Download unsigned ActiveX controls: 3 (Not Allowed) '
        '[1200] Run ActiveX controls and plug-ins: 3 (Not Allowed) '
        '[1201] Initialize and script ActiveX controls not marked as safe: 3 '
        '(Not Allowed) '
        '[1206] Allow scripting of IE Web browser control: 0 '
        '[1207] Reserved: 3 '
        '[1208] Allow previously unused ActiveX controls to run without '
        'prompt: 3 '
        '[1209] Allow Scriptlets: 3 '
        '[120A] Override Per-Site (domain-based) ActiveX restrictions: 3 '
        '[120B] Override Per-Site (domain-based) ActiveX restrictions: 0 '
        '[1400] Active scripting: 1 (Prompt User) '
        '[1402] Scripting of Java applets: 0 (Allow) '
        '[1405] Script ActiveX controls marked as safe for scripting: 0 '
        '(Allow) '
        '[1406] Access data sources across domains: 0 (Allow) '
        '[1407] Allow Programmatic clipboard access: 1 (Prompt User) '
        '[1408] Reserved: 3 '
        '[1409] UNKNOWN: 3 '
        '[1601] Submit non-encrypted form data: 0 (Allow) '
        '[1604] Font download: 0 (Allow) '
        '[1605] Run Java: 0 '
        '[1606] Userdata persistence: 0 (Allow) '
        '[1607] Navigate sub-frames across different domains: 0 (Allow) '
        '[1608] Allow META REFRESH: 0 (Allow) '
        '[1609] Display mixed content: 1 (Prompt User) '
        '[160A] Include local directory path when uploading files to a '
        'server: 0 '
        '[1802] Drag and drop or copy and paste files: 0 (Allow) '
        '[1803] File Download: 0 (Allow) '
        '[1804] Launching programs and files in an IFRAME: 0 (Allow) '
        '[1805] Launching programs and files in webview: 0 '
        '[1806] Launching applications and unsafe files: 0 '
        '[1807] Reserved: 0 '
        '[1808] Reserved: 0 '
        '[1809] Use Pop-up Blocker: 3 (Not Allowed) '
        '[180A] Reserved: 0 '
        '[180C] Reserved: 0 '
        '[180D] Reserved: 0 '
        '[180E] UNKNOWN: 0 '
        '[180F] UNKNOWN: 0 '
        '[1A00] User Authentication: Logon: 0x00000000 (Automatic logon with '
        'current user name and password) '
        '[1A02] Allow persistent cookies that are stored on your computer: 0 '
        '[1A03] Allow per-session cookies (not stored): 0 '
        '[1A04] Don\'t prompt for client cert selection when no certs exists: '
        '3 (Not Allowed) '
        '[1A05] Allow 3rd party persistent cookies: 0 '
        '[1A06] Allow 3rd party session cookies: 0 '
        '[1A10] Privacy Settings: 0 '
        '[1C00] Java permissions: 0x00000000 (Disable Java) '
        '[2000] Binary and script behaviors: 0x00010000 '
        '(Administrator approved) '
        '[2005] UNKNOWN: 3 '
        '[2100] Open files based on content, not file extension: 3 '
        '(Not Allowed) '
        '[2101] Web sites in less privileged zone can navigate into this '
        'zone: 3 (Not Allowed) '
        '[2102] Allow script initiated windows without size/position '
        'constraints: '
        '3 (Not Allowed) '
        '[2103] Allow status bar updates via script: 3 '
        '[2104] Allow websites to open windows without address or status '
        'bars: 3 '
        '[2105] Allow websites to prompt for information using scripted '
        'windows: 3 '
        '[2106] UNKNOWN: 3 '
        '[2107] UNKNOWN: 3 '
        '[2200] Automatic prompting for file downloads: 3 (Not Allowed) '
        '[2201] Automatic prompting for ActiveX controls: 3 (Not Allowed) '
        '[2301] Use Phishing Filter: 3 '
        '[2400] .NET: XAML browser applications: 0 '
        '[2401] .NET: XPS documents: 0 '
        '[2402] .NET: Loose XAML: 0 '
        '[2500] Turn on Protected Mode: 3 '
        '[2600] Enable .NET Framework setup: 0 '
        '[2700] UNKNOWN: 3 '
        '[2701] UNKNOWN: 3 '
        '[2702] UNKNOWN: 3 '
        '[2703] UNKNOWN: 3 '
        '[2708] UNKNOWN: 0 '
        '[2709] UNKNOWN: 0 '
        '[CurrentLevel]: 0 '
        '[Description]: Your computer '
        '[DisplayName]: Computer '
        '[Flags]: 33 '
        '[Icon]: shell32.dll#0016 '
        '[LowIcon]: inetcpl.cpl#005422 '
        '[PMDisplayName]: Computer '
        '[Protected Mode]').format(key_path)
    # The short message is the long message truncated to 77 characters plus
    # an ellipsis.
    expected_short_message = '{0:s}...'.format(expected_message[:77])
    self._TestGetMessageStrings(
        event, expected_message, expected_short_message)

  @shared_test_lib.skipUnlessHasTestFile(['SOFTWARE'])
  def testProcessSoftwareZones(self):
    """Tests the Process function on a Zones key."""
    test_file_entry = self._GetTestFileEntry(['SOFTWARE'])
    key_path = (
        'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\'
        'Internet Settings\\Zones')

    win_registry = self._GetWinRegistryFromFileEntry(test_file_entry)
    registry_key = win_registry.GetKeyByPath(key_path)

    plugin = msie_zones.MsieZoneSettingsPlugin()
    storage_writer = self._ParseKeyWithPlugin(
        registry_key, plugin, file_entry=test_file_entry)

    self.assertEqual(storage_writer.number_of_events, 6)

    events = list(storage_writer.GetEvents())

    # events[1] corresponds to the "0 (My Computer)" zone subkey.
    event = events[1]

    # This should just be the plugin name, as we're invoking it directly,
    # and not through the parser.
    self.assertEqual(event.parser, plugin.plugin_name)

    self.CheckTimestamp(event.timestamp, '2011-08-28 21:32:44.937675')

    regvalue_identifier = '[1200] Run ActiveX controls and plug-ins'
    expected_value = '0 (Allow)'
    self._TestRegvalue(event, regvalue_identifier, expected_value)

    expected_message = (
        '[{0:s}\\0 (My Computer)] '
        '[1001] Download signed ActiveX controls: 0 (Allow) '
        '[1004] Download unsigned ActiveX controls: 0 (Allow) '
        '[1200] Run ActiveX controls and plug-ins: 0 (Allow) '
        '[1201] Initialize and script ActiveX controls not marked as safe: 1 '
        '(Prompt User) '
        '[1206] Allow scripting of IE Web browser control: 0 '
        '[1207] Reserved: 0 '
        '[1208] Allow previously unused ActiveX controls to run without '
        'prompt: 0 '
        '[1209] Allow Scriptlets: 0 '
        '[120A] Override Per-Site (domain-based) ActiveX restrictions: 0 '
        '[120B] Override Per-Site (domain-based) ActiveX restrictions: 0 '
        '[1400] Active scripting: 0 (Allow) '
        '[1402] Scripting of Java applets: 0 (Allow) '
        '[1405] Script ActiveX controls marked as safe for scripting: 0 '
        '(Allow) '
        '[1406] Access data sources across domains: 0 (Allow) '
        '[1407] Allow Programmatic clipboard access: 0 (Allow) '
        '[1408] Reserved: 0 '
        '[1409] UNKNOWN: 3 '
        '[1601] Submit non-encrypted form data: 0 (Allow) '
        '[1604] Font download: 0 (Allow) '
        '[1605] Run Java: 0 '
        '[1606] Userdata persistence: 0 (Allow) '
        '[1607] Navigate sub-frames across different domains: 0 (Allow) '
        '[1608] Allow META REFRESH: 0 (Allow) '
        '[1609] Display mixed content: 1 (Prompt User) '
        '[160A] Include local directory path when uploading files to a '
        'server: 0 '
        '[1802] Drag and drop or copy and paste files: 0 (Allow) '
        '[1803] File Download: 0 (Allow) '
        '[1804] Launching programs and files in an IFRAME: 0 (Allow) '
        '[1805] Launching programs and files in webview: 0 '
        '[1806] Launching applications and unsafe files: 0 '
        '[1807] Reserved: 0 '
        '[1808] Reserved: 0 '
        '[1809] Use Pop-up Blocker: 3 (Not Allowed) '
        '[180A] Reserved: 0 '
        '[180C] Reserved: 0 '
        '[180D] Reserved: 0 '
        '[180E] UNKNOWN: 0 '
        '[180F] UNKNOWN: 0 '
        '[1A00] User Authentication: Logon: 0x00000000 (Automatic logon with '
        'current user name and password) '
        '[1A02] Allow persistent cookies that are stored on your computer: 0 '
        '[1A03] Allow per-session cookies (not stored): 0 '
        '[1A04] Don\'t prompt for client cert selection when no certs exists: '
        '0 (Allow) '
        '[1A05] Allow 3rd party persistent cookies: 0 '
        '[1A06] Allow 3rd party session cookies: 0 '
        '[1A10] Privacy Settings: 0 '
        '[1C00] Java permissions: 0x00020000 (Medium safety) '
        '[2000] Binary and script behaviors: 0 (Allow) '
        '[2001] .NET: Run components signed with Authenticode: '
        '3 (Not Allowed) '
        '[2004] .NET: Run components not signed with Authenticode: '
        '3 (Not Allowed) '
        '[2005] UNKNOWN: 0 '
        '[2007] UNKNOWN: 3 '
        '[2100] Open files based on content, not file extension: 0 (Allow) '
        '[2101] Web sites in less privileged zone can navigate into this '
        'zone: 3 (Not Allowed) '
        '[2102] Allow script initiated windows without size/position '
        'constraints: 0 (Allow) '
        '[2103] Allow status bar updates via script: 0 '
        '[2104] Allow websites to open windows without address or status '
        'bars: 0 '
        '[2105] Allow websites to prompt for information using scripted '
        'windows: 0 '
        '[2106] UNKNOWN: 0 '
        '[2107] UNKNOWN: 0 '
        '[2200] Automatic prompting for file downloads: 0 (Allow) '
        '[2201] Automatic prompting for ActiveX controls: 0 (Allow) '
        '[2300] Allow web pages to use restricted protocols for active '
        'content: 1 (Prompt User) '
        '[2301] Use Phishing Filter: 3 '
        '[2400] .NET: XAML browser applications: 0 '
        '[2401] .NET: XPS documents: 0 '
        '[2402] .NET: Loose XAML: 0 '
        '[2500] Turn on Protected Mode: 3 '
        '[2600] Enable .NET Framework setup: 0 '
        '[2700] UNKNOWN: 3 '
        '[2701] UNKNOWN: 0 '
        '[2702] UNKNOWN: 3 '
        '[2703] UNKNOWN: 3 '
        '[2708] UNKNOWN: 0 '
        '[2709] UNKNOWN: 0 '
        '[CurrentLevel]: 0 '
        '[Description]: Your computer '
        '[DisplayName]: Computer '
        '[Flags]: 33 '
        '[Icon]: shell32.dll#0016 '
        '[LowIcon]: inetcpl.cpl#005422 '
        '[PMDisplayName]: Computer '
        '[Protected Mode]').format(key_path)
    # The short message is the long message truncated to 77 characters plus
    # an ellipsis.
    expected_short_message = '{0:s}...'.format(expected_message[:77])
    self._TestGetMessageStrings(
        event, expected_message, expected_short_message)
# Allow running the tests in this module directly (python msie_zones.py).
if __name__ == '__main__':
  unittest.main()
| 41.217073 | 79 | 0.639209 |
5aa4f2cec8fa8155bc75bacd9d272dc693b445ba
| 6,243 |
py
|
Python
|
util/plt.py
|
liushuchun/pixel_link_py3
|
1582ab57fe4d8946731c90d6b719773f3d9949b5
|
[
"MIT"
] | 9 |
2019-02-18T03:52:10.000Z
|
2021-09-03T09:22:10.000Z
|
util/plt.py
|
liushuchun/pixel_link_py3
|
1582ab57fe4d8946731c90d6b719773f3d9949b5
|
[
"MIT"
] | 1 |
2020-06-11T10:03:59.000Z
|
2020-06-11T10:03:59.000Z
|
util/plt.py
|
liushuchun/pixel_link_py3
|
1582ab57fe4d8946731c90d6b719773f3d9949b5
|
[
"MIT"
] | 5 |
2018-12-30T05:35:35.000Z
|
2020-03-18T07:07:05.000Z
|
#coding=utf-8
'''
Created on 2016-9-27
@author: dengdan
'''
# Best-effort imports: a missing matplotlib/backend (e.g. on a headless
# machine) should not prevent importing this module; callers that never
# plot remain usable.
try:
    import matplotlib as mpl
    # mpl.use('Agg')
    mpl.use('TkAgg')  # interactive backend; the 'Agg' line above is the headless alternative
    import matplotlib.pyplot as plt
    import numpy as np
    import util
except Exception as e:
    # Report the import failure but keep going.
    print(e)
def hist(x, title = None, normed = False, show = True, save = False,
         save_path = None, bin_count = 100, bins = None,
         cumulative = False):
    """Plot a histogram of the values in ``x``.

    Args:
        x: array-like data; multi-dimensional input is flattened first.
        title: figure title, also used as the file name when saving.
        normed: passed through to ``plt.hist`` (deprecated in newer
            matplotlib; kept for backward compatibility).
        show: whether to display the figure.
        save: whether to save the figure as ``<save_path>/<title>.png``.
        save_path: directory for the saved image; required when ``save``.
        bin_count: number of bins used when ``bins`` is not given.
        bins: explicit bin edges; overrides ``bin_count``.
        cumulative: plot a cumulative histogram.

    Raises:
        ValueError: if ``save`` is True but ``save_path`` is None.
    """
    x = np.asarray(x)
    if len(np.shape(x)) > 1:
        # x = np.reshape(x, np.prod(x.shape))
        x = util.np.flatten(x)
    # PEP 8: compare against None with `is`, not `==`.
    if bins is None:
        bins = np.linspace(start = min(x), stop = max(x), num = bin_count, endpoint = True, retstep = False)
    plt.figure(num = title)
    plt.hist(x, bins, normed = normed, cumulative = cumulative)
    if save:
        if save_path is None:
            raise ValueError('save_path must be specified when save is True')
        path = util.io.join_path(save_path, title + '.png')
        save_image(path)
    if show:
        plt.show()
    #util.img.imshow(title, path, block = block)
def plot_solver_data(solver_path):
    """Plot the training/validation loss and accuracy curves stored in a
    solver dump loaded via ``util.io.load``."""
    data = util.io.load(solver_path)
    train_losses = data.training_losses
    train_accs = data.training_accuracies
    val_losses = data.val_losses
    val_accs = data.val_accuracies
    plt.figure(solver_path)
    xs = range(len(train_losses))
    plt.plot(xs, train_losses, 'r-', label = 'Training Loss')
    if len(train_accs) > 0:
        plt.plot(xs, train_accs, 'r--', label = 'Training Accuracy')
    if len(val_losses) > 0:
        xs = range(len(val_losses))
        plt.plot(xs, val_losses, 'g-', label = 'Validation Loss')
    # Intentionally reuses the last `xs` (validation x-axis when available,
    # training x-axis otherwise), matching the original control flow.
    if len(val_accs) > 0:
        plt.plot(xs, val_accs, 'g--', label = 'Validation Accuracy')
    plt.legend()
    plt.show()
def rectangle(xy, width, height, color = 'red', linewidth = 1, fill = False, alpha = None, axis = None):
    """Build a matplotlib rectangle patch and, when ``axis`` is given,
    attach it to that axis.  Returns the patch."""
    import matplotlib.patches as patches
    patch = patches.Rectangle(
        xy = xy,
        width = width,
        height = height,
        alpha = alpha,
        color = color,
        fill = fill,
        linewidth = linewidth,
    )
    if axis is not None:
        axis.add_patch(patch)
    return patch
# Short alias kept for backwards compatibility.
rect = rectangle
def set_pos(x, y):
    """Move the current figure window to screen position ``(x, y)``.

    The window API differs per matplotlib backend, so dispatch on it.
    """
    manager = plt.get_current_fig_manager()
    backend_name = mpl.get_backend()
    if backend_name == 'TkAgg':
        manager.window.wm_geometry("+%d+%d" % (x, y))
    elif backend_name == 'WXAgg':
        manager.window.SetPosition((x, y))
    else:
        # QT and GTK managers expose move(); setGeometry would work too.
        manager.window.move(x, y)
def maximize_figure():
    """Toggle full-screen mode on the current figure's window."""
    plt.get_current_fig_manager().full_screen_toggle()
def line(xy_start, xy_end, color = 'red', linewidth = 1, alpha = None, axis = None):
    """Build a ``Line2D`` from ``xy_start`` to ``xy_end`` (sampled at 100
    points) and, when ``axis`` is given, attach it.  Returns the line."""
    from matplotlib.lines import Line2D
    sample_count = 100
    xs = np.linspace(xy_start[0], xy_end[0], num = sample_count)
    ys = np.linspace(xy_start[1], xy_end[1], num = sample_count)
    segment = Line2D(
        xdata = xs,
        ydata = ys,
        alpha = alpha,
        color = color,
        linewidth = linewidth,
    )
    if axis is not None:
        axis.add_line(segment)
    return segment
def imshow(title = None, img = None, gray = False):
    """Display a single image; thin wrapper around :func:`show_images`."""
    show_images(images = [img], titles = [title], gray = gray)
def show_images(images, titles = None, shape = None, share_axis = False,
                bgr2rgb = False, maximized = False,
                show = True, gray = False, save = False, color_bar = False,
                path = None, axis_off = False, vertical = False, subtitle = None):
    """Show (and optionally save) a grid of images.

    Args:
        images: list of images (2-D grayscale or 3-D color arrays).
        titles: per-image titles, same length as ``images``.
        shape: (rows, cols) of the grid; a negative dimension is derived
            from the other one.  Defaults to a single row (or a single
            column when ``vertical``).
        share_axis: share x/y axes with the first subplot.
        bgr2rgb: convert each image from BGR to RGB before display.
        maximized: show the figure full-screen.
        show: call ``plt.show()`` at the end.
        gray: render 2-D images with the gray colormap.
        save: save the figure; a temp path is generated when ``path`` is None.
        color_bar: add a colorbar next to 2-D images.
        path: output file path when saving.
        axis_off: hide the axes.
        vertical: stack images vertically when ``shape`` is not given.
        subtitle: figure-level super title.

    Returns:
        The save path (None when not saving and no ``path`` was given).
    """
    plt.close('all')
    if shape is None:
        if vertical:
            shape = (len(images), 1)
        else:
            shape = (1, len(images))
    shape = list(shape)
    # A negative dimension means "derive it from the other one".  Use integer
    # ceiling division: the original used float division (broken under
    # Python 3 because subplot2grid needs ints) and `len(images + shape[0])`,
    # which is a TypeError (list + int).
    if shape[0] < 0:
        shape[0] = -(-len(images) // shape[1])
    elif shape[1] < 0:
        shape[1] = -(-len(images) // shape[0])
    ret_axes = []
    ax0 = None
    plt.figure()
    for idx, img in enumerate(images):
        if bgr2rgb:
            img = util.img.bgr2rgb(img)
        loc = (idx // shape[1], idx % shape[1])
        if idx == 0:
            ax = plt.subplot2grid(shape, loc)
            ax0 = ax
        else:
            if share_axis:
                ax = plt.subplot2grid(shape, loc, sharex = ax0, sharey = ax0)
            else:
                ax = plt.subplot2grid(shape, loc)
        if len(np.shape(img)) == 2 and gray:
            img_ax = ax.imshow(img, cmap = 'gray')
        else:
            img_ax = ax.imshow(img)
        if len(np.shape(img)) == 2 and color_bar:
            plt.colorbar(img_ax, ax = ax)
        if titles is not None:
            ax.set_title(titles[idx])
        if axis_off:
            plt.axis('off')
        ret_axes.append(ax)
    if subtitle is not None:
        set_subtitle(subtitle)
    if maximized:
        maximize_figure()
    if save:
        if path is None:
            path = util.get_temp_path()
        save_image(path)
    if show:
        plt.show()
    return path
def save_image(path, img = None, dpi = 150):
    """Save ``img`` to ``path``, or the current figure when ``img`` is None.

    Parent directories are created as needed.
    """
    path = util.io.get_absolute_path(path)
    util.io.make_parent_dir(path)
    if img is not None:
        plt.imsave(path, img, format = 'png')
    else:
        plt.gcf().savefig(path, dpi = dpi)
# OpenCV-style alias.
imwrite = save_image
def to_ROI(ax, ROI):
    """Restrict ``ax`` to the region ``((xmin, ymin), (xmax, ymax))``."""
    (xmin, ymin), (xmax, ymax) = ROI
    ax.set_xlim(xmin, xmax)
    # y limits inverted so the image origin stays at the top-left corner.
    ax.set_ylim(ymax, ymin)
def set_subtitle(title, fontsize = 12):
    """Set the figure-level super title of the current figure."""
    figure = plt.gcf()
    figure.suptitle(title, fontsize = fontsize)
def show(maximized = False):
    """Display all pending figures, optionally full-screen first."""
    if maximized:
        maximize_figure()
    plt.show()
def draw():
    """Force a redraw of the current figure's canvas."""
    canvas = plt.gcf().canvas
    canvas.draw()
def get_random_line_style():
    """Return a random matplotlib style string such as ``'r-'``.

    Color and line type are drawn independently via ``util.rand.randint``.
    """
    colors = ['r', 'g', 'b']
    line_types = ['-']  # '--', '-.', ':' intentionally disabled
    color = colors[util.rand.randint(len(colors))]
    line_type = line_types[util.rand.randint(len(line_types))]
    return color + line_type
| 28.248869 | 108 | 0.557424 |
1d2022d99d676cee00b7c31c7c25ba0b5f3d6e38
| 16,356 |
py
|
Python
|
server/athenian/api/__main__.py
|
athenianco/athenian-api
|
dd5556101a8c49703d6b0516e4268b9e8d8eda5b
|
[
"RSA-MD"
] | 9 |
2020-10-11T22:12:03.000Z
|
2022-02-26T02:16:45.000Z
|
server/athenian/api/__main__.py
|
athenianco/athenian-api
|
dd5556101a8c49703d6b0516e4268b9e8d8eda5b
|
[
"RSA-MD"
] | 246 |
2019-12-05T06:37:30.000Z
|
2022-03-29T10:00:07.000Z
|
server/athenian/api/__main__.py
|
athenianco/athenian-api
|
dd5556101a8c49703d6b0516e4268b9e8d8eda5b
|
[
"RSA-MD"
] | 5 |
2019-12-04T22:38:05.000Z
|
2021-02-26T00:50:04.000Z
|
#!/usr/bin/env python3
import argparse
import asyncio
from datetime import datetime, timezone
import getpass
import logging
import os
from pathlib import Path
import re
import socket
import sys
from typing import Callable, Optional
import aiohttp.web
from aiohttp.web_runner import GracefulExit
import aiomcache
from connexion.decorators import validation
from flogging import flogging
import jinja2
import morcilla
import numpy
import pandas
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from sentry_sdk.integrations.executing import ExecutingIntegration
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.pure_eval import PureEvalIntegration
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
import sentry_sdk.utils
from slack_sdk.web.async_client import AsyncWebClient as SlackWebClient
import uvloop
from athenian.api import metadata
from athenian.api.auth import Auth0
from athenian.api.connexion import AthenianApp
from athenian.api.db import check_schema_versions
from athenian.api.faster_pandas import patch_pandas
from athenian.api.kms import AthenianKMS
from athenian.api.preloading.cache import MemoryCachePreloader
from athenian.api.prometheus import PROMETHEUS_REGISTRY_VAR_NAME
from athenian.api.segment import SegmentClient
from athenian.api.tracing import MAX_SENTRY_STRING_LENGTH
# Global Sentry tracing sample rate override
# Called per request by setup_context's sample_trace(); returning a number
# overrides the configured sampling rate, returning None falls through to it.
# Kept as a module-level rebindable lambda so it can be replaced at runtime.
trace_sample_rate_manhole = lambda request: None # noqa(E731)
def parse_args() -> argparse.Namespace:
    """Parse the command line and return the parsed arguments."""
    # Combine both formatters: show defaults for options while preserving the
    # manual layout of the epilog text below.
    class Formatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawTextHelpFormatter):
        pass
    parser = argparse.ArgumentParser(metadata.__package__, epilog="""environment variables:
SENTRY_KEY Sentry token: [email protected]
SENTRY_PROJECT Sentry project name
AUTH0_DOMAIN Auth0 domain, usually *.auth0.com
AUTH0_AUDIENCE JWT audience - the backref URL, usually the website address
AUTH0_CLIENT_ID Client ID of the Auth0 Machine-to-Machine Application
AUTH0_CLIENT_SECRET Client Secret of the Auth0 Machine-to-Machine Application
ATHENIAN_DEFAULT_USER Default user ID that is assigned to public requests
ATHENIAN_INVITATION_KEY Passphrase to encrypt the invitation links
ATHENIAN_INVITATION_URL_PREFIX
String with which any invitation URL starts, e.g. https://app.athenian.co/i/
ATHENIAN_MAX_CLIENT_SIZE Reject HTTP requests if their size in bytes is bigger than this value
ATHENIAN_MAX_LOAD Maximum load in abstract units the server accepts before rejecting requests with HTTP 503; the default value is 12
GOOGLE_KMS_PROJECT Name of the project with Google Cloud Key Management Service
GOOGLE_KMS_KEYRING Name of the keyring in Google Cloud Key Management Service
GOOGLE_KMS_KEYNAME Name of the key in the keyring in Google Cloud Key Management Service
GOOGLE_KMS_SERVICE_ACCOUNT_JSON (optional)
Path to the JSON file with Google Cloud credentions to access KMS
ATHENIAN_SEGMENT_KEY (optional)
Enable user action tracking in Segment.
GOOGLE_ANALYTICS (optional)
Track Swagger UI by Google Analytics tag.
""", # noqa
                                     formatter_class=Formatter)
    # Demote noisy but harmless access-log lines (health checks, dropped
    # Sentry events) to DEBUG level.
    def level_from_msg(msg: str) -> Optional[str]:
        if "GET /status" in msg or "before send dropped event" in msg:
            # these aiohttp access logs are annoying
            return "debug"
        return None
    flogging.add_logging_args(parser, level_from_msg=level_from_msg)
    parser.add_argument("--host", default=[],
                        help="HTTP server host. May be specified multiple times.",
                        action="append")
    parser.add_argument("--port", type=int, default=8080, help="HTTP server port.")
    parser.add_argument("--metadata-db",
                        default="postgresql://postgres:[email protected]:5432/metadata",
                        help="Metadata (GitHub, JIRA, etc.) DB connection string in SQLAlchemy "
                             "format. This DB is readonly.")
    parser.add_argument("--state-db",
                        default="postgresql://postgres:[email protected]:5432/state",
                        help="Server state (user settings, teams, etc.) DB connection string in "
                             "SQLAlchemy format. This DB is read/write.")
    parser.add_argument("--precomputed-db",
                        default="postgresql://postgres:[email protected]:5432/precomputed",
                        help="Precomputed objects augmenting the metadata DB and reducing "
                             "the amount of online work. DB connection string in SQLAlchemy "
                             "format. This DB is read/write.")
    parser.add_argument("--persistentdata-db",
                        default="postgresql://postgres:[email protected]:5432/persistentdata",
                        help="Pushed and pulled source data that Athenian owns. DB connection "
                             "string in SQLAlchemy format. This DB is read/write.")
    parser.add_argument("--memcached", required=False,
                        help="memcached (users profiles, preprocessed metadata cache) address, "
                             "for example, 0.0.0.0:11211")
    parser.add_argument("--preload-dataframes", required=False, action="store_true",
                        help="Whether to preload DB tables in memory and refresh periodically.")
    parser.add_argument("--preload-refresh-frequency", type=int, default=60, required=False,
                        help="Frequency at which to refresh the preloaded tables in minutes.")
    parser.add_argument("--ui", action="store_true", help="Enable the REST UI.")
    parser.add_argument("--no-google-kms", action="store_true",
                        help="Skip Google Key Management Service initialization. Personal Access "
                             "Tokens will not work.")
    parser.add_argument("--force-user",
                        help="Bypass user authorization and execute all requests on behalf of "
                             "this user.")
    return parser.parse_args()
def setup_context(log: logging.Logger) -> None:
    """Log general info about the running process and configure Sentry."""
    log.info("%s", sys.argv)
    log.info("Version: %s", metadata.__version__)
    log.info("Local time: %s", datetime.now())
    log.info("UTC time: %s", datetime.now(timezone.utc))
    commit = getattr(metadata, "__commit__", None)
    if commit:
        log.info("Commit: %s", commit)
    build_date = getattr(metadata, "__date__", None)
    if build_date:
        log.info("Image built on %s", build_date)
    username = getpass.getuser()
    hostname = socket.getfqdn()
    log.info("%s@%s -> %d", username, hostname, os.getpid())
    if dev_id := os.getenv("ATHENIAN_DEV_ID"):
        log.info("Developer: %s", dev_id)
    # Compact pandas/numpy reprs so logged/reported frames stay short.
    pandas.set_option("display.max_rows", 20)
    pandas.set_option("display.large_repr", "info")
    pandas.set_option("display.memory_usage", False)
    info = pandas.io.formats.info.BaseInfo.info
    # Monkeypatch DataFrame.info rendering: truncate reports longer than 512
    # chars to their first three lines.
    def _short_info_df(self) -> None:
        info(self)
        text = self.buf.getvalue()
        if len(text) > 512:
            text = "\n".join(text.split("\n")[:3]).rstrip(":")
            self.buf.seek(0)
            self.buf.truncate(0)
            self.buf.write(text)
    pandas.io.formats.info.BaseInfo.info = _short_info_df
    numpy.set_printoptions(threshold=10, edgeitems=1)
    # Quiet the DB driver logger relative to our own level; downgrade
    # connexion validation errors to warnings.
    if (level := log.getEffectiveLevel()) >= logging.INFO:
        morcilla.core.logger.setLevel(level + 10)
    validation.logger.error = validation.logger.warning
    sentry_key, sentry_project = os.getenv("SENTRY_KEY"), os.getenv("SENTRY_PROJECT")
    def warn(env_name):
        logging.getLogger(metadata.__package__).warning(
            "Skipped Sentry initialization: %s envvar is missing", env_name)
    # Sentry is optional: bail out (with a warning) when not configured.
    if not sentry_key:
        warn("SENTRY_KEY")
        return
    if not sentry_project:
        warn("SENTRY_PROJECT")
        return
    sentry_env = os.getenv("SENTRY_ENV", "development")
    log.info("Sentry: https://[secure]@sentry.io/%s#%s" % (sentry_project, sentry_env))
    traces_sample_rate = float(os.getenv(
        "SENTRY_SAMPLING_RATE", "0.2" if sentry_env != "development" else "0"))
    if traces_sample_rate > 0:
        log.info("Sentry tracing is ON: sampling rate %.2f", traces_sample_rate)
    # Endpoints never traced / traced at 1% of the base rate.
    disabled_transactions_re = re.compile("|".join([
        "openapi.json", "ui(/|$)",
    ]))
    throttled_transactions_re = re.compile("|".join([
        "invite/progress", "events/(?!clear_cache)",
    ]))
    api_path_re = re.compile(r"/v\d+/")
    # Per-request sampling decision; trace_sample_rate_manhole can override.
    def sample_trace(context) -> float:
        request: aiohttp.web.Request = context["aiohttp_request"]
        if (override_sample_rate := trace_sample_rate_manhole(request)) is not None:
            return override_sample_rate
        if request.method == "OPTIONS":
            return 0
        path = request.path
        if not (match := api_path_re.match(path)):
            return 0
        path = path[match.end():]
        if disabled_transactions_re.match(path):
            return 0
        if throttled_transactions_re.match(path):
            return traces_sample_rate / 100
        return traces_sample_rate
    sentry_log = logging.getLogger("sentry_sdk.errors")
    sentry_log.handlers.clear()
    flogging.trailing_dot_exceptions.add(sentry_log.name)
    sentry_sdk.init(
        environment=sentry_env,
        dsn="https://%[email protected]/%s" % (sentry_key, sentry_project),
        integrations=[AioHttpIntegration(transaction_style="method_and_path_pattern"),
                      LoggingIntegration(level=logging.INFO, event_level=logging.ERROR),
                      SqlalchemyIntegration(), PureEvalIntegration(), ExecutingIntegration()],
        auto_enabling_integrations=False,
        send_default_pii=True,
        debug=sentry_env != "production",
        max_breadcrumbs=20,
        attach_stacktrace=True,
        request_bodies="always",
        release="%s@%s" % (metadata.__package__, metadata.__version__),
        traces_sampler=sample_trace,
    )
    sentry_sdk.utils.MAX_STRING_LENGTH = MAX_SENTRY_STRING_LENGTH
    # NOTE(review): sentry_sdk.serializer is accessed without an explicit
    # import here -- relies on it being loaded by sentry_sdk.init; confirm.
    sentry_sdk.serializer.MAX_DATABAG_BREADTH = 16  # e.g., max number of locals in a stack frame
    with sentry_sdk.configure_scope() as scope:
        if sentry_env == "development":
            scope.set_tag("username", username)
        if dev_id:
            scope.set_tag("developer", dev_id)
        if commit is not None:
            scope.set_tag("commit", commit)
        if build_date is not None:
            scope.set_tag("build_date", build_date)
def create_memcached(addr: str, log: logging.Logger) -> Optional[aiomcache.Client]:
    """Create the memcached client, if possible.

    Returns None when no address is configured.  A background task probes the
    server version with up to 3 attempts (1s apart); on final failure it
    reports to Sentry and raises GracefulExit to stop the server.
    """
    if not addr:
        return None
    host, port = addr.split(":")
    port = int(port)
    client = aiomcache.Client(host, port)
    async def print_memcached_version():
        version = "N/A"
        attempts = 3
        for attempt in range(attempts):
            try:
                version = await client.version()
            except Exception as e:
                last_attempt = attempt >= attempts - 1
                log.log(logging.CRITICAL if last_attempt else logging.WARNING,
                        "[%d / %d] memcached: %s: %s",
                        attempt + 1, attempts, type(e).__name__, e)
                if last_attempt:
                    sentry_sdk.capture_exception(e)
                    raise GracefulExit()
                else:
                    await asyncio.sleep(1)
            else:
                break
        log.info("memcached: %s on %s", version.decode(), addr)
        # Drop the probe-future attribute (set just below) once the probe
        # has finished.
        delattr(client, "version_future")
    # Requires a running event loop; the future is parked on the client so it
    # is not garbage-collected mid-flight.
    client.version_future = asyncio.ensure_future(print_memcached_version())
    return client
def create_auth0_factory(force_user: str) -> Callable[[], Auth0]:
    """Return a factory producing Auth0 instances pinned to ``force_user``."""
    def make_auth0(**kwargs):
        # Forward everything the caller supplies, adding the fixed user.
        return Auth0(force_user=force_user, **kwargs)
    return make_auth0
def create_slack(log: logging.Logger) -> Optional[SlackWebClient]:
    """Initialize the Slack client to post notifications about new accounts, user, and \
    installations."""
    slack_token = os.getenv("SLACK_API_TOKEN")
    # Slack integration is optional: no token, no client.
    if not slack_token:
        return None
    slack_client = SlackWebClient(token=slack_token)
    general_channel = os.getenv("SLACK_CHANNEL", "#updates-installations")
    if not general_channel:
        raise ValueError("SLACK_CHANNEL may not be empty if SLACK_API_TOKEN exists")
    # Message bodies are rendered from Jinja2 templates in the sibling
    # "slack" directory; "env" and "now" are exposed to every template.
    slack_client.jinja2 = jinja2.Environment(
        loader=jinja2.FileSystemLoader(Path(__file__).parent / "slack"),
        autoescape=False, trim_blocks=True, lstrip_blocks=True,
    )
    slack_client.jinja2.globals["env"] = os.getenv("SENTRY_ENV", "development")
    slack_client.jinja2.globals["now"] = lambda: datetime.now(timezone.utc)
    # Best-effort send: failures (exception or non-200) are logged, never
    # raised, so notification problems cannot break request handling.
    async def post(template, channel="", **kwargs) -> None:
        try:
            response = await slack_client.chat_postMessage(
                channel=channel or general_channel,
                text=slack_client.jinja2.get_template(template).render(**kwargs))
            error_name = error_data = ""
        except Exception as e:
            error_name = type(e).__name__
            error_data = str(e)
            response = None
        if response is not None and response.status_code != 200:
            error_name = "HTTP %d" % response.status_code
            error_data = response.data
        if error_name:
            log.error("Could not send a Slack message to %s: %s: %s",
                      channel, error_name, error_data)
    # Attach the helper directly on the client instance.
    slack_client.post = post
    log.info("Slack messaging to %s is enabled 👍", general_channel)
    return slack_client
def create_segment() -> Optional[SegmentClient]:
    """Build the Segment tracking client, or None when no API key is set."""
    key = os.getenv("ATHENIAN_SEGMENT_KEY")
    return SegmentClient(key) if key else None
# Key under which the MemoryCachePreloader is stored in the aiohttp app.
PRELOADER_VAR_NAME = "mc_preloader"
def setup_preloading(app: AthenianApp, preload_refresh_frequency: int,
                     log: logging.Logger) -> None:
    """Initialize the memory cache and schedule loading the DB tables."""
    log.info("Preloading DB tables to memory is enabled")
    app.app[PRELOADER_VAR_NAME] = mc_preloader = MemoryCachePreloader(
        preload_refresh_frequency,
        prometheus_registry=app.app[PROMETHEUS_REGISTRY_VAR_NAME])
    # Start preloading once the databases are connected.
    app.on_dbs_connected(mc_preloader.preload)
    # NOTE(review): aiohttp passes the Application as the single positional
    # argument, which lands in the parameter named `self` here -- works, but
    # the name is misleading.
    async def shutdown(self, app: Optional[aiohttp.web.Application] = None) -> None:
        await mc_preloader.stop()
    # Insert first so the preloader stops before other shutdown handlers.
    app.app.on_shutdown.insert(0, shutdown)
def main() -> Optional[AthenianApp]:
    """Server's entry point.

    Wires together logging, Sentry, DB schema checks, caches, auth, KMS,
    Slack and Segment clients, then runs the aiohttp application.
    Returns None (instead of the app) when the DB schema check fails.
    """
    uvloop.install()
    args = parse_args()
    log = logging.getLogger(metadata.__package__)
    setup_context(log)
    # Refuse to start against incompatible database schemas.
    if not check_schema_versions(args.metadata_db,
                                 args.state_db,
                                 args.precomputed_db,
                                 args.persistentdata_db,
                                 log):
        return None
    patch_pandas()
    cache = create_memcached(args.memcached, log)
    auth0_cls = create_auth0_factory(args.force_user)
    kms_cls = None if args.no_google_kms else AthenianKMS
    slack = create_slack(log)
    app = AthenianApp(
        mdb_conn=args.metadata_db,
        sdb_conn=args.state_db,
        pdb_conn=args.precomputed_db,
        rdb_conn=args.persistentdata_db,
        ui=args.ui, auth0_cls=auth0_cls, kms_cls=kms_cls, cache=cache, slack=slack,
        client_max_size=int(os.getenv("ATHENIAN_MAX_CLIENT_SIZE", 256 * 1024)),
        max_load=float(os.getenv("ATHENIAN_MAX_LOAD", 12)),
        segment=create_segment(),
        google_analytics=os.getenv("GOOGLE_ANALYTICS", ""),
    )
    if args.preload_dataframes:
        setup_preloading(app, args.preload_refresh_frequency, log)
    # Blocks until the server exits.
    app.run(host=args.host, port=args.port, print=lambda s: log.info("\n" + s))
    return app
if __name__ == "__main__":
    # Use sys.exit instead of the bare exit() builtin: exit() is injected by
    # the `site` module and may be absent (python -S, frozen builds).
    sys.exit(main() is None)  # "1" for an error, "0" for a normal return
| 43.5 | 141 | 0.658902 |
1944d918ab68a02a2999c70614bb455e5c5a7e33
| 417 |
py
|
Python
|
mkt/webpay/forms.py
|
spasovski/zamboni
|
c7f4714029e3b2dc918ddfc2103f8e051193c14d
|
[
"BSD-3-Clause"
] | 1 |
2021-07-29T00:51:09.000Z
|
2021-07-29T00:51:09.000Z
|
mkt/webpay/forms.py
|
imclab/olympia
|
35bc9c484e384bafab520ca8b5d5b0f8da5b62c0
|
[
"BSD-3-Clause"
] | null | null | null |
mkt/webpay/forms.py
|
imclab/olympia
|
35bc9c484e384bafab520ca8b5d5b0f8da5b62c0
|
[
"BSD-3-Clause"
] | null | null | null |
from django import forms
import happyforms
from mkt.api.forms import SluggableModelChoiceField
from mkt.webapps.models import Webapp
class PrepareForm(happyforms.Form):
    """Form validating the target app, resolved by slug (``app_slug``) or pk,
    restricted to valid webapps."""
    app = SluggableModelChoiceField(queryset=Webapp.objects.valid(),
                                    sluggable_to_field_name='app_slug')
class FailureForm(happyforms.Form):
    """Form carrying a failing URL and the number of attempts made."""
    url = forms.CharField()
    attempts = forms.IntegerField()
| 23.166667 | 71 | 0.724221 |
4cd3cf17e56716c9b4c4411c3722a44de8095990
| 3,555 |
py
|
Python
|
src/pyuwds3/reasoning/estimation/shape_estimator.py
|
LAAS-HRI/uwds3
|
42390f62ed5701a32710341b01faa10efc448078
|
[
"MIT"
] | 2 |
2020-08-19T06:15:14.000Z
|
2021-05-23T09:55:18.000Z
|
src/pyuwds3/reasoning/estimation/shape_estimator.py
|
LAAS-HRI/uwds3
|
42390f62ed5701a32710341b01faa10efc448078
|
[
"MIT"
] | 5 |
2021-01-06T09:00:35.000Z
|
2021-01-20T13:22:19.000Z
|
src/pyuwds3/reasoning/estimation/shape_estimator.py
|
LAAS-HRI/uwds3
|
42390f62ed5701a32710341b01faa10efc448078
|
[
"MIT"
] | 2 |
2020-11-18T17:34:43.000Z
|
2021-05-23T16:14:17.000Z
|
import cv2
import rospy
from ...types.shape.sphere import Sphere
from ...types.shape.cylinder import Cylinder
import numpy as np
# Number of k-means clusters used when extracting the dominant color.
K = 3
class ShapeEstimator(object):
    """Estimate 3D primitive shapes (cylinders/spheres) for tracked objects."""

    def estimate(self, rgb_image, objects_tracks, camera):
        """Attach or refresh a shape on each confirmed track.

        Non-person objects without a shape get a cylinder (or a sphere for
        face/hand/sports_ball) colored by the dominant bbox color; persons
        always get a 0.5 m wide green cylinder whose height tracks the pose.
        Per-track errors are logged and skipped (best effort).
        """
        for o in objects_tracks:
            try:
                if o.is_confirmed() and o.bbox.height() > 0:
                    if o.bbox.depth is not None:
                        if o.label != "person":
                            if not o.has_shape():
                                # Spherical primitive for round-ish objects,
                                # cylinder otherwise.
                                if o.label in ("face", "hand", "sports_ball"):
                                    shape = self.compute_sphere_from_bbox(o.bbox, camera)
                                else:
                                    shape = self.compute_cylinder_from_bbox(o.bbox, camera)
                                shape.pose.pos.x = .0
                                shape.pose.pos.y = .0
                                shape.pose.pos.z = .0
                                shape.color = self.compute_dominant_color(rgb_image, o.bbox)
                                o.shapes.append(shape)
                        else:
                            shape = self.compute_cylinder_from_bbox(o.bbox, camera)
                            z = o.pose.pos.z
                            shape.pose.pos.x = .0
                            shape.pose.pos.y = .0
                            # Offset uses the pre-adjustment cylinder height.
                            shape.pose.pos.z = -(z - shape.h/2.0)/2.0
                            if not o.has_shape():
                                shape.color = [0, 200, 0, 1]
                                shape.w = 0.50
                                shape.h = z + shape.h/2.0
                                o.shapes.append(shape)
                            else:
                                o.shapes[0].w = 0.50
                                shape.h = z + shape.h/2.0
                                o.shapes[0].h = shape.h
            except Exception as e:
                # Keep processing the remaining tracks.
                rospy.logwarn(e)

    def compute_dominant_color(self, rgb_image, bbox):
        """Dominant color of the bbox crop as RGBA in [0, 1].

        The crop is downsampled to 10x10 and clustered with k-means (K
        clusters); the center of the most-populated cluster is returned.
        The original code took ``centers[0]``, an arbitrary cluster, not
        the dominant one.
        """
        xmin = int(bbox.xmin)
        ymin = int(bbox.ymin)
        h = int(bbox.height())
        w = int(bbox.width())
        crop = rgb_image[ymin:ymin+h, xmin:xmin+w].copy()
        crop = cv2.resize(crop, (10, 10))
        data = np.float32(crop.reshape((-1, 3)))
        criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
        _, labels, centers = cv2.kmeans(data, K, None, criteria, 10, cv2.KMEANS_RANDOM_CENTERS)
        dominant_idx = int(np.bincount(labels.flatten()).argmax())
        dominant_color = centers[dominant_idx].astype(np.int32) / 255.0
        color = np.ones(4)  # alpha stays 1.0
        color[:3] = dominant_color
        return color

    def _metric_size(self, bbox, camera):
        """Back-project the bbox pixel size to meters at the bbox depth
        using the pinhole model (fx, fy from the camera matrix)."""
        camera_matrix = camera.camera_matrix()
        z = bbox.depth
        fx = camera_matrix[0][0]
        fy = camera_matrix[1][1]
        return bbox.width() * z / fx, bbox.height() * z / fy

    def compute_cylinder_from_bbox(self, bbox, camera):
        """Cylinder whose width/height match the bbox footprint in meters."""
        w, h = self._metric_size(bbox, camera)
        return Cylinder(w, h)

    def compute_sphere_from_bbox(self, bbox, camera):
        """Sphere whose diameter is the larger metric bbox dimension."""
        w, h = self._metric_size(bbox, camera)
        return Sphere(max(w, h))
| 39.065934 | 101 | 0.474543 |
a755051de50ca90af38611ed390b74ec2827fe2a
| 25,549 |
py
|
Python
|
bayarea/datasources.py
|
ual/bayarea
|
e3fdee835545434a0c7b9742636d03795c749ff8
|
[
"BSD-3-Clause"
] | 2 |
2018-09-05T23:27:36.000Z
|
2018-09-05T23:27:38.000Z
|
bayarea/datasources.py
|
ual/bayarea
|
e3fdee835545434a0c7b9742636d03795c749ff8
|
[
"BSD-3-Clause"
] | 7 |
2018-06-13T16:54:55.000Z
|
2018-08-14T00:09:40.000Z
|
bayarea/datasources.py
|
ual/bayarea
|
e3fdee835545434a0c7b9742636d03795c749ff8
|
[
"BSD-3-Clause"
] | 2 |
2018-03-19T19:59:20.000Z
|
2018-08-31T23:09:51.000Z
|
import orca
import numpy as np
import pandas as pd
# data documentation: https://berkeley.app.box.com/notes/282712547032
# Set data directory
d = './data/'
# An orca 'data_directory' injectable, when registered, overrides the default.
if 'data_directory' in orca.list_injectables():
    d = orca.get_injectable('data_directory')
@orca.injectable('store', cache=True)
def hdfstore():
    """Read-only handle on the model-data HDF5 store."""
    return pd.HDFStore(d + "model_data.h5", mode='r')
@orca.table('parcels', cache=True)
def parcels(store):
    """Parcels table; index renamed to ``parcel_id``."""
    frame = store['parcels']
    frame.index.name = 'parcel_id'
    return frame
@orca.table('buildings', cache=True)
def buildings(store):
    """Buildings table from the model-data store."""
    return store['buildings']
@orca.table('building_types', cache=True)
def building_types(store):
    """Building-types lookup table.

    Renamed from ``buildings``: the original definition shadowed the
    ``buildings`` registration function above.  The orca table name comes
    from the decorator argument, so registered behavior is unchanged.
    """
    return store['building_types']
@orca.table('jobs', cache=True)
def jobs(store):
    """Jobs table from the model-data store."""
    return store['jobs']
@orca.table('establishments', cache=True)
def establishments(store):
    """Establishments table from the model-data store."""
    return store['establishments']
@orca.table('households', cache=True)
def households(store):
    """Households table from the model-data store."""
    return store['households']
@orca.table('persons', cache=True)
def persons(store):
    """Persons table from the model-data store."""
    return store['persons']
@orca.table('rentals', cache=True)
def rentals(store):
    """Rental listings with rent and rent/sqft clamped to sane ranges.

    Rent is clamped to [100, 10000] and rent per sqft to [0.2, 50], as in
    the original.  Uses ``Series.clip`` instead of the original chained
    indexing assignments (``rentals.rent[mask] = v``), which rely on
    pandas' SettingWithCopy behavior and may silently not write back.
    """
    rentals = store['rentals']
    rentals['rent'] = rentals['rent'].clip(lower=100, upper=10000)
    rentals['rent_sqft'] = rentals['rent_sqft'].clip(lower=0.2, upper=50)
    return rentals
@orca.table('units', cache=True)
def units(store):
    """Residential units; index renamed to ``unit_id``."""
    frame = store['units']
    frame.index.name = 'unit_id'
    return frame
@orca.table('nodessmall', cache=True)
def nodessmall(store):
    """Small-network nodes; index renamed to ``osmid``."""
    frame = store['nodessmall']
    frame.index.name = 'osmid'
    return frame
@orca.table('edgessmall', cache=True)
def edgessmall(store):
    """Small-network edges from the model-data store."""
    return store['edgessmall']
@orca.table('nodeswalk', cache=True)
def nodeswalk(store):
    """Walk-network nodes; index renamed to ``node_id_walk``."""
    frame = store['nodeswalk']
    frame.index.name = 'node_id_walk'
    return frame
@orca.table('edgeswalk', cache=True)
def edgeswalk(store):
    """Walk-network edges from the model-data store."""
    return store['edgeswalk']
@orca.table('nodesbeam', cache=True)
def nodesbeam(store):
    """BEAM-network nodes; index renamed to ``node_id_beam``."""
    frame = store['nodesbeam']
    frame.index.name = 'node_id_beam'
    return frame
@orca.table('edgesbeam', cache=True)
def edgesbeam(store):
    """BEAM-network edges from the model-data store."""
    return store['edgesbeam']
# Broadcasts, a.k.a. merge relationships
# Each call registers a merge rule: the first (cast) table's index joins onto
# the named column (onto_on) of the second (onto) table, for use by
# orca.merge_tables.
orca.broadcast(
    'parcels', 'buildings', cast_index=True, onto_on='parcel_id')
orca.broadcast(
    'buildings', 'units', cast_index=True, onto_on='building_id')
orca.broadcast(
    'units', 'households', cast_index=True, onto_on='unit_id')
orca.broadcast(
    'households', 'persons', cast_index=True, onto_on='household_id')
orca.broadcast(
    'buildings', 'jobs', cast_index=True, onto_on='building_id')
orca.broadcast(
    'buildings', 'establishments', cast_index=True, onto_on='building_id')
orca.broadcast(
    'nodeswalk', 'parcels', cast_index=True, onto_on='node_id_walk')
# NOTE(review): no 'craigslist' table is registered in this file (only
# 'rentals' is) -- confirm the three 'craigslist' broadcasts below resolve
# at runtime, or whether the table was renamed to 'rentals'.
orca.broadcast(
    'nodeswalk', 'craigslist', cast_index=True, onto_on='node_id_walk')
orca.broadcast(
    'nodessmall', 'craigslist', cast_index=True, onto_on='node_id_small')
orca.broadcast(
    'nodessmall', 'parcels', cast_index=True, onto_on='node_id_small')
orca.broadcast(
    'nodesbeam', 'parcels', cast_index=True, onto_on='node_id_beam')
orca.broadcast(
    'nodesbeam', 'craigslist', cast_index=True, onto_on='node_id_beam')
# @orca.table('annual_employment_control_totals', cache=True)
# def aect(store):
# df = store['annual_employment_control_totals']
# return df
# @orca.table('annual_household_control_totals', cache=True)
# def ahct(store):
# df = store['annual_household_control_totals']
# return df
# def register_aggregation_table(table_name, table_id):
# """
# Generator function for tables representing aggregate geography.
# """
# @orca.table(table_name, cache=True)
# def func(parcels):
# geog_ids = parcels[table_id].value_counts().index.values
# df = pd.DataFrame(index=geog_ids)
# df.index.name = table_id
# return df
# return func
# # aggregate_geos = {'zonings': 'zoning_id',
# # 'locations': 'location_id',
# # 'block_groups': 'block_group_id',
# # 'blocks': 'block_id',
# # 'zones': 'zone_id',
# # 'plans': 'plan_id',
# # 'zone_districts': 'zone_district_id',
# # 'zone_subdistricts': 'zone_subdistrict_id'}
# # orca.add_injectable('aggregate_geos', aggregate_geos)
# aggregate_geos = {'blocks': 'block_id'}
# orca.add_injectable('aggregate_geos', aggregate_geos)
# for geog in aggregate_geos.items():
# register_aggregation_table(geog[0], geog[1])
# @orca.injectable('year')
# def year():
# default_year = 2010
# try:
# iter_var = orca.get_injectable('iter_var')
# if iter_var is not None:
# return iter_var
# else:
# return default_year
# except:
# return default_year
# @orca.table('building_types', cache=True)
# @orca.table('plan_types', cache=True)
# def plan_types():
# df = pd.read_csv('./data/plan_types.csv').set_index('plan_id')
# return df
# @orca.table('zone_types', cache=True)
# def zone_types():
# df = pd.read_csv('./data/zone_types.csv').set_index('zoning_id')
# return df
# @orca.table('plan_compatible_zones', cache=True)
# def plan_compatible_zones():
# df = pd.read_csv('./data/plan_compatible_zones.csv').\
# set_index('plan_zone_id')
# return df
# @orca.table('allowable_building_types', cache=True)
# def allowable_building_types():
# df = pd.read_csv('./data/allowable_building_types.csv').\
# set_index('zoning_building_id')
# return df
# @orca.table('building_sqft_per_job', cache=True)
# def building_sqft_per_job():
# df = pd.read_csv('./data/bsqft_per_job.csv')
# return df
# @orca.table('zone_overlay_types', cache=True)
# def zone_overlay_types():
# df = pd.read_csv('./data/zone_overlay_types.csv')
# return df
# @orca.table('site_proposals', cache=False)
# def site_proposals(parcels, zone_types, plan_compatible_zones):
# # Prepares input files
# parcelsdf = parcels.local.reset_index()
# zone_typesdf = zone_types.to_frame().reset_index()
# plan_compatible_zonesdf = plan_compatible_zones.to_frame()
# # Identifies parcel location ("status_ugb")
# parcelsdf = defines_location(parcelsdf)
# # Creates possible parcel_zoning combinations
# site_proposals = creates_site_proposals\
# (parcelsdf, plan_compatible_zonesdf, zone_typesdf)
# #Calculates rezoning costs if applicable
# site_proposals = rezoning_costs(site_proposals)
# # Calculates overlay costs if applicable
# site_proposals = overlay_costs(site_proposals)
# # Formats output
# site_proposals = formats_site_proposals(site_proposals)
# return site_proposals
# def defines_location(parcelsdf):
# parcelsdf.loc[parcelsdf['city'].notnull(),'status_ugb'] = 'within_city'
# parcelsdf.loc[(parcelsdf['city'].isnull()) &
# (parcelsdf['ugb'].notnull()),'status_ugb'] = 'within_ugb'
# parcelsdf.loc[(parcelsdf['city'].isnull()) &
# (parcelsdf['ugb'].isnull()),'status_ugb'] = 'outside_ugb'
# return parcelsdf
# def creates_site_proposals(parcelsdf, plan_compatible_zonesdf, zone_typesdf):
# # parcels without zoning_id are removed from site_proposals
# parcelsdf[['zoning_id', 'plan_id']] = \
# parcelsdf[['zoning_id', 'plan_id']].fillna(value=0)
# parcelsdf = parcelsdf[parcelsdf['zoning_id'] != 0]
# # Identifies valid plan_zoning combinations existing in parcels table but
# # missing in plan_compatible_zones table. This ensures that all existing
# # parcel-zone combinations are also included in site_proposals
# missing_plan_zoning_combinations = \
# missing_plan_zone_comb(parcelsdf, plan_compatible_zonesdf)
# # Merges plan_compatible_zones table to parcels table to create
# # all potential parcel_zoning combinations
# plan_compatible_zonesdf = plan_compatible_zonesdf[
# ['plan_id', 'zoning_id', 'cost_in_city',
# 'cost_in_ugb', 'cost_outside_ugb']]
# plan_compatible_zonesdf = plan_compatible_zonesdf.rename(
# columns={'zoning_id': 'potential_zoning_id',
# 'cost_in_city': 'cost_in_city_',
# 'cost_in_ugb': 'cost_in_ugb_',
# 'cost_outside_ugb': 'cost_outside_ugb_'})
# site_proposals = pd.merge(
# parcelsdf, plan_compatible_zonesdf, on='plan_id', how='left')
# # Parcels that have zoning_id information but no plan_id information
# # are only represented with original zoning_id
# site_proposals.loc[(site_proposals.plan_id == 0) &
# (site_proposals.zoning_id != 0),
# 'potential_zoning_id'] = site_proposals['zoning_id']
# # Parcels that have a plan_id that doesn't exist in the
# # plan_compatible_zones table and Plans with zoning_id = 0 in the
# # plan_compatible_zones table can be identified with null and zero
# # 'potential_zoning_id`, respectively. This variable is filled with
# # `zoning_id` in these cases, to represent the original zoning_id only
# site_proposals.loc[site_proposals.potential_zoning_id.isnull(),
# 'potential_zoning_id'] = site_proposals['zoning_id']
# site_proposals.loc[site_proposals.potential_zoning_id == 0,
# 'potential_zoning_id'] = site_proposals['zoning_id']
# # Appends missing plan_zoning combinations to the site_proposals table
# site_proposals = \
# site_proposals.append(missing_plan_zoning_combinations).reset_index()
# site_proposals.loc[site_proposals.missing == 1, 'potential_zoning_id'] = \
# site_proposals['zoning_id']
# site_proposals.drop(columns=['missing'], inplace = True)
# # Removes site proposals that would require rezoning but have
# # can_rezone==True
# zone_typesdf = \
# zone_typesdf.rename(columns={'zoning_id': 'potential_zoning_id'})
# site_proposals = pd.merge(
# site_proposals, zone_typesdf, on = 'potential_zoning_id', how = 'left')
# site_proposals['remove'] = 0
# site_proposals.loc[(site_proposals['zoning_id']!=
# site_proposals['potential_zoning_id']) &
# (site_proposals['can_rezone']==0), 'remove'] = 1
# site_proposals = site_proposals[site_proposals['remove'] == 0]
# return site_proposals
# def missing_plan_zone_comb(parcelsdf, plan_compatible_zonesdf):
# possible = plan_compatible_zonesdf[['plan_id', 'zoning_id']].copy()
# possible = possible[possible['plan_id'] != 0]
# possible = possible[possible['zoning_id'] != 0]
# possible['represented'] = 1
# actual = parcelsdf[parcelsdf['plan_id'] != 0].copy()
# actual = actual.merge(possible, on=['plan_id', 'zoning_id'], how='left')
# missing = actual[(actual['represented'] != 1)].copy()
# missing = missing[missing['zoning_id'] != 0]
# missings = missing[missing['plan_id'] != 0]
# missing = missing.drop(columns=['represented']).copy()
# missing['potential_zoning_id'] = missing['zoning_id']
# missing['cost_in_city_'] = 0
# missing['cost_in_ugb_'] = 0
# missing['cost_outside_ugb_'] = 0
# missing['missing'] = 1
# return missing
# def rezoning_costs(site_proposals):
# # Identifies combinations that imply rezoning
# site_proposals.loc[site_proposals.zoning_id !=
# site_proposals.potential_zoning_id, 'rezoning'] = 1
# site_proposals.loc[site_proposals['rezoning'] != 1, 'rezoning_cost'] = 0
# # Includes column with rezoning_cost (considering status_ugb)
# site_proposals.loc[(site_proposals['rezoning'] == 1) &
# (site_proposals['status_ugb'] == 'within_city'),
# 'rezoning_cost'] = site_proposals['cost_in_city_']
# site_proposals.loc[(site_proposals['rezoning'] == 1) &
# (site_proposals['status_ugb'] == 'within_ugb'),
# 'rezoning_cost'] = site_proposals['cost_in_ugb_']
# site_proposals.loc[
# (site_proposals['rezoning'] == 1) &
# (site_proposals['status_ugb'] == 'outside_ugb'), 'rezoning_cost'] = \
# site_proposals['cost_outside_ugb_']
# site_proposals = \
# site_proposals.drop(columns=['cost_in_city_', 'cost_in_ugb_',
# 'cost_outside_ugb_', 'rezoning'])
# return site_proposals
# def overlay_costs(site_proposals):
# # Includes column with overlay_cost
# # (considering location in relation to ugb)
# overlays = orca.get_table('zone_overlay_types').to_frame()
# overlays = overlays[['overlay_id', 'annexed_overlay_id',
# 'overlay_combination' , 'cost_in_city', 'cost_in_ugb',
# 'cost_outside_ugb']].copy()
# overlays = overlays.rename(columns={'cost_in_city': 'cost_in_city_',
# 'cost_in_ugb': 'cost_in_ugb_',
# 'cost_outside_ugb':
# 'cost_outside_ugb_'})
# site_proposals.loc[site_proposals.overlay_id.isnull(), 'overlay_id'] = '-1'
# site_proposals['overlay_id'] = \
# site_proposals['overlay_id'].astype(float).astype(int)
# site_proposals = \
# pd.merge(site_proposals, overlays, on='overlay_id', how = 'left')
# site_proposals.loc[site_proposals['status_ugb'] == 'within_city',
# 'overlay_cost'] = site_proposals['cost_in_city_']
# site_proposals.loc[site_proposals['status_ugb'] == 'within_ugb',
# 'overlay_cost'] = site_proposals['cost_in_ugb_']
# site_proposals.loc[site_proposals['status_ugb'] == 'outside_ugb',
# 'overlay_cost'] = site_proposals['cost_outside_ugb_']
# site_proposals = site_proposals.drop\
# (columns=['cost_in_city_', 'cost_in_ugb_', 'cost_outside_ugb_'])
# return site_proposals
# def formats_site_proposals(site_proposals):
# # Removes irrelevant fields and renames "potential_zoning_id" to
# # "parcel_zoning_id_combination", unique to each combination in the table
# site_proposals['parcel_zoning_id_combination'] = \
# site_proposals['parcel_id'].astype(int).astype(str) + "_" + \
# site_proposals['potential_zoning_id'].astype(int).astype(str)
# site_proposals = site_proposals.rename\
# (columns={'zoning_id': "original_zoning_id"})
# # Reorders columns to have newly created columns at the beggining.
# ordered_columns = ['parcel_zoning_id_combination', 'parcel_id',
# 'primary_id', 'zone_id','x', 'y','block_group_id',
# 'block_id', 'zone_district_id','zone_subdistrict_id',
# 'location_id','city', 'ugb','status_ugb','plan_id',
# 'overlay_id', 'annexed_overlay_id','original_zoning_id',
# 'zoning_name','potential_zoning_id','can_rezone',
# 'rezoning_cost', 'overlay_cost', 'land_value', 'acres',
# 'proportion_undevelopable','Shape_Length', 'Shape_Area',
# 'max_far','placeholder_max_far', 'max_dua',
# 'placeholder_max_dua','min_far', 'min_dua',
# 'max_height', 'min_front_setback','max_front_setback',
# 'rear_setback','side_setback','coverage', 'OBJECTID']
# site_proposals = site_proposals.reindex(columns=ordered_columns)
# return site_proposals
# # @orca.table('target_vacancies', cache=True)
# # def target_vacancies():
# # vacancies = pd.read_csv('./data/target_vacancies.csv').\
# # set_index('building_type_id')
# # return vacancies
# # Dictionary of variables to generate output indicators and charts
# def creates_main_dicts():
# dict = {'total': {'households': 'Total households',
# 'jobs': 'Total jobs'},
# 'sum': {
# 'residential_units': 'Total residential units in buildings',
# 'residential_sqft':
# 'Total residential area in buildings (sqft)',
# 'non_residential_sqft':
# 'Total non residential sqft in buildings',
# 'job_spaces': 'Total job spaces in buildings',
# 'residential_units': 'Total number of residential units',
# 'acres': 'Total area (acres)',
# 'persons': 'Total persons in households',
# 'workers': 'Total workers in households',
# 'children': 'Total children in households',
# 'cars': 'Total vehicles in households',
# 'income': 'Total annual income from households',
# 'recent_mover':
# 'Total households that moved within last 5 yrs'},
# 'mean': {
# 'non_residential_sqft':
# 'Average non residential sqft in buildings',
# 'sqft_per_unit': 'Average area per residential unit in sqft',
# 'sqft_per_unit_ave_800_linear':
# 'Average area per residential unit in sqft within 800m '
# 'along the auto street network (using flat decay)',
# 'job_spaces': 'Average job spaces in buildings',
# 'year_built': 'Average year of construction of buildings',
# 'sector_id': 'Average job sector id',
# 'acres': 'Average parcel area (acres)',
# 'persons': 'Average persons in households',
# 'workers': 'Average workers in households',
# 'children': 'Average children in households',
# 'cars': 'Average vehicles in households',
# 'income': 'Average household annual income',
# 'age_of_head': 'Average age of the household head',
# 'x': 'Average x coordinate of parcels',
# 'y': 'Average y coordinate of parcels',
# 'value_per_unit': 'Average assessed value per unit',
# 'value_per_sqft': 'Average assessed value per sqft of area'},
# 'median': {
# 'building_type_id': 'Median building type id',
# 'income_quartile': 'Median income quartile',
# 'tenure': 'Median tenure code of households',
# 'race_of_head': 'Median race code of head of household',
# 'sector_id': 'Median job sector id'},
# 'other': {'density_buildings': 'Density of buildings',
# 'density_households': 'Density of households',
# 'density_jobs': 'Density of jobs',
# 'ratio_jobs_to_households': 'Job-housing balance',
# 'ratio_workers_to_persons': 'Ratio of workers to persons',
# 'ratio_households_to_residential_units':
# 'Residential occupancy rate',
# 'residential_vacancy_rate':
# 'Total residential vacancy rate',
# 'non_residential_vacancy_rate':
# 'Total non residential vacancy rate',
# 'remaining_nonresidential_sqft_capacity':
# 'Total remaining non residential sqft capacity',
# 'remaining_residential_unit_capacity':
# 'Total remaining residential unit capacity',
# 'ave_annual_rent_sqft_400m':'Average annual rent per sqft '
# 'within 400m along the auto street network (flat decay)',
# 'ave_annual_office_rent_sqft_800m':'Average annual office '
# 'rent per sqft within 800m along the auto street network '
# '(using flat decay)',
# 'ave_annual_industrial_rent_sqft_800m':'Average annual '
# 'industrial rent per sqft within 800m along the auto '
# 'street network (using flat decay)'}}
# custom_dict = {'jobs_sector_id':
# {'data_name': 'Total jobs',
# 'aggregation_name': 'sector id'},
# 'households_income_quartile':
# {'data_name': 'Total households',
# 'aggregation_name': 'income quartile'},
# 'households_age_of_head_quartile':
# {'data_name': 'Total households',
# 'aggregation_name': 'age of head quartile'},
# 'households_recent_mover_income':
# {'data_name': 'Total households that moved within last'
# ' 5 years',
# 'aggregation_name': 'income quartile (1 = lowest '
# 'quartile, 2 = all others)'},
# 'buildings_repm_id':
# {'data_name': 'Total buildings',
# 'aggregation_name': 'representative building type'}}
# prop_vars = {'households': ['persons', 'race_of_head', 'workers',
# 'children','cars', 'tenure', 'recent_mover',
# 'income_quartile'],
# 'jobs': ['sector_id'],
# 'buildings': ['building_type_id']}
# uses = ['retail', 'industrial','sf_detached', 'duplex_townhome',
# 'multifamily', 'office']
# return dict, custom_dict, prop_vars, uses
# def adds_dict_proportions(prop_vars, dict):
# prop = {}
# for agent in prop_vars:
# vars = prop_vars[agent]
# agents = orca.get_table(agent)
# for var in vars:
# agents_by_cat = agents[var].value_counts()
# cats_to_measure = agents_by_cat[agents_by_cat > 500].index.values
# for cat in cats_to_measure:
# new_var = var + '_' + str(cat)
# desc = 'Proportion of ' + agent + ' with ' + var + \
# ' equal to ' + str(cat)
# prop[new_var] = desc
# dict['prop'] = prop
# return dict
# def adds_derived_vars_dict(dict, uses):
# new_dict = {}
# derived_vars = {'total': ['households', 'jobs'],
# 'sum': dict['sum'].keys(),
# 'mean': dict['mean'].keys(),
# 'median': dict['median'].keys(),
# 'prop': dict['prop'].keys()}
# for agg in ['total', 'sum', 'mean', 'median', 'prop','other']:
# for var in dict[agg]:
# if agg != 'other':
# new_var = agg + '_' + var
# else:
# new_var = var
# new_dict[new_var] = dict[agg][var]
# for use in uses:
# var = 'mean_pred_' + use + '_price'
# new_var = var + '_ave_800_linear'
# new_dict[var] = 'Average predicted ' + use + ' price per sqft'
# method =' within 800m along the auto street network (using flat decay)'
# new_dict[new_var] = new_dict[var] + method
# for dist in [500, 1000, 1500]:
# for method in ['linear', 'flat']:
# for agg in ['total', 'sum', 'mean', 'prop']:
# for var in derived_vars[agg]:
# new_var = agg + '_' + var + '_ave_' + str(
# dist) + '_' + method
# desc = 'Log of average within ' + str(dist/1000) + \
# 'km along the auto street network (' + method + \
# ' decay) of: ' + \
# dict[agg][var].strip('Log of ').capitalize()
# new_dict[new_var] = desc
# new_var = 'without_log_' + new_var
# desc = 'Average within ' + str(dist / 1000) + \
# 'km along the auto street network (' + method + \
# ' decay) of: ' + dict[agg][var]
# new_dict[new_var] = desc
# for agg in ['total', 'sum']:
# for var in derived_vars[agg]:
# new_var = agg + '_' + var + '_sum_' + str(
# dist) + '_' + method
# desc = 'Log of sum within ' + str(dist/1000) + \
# 'km along the auto street network (' + method + \
# ' decay) of: ' + \
# dict[agg][var].strip('Log of ').capitalize()
# new_dict[new_var] = desc
# new_var = 'without_log_' + new_var
# desc = 'Sum within ' + str(dist / 1000) + \
# 'km along the auto street network (' + method + \
# ' decay) of: ' + dict[agg][var]
# new_dict[new_var] = desc
# return new_dict
# @orca.injectable('dictionary')
# def dictionary():
# new_dict = {}
# dict, custom_dict, prop_vars, uses = creates_main_dicts()
# dict = adds_dict_proportions(prop_vars, dict)
# new_dict = adds_derived_vars_dict(dict, uses)
# full_dict = {'var_dict': new_dict}
# full_dict['custom_var_dict'] = custom_dict
# return full_dict
| 40.489699 | 81 | 0.590199 |
d7acb3960ef68d25b6962003fa4f2bf2e6e257d0
| 8,030 |
py
|
Python
|
tensorflow/g3doc/tutorials/word2vec/word2vec_basic.py
|
Mr-Kumar-Abhishek/tensordrift
|
a9ca5173b2252b0de5dd754147b275e85298e522
|
[
"Apache-2.0"
] | 15 |
2015-11-17T09:34:29.000Z
|
2019-11-03T14:49:24.000Z
|
tensorflow/g3doc/tutorials/word2vec/word2vec_basic.py
|
usepavlov/tensorflow
|
21fac39c471dede0e4ae62dd60e2b0b85db48415
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/g3doc/tutorials/word2vec/word2vec_basic.py
|
usepavlov/tensorflow
|
21fac39c471dede0e4ae62dd60e2b0b85db48415
|
[
"Apache-2.0"
] | 15 |
2017-04-19T02:38:07.000Z
|
2019-11-08T20:37:30.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import collections
import math
import numpy as np
import os
import random
from six.moves import urllib
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
import zipfile
# Step 1: Download the data.
url = 'http://mattmahoney.net/dc/'  # Base URL hosting the text8 corpus.
def maybe_download(filename, expected_bytes):
  """Fetch `filename` from the corpus `url` unless it already exists locally,
  then verify the file is exactly `expected_bytes` long.

  Raises Exception when the size check fails (likely a bad/partial download).
  """
  if not os.path.exists(filename):
    filename, _ = urllib.request.urlretrieve(url + filename, filename)
  actual_bytes = os.stat(filename).st_size
  if actual_bytes != expected_bytes:
    print(actual_bytes)
    raise Exception(
        'Failed to verify ' + filename + '. Can you get to it with a browser?')
  print('Found and verified', filename)
  return filename
# Download (if needed) and size-check the text8 corpus (~31 MB zipped).
filename = maybe_download('text8.zip', 31344016)
# Read the data into a string.
def read_data(filename):
  """Return the first member of the zip archive split on whitespace into a
  list of byte-string tokens (None for an empty archive, as before).

  Fix: the original's ``f.close()`` sat after a ``return`` inside the loop
  and was unreachable, leaking the archive handle; the ``with`` block closes
  it on every path.
  """
  with zipfile.ZipFile(filename) as f:
    for name in f.namelist():
      return f.read(name).split()
words = read_data(filename)
print('Data size', len(words))
# Step 2: Build the dictionary and replace rare words with UNK token.
vocabulary_size = 50000  # Keep the 49,999 most frequent words plus 'UNK'.
def build_dataset(words, vocabulary_size=50000):
  """Encode `words` as integer ids over a capped vocabulary.

  The ``vocabulary_size - 1`` most frequent words get ids 1..N-1 (most
  frequent first); every other word maps to id 0, the 'UNK' token.
  ``vocabulary_size`` is now a parameter (default 50000, the module's
  original hard-coded global) so the function works for arbitrary corpora.

  Args:
    words: iterable of hashable tokens (the corpus, in order).
    vocabulary_size: total vocabulary size including the UNK slot.

  Returns:
    data: list of ids, one per input word.
    count: [['UNK', unk_count]] + (word, frequency) pairs, most common first.
    dictionary: word -> id.
    reverse_dictionary: id -> word.
  """
  count = [['UNK', -1]]
  count.extend(collections.Counter(words).most_common(vocabulary_size - 1))
  # Ids follow frequency rank; UNK occupies id 0.
  dictionary = {word: index for index, (word, _) in enumerate(count)}
  data = []
  unk_count = 0
  for word in words:
    if word in dictionary:
      index = dictionary[word]
    else:
      index = 0  # dictionary['UNK']
      unk_count += 1
    data.append(index)
  count[0][1] = unk_count
  reverse_dictionary = dict(zip(dictionary.values(), dictionary.keys()))
  return data, count, dictionary, reverse_dictionary
data, count, dictionary, reverse_dictionary = build_dataset(words)
del words  # Hint to reduce memory.
print('Most common words (+UNK)', count[:5])
print('Sample data', data[:10])
# Cursor into `data`, shared with generate_batch (which advances it globally).
data_index = 0
# Step 4: Function to generate a training batch for the skip-gram model.
def generate_batch(batch_size, num_skips, skip_window):
  """Build one skip-gram training batch from the global `data` stream.

  For each center word, emits `num_skips` (center -> random context word)
  pairs drawn from a window of `skip_window` words on each side. Advances
  the module-level cursor `data_index`, so successive calls walk (and wrap)
  through the corpus.
  """
  global data_index
  assert batch_size % num_skips == 0
  assert num_skips <= 2 * skip_window
  batch = np.ndarray(shape=(batch_size), dtype=np.int32)
  labels = np.ndarray(shape=(batch_size, 1), dtype=np.int32)
  span = 2 * skip_window + 1  # [ skip_window target skip_window ]
  # `buffer` holds the current sliding window over the corpus.
  buffer = collections.deque(maxlen=span)
  for _ in range(span):
    buffer.append(data[data_index])
    data_index = (data_index + 1) % len(data)
  for i in range(batch_size // num_skips):
    target = skip_window  # target label at the center of the buffer
    targets_to_avoid = [ skip_window ]
    for j in range(num_skips):
      # Rejection-sample a context position not used yet for this center.
      while target in targets_to_avoid:
        target = random.randint(0, span - 1)
      targets_to_avoid.append(target)
      batch[i * num_skips + j] = buffer[skip_window]
      labels[i * num_skips + j, 0] = buffer[target]
    # Slide the window one word to the right (deque drops the oldest).
    buffer.append(data[data_index])
    data_index = (data_index + 1) % len(data)
  return batch, labels
# Sanity-check the batching: print a tiny batch of (center -> context) pairs.
batch, labels = generate_batch(batch_size=8, num_skips=2, skip_window=1)
for i in range(8):
  print(batch[i], '->', labels[i, 0])
  print(reverse_dictionary[batch[i]], '->', reverse_dictionary[labels[i, 0]])
# Step 5: Build and train a skip-gram model.
batch_size = 128
embedding_size = 128  # Dimension of the embedding vector.
skip_window = 1       # How many words to consider left and right.
num_skips = 2         # How many times to reuse an input to generate a label.
# We pick a random validation set to sample nearest neighbors. Here we limit the
# validation samples to the words that have a low numeric ID, which by
# construction are also the most frequent.
valid_size = 16     # Random set of words to evaluate similarity on.
valid_window = 100  # Only pick dev samples in the head of the distribution.
# NOTE(review): random.sample over an ndarray relies on the array's sequence
# protocol; np.random.choice(valid_window, valid_size, replace=False) is the
# more conventional spelling -- confirm before changing.
valid_examples = np.array(random.sample(np.arange(valid_window), valid_size))
num_sampled = 64    # Number of negative examples to sample.
graph = tf.Graph()
with graph.as_default():
  # Input data.
  train_inputs = tf.placeholder(tf.int32, shape=[batch_size])
  train_labels = tf.placeholder(tf.int32, shape=[batch_size, 1])
  valid_dataset = tf.constant(valid_examples, dtype=tf.int32)
  # Construct the variables.
  embeddings = tf.Variable(
      tf.random_uniform([vocabulary_size, embedding_size], -1.0, 1.0))
  nce_weights = tf.Variable(
      tf.truncated_normal([vocabulary_size, embedding_size],
                          stddev=1.0 / math.sqrt(embedding_size)))
  nce_biases = tf.Variable(tf.zeros([vocabulary_size]))
  # Look up embeddings for inputs.
  embed = tf.nn.embedding_lookup(embeddings, train_inputs)
  # Compute the average NCE loss for the batch.
  # tf.nce_loss automatically draws a new sample of the negative labels each
  # time we evaluate the loss.
  loss = tf.reduce_mean(
      tf.nn.nce_loss(nce_weights, nce_biases, embed, train_labels,
                     num_sampled, vocabulary_size))
  # Construct the SGD optimizer using a learning rate of 1.0.
  optimizer = tf.train.GradientDescentOptimizer(1.0).minimize(loss)
  # Compute the cosine similarity between minibatch examples and all embeddings.
  norm = tf.sqrt(tf.reduce_sum(tf.square(embeddings), 1, keep_dims=True))
  normalized_embeddings = embeddings / norm
  valid_embeddings = tf.nn.embedding_lookup(
      normalized_embeddings, valid_dataset)
  similarity = tf.matmul(
      valid_embeddings, normalized_embeddings, transpose_b=True)
# Step 6: Begin training
num_steps = 100001
with tf.Session(graph=graph) as session:
  # We must initialize all variables before we use them.
  # NOTE(review): initialize_all_variables is the pre-TF-1.0 spelling of
  # global_variables_initializer.
  tf.initialize_all_variables().run()
  print("Initialized")
  average_loss = 0
  for step in xrange(num_steps):
    batch_inputs, batch_labels = generate_batch(
        batch_size, num_skips, skip_window)
    feed_dict = {train_inputs : batch_inputs, train_labels : batch_labels}
    # We perform one update step by evaluating the optimizer op (including it
    # in the list of returned values for session.run()
    _, loss_val = session.run([optimizer, loss], feed_dict=feed_dict)
    average_loss += loss_val
    if step % 2000 == 0:
      if step > 0:
        average_loss = average_loss / 2000
      # The average loss is an estimate of the loss over the last 2000 batches.
      print("Average loss at step ", step, ": ", average_loss)
      average_loss = 0
    # note that this is expensive (~20% slowdown if computed every 500 steps)
    if step % 10000 == 0:
      sim = similarity.eval()
      for i in xrange(valid_size):
        valid_word = reverse_dictionary[valid_examples[i]]
        top_k = 8 # number of nearest neighbors
        # Skip index 0 (the word itself) and take the next top_k neighbors.
        nearest = (-sim[i, :]).argsort()[1:top_k+1]
        log_str = "Nearest to %s:" % valid_word
        for k in xrange(top_k):
          close_word = reverse_dictionary[nearest[k]]
          log_str = "%s %s," % (log_str, close_word)
        print(log_str)
  # Snapshot the unit-norm embeddings for the visualization step below.
  final_embeddings = normalized_embeddings.eval()
# Step 7: Visualize the embeddings.
def plot_with_labels(low_dim_embs, labels, filename='tsne.png'):
  """Scatter-plot 2-D embeddings with one text annotation per label, saving
  the figure to `filename`. Relies on the module-level `plt`, which only
  exists if the sklearn/matplotlib import in the try-block below succeeded.
  """
  assert low_dim_embs.shape[0] >= len(labels), "More labels than embeddings"
  plt.figure(figsize=(18, 18))  # in inches
  for i, label in enumerate(labels):
    x, y = low_dim_embs[i,:]
    plt.scatter(x, y)
    plt.annotate(label,
                 xy=(x, y),
                 xytext=(5, 2),
                 textcoords='offset points',
                 ha='right',
                 va='bottom')
  plt.savefig(filename)
# Project embeddings to 2-D with t-SNE and plot the 500 most frequent words;
# degrade gracefully when sklearn/matplotlib are unavailable.
try:
  from sklearn.manifold import TSNE
  import matplotlib.pyplot as plt
  tsne = TSNE(perplexity=30, n_components=2, init='pca', n_iter=5000)
  plot_only = 500
  low_dim_embs = tsne.fit_transform(final_embeddings[:plot_only,:])
  labels = list(dictionary.keys())[:plot_only]
  plot_with_labels(low_dim_embs, labels)
except ImportError:
  print("Please install sklearn and matplotlib to visualize embeddings.")
| 35.219298 | 80 | 0.702366 |
f2239367a9cda8d6375573660eec59e7483d5f37
| 22,343 |
py
|
Python
|
models_numpyro_utils.py
|
benmarlin/HeartStepsU01
|
44fb199810988fd7a02604d35ac08ee0309dec67
|
[
"BSD-3-Clause"
] | null | null | null |
models_numpyro_utils.py
|
benmarlin/HeartStepsU01
|
44fb199810988fd7a02604d35ac08ee0309dec67
|
[
"BSD-3-Clause"
] | 1 |
2021-01-06T19:17:20.000Z
|
2021-01-06T19:17:20.000Z
|
models_numpyro_utils.py
|
reml-lab/HeartStepsU01
|
c891456719e6776b9d0cbbee713e56f91cf2c112
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import matplotlib as mpl
# Headless environments (no DISPLAY) need the non-interactive Agg backend;
# the backend must be selected before pyplot is imported below.
if os.environ.get('DISPLAY','') == '':
    mpl.use('Agg')
import matplotlib.pyplot as plt
from textwrap import wrap
import collections
import timeit
from datetime import datetime
import sklearn.metrics as sm
import pandas as pd
import numpy as np
import jax.numpy as jnp
from jax import ops, random
import numpyro
import numpyro.distributions as dist
import numpyro.distributions.constraints as constraints
from numpyro.infer import MCMC, NUTS, Predictive
from numpyro.diagnostics import hpdi
fig_size = (7,5)  # Default figure size (inches) for all plots below.
build_time = collections.defaultdict(list)  # title -> elapsed seconds per run (see save_build_time).
def save_build_time(title, start_time):
    """Record the whole-second duration since start_time under title.

    Prints the duration and appends it to the module-level build_time
    registry so repeated runs of the same step accumulate.
    """
    elapsed_seconds = int(timeit.default_timer() - start_time)
    print('duration =', elapsed_seconds, 'seconds')
    build_time[title].append(elapsed_seconds)
def build_df(filename, b_set_index=True, b_drop=True):
    """Load a CSV into a DataFrame.

    Optionally drops rows containing NaNs (b_drop) and indexes the frame by
    the (Subject ID, Date) pair, coercing Subject ID to string (b_set_index).
    """
    frame = pd.read_csv(filename, low_memory=False)
    if b_drop:
        frame = frame.dropna()
    if not b_set_index:
        return frame
    frame['Subject ID'] = frame['Subject ID'].astype(str)
    return frame.set_index(['Subject ID', 'Date'])
def plot_data(df_data1, df_data2, y_name, x_names, y_lim=None, b_show=True):
    """Scatter-plot y_name (from df_data1) against each column of x_names
    (from df_data2); show interactively when b_show, else save
    '<title>_data.png'. Assumes the two frames are row-aligned -- TODO confirm.
    """
    plt.figure(figsize=fig_size)
    plot_title = y_name + ' vs ' + str(x_names)
    ys = df_data1[y_name].values
    for x_name in x_names:
        xs = df_data2[x_name].values
        plt.scatter(xs, ys, label=x_name, marker='o', alpha=.3)
    plt.legend(loc=2)
    plt.xlabel('x')
    if y_lim != None:
        plt.ylim(y_lim)
    plt.ylabel(y_name)
    # NOTE(review): plot_title is recomputed here with the identical value.
    plot_title = y_name + ' vs ' + str(x_names)
    plt.title(plot_title + ' data')
    if b_show:
        plt.show()
    else:
        filename = plot_title + "_data.png"
        plt.savefig(filename)
        plt.close('all')
def plot_data_regression_lines(samples, title, df, x_name, y_name, x_lim=None, y_lim=None, b_show=True):
    """Plot the (x, y) data plus 1000 posterior regression lines (and the
    posterior-mean line) from MCMC `samples`; show or save
    '<title>_regression.png'.

    NOTE(review): slope draws are read as samples[x_name], whereas
    model_regression/plot_data_regression_ci use the 'b_' + x_name site
    name -- confirm which sampler output this function expects.
    """
    #Plot data
    data_x = df[x_name].values
    data_y = df[y_name].values
    plt.figure(figsize=fig_size)
    plt.scatter(data_x, data_y, s=10, alpha=.5, marker='o', color='#1f77b4', label='data')
    #Plot regression lines
    xs = np.linspace(data_x.min(), data_x.max(), 100)
    a_samples = samples['intercept']
    b1_samples = samples[x_name]
    a_mean = jnp.mean(a_samples)
    b1_mean = jnp.mean(b1_samples)
    n_samples = 1000
    for i in range(n_samples):
        plt.plot(xs, a_samples[i] + b1_samples[i] * xs, color='blue', alpha=0.005)
    plt.plot(xs, a_mean + b1_mean * xs, color='blue', lw=2, label='fitted')
    plt.ylabel(y_name)
    plt.xlabel(x_name)
    if y_lim != None:
        plt.ylim(y_lim)
    if x_lim != None:
        plt.xlim(x_lim)
    plot_title = "\n".join(wrap(title, 80))
    plot_title += ':\ndata and ' + str(n_samples) + ' fitted regression lines'
    plt.title(plot_title)
    plt.gcf().tight_layout()
    plt.legend(loc=4)
    if b_show:
        plt.show()
    else:
        plt.savefig(title + '_regression.png')
        plt.close('all')
def plot_data_regression_ci(title, samples, df_data1, df_data2, y_name, x_name, x_lim=None, y_lim=None, b_show=True):
    """Plot the data, the posterior-mean regression line, and a 95% HPDI band
    computed from the 'intercept' and 'b_' + x_name sample sites; show or
    save '<title>_95p_ci_regression.png'.
    """
    confidence = 0.95
    # Posterior predictive mean per sample: intercept + slope * x.
    posterior_mu = jnp.expand_dims(samples['intercept'], -1)
    posterior_mu += jnp.expand_dims(samples['b_'+x_name], -1) * df_data2[x_name].values
    # Sort by x so the line/band plot left-to-right.
    xs_to_sort = df_data2[x_name].values
    idx = jnp.argsort(xs_to_sort)
    xs = xs_to_sort[idx]
    y_means = jnp.mean(posterior_mu, axis=0)[idx]
    hpdis = hpdi(posterior_mu, confidence)[:, idx]
    ys = df_data1[y_name].values[idx]
    plt.figure(figsize=(fig_size))
    label = y_name + ' vs ' + x_name
    plt.scatter(xs, ys, label=label, marker='o', alpha=.3)
    plt.plot(xs, y_means, 'k', label='mean')
    label = str(int(confidence*100)) + '% CI'
    plt.fill_between(xs, hpdis[0], hpdis[1], alpha=0.25, color="blue", label=label)
    plt.xlabel(x_name)
    plt.ylabel(y_name)
    if x_lim == None:
        x_lim = (0,6)
    plt.xlim(x_lim)
    if y_lim != None:
        plt.ylim(y_lim)
    plot_title = "\n".join(wrap(title, 80))
    plt.title(plot_title)
    plt.legend(loc=2)
    if b_show:
        plt.show()
    else:
        filename = title + '_' + str(int(confidence*100)) + 'p_ci_regression.png'
        plt.savefig(filename)
        plt.close('all')
def plot_data_AR(title, y_test, y_pred, n_forecast, b_show=True):
    """Plot an autoregressive forecast (y_pred) against the held-out series
    (y_test); show interactively or save '<title>_predict.png'."""
    plt.figure(figsize=fig_size)
    plt.plot(y_test, label='y test')
    plt.plot(y_pred, label='y pred', ls=':', lw=3, color='magenta')
    plt.title('forecast = ' + str(n_forecast) + ' for ' + title)
    plt.ylabel('y')
    plt.xlabel('t')
    plt.legend(loc=2)
    if b_show:
        plt.show()
    else:
        plt.savefig(title + '_predict.png')
        plt.close('all')
def model_regression(df, y_name, x_names):
    """NumPyro linear-regression model: y ~ Normal(intercept + X.b, sigma).

    Sample sites: 'intercept', one 'b_<x_name>' per predictor, 'log_sigma',
    and the observed site named y_name.
    """
    xs = jnp.array(df[x_names].values)
    y_obs = df[y_name].values
    mu = numpyro.sample('intercept', dist.Normal(0., 1000))
    M = xs.shape[1]
    # Accumulate one slope term per predictor column.
    for i in range(M):
        b_name = 'b_'+x_names[i]
        bi = numpyro.sample(b_name, dist.Normal(0., 10.))
        bxi = bi * xs[:,i]
        mu = mu + bxi
    # Noise scale is sampled on the log scale to keep it positive.
    log_sigma = numpyro.sample('log_sigma', dist.Normal(0., 10.))
    numpyro.sample(y_name, dist.Normal(mu, jnp.exp(log_sigma)), obs=y_obs)
def model_impute(data1, df_data2, y_name, y_index, x_names, mapping, b_fill_daily, b_classify, y_obs=None):
    # NumPyro model: (logistic) regression of y on x_names where missing
    # predictor values are imputed as latent variables during inference.
    # NOTE: this function mutates df_data2 and mapping in place.
    bias = numpyro.sample('intercept', dist.Normal(0, 10))
    linear_predictor = bias
    for x_name in x_names:
        if not b_fill_daily:
            # Impute NaNs in this predictor column as latent samples.
            x_values = df_data2[x_name].values
            isnan = np.isnan(x_values)
            x_nan_indices = np.nonzero(isnan)[0]
            if len(x_nan_indices) > 0:
                x_mu = numpyro.sample(x_name+'_mu', dist.Normal(0, 10).expand([1]))
                x_log_sigma = numpyro.sample(x_name+'_log_sigma', dist.Normal(0, 10).expand([1]))
                # .mask(False): the imputation site itself contributes no
                # log-probability; the filled column is then observed below.
                x_impute = numpyro.sample(x_name+'_impute', dist.Normal(x_mu[x_nan_indices],
                                                                        jnp.exp(x_log_sigma[x_nan_indices])).mask(False))
                # NOTE(review): jax.ops.index_update was removed in newer JAX
                # releases (.at[...].set(...) is the replacement) -- confirm
                # the pinned JAX version supports it.
                x_values = ops.index_update(x_values, x_nan_indices, x_impute)
                numpyro.sample(x_name, dist.Normal(x_mu, jnp.exp(x_log_sigma)), obs=x_values)
            df_data2[x_name] = x_values
            mapping[x_name] = df_data2[x_name].to_dict()
        # Look up each observation's predictor value via its group index.
        x_values = jnp.array([mapping[x_name][i] for i in list(data1[y_index])])
        b_value = numpyro.sample('b_'+x_name, dist.Normal(0., 10.))
        linear_predictor += b_value * x_values
    if b_classify:
        numpyro.sample(y_name, dist.Bernoulli(logits=linear_predictor), obs=y_obs)
    else:
        log_sigma = numpyro.sample('log_sigma', dist.Normal(0., 10.))
        numpyro.sample(y_name, dist.Normal(linear_predictor, jnp.exp(log_sigma)), obs=y_obs)
def model_AR(y_obs, K, y_matrix):
    # NumPyro model: AR(K) with shared Gaussian noise.
    # b[0] is the intercept; b[1:] are the K lag coefficients.
    b = numpyro.sample('b', dist.Normal(0., 10.).expand([1+K]))
    mus = y_matrix @ b[1:] + b[0]
    # Prepend the bare intercept as the mean of the first observation,
    # which has no row of lagged values in y_matrix.
    b0 = jnp.array([b[0]])
    mus = jnp.concatenate((b0, mus), axis=0)
    log_sigma = numpyro.sample('log_sigma', dist.Normal(0., 10.))
    # Return value unused; the sample statement conditions on y_obs.
    y_sample = numpyro.sample('obs', dist.Normal(mus, jnp.exp(log_sigma)), obs=y_obs)
def fit_simple_regression_model_numpyro(df_data, y_name, x_names, x_lim=None, y_lim=None, y_mean_lim=None, b_show=True):
    """Fit one single-predictor Bayesian regression per entry of x_names.

    For each predictor: runs NUTS, prints the summary and sigma statistics,
    records the build time, and plots the posterior regression lines.
    Returns the list of fitted MCMC objects, one per predictor.
    """
    fitted = []
    for x_name in x_names:
        t_start = timeit.default_timer()
        title = y_name + ' vs ' + x_name + ' (regression model)'
        print('fitting for %s...' % title)
        # Run NUTS on the single-predictor regression model.
        sampler = MCMC(NUTS(model_regression), num_warmup=500, num_samples=1000)
        sampler.run(random.PRNGKey(0), df=df_data, y_name=y_name, x_names=[x_name])
        print('\nsummary for %s =' % title)
        sampler.print_summary()
        draws = sampler.get_samples()
        # Derive sigma from the sampled log_sigma for reporting.
        draws['sigma'] = jnp.exp(draws['log_sigma'])
        sigma = draws['sigma']
        print('sigma mean = %.2f\tstd = %.2f\tmedian = %.2f\tQ5%% = %.2f\tQ95%% = %.2f' % (
            np.mean(sigma), np.std(sigma), np.median(sigma),
            np.quantile(sigma, 0.05, axis=0), np.quantile(sigma, 0.95, axis=0)))
        save_build_time(title, t_start)
        plot_data_regression_lines(draws, title, df_data, x_name, y_name, x_lim, y_lim, b_show)
        fitted.append(sampler)
        print('\n\n\n')
    return fitted
def fit_regression_model_numpyro(df_data, y_name, x_names, y_mean_lim=None, b_show=True):
    """Fit a single multivariate Bayesian regression of y_name on x_names.

    Runs NUTS, prints the summary plus sigma statistics, records the build
    time under the run title, and returns the fitted MCMC object.
    """
    t_start = timeit.default_timer()
    title = y_name + ' vs ' + str(x_names) + ' (regression model)'
    print('fitting for %s...' % title)
    # Run NUTS on the multi-predictor regression model.
    sampler = MCMC(NUTS(model_regression), num_warmup=500, num_samples=1000)
    sampler.run(random.PRNGKey(0), df=df_data, y_name=y_name, x_names=x_names)
    print('\nsummary for %s =' % title)
    sampler.print_summary()
    draws = sampler.get_samples()
    # Derive sigma from the sampled log_sigma for reporting.
    draws['sigma'] = jnp.exp(draws['log_sigma'])
    sigma = draws['sigma']
    print('sigma mean = %.2f\tstd = %.2f\tmedian = %.2f\tQ5%% = %.2f\tQ95%% = %.2f' % (
        np.mean(sigma), np.std(sigma), np.median(sigma),
        np.quantile(sigma, 0.05, axis=0), np.quantile(sigma, 0.95, axis=0)))
    save_build_time(title, t_start)
    print('\n\n\n')
    return sampler
def get_title(participant, df_data1, df_data2, y_name, x_names, b_fill_daily, b_classify):
    """Build the descriptive run title used for logging, timing and plots.

    Includes the participant id, response/predictor names, the model kind
    (regression vs logistic regression), both frame row counts, and the
    fill-daily flag.
    """
    model_kind = 'logistic regression' if b_classify else 'regression'
    n1, n2 = df_data1.shape[0], df_data2.shape[0]
    return (f'{participant} {y_name} vs {x_names} '
            f'({model_kind} N1={n1} N2={n2} fill={b_fill_daily})')
def prepare_data(df_data1, df_data2, y_name, y_index, x_names, b_standardize, b_show=True):
    """Optionally z-score the used columns and pack them into plain dicts
    of value arrays for the numpyro models.

    df_data1 -- frame with the response column y_name and the y_index column
    df_data2 -- frame with the predictor columns x_names
    b_standardize -- when True, replace each used column with its z-score
        (mutates the passed frames in place, as before)
    b_show -- kept for interface compatibility (only used by the
        commented-out diagnostic plot below)

    Returns (df_data1, data1, df_data2, data2) where data1/data2 map column
    names to the underlying value arrays.
    """
    data2 = {}
    for x_name in x_names:
        if b_standardize:
            x_mean = df_data2[x_name].mean()
            x_std = df_data2[x_name].std()
            # Vectorized z-score: equivalent to the previous per-element
            # .apply(lambda x: ...), but a single pandas operation.
            df_data2[x_name] = (df_data2[x_name] - x_mean) / x_std
        data2[x_name] = df_data2[x_name].values
    data1 = {}
    if b_standardize:
        y_mean = df_data1[y_name].mean()
        y_std = df_data1[y_name].std()
        df_data1[y_name] = (df_data1[y_name] - y_mean) / y_std
    data1[y_name] = df_data1[y_name].values
    data1[y_index] = df_data1[y_index].values
    #plot_data(df_data1, df_data2, y_name, x_names, y_lim=None, b_show=b_show)
    return df_data1, data1, df_data2, data2
def align_start_stop_date(df_mood, df_fitbit):
    """Trim both frames to their overlapping date window.

    The window opens at the first mood date and closes at the last fitbit
    date (computed after the open bound has been applied to both frames).
    Both frames are reset-indexed and their 'Date' columns parsed to
    datetimes before filtering.
    """
    mood = df_mood.reset_index()
    fitbit = df_fitbit.reset_index()
    for frame in (mood, fitbit):
        frame['Date'] = pd.to_datetime(frame['Date'])
    window_open = mood['Date'].min()
    print('data start_date =', window_open)
    mood = mood[mood['Date'] >= window_open]
    fitbit = fitbit[fitbit['Date'] >= window_open]
    window_close = fitbit['Date'].max()
    print('data stop_date =', window_close)
    mood = mood[mood['Date'] <= window_close]
    fitbit = fitbit[fitbit['Date'] <= window_close]
    return mood, fitbit
def fit_with_missing_data(participant, length, b_fill_daily, b_classify, df_data1, df_data2,
                          x_names, y_name, y_lim, b_summary, b_show):
    """Fit model_impute (regression or logistic regression) of
    df_data1[y_name] on df_data2[x_names], imputing missing predictors
    either up-front (b_fill_daily) or as latent variables during MCMC.
    Returns the posterior samples, or [] when no data overlaps in time.
    """
    start_time = timeit.default_timer()
    df_data2, df_data1 = align_start_stop_date(df_data2, df_data1)
    if (df_data2.shape[0] == 0):
        print('not enough data')
        return []
    b_standardize = False
    y_index = 'y_index'
    # Truncate the response frame to the first `length` rows and give each
    # calendar date an integer group index used for predictor lookup.
    df_data1 = df_data1[:length]
    df_data1[y_index] = df_data1.groupby(['Date']).ngroup()
    df_data1, data1, df_data2, _ = prepare_data(df_data1, df_data2, y_name, y_index, x_names,
                                                b_standardize, b_show=b_show)
    y_obs = df_data1[y_name].values
    mapping = {}
    if b_fill_daily:
        # Impute before inference: forward- then back-fill the predictors.
        df_data2[x_names] = df_data2[x_names].ffill().bfill()
        mapping = df_data2[x_names].to_dict()
    print('df_data1.shape =', df_data1.shape)
    print('df_data2.shape =', df_data2.shape)
    #Fit model
    title = get_title(participant, df_data1, df_data2, y_name, x_names, b_fill_daily, b_classify)
    print('%s start fitting %s...\n' % (datetime.now(), title))
    mcmc = MCMC(NUTS(model_impute), num_warmup=500, num_samples=1000)
    mcmc.run(random.PRNGKey(0), data1=data1, df_data2=df_data2, y_name=y_name, y_index=y_index,
             x_names=x_names, mapping=mapping, b_fill_daily=b_fill_daily, b_classify=b_classify, y_obs=y_obs)
    if b_summary:
        mcmc.print_summary()
    samples = mcmc.get_samples()
    save_build_time(title, start_time)
    #Posterior predictive distribution
    y_pred = Predictive(model_impute, samples)(random.PRNGKey(1), data1=data1, df_data2=df_data2, y_name=y_name, y_index=y_index,
                                               x_names=x_names, mapping=mapping, b_fill_daily=b_fill_daily, b_classify=b_classify)[y_name]
    if b_classify:
        # Majority vote over posterior predictive draws -> hard 0/1 labels.
        y_pred = (y_pred.mean(axis=0) >= 0.5).astype(jnp.uint8)
        print('accuracy =', (y_pred == y_obs).sum() / y_obs.shape[0])
        confusion_matrix = pd.crosstab(pd.Series(y_obs, name='actual'), pd.Series(y_pred, name='predict'))
        # NOTE(review): dividing a DataFrame by .sum(axis=1) aligns on column
        # labels, not rows; .div(..., axis=0) is likely what was intended for
        # row-normalization -- confirm (the print below is commented out).
        confusion_matrix = confusion_matrix / confusion_matrix.sum(axis=1)
        #print('\n', confusion_matrix)
    else:
        # Posterior predictive mean as the point prediction.
        y_pred = y_pred.mean(axis=0).reshape(-1)
        print('prediction shape =', y_pred.shape)
        print('mean absolute error =', round(sm.mean_absolute_error(y_obs, y_pred), 2))
        print('mean squared error =', round(sm.mean_squared_error(y_obs, y_pred), 2))
        print('R2 score =', round(sm.r2_score(y_obs, y_pred), 2))
    if (not b_standardize):
        x_lim = None
        plot_data_regression_ci(title, samples, df_data1, df_data2, y_name, x_names[0], x_lim, y_lim, b_show)
    print('\n\n')
    return samples
def get_AR_predictions(y_obs, y_test, parameters, window):
    """Walk-forward one-step AR forecasts over y_test.

    parameters[0] is the intercept; parameters[1:] are lag coefficients
    (parameters[d+1] multiplies the d-th most recent value). After each
    prediction the *observed* value is appended to the history, so every
    forecast is one-step-ahead. Returns the list of predictions.
    """
    history = list(y_obs[len(y_obs) - window:])
    predictions = []
    for obs in y_test:
        lags = history[len(history) - window:]
        y_predict = parameters[0]
        for d in range(window):
            y_predict += parameters[d + 1] * lags[window - d - 1]
        predictions.append(y_predict)
        history.append(obs)
    return predictions
def fit_AR_model(df_data, length, n_forecast, window, K, b_show):
    """Fit model_AR on the first `length` rows, holding out the last
    n_forecast values for walk-forward evaluation; returns the samples.
    NOTE(review): the response column name `y_name` is read from module
    scope rather than passed in -- callers must set it first.
    """
    start_AR_mcmc = timeit.default_timer()
    df_data = df_data[:length]
    y_data = df_data[y_name].values
    # Train on everything up to the forecast horizon; the two slices
    # overlap by one point so the first test value has full lag context.
    y_obs, y_test = y_data[1:len(y_data)-n_forecast+1], y_data[len(y_data)-n_forecast:]
    #Fit model
    title = y_name + ' (AR' + str(K) + ' N=' + str(y_obs.shape[0]) + ')'
    print('\n%s, start fitting %s...' % (datetime.now(), title))
    # Build the lag design matrix: column p-1 holds y shifted by p steps
    # (zero-padded where no predecessor exists).
    y_matrix = jnp.zeros(shape=(len(y_obs)-1, K))
    for p in range(1,K+1):
        values = [y_obs[t-p] if (t >= p) else 0 for t in range(1, len(y_obs))]
        # NOTE(review): jax.ops.index_update was removed in newer JAX
        # (.at[:, p-1].set(values) is the replacement) -- confirm version.
        y_matrix = ops.index_update(y_matrix, ops.index[:, p-1], values)
    rng_key = random.PRNGKey(0)
    kernel = NUTS(model_AR)
    mcmc = MCMC(kernel, num_warmup=500, num_samples=1000, num_chains=1)
    mcmc.run(rng_key, y_obs=y_obs, K=K, y_matrix=y_matrix)
    #Display summary
    print('\nsummary for %s =' % title)
    mcmc.print_summary()
    samples = mcmc.get_samples()
    samples['sigma'] = jnp.exp(samples['log_sigma'])
    ss = samples['sigma']
    print('sigma mean = %.2f\tstd = %.2f\tmedian = %.2f\tQ5%% = %.2f\tQ95%% = %.2f' % (
        np.mean(ss), np.std(ss), np.median(ss), np.quantile(ss, 0.05, axis=0), np.quantile(ss, 0.95, axis=0)))
    # Posterior-mean coefficients for site 'b' (intercept + K lags).
    # NOTE(review): the k.find('b') >= 0 test matches any site whose name
    # contains 'b', not just 'b' itself -- fragile if sites are added.
    parameter_means = []
    for k,v in samples.items():
        if k.find('b') >= 0:
            for p in range(K+1):
                mean_values = (float)(v[:,p].mean())
                #print('%s[%d] -> %s -> %s\t-> %s' % (k, p, v[:,p].shape, mean_values, v[:,p][:3]))
                parameter_means.append(mean_values)
    y_pred = get_AR_predictions(y_obs, y_test, parameter_means, window)
    print('test mean squared error =', round(sm.mean_squared_error(y_test, y_pred), 2))
    save_build_time(title, start_AR_mcmc)
    # plot
    plot_data_AR(title, y_test, y_pred, n_forecast, b_show)
    print('\n')
    return samples
if __name__ == '__main__':
    # Earlier analyses (kept for reference in the quoted blocks below)
    # loaded these daily / 10-minute csv files:
    #filename1 = 'df_mood_fitbit_daily.csv' #Replace with desired csv file
    #filename2 = 'df_imputed_105_10Min.csv' #Replace with desired csv file
    #chosen_df1 = build_df(filename1)
    #chosen_df2 = build_df(filename2)
    #print('chosen_df1.shape =', chosen_df1.shape)
    #print('chosen_df2.shape =', chosen_df2.shape)
    #print()
    b_show = False
    n_repeats = 1 # tested 1, 5
    for repeat in range(n_repeats):
        '''
        #Analysis with Daily Metrics Data
        x_names = ['Committed', 'Busy', 'Rested']
        y_name = 'Fitbit Step Count'
        mcmcs = fit_simple_regression_model_numpyro(chosen_df1 , y_name, x_names, y_lim=(-5000, 35000), x_lim=(0.5, 5.5),
                                                    b_show=b_show)
        mcmc = fit_regression_model_numpyro(chosen_df1 , y_name, x_names, b_show=b_show)
        print('build_time numpyro (repeat=%d) = %s\n\n\n' % (repeat, dict(build_time)))
        pd.DataFrame.from_dict(data=build_time, orient='index').to_csv('build_time_numpyro.csv', header=False)
        #Analysis with Fitbit Data Per Minute
        participants = ['105']
        y_name = 'steps'
        x_names = ['Committed', 'Busy', 'Rested']
        mcmcs = fit_simple_regression_model_numpyro(chosen_df2, y_name, x_names, b_show=b_show)
        mcmc = fit_regression_model_numpyro(chosen_df2, y_name, x_names, b_show=b_show)
        print('build_time numpyro (repeat=%d) = %s\n\n\n' % (repeat, dict(build_time)))
        pd.DataFrame.from_dict(data=build_time, orient='index').to_csv('build_time_numpyro.csv', header=False)
        '''
        '''
        #Analysis using fit with missing data
        filename = 'build_time_numpyro_impute.csv'
        participants = [105] # tested [105, 69, 80]
        for participant in participants:
            participant = str(participant)
            filename3a = 'df_' + participant + '_fitbit_per_minute.csv' #Replace with desired csv file
            filename3b = 'df_' + participant + '_moods.csv' #Replace with desired csv file
            chosen_df3a = build_df(filename3a, b_set_index=False, b_drop=False)
            chosen_df3b = build_df(filename3b, b_set_index=False, b_drop=False)
            print('participant =', participant)
            print('chosen_df3a.shape =', chosen_df3a.shape)
            print('chosen_df3b.shape =', chosen_df3b.shape)
            print()
            #Inference with missing data
            #Set b_fill_daily to True: impute daily before inference
            #Set b_fill_daily to False: impute while performing inference
            b_fill_daily_list = [True, False]
            for b_fill_daily in b_fill_daily_list:
                print('b_fill_daily (forward fill before inference) =', b_fill_daily)
                b_summary = b_fill_daily
                x_names = ['Committed', 'Busy', 'Rested']
                y_name = 'steps'
                y_lim = None
                chosen_df3a = chosen_df3a.dropna(subset=[y_name])
                for col in list(chosen_df3b.columns):
                    print('nan', chosen_df3b[col].isna().sum(), '\t->', col)
                print()
                lengths = [1000] # [100000, 150000, 300000]
                for length in lengths:
                    df_data1 = chosen_df3a.copy() # fitbit
                    df_data2 = chosen_df3b.copy() # mood
                    #Perform regression
                    b_classify = False
                    samples = fit_with_missing_data(participant, length, b_fill_daily, b_classify, df_data1, df_data2,
                                                    x_names, y_name, y_lim, b_summary, b_show=b_show)
                    pd.DataFrame.from_dict(data=build_time, orient='index').to_csv(filename, header=False)
                    #Perform logistic regression
                    b_classify = True
                    if b_classify == True: #Convert steps to binary: 1 if steps > 0, else 0
                        df_data1[y_name] = df_data1[y_name].apply(lambda x: 1 if x > 0 else 0)
                    samples = fit_with_missing_data(participant, length, b_fill_daily, b_classify, df_data1, df_data2,
                                                    x_names, y_name, y_lim, b_summary, b_show=b_show)
                    pd.DataFrame.from_dict(data=build_time, orient='index').to_csv(filename, header=False)
        '''
        #'''
        #Analysis using AR model
        duration_filename = 'build_time_numpyro_AR.csv'
        #data_filename = 'df_105_fitbit_per_minute.csv'
        data_filename = 'fitbit_per_minute.csv'
        df_data = build_df(data_filename, b_set_index=False, b_drop=False)
        df_data = df_data.rename(columns={'date' : 'Date'})
        # y_name is read as a module-level global by fit_AR_model below.
        y_name = 'steps' # 'heart_rate' # 'steps'
        K = 10
        window = K
        n_forecast = 200
        print('columns =', list(df_data.columns))
        print('na =\n', pd.isna(df_data).sum())
        print('original shape =', df_data.shape)
        print('K =', K)
        print('window =', window)
        print('forecast =', n_forecast)
        train_lengths = [50000] # tested [5000, 50000, 100000, 1000000...]
        for train_length in train_lengths:
            length = train_length + n_forecast
            df_data = df_data.dropna()
            df_data = df_data[['Date', 'steps', 'heart_rate']]
            samples = fit_AR_model(df_data, length, n_forecast, window, K, b_show)
            pd.DataFrame.from_dict(data=build_time, orient='index').to_csv(duration_filename, header=False)
        #'''
    print('finished!')
| 44.596806 | 129 | 0.616345 |
400c98ed9a0c52aee87df76ef3bbf1b3901ec2d8
| 2,214 |
py
|
Python
|
reagent/training/__init__.py
|
ojaswa-privado/ReAgent
|
e990e66f69369cbe89212e334191180716c9bf4e
|
[
"BSD-3-Clause"
] | 2 |
2021-10-31T01:05:46.000Z
|
2021-11-08T09:43:25.000Z
|
reagent/training/__init__.py
|
ojaswa-privado/ReAgent
|
e990e66f69369cbe89212e334191180716c9bf4e
|
[
"BSD-3-Clause"
] | null | null | null |
reagent/training/__init__.py
|
ojaswa-privado/ReAgent
|
e990e66f69369cbe89212e334191180716c9bf4e
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
from reagent.training.c51_trainer import C51Trainer
from reagent.training.cem_trainer import CEMTrainer
from reagent.training.discrete_crr_trainer import DiscreteCRRTrainer
from reagent.training.dqn_trainer import DQNTrainer
from reagent.training.parametric_dqn_trainer import ParametricDQNTrainer
from reagent.training.ppo_trainer import PPOTrainer
from reagent.training.qrdqn_trainer import QRDQNTrainer
from reagent.training.reagent_lightning_module import (
ReAgentLightningModule,
StoppingEpochCallback,
)
from reagent.training.reinforce_trainer import ReinforceTrainer
from reagent.training.reward_network_trainer import RewardNetTrainer
from reagent.training.rl_trainer_pytorch import RLTrainer
from reagent.training.sac_trainer import SACTrainer
from reagent.training.slate_q_trainer import SlateQTrainer
from reagent.training.td3_trainer import TD3Trainer
from reagent.training.trainer import Trainer
from reagent.training.world_model.mdnrnn_trainer import MDNRNNTrainer
from .parameters import (
C51TrainerParameters,
DQNTrainerParameters,
ParametricDQNTrainerParameters,
QRDQNTrainerParameters,
RewardNetworkTrainerParameters,
SACTrainerParameters,
Seq2SlateTrainerParameters,
SlateQTrainerParameters,
TD3TrainerParameters,
CRRTrainerParameters,
ReinforceTrainerParameters,
PPOTrainerParameters,
)
# Explicit public API of reagent.training: the trainer classes, their
# parameter dataclasses, and the lightning-module plumbing imported above.
__all__ = [
    "C51Trainer",
    "CEMTrainer",
    "RLTrainer",
    "DQNTrainer",
    "MDNRNNTrainer",
    "ParametricDQNTrainer",
    "QRDQNTrainer",
    "SACTrainer",
    "SlateQTrainer",
    "TD3Trainer",
    "DiscreteCRRTrainer",
    "RewardNetTrainer",
    "C51TrainerParameters",
    "DQNTrainerParameters",
    "ParametricDQNTrainerParameters",
    "QRDQNTrainerParameters",
    "SACTrainerParameters",
    "SlateQTrainerParameters",
    "TD3TrainerParameters",
    "CRRTrainerParameters",
    "RewardNetworkTrainerParameters",
    "Seq2SlateTrainerParameters",
    "ReAgentLightningModule",
    "StoppingEpochCallback",
    "Trainer",
    "ReinforceTrainer",
    "ReinforceTrainerParameters",
    "PPOTrainer",
    "PPOTrainerParameters",
]
| 31.183099 | 72 | 0.792231 |
994555c5ca03f628eab43f1b4190e3385a247df0
| 1,780 |
py
|
Python
|
minipj1/main.py
|
huangluyang001/EC601-project
|
8d09bbb8987fd2004a8cae053d0d0f6f3796b827
|
[
"MIT"
] | 1 |
2018-09-17T21:44:06.000Z
|
2018-09-17T21:44:06.000Z
|
minipj1/main.py
|
huangluyang001/EC601-project
|
8d09bbb8987fd2004a8cae053d0d0f6f3796b827
|
[
"MIT"
] | 2 |
2018-09-19T05:08:41.000Z
|
2018-10-13T03:04:18.000Z
|
minipj1/main.py
|
huangluyang001/EC601-project
|
8d09bbb8987fd2004a8cae053d0d0f6f3796b827
|
[
"MIT"
] | 1 |
2018-09-19T04:55:34.000Z
|
2018-09-19T04:55:34.000Z
|
import MakeVideo, tweet, visiondetection
import argparse
import logging
#from socketserver import ForkingMixIn
if __name__ == '__main__':
    #logging.basicConfig(level='INFO')
    parser = argparse.ArgumentParser()
    parser.add_argument('-r','--rate',required=False,type=float,default=1,help='number of pictures per minute, default=1')
    parser.add_argument('-o','--output',required=False,type=str,default='final.mkv',help='output filename and directory, should be .mkv default=final.mkv')
    parser.add_argument('-k','--keyword',required=False,type=str,default='jerry',help='search keyword default=messi')
    parser.add_argument('-c','--count',required=False, type=int,default=200,help='number of homline in each user, no larger than 20')
    parser.add_argument('-n',required=False,type=int,default=10,help='number of users to get from the same keyword, no larger than 200')
    # NOTE(review): type=bool treats ANY non-empty string as True (so
    # "-m False" is still True); action='store_true' is probably intended.
    # Left unchanged to keep the existing CLI contract -- confirm usage.
    parser.add_argument('-m',type=bool,required=False,help='grap from my homeline')
    args = parser.parse_args()
    # argparse has already applied each argument's type= and default=, so
    # the previous bare `try: ... except: ...` fallbacks were dead code that
    # could only mask real errors; read the parsed values directly.
    rate = float(args.rate)
    output = args.output
    keyword = args.keyword
    count = int(args.count)
    num = int(args.n)
    #logging.info(print(rate, output,keyword,str(count),str(num)))
    twitter = tweet.GetJpgFromTweet()
    vd = visiondetection.VisionDetction()
    mv = MakeVideo.MakeVideo()
    if args.m:
        # Grab pictures from the authenticated user's own home timeline.
        twitter.FromMyHome(count=count)
    else:
        # Otherwise search users by keyword and collect their pictures.
        user_info, pic_counts = twitter.FromSpecificUser(keyword=keyword,count=count, numofuser=num)
        print(user_info, pic_counts)
    label_dict = vd.GenerateTypes()
    vd.MakeSrc(label_dict, rate=rate)
    mv.makevideo(rate=rate, output_dir=output)
| 44.5 | 155 | 0.712921 |
3a161eafeeb4ea0ac37f1573bf90b2400626b212
| 912 |
py
|
Python
|
backend/questions/migrations/0001_initial.py
|
Stonedch/web-privollesru
|
44f5656c57457ba00cc33f770d57823edacc1ce2
|
[
"MIT"
] | null | null | null |
backend/questions/migrations/0001_initial.py
|
Stonedch/web-privollesru
|
44f5656c57457ba00cc33f770d57823edacc1ce2
|
[
"MIT"
] | null | null | null |
backend/questions/migrations/0001_initial.py
|
Stonedch/web-privollesru
|
44f5656c57457ba00cc33f770d57823edacc1ce2
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.4 on 2022-05-27 12:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema migration: creates the questions.Question table."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.TextField(max_length=512, verbose_name='title')),
                ('body', models.TextField(max_length=512, verbose_name='body')),
                # Timestamps maintained automatically on insert / every save.
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='updated')),
            ],
            options={
                'verbose_name': 'question',
                'verbose_name_plural': 'questions',
            },
        ),
    ]
| 31.448276 | 117 | 0.573465 |
4b53b178317c3bf32852f6a7008604e80e665bb6
| 1,026 |
py
|
Python
|
tests/01_arrays/test_arrays_compile.py
|
krystophny/pytchfort
|
e47c7bffa2df488d7adf65300d37167f01a90a60
|
[
"MIT"
] | 6 |
2019-07-25T11:04:32.000Z
|
2021-11-02T20:32:13.000Z
|
tests/01_arrays/test_arrays_compile.py
|
krystophny/pytchfort
|
e47c7bffa2df488d7adf65300d37167f01a90a60
|
[
"MIT"
] | 4 |
2019-07-31T10:08:21.000Z
|
2020-11-14T23:55:49.000Z
|
tests/01_arrays/test_arrays_compile.py
|
krystophny/pytchfort
|
e47c7bffa2df488d7adf65300d37167f01a90a60
|
[
"MIT"
] | 1 |
2019-05-09T12:11:53.000Z
|
2019-05-09T12:11:53.000Z
|
"""
Created: 2019-04-01
@author: Christopher Albert <[email protected]>
Compiles CFF for test_arrays
"""
import os
from shutil import copy
from fffi import FortranModule
def test_compile(tmp_path):
    """Copy the Fortran sources into pytest's tmp_path, build them with
    make, then compile the fffi interface for the test_arrays module."""
    cwd = os.path.dirname(__file__)
    copy(os.path.join(cwd, 'Makefile'), tmp_path)
    copy(os.path.join(cwd, 'mod_arrays.f90'), tmp_path)
    copy(os.path.join(cwd, 'test_arrays.f90'), tmp_path)
    os.mkdir(os.path.join(tmp_path, 'static'))
    os.mkdir(os.path.join(tmp_path, 'shared'))
    os.chdir(tmp_path)
    # NOTE(review): os.system's exit status is ignored; a failed build only
    # surfaces later in mod_arrays.compile -- consider asserting it is 0.
    os.system('make')
    # Initialize
    mod_arrays = FortranModule('test_arrays', 'mod_arrays', path=tmp_path)
    # This will use fortran_module.fdef instead of cdef in the future.
    # Ideally, the actual Fortran source file would be parsed as an
    # option instead of code containing only the signatures.
    mod_arrays.cdef("""
    void {mod}_test_vector(array_1d *vec);
    void {mod}_test_array_2d(array_2d *arr);
    """)
    mod_arrays.compile(tmpdir=tmp_path, verbose=True)
| 27.72973 | 74 | 0.69883 |
033f44984b551a97b8e8461f4f4b474869842ffe
| 244 |
py
|
Python
|
pandas/tests/exchange/conftest.py
|
AdrianMastronardi/pandas
|
67045903306ac4a1cab108177e92df30d99912b4
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
pandas/tests/exchange/conftest.py
|
AdrianMastronardi/pandas
|
67045903306ac4a1cab108177e92df30d99912b4
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
pandas/tests/exchange/conftest.py
|
AdrianMastronardi/pandas
|
67045903306ac4a1cab108177e92df30d99912b4
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
import pytest
import pandas as pd
@pytest.fixture(scope="package")
def df_from_dict():
    """Factory fixture: builds a DataFrame from a dict, optionally cast to
    a categorical dtype when is_categorical=True."""
    def maker(dct, is_categorical=False):
        df = pd.DataFrame(dct)
        return df.astype("category") if is_categorical else df
    return maker
| 18.769231 | 62 | 0.692623 |
dcc0417c138484b9d3f589ea19f75dacf4be6122
| 15,839 |
py
|
Python
|
sdk/python/arvados/util.py
|
coolmaksat/arvados
|
5f571760d4b52426e39ae39d0ce5cb9b7cfb0add
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/arvados/util.py
|
coolmaksat/arvados
|
5f571760d4b52426e39ae39d0ce5cb9b7cfb0add
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/arvados/util.py
|
coolmaksat/arvados
|
5f571760d4b52426e39ae39d0ce5cb9b7cfb0add
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
from __future__ import division
from builtins import range
import errno
import fcntl
import hashlib
import os
import random
import re
import shutil
import subprocess
import sys

import httplib2

import arvados
from arvados.collection import CollectionReader
# Hex-only string (used to sanity-check checksums).
HEX_RE = re.compile(r'^[0-9a-fA-F]+$')
# Container-request state names.
CR_UNCOMMITTED = 'Uncommitted'
CR_COMMITTED = 'Committed'
CR_FINAL = 'Final'
# Keep locator: 32-hex digest + size, plus any number of +hint tokens.
keep_locator_pattern = re.compile(r'[0-9a-f]{32}\+\d+(\+\S+)*')
# Keep locator that additionally carries a +A... token (signature hint).
signed_locator_pattern = re.compile(r'[0-9a-f]{32}\+\d+(\+\S+)*\+A\S+(\+\S+)*')
# Bare portable data hash: 32-hex digest + size only.
portable_data_hash_pattern = re.compile(r'[0-9a-f]{32}\+\d+')
# 5-5-15 style UUIDs; the middle group encodes the object type.
uuid_pattern = re.compile(r'[a-z0-9]{5}-[a-z0-9]{5}-[a-z0-9]{15}')
collection_uuid_pattern = re.compile(r'[a-z0-9]{5}-4zz18-[a-z0-9]{15}')
group_uuid_pattern = re.compile(r'[a-z0-9]{5}-j7d0g-[a-z0-9]{15}')
user_uuid_pattern = re.compile(r'[a-z0-9]{5}-tpzed-[a-z0-9]{15}')
link_uuid_pattern = re.compile(r'[a-z0-9]{5}-o0j2j-[a-z0-9]{15}')
job_uuid_pattern = re.compile(r'[a-z0-9]{5}-8i9sb-[a-z0-9]{15}')
container_uuid_pattern = re.compile(r'[a-z0-9]{5}-dz642-[a-z0-9]{15}')
# One manifest line: stream name, 1+ block locators, 1+ pos:size:name tokens.
manifest_pattern = re.compile(r'((\S+)( +[a-f0-9]{32}(\+\d+)(\+\S+)*)+( +\d+:\d+:\S+)+$)+', flags=re.MULTILINE)
def clear_tmpdir(path=None):
    """
    Ensure the given directory (or TASK_TMPDIR if none given)
    exists and is empty.
    """
    if path is None:
        path = arvados.current_task().tmpdir
    if os.path.exists(path):
        # Remove in-process instead of shelling out to `rm -rf`.  The old
        # subprocess version formatted stderr into its error message but
        # never piped it, so the message was always "None"; shutil.rmtree
        # raises a meaningful OSError instead.
        shutil.rmtree(path)
    os.mkdir(path)
def run_command(execargs, **kwargs):
    """Run a subprocess and return its (stdout, stderr) output.

    By default stdin/stdout are pipes, stderr is inherited from this
    process, and the command is run without a shell.  Raises
    arvados.errors.CommandFailedError on a non-zero exit status.
    """
    defaults = {
        'stdin': subprocess.PIPE,
        'stdout': subprocess.PIPE,
        'stderr': sys.stderr,
        'close_fds': True,
        'shell': False,
    }
    for option, value in defaults.items():
        kwargs.setdefault(option, value)
    proc = subprocess.Popen(execargs, **kwargs)
    out, err = proc.communicate(None)
    if proc.returncode != 0:
        raise arvados.errors.CommandFailedError(
            "run_command %s exit %d:\n%s" %
            (execargs, proc.returncode, err))
    return out, err
def git_checkout(url, version, path):
    """Clone *url* into *path* (relative paths resolve under the current
    job's tmpdir) unless it already exists, then check out *version*.
    Returns the absolute checkout path."""
    if not re.search('^/', path):
        path = os.path.join(arvados.current_job().tmpdir, path)
    if not os.path.exists(path):
        run_command(["git", "clone", url, path],
                    cwd=os.path.dirname(path))
    run_command(["git", "checkout", version],
                cwd=path)
    return path
def tar_extractor(path, decompress_flag):
    """Spawn a `tar` process that extracts an archive streamed to its
    stdin into *path*.

    decompress_flag -- '' for plain tar, 'j' for bzip2, 'z' for gzip.
    Returns the Popen object; the caller writes the archive bytes to
    .stdin and waits for completion.
    """
    tar_args = ["tar", "-C", path, "-x%sf" % decompress_flag, "-"]
    return subprocess.Popen(tar_args,
                            stdout=None,
                            stdin=subprocess.PIPE, stderr=sys.stderr,
                            shell=False, close_fds=True)
def tarball_extract(tarball, path):
    """Retrieve a tarball from Keep and extract it to a local
    directory. Return the absolute path where the tarball was
    extracted. If the top level of the tarball contained just one
    file or directory, return the absolute path of that single
    item.
    tarball -- collection locator
    path -- where to extract the tarball: absolute, or relative to job tmp
    """
    if not re.search('^/', path):
        path = os.path.join(arvados.current_job().tmpdir, path)
    # Serialize concurrent extractions into the same target directory.
    lockfile = open(path + '.lock', 'w')
    fcntl.flock(lockfile, fcntl.LOCK_EX)
    try:
        os.stat(path)
    except OSError:
        os.mkdir(path)
    # '.locator' is a symlink recording which tarball was last extracted
    # here; when it matches, the cached extraction is reused as-is.
    already_have_it = False
    try:
        if os.readlink(os.path.join(path, '.locator')) == tarball:
            already_have_it = True
    except OSError:
        pass
    if not already_have_it:
        # emulate "rm -f" (i.e., if the file does not exist, we win)
        try:
            os.unlink(os.path.join(path, '.locator'))
        except OSError:
            if os.path.exists(os.path.join(path, '.locator')):
                os.unlink(os.path.join(path, '.locator'))
        for f in CollectionReader(tarball).all_files():
            # Choose tar's decompression flag from the file extension.
            if re.search('\.(tbz|tar.bz2)$', f.name()):
                p = tar_extractor(path, 'j')
            elif re.search('\.(tgz|tar.gz)$', f.name()):
                p = tar_extractor(path, 'z')
            elif re.search('\.tar$', f.name()):
                p = tar_extractor(path, '')
            else:
                raise arvados.errors.AssertionError(
                    "tarball_extract cannot handle filename %s" % f.name())
            # Stream the archive from Keep into tar's stdin in 1 MiB chunks.
            while True:
                buf = f.read(2**20)
                if len(buf) == 0:
                    break
                p.stdin.write(buf)
            p.stdin.close()
            p.wait()
            if p.returncode != 0:
                lockfile.close()
                raise arvados.errors.CommandFailedError(
                    "tar exited %d" % p.returncode)
        os.symlink(tarball, os.path.join(path, '.locator'))
    tld_extracts = [f for f in os.listdir(path) if f != '.locator']
    lockfile.close()
    if len(tld_extracts) == 1:
        return os.path.join(path, tld_extracts[0])
    return path
def zipball_extract(zipball, path):
    """Retrieve a zip archive from Keep and extract it to a local
    directory. Return the absolute path where the archive was
    extracted. If the top level of the archive contained just one
    file or directory, return the absolute path of that single
    item.
    zipball -- collection locator
    path -- where to extract the archive: absolute, or relative to job tmp
    """
    if not re.search('^/', path):
        path = os.path.join(arvados.current_job().tmpdir, path)
    # Serialize concurrent extractions into the same target directory.
    lockfile = open(path + '.lock', 'w')
    fcntl.flock(lockfile, fcntl.LOCK_EX)
    try:
        os.stat(path)
    except OSError:
        os.mkdir(path)
    # '.locator' is a symlink recording which zipball was last extracted
    # here; when it matches, the cached extraction is reused as-is.
    already_have_it = False
    try:
        if os.readlink(os.path.join(path, '.locator')) == zipball:
            already_have_it = True
    except OSError:
        pass
    if not already_have_it:
        # emulate "rm -f" (i.e., if the file does not exist, we win)
        try:
            os.unlink(os.path.join(path, '.locator'))
        except OSError:
            if os.path.exists(os.path.join(path, '.locator')):
                os.unlink(os.path.join(path, '.locator'))
        for f in CollectionReader(zipball).all_files():
            if not re.search('\.zip$', f.name()):
                raise arvados.errors.NotImplementedError(
                    "zipball_extract cannot handle filename %s" % f.name())
            # Spool the archive from Keep into a local file in 1 MiB
            # chunks, then extract it with `unzip` and delete the spool.
            zip_filename = os.path.join(path, os.path.basename(f.name()))
            zip_file = open(zip_filename, 'wb')
            while True:
                buf = f.read(2**20)
                if len(buf) == 0:
                    break
                zip_file.write(buf)
            zip_file.close()
            p = subprocess.Popen(["unzip",
                                  "-q", "-o",
                                  "-d", path,
                                  zip_filename],
                                 stdout=None,
                                 stdin=None, stderr=sys.stderr,
                                 shell=False, close_fds=True)
            p.wait()
            if p.returncode != 0:
                lockfile.close()
                raise arvados.errors.CommandFailedError(
                    "unzip exited %d" % p.returncode)
            os.unlink(zip_filename)
        os.symlink(zipball, os.path.join(path, '.locator'))
    tld_extracts = [f for f in os.listdir(path) if f != '.locator']
    lockfile.close()
    if len(tld_extracts) == 1:
        return os.path.join(path, tld_extracts[0])
    return path
def collection_extract(collection, path, files=[], decompress=True):
    """Retrieve a collection from Keep and extract it to a local
    directory. Return the absolute path where the collection was
    extracted.
    collection -- collection locator
    path -- where to extract: absolute, or relative to job tmp
    files -- optional list of file names to extract (default: all)
    decompress -- when True, write files under their decompressed names
    """
    # Use the bare hash part of a locator as the cache key; otherwise
    # derive one from the raw text.
    matches = re.search(r'^([0-9a-f]+)(\+[\w@]+)*$', collection)
    if matches:
        collection_hash = matches.group(1)
    else:
        collection_hash = hashlib.md5(collection).hexdigest()
    if not re.search('^/', path):
        path = os.path.join(arvados.current_job().tmpdir, path)
    # Serialize concurrent extractions into the same target directory.
    lockfile = open(path + '.lock', 'w')
    fcntl.flock(lockfile, fcntl.LOCK_EX)
    try:
        os.stat(path)
    except OSError:
        os.mkdir(path)
    already_have_it = False
    try:
        if os.readlink(os.path.join(path, '.locator')) == collection_hash:
            already_have_it = True
    except OSError:
        pass
    # NOTE(review): unlike tarball_extract/zipball_extract, already_have_it
    # is never consulted below -- files are re-checked individually instead
    # and '.locator' is always recreated.  Confirm this is intentional.
    # emulate "rm -f" (i.e., if the file does not exist, we win)
    try:
        os.unlink(os.path.join(path, '.locator'))
    except OSError:
        if os.path.exists(os.path.join(path, '.locator')):
            os.unlink(os.path.join(path, '.locator'))
    files_got = []
    for s in CollectionReader(collection).all_streams():
        stream_name = s.name()
        for f in s.all_files():
            # Extract f when no explicit list was given, or when it (or its
            # decompressed name) was requested and not yet produced.
            if (files == [] or
                ((f.name() not in files_got) and
                 (f.name() in files or
                  (decompress and f.decompressed_name() in files)))):
                outname = f.decompressed_name() if decompress else f.name()
                files_got += [outname]
                if os.path.exists(os.path.join(path, stream_name, outname)):
                    continue
                mkdir_dash_p(os.path.dirname(os.path.join(path, stream_name, outname)))
                outfile = open(os.path.join(path, stream_name, outname), 'wb')
                for buf in (f.readall_decompressed() if decompress
                            else f.readall()):
                    outfile.write(buf)
                outfile.close()
    if len(files_got) < len(files):
        raise arvados.errors.AssertionError(
            "Wanted files %s but only got %s from %s" %
            (files, files_got,
             [z.name() for z in CollectionReader(collection).all_files()]))
    os.symlink(collection_hash, os.path.join(path, '.locator'))
    lockfile.close()
    return path
def mkdir_dash_p(path):
    """Create *path* and any missing parent directories, like `mkdir -p`.

    A no-op when the directory already exists; concurrent creation by
    another process is tolerated.
    """
    if os.path.isdir(path):
        return
    try:
        os.makedirs(path)
    except OSError as e:
        # Another process may create the directory between our isdir()
        # check and makedirs(); that is not an error.
        if not (e.errno == errno.EEXIST and os.path.isdir(path)):
            raise
def stream_extract(stream, path, files=None, decompress=True):
    """Retrieve a stream from Keep and extract it to a local
    directory. Return the absolute path where the stream was
    extracted.

    stream -- StreamReader object
    path -- where to extract: absolute, or relative to job tmp
    files -- optional list of file names to extract; None or an empty
        list means "extract every file in the stream"
    decompress -- when True, write files under their decompressed names
    """
    # Fix: avoid the shared mutable-default-argument pitfall by using a
    # None sentinel and creating a fresh list per call.
    if files is None:
        files = []
    if not os.path.isabs(path):
        path = os.path.join(arvados.current_job().tmpdir, path)
    # Serialize extraction across concurrent tasks with an exclusive
    # advisory lock held next to the target directory.
    lockfile = open(path + '.lock', 'w')
    fcntl.flock(lockfile, fcntl.LOCK_EX)
    try:
        os.stat(path)
    except OSError:
        os.mkdir(path)
    files_got = []
    for f in stream.all_files():
        # Extract f when no filter is given, or when its (possibly
        # decompressed) name is requested and not yet extracted.
        if (files == [] or
            ((f.name() not in files_got) and
             (f.name() in files or
              (decompress and f.decompressed_name() in files)))):
            outname = f.decompressed_name() if decompress else f.name()
            files_got += [outname]
            if os.path.exists(os.path.join(path, outname)):
                os.unlink(os.path.join(path, outname))
            mkdir_dash_p(os.path.dirname(os.path.join(path, outname)))
            outfile = open(os.path.join(path, outname), 'wb')
            for buf in (f.readall_decompressed() if decompress
                        else f.readall()):
                outfile.write(buf)
            outfile.close()
    if len(files_got) < len(files):
        raise arvados.errors.AssertionError(
            "Wanted files %s but only got %s from %s" %
            (files, files_got, [z.name() for z in stream.all_files()]))
    lockfile.close()
    return path
def listdir_recursive(dirname, base=None, max_depth=None):
    """Return a sorted recursive listing of names found under *dirname*.

    If base is not None, each returned name is prefixed with "{base}/".
    With max_depth=None, descend into every directory and return only
    file names.  With a non-negative integer max_depth, stop descending
    at that depth and return directory names at the cutoff instead.
    max_depth=0 with base=None is equivalent to sorted(os.listdir(dirname)).
    """
    found = []
    for entry in sorted(os.listdir(dirname)):
        full_path = os.path.join(dirname, entry)
        prefixed = os.path.join(base, entry) if base else entry
        if max_depth != 0 and os.path.isdir(full_path):
            remaining = max_depth - 1 if max_depth else None
            found.extend(listdir_recursive(full_path, base=prefixed,
                                           max_depth=remaining))
        else:
            found.append(prefixed)
    return found
def is_hex(s, *length_args):
    """is_hex(s[, length[, max_length]]) -> boolean

    Return True if s is a string of hexadecimal digits.
    With one length argument, s must contain exactly that many digits.
    With two, len(s) must fall between the two lengths, inclusive.
    Return False otherwise.
    """
    n_args = len(length_args)
    if n_args > 2:
        raise arvados.errors.ArgumentError(
            "is_hex accepts up to 3 arguments ({} given)".format(1 + n_args))
    if n_args == 2:
        lo, hi = length_args
        good_len = lo <= len(s) <= hi
    elif n_args == 1:
        good_len = len(s) == length_args[0]
    else:
        good_len = True
    return bool(good_len and HEX_RE.match(s))
def list_all(fn, num_retries=0, **kwargs):
    """Call paged API method *fn* repeatedly and return every item.

    Pages through results using the offset/items_available fields of
    each response until all available items have been collected.
    """
    # Default limit to (effectively) api server's MAX_LIMIT
    kwargs.setdefault('limit', sys.maxsize)
    collected = []
    available = sys.maxsize
    next_offset = 0
    while len(collected) < available:
        page = fn(offset=next_offset, **kwargs).execute(num_retries=num_retries)
        collected.extend(page['items'])
        available = page['items_available']
        next_offset = page['offset'] + len(page['items'])
    return collected
def ca_certs_path(fallback=httplib2.CA_CERTS):
    """Return the path of the best available CA certs source.

    Searches the known distribution locations for a CA bundle and
    returns the first that exists; if none do, returns `fallback`
    (httplib2's bundled CA certs by default).
    """
    candidates = (
        '/etc/arvados/ca-certificates.crt',    # Arvados specific
        '/etc/ssl/certs/ca-certificates.crt',  # Debian
        '/etc/pki/tls/certs/ca-bundle.crt',    # Red Hat
    )
    for candidate in candidates:
        if os.path.exists(candidate):
            return candidate
    return fallback
def new_request_id():
    """Return a fresh request ID: "req-" plus 20 random base-36 chars."""
    alphabet = '0123456789abcdefghijklmnopqrstuvwxyz'
    # 2**104 > 36**20 > 2**103, so 104 random bits are enough to fill
    # 20 base-36 digits.
    bits = random.getrandbits(104)
    suffix = []
    for _ in range(20):
        bits, digit = divmod(bits, 36)
        suffix.append(alphabet[digit])
    return "req-" + "".join(suffix)
def get_config_once(svc):
    """Return the cluster config exported by API client *svc*, memoized.

    Returns {} when the server is too old to expose a config export
    endpoint.  The fetched config is cached on the client object itself.
    """
    resources = svc._rootDesc.get('resources')
    if not resources.get('configs', False):
        # Old API server version, no config export endpoint.
        return {}
    if not hasattr(svc, '_cached_config'):
        svc._cached_config = svc.configs().get().execute()
    return svc._cached_config
| 36.834884 | 111 | 0.585012 |
33c6ff53d96403b0e10f0fbd5d431644e6df71e6
| 4,473 |
py
|
Python
|
libs/html5lib/treebuilders/__init__.py
|
bbondy/brianbondy.gae
|
5c189e5d8f1ee0fdc77ab48c21f3da2c9e3f246c
|
[
"MIT"
] | null | null | null |
libs/html5lib/treebuilders/__init__.py
|
bbondy/brianbondy.gae
|
5c189e5d8f1ee0fdc77ab48c21f3da2c9e3f246c
|
[
"MIT"
] | null | null | null |
libs/html5lib/treebuilders/__init__.py
|
bbondy/brianbondy.gae
|
5c189e5d8f1ee0fdc77ab48c21f3da2c9e3f246c
|
[
"MIT"
] | null | null | null |
"""A collection of modules for building different kinds of tree from
HTML documents.
To create a treebuilder for a new type of tree, you need to
implement several things:
1) A set of classes for various types of elements: Document, Doctype,
Comment, Element. These must implement the interface of
_base.treebuilders.Node (although comment nodes have a different
signature for their constructor, see treebuilders.simpletree.Comment)
Textual content may also be implemented as another node type, or not, as
your tree implementation requires.
2) A treebuilder object (called TreeBuilder by convention) that
inherits from treebuilders._base.TreeBuilder. This has 4 required attributes:
documentClass - the class to use for the bottommost node of a document
elementClass - the class to use for HTML Elements
commentClass - the class to use for comments
doctypeClass - the class to use for doctypes
It also has one required method:
getDocument - Returns the root node of the complete document tree
3) If you wish to run the unit tests, you must also create a
testSerializer method on your treebuilder which accepts a node and
returns a string containing Node and its children serialized according
to the format used in the unittests
The supplied simpletree module provides a python-only implementation
of a full treebuilder and is a useful reference for the semantics of
the various methods.
"""
# Cache of treeType name -> TreeBuilder class for types that are safe to
# cache at this level ("dom" and "etree" cache in their own submodules).
treeBuilderCache = {}

def getTreeBuilder(treeType, implementation=None, **kwargs):
    """Get a TreeBuilder class for various types of tree with built-in support

    treeType - the name of the tree type required (case-insensitive). Supported
               values are "simpletree", "dom", "etree" and "beautifulsoup"

               "simpletree" - a built-in DOM-ish tree type with support for some
                              more pythonic idioms.
               "dom" - A generic builder for DOM implementations, defaulting to
                       a xml.dom.minidom based implementation for the sake of
                       backwards compatibility (as releases up until 0.10 had a
                       builder called "dom" that was a minidom implementation).
               "etree" - A generic builder for tree implementations exposing an
                         elementtree-like interface (known to work with
                         ElementTree, cElementTree and lxml.etree).
               "beautifulsoup" - Beautiful soup (if installed)

    implementation - (Currently applies to the "etree" and "dom" tree types). A
                     module implementing the tree type e.g.
                     xml.etree.ElementTree or lxml.etree.

    Returns None for an unrecognized treeType.
    """
    treeType = treeType.lower()
    if treeType not in treeBuilderCache:
        if treeType == "dom":
            import dom
            # XXX: Keep backwards compatibility by using minidom if no implementation is given
            if implementation is None:  # fixed: identity check, not "== None"
                from xml.dom import minidom
                implementation = minidom
            # XXX: NEVER cache here, caching is done in the dom submodule
            return dom.getDomModule(implementation, **kwargs).TreeBuilder
        elif treeType == "simpletree":
            import simpletree
            treeBuilderCache[treeType] = simpletree.TreeBuilder
        elif treeType == "beautifulsoup":
            import soup
            treeBuilderCache[treeType] = soup.TreeBuilder
        elif treeType == "lxml":
            import etree_lxml
            treeBuilderCache[treeType] = etree_lxml.TreeBuilder
        elif treeType == "etree":
            # Come up with a sane default ElementTree implementation,
            # preferring the C-accelerated variants where available.
            if implementation is None:  # fixed: identity check, not "== None"
                try:
                    import xml.etree.cElementTree as ET
                except ImportError:
                    try:
                        import xml.etree.ElementTree as ET
                    except ImportError:
                        try:
                            import cElementTree as ET
                        except ImportError:
                            import elementtree.ElementTree as ET
                implementation = ET
            import etree
            # XXX: NEVER cache here, caching is done in the etree submodule
            return etree.getETreeModule(implementation, **kwargs).TreeBuilder
    return treeBuilderCache.get(treeType)
| 48.096774 | 95 | 0.632685 |
4d7ca33fb37042716c0da32c6f01744509d4edae
| 1,527 |
py
|
Python
|
src/rprblender/properties/camera.py
|
Toorero/RadeonProRenderBlenderAddon
|
46be6f11cfb90e1bc40180213862eb9d8b623c5f
|
[
"Apache-2.0"
] | 1 |
2021-03-29T05:55:49.000Z
|
2021-03-29T05:55:49.000Z
|
src/rprblender/properties/camera.py
|
wojtuss/RadeonProRenderBlenderAddon
|
5ce2b5b61de8b6b802e58756f823c662808cf08a
|
[
"Apache-2.0"
] | 1 |
2021-04-03T09:39:28.000Z
|
2021-04-03T09:39:28.000Z
|
src/rprblender/properties/camera.py
|
wojtuss/RadeonProRenderBlenderAddon
|
5ce2b5b61de8b6b802e58756f823c662808cf08a
|
[
"Apache-2.0"
] | null | null | null |
#**********************************************************************
# Copyright 2020 Advanced Micro Devices, Inc
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#********************************************************************
import bpy
from bpy.props import (
PointerProperty,
FloatProperty,
BoolProperty,
)
from . import RPR_Properties
from rprblender.utils import logging
# Module-level logger tagged for this properties module.
log = logging.Log(tag='properties.camera')

class RPR_CameraProperties(RPR_Properties):
    """Per-camera RPR settings, attached to bpy.types.Camera as ``.rpr``."""

    # Motion blur exposure factor; the description string marks it as the
    # camera motion blur exposure.  0.0 is the hard minimum; 1.0 is only a
    # soft max, so the UI slider stops there but larger values are allowed.
    motion_blur_exposure: FloatProperty(
        name="Exposure",
        description="Camera motion blur exposure",
        min=0.0, soft_max = 1.0,
        default=1.0,
    )

    @classmethod
    def register(cls):
        # Attach this property group to Blender's Camera datablock type.
        log("Register")
        bpy.types.Camera.rpr = PointerProperty(
            name="RPR Camera Settings",
            description="RPR Camera settings",
            type=cls,
        )

    @classmethod
    def unregister(cls):
        # Detach the property group from Blender's Camera datablock type.
        log("Unregister")
        del bpy.types.Camera.rpr
| 29.941176 | 74 | 0.622135 |
a87367fae66dced91e2870eec15f372487a1c225
| 259 |
py
|
Python
|
Basics/ModulesAndPackages/ModulesAndPackages/ModulesAndPackages.py
|
ttitto/python
|
5f9be2d3c15a61c94647c250c9365a45f0cc3d0f
|
[
"MIT"
] | null | null | null |
Basics/ModulesAndPackages/ModulesAndPackages/ModulesAndPackages.py
|
ttitto/python
|
5f9be2d3c15a61c94647c250c9365a45f0cc3d0f
|
[
"MIT"
] | null | null | null |
Basics/ModulesAndPackages/ModulesAndPackages/ModulesAndPackages.py
|
ttitto/python
|
5f9be2d3c15a61c94647c250c9365a45f0cc3d0f
|
[
"MIT"
] | null | null | null |
#import sys
##print(sys.argv)
#root_folder = sys.argv[0]
#print(root_folder)
#import os
#from nt import R_OK, F_OK, W_OK
#print(os.access('data/catalog_full.csv', W_OK))
#for root, dirs, files in os.walk('./'):
# print(root, '----', dirs, '----', files)
| 21.583333 | 48 | 0.640927 |
53c16e9f698bf2972f7fb093e1def8998f71d03c
| 2,658 |
py
|
Python
|
pygame_test_gamepad.py
|
winkleink/pygame_test_gamepad
|
548ffe69f6d2be1fe99bd822b17a412d05b7a99b
|
[
"MIT"
] | null | null | null |
pygame_test_gamepad.py
|
winkleink/pygame_test_gamepad
|
548ffe69f6d2be1fe99bd822b17a412d05b7a99b
|
[
"MIT"
] | null | null | null |
pygame_test_gamepad.py
|
winkleink/pygame_test_gamepad
|
548ffe69f6d2be1fe99bd822b17a412d05b7a99b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Interactive pygame tool that visualizes gamepad axis and button events."""
import os, sys, pygame
from pygame import locals
import time
# Define the colors we will use in RGB format
black = [ 0, 0, 0]
white = [255,255,255]
red = [255, 0, 0]
#os.environ["SDL_VIDEODRIVER"] = "dummy"
pygame.init()
# Set the height and width of the screen
size=[630,300]
screen=pygame.display.set_mode(size)
# Fill the screen White
screen.fill(white)
# Put something in the application Bar
pygame.display.set_caption("Testing gamepad")
pygame.draw.rect(screen,black,(19,19,276,276),0)
pygame.display.flip()
pygame.joystick.init() # main joystick device system
textfont = pygame.font.SysFont("moonspace",24)
joyfont = pygame.font.SysFont("moonspace",60)
done = True
# BUG FIX: joyName needs a default.  It was previously assigned only inside
# the try block, so when no joystick was attached the render() call below
# raised NameError instead of showing a message.
joyName = 'no joystick found'
try:
    j = pygame.joystick.Joystick(0) # create a joystick instance
    j.init() # init instance
    print ('Enabled joystick: ' + j.get_name())
    joyName = j.get_name()
except pygame.error:
    print ('no joystick found.')
pygame.draw.rect(screen,black,(19,19,276,276),0)
pygame.draw.rect(screen,white,(148,148,20,20),0)
joyText = textfont.render("Gamepad : "+joyName,1,red)
screen.blit(joyText,(350,100))
pygame.display.flip()
# Main event loop: draw the left-stick position inside the black square and
# light up numbered boxes while buttons 0-9 are held down.
while done:
    for e in pygame.event.get(): # iterate over event stack
        if e.type == pygame.QUIT:
            done = False
        if e.type == pygame.locals.JOYAXISMOTION: # Read Analog Joystick Axis
            x1 , y1 = j.get_axis(0), j.get_axis(1) # Left Stick
            print (x1)
            print (y1)
            x1Text = textfont.render("x : "+str(x1),1,red)
            y1Text = textfont.render("y : "+str(y1),1,red)
            # Redraw the stick marker at its new position, then refresh
            # the x/y readout panel.
            pygame.draw.rect(screen,black,(19,19,276,276),0)
            pygame.draw.rect(screen,white,(148+(x1*128),148+(y1*128),20,20),0)
            pygame.draw.rect(screen,white,(300,150,250,200),0)
            screen.blit(x1Text,(350,150))
            screen.blit(y1Text,(350,200))
            pygame.display.flip()
        if e.type == pygame.locals.JOYBUTTONDOWN: # Read the buttons
            print ("button down"+str(e.button))
            for i in range(0,10):
                if e.button == i:
                    pygame.draw.rect(screen,red,(300+(i*30),20,20,20),0)
                    buttonText = textfont.render(str(i),1,white)
                    screen.blit(buttonText,(300+(i*30)+3,20))
            pygame.display.flip()
        if e.type == pygame.locals.JOYBUTTONUP: # Read the buttons
            print ("button up"+str(e.button))
            for i in range(0,10):
                if e.button == i:
                    pygame.draw.rect(screen,white,(300+(i*30),20,20,20),0)
            pygame.display.flip()
| 30.906977 | 78 | 0.59857 |
e93114e3ae2032e5342c5d695a8a581ffc193e4e
| 1,949 |
py
|
Python
|
ams-publisher/ams-queue-consume.py
|
vrdel/ams-test
|
3797a721e65d9548f50e74668b75f92829eb3e20
|
[
"Apache-2.0"
] | null | null | null |
ams-publisher/ams-queue-consume.py
|
vrdel/ams-test
|
3797a721e65d9548f50e74668b75f92829eb3e20
|
[
"Apache-2.0"
] | null | null | null |
ams-publisher/ams-queue-consume.py
|
vrdel/ams-test
|
3797a721e65d9548f50e74668b75f92829eb3e20
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python3
import argparse
import datetime
import os
import pprint
import pwd
import random
import sys
import time
from messaging.message import Message
from messaging.error import MessageError
from messaging.queue.dqs import DQS
from collections import deque
# Defaults for the --queue and --runas CLI options.
default_queue = '/var/spool/argo-nagios-ams-publisher/outgoing-messages/'
default_user = 'nagios'
# Parsed CLI arguments; populated by main() before any queue access.
args = None
# 1-based counter of consume_queue() invocations, shown in run banners.
cqcalld = 1
def seteuser(user):
    """Switch effective gid and uid to those of *user* (a pwd entry).

    The egid is set before the euid; NOTE(review): presumably because
    dropping the euid first could remove the privilege needed to change
    groups -- confirm.
    """
    os.setegid(user.pw_gid)
    os.seteuid(user.pw_uid)
def consume_queue(mq, num=0):
    """Drain up to *num* messages (0 = no limit) from directory queue *mq*.

    Each message is locked, read, and removed from the queue.  Unless
    --noout was given, a run banner is printed first and any consumed
    messages are pretty-printed afterwards.
    """
    global cqcalld, args
    if not args.noout:
        print('---- MSGS ---- RUN {0} ----'.format(cqcalld))
    i, msgs = 1, deque()
    for name in mq:
        if mq.lock(name):
            msgs.append(mq.get_message(name))
            mq.remove(name)
            # Stop once the requested number of messages is consumed.
            if num and i == num:
                break
            i += 1
    else:
        # for/else: runs only when the loop finished without break,
        # i.e. the queue iterator was exhausted.
        print('{0} empty'.format(mq.path))
    if msgs and not args.noout:
        pprint.pprint(msgs)
    cqcalld += 1
def main():
    """Parse CLI options, drop privileges, and consume the queue.

    With --sleep > 0 the queue is polled forever at that interval;
    otherwise it is drained once.  --purge clears the queue first,
    --num limits messages per run, --noout suppresses printing.
    Runs with the effective uid/gid of --runas.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--sleep', required=False, default=0, type=float)
    parser.add_argument('--queue', required=False, default=default_queue, type=str)
    parser.add_argument('--runas', required=False, default=default_user, type=str)
    parser.add_argument('--purge', required=False, action='store_true', default=False)
    parser.add_argument('--noout', required=False, action='store_true', default=False)
    parser.add_argument('--num', required=False, default=0, type=int)
    global args
    args = parser.parse_args()
    # Drop effective uid/gid before touching the spool directory.
    seteuser(pwd.getpwnam(args.runas))
    msgs = []
    mq = DQS(path=args.queue)
    try:
        if args.purge:
            mq.purge()
        if args.sleep > 0:
            # Daemon-ish mode: poll forever at the given interval.
            while True:
                consume_queue(mq, args.num)
                time.sleep(args.sleep)
        else:
            consume_queue(mq, args.num)
    except KeyboardInterrupt as e:
        raise SystemExit(0)

main()
| 24.670886 | 86 | 0.635711 |
effd13b360a014c379f47c8fbc329abda717029b
| 775 |
py
|
Python
|
snippet.py
|
ank-urG/Hacktoberfest2020
|
c3f8ae49d4a5dab888524915da5a5ea3ce075b89
|
[
"MIT"
] | null | null | null |
snippet.py
|
ank-urG/Hacktoberfest2020
|
c3f8ae49d4a5dab888524915da5a5ea3ce075b89
|
[
"MIT"
] | null | null | null |
snippet.py
|
ank-urG/Hacktoberfest2020
|
c3f8ae49d4a5dab888524915da5a5ea3ce075b89
|
[
"MIT"
] | 1 |
2020-10-09T17:49:53.000Z
|
2020-10-09T17:49:53.000Z
|
"""Tiny interactive demo menu of Python snippets (timing, enumerate,
join, string repetition)."""
import time

print("1. Time Spent")
# Fixed menu typo: "unemate" -> "enumerate".
print("2. use enumerate")
print("3. Comma-separated")
print("4. Print a string N times")
num = int(input("Enter a number: "))
if num == 1:
    # Measure wall-clock time around a trivial computation.
    start_time = time.time()
    a = 1
    b = 2
    c = a + b
    print(c)  # 3
    end_time = time.time()
    total_time = end_time - start_time
    print("Time: ", total_time)
elif num == 2:
    # Fixed: variable was named "list", shadowing the builtin.
    letters = ["a", "b", "c", "d"]
    for index, element in enumerate(letters):
        print("Value", element, "Index ", index, )
elif num == 3:
    hobbies = ["basketball", "football", "swimming"]
    print("My hobbies are:")  # My hobbies are:
    print(", ".join(hobbies))  # basketball, football, swimming
elif num == 4:
    n = 2
    s = "Programming"
    print(s * n)
| 20.394737 | 64 | 0.545806 |
ff2ee3bcaa653579a87462651aa3397e5010aa21
| 721 |
py
|
Python
|
bin/_preamble.py
|
tlandschoff-scale/spyne
|
d271f5e5d64114f62375e8baec0a5d0d36f84c96
|
[
"BSD-3-Clause"
] | 786 |
2015-01-04T10:46:28.000Z
|
2022-03-31T19:24:35.000Z
|
bin/_preamble.py
|
tlandschoff-scale/spyne
|
d271f5e5d64114f62375e8baec0a5d0d36f84c96
|
[
"BSD-3-Clause"
] | 248 |
2015-01-01T21:52:47.000Z
|
2022-03-09T08:55:04.000Z
|
bin/_preamble.py
|
tlandschoff-scale/spyne
|
d271f5e5d64114f62375e8baec0a5d0d36f84c96
|
[
"BSD-3-Clause"
] | 210 |
2015-01-10T14:20:31.000Z
|
2022-03-09T08:38:43.000Z
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
# This makes sure that users don't have to set up their environment
# specially in order to run these programs from bin/.
# This helper is shared by many different actual scripts. It is not intended to
# be packaged or installed, it is only a developer convenience. By the time
# Spyne is actually installed somewhere, the environment should already be set
# up properly without the help of this tool.
import os
import sys
# Walk upward from this script's location until we find the directory
# containing the spyne package, and put it on sys.path so the scripts in
# bin/ work from a source checkout without installation.
path = os.path.abspath(sys.argv[0])
while True:
    parent = os.path.dirname(path)
    if parent == path:
        # Reached the filesystem root without finding the package.
        break
    if os.path.exists(os.path.join(path, 'spyne', '__init__.py')):
        sys.path.insert(0, path)
        break
    path = parent
d6ab435dc0cb1f97e9cf6ba449acd322ef7baa38
| 3,787 |
py
|
Python
|
networking_generic_switch/devices/netmiko_devices/dell.py
|
sandyw777/networking-generic-switch
|
82bdddc104e06f2a5dc2a14f220c6abadeb655bd
|
[
"Apache-2.0"
] | 26 |
2016-02-12T07:30:21.000Z
|
2021-11-26T06:32:01.000Z
|
networking_generic_switch/devices/netmiko_devices/dell.py
|
sandyw777/networking-generic-switch
|
82bdddc104e06f2a5dc2a14f220c6abadeb655bd
|
[
"Apache-2.0"
] | 10 |
2017-10-05T13:59:28.000Z
|
2021-09-16T13:57:52.000Z
|
networking_generic_switch/devices/netmiko_devices/dell.py
|
sandyw777/networking-generic-switch
|
82bdddc104e06f2a5dc2a14f220c6abadeb655bd
|
[
"Apache-2.0"
] | 34 |
2016-03-18T08:13:37.000Z
|
2021-10-01T15:50:19.000Z
|
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from networking_generic_switch.devices import netmiko_devices
from networking_generic_switch import exceptions as exc
class DellNos(netmiko_devices.NetmikoSwitch):
    """Netmiko device driver for Dell Force10 switches.

    Each attribute below is a tuple of CLI command templates containing
    ``{segmentation_id}`` / ``{network_name}`` / ``{port}`` placeholders;
    NOTE(review): presumably formatted and sent to the switch by the
    NetmikoSwitch base class (not visible here) -- confirm.
    """
    # Create VLAN {segmentation_id} and label it with the network name.
    ADD_NETWORK = (
        'interface vlan {segmentation_id}',
        'name {network_name}',
        'exit',
    )
    # Remove the VLAN interface entirely.
    DELETE_NETWORK = (
        'no interface vlan {segmentation_id}',
        'exit',
    )
    # Add {port} as an untagged (access) member of the VLAN.
    PLUG_PORT_TO_NETWORK = (
        'interface vlan {segmentation_id}',
        'untagged {port}',
        'exit',
    )
    # Remove {port} from the VLAN's untagged member list.
    DELETE_PORT = (
        'interface vlan {segmentation_id}',
        'no untagged {port}',
        'exit',
    )
    # Add {port} as a tagged (trunk) member of the VLAN.
    ADD_NETWORK_TO_TRUNK = (
        'interface vlan {segmentation_id}',
        'tagged {port}',
        'exit',
    )
    # Remove {port} from the VLAN's tagged member list.
    REMOVE_NETWORK_FROM_TRUNK = (
        'interface vlan {segmentation_id}',
        'no tagged {port}',
        'exit',
    )
class DellPowerConnect(netmiko_devices.NetmikoSwitch):
    """Netmiko device driver for Dell PowerConnect switches.

    Supports two switchport modes chosen via the ``ngs_switchport_mode``
    config option: "access" (the default command set) and "general",
    which swaps in the *_GENERAL command sequences.
    """
    def _switch_to_general_mode(self):
        # Replace the access-mode command sequences with their
        # "general" switchport-mode equivalents.
        self.PLUG_PORT_TO_NETWORK = self.PLUG_PORT_TO_NETWORK_GENERAL
        self.DELETE_PORT = self.DELETE_PORT_GENERAL
    def __init__(self, device_cfg):
        super(DellPowerConnect, self).__init__(device_cfg)
        port_mode = self.ngs_config['ngs_switchport_mode']
        # Dispatch table: "general" rewires the command sets,
        # "access" is a no-op.
        switchport_mode = {
            'general': self._switch_to_general_mode,
            'access': lambda: ()
        }
        def on_invalid_switchmode():
            # Unknown mode: raise a config error listing valid options.
            raise exc.GenericSwitchConfigException(
                option="ngs_switchport_mode",
                allowed_options=switchport_mode.keys()
            )
        switchport_mode.get(port_mode.lower(), on_invalid_switchmode)()
    # Create VLAN {segmentation_id} in the VLAN database.
    ADD_NETWORK = (
        'vlan database',
        'vlan {segmentation_id}',
        'exit',
    )
    # Delete VLAN {segmentation_id} from the VLAN database.
    DELETE_NETWORK = (
        'vlan database',
        'no vlan {segmentation_id}',
        'exit',
    )
    # General mode: add the VLAN untagged and make it the port's PVID.
    PLUG_PORT_TO_NETWORK_GENERAL = (
        'interface {port}',
        'switchport general allowed vlan add {segmentation_id} untagged',
        'switchport general pvid {segmentation_id}',
        'exit',
    )
    # Access mode (default): assign the port's access VLAN.
    PLUG_PORT_TO_NETWORK = (
        'interface {port}',
        'switchport access vlan {segmentation_id}',
        'exit',
    )
    # General mode: remove the VLAN and clear the PVID.
    DELETE_PORT_GENERAL = (
        'interface {port}',
        'switchport general allowed vlan remove {segmentation_id}',
        'no switchport general pvid',
        'exit',
    )
    # Access mode (default): clear the port's access VLAN.
    DELETE_PORT = (
        'interface {port}',
        'switchport access vlan none',
        'exit',
    )
    # Add the VLAN to the port as tagged (trunking).
    ADD_NETWORK_TO_TRUNK = (
        'interface {port}',
        'switchport general allowed vlan add {segmentation_id} tagged',
        'exit',
    )
    # Remove the tagged VLAN from the port.
    REMOVE_NETWORK_FROM_TRUNK = (
        'interface {port}',
        'switchport general allowed vlan remove {segmentation_id}',
        'exit',
    )
    # Patterns in switch output that indicate a failed command.
    ERROR_MSG_PATTERNS = (
        re.compile(r'\% Incomplete command'),
        re.compile(r'VLAN was not created by user'),
        re.compile(r'Configuration Database locked by another application \- '
                   r'try later'),
    )
| 27.244604 | 78 | 0.619224 |
62a392bd094d7445f5cd7ba96280ccca4ee3ac0b
| 1,252 |
py
|
Python
|
samples/hyperparameter-tutorial/ml_service/pipelines/hyperparams.py
|
h2floh/MLOpsManufacturing-1
|
3fcf15ef80600540450d16eae57b853d88a83063
|
[
"Apache-2.0"
] | 20 |
2020-11-09T08:20:13.000Z
|
2021-12-03T06:15:45.000Z
|
samples/hyperparameter-tutorial/ml_service/pipelines/hyperparams.py
|
h2floh/MLOpsManufacturing-1
|
3fcf15ef80600540450d16eae57b853d88a83063
|
[
"Apache-2.0"
] | 55 |
2020-10-20T02:18:56.000Z
|
2021-07-26T04:52:23.000Z
|
samples/hyperparameter-tutorial/ml_service/pipelines/hyperparams.py
|
h2floh/MLOpsManufacturing-1
|
3fcf15ef80600540450d16eae57b853d88a83063
|
[
"Apache-2.0"
] | 10 |
2020-11-26T05:47:57.000Z
|
2022-03-23T20:26:00.000Z
|
from azureml.train.hyperdrive import RandomParameterSampling, BanditPolicy, HyperDriveConfig, PrimaryMetricGoal
from azureml.train.hyperdrive import uniform
class HyperParams:
    """Builds Azure ML HyperDrive (hyperparameter tuning) configuration
    from the module-level constants below."""

    def __init__(self):
        # BUG FIX: was "def __init(self)" -- a typo that made this an
        # ordinary, never-invoked method instead of the constructor.
        print(f'HyperParams.Created. version = {VERSION}')

    def get_param_sampling(self):
        """Return the random sampling space defined by PARAMETER_SAMPLING."""
        return RandomParameterSampling(PARAMETER_SAMPLING)

    def get_bandit_policy(self):
        """Return the bandit early-termination policy."""
        return BanditPolicy(evaluation_interval=EVALUATION_INTERVAL, slack_factor=SLACK_FACTOR)

    def get_hd_config(self, config):
        """Assemble a HyperDriveConfig around the given run *config*."""
        hd_config = HyperDriveConfig(
            run_config=config,
            hyperparameter_sampling=self.get_param_sampling(),
            policy=self.get_bandit_policy(),
            primary_metric_name=PRIMARY_METRIC_NAME,
            primary_metric_goal=PRIMARY_METRIC_GOAL,
            max_total_runs=MAX_TOTAL_RUNS,
            max_concurrent_runs=MAX_CONCURRENT_RUNS)
        return hd_config
# https://docs.microsoft.com/azure/machine-learning/how-to-tune-hyperparameters
VERSION = '2021.02.26'
# Search space: sample --alpha uniformly from [0.05, 1.0].
PARAMETER_SAMPLING = {
    '--alpha': uniform(0.05, 1.0)
}
# Optimize for the "r2" metric, maximizing it.
PRIMARY_METRIC_NAME = 'r2'
PRIMARY_METRIC_GOAL = PrimaryMetricGoal.MAXIMIZE
# Bandit early-termination policy settings.
EVALUATION_INTERVAL = 2
SLACK_FACTOR = 0.1
# Run-count limits for the HyperDrive sweep.
MAX_TOTAL_RUNS = 4
MAX_CONCURRENT_RUNS = 2
| 32.102564 | 111 | 0.740415 |
2a089a6c4d02c9777c6120cd259c523e3ec67fc0
| 272 |
py
|
Python
|
ecommerce/ecommerce/core/models.py
|
crstnrm/ecommerce
|
72e757b1ecc7b2dfeecabe0ff06ee9797815dc8e
|
[
"MIT"
] | null | null | null |
ecommerce/ecommerce/core/models.py
|
crstnrm/ecommerce
|
72e757b1ecc7b2dfeecabe0ff06ee9797815dc8e
|
[
"MIT"
] | null | null | null |
ecommerce/ecommerce/core/models.py
|
crstnrm/ecommerce
|
72e757b1ecc7b2dfeecabe0ff06ee9797815dc8e
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.utils import timezone
class BaseModel(models.Model):
    """Abstract base model adding created/updated timestamp fields."""
    # Set once at row creation; hidden from model forms (editable=False).
    created_at = models.DateTimeField(default=timezone.now, editable=False)
    # NOTE(review): default only applies at creation -- this is not
    # auto-refreshed on save (that would need auto_now=True); confirm
    # callers update it explicitly.
    updated_at = models.DateTimeField(default=timezone.now)
    class Meta:
        abstract = True
| 27.2 | 75 | 0.757353 |
28c97c41839437fb895e148e58647abfc201a7e4
| 2,722 |
py
|
Python
|
test/multiapi/Expected/AcceptanceTests/MultiapiCredentialDefaultPolicy/multiapicredentialdefaultpolicy/v1/aio/_configuration.py
|
cfculhane/autorest.python
|
8cbca95faee88d933a58bbbd17b76834faa8d387
|
[
"MIT"
] | null | null | null |
test/multiapi/Expected/AcceptanceTests/MultiapiCredentialDefaultPolicy/multiapicredentialdefaultpolicy/v1/aio/_configuration.py
|
cfculhane/autorest.python
|
8cbca95faee88d933a58bbbd17b76834faa8d387
|
[
"MIT"
] | null | null | null |
test/multiapi/Expected/AcceptanceTests/MultiapiCredentialDefaultPolicy/multiapicredentialdefaultpolicy/v1/aio/_configuration.py
|
cfculhane/autorest.python
|
8cbca95faee88d933a58bbbd17b76834faa8d387
|
[
"MIT"
] | 1 |
2022-03-28T08:58:03.000Z
|
2022-03-28T08:58:03.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any
from azure.core.configuration import Configuration
from azure.core.credentials import AzureKeyCredential
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
VERSION = "unknown"
class MultiapiServiceClientConfiguration(Configuration):
    """Configuration for MultiapiServiceClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials.AzureKeyCredential
    """

    def __init__(
        self,
        credential: AzureKeyCredential,
        **kwargs: Any
    ) -> None:
        super(MultiapiServiceClientConfiguration, self).__init__(**kwargs)
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")

        self.credential = credential
        # API version pinned for this (v1) client.
        self.api_version = "1.0.0"
        kwargs.setdefault('sdk_moniker', 'multiapicredentialdefaultpolicy/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs: Any
    ) -> None:
        # Build the HTTP pipeline policies, letting any caller-supplied
        # policy in kwargs override the default.
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
        # Default authentication: send the key credential in the
        # "api-key" request header.
        self.authentication_policy = kwargs.get('authentication_policy')
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.AzureKeyCredentialPolicy(self.credential, "api-key", **kwargs)
| 47.754386 | 112 | 0.696179 |
fd93225e2649f229b062991bfb4ba20d423338e6
| 16,113 |
py
|
Python
|
fetchd/fetchd.py
|
newtoncurrency/ntqrelay
|
aa87402fcd19131b8e158839bed6080f64a93241
|
[
"MIT"
] | 608 |
2015-06-13T17:20:21.000Z
|
2022-03-26T11:30:32.000Z
|
fetchd/fetchd.py
|
AYIDouble/btcrelay
|
48c45c6c764315b0a274e74ef8d87915680c1132
|
[
"MIT"
] | 40 |
2015-07-01T19:03:38.000Z
|
2020-12-16T07:57:05.000Z
|
fetchd/fetchd.py
|
AYIDouble/btcrelay
|
48c45c6c764315b0a274e74ef8d87915680c1132
|
[
"MIT"
] | 188 |
2015-06-15T10:28:45.000Z
|
2022-03-12T21:13:27.000Z
|
#!/usr/bin/env python
# from datetime import datetime, date
from time import sleep
from argparse import ArgumentParser
import logging
from pyepm import api, config, __version__
from bitcoin import * # NOQA
from urllib import urlopen
# Warning !!!
# to make things work, also at https://github.com/etherex/pyepm/blob/master/pyepm/api.py#L38
# (method abi_data, before last return)
# need to implement fix for hex with odd length
# if len(data_abi) % 2 > 0:
# data_abi = data_abi.replace('0x','0x0')
# Names of the supported Bitcoin networks, as used by the --network option.
BITCOIN_MAINNET = 'btc'
BITCOIN_TESTNET = 'testnet'
SLEEP_TIME = 5 * 60  # 5 mins. If changing, check retry logic
GAS_FOR_STORE_HEADERS = 1200000  # it should take less than 1M gas, but buffer to avoid running out
CHUNK_SIZE = 5  # number of headers to fetch at a time
CHUNK_RANGE = range(CHUNK_SIZE)
# Shared pyepm API client, configured from the standard pyepm config file;
# further attributes (address, relayContract, ...) are set in main().
api_config = config.read_config()
instance = api.Api(api_config)
logging.basicConfig(format='%(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
pyepmLogger = logging.getLogger("pyepm")
pyepmLogger.setLevel(logging.INFO)
# instance.address = "0xcd2a3d9f938e13cd947ec05abc7fe734df8dd826"
# instance.relayContract = "0xba164d1e85526bd5e27fd15ad14b0eae91c45a93"
# TESTNET relay: 0x142f674e911cc55c226af81ac4d6de0a671d4abf
# To use a wallet, deploy and configure https://github.com/ethereum/dapp-bin/blob/69cb5e8c82074b5fbf9c01f75145c3cad5af85e2/wallet/wallet.sol
# Update the 4 wallet variables below. Using an instance of the contract wallet
# can allow more ETH to be stored in the wallet, while a much smaller amount of
# ETH can be stored in the "--sender" unlocked account. The "sender" can request
# a daily amount from the contract wallet, and should be unable to fully withdraw from
# the contract wallet if it has been deployed and configured correctly.
useWallet = False  # when True, need to set the following remaining values:
instance.walletContract = ''  # address of the contract wallet
instance.weiRefill = int(1e18)  # 1 ETH. Amount to refill the "hot" sender account each time walletWithdraw() is called
aWalletOwner = ''  # address of an owner of the contract wallet
def get_hash_by_height(height,network='btc'):
    """Return the hash of the block at *height* via the blockchain.info API.

    NOTE(review): the *network* parameter is accepted but unused -- the
    URL always queries blockchain.info (mainnet); confirm intent.
    """
    url = 'https://blockchain.info/block-height/'+str(height)+'?format=json'
    jsonurl = urlopen(url)
    text = json.loads(jsonurl.read())
    # The API may return several blocks at a height; use the first one.
    return text['blocks'][0]['hash']
def serialize_header(height,network='btc'):
    """Fetch the block at *height* from blockchain.info and return its
    80-byte header serialized as a hex string.

    Fields are concatenated in Bitcoin header order: version (4 bytes),
    prev_block (32), merkle_root (32), time (4), bits (4), nonce (4).

    NOTE(review): *network* is accepted but unused (mainnet is always
    queried); confirm intent.
    """
    url = 'https://blockchain.info/block-height/'+str(height)+'?format=json'
    jsonurl = urlopen(url)
    text = json.loads(jsonurl.read())
    inp = text['blocks'][0]
    # Each integer field is encoded base-256 by encode() and then
    # byte-reversed ([::-1]) to little-endian; the hex-string hashes are
    # likewise byte-reversed (Python 2 str.decode('hex')).
    o = encode(inp['ver'], 256, 4)[::-1] + \
        inp['prev_block'].decode('hex')[::-1] + \
        inp['mrkl_root'].decode('hex')[::-1] + \
        encode(inp['time'], 256, 4)[::-1] + \
        encode(inp['bits'], 256, 4)[::-1] + \
        encode(inp['nonce'], 256, 4)[::-1]
    # Sanity check: double SHA-256 of the header, byte-reversed, must
    # equal the block hash reported by the API.
    h = bin_sha256(bin_sha256(o))[::-1].encode('hex')
    assert h == inp['hash'], (sha256(o), inp['hash'])
    return o.encode('hex')
def main():
    """Parse CLI arguments, configure the shared `instance`, and run the
    fetch loop -- once by default, or forever with retries under --daemon."""
    # logging.basicConfig(level=logging.DEBUG)
    logger.info("fetchd using PyEPM %s" % __version__)
    parser = ArgumentParser()
    parser.add_argument('-s', '--sender', required=True, help='sender of transaction')
    parser.add_argument('-r', '--relay', required=True, help='relay contract address')
    parser.add_argument('--rpcHost', default='127.0.0.1', help='RPC hostname')
    parser.add_argument('--rpcPort', default='8545', type=int, help='RPC port')
    parser.add_argument('--startBlock', default=0, type=int, help='block number to start fetching from')
    parser.add_argument('-w', '--waitFor', default=0, type=int, help='number of blocks to wait between fetches')
    parser.add_argument('--gasPrice', default=int(10e12), type=int, help='gas price') # default 10 szabo
    parser.add_argument('--fetch', action='store_true', help='fetch blockheaders')
    parser.add_argument('-n', '--network', default=BITCOIN_TESTNET, choices=[BITCOIN_TESTNET, BITCOIN_MAINNET], help='Bitcoin network')
    parser.add_argument('-d', '--daemon', default=False, action='store_true', help='run as daemon')
    parser.add_argument('--feeVTX', default=0, type=int, help='fee to charge for verifications')
    parser.add_argument('--feeRecipient', help='address of fee recipient')
    args = parser.parse_args()
    # Push the CLI options into the module-level Api handle.
    instance.address = args.sender
    instance.relayContract = args.relay
    instance.rpcHost = args.rpcHost
    instance.rpcPort = args.rpcPort
    instance.jsonrpc_url = "http://%s:%s" % (instance.rpcHost, instance.rpcPort)
    instance.numBlocksToWait = args.waitFor # for CPP eth as of Apr 28, 3 blocks seems reasonable. 0 seems to be fine for Geth
    # instance.gasPrice = args.gasPrice
    feeVerifyTx = args.feeVTX
    logger.info('feeVTX: %s' % feeVerifyTx)
    # Validate the contract-wallet configuration before doing any work.
    if useWallet:
        if instance.walletContract == '' or aWalletOwner == '':
            logger.info('wallet contract and owner address need to both be set')
            sys.exit()
        if instance.address != aWalletOwner:
            logger.info('sender is not a wallet owner: %s' % instance.address)
            sys.exit()
    feeRecipient = args.feeRecipient or instance.address
    logger.info('feeRecipient: %s' % feeRecipient)
    # Paying fees to a third party only makes sense when the contract wallet
    # holds the funds; refuse the combination otherwise.
    if feeRecipient != instance.address and not useWallet:
        logger.warn('feeRecipient %s is not sender %s and contract wallet is not used' % (feeRecipient, instance.address))
        sys.exit()
    # logger.info('@@@ rpc: %s' % instance.jsonrpc_url)
    # this can't be commented out easily since run() always does instance.heightToStartFetch = getLastBlockHeight() + 1 for retries
    # contractHeight = getLastBlockHeight() # needs instance.relayContract to be set
    # logger.info('@@@ contract height: {0} gp: {1}').format(contractHeight, instance.gasPrice)
    # instance.heightToStartFetch = args.startBlock or contractHeight + 1
    # this will not handle exceptions or do retries. need to use -d switch if desired
    if not args.daemon:
        run(feeVerifyTx, feeRecipient, doFetch=args.fetch, network=args.network, startBlock=args.startBlock)
        return
    # Daemon mode: loop forever.  On an ordinary exception, sleep 60s and
    # retry (up to 4 attempts per outer iteration); on anything else re-raise.
    while True:
        for i in range(4):
            try:
                run(feeVerifyTx, feeRecipient, doFetch=args.fetch, network=args.network, startBlock=args.startBlock)
                sleep(SLEEP_TIME)
            except Exception as e:
                logger.info(e)
                logger.info('Retry in 1min')
                sleep(60)
                continue
            except: # catch *all* exceptions
                e = sys.exc_info()[0]
                logger.info(e)
                logger.info('Rare exception')
                raise
            break  # successful run(): leave the retry loop; outer loop repeats
def run(feeVerifyTx, feeRecipient, doFetch=False, network=BITCOIN_TESTNET, startBlock=0):
    """One pass of the fetch loop: reconcile the relay contract's HEAD with
    the real Bitcoin chain (re-submitting headers past any orphans), then
    fetch and store new block headers up to the current chain height.

    Args:
        feeVerifyTx: fee to charge per verification; 0 disables per-header fees.
        feeRecipient: address that receives the fees.
        doFetch: when False, only reconciliation and logging are performed.
        network: BITCOIN_TESTNET or BITCOIN_MAINNET.
        startBlock: explicit height to start from (0 = ask the contract).
    """
    chainHead = getBlockchainHead()
    if not chainHead:
        raise ValueError("Empty BlockchainHead returned.")
    chainHead = blockHashHex(chainHead)
    logger.info('BTC BlockchainHead: %s' % chainHead)
    # loop in case contract stored correct HEAD, but reorg in *Ethereum* chain
    # so that contract lost the correct HEAD. we try 3 times since it would
    # be quite unlucky for 5 Ethereum reorgs to coincide with storing the
    # non-orphaned Bitcoin block
    nTime = 5
    for i in range(nTime):
        # refetch if needed in case contract's HEAD was orphaned
        if startBlock:
            contractHeight = startBlock
        else:
            contractHeight = getLastBlockHeight()
        realHead = get_hash_by_height(contractHeight, network=network)
        heightToRefetch = contractHeight
        # Walk backwards, re-submitting one header at a time, until the
        # contract's HEAD matches the real chain.
        while chainHead != realHead:
            logger.info('@@@ chainHead: {0} realHead: {1}'.format(chainHead, realHead))
            fetchHeaders(heightToRefetch, 1, 1, feeVerifyTx, feeRecipient, network=network)
            # wait for some blocks because Geth has a delay (at least in RPC), of
            # returning the correct data. the non-orphaned header may already
            # be in the Ethereum blockchain, so we should give it a chance before
            # adjusting realHead to the previous parent
            #
            # realHead is adjusted to previous parent in the off-chance that
            # there is more than 1 orphan block
            # for j in range(4):
            instance.wait_for_next_block(from_block=instance.last_block(), verbose=True)
            chainHead = blockHashHex(getBlockchainHead())
            realHead = get_hash_by_height(heightToRefetch, network=network)
            heightToRefetch -= 1
            if heightToRefetch < contractHeight - 10:
                if i == nTime - 1:
                    # this really shouldn't happen since 2 orphans are already
                    # rare, let alone 10
                    logger.info('@@@@ TERMINATING big reorg? {0}'.format(heightToRefetch))
                    sys.exit()
                else:
                    logger.info('@@@@ handle orphan did not succeed iteration {0}'.format(i))
                    break # start the refetch again, this time ++i
        # NOTE(review): this break also executes when the while-loop above was
        # left via its own break, so the "try again with ++i" retry never
        # actually happens -- verify whether that is intended.
        break # chainHead is same realHead
    actualHeight = last_block_height(network) # pybitcointools 1.1.33
    if startBlock:
        instance.heightToStartFetch = startBlock
    else:
        instance.heightToStartFetch = getLastBlockHeight() + 1
    logger.info('@@@ startFetch: {0} actualHeight: {1}'.format(instance.heightToStartFetch, actualHeight))
    chunkSize = CHUNK_SIZE
    fetchNum = actualHeight - instance.heightToStartFetch + 1
    # NOTE(review): `/` here is float division on Python 3; this file appears
    # to target Python 2 (see the hex codecs elsewhere), where it truncates.
    numChunk = fetchNum / chunkSize
    leftoverToFetch = fetchNum % chunkSize
    logger.info('@@@ numChunk: {0} leftoverToFetch: {1}'.format(numChunk, fetchNum))
    logger.info('----------------------------------')
    if doFetch:
        fetchHeaders(instance.heightToStartFetch, chunkSize, numChunk, feeVerifyTx, feeRecipient, network=network)
        # Fetch the remainder that did not fill a whole chunk, one at a time.
        fetchHeaders(actualHeight - leftoverToFetch + 1, 1, leftoverToFetch, feeVerifyTx, feeRecipient, network=network)
    # sys.exit()
def fetchHeaders(chunkStartNum, chunkSize, numChunk, feeVerifyTx, feeRecipient, network=BITCOIN_TESTNET):
    """Fetch `numChunk` chunks of `chunkSize` consecutive headers starting at
    height `chunkStartNum` and store each chunk in the relay contract.
    Periodically tops up the sender account from the contract wallet when
    `useWallet` is enabled.

    NOTE(review): Python 2 only -- uses str.decode('hex').
    """
    for j in range(numChunk):
        strings = ""
        # Concatenate the hex-serialized headers for this chunk.
        for i in range(chunkSize):
            blockNum = chunkStartNum + i
            bhStr = serialize_header(blockNum, network=network)
            logger.info("@@@ {0}: {1}".format(blockNum, bhStr))
            logger.debug("Block header: %s" % repr(bhStr.decode('hex')))
            strings += bhStr
        storeHeaders(strings.decode('hex'), chunkSize, feeVerifyTx, feeRecipient)
        chainHead = getBlockchainHead()
        logger.info('@@@ DONE hexHead: %s' % blockHashHex(chainHead))
        logger.info('==================================')
        chunkStartNum += chunkSize
        # average of 6*24=144 headers a day. So AROUND every 100 headers we check
        # the balance of sender and if it's less than 1 ETH, we ask for more ETH
        # from the wallet.
        # CHUNK_RANGE is used when chunkSize>1 so that we ask for ETH if chunkStartNum ends in
        # ????00, ????01, ????02 to ????04
        if ((chunkSize == 1 and chunkStartNum % 100 == 0) or
                (chunkSize == CHUNK_SIZE and chunkStartNum % 100 in CHUNK_RANGE)) and useWallet:
            myWei = instance.balance_at(instance.address)
            myBalance = myWei / 1e18
            logger.info('myBalance ETH: %s' % myBalance)
            if myBalance < 1:
                logger.info('going to walletWithdraw')
                walletWithdraw()
                myWei = instance.balance_at(instance.address)
                myBalance = myWei / 1e18
                logger.info('topped up ETH balance: %s' % myBalance)
def storeHeaders(bhBytes, chunkSize, feeVerifyTx, feeRecipient):
    """Submit `chunkSize` raw 80-byte headers (`bhBytes`, concatenated) to the
    relay contract, retrying each transaction until it is seen pending and
    then mined.

    With a nonzero `feeVerifyTx` each header is stored individually via
    storeBlockWithFeeAndRecipient; otherwise all headers go in one
    bulkStoreHeader call (the loop below breaks after the first iteration).

    NOTE(review): Python 2 only -- uses str.encode('hex') on byte strings.
    """
    txCount = instance.transaction_count(defaultBlock='pending')
    logger.info('----------------------------------')
    logger.info('txCount: %s' % txCount)
    # First/last header hashes and raw hex -- for logging only.
    hashOne = blockHashHex(int(bin_dbl_sha256(bhBytes[:80])[::-1].encode('hex'), 16))
    hashLast = blockHashHex(int(bin_dbl_sha256(bhBytes[-80:])[::-1].encode('hex'), 16))
    logger.info('hashOne: %s' % hashOne)
    logger.info('hashLast: %s' % hashLast)
    firstH = bhBytes[:80].encode('hex')
    lastH = bhBytes[-80:].encode('hex')
    logger.info('firstH: %s' % firstH)
    logger.info('lastH: %s' % lastH)
    sig = 'bulkStoreHeader:[bytes,int256]:int256'
    data = [bhBytes, chunkSize]
    gas = GAS_FOR_STORE_HEADERS
    value = 0
    #
    # Store the headers
    #
    if feeVerifyTx != 0:
        sig = 'storeBlockWithFeeAndRecipient:[bytes,int256,int256]:int256'
    for i in range(chunkSize):
        if feeVerifyTx != 0:
            # Per-header call: slice out the i-th 80-byte header.
            offset = 80*i
            data = [ bhBytes[offset:offset+80] , feeVerifyTx, feeRecipient]
        # Wait for the transaction and retry if failed
        txHash = instance.transact(instance.relayContract, sig=sig, data=data, gas=gas, value=value)
        logger.info("store header txHash: %s" % txHash)
        txResult = False
        while txResult is False:
            txResult = instance.wait_for_transaction(transactionHash=txHash, defaultBlock="pending", retry=30, verbose=True)
            logger.info("store header pendingblock txResult: %s" % txResult)
            if txResult is False:
                txHash = instance.transact(instance.relayContract, sig=sig, data=data, gas=gas, value=value)
        # Wait for the transaction to be mined and retry if failed
        txResult = False
        while txResult is False:
            txResult = instance.wait_for_transaction(transactionHash=txHash, defaultBlock="latest", retry=60, verbose=True)
            logger.info("store header latestblock txResult: %s" % txResult)
            if txResult is False:
                txHash = instance.transact(instance.relayContract, sig=sig, data=data, gas=gas, value=value)
        # Without per-header fees, the single bulk call stored everything.
        if feeVerifyTx == 0:
            break
    # Verify the contract's HEAD now matches the last submitted header.
    chainHead = getBlockchainHead()
    expHead = int(bin_dbl_sha256(bhBytes[-80:])[::-1].encode('hex'), 16)
    if chainHead != expHead:
        logger.info('@@@@@ MISMATCH chainHead: {0} expHead: {1}'.format(blockHashHex(chainHead), blockHashHex(expHead)))
    # sys.exit(1)
def walletWithdraw():
    """Ask the contract wallet to send `instance.weiRefill` wei to the sender
    account, resubmitting the transaction until it is observed pending and
    then mined."""
    # Signature of the wallet's execute(address _to, uint _value, bytes _data).
    callSig = 'execute:[address,uint256,bytes]:bytes32'
    callData = [instance.address, instance.weiRefill, '']
    gasLimit = 999000
    txHash = instance.transact(instance.walletContract, sig=callSig, data=callData, gas=gasLimit)
    logger.info("walletWithdraw txHash: %s" % txHash)
    # Phase 1: wait until the tx shows up as pending; phase 2: wait until it
    # is mined.  In either phase, a timeout triggers a resubmission.
    for waitBlock, retryCount in (("pending", 30), ("latest", 60)):
        outcome = False
        while outcome is False:
            outcome = instance.wait_for_transaction(transactionHash=txHash, defaultBlock=waitBlock, retry=retryCount, verbose=True)
            if outcome is False:
                txHash = instance.transact(instance.walletContract, sig=callSig, data=callData, gas=gasLimit)
def getLastBlockHeight():
    """Read the relay contract's last stored Bitcoin block height."""
    # Quiet pyepm's chatter for the duration of the call.
    pyepmLogger.setLevel(logging.WARNING)
    result = instance.call(instance.relayContract, sig='getLastBlockHeight:[]:int256', data=[])
    pyepmLogger.setLevel(logging.INFO)
    logger.debug("RESULT %s" % result)
    return result[0] if len(result) else result
def getBlockchainHead():
    """Read the relay contract's current Bitcoin HEAD as an integer hash."""
    # Quiet pyepm's chatter for the duration of the call.
    pyepmLogger.setLevel(logging.WARNING)
    result = instance.call(instance.relayContract, sig='getBlockchainHead:[]:int256', data=[])
    pyepmLogger.setLevel(logging.INFO)
    return result[0] if len(result) else result
def blockHashHex(number):
    """Format an integer block hash as a 64-character zero-padded hex string.

    The previous implementation sliced hex(number)[2:-1] to drop the "0x"
    prefix and the trailing "L" that Python 2 appends to longs.  That slice
    silently drops the last hex digit whenever no "L" is present -- i.e. for
    small ints on Python 2 and for every value on Python 3.  A format spec
    is correct on both interpreters.
    """
    return '{0:064x}'.format(number)
# Script entry point.
if __name__ == '__main__':
    main()
| 42.180628 | 140 | 0.658971 |
7dffd432c8897f08381b52318d6df51d586fe99e
| 2,672 |
py
|
Python
|
ninja/router.py
|
duilio/django-ninja
|
8dac3c981bcf431322d32acd34c8179564a3698d
|
[
"MIT"
] | null | null | null |
ninja/router.py
|
duilio/django-ninja
|
8dac3c981bcf431322d32acd34c8179564a3698d
|
[
"MIT"
] | null | null | null |
ninja/router.py
|
duilio/django-ninja
|
8dac3c981bcf431322d32acd34c8179564a3698d
|
[
"MIT"
] | null | null | null |
from collections import OrderedDict
from typing import Callable, List
from django.http import HttpResponseNotAllowed
from django.urls import path as django_path
from ninja.operation import Operation, PathView
from ninja.constants import NOT_SET
from ninja.utils import normalize_path
class Router:
    """Registers view functions under (path, HTTP-methods) pairs and renders
    the corresponding Django URL patterns."""

    def __init__(self):
        # Maps a raw path string to the PathView aggregating its operations.
        self.operations = OrderedDict()  # TODO: better rename to path_operations
        self.api = None

    def get(self, path: str, *, auth=NOT_SET, response=None):
        """Decorator registering a GET handler for `path`."""
        return self.api_operation(["GET"], path, auth=auth, response=response)

    def post(self, path: str, *, auth=NOT_SET, response=None):
        """Decorator registering a POST handler for `path`."""
        return self.api_operation(["POST"], path, auth=auth, response=response)

    def delete(self, path: str, *, auth=NOT_SET, response=None):
        """Decorator registering a DELETE handler for `path`."""
        return self.api_operation(["DELETE"], path, auth=auth, response=response)

    def patch(self, path: str, *, auth=NOT_SET, response=None):
        """Decorator registering a PATCH handler for `path`."""
        return self.api_operation(["PATCH"], path, auth=auth, response=response)

    def put(self, path: str, *, auth=NOT_SET, response=None):
        """Decorator registering a PUT handler for `path`."""
        return self.api_operation(["PUT"], path, auth=auth, response=response)

    def api_operation(
        self, methods: List[str], path: str, *, auth=NOT_SET, response=None
    ):
        """Decorator form of `add_api_operation` for an arbitrary method list."""

        def decorator(view_func):
            self.add_api_operation(
                path, methods, view_func, auth=auth, response=response
            )
            return view_func

        return decorator

    def add_api_operation(
        self,
        path: str,
        methods: List[str],
        view_func: Callable,
        *,
        auth=NOT_SET,
        response=None
    ):
        """Attach `view_func` to `path` for the given HTTP `methods`."""
        try:
            path_view = self.operations[path]
        except KeyError:
            # First operation on this path: create its aggregating view.
            path_view = self.operations[path] = PathView()
        path_view.add(
            path=path,
            methods=methods,
            view_func=view_func,
            auth=auth,
            response=response,
        )
        # If the router is already mounted on an API, propagate it right away.
        if self.api:
            path_view.set_api_instance(self.api)

    def set_api_instance(self, api):
        """Mount this router on `api` and propagate to all registered views."""
        self.api = api
        for path_view in self.operations.values():
            path_view.set_api_instance(self.api)

    def urls_paths(self, prefix: str):
        """Yield django.urls path() objects for every registered operation,
        mounted under `prefix`."""
        for raw_path, path_view in self.operations.items():
            # Convert "{param}" placeholders to Django's "<param>" syntax.
            converted = raw_path.replace("{", "<").replace("}", ">")
            route = "/".join(part for part in (prefix, converted) if part)
            # to skip lot of checks we simply treat double slash as a mistake:
            route = normalize_path(route).lstrip("/")
            yield django_path(route, path_view.get_view())
| 33.4 | 81 | 0.617141 |
3e522d51874c509f93273eccc7dbe055c60988fa
| 11,227 |
py
|
Python
|
grr/server/grr_response_server/flows/general/webhistory_test.py
|
syth3/grr
|
0e03976c5c5b694c210c0f392581364ddefa1f4b
|
[
"Apache-2.0"
] | null | null | null |
grr/server/grr_response_server/flows/general/webhistory_test.py
|
syth3/grr
|
0e03976c5c5b694c210c0f392581364ddefa1f4b
|
[
"Apache-2.0"
] | null | null | null |
grr/server/grr_response_server/flows/general/webhistory_test.py
|
syth3/grr
|
0e03976c5c5b694c210c0f392581364ddefa1f4b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Lint as: python3
# python3
"""Test the webhistory flows."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
from absl import app
import mock
from grr_response_client import client_utils
from grr_response_core.lib import utils
from grr_response_core.lib.parsers import chrome_history
from grr_response_core.lib.parsers import firefox3_history
from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import client_fs as rdf_client_fs
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_server import file_store
from grr_response_server.databases import db
from grr_response_server.flows.general import collectors
from grr_response_server.flows.general import webhistory
from grr.test_lib import action_mocks
from grr.test_lib import flow_test_lib
from grr.test_lib import parser_test_lib
from grr.test_lib import test_lib
class WebHistoryFlowTestMixin(flow_test_lib.FlowTestsBaseclass):

  def MockClientRawDevWithImage(self):
    """Mock the client to run off a test image.

    Returns:
      A context manager which ensures that client actions are served off the
      test image.
    """

    def FakeGetRawDevice(path):
      device = rdf_paths.PathSpec(
          pathtype=rdf_paths.PathSpec.PathType.OS,
          path=os.path.join(self.base_path, "test_img.dd"),
          mount_point="/")
      return device, path

    return utils.Stubber(client_utils, "GetRawDevice", FakeGetRawDevice)
class TestWebHistory(WebHistoryFlowTestMixin):
  """Test the browser history flows."""

  def setUp(self):
    super(TestWebHistory, self).setUp()
    # Set up client info
    users = [
        rdf_client.User(
            username="test",
            full_name="test user",
            homedir="/home/test/",
            last_logon=250)
    ]
    self.client_id = self.SetupClient(0, system="Linux", users=users)
    self.client_mock = action_mocks.FileFinderClientMock()

  def testChromeHistoryFetch(self):
    """Test that downloading the Chrome history works."""
    with self.MockClientRawDevWithImage():
      # Run the flow in the simulated way
      session_id = flow_test_lib.TestFlowHelper(
          webhistory.ChromeHistory.__name__,
          self.client_mock,
          check_flow_errors=False,
          client_id=self.client_id,
          username="test",
          token=self.token,
          pathtype=rdf_paths.PathSpec.PathType.TSK)

    # Now check that the right files were downloaded.
    fs_path = "/home/test/.config/google-chrome/Default/History"
    # Client path inside the test image: <base_path>/test_img.dd/<fs_path>.
    components = list(filter(bool, self.base_path.split(os.path.sep)))
    components.append("test_img.dd")
    components.extend(filter(bool, fs_path.split(os.path.sep)))

    # Check if the History file is created.
    cp = db.ClientPath.TSK(self.client_id, tuple(components))
    fd = file_store.OpenFile(cp)
    self.assertGreater(len(fd.read()), 20000)

    # Check for analysis file.
    results = flow_test_lib.GetFlowResults(self.client_id, session_id)
    self.assertGreater(len(results), 50)
    self.assertIn("funnycats.exe", "\n".join(map(str, results)))

  def testFirefoxHistoryFetch(self):
    """Test that downloading the Firefox history works."""
    with self.MockClientRawDevWithImage():
      # Run the flow in the simulated way
      session_id = flow_test_lib.TestFlowHelper(
          webhistory.FirefoxHistory.__name__,
          self.client_mock,
          check_flow_errors=False,
          client_id=self.client_id,
          username="test",
          token=self.token,
          pathtype=rdf_paths.PathSpec.PathType.TSK)

    # Now check that the right files were downloaded.
    fs_path = "/home/test/.mozilla/firefox/adts404t.default/places.sqlite"
    # Client path inside the test image: <base_path>/test_img.dd/<fs_path>.
    components = list(filter(bool, self.base_path.split(os.path.sep)))
    components.append("test_img.dd")
    components.extend(filter(bool, fs_path.split(os.path.sep)))

    # Check if the History file is created.
    cp = db.ClientPath.TSK(self.client_id, tuple(components))
    rel_fd = file_store.OpenFile(cp)
    # A valid SQLite database starts with this magic string.
    self.assertEqual(rel_fd.read(15), b"SQLite format 3")

    # Check for analysis file.
    results = flow_test_lib.GetFlowResults(self.client_id, session_id)
    self.assertGreater(len(results), 3)
    data = "\n".join(map(str, results))
    self.assertNotEqual(data.find("Welcome to Firefox"), -1)
    self.assertNotEqual(data.find("sport.orf.at"), -1)

  def testCacheGrep(self):
    """Test the Cache Grep plugin."""
    with self.MockClientRawDevWithImage():
      # Run the flow in the simulated way
      session_id = flow_test_lib.TestFlowHelper(
          webhistory.CacheGrep.__name__,
          self.client_mock,
          check_flow_errors=False,
          client_id=self.client_id,
          grep_users=["test"],
          data_regex=b"ENIAC",
          pathtype=rdf_paths.PathSpec.PathType.TSK,
          token=self.token)

    # Check if the collection file was created.
    hits = flow_test_lib.GetFlowResults(self.client_id, session_id)

    # There should be one hit.
    self.assertLen(hits, 1)

    # Get the first hit.
    self.assertIsInstance(hits[0], rdf_client_fs.StatEntry)
    self.assertEqual(hits[0].pathspec.last.path,
                     "/home/test/.config/google-chrome/Default/Cache/data_1")
class TestWebHistoryWithArtifacts(WebHistoryFlowTestMixin):
  """Test the browser history flows when run via the artifact collector."""

  def setUp(self):
    super(TestWebHistoryWithArtifacts, self).setUp()
    users = [
        rdf_client.User(
            username="test",
            full_name="test user",
            homedir="/home/test/",
            last_logon=250)
    ]
    self.client_id = self.SetupClient(
        0, system="Linux", os_version="12.04", users=users)
    self.client_mock = action_mocks.FileFinderClientMock()

  def RunCollectorAndGetCollection(self, artifact_list, client_mock=None, **kw):
    """Helper to handle running the collector flow."""
    if client_mock is None:
      client_mock = self.MockClient(client_id=self.client_id)

    session_id = flow_test_lib.TestFlowHelper(
        collectors.ArtifactCollectorFlow.__name__,
        client_mock=client_mock,
        client_id=self.client_id,
        artifact_list=artifact_list,
        token=self.token,
        **kw)

    return flow_test_lib.GetFlowResults(self.client_id, session_id)

  @parser_test_lib.WithParser("Chrome", chrome_history.ChromeHistoryParser)
  def testChrome(self):
    """Checks Chrome history collection via the ChromeHistory artifact."""
    with self.MockClientRawDevWithImage():
      fd = self.RunCollectorAndGetCollection(
          [webhistory.ChromeHistory.__name__],
          client_mock=self.client_mock,
          use_tsk=True)

    self.assertLen(fd, 71)
    self.assertIn("/home/john/Downloads/funcats_scr.exe",
                  [d.download_path for d in fd])
    self.assertIn("http://www.java.com/", [d.url for d in fd])
    self.assertEndsWith(fd[0].source_path,
                        "/home/test/.config/google-chrome/Default/History")

  @parser_test_lib.WithParser("Firefox", firefox3_history.FirefoxHistoryParser)
  def testFirefox(self):
    """Checks Firefox history collection via the FirefoxHistory artifact."""
    with self.MockClientRawDevWithImage():
      fd = self.RunCollectorAndGetCollection(
          [webhistory.FirefoxHistory.__name__],
          client_mock=self.client_mock,
          use_tsk=True)

    self.assertLen(fd, 5)
    self.assertEqual(fd[0].access_time.AsSecondsSinceEpoch(), 1340623334)
    self.assertIn("http://sport.orf.at/", [d.url for d in fd])
    self.assertEndsWith(
        fd[0].source_path,
        "/home/test/.mozilla/firefox/adts404t.default/places.sqlite")
class MockArtifactCollectorFlow(collectors.ArtifactCollectorFlow):
  """Fake collector replying with one synthetic StatEntry per artifact."""

  def Start(self):
    for name in self.args.artifact_list:
      fake_spec = rdf_paths.PathSpec.OS(path=f"/home/foo/{name}")
      self.SendReply(rdf_client_fs.StatEntry(pathspec=fake_spec))
class BrowserHistoryFlowTest(flow_test_lib.FlowTestsBaseclass):
  """Tests BrowserHistoryFlow's per-browser artifact dispatch."""

  def setUp(self):
    super().setUp()
    self.client_id = self.SetupClient(0)

  def _RunBrowserHistoryFlow(self, **kwargs):
    """Runs BrowserHistoryFlow with the given args and returns its results.

    The real ArtifactCollectorFlow is replaced by MockArtifactCollectorFlow,
    which replies with one synthetic StatEntry per requested artifact, so no
    actual collection happens.  (Previously every test repeated the same
    mock.patch block; it is centralized here.)
    """
    flow_args = webhistory.BrowserHistoryFlowArgs(**kwargs)
    with mock.patch.object(collectors, "ArtifactCollectorFlow",
                           MockArtifactCollectorFlow):
      flow_id = flow_test_lib.StartAndRunFlow(
          webhistory.BrowserHistoryFlow,
          creator=self.token.username,
          client_mock=action_mocks.ActionMock(),
          client_id=self.client_id,
          flow_args=flow_args)
      return flow_test_lib.GetFlowResults(self.client_id, flow_id)

  def _AssertSingleResultWithPath(self, results, expected_path):
    """Asserts that `results` holds exactly one entry with the given path."""
    self.assertLen(results, 1)
    self.assertEqual(results[0].pathspec.path, expected_path)

  def testCollectsChromeArtifacts(self):
    results = self._RunBrowserHistoryFlow(collect_chrome=True)
    self._AssertSingleResultWithPath(results, "/home/foo/ChromeHistory")

  def testCollectsFirefoxArtifacts(self):
    results = self._RunBrowserHistoryFlow(collect_firefox=True)
    self._AssertSingleResultWithPath(results, "/home/foo/FirefoxHistory")

  def testCollectsInternetExplorerArtifacts(self):
    results = self._RunBrowserHistoryFlow(collect_internet_explorer=True)
    self._AssertSingleResultWithPath(results,
                                     "/home/foo/InternetExplorerHistory")

  def testCollectsOperaArtifacts(self):
    results = self._RunBrowserHistoryFlow(collect_opera=True)
    self._AssertSingleResultWithPath(results, "/home/foo/OperaHistory")

  def testCollectsSafariArtifacts(self):
    results = self._RunBrowserHistoryFlow(collect_safari=True)
    self._AssertSingleResultWithPath(results, "/home/foo/SafariHistory")

  def testCollectsMultipleArtifacts(self):
    results = self._RunBrowserHistoryFlow(
        collect_safari=True,
        collect_firefox=True,
        collect_opera=True,
        collect_internet_explorer=True,
        collect_chrome=True)
    self.assertLen(results, 5)
    paths = [r.pathspec.path for r in results]
    self.assertCountEqual(paths, [
        "/home/foo/SafariHistory", "/home/foo/FirefoxHistory",
        "/home/foo/OperaHistory", "/home/foo/InternetExplorerHistory",
        "/home/foo/ChromeHistory"
    ])

  def testFailsForAllArgumentsFalse(self):
    # assertRaisesRegexp is a long-deprecated alias removed in Python 3.12;
    # assertRaisesRegex is the supported spelling.
    with self.assertRaisesRegex(
        RuntimeError, "Need to collect at least one type of history."):
      self._RunBrowserHistoryFlow()
def main(argv):
  """Entry point: runs the full test suite under the GRR test harness."""
  # Run the full test suite
  test_lib.main(argv)


if __name__ == "__main__":
  app.run(main)
9fbd3932b247f5559817e70145ee7399634d287f
| 5,783 |
py
|
Python
|
contrib/seeds/makeseeds.py
|
gusasar/yukkoin
|
8dd8634d80e9457ab003246f888672bb63782f60
|
[
"MIT"
] | null | null | null |
contrib/seeds/makeseeds.py
|
gusasar/yukkoin
|
8dd8634d80e9457ab003246f888672bb63782f60
|
[
"MIT"
] | null | null | null |
contrib/seeds/makeseeds.py
|
gusasar/yukkoin
|
8dd8634d80e9457ab003246f888672bb63782f60
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
# Maximum number of seed addresses to emit.
NSEEDS=512

# Cap on the number of seeds accepted from any single autonomous system.
MAX_SEEDS_PER_ASN=2

# Minimum chain height a node must report to qualify as a seed.
MIN_BLOCKS = 337600

# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
    "130.211.129.106", "178.63.107.226",
    "83.81.130.26", "88.198.17.7", "148.251.238.178", "176.9.46.6",
    "54.173.72.127", "54.174.10.182", "54.183.64.54", "54.194.231.211",
    "54.66.214.167", "54.66.220.137", "54.67.33.14", "54.77.251.214",
    "54.94.195.96", "54.94.200.247"
}

import re
import sys
import dns.resolver
import collections
# Address / agent formats accepted from the seeder dump.
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(r"^(/Satoshi:0.13.(0|1|2|99)/|/YukkoinCore:0.13.(0|1|2|99)/|/YukkoinCore:0.14.(0|1|2|99)/|/YukkoinCore:0.15.(0|1|2|99)/)$")

def parseline(line):
    """Parse one whitespace-separated record of the DNS seeder dump.

    Args:
        line: one line of the seeder's output.

    Returns:
        A dict with the node's attributes (net, ip, port, ipnum, uptime,
        lastsuccess, version, agent, service, blocks, sortkey), or None if
        the line is malformed, refers to localhost, or is flagged bad.
    """
    sline = line.split()
    # Fields up to index 11 (the quoted user agent) are read below, so a
    # valid record needs at least 12 columns.  The previous check (< 11)
    # let 11-column lines through and crashed with IndexError further down.
    if len(sline) < 12:
        return None
    m = PATTERN_IPV4.match(sline[0])
    sortkey = None
    ip = None
    if m is None:
        m = PATTERN_IPV6.match(sline[0])
        if m is None:
            m = PATTERN_ONION.match(sline[0])
            if m is None:
                # Unrecognized address format.
                return None
            net = 'onion'
            ipstr = sortkey = m.group(1)
            port = int(m.group(2))
        else:
            net = 'ipv6'
            if m.group(1) in ['::']:  # Not interested in localhost
                return None
            ipstr = m.group(1)
            sortkey = ipstr  # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
            port = int(m.group(2))
    else:
        # Do IPv4 sanity check: every octet must be in 0..255; build the
        # 32-bit numeric form used as the sort key.
        ip = 0
        for i in range(0, 4):
            if int(m.group(i + 2)) < 0 or int(m.group(i + 2)) > 255:
                return None
            ip = ip + (int(m.group(i + 2)) << (8 * (3 - i)))
        if ip == 0:
            return None
        net = 'ipv4'
        sortkey = ip
        ipstr = m.group(1)
        port = int(m.group(6))
    # Skip bad results: column 1 is the seeder's good/bad flag.  The previous
    # code compared the *string* field against the int 0 (always False in
    # Python), so bad entries were never actually filtered out.
    if sline[1] == '0':
        return None
    # Extract uptime % ("100.00%" -> 100.0).
    uptime30 = float(sline[7][:-1])
    # Extract Unix timestamp of last success.
    lastsuccess = int(sline[2])
    # Extract protocol version.
    version = int(sline[10])
    # Extract user agent (strip the surrounding quotes).
    agent = sline[11][1:-1]
    # Extract service flags (hex bitfield).
    service = int(sline[9], 16)
    # Extract blocks.
    blocks = int(sline[8])
    # Construct result.
    return {
        'net': net,
        'ip': ipstr,
        'port': port,
        'ipnum': ip,
        'uptime': uptime30,
        'lastsuccess': lastsuccess,
        'version': version,
        'agent': agent,
        'service': service,
        'blocks': blocks,
        'sortkey': sortkey,
    }
def filtermultiport(ips):
    '''Filter out hosts with more nodes per IP'''
    # Group entries by their sort key (numeric IPv4 / address string); hosts
    # that appear more than once (multiple ports) are dropped entirely.
    by_key = collections.defaultdict(list)
    for entry in ips:
        by_key[entry['sortkey']].append(entry)
    return [entries[0] for entries in by_key.values() if len(entries) == 1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
    """Limit IPv4 seeds per ASN and in total; pass IPv6/onion through.

    Args:
        ips: list of node dicts as produced by parseline().
        max_per_asn: maximum IPv4 seeds accepted per autonomous system.
        max_total: maximum number of IPv4 seeds overall.

    Returns:
        Filtered list: capped IPv4 entries followed by all IPv6 and onion
        entries (which are not ASN-filtered -- see TODO below).
    """
    # Sift out ips by type
    ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
    ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
    ips_onion = [ip for ip in ips if ip['net'] == 'onion']
    # Filter IPv4 by ASN
    result = []
    asn_count = {}
    for ip in ips_ipv4:
        if len(result) == max_total:
            break
        try:
            # Reverse the octets and query Cymru's DNS-based IP-to-ASN
            # service; the TXT answer begins with the AS number.
            asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
            if asn not in asn_count:
                asn_count[asn] = 0
            if asn_count[asn] == max_per_asn:
                continue
            asn_count[asn] += 1
            result.append(ip)
        except Exception:
            # The previous bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt; catch only ordinary errors (DNS failures,
            # unparsable answers) and keep going.
            sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
    # TODO: filter IPv6 by ASN
    # Add back non-IPv4
    result.extend(ips_ipv6)
    result.extend(ips_onion)
    return result
def main():
    """Read seeder dump lines from stdin, apply quality filters, and print
    the resulting seed addresses (host:port, one per line) to stdout."""
    lines = sys.stdin.readlines()
    ips = [parseline(line) for line in lines]

    # Skip entries that failed to parse into a valid address.
    ips = [ip for ip in ips if ip is not None]
    # Skip entries from suspicious hosts.
    ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
    # Enforce minimal number of blocks.
    ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
    # Require service bit 1.
    ips = [ip for ip in ips if (ip['service'] & 1) == 1]
    # Require at least 50% 30-day uptime.
    ips = [ip for ip in ips if ip['uptime'] > 50]
    # Require a known and recent user agent.
    ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])]
    # Sort by availability (and use last success as tie breaker)
    ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
    # Filter out hosts with multiple bitcoin ports, these are likely abusive
    ips = filtermultiport(ips)
    # Look up ASNs and limit results, both per ASN and globally.
    ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
    # Sort the results by IP address (for deterministic output).
    ips.sort(key=lambda x: (x['net'], x['sortkey']))

    for ip in ips:
        if ip['net'] == 'ipv6':
            # IPv6 addresses are bracketed to separate them from the port.
            print('[%s]:%i' % (ip['ip'], ip['port']))
        else:
            print('%s:%i' % (ip['ip'], ip['port']))

if __name__ == '__main__':
    main()
| 33.427746 | 186 | 0.570292 |
5300e3068795bc1187e25e0e8516b23aa4d77db8
| 21,157 |
py
|
Python
|
spark_fhir_schemas/r4/complex_types/imagingstudy_series.py
|
imranq2/SparkFhirSchemas
|
24debae6980fb520fe55aa199bdfd43c0092eb9c
|
[
"Apache-2.0"
] | 2 |
2020-10-31T23:25:01.000Z
|
2021-06-09T14:12:42.000Z
|
spark_fhir_schemas/r4/complex_types/imagingstudy_series.py
|
imranq2/SparkFhirSchemas
|
24debae6980fb520fe55aa199bdfd43c0092eb9c
|
[
"Apache-2.0"
] | null | null | null |
spark_fhir_schemas/r4/complex_types/imagingstudy_series.py
|
imranq2/SparkFhirSchemas
|
24debae6980fb520fe55aa199bdfd43c0092eb9c
|
[
"Apache-2.0"
] | null | null | null |
from typing import Union, List, Optional
from pyspark.sql.types import StructType, StructField, StringType, ArrayType, DataType
# This file is auto-generated by generate_schema so do not edit it manually
# noinspection PyPep8Naming
class ImagingStudy_SeriesSchema:
    """
    Representation of the content produced in a DICOM imaging study. A study
    comprises a set of series, each of which includes a set of Service-Object Pair
    Instances (SOP Instances - images or other data) acquired or produced in a
    common context. A series is of only one modality (e.g. X-ray, CT, MR,
    ultrasound), but a study may have multiple series of different modalities.
    """

    # NOTE: this file is auto-generated (see file header); keep edits in sync
    # with the generator rather than hand-modifying the schema below.
    # noinspection PyDefaultArgument
    @staticmethod
    def get_schema(
        max_nesting_depth: Optional[int] = 6,
        nesting_depth: int = 0,
        nesting_list: List[str] = [],
        max_recursion_limit: Optional[int] = 2,
        include_extension: Optional[bool] = False,
        extension_fields: Optional[List[str]] = [
            "valueBoolean",
            "valueCode",
            "valueDate",
            "valueDateTime",
            "valueDecimal",
            "valueId",
            "valueInteger",
            "valuePositiveInt",
            "valueString",
            "valueTime",
            "valueUnsignedInt",
            "valueUri",
            "valueUrl",
        ],
        extension_depth: int = 0,
        max_extension_depth: Optional[int] = 2,
        include_modifierExtension: Optional[bool] = False,
    ) -> Union[StructType, DataType]:
        """
        Representation of the content produced in a DICOM imaging study. A study
        comprises a set of series, each of which includes a set of Service-Object Pair
        Instances (SOP Instances - images or other data) acquired or produced in a
        common context. A series is of only one modality (e.g. X-ray, CT, MR,
        ultrasound), but a study may have multiple series of different modalities.
        id: Unique id for the element within a resource (for internal references). This
        may be any string value that does not contain spaces.
        extension: May be used to represent additional information that is not part of the basic
        definition of the element. To make the use of extensions safe and manageable,
        there is a strict set of governance applied to the definition and use of
        extensions. Though any implementer can define an extension, there is a set of
        requirements that SHALL be met as part of the definition of the extension.
        modifierExtension: May be used to represent additional information that is not part of the basic
        definition of the element and that modifies the understanding of the element
        in which it is contained and/or the understanding of the containing element's
        descendants. Usually modifier elements provide negation or qualification. To
        make the use of extensions safe and manageable, there is a strict set of
        governance applied to the definition and use of extensions. Though any
        implementer can define an extension, there is a set of requirements that SHALL
        be met as part of the definition of the extension. Applications processing a
        resource are required to check for modifier extensions.
        Modifier extensions SHALL NOT change the meaning of any elements on Resource
        or DomainResource (including cannot change the meaning of modifierExtension
        itself).
        uid: The DICOM Series Instance UID for the series.
        number: The numeric identifier of this series in the study.
        modality: The modality of this series sequence.
        description: A description of the series.
        numberOfInstances: Number of SOP Instances in the Study. The value given may be larger than the
        number of instance elements this resource contains due to resource
        availability, security, or other factors. This element should be present if
        any instance elements are present.
        endpoint: The network service providing access (e.g., query, view, or retrieval) for
        this series. See implementation notes for information about using DICOM
        endpoints. A series-level endpoint, if present, has precedence over a study-
        level endpoint with the same Endpoint.connectionType.
        bodySite: The anatomic structures examined. See DICOM Part 16 Annex L (http://dicom.nema
        .org/medical/dicom/current/output/chtml/part16/chapter_L.html) for DICOM to
        SNOMED-CT mappings. The bodySite may indicate the laterality of body part
        imaged; if so, it shall be consistent with any content of
        ImagingStudy.series.laterality.
        laterality: The laterality of the (possibly paired) anatomic structures examined. E.g.,
        the left knee, both lungs, or unpaired abdomen. If present, shall be
        consistent with any laterality information indicated in
        ImagingStudy.series.bodySite.
        specimen: The specimen imaged, e.g., for whole slide imaging of a biopsy.
        started: The date and time the series was started.
        performer: Indicates who or what performed the series and how they were involved.
        instance: A single SOP instance within the series, e.g. an image, or presentation state.
        """
        # Imports are kept function-local, presumably to avoid circular imports
        # between the generated schema modules — verify against generate_schema.
        from spark_fhir_schemas.r4.complex_types.extension import ExtensionSchema
        from spark_fhir_schemas.r4.simple_types.id import idSchema
        from spark_fhir_schemas.r4.simple_types.unsignedint import unsignedIntSchema
        from spark_fhir_schemas.r4.complex_types.coding import CodingSchema
        from spark_fhir_schemas.r4.complex_types.reference import ReferenceSchema
        from spark_fhir_schemas.r4.simple_types.datetime import dateTimeSchema
        from spark_fhir_schemas.r4.complex_types.imagingstudy_performer import (
            ImagingStudy_PerformerSchema,
        )
        from spark_fhir_schemas.r4.complex_types.imagingstudy_instance import (
            ImagingStudy_InstanceSchema,
        )
        # Truncate the schema once this type has recursed too often or nested
        # too deep: emit a bare {id: string} struct instead of the full schema.
        if (
            max_recursion_limit
            and nesting_list.count("ImagingStudy_Series") >= max_recursion_limit
        ) or (max_nesting_depth and nesting_depth >= max_nesting_depth):
            return StructType([StructField("id", StringType(), True)])
        # add my name to recursion list for later
        my_nesting_list: List[str] = nesting_list + ["ImagingStudy_Series"]
        schema = StructType(
            [
                # Unique id for the element within a resource (for internal references). This
                # may be any string value that does not contain spaces.
                StructField("id", StringType(), True),
                # May be used to represent additional information that is not part of the basic
                # definition of the element. To make the use of extensions safe and manageable,
                # there is a strict set of governance applied to the definition and use of
                # extensions. Though any implementer can define an extension, there is a set of
                # requirements that SHALL be met as part of the definition of the extension.
                StructField(
                    "extension",
                    ArrayType(
                        ExtensionSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                        )
                    ),
                    True,
                ),
                # May be used to represent additional information that is not part of the basic
                # definition of the element and that modifies the understanding of the element
                # in which it is contained and/or the understanding of the containing element's
                # descendants. Usually modifier elements provide negation or qualification. To
                # make the use of extensions safe and manageable, there is a strict set of
                # governance applied to the definition and use of extensions. Though any
                # implementer can define an extension, there is a set of requirements that SHALL
                # be met as part of the definition of the extension. Applications processing a
                # resource are required to check for modifier extensions.
                #
                # Modifier extensions SHALL NOT change the meaning of any elements on Resource
                # or DomainResource (including cannot change the meaning of modifierExtension
                # itself).
                StructField(
                    "modifierExtension",
                    ArrayType(
                        ExtensionSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                        )
                    ),
                    True,
                ),
                # The DICOM Series Instance UID for the series.
                StructField(
                    "uid",
                    idSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                    ),
                    True,
                ),
                # The numeric identifier of this series in the study.
                StructField(
                    "number",
                    unsignedIntSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                    ),
                    True,
                ),
                # The modality of this series sequence.
                StructField(
                    "modality",
                    CodingSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                    ),
                    True,
                ),
                # A description of the series.
                StructField("description", StringType(), True),
                # Number of SOP Instances in the Study. The value given may be larger than the
                # number of instance elements this resource contains due to resource
                # availability, security, or other factors. This element should be present if
                # any instance elements are present.
                StructField(
                    "numberOfInstances",
                    unsignedIntSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                    ),
                    True,
                ),
                # The network service providing access (e.g., query, view, or retrieval) for
                # this series. See implementation notes for information about using DICOM
                # endpoints. A series-level endpoint, if present, has precedence over a study-
                # level endpoint with the same Endpoint.connectionType.
                StructField(
                    "endpoint",
                    ArrayType(
                        ReferenceSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                        )
                    ),
                    True,
                ),
                # The anatomic structures examined. See DICOM Part 16 Annex L (http://dicom.nema
                # .org/medical/dicom/current/output/chtml/part16/chapter_L.html) for DICOM to
                # SNOMED-CT mappings. The bodySite may indicate the laterality of body part
                # imaged; if so, it shall be consistent with any content of
                # ImagingStudy.series.laterality.
                StructField(
                    "bodySite",
                    CodingSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                    ),
                    True,
                ),
                # The laterality of the (possibly paired) anatomic structures examined. E.g.,
                # the left knee, both lungs, or unpaired abdomen. If present, shall be
                # consistent with any laterality information indicated in
                # ImagingStudy.series.bodySite.
                StructField(
                    "laterality",
                    CodingSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                    ),
                    True,
                ),
                # The specimen imaged, e.g., for whole slide imaging of a biopsy.
                StructField(
                    "specimen",
                    ArrayType(
                        ReferenceSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                        )
                    ),
                    True,
                ),
                # The date and time the series was started.
                StructField(
                    "started",
                    dateTimeSchema.get_schema(
                        max_nesting_depth=max_nesting_depth,
                        nesting_depth=nesting_depth + 1,
                        nesting_list=my_nesting_list,
                        max_recursion_limit=max_recursion_limit,
                        include_extension=include_extension,
                        extension_fields=extension_fields,
                        extension_depth=extension_depth + 1,
                        max_extension_depth=max_extension_depth,
                        include_modifierExtension=include_modifierExtension,
                    ),
                    True,
                ),
                # Indicates who or what performed the series and how they were involved.
                StructField(
                    "performer",
                    ArrayType(
                        ImagingStudy_PerformerSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                        )
                    ),
                    True,
                ),
                # A single SOP instance within the series, e.g. an image, or presentation state.
                StructField(
                    "instance",
                    ArrayType(
                        ImagingStudy_InstanceSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                            include_modifierExtension=include_modifierExtension,
                        )
                    ),
                    True,
                ),
            ]
        )
        # When extensions are excluded, collapse the nested extension struct to
        # a plain string column so downstream consumers keep a stable shape.
        if not include_extension:
            schema.fields = [
                c
                if c.name != "extension"
                else StructField("extension", StringType(), True)
                for c in schema.fields
            ]
        # Same collapse for modifierExtension when it is excluded.
        if not include_modifierExtension:
            schema.fields = [
                c
                if c.name != "modifierExtension"
                else StructField("modifierExtension", StringType(), True)
                for c in schema.fields
            ]
        return schema
| 51.855392 | 104 | 0.563974 |
55a59dd4953a7339056357dcf8d807fa67836046
| 4,706 |
py
|
Python
|
cogs/today.py
|
waki285/rt-bot
|
1b264d7d9fe75526766b56547cea801619d13051
|
[
"BSD-4-Clause"
] | 26 |
2021-11-30T02:48:16.000Z
|
2022-03-26T04:47:25.000Z
|
cogs/today.py
|
waki285/rt-bot
|
1b264d7d9fe75526766b56547cea801619d13051
|
[
"BSD-4-Clause"
] | 143 |
2021-11-04T07:47:53.000Z
|
2022-03-31T23:13:33.000Z
|
cogs/today.py
|
waki285/rt-bot
|
1b264d7d9fe75526766b56547cea801619d13051
|
[
"BSD-4-Clause"
] | 14 |
2021-11-12T15:32:27.000Z
|
2022-03-28T04:04:44.000Z
|
# RT - What day is today
from discord.ext import commands, tasks
import discord
from rtlib import DatabaseManager
from bs4 import BeautifulSoup
from datetime import datetime, timedelta
class DataManager(DatabaseManager):
    """Persistence layer for the "what day is today" notification settings.

    NOTE(review): callers invoke these methods WITHOUT the ``cursor`` argument
    (e.g. ``self.write(guild_id, channel_id)`` in the Today cog), so the cursor
    is presumably injected by rtlib's DatabaseManager machinery — verify.
    """
    # Table holding one row per (guild, channel) notification subscription.
    DB = "Today"
    def __init__(self, db):
        self.db = db
    async def init_table(self, cursor) -> None:
        """Create the settings table if it does not already exist."""
        await cursor.create_table(
            self.DB, {
                "GuildID": "BIGINT", "ChannelID": "BIGINT"
            }
        )
    async def write(self, cursor, guild_id: int, channel_id: int) -> None:
        """Register a channel for daily notifications.

        Raises KeyError if the (guild, channel) pair is already registered.
        """
        target = {
            "GuildID": guild_id, "ChannelID": channel_id
        }
        if await cursor.exists(self.DB, target):
            raise KeyError("既に設定されています。")
        else:
            await cursor.insert_data(self.DB, target)
    async def delete(self, cursor, guild_id: int, channel_id: int) -> None:
        """Remove the notification setting for the given guild/channel pair."""
        await cursor.delete(
            self.DB, {"GuildID": guild_id, "ChannelID": channel_id}
        )
    async def reads(self, cursor, guild_id: int = None) -> list:
        """Return all settings rows, optionally filtered to one guild.

        Falsy rows are filtered out; each row is (GuildID, ChannelID).
        """
        target = {}
        if guild_id is not None:
            target["GuildID"] = guild_id
        return [
            row async for row in cursor.get_datas(
                self.DB, target)
            if row
        ]
class Today(commands.Cog, DataManager):
    """Cog that shows / schedules Yahoo Kids' "what day is today" entry."""
    # Favicon used in the embed footer.
    YAHOO_ICON = "http://www.google.com/s2/favicons?domain=www.yahoo.co.jp"
    def __init__(self, bot):
        self.bot = bot
        # Guard flag so the daily notification fires only once per trigger minute.
        self.yet = True
        self.bot.loop.create_task(self.init_database())
    async def init_database(self):
        """Initialize the DataManager table, then start the notifier loop."""
        await self.bot.wait_until_ready()
        # Initialize the DataManager side of the MRO with the bot's MySQL pool.
        super(commands.Cog, self).__init__(self.bot.mysql)
        await self.init_table()
        self.today_notification.start()
    async def get_today(self) -> discord.Embed:
        # Fetch today's "what day is it" entry from Yahoo Kids and wrap it in an embed.
        async with self.bot.session.get(
            "https://kids.yahoo.co.jp/today"
        ) as r:
            # First <dl> on the page holds the entry: <span> title, <dd> body.
            day = BeautifulSoup(
                await r.read(), "html.parser"
            ).find("dl")
        embed = discord.Embed(
            title=day.find("span").text,
            description=day.find("dd").text,
            color=0xee373e
        )
        embed.set_footer(
            text="Yahoo きっず",
            icon_url=self.YAHOO_ICON
        )
        return embed
    # NOTE: the docstring below is user-facing help text parsed by the bot
    # framework (the `!lang ja` marker), so it is intentionally left in Japanese.
    @commands.command(
        extras={
            "headding": {
                "ja": "今日は何の日を表示、通知します。",
                "en": "Sorry, This command is not supported."
            }, "parent": "Entertainment"
        }
    )
    @commands.cooldown(1, 10, commands.BucketType.guild)
    async def today(self, ctx, setting: bool = None):
        """!lang ja
        --------
        今日は何の日を表示または通知します。
        Parameters
        ----------
        setting : bool, default False
            通知を設定するかどうかです。
            これはデフォルトではoffとなっておりこれをoffにすると今日は何の日を表示します。
            もしこれをonにすると実行したチャンネルに通知を送るように設定をします。
        Examples
        --------
        `rt!today` 今日は何の日を表示します。
        `rt!today on` 実行したチャンネルに毎日朝九時に今日は何の日を送信します。"""
        if setting is None:
            # No argument: just show today's entry.
            await ctx.reply(embed=await self.get_today())
        elif ctx.author.guild_permissions.manage_channels:
            try:
                await self.write(ctx.guild.id, ctx.channel.id)
                # NOTE(review): `== 4` after a successful write allows at most
                # three pre-existing settings; the help text says three but the
                # error reply below says four — confirm the intended limit.
                if len(await self.reads(ctx.guild.id)) == 4:
                    raise OverflowError(
                        "一つのサーバーにつき三つまでしか設定できないようにする。"
                    )
            except (KeyError, OverflowError) as e:
                # KeyError: setting existed, so this acts as a toggle-off.
                # OverflowError: roll back the write that exceeded the limit.
                await self.delete(ctx.guild.id, ctx.channel.id)
                if isinstance(e, OverflowError):
                    return await ctx.reply(
                        "一つのサーバーにつき四つまで設定が可能です。"
                    )
            await ctx.reply("Ok")
        else:
            await ctx.reply("チャンネル管理権限がないと通知の設定はできません。")
    def cog_unload(self):
        # Stop the background loop when the cog is unloaded.
        self.today_notification.cancel()
    @tasks.loop(seconds=30)
    async def today_notification(self):
        # Send the daily "what day is today" notification.
        # NOTE(review): adding 9h to naive local time assumes the host clock is
        # UTC, making this a 09:00 JST trigger — confirm deployment timezone.
        if self.yet and (
            datetime.now() + timedelta(hours=9)
        ).strftime("%H:%M") == "09:00":
            for row in await self.reads():
                channel = self.bot.get_channel(row[1])
                if channel:
                    try:
                        await channel.send(embed=await self.get_today())
                    except (discord.HTTPException, discord.Forbidden):
                        pass
                else:
                    # Channel no longer exists: drop its notification setting.
                    await self.delete(row[0], row[1])
            # Mark as sent so the 30s loop doesn't re-fire within the same minute.
            self.yet = False
        else:
            self.yet = True
def setup(bot):
    """Extension entry point used by discord.py's load_extension."""
    cog = Today(bot)
    bot.add_cog(cog)
| 30.36129 | 75 | 0.534424 |
8ce5f411f25f65e0ecd385bbd186c683b1f266ee
| 332 |
py
|
Python
|
Python/num.py
|
MarsBighead/mustang
|
ffbaf109931557e40da2d97e4eb914bc1c0aba0d
|
[
"MIT"
] | 4 |
2017-04-30T18:28:19.000Z
|
2018-12-08T15:46:37.000Z
|
Python/num.py
|
MarsBighead/mustang
|
ffbaf109931557e40da2d97e4eb914bc1c0aba0d
|
[
"MIT"
] | 1 |
2021-09-22T20:11:36.000Z
|
2021-09-22T20:11:36.000Z
|
Python/num.py
|
MarsBighead/mustang
|
ffbaf109931557e40da2d97e4eb914bc1c0aba0d
|
[
"MIT"
] | null | null | null |
#!/bin/python3
import time
import numpy as np
if __name__ == '__main__':
    arr = np.arange(10)
    print(arr)

    # Compare allocation time of a 5000x5000 zero matrix: NumPy vs pure Python.
    # perf_counter() is monotonic and higher-resolution than time.time(),
    # which is the right tool for micro-timing like this.
    t0 = time.perf_counter()
    arr = np.zeros((5000, 5000))
    t1 = time.perf_counter()
    # [0] * 5000 builds each row in C instead of a per-element comprehension.
    nested = [[0] * 5000 for _ in range(5000)]
    t2 = time.perf_counter()
    print(f"numpy zeros: {t1 - t0:.6f}s, python list: {t2 - t1:.6f}s")

    # Idiomatic NumPy multi-dimensional indexing (single operation, no
    # intermediate row view as with arr[0][0]).
    arr[0, 0] = 1
    print(arr[0, 0])
| 20.75 | 56 | 0.578313 |
e753b059acef4b238e2b270c01d4a8cfcd625d38
| 3,600 |
py
|
Python
|
n2n-tomo/unet_01_relu.py
|
vlcekl/n2n-tomo
|
97256212c6b958ae1b0791a897b131834aa90a3c
|
[
"MIT"
] | null | null | null |
n2n-tomo/unet_01_relu.py
|
vlcekl/n2n-tomo
|
97256212c6b958ae1b0791a897b131834aa90a3c
|
[
"MIT"
] | null | null | null |
n2n-tomo/unet_01_relu.py
|
vlcekl/n2n-tomo
|
97256212c6b958ae1b0791a897b131834aa90a3c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import torch
import torch.nn as nn
class UNet(nn.Module):
    """Custom U-Net architecture for Noise2Noise (see Appendix, Table 2)."""

    # NOTE(review): _block2 and _block5 are single module instances applied at
    # several pyramid levels in forward(), so their weights are SHARED across
    # those stages — confirm this parameter sharing is intentional.
    def __init__(self, in_channels=1, out_channels=1):
        """Initializes U-Net."""

        super(UNet, self).__init__()

        # Layers: enc_conv0, enc_conv1, pool1
        # in_channels -> 24 channels, spatial size halved by the max-pool.
        self._block1 = nn.Sequential(
            nn.Conv2d(in_channels, 24, 3, stride=1, padding=1),
            nn.LeakyReLU(0.1),
            nn.Conv2d(24, 24, 3, padding=1),
            nn.LeakyReLU(0.1),
            nn.MaxPool2d(2))

        # Layers: enc_conv(i), pool(i); i=2..5
        # Reused for every remaining encoder level (shared weights, see NOTE).
        self._block2 = nn.Sequential(
            nn.Conv2d(24, 24, 3, stride=1, padding=1),
            nn.LeakyReLU(0.1),
            nn.MaxPool2d(2))

        # Layers: enc_conv6, upsample5
        # Bottleneck conv followed by a learned 2x upsampling.
        self._block3 = nn.Sequential(
            nn.Conv2d(24, 24, 3, stride=1, padding=1),
            nn.LeakyReLU(0.1),
            nn.ConvTranspose2d(24, 24, 3, stride=2, padding=1, output_padding=1))
            #nn.Upsample(scale_factor=2, mode='nearest'))

        # Layers: dec_conv5a, dec_conv5b, upsample4
        # Input is 48 channels: 24 upsampled + 24 from the skip connection.
        self._block4 = nn.Sequential(
            nn.Conv2d(48, 48, 3, stride=1, padding=1),
            nn.LeakyReLU(0.1),
            nn.Conv2d(48, 48, 3, stride=1, padding=1),
            nn.LeakyReLU(0.1),
            nn.ConvTranspose2d(48, 48, 3, stride=2, padding=1, output_padding=1))
            #nn.Upsample(scale_factor=2, mode='nearest'))

        # Layers: dec_deconv(i)a, dec_deconv(i)b, upsample(i-1); i=4..2
        # Reused for three decoder levels (shared weights, see NOTE). Input is
        # 72 channels: 48 upsampled + 24 from the skip connection.
        self._block5 = nn.Sequential(
            nn.Conv2d(72, 48, 3, stride=1, padding=1),
            nn.LeakyReLU(0.1),
            nn.Conv2d(48, 48, 3, stride=1, padding=1),
            nn.LeakyReLU(0.1),
            nn.ConvTranspose2d(48, 48, 3, stride=2, padding=1, output_padding=1))
            #nn.Upsample(scale_factor=2, mode='nearest'))

        # Layers: dec_conv1a, dec_conv1b, dec_conv1c,
        # Final head: 48 decoder channels + the raw input (long skip), ending
        # in a ReLU so outputs are non-negative.
        self._block6 = nn.Sequential(
            nn.Conv2d(48 + in_channels, 32, 3, stride=1, padding=1),
            nn.LeakyReLU(0.1),
            nn.Conv2d(32, 16, 3, stride=1, padding=1),
            nn.LeakyReLU(0.1),
            nn.Conv2d(16, out_channels, 3, stride=1, padding=1),
            nn.ReLU(inplace=True))
            #nn.LeakyReLU(0.1))
            #nn.Tanh())
            #nn.Linear())

        # Initialize weights
        self._init_weights()


    def _init_weights(self):
        """Initializes weights using He et al. (2015)."""

        for m in self.modules():
            if isinstance(m, nn.ConvTranspose2d) or isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight.data)
                m.bias.data.zero_()


    def forward(self, x):
        """Through encoder, then decoder by adding U-skip connections. """

        # Encoder
        # Spatial size is halved at each stage; input must be divisible by 32
        # for the skip concatenations to line up — TODO confirm with callers.
        pool1 = self._block1(x)
        pool2 = self._block2(pool1)
        pool3 = self._block2(pool2)
        pool4 = self._block2(pool3)
        pool5 = self._block2(pool4)

        # Decoder
        # Each step upsamples and concatenates the matching encoder feature map.
        upsample5 = self._block3(pool5)
        concat5 = torch.cat((upsample5, pool4), dim=1)
        upsample4 = self._block4(concat5)
        concat4 = torch.cat((upsample4, pool3), dim=1)
        upsample3 = self._block5(concat4)
        concat3 = torch.cat((upsample3, pool2), dim=1)
        upsample2 = self._block5(concat3)
        concat2 = torch.cat((upsample2, pool1), dim=1)
        upsample1 = self._block5(concat2)
        concat1 = torch.cat((upsample1, x), dim=1)

        # Final activation
        return self._block6(concat1)
| 34.951456 | 81 | 0.563333 |
00c1ea347c9aeb22b828c1e43e36f4bb2a89ab1e
| 2,327 |
py
|
Python
|
saas/aiops/api/aiops-server/utils/data_source/data_source_connector.py
|
iuskye/SREWorks
|
a2a7446767d97ec5f6d15bd00189c42150d6c894
|
[
"Apache-2.0"
] | 407 |
2022-03-16T08:09:38.000Z
|
2022-03-31T12:27:10.000Z
|
saas/aiops/api/aiops-server/utils/data_source/data_source_connector.py
|
Kwafoor/SREWorks
|
37a64a0a84b29c65cf6b77424bd2acd0c7b42e2b
|
[
"Apache-2.0"
] | 25 |
2022-03-22T04:27:31.000Z
|
2022-03-30T08:47:28.000Z
|
saas/aiops/api/aiops-server/utils/data_source/data_source_connector.py
|
Kwafoor/SREWorks
|
37a64a0a84b29c65cf6b77424bd2acd0c7b42e2b
|
[
"Apache-2.0"
] | 109 |
2022-03-21T17:30:44.000Z
|
2022-03-31T09:36:28.000Z
|
#!/usr/bin/env python
# encoding: utf-8
""" """
__author__ = 'sreworks'
import logging
import threading
from elasticsearch import Elasticsearch
from common.exception.errors import DatasourceError
from services.datasource_service import DatasourceService
# Synchronization lock decorator.
def synchronous_lock(func):
    """Serialize calls to ``func`` across threads with one shared lock.

    Bug fix: the original implementation created a brand-new
    ``threading.Lock()`` on every call (``with threading.Lock():``), so no two
    calls ever contended on the same lock and the decorator provided no mutual
    exclusion at all. The lock is now created once, in the decorator's
    closure, and shared by every invocation of the wrapped function.
    """
    lock = threading.Lock()

    def wrapper(*args, **kwargs):
        with lock:
            return func(*args, **kwargs)
    return wrapper
class DataSourceConnector(object):
    """Singleton that caches live connections to configured data sources.

    Connections are cached in ``self.db_conn`` keyed by
    ``[data_source_type][data_source_id]``.
    """
    # Type tag for Elasticsearch data sources.
    es_identifier = 'es'
    # Shared singleton instance (created lazily in __new__).
    instance = None
    @synchronous_lock
    def __new__(cls, *args, **kwargs):
        # Thread-safe lazy singleton creation.
        if cls.instance is None:
            cls.instance = object.__new__(cls)
        return cls.instance
    def __init__(self):
        # NOTE(review): __init__ runs on EVERY DataSourceConnector() call even
        # though __new__ returns the shared instance, so the connection cache
        # is reset each time the class is instantiated — confirm intended.
        self.datasource_service = DatasourceService()
        self.db_conn = {}
        self.logger = logging.getLogger(__name__)
    def get_conn(self, data_source_type, data_source_id):
        """Return a cached connection, creating and caching it on first use.

        Returns None for unsupported data source types.
        """
        # TODO: currently only Elasticsearch data sources are supported.
        if data_source_type == self.es_identifier:
            if data_source_type not in self.db_conn:
                self.db_conn[data_source_type] = {}
            if data_source_id not in self.db_conn.get(data_source_type):
                es_connector = self._get_es_conn(data_source_id)
                self.db_conn[data_source_type][data_source_id] = es_connector
                return es_connector
            else:
                return self.db_conn[data_source_type][data_source_id]
    def _get_es_conn(self, data_source_id):
        """Build an Elasticsearch client from the stored datasource config.

        Raises DatasourceError when the datasource id is unknown. Uses HTTP
        basic auth when both access_key and secret_key are configured.
        """
        datasource = self.datasource_service.get_datasource_by_id(data_source_id)
        if datasource is None:
            raise DatasourceError("数据源异常")
        self.logger.info(datasource)
        es_clusters = [{
            "host": datasource.get("endpoint"),
            # "host": "elasticsearch-master.ca221ae8860d9421688e59c8ab45c8b21.cn-hangzhou.alicontainer.com",
            "port": datasource.get("port"),
            # "port": 80,
            "index": datasource.get("source_table")
        }]
        access_key = datasource.get("access_key")
        secret_key = datasource.get("secret_key")
        if access_key and secret_key:
            connector = Elasticsearch(es_clusters, http_auth=(access_key, secret_key))
        else:
            connector = Elasticsearch(es_clusters)
        return connector
    def _get_mysql_conn(self, name):
        # Placeholder: MySQL support not implemented yet.
        pass
436d5b0edb69c6b0a2430c83ab7766193f62f382
| 10,714 |
py
|
Python
|
experiments/testing/ROLO_network_test_single.py
|
marcpasfatou/ROLO
|
32137cbe749320c9b2aff6c8cc4d954b013f779c
|
[
"Apache-2.0"
] | 3 |
2017-12-01T22:20:41.000Z
|
2019-04-23T13:56:06.000Z
|
experiments/testing/ROLO_network_test_single.py
|
marcpasfatou/ROLO
|
32137cbe749320c9b2aff6c8cc4d954b013f779c
|
[
"Apache-2.0"
] | null | null | null |
experiments/testing/ROLO_network_test_single.py
|
marcpasfatou/ROLO
|
32137cbe749320c9b2aff6c8cc4d954b013f779c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) <2016> <GUANGHAN NING>. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Script File: ROLO_network_test_single.py
Description:
ROLO is short for Recurrent YOLO, aimed at simultaneous object detection and tracking
Paper: http://arxiv.org/abs/1607.05781
Author: Guanghan Ning
Webpage: http://guanghan.info/
'''
# Imports
import ROLO_utils as utils
import tensorflow as tf
from tensorflow.models.rnn import rnn, rnn_cell
import cv2
import numpy as np
import os.path
import time
import random
class ROLO_TF:
    """Recurrent YOLO (ROLO) test harness: an LSTM over per-frame YOLO
    features that regresses a bounding-box location for tracking.

    NOTE(review): this code targets legacy TensorFlow 0.x APIs
    (tensorflow.models.rnn, initialize_all_variables) and contains a Python 2
    remnant (`xrange` in dropout_features) — it will not run on modern stacks
    without porting.
    """
    disp_console = True
    restore_weights = True#False

    # YOLO parameters
    fromfile = None
    tofile_img = 'test/output.jpg'
    tofile_txt = 'test/output.txt'
    imshow = True
    filewrite_img = False
    filewrite_txt = False
    # NOTE(review): disp_console is assigned twice (also above); this second
    # assignment is the one in effect at class-creation time.
    disp_console = True
    yolo_weights_file = '/home/tf/ROLO/3rd\ party_upgrade/weights/YOLO_small.ckpt'
    alpha = 0.1
    threshold = 0.2
    iou_threshold = 0.5
    num_class = 20
    num_box = 2
    grid_size = 7
    classes =  ["aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", "chair", "cow", "diningtable", "dog", "horse", "motorbike", "person", "pottedplant", "sheep", "sofa", "train","tvmonitor"]
    w_img, h_img = [352, 240]

    # ROLO Network Parameters
    rolo_weights_file = '/u03/Guanghan/dev/ROLO-dev/model_dropout_20.ckpt'
    # rolo_weights_file = '/u03/Guanghan/dev/ROLO-dev/model_dropout_30.ckpt'
    lstm_depth = 3
    num_steps = 3  # number of frames as an input sequence
    num_feat = 4096
    num_predict = 6 # final output of LSTM 6 loc parameters
    num_gt = 4
    num_input = num_feat + num_predict # data input: 4096+6= 5002

    # ROLO Parameters
    batch_size = 1
    display_step = 1

    # tf Graph input
    x = tf.placeholder("float32", [None, num_steps, num_input])
    istate = tf.placeholder("float32", [None, 2*num_input]) #state & cell => 2x num_input
    y = tf.placeholder("float32", [None, num_gt])

    # Define weights
    weights = {
        'out': tf.Variable(tf.random_normal([num_input, num_predict]))
    }
    biases = {
        'out': tf.Variable(tf.random_normal([num_predict]))
    }


    def __init__(self,argvs = []):
        print("ROLO init")
        self.ROLO(argvs)


    def LSTM_single(self, name, _X, _istate, _weights, _biases):
        """Run a single-layer LSTM over num_steps feature vectors.

        _X has shape (batch_size, num_steps, num_input); the state is threaded
        manually through the steps and the last step's outputs are returned.
        """
        with tf.device('/cpu:0'):
        #with tf.device('/device:GPU:0'): #'not working on 0.11'
            # input shape: (batch_size, n_steps, n_input)
            _X = tf.transpose(_X, [1, 0, 2])  # permute num_steps and batch_size
            # Reshape to prepare input to hidden activation
            _X = tf.reshape(_X, [self.num_steps * self.batch_size, self.num_input]) # (num_steps*batch_size, num_input)
            # Split data because rnn cell needs a list of inputs for the RNN inner loop
            _X = tf.split(0, self.num_steps, _X) # n_steps * (batch_size, num_input)

        cell = tf.nn.rnn_cell.LSTMCell(self.num_input, self.num_input, state_is_tuple = False)
        state = _istate
        for step in range(self.num_steps):
            outputs, state = tf.nn.rnn(cell, [_X[step]], state)
            # Reuse the LSTM variables across the manual per-step unrolling.
            tf.get_variable_scope().reuse_variables()
        #print("output: ", outputs)
        #print("state: ", state)
        return outputs


    # Experiment with dropout
    def dropout_features(self, feature, prob):
        """Zero out a random fraction `prob` of the 4096 feature dims in place.

        NOTE(review): `xrange` is Python 2 only.
        """
        num_drop = int(prob * 4096)
        drop_index = random.sample(xrange(4096), num_drop)
        for i in range(len(drop_index)):
            index = drop_index[i]
            feature[index] = 0
        return feature


    '''---------------------------------------------------------------------------------------'''
    def build_networks(self):
        """Build the LSTM graph, start a session, and prepare a saver."""
        if self.disp_console : print ("Building ROLO graph...")

        # Build rolo layers
        self.lstm_module = self.LSTM_single('lstm_test', self.x, self.istate, self.weights, self.biases)
        self.ious= tf.Variable(tf.zeros([self.batch_size]), name="ious")
        self.sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))
        self.sess.run(tf.initialize_all_variables())
        self.saver = tf.train.Saver()
        #self.saver.restore(self.sess, self.rolo_weights_file)
        if self.disp_console : print ("Loading complete!" + '\n')


    def testing(self, x_path, y_path):
        """Run the test loop: load YOLO features + ground truth, predict
        locations, save them to self.output_path, and report average loss.
        """
        total_loss = 0

        print("TESTING ROLO...")
        # Use rolo_input for LSTM training
        pred = self.LSTM_single('lstm_train', self.x, self.istate, self.weights, self.biases)
        print("pred: ", pred)
        # Slice the 4-value location out of the concatenated feature+location vector.
        self.pred_location = pred[0][:, 4097:4101]
        print("pred_location: ", self.pred_location)
        print("self.y: ", self.y)

        self.correct_prediction = tf.square(self.pred_location - self.y)
        print("self.correct_prediction: ", self.correct_prediction)
        self.accuracy = tf.reduce_mean(self.correct_prediction) * 100
        print("self.accuracy: ", self.accuracy)
        #optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.accuracy) # Adam Optimizer

        # Initializing the variables
        init = tf.initialize_all_variables()

        # Launch the graph
        with tf.Session() as sess:
            if (self.restore_weights == True):
                sess.run(init)
                self.saver.restore(sess, self.rolo_weights_file)
                print ("Loading complete!" + '\n')
            else:
                sess.run(init)

            id = 0 #don't change this

            # Keep training until reach max iterations
            while id < self.testing_iters - self.num_steps:
                # Load training data & ground truth
                batch_xs = self.rolo_utils.load_yolo_output_test(x_path, self.batch_size, self.num_steps, id) # [num_of_examples, num_input] (depth == 1)

                # Apply dropout to batch_xs
                #for item in range(len(batch_xs)):
                #    batch_xs[item] = self.dropout_features(batch_xs[item], 0.4)

                batch_ys = self.rolo_utils.load_rolo_gt_test(y_path, self.batch_size, self.num_steps, id)
                print("Batch_ys_initial: ", batch_ys)
                # Normalize ground-truth pixel coordinates to [0, 1].
                batch_ys = utils.locations_from_0_to_1(self.w_img, self.h_img, batch_ys)

                # Reshape data to get 3 seq of 5002 elements
                batch_xs = np.reshape(batch_xs, [self.batch_size, self.num_steps, self.num_input])
                batch_ys = np.reshape(batch_ys, [self.batch_size, 4])
                print("Batch_ys: ", batch_ys)

                pred_location= sess.run(self.pred_location,feed_dict={self.x: batch_xs, self.y: batch_ys, self.istate: np.zeros((self.batch_size, 2*self.num_input))})
                print("ROLO Pred: ", pred_location)
                #print("len(pred) = ", len(pred_location))
                print("ROLO Pred in pixel: ", pred_location[0][0]*self.w_img, pred_location[0][1]*self.h_img, pred_location[0][2]*self.w_img, pred_location[0][3]*self.h_img)
                #print("correct_prediction int: ", (pred_location + 0.1).astype(int))

                # Save pred_location to file
                utils.save_rolo_output_test(self.output_path, pred_location, id, self.num_steps, self.batch_size)

                #sess.run(optimizer, feed_dict={self.x: batch_xs, self.y: batch_ys, self.istate: np.zeros((self.batch_size, 2*self.num_input))})
                if id % self.display_step == 0:
                    # Calculate batch loss
                    loss = sess.run(self.accuracy, feed_dict={self.x: batch_xs, self.y: batch_ys, self.istate: np.zeros((self.batch_size, 2*self.num_input))})
                    print ("Iter " + str(id*self.batch_size) + ", Minibatch Loss= " + "{:.6f}".format(loss)) #+ "{:.5f}".format(self.accuracy)
                    total_loss += loss
                id += 1
                print(id)

            print ("Testing Finished!")
            avg_loss = total_loss/id
            print ("Avg loss: " + str(avg_loss))
            #save_path = self.saver.save(sess, self.rolo_weights_file)
            #print("Model saved in file: %s" % save_path)

        return None


    def ROLO(self, argvs):
        """Parse argv flags and dispatch to train / track / detect / test."""
        self.rolo_utils= utils.ROLO_utils()
        self.rolo_utils.loadCfg()
        self.params = self.rolo_utils.params

        arguments = self.rolo_utils.argv_parser(argvs)

        if self.rolo_utils.flag_train is True:
            self.training(utils.x_path, utils.y_path)
        elif self.rolo_utils.flag_track is True:
            self.build_networks()
            self.track_from_file(utils.file_in_path)
        elif self.rolo_utils.flag_detect is True:
            self.build_networks()
            self.detect_from_file(utils.file_in_path)
        else:
            print ("Default: running ROLO test.")
            self.build_networks()

            test= 7 #choose video sequence here

            [self.w_img, self.h_img, sequence_name, dummy_1, self.testing_iters] = utils.choose_video_sequence(test)

            x_path = os.path.join('/home/tf/Documents/benchmark/DATA', sequence_name, 'yolo_out/')
            y_path = os.path.join('/home/tf/Documents/benchmark/DATA', sequence_name, 'groundtruth_rect.txt')
            self.output_path = os.path.join('/home/tf/Documents/benchmark/DATA', sequence_name, 'rolo_out_test/')
            utils.createFolder(self.output_path)

            #self.rolo_weights_file = '/u03/Guanghan/dev/ROLO-dev/output/ROLO_model/model_dropout_20.ckpt'
            # self.rolo_weights_file = '/u03/Guanghan/dev/ROLO-dev/output/ROLO_model/model_dropout_30.ckpt'
            #self.rolo_weights_file = '/u03/Guanghan/dev/ROLO-dev/output/ROLO_model/model_dropout_30_2.ckpt'
            #self.rolo_weights_file = '/u03/Guanghan/dev/ROLO-dev/output/ROLO_model/model_30_2_nd_newfit.ckpt'
            self.rolo_weights_file = '/home/tf/ROLO/models/model_demo.ckpt'

            self.testing(x_path, y_path)
'''----------------------------------------main-----------------------------------------------------'''
def main(argvs):
    # Construct ROLO_TF, which parses argvs and runs the selected mode.
    ROLO_TF(argvs)
if __name__=='__main__':
    main(' ')  # NOTE(review): passes a single-space string rather than an argv list -- confirm intended
| 41.527132 | 209 | 0.617323 |
f47a1819e4c26454d0b23f84526b31baf441d83c
| 7,852 |
py
|
Python
|
cython_tassl_wrap/native_tassl_sock_wrap.py
|
wenxiang-Li/fisco-bcos-python-sdk
|
d898589f284dbb93f5982027487247699bef57f0
|
[
"MIT"
] | 61 |
2019-07-03T07:40:17.000Z
|
2022-03-06T13:30:53.000Z
|
cython_tassl_wrap/native_tassl_sock_wrap.py
|
wenxiang-Li/fisco-bcos-python-sdk
|
d898589f284dbb93f5982027487247699bef57f0
|
[
"MIT"
] | 105 |
2019-07-25T08:48:59.000Z
|
2022-03-23T03:47:34.000Z
|
cython_tassl_wrap/native_tassl_sock_wrap.py
|
wenxiang-Li/fisco-bcos-python-sdk
|
d898589f284dbb93f5982027487247699bef57f0
|
[
"MIT"
] | 61 |
2019-07-03T06:58:42.000Z
|
2022-02-16T08:50:14.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
FISCO BCOS/Python-SDK is a python client for FISCO BCOS2.0 (https://github.com/FISCO-BCOS/)
FISCO BCOS/Python-SDK is free software: you can redistribute it and/or modify it under the
terms of the MIT License as published by the Free Software Foundation. This project is
distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
@author: kentzhang
@date: 2021-03
'''
# python + ctypes 调用 native_tassl_sock_wrap 库, 通过库里的c接口调用TasllSockWrap
import ctypes
import os
import platform
from ctypes import *
import time
ECHO_NONE = 0x0000    # no diagnostic output from the native library
ECHO_PRINTF = 0x0001  # echo diagnostics to stdout
ECHO_LOG = 0x0010     # echo diagnostics to a log file
# Argument/return type tables for the C entry points of the
# native_tassl_sock_wrap library.  Each FN_* dict describes one export:
# "name" is the C symbol, "argtypes"/"restype" the ctypes signatures.
# For void-returning functions restype is None (the ctypes convention).
FN_ssock_create = {}
FN_ssock_create["name"] = "ssock_create"
FN_ssock_create["argtypes"] = []
FN_ssock_create["restype"] = ctypes.c_void_p
FN_ssock_release = {}
FN_ssock_release["name"] = "ssock_release"
FN_ssock_release["argtypes"] = [ctypes.c_void_p]
# BUG FIX: was "= ctypes" (the module object, a typo); void return -> None.
FN_ssock_release["restype"] = None
FN_ssock_init = {}
FN_ssock_init["name"] = "ssock_init"
FN_ssock_init["argtypes"] = [ctypes.c_void_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p,
                             ctypes.c_char_p]
FN_ssock_init["restype"] = ctypes.c_int
FN_ssock_finish = {}
FN_ssock_finish["name"] = "ssock_finish"
FN_ssock_finish["argtypes"] = [ctypes.c_void_p]
# BUG FIX: was "= []" (not a valid ctypes restype); void return -> None.
FN_ssock_finish["restype"] = None
FN_ssock_set_echo_mode = {}
FN_ssock_set_echo_mode["name"] = "ssock_set_echo_mode"
FN_ssock_set_echo_mode["argtypes"] = [ctypes.c_void_p, ctypes.c_int]
FN_ssock_set_echo_mode["restype"] = None  # BUG FIX: was "= []"; void return -> None
FN_ssock_try_connect = {}
FN_ssock_try_connect["name"] = "ssock_try_connect"
FN_ssock_try_connect["argtypes"] = [ctypes.c_void_p, ctypes.c_char_p, ctypes.c_int]
FN_ssock_try_connect["restype"] = ctypes.c_int
FN_sscok_recv = {}
FN_sscok_recv["name"] = "ssock_recv"
FN_sscok_recv["argtypes"] = [ctypes.c_void_p, ctypes.c_char_p, ctypes.c_int]
FN_sscok_recv["restype"] = ctypes.c_int
FN_sscok_send = {}
FN_sscok_send["name"] = "ssock_send"
FN_sscok_send["argtypes"] = [ctypes.c_void_p, ctypes.c_char_p, ctypes.c_int]
FN_sscok_send["restype"] = ctypes.c_int
# Wrap the native TASSL socket library via ctypes (dlopen-style loading);
# tested on Windows and Linux.
class NativeTasslSockWrap:
    """ctypes wrapper around the native_tassl_sock_wrap shared library.

    Locates and loads the platform-specific shared library, declares the C
    call signatures (see the FN_* tables) and exposes thin Python methods
    over the native ssock_* entry points.
    """
    libname_win = 'native_tassl_sock_wrap.dll'
    libpath_win = "cpp_win"
    libname_linux = "libnative_tassl_sock_wrap.so"
    libpath_linux = "cpp_linux"
    # Resolved at construction time from the running platform.
    target_libname = ""
    target_libpath = ""
    target_platform = "unknown"
    # Loaded library handle and the opaque native ssock object pointer.
    nativelib = None
    ssock: ctypes.c_void_p = None

    def __init__(self):
        # Resolve the platform-specific library name and build directory,
        # then load the library immediately.
        platsys = platform.platform()
        if platsys.lower().startswith("win"):
            self.target_platform = "win64"
            self.target_libname = self.libname_win
            self.target_libpath = self.libpath_win
        if "linux" in platsys.lower():
            self.target_platform = "linux"
            self.target_libname = self.libname_linux
            self.target_libpath = self.libpath_linux
        self.load_lib()

    def load_lib(self):
        """Locate and load the shared library, declare C signatures and
        create the native ssock object.

        Search order: current working directory, this module's directory,
        then the per-platform build subdirectory.

        Returns 0 on success, -1 if the library could not be resolved,
        -2 if ssock_create failed; if already loaded, returns the existing
        library handle (idempotent).
        """
        if self.nativelib is not None:
            return self.nativelib
        currpath = os.getcwd()
        read_lib_name = os.path.join(currpath, self.target_libname)
        module_path = os.path.dirname(os.path.realpath(__file__))
        if not os.path.exists(read_lib_name):
            # Not in the working directory; try next to this module.
            read_lib_name = os.path.join(module_path, self.target_libname)
            if not os.path.exists(read_lib_name):
                # Still missing; fall back to the per-platform build directory.
                read_lib_name = os.path.join(module_path, self.target_libpath, self.target_libname)
        print("at last native_tassl_sock_wrap load the nativelib:", read_lib_name)
        if self.target_platform.lower().startswith("win"):
            # Windows needs the DLL directory registered before loading.
            os.add_dll_directory(os.path.dirname(read_lib_name))
        self.nativelib = PyDLL(read_lib_name, ctypes.RTLD_GLOBAL)
        if self.nativelib is None:
            return -1
        # Declare argument/return types explicitly; ctypes may otherwise
        # mis-convert arguments on some platforms.  The void functions'
        # restype is left at ctypes' default since the value is never used.
        lib = self.nativelib
        lib.ssock_create.argtypes = FN_ssock_create["argtypes"]
        lib.ssock_create.restype = FN_ssock_create["restype"]
        lib.ssock_release.argtypes = FN_ssock_release["argtypes"]
        lib.ssock_init.argtypes = FN_ssock_init["argtypes"]
        lib.ssock_init.restype = FN_ssock_init["restype"]
        lib.ssock_finish.argtypes = FN_ssock_finish["argtypes"]
        lib.ssock_set_echo_mode.argtypes = FN_ssock_set_echo_mode["argtypes"]
        lib.ssock_try_connect.argtypes = FN_ssock_try_connect["argtypes"]
        lib.ssock_try_connect.restype = FN_ssock_try_connect["restype"]
        lib.ssock_recv.argtypes = FN_sscok_recv["argtypes"]
        lib.ssock_recv.restype = FN_sscok_recv["restype"]
        lib.ssock_send.argtypes = FN_sscok_send["argtypes"]
        lib.ssock_send.restype = FN_sscok_send["restype"]
        self.ssock = self.nativelib.ssock_create()
        if self.ssock is None:
            return -2
        return 0

    def set_echo_mode(self, mode):
        """Set native-side diagnostics: ECHO_NONE / ECHO_PRINTF / ECHO_LOG."""
        self.nativelib.ssock_set_echo_mode(self.ssock, mode)

    def init(self, ca_file, node_crt_file, node_key_file,
             en_crt_file=None,
             en_key_file=None):
        """Load CA and node certificate/key files (plus optional GM
        encryption cert/key) into the native context.

        Returns the native ssock_init result (int).
        BUG FIX: the optional en_* paths defaulted to None but were
        unconditionally .encode()d, crashing when omitted; None is now
        passed through as a NULL c_char_p.
        """
        def _enc(path):
            # None -> NULL pointer on the C side.
            return path.encode("UTF-8") if path is not None else None
        return self.nativelib.ssock_init(self.ssock,
                                         _enc(ca_file),
                                         _enc(node_crt_file),
                                         _enc(node_key_file),
                                         _enc(en_crt_file),
                                         _enc(en_key_file))

    def try_connect(self, host=None, port=0):
        """Connect to host:port; returns the native result code."""
        retval = self.nativelib.ssock_try_connect(self.ssock, host.encode("UTF-8"), port)
        return retval

    def recv(self, recvsize=None):
        """Receive up to *recvsize* bytes (default 100 KiB).

        Returns the received bytes, or b'' on error / nothing received.
        BUG FIX: the recvsize parameter was previously ignored and always
        overwritten with the default buffer size.
        """
        if recvsize is None:
            recvsize = 10 * 10 * 1024
        buffer = ctypes.create_string_buffer(recvsize)
        retval = self.nativelib.ssock_recv(self.ssock, buffer, recvsize)
        retbuffer = b''
        if retval > 0:
            retbuffer = buffer[:retval]
        return retbuffer

    def send(self, buffer, bufferlen=None):
        """Send *buffer* (str is UTF-8 encoded first), retrying up to 3
        times with a 0.1 s pause between attempts.

        Returns the native send result (bytes sent, or the last
        non-positive error code).  ``bufferlen`` is accepted for interface
        compatibility; the actual length of ``buffer`` is used.
        """
        if type(buffer) == str:
            buffer = buffer.encode("utf-8")
        bufflen = len(buffer)
        retval = -1
        for _attempt in range(0, 3):
            retval = self.nativelib.ssock_send(self.ssock, buffer, bufflen)
            if retval > 0:
                break
            time.sleep(0.1)
        return retval

    def release(self):
        """Free the native ssock object (safe to call more than once)."""
        if self.ssock is None:
            return
        self.nativelib.ssock_release(self.ssock)
        self.ssock = None

    def finish(self):
        """Shut down the TLS session on the native side."""
        self.nativelib.ssock_finish(self.ssock)
| 37.390476 | 113 | 0.664671 |
bf187ac96ac78ef09f92c04499a54fc2fab2ce2d
| 1,077 |
py
|
Python
|
old_bin/bernie/get_humidity.py
|
sdss/ObserverTools
|
7f9949341edc91a79dac69d79e24af09e8558ffa
|
[
"BSD-3-Clause"
] | null | null | null |
old_bin/bernie/get_humidity.py
|
sdss/ObserverTools
|
7f9949341edc91a79dac69d79e24af09e8558ffa
|
[
"BSD-3-Clause"
] | null | null | null |
old_bin/bernie/get_humidity.py
|
sdss/ObserverTools
|
7f9949341edc91a79dac69d79e24af09e8558ffa
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# bernie Tue. 26 June '18
# Retrieve arbitrarily defined intervals of APO process variables from the
# telemetry channel archiver and print them as "# <pv>" headers followed by
# tab-separated timestamp/value rows.
from channelarchiver import Archiver, codes, utils
ss = 'http://sdss-telemetry.apo.nmsu.edu/telemetry/cgi/ArchiveDataServer.cgi'
archiver = Archiver(ss)
# Scan once so the archiver knows which archives hold which PVs.
# BUG FIX: scan_archives() was previously called twice; the redundant
# second network round-trip has been removed (get() is told not to rescan).
archiver.scan_archives()
# NEED TO TAKE THIS FROM THE COMMAND LINE, PERHAPS WITH A DEFAULT INTERVAL OF
# THe PAST 24 HOURS:
start = '2018-06-25 10:00:00'
end = '2018-06-26 11:00:00'
# NEED TO TAKE THIS FROM THE COMMAND LINE
# pvs_to_retrieve=[ '25m:boss:SP1B2LN2TempRead', '25m:boss:SP1R0LN2TempRead',
#                   '25m:boss:SP2B2LN2TempRead', '25m:boss:SP2R0LN2TempRead' ]
pvs_to_retrieve = ['25m:apo:humidity']
for pv in pvs_to_retrieve:
    print("# " + pv)
    retrieved_pv = archiver.get(pv, start, end, interpolation='raw',
                                scan_archives=False)
    # Walk times and values in lockstep; format matches the original output.
    for t, v in zip(retrieved_pv.times, retrieved_pv.values):
        print("%s\t%f" % (t.strftime('%Y-%m-%d %H:%M:%S.%f'), v))
52d7b029c35c2c17afa7b664502312c4c3207b0d
| 536 |
py
|
Python
|
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/tiny-frog-29119
|
eb3c34abb36c598458679076359052c9ac88868b
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/tiny-frog-29119
|
eb3c34abb36c598458679076359052c9ac88868b
|
[
"FTL",
"AML",
"RSA-MD"
] | 20 |
2021-07-24T18:26:54.000Z
|
2021-07-24T18:27:02.000Z
|
backend/home/migrations/0001_load_initial_data.py
|
crowdbotics-apps/tiny-frog-29119
|
eb3c34abb36c598458679076359052c9ac88868b
|
[
"FTL",
"AML",
"RSA-MD"
] | null | null | null |
from django.db import migrations
def create_site(apps, schema_editor):
    """Data migration: point the default Site (id=1) at this app's domain."""
    Site = apps.get_model("sites", "Site")
    custom_domain = "tiny-frog-29119.botics.co"
    defaults = {"name": "Tiny Frog"}
    if custom_domain:
        defaults["domain"] = custom_domain
    Site.objects.update_or_create(defaults=defaults, id=1)
class Migration(migrations.Migration):
    # Must run after the sites framework's unique-domain migration so the
    # Site table exists with its final schema.
    dependencies = [
        ("sites", "0002_alter_domain_unique"),
    ]
    operations = [
        migrations.RunPython(create_site),
    ]
| 20.615385 | 61 | 0.652985 |
a4a31315dd6b22ef95a4f3aa6394241ac985f6b3
| 7,747 |
py
|
Python
|
gifts_rest/settings/base.py
|
brjones/gifts_rest
|
8217e45fd1a692b00c9e9ae9f022ac2d2fab211e
|
[
"Apache-2.0"
] | null | null | null |
gifts_rest/settings/base.py
|
brjones/gifts_rest
|
8217e45fd1a692b00c9e9ae9f022ac2d2fab211e
|
[
"Apache-2.0"
] | null | null | null |
gifts_rest/settings/base.py
|
brjones/gifts_rest
|
8217e45fd1a692b00c9e9ae9f022ac2d2fab211e
|
[
"Apache-2.0"
] | null | null | null |
"""
.. See the NOTICE file distributed with this work for additional information
regarding copyright ownership.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Django settings for gifts_rest project.
Generated by 'django-admin startproject' using Django 2.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
from . import env
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Custom User model for the service
AUTH_USER_MODEL = 'aap_auth.AAPUser'
API_VERSION = '1.0.0'
# NOTE: TIME_ZONE = 'Europe/London' / USE_TZ = True used to be set here as
# well, but those assignments were dead code: the Internationalization
# section later in this module re-assigns TIME_ZONE = 'UTC' and
# USE_TZ = True, which is what has always taken effect.  The duplicate
# assignments have been removed; effective behavior is unchanged.
# Environment selection: PROD and TEST use the real AAP authenticator;
# anything else (local development) uses the permissive YesBackend.
# SECURITY WARNING: don't run with debug turned on in production!
if env.PROD_ENV:
    DEBUG = False
    AUTHENTICATOR_BACKEND = 'aap_auth.backend.AAPBackend'
elif env.TEST_ENV:
    DEBUG = True
    AUTHENTICATOR_BACKEND = 'aap_auth.backend.AAPBackend'
else:
    DEBUG = True
    AUTHENTICATOR_BACKEND = 'aap_auth.backend.YesBackend'
# Strict equality against True is kept deliberately: only an explicit
# boolean/1 in env enables fallover mode.
FALLOVER = env.FALLOVER == True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'rest_framework_swagger',
    'django.contrib.postgres',
    'psqlextra',
    'restui',
    'aap_auth.apps.AppAuthConfig'
]
# DRF: paginate at 10 items per page and authenticate with the backend
# chosen from the environment above (AAP in prod/test, permissive in dev).
REST_FRAMEWORK = {
    # 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
    'PAGE_SIZE':10,
    'DEFAULT_AUTHENTICATION_CLASSES': (
        AUTHENTICATOR_BACKEND,
    ),
    'EXCEPTION_HANDLER': 'rest_framework.views.exception_handler'
}
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# In debug mode only, enable CORS for the local frontend dev servers.
# CorsMiddleware must run before any middleware that can generate responses.
if DEBUG is True:
    INSTALLED_APPS += ('corsheaders', )
    MIDDLEWARE.insert(0, 'corsheaders.middleware.CorsMiddleware' )
    # CORS_ORIGIN_ALLOW_ALL = DEBUG
    CORS_ALLOW_CREDENTIALS = True
    CORS_ORIGIN_WHITELIST = (
        'localhost:39093',
        'localhost:8000',
    )
ROOT_URLCONF = 'gifts_rest.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'gifts_rest.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
# On Travis CI a throwaway local postgres with a hard-coded key is used;
# everywhere else credentials come from the (uncommitted) secrets module.
if 'TRAVIS' in os.environ:
    SECRET_KEY = "SecretKeyForUseOnTravis"
    DATABASES = {
        'default': {
            # 'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'ENGINE': 'psqlextra.backend',
            'NAME': 'ensembl_gifts',
            'USER': 'postgres',
            'PASSWORD': '',
            'HOST': 'localhost',
            # 'PORT': '5433',
        },
        'gifts': {
            'ENGINE': 'psqlextra.backend',
            'NAME': 'ensembl_gifts',
            'USER': 'postgres',
            'PASSWORD': '',
            'HOST': 'localhost',
            # 'PORT': '5433',
        }
    }
    EMAIL_RECIPIENT_LIST = {
        1: {
            'name': 'Work email',
            'email': '[email protected]',
        }
    }
else:
    from . import secrets
    SECRET_KEY = secrets.SECRET_KEY
    # Email settings:
    EMAIL_HOST = secrets.MAIL_SERVER
    EMAIL_RECIPIENT_LIST = secrets.EMAIL_LIST
    DATABASES = {
        'default': {
            'ENGINE': 'psqlextra.backend', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
            'OPTIONS': {
                'options': '-c search_path=ensembl_gifts,public'
            },
            'NAME': secrets.GIFTS_DATABASE,
            'USER': secrets.GIFTS_DATABASE_USER,
            'PASSWORD': secrets.GIFTS_DATABASE_PASSWORD,
            'HOST': secrets.GIFTS_DATABASE_HOST,
            'PORT': secrets.GIFTS_DATABASE_PORT,
            # 'NAME': secrets.REST_DATABASE,
            # 'USER': secrets.REST_DATABASE_USER,
            # 'PASSWORD': secrets.REST_DATABASE_PASSWORD,
            # 'HOST': secrets.REST_DATABASE_HOST,  # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
            # 'PORT': secrets.REST_DATABASE_PORT,  # Set to empty string for default.
        },
        # all operations on restui models point to 'gifts', see gifts_rest.router
        'gifts': {
            'ENGINE': 'psqlextra.backend',
            'OPTIONS': {
                # dev server must operate on its own schema (assume is named 'dev), see [EA-40].
                'options': '-c search_path=ensembl_gifts,public' if not env.DEV_ENV else '-c search_path=dev,public'
            },
            'NAME': secrets.GIFTS_DATABASE,
            'USER': secrets.GIFTS_DATABASE_USER,
            'PASSWORD': secrets.GIFTS_DATABASE_PASSWORD,
            'HOST': secrets.GIFTS_DATABASE_HOST,
            'PORT': secrets.GIFTS_DATABASE_PORT,
        }
    }
# Routes restui model operations to the 'gifts' connection (see gifts_rest.router).
DATABASE_ROUTERS = ['gifts_rest.router.GiftsRouter']
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
# NOTE(review): TIME_ZONE/USE_TZ are also assigned earlier in this module;
# these later assignments are the ones that take effect.
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
# TaRK base URL
TARK_SERVER = "http://betatark.ensembl.org"
# Ensembl REST server
ENSEMBL_REST_SERVER = "http://rest.ensembl.org"
# AAP service: public key for token verification, profile lookup endpoint,
# local cache location for the PEM, and the GIFTs AAP domain name.
AAP_PEM_URL = 'https://api.aai.ebi.ac.uk/meta/public.pem'
AAP_PROFILE_URL = 'https://api.aai.ebi.ac.uk/users/{}/profile'
AAP_PEM_FILE = '/tmp/aap.pem'
AAP_GIFTS_DOMAIN = 'self.gifts'
# CELERY STUFF: use the local redis instance as both broker and result
# backend, exchanging JSON-serialized messages.
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
873801ec3e5e2803b55fcd5dac0b9acc6610dcf8
| 802 |
py
|
Python
|
manage.py
|
ErfanSupratman/Hospital-Quality-Management-System
|
6ccfb0ddb8df5341c3e3f1a3f10ba737fd5a5b86
|
[
"MIT"
] | null | null | null |
manage.py
|
ErfanSupratman/Hospital-Quality-Management-System
|
6ccfb0ddb8df5341c3e3f1a3f10ba737fd5a5b86
|
[
"MIT"
] | null | null | null |
manage.py
|
ErfanSupratman/Hospital-Quality-Management-System
|
6ccfb0ddb8df5341c3e3f1a3f10ba737fd5a5b86
|
[
"MIT"
] | 1 |
2019-12-27T08:31:04.000Z
|
2019-12-27T08:31:04.000Z
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings before anything imports them.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "HQMS.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        # Django is importable, so the original ImportError had another cause.
        raise
    execute_from_command_line(sys.argv)
668d127f9a87a0c3765eb81414d0629018376603
| 22,379 |
py
|
Python
|
src/pretix/plugins/paypal/payment.py
|
whiteyhat/pretix
|
34d1fcf077a92765cd796d81d1aa6695d4801a9a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/pretix/plugins/paypal/payment.py
|
whiteyhat/pretix
|
34d1fcf077a92765cd796d81d1aa6695d4801a9a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/pretix/plugins/paypal/payment.py
|
whiteyhat/pretix
|
34d1fcf077a92765cd796d81d1aa6695d4801a9a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import json
import logging
import urllib.parse
from collections import OrderedDict
import paypalrestsdk
from django import forms
from django.contrib import messages
from django.core import signing
from django.http import HttpRequest
from django.template.loader import get_template
from django.urls import reverse
from django.utils.http import urlquote
from django.utils.translation import ugettext as __, ugettext_lazy as _
from paypalrestsdk.openid_connect import Tokeninfo
from pretix.base.decimal import round_decimal
from pretix.base.models import Event, OrderPayment, OrderRefund, Quota
from pretix.base.payment import BasePaymentProvider, PaymentException
from pretix.base.services.mail import SendMailException
from pretix.base.settings import SettingsSandbox
from pretix.helpers.urls import build_absolute_uri as build_global_uri
from pretix.multidomain.urlreverse import build_absolute_uri
from pretix.plugins.paypal.models import ReferencedPayPalObject
logger = logging.getLogger('pretix.plugins.paypal')
class Paypal(BasePaymentProvider):
    # Stable identifier; also keys the 'payment_paypal_*' settings namespace.
    identifier = 'paypal'
    verbose_name = _('PayPal')
    # PayPal needs no extra checkout form fields; approval happens off-site.
    payment_form_fields = OrderedDict([
    ])
    def __init__(self, event: Event):
        super().__init__(event)
        # Settings accessor scoped to payment/paypal for this event.
        self.settings = SettingsSandbox('payment', 'paypal', event)
@property
def test_mode_message(self):
if self.settings.connect_client_id and not self.settings.secret:
# in OAuth mode, sandbox mode needs to be set global
is_sandbox = self.settings.connect_endpoint == 'sandbox'
else:
is_sandbox = self.settings.get('endpoint') == 'sandbox'
if is_sandbox:
return _('The PayPal sandbox is being used, you can test without actually sending money but you will need a '
'PayPal sandbox user to log in.')
return None
    @property
    def settings_form_fields(self):
        """Build the organizer-facing settings form.

        Connect mode (client id configured, no per-event secret): show only
        the read-only connected account, or nothing until connected.
        Classic mode: ask for client id, secret and endpoint.
        """
        if self.settings.connect_client_id and not self.settings.secret:
            # PayPal connect
            if self.settings.connect_user_id:
                fields = [
                    ('connect_user_id',
                     forms.CharField(
                         label=_('PayPal account'),
                         disabled=True
                     )),
                ]
            else:
                # Not yet connected: no settings form at all.
                return {}
        else:
            fields = [
                ('client_id',
                 forms.CharField(
                     label=_('Client ID'),
                     max_length=80,
                     min_length=80,
                     help_text=_('<a target="_blank" rel="noopener" href="{docs_url}">{text}</a>').format(
                         text=_('Click here for a tutorial on how to obtain the required keys'),
                         docs_url='https://docs.pretix.eu/en/latest/user/payments/paypal.html'
                     )
                 )),
                ('secret',
                 forms.CharField(
                     label=_('Secret'),
                     max_length=80,
                     min_length=80,
                 )),
                ('endpoint',
                 forms.ChoiceField(
                     label=_('Endpoint'),
                     initial='live',
                     choices=(
                         ('live', 'Live'),
                         ('sandbox', 'Sandbox'),
                     ),
                 )),
            ]
        # Merge with the base provider's fields, keeping '_enabled' first.
        d = OrderedDict(
            fields + list(super().settings_form_fields.items())
        )
        d.move_to_end('_enabled', False)
        return d
    def get_connect_url(self, request):
        # Remember which event initiated the OAuth flow for the return view.
        request.session['payment_paypal_oauth_event'] = request.event.pk
        self.init_api()
        # Send the user to PayPal's consent screen (openid/profile/email scopes).
        return Tokeninfo.authorize_url({'scope': 'openid profile email'})
    def settings_content_render(self, request):
        """Extra HTML on the payment settings page: PayPal Connect
        (dis)connect controls, or the webhook-endpoint hint in classic mode."""
        if self.settings.connect_client_id and not self.settings.secret:
            # Use PayPal connect
            if not self.settings.connect_user_id:
                # Not connected yet: offer the connect button.
                return (
                    "<p>{}</p>"
                    "<a href='{}' class='btn btn-primary btn-lg'>{}</a>"
                ).format(
                    _('To accept payments via PayPal, you will need an account at PayPal. By clicking on the '
                      'following button, you can either create a new PayPal account connect pretix to an existing '
                      'one.'),
                    self.get_connect_url(request),
                    _('Connect with {icon} PayPal').format(icon='<i class="fa fa-paypal"></i>')
                )
            else:
                # Already connected: offer to disconnect.
                return (
                    "<button formaction='{}' class='btn btn-danger'>{}</button>"
                ).format(
                    reverse('plugins:paypal:oauth.disconnect', kwargs={
                        'organizer': self.event.organizer.slug,
                        'event': self.event.slug,
                    }),
                    _('Disconnect from PayPal')
                )
        else:
            # Classic mode: remind the organizer to configure the webhook.
            return "<div class='alert alert-info'>%s<br /><code>%s</code></div>" % (
                _('Please configure a PayPal Webhook to the following endpoint in order to automatically cancel orders '
                  'when payments are refunded externally.'),
                build_global_uri('plugins:paypal:webhook')
            )
    def init_api(self):
        """Configure the global paypalrestsdk client.

        Connect mode uses the platform credentials plus the OpenID redirect
        URI; classic mode uses the per-event client id/secret.  Sandbox vs
        live is derived from the configured endpoint string.
        """
        if self.settings.connect_client_id and not self.settings.secret:
            paypalrestsdk.set_config(
                mode="sandbox" if "sandbox" in self.settings.connect_endpoint else 'live',
                client_id=self.settings.connect_client_id,
                client_secret=self.settings.connect_secret_key,
                openid_client_id=self.settings.connect_client_id,
                openid_client_secret=self.settings.connect_secret_key,
                openid_redirect_uri=urlquote(build_global_uri('plugins:paypal:oauth.return')))
        else:
            paypalrestsdk.set_config(
                mode="sandbox" if "sandbox" in self.settings.get('endpoint') else 'live',
                client_id=self.settings.get('client_id'),
                client_secret=self.settings.get('secret'))
def payment_is_valid_session(self, request):
return (request.session.get('payment_paypal_id', '') != ''
and request.session.get('payment_paypal_payer', '') != '')
def payment_form_render(self, request) -> str:
template = get_template('pretixplugins/paypal/checkout_payment_form.html')
ctx = {'request': request, 'event': self.event, 'settings': self.settings}
return template.render(ctx)
    def checkout_prepare(self, request, cart):
        """Create a PayPal payment object for the cart and return the URL
        the buyer must be redirected to for approval (via _create_payment)."""
        self.init_api()
        kwargs = {}
        if request.resolver_match and 'cart_namespace' in request.resolver_match.kwargs:
            kwargs['cart_namespace'] = request.resolver_match.kwargs['cart_namespace']
        if request.event.settings.payment_paypal_connect_user_id:
            # Connect mode: refresh the merchant's e-mail from the OAuth
            # token and make them the payee of this transaction.
            userinfo = Tokeninfo.create_with_refresh_token(request.event.settings.payment_paypal_connect_refresh_token).userinfo()
            request.event.settings.payment_paypal_connect_user_id = userinfo.email
            payee = {
                "email": request.event.settings.payment_paypal_connect_user_id,
                # If PayPal ever offers a good way to get the MerchantID via the Identifity API,
                # we should use it instead of the merchant's eMail-address
                # "merchant_id": request.event.settings.payment_paypal_connect_user_id,
            }
        else:
            payee = {}
        payment = paypalrestsdk.Payment({
            'header': {'PayPal-Partner-Attribution-Id': 'ramiioSoftwareentwicklung_SP'},
            'intent': 'sale',
            'payer': {
                "payment_method": "paypal",
            },
            "redirect_urls": {
                "return_url": build_absolute_uri(request.event, 'plugins:paypal:return', kwargs=kwargs),
                "cancel_url": build_absolute_uri(request.event, 'plugins:paypal:abort', kwargs=kwargs),
            },
            "transactions": [
                {
                    "item_list": {
                        "items": [
                            {
                                "name": __('Order for %s') % str(request.event),
                                "quantity": 1,
                                "price": self.format_price(cart['total']),
                                "currency": request.event.currency
                            }
                        ]
                    },
                    "amount": {
                        "currency": request.event.currency,
                        "total": self.format_price(cart['total'])
                    },
                    "description": __('Event tickets for {event}').format(event=request.event.name),
                    "payee": payee
                }
            ]
        })
        request.session['payment_paypal_order'] = None
        return self._create_payment(request, payment)
def format_price(self, value):
return str(round_decimal(value, self.event.currency, {
# PayPal behaves differently than Stripe in deciding what currencies have decimal places
# Source https://developer.paypal.com/docs/classic/api/currency_codes/
'HUF': 0,
'JPY': 0,
'MYR': 0,
'TWD': 0,
# However, CLPs are not listed there while PayPal requires us not to send decimal places there. WTF.
'CLP': 0,
# Let's just guess that the ones listed here are 0-based as well
# https://developers.braintreepayments.com/reference/general/currencies
'BIF': 0,
'DJF': 0,
'GNF': 0,
'KMF': 0,
'KRW': 0,
'LAK': 0,
'PYG': 0,
'RWF': 0,
'UGX': 0,
'VND': 0,
'VUV': 0,
'XAF': 0,
'XOF': 0,
'XPF': 0,
}))
    @property
    def abort_pending_allowed(self):
        # Pending PayPal payments may still complete server-side, so users
        # must not be allowed to abort them.
        return False
    def _create_payment(self, request, payment):
        """Create *payment* at PayPal and return the buyer approval URL.

        On success, stores the PayPal payment id in the session and returns
        the approval redirect URL (wrapped for iframe sessions).  On any
        failure, shows an error message and returns None implicitly.
        """
        try:
            if payment.create():
                if payment.state not in ('created', 'approved', 'pending'):
                    messages.error(request, _('We had trouble communicating with PayPal'))
                    logger.error('Invalid payment state: ' + str(payment))
                    return
                request.session['payment_paypal_id'] = payment.id
                for link in payment.links:
                    if link.method == "REDIRECT" and link.rel == "approval_url":
                        if request.session.get('iframe_session', False):
                            # Iframe checkout: route through our signed
                            # redirect view so the top-level window navigates.
                            signer = signing.Signer(salt='safe-redirect')
                            return (
                                build_absolute_uri(request.event, 'plugins:paypal:redirect') + '?url=' +
                                urllib.parse.quote(signer.sign(link.href))
                            )
                        else:
                            return str(link.href)
            else:
                messages.error(request, _('We had trouble communicating with PayPal'))
                logger.error('Error on creating payment: ' + str(payment.error))
        except Exception as e:
            messages.error(request, _('We had trouble communicating with PayPal'))
            logger.exception('Error on creating payment: ' + str(e))
def checkout_confirm_render(self, request) -> str:
"""
Returns the HTML that should be displayed when the user selected this provider
on the 'confirm order' page.
"""
template = get_template('pretixplugins/paypal/checkout_payment_confirm.html')
ctx = {'request': request, 'event': self.event, 'settings': self.settings}
return template.render(ctx)
    def execute_payment(self, request: HttpRequest, payment: OrderPayment):
        """Finalize the payment after the buyer returns from PayPal approval.

        Validates session data, re-fetches the PayPal payment, checks that
        amount and currency match what we expect, then delegates to
        _execute_payment.  Raises PaymentException on missing session data
        or a value mismatch.
        """
        if (request.session.get('payment_paypal_id', '') == '' or request.session.get('payment_paypal_payer', '') == ''):
            raise PaymentException(_('We were unable to process your payment. See below for details on how to '
                                     'proceed.'))
        self.init_api()
        pp_payment = paypalrestsdk.Payment.find(request.session.get('payment_paypal_id'))
        # Link the PayPal object to this order so webhooks can be matched later.
        ReferencedPayPalObject.objects.get_or_create(order=payment.order, payment=payment, reference=pp_payment.id)
        # Guard against tampering: the PayPal transaction must match our charge.
        if str(pp_payment.transactions[0].amount.total) != str(payment.amount) or pp_payment.transactions[0].amount.currency \
                != self.event.currency:
            logger.error('Value mismatch: Payment %s vs paypal trans %s' % (payment.id, str(pp_payment)))
            raise PaymentException(_('We were unable to process your payment. See below for details on how to '
                                     'proceed.'))
        return self._execute_payment(pp_payment, request, payment)
    def _execute_payment(self, payment, request, payment_obj):
        """Execute the approved PayPal payment and sync payment_obj's state.

        Patches the item list/description onto freshly created payments,
        executes the payment, then maps the resulting PayPal state to the
        local OrderPayment state (pending / failed / confirmed).
        Raises PaymentException on invalid state or quota exhaustion.
        """
        if payment.state == 'created':
            # Attach a proper line item and description before executing.
            payment.replace([
                {
                    "op": "replace",
                    "path": "/transactions/0/item_list",
                    "value": {
                        "items": [
                            {
                                "name": __('Order {slug}-{code}').format(slug=self.event.slug.upper(),
                                                                         code=payment_obj.order.code),
                                "quantity": 1,
                                "price": self.format_price(payment_obj.amount),
                                "currency": payment_obj.order.event.currency
                            }
                        ]
                    }
                },
                {
                    "op": "replace",
                    "path": "/transactions/0/description",
                    "value": __('Order {order} for {event}').format(
                        event=request.event.name,
                        order=payment_obj.order.code
                    )
                }
            ])
        try:
            payment.execute({"payer_id": request.session.get('payment_paypal_payer')})
        except Exception as e:
            messages.error(request, _('We had trouble communicating with PayPal'))
            logger.exception('Error on creating payment: ' + str(e))
        # A pending sale (e.g. eCheck) means we must wait for the webhook.
        for trans in payment.transactions:
            for rr in trans.related_resources:
                if hasattr(rr, 'sale') and rr.sale:
                    if rr.sale.state == 'pending':
                        messages.warning(request, _('PayPal has not yet approved the payment. We will inform you as '
                                                    'soon as the payment completed.'))
                        payment_obj.info = json.dumps(payment.to_dict())
                        payment_obj.state = OrderPayment.PAYMENT_STATE_PENDING
                        payment_obj.save()
                        return
        payment_obj.refresh_from_db()
        if payment.state == 'pending':
            messages.warning(request, _('PayPal has not yet approved the payment. We will inform you as soon as the '
                                        'payment completed.'))
            payment_obj.info = json.dumps(payment.to_dict())
            payment_obj.state = OrderPayment.PAYMENT_STATE_PENDING
            payment_obj.save()
            return
        if payment.state != 'approved':
            payment_obj.state = OrderPayment.PAYMENT_STATE_FAILED
            payment_obj.save()
            payment_obj.order.log_action('pretix.event.order.payment.failed', {
                'local_id': payment.local_id,
                'provider': payment.provider,
            })
            logger.error('Invalid state: %s' % str(payment))
            raise PaymentException(_('We were unable to process your payment. See below for details on how to '
                                     'proceed.'))
        if payment_obj.state == OrderPayment.PAYMENT_STATE_CONFIRMED:
            # Already confirmed (e.g. by a webhook racing us); nothing to do.
            logger.warning('PayPal success event even though order is already marked as paid')
            return
        try:
            payment_obj.info = json.dumps(payment.to_dict())
            payment_obj.save(update_fields=['info'])
            payment_obj.confirm()
        except Quota.QuotaExceededException as e:
            raise PaymentException(str(e))
        except SendMailException:
            messages.warning(request, _('There was an error sending the confirmation mail.'))
        return None
def payment_pending_render(self, request, payment) -> str:
retry = True
try:
if payment.info and payment.info_data['state'] == 'pending':
retry = False
except KeyError:
pass
template = get_template('pretixplugins/paypal/pending.html')
ctx = {'request': request, 'event': self.event, 'settings': self.settings,
'retry': retry, 'order': payment.order}
return template.render(ctx)
def payment_control_render(self, request: HttpRequest, payment: OrderPayment):
template = get_template('pretixplugins/paypal/control.html')
ctx = {'request': request, 'event': self.event, 'settings': self.settings,
'payment_info': payment.info_data, 'order': payment.order}
return template.render(ctx)
    def payment_partial_refund_supported(self, payment: OrderPayment):
        # PayPal's refund API accepts arbitrary amounts, so partial refunds work.
        return True
    def payment_refund_supported(self, payment: OrderPayment):
        # Completed PayPal sales can always be refunded through the API.
        return True
def execute_refund(self, refund: OrderRefund):
self.init_api()
sale = None
for res in refund.payment.info_data['transactions'][0]['related_resources']:
for k, v in res.items():
if k == 'sale':
sale = paypalrestsdk.Sale.find(v['id'])
break
pp_refund = sale.refund({
"amount": {
"total": self.format_price(refund.amount),
"currency": refund.order.event.currency
}
})
if not pp_refund.success():
raise PaymentException(_('Refunding the amount via PayPal failed: {}').format(pp_refund.error))
else:
sale = paypalrestsdk.Payment.find(refund.payment.info_data['id'])
refund.payment.info = json.dumps(sale.to_dict())
refund.info = json.dumps(pp_refund.to_dict())
refund.done()
def payment_prepare(self, request, payment_obj):
self.init_api()
if request.event.settings.payment_paypal_connect_user_id:
userinfo = Tokeninfo.create_with_refresh_token(request.event.settings.payment_paypal_connect_refresh_token).userinfo()
request.event.settings.payment_paypal_connect_user_id = userinfo.email
payee = {
"email": request.event.settings.payment_paypal_connect_user_id,
# If PayPal ever offers a good way to get the MerchantID via the Identifity API,
# we should use it instead of the merchant's eMail-address
# "merchant_id": request.event.settings.payment_paypal_connect_user_id,
}
else:
payee = {}
payment = paypalrestsdk.Payment({
'header': {'PayPal-Partner-Attribution-Id': 'ramiioSoftwareentwicklung_SP'},
'intent': 'sale',
'payer': {
"payment_method": "paypal",
},
"redirect_urls": {
"return_url": build_absolute_uri(request.event, 'plugins:paypal:return'),
"cancel_url": build_absolute_uri(request.event, 'plugins:paypal:abort'),
},
"transactions": [
{
"item_list": {
"items": [
{
"name": __('Order {slug}-{code}').format(slug=self.event.slug.upper(),
code=payment_obj.order.code),
"quantity": 1,
"price": self.format_price(payment_obj.amount),
"currency": payment_obj.order.event.currency
}
]
},
"amount": {
"currency": request.event.currency,
"total": self.format_price(payment_obj.amount)
},
"description": __('Order {order} for {event}').format(
event=request.event.name,
order=payment_obj.order.code
),
"payee": payee
}
]
})
request.session['payment_paypal_order'] = payment_obj.order.pk
request.session['payment_paypal_payment'] = payment_obj.pk
return self._create_payment(request, payment)
def shred_payment_info(self, obj):
if obj.info:
d = json.loads(obj.info)
new = {
'id': d.get('id'),
'payer': {
'payer_info': {
'email': '█'
}
},
'update_time': d.get('update_time'),
'transactions': [
{
'amount': t.get('amount')
} for t in d.get('transactions', [])
],
'_shredded': True
}
obj.info = json.dumps(new)
obj.save(update_fields=['info'])
for le in obj.order.all_logentries().filter(action_type="pretix.plugins.paypal.event").exclude(data=""):
d = le.parsed_data
if 'resource' in d:
d['resource'] = {
'id': d['resource'].get('id'),
'sale_id': d['resource'].get('sale_id'),
'parent_payment': d['resource'].get('parent_payment'),
}
le.data = json.dumps(d)
le.shredded = True
le.save(update_fields=['data', 'shredded'])
| 43.966601 | 130 | 0.53443 |
23619c106e46eb0dbfa8076153cf543133bd6984
| 131 |
py
|
Python
|
build.py
|
DragonFlayer/oblige-my-doom
|
9d6594c26bd756255952d413b5c29c92ed4f303d
|
[
"MIT"
] | 3 |
2019-03-29T11:37:27.000Z
|
2020-09-24T06:53:17.000Z
|
build.py
|
DragonFlayer/oblige-my-doom
|
9d6594c26bd756255952d413b5c29c92ed4f303d
|
[
"MIT"
] | 1 |
2019-03-31T15:14:36.000Z
|
2019-04-02T18:27:46.000Z
|
build.py
|
DragonFlayer/oblige-my-doom
|
9d6594c26bd756255952d413b5c29c92ed4f303d
|
[
"MIT"
] | null | null | null |
import subprocess

# Build a single-file (-F), windowed (-w) executable of the GUI via PyInstaller.
# BUG FIX: the command was previously passed as one string without shell=True,
# which only works on Windows; on POSIX the whole string is treated as a single
# executable name and the call fails. Passing an argument list works everywhere.
subprocess.run(['pyinstaller', '-wF', '-i=default_icon.ico', 'gui.py', '-n', 'ObligeMyDoom'])
input("Press Enter to continue...")
| 32.75 | 76 | 0.770992 |
4297305e923aef5d058459bdae3ed968d7b11504
| 12,679 |
py
|
Python
|
transformer/SubLayers.py
|
tango4j/Continual-Learning-Benchmark
|
89eb1396c294feebdba818d67707c923667391ad
|
[
"MIT"
] | null | null | null |
transformer/SubLayers.py
|
tango4j/Continual-Learning-Benchmark
|
89eb1396c294feebdba818d67707c923667391ad
|
[
"MIT"
] | null | null | null |
transformer/SubLayers.py
|
tango4j/Continual-Learning-Benchmark
|
89eb1396c294feebdba818d67707c923667391ad
|
[
"MIT"
] | null | null | null |
''' Define the sublayers in encoder/decoder layer '''
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from transformer.Modules import ScaledDotProductAttention
import ipdb
import torchvision
__author__ = "Yu-Hsiang Huang"
class MGN(nn.Module):
    """Gaussian encoder network.

    Encodes a batch into latent-Gaussian parameters ``(mu, log_var)`` via one
    of three backbones selected by ``mgn_model_type`` — a small 2-layer CNN,
    a pretrained torchvision classifier (when the type equals
    ``opt.pretrained_model_type``), or a one-layer MLP — and returns a
    reparameterized sample ``z`` together with the distribution parameters.
    """
    def __init__(self, opt, x_dim, h_dim1, h_dim2, z_dim, mgn_model_type="cnn_2layers"):
        # NOTE(review): h_dim2 is accepted but never used in this class.
        super(MGN, self).__init__()
        # Mirror every option onto the module; the code below relies on
        # opt.image_shape, opt.pretrained_model_type, opt.scale_std and
        # opt.fixed_std being present.
        for key, val in opt.__dict__.items():
            setattr(self, key, val)
        # encoder part
        self.mgn_model_type = mgn_model_type
        self.ch = self.image_shape[0]  # number of input channels
        self.imageNet_shape = (224, 224)
        layer_size = 64  # conv channel width for the small CNN backbone
        if self.mgn_model_type=="cnn_2layers":
            # Two conv+ReLU+maxpool stages; each pooling halves H and W, so the
            # flattened size below assumes 32x32 inputs -> 8x8 feature maps.
            self.layer1 = nn.Sequential(
                nn.Conv2d(self.ch, layer_size, kernel_size=5, stride=1, padding=2),
                nn.ReLU(),
                nn.MaxPool2d(kernel_size=2, stride=2))
            self.layer2 = nn.Sequential(
                nn.Conv2d(layer_size, layer_size, kernel_size=5, stride=1, padding=2),
                nn.ReLU(),
                nn.MaxPool2d(kernel_size=2, stride=2))
            # self.fc1 = nn.Linear(8 * 8 * 16, h_dim1)
            # self.drop_out = nn.Dropout()
            # self.fc2 = nn.Linear(1000, 10)
            last_cnn_dim = 8*8*layer_size
        elif self.mgn_model_type == self.pretrained_model_type:
            img_sz = (3, 32, 32)
            # Pretrained ImageNet models expect 224x224 inputs.
            self.image_size_change = nn.AdaptiveAvgPool2d((224, 224))
            ### This upscales the image
            # self.adap_img = nn.AdaptiveAvgPool2d(self.imageNet_shape)
            print("Loading pretrained ImageNet model {} ...".format(self.pretrained_model_type))
            self.pretrained_model = getattr(torchvision.models, self.pretrained_model_type)(pretrained=True)
            last_cnn_dim = 1000  # ImageNet classifiers emit 1000 logits
        elif self.mgn_model_type == 'mlp':
            self.net1 = nn.Linear(x_dim, h_dim1)
            last_cnn_dim = h_dim1
        else:
            raise ValueError('No such MGN model type such as {}'.format(self.mgn_model_type))
        # Heads producing the Gaussian parameters from the backbone features.
        self.fc31 = nn.Linear(last_cnn_dim, z_dim)  # mu head
        self.fc32 = nn.Linear(last_cnn_dim, z_dim)  # log-variance head
        # model_list = [self.net1, self.fc2, self.fc31, self.fc32]
        # model_list = [self.layer1, self.layer2, self.fc31, self.fc32]
        # if opt.orthogonal_init:
        #     print("------====== Orthogonalizing the initial weights")
        #     for _model in model_list:
        #         torch.nn.init.orthogonal_(_model.weight)
    # def encoder(self, x):
    #     with torch.no_grad():
    def encoder(self, x):
        """Map input ``x`` to Gaussian parameters ``(mu, log_var)``."""
        if self.mgn_model_type == 'cnn_2layers':
            x = x.view(-1, *self.image_shape)
            out = self.layer1(x)
            out = self.layer2(out)
            h = out.view(out.size(0), -1)  # flatten feature maps per sample
            # out = self.drop_out(out)
            # try:
            #     h = self.fc1(out_flat)
            # except:
            #     ipdb.set_trace()
        elif self.mgn_model_type == self.pretrained_model_type:
            if self.image_shape[0] == 1:
                # Grayscale input: replicate to 3 channels for ImageNet models.
                x = x.repeat(1, 3, 1, 1)
            x = x.view(-1, 3, *self.image_shape[1:])
            ### h is BS x 1000
            h = self.pretrained_model(self.image_size_change(x))
        else:
            h = F.relu(self.net1(x))
        # h = F.relu(self.fc2(h))
        mu, log_var = self.fc31(h), self.fc32(h)  # mu, log_var
        return mu, log_var
    def sampling(self, mu, log_var):
        """Reparameterization trick: ``z = mu + std * eps`` with ``eps ~ N(0, I)``."""
        # std = torch.exp(0.5*log_var)
        std = torch.exp(0.5*log_var)
        # ipdb.set_trace()
        if self.scale_std == True:
            # Scale the learned std by the fixed factor.
            std = self.fixed_std * std
        else:
            # Ignore the learned std entirely and use a constant one.
            std = self.fixed_std * torch.ones_like(std)
        eps = torch.randn_like(std)
        return eps.mul(std).add_(mu) # return z sample
    # def decoder(self, z):
    #     h = F.relu(self.fc4(z))
    #     h = F.relu(self.fc5(h))
    #     return F.sigmoid(self.fc6(h))
    def forward(self, x):
        """Return ``(z, mu, log_var)`` for an input batch ``x``."""
        mu, log_var = self.encoder(x)
        z = self.sampling(mu, log_var)
        # return self.decoder(z), mu, log_var
        return z, mu, log_var
class MultiHeadAttentionMemory(nn.Module):
    """Multi-head attention with an optional MLP residual path and output compression.

    Configured entirely through ``opt`` attributes: ``n_head``, ``d_k``,
    ``d_v``, ``d_model``, ``d_model_embed``, ``compress``, ``mlp_res``,
    ``mlp_mha`` and ``mu`` (residual mixing weight).
    """

    def __init__(self, opt):
        super().__init__()
        # Mirror all options onto the module for later use in forward().
        for key, val in opt.__dict__.items():
            setattr(self, key, val)
        n_head = self.n_head
        d_k = self.d_k
        d_v = self.d_v
        d_model = self.d_model
        d_model_embed = self.d_model_embed
        # Two-layer projection used as the residual branch when mlp_res is set.
        self.fc1_res = nn.Linear(d_model, n_head * d_k)
        if self.compress:
            d_model_mlp_out = d_model_embed
        else:
            d_model_mlp_out = d_model
        self.fc2_res = nn.Linear(n_head * d_k, d_model_mlp_out)
        if self.mlp_mha == 3:
            # In this mode the attention itself operates in the compressed space.
            d_model = d_model_mlp_out
        self.w_qs = nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_ks = nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_vs = nn.Linear(d_model, n_head * d_v, bias=False)
        if self.compress:
            self.fc = nn.Linear(n_head * d_v, d_model_embed, bias=False)
        else:
            self.fc = nn.Linear(n_head * d_v, d_model, bias=False)
        self.attention = ScaledDotProductAttention(temperature=d_k ** 0.5)
        if self.compress and self.mlp_mha <= 2:
            # BUG FIX: this line previously read `lf.layer_norm = ...`, raising
            # a NameError at construction time whenever compress was enabled
            # with mlp_mha <= 2.
            self.layer_norm = nn.LayerNorm(d_model_embed, eps=1e-6)
        else:
            self.layer_norm = nn.LayerNorm(d_model, eps=1e-6)

    def forward(self, _q, k, v, mask=None):
        """Attend ``_q`` over ``(k, v)``; returns ``(output, attention_weights)``."""
        d_k, d_v, n_head = self.d_k, self.d_v, self.n_head
        sz_b, len_q, len_k, len_v = _q.size(0), _q.size(1), k.size(1), v.size(1)
        if self.mlp_res:
            # Residual comes from the MLP projection of the query.
            residual = self.fc1_res(_q)
            residual = self.fc2_res(residual)
        else:
            residual = _q
        # Pre-attention projections, separated per head: b x lq x n x d
        _q = self.w_qs(_q).view(sz_b, len_q, n_head, d_k)
        k = self.w_ks(k).view(sz_b, len_k, n_head, d_k)
        v = self.w_vs(v).view(sz_b, len_v, n_head, d_v)
        # Transpose for attention dot product: b x n x lq x d
        _q, k, v = _q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2)
        if mask is not None:
            mask = mask.unsqueeze(1)  # For head axis broadcasting.
        _q, attn = self.attention(_q, k, v, mask=mask)
        # Move heads back and concatenate them: b x lq x (n*dv)
        _q = _q.transpose(1, 2).contiguous().view(sz_b, len_q, -1)
        if self.mlp_res:
            if _q.shape != residual.shape:
                _q = _q.unsqueeze(dim=2)
            _q = self.fc(_q)
            # Mix normalized attention output with the normalized residual,
            # weighted by self.mu.
            qmax = torch.max(torch.abs(_q.view(-1)))
            rmax = torch.max(torch.abs(residual.view(-1)))
            _q = _q/qmax + self.mu * residual/rmax
        else:
            _q = self.fc(_q)
            if _q.shape != residual.shape:
                _q = _q.unsqueeze(dim=2)
            _q += residual
        _q = self.layer_norm(_q)
        return _q, attn
class PositionwiseFeedForward(nn.Module):
''' A two-feed-forward-layer module '''
def __init__(self, d_in, d_hid, dropout=0.1):
super().__init__()
self.w_1 = nn.Linear(d_in, d_hid) # position-wise
self.w_2 = nn.Linear(d_hid, d_in) # position-wise
self.layer_norm = nn.LayerNorm(d_in, eps=1e-6)
self.dropout = nn.Dropout(dropout)
def forward(self, x):
residual = x
x = self.w_2(F.relu(self.w_1(x)))
x = self.dropout(x)
x += residual
x = self.layer_norm(x)
return x
class MultiHeadAttention(nn.Module):
    """Standard Transformer multi-head attention with residual + LayerNorm."""

    def __init__(self, n_head, d_model, d_k, d_v, dropout=0.1):
        """
        :param n_head: number of attention heads
        :param d_model: model (input/output) dimension
        :param d_k: per-head query/key dimension
        :param d_v: per-head value dimension
        :param dropout: dropout probability on the output projection
        """
        super().__init__()
        self.n_head = n_head
        self.d_k = d_k
        self.d_v = d_v
        self.w_qs = nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_ks = nn.Linear(d_model, n_head * d_k, bias=False)
        self.w_vs = nn.Linear(d_model, n_head * d_v, bias=False)
        self.fc = nn.Linear(n_head * d_v, d_model, bias=False)
        self.attention = ScaledDotProductAttention(temperature=d_k ** 0.5)
        self.dropout = nn.Dropout(dropout)
        self.layer_norm = nn.LayerNorm(d_model, eps=1e-6)

    def forward(self, q, k, v, mask=None):
        """Attend ``q`` over ``(k, v)``; returns ``(output, attention_weights)``."""
        # CLEANUP: removed the unused local alias `_q = q` from the original.
        d_k, d_v, n_head = self.d_k, self.d_v, self.n_head
        sz_b, len_q, len_k, len_v = q.size(0), q.size(1), k.size(1), v.size(1)

        residual = q

        # Pass through the pre-attention projection: b x lq x (n*dv)
        # Separate different heads: b x lq x n x dv
        q = self.w_qs(q).view(sz_b, len_q, n_head, d_k)
        k = self.w_ks(k).view(sz_b, len_k, n_head, d_k)
        v = self.w_vs(v).view(sz_b, len_v, n_head, d_v)

        # Transpose for attention dot product: b x n x lq x dv
        q, k, v = q.transpose(1, 2), k.transpose(1, 2), v.transpose(1, 2)

        if mask is not None:
            mask = mask.unsqueeze(1)   # For head axis broadcasting.

        q, attn = self.attention(q, k, v, mask=mask)

        # Transpose to move the head dimension back: b x lq x n x dv
        # Combine the last two dimensions to concatenate all the heads together: b x lq x (n*dv)
        q = q.transpose(1, 2).contiguous().view(sz_b, len_q, -1)
        q = self.dropout(self.fc(q))
        q += residual

        q = self.layer_norm(q)
        return q, attn
class mlp_embeddingExtractor(nn.Module):
    """Two-layer MLP embedding extractor.

    Drop-in replacement for the attention modules above (same
    ``forward(_q, k, v, mask)`` signature) that simply projects the query
    through two linear layers; ``k``, ``v`` and ``mask`` are ignored.
    """
    def __init__(self, opt):
        # def __init__(self, n_head, d_model, d_k, d_v, compress=False, d_model_embed=1024, dropout=0.1):
        super().__init__()
        # Mirror all options onto the module (d_model, n_head, d_k, mlp_mha, ...).
        for key, val in opt.__dict__.items():
            setattr(self, key, val)
        self.relu01 = nn.ReLU()
        self.net1 = nn.Linear(self.d_model, self.n_head * self.d_k)
        if opt.compress:
            self.fc2 = nn.Linear(self.n_head * self.d_k, self.d_model_embed)
        else:
            self.fc2 = nn.Linear(self.n_head * self.d_k, self.d_model)
        if opt.orthogonal_init:
            print("------====== Orthogonalizing the initial weights")
            for _model in [self.net1, self.fc2]:
                torch.nn.init.orthogonal_(_model.weight)
        # Keep references to the initial weights (e.g. for later comparison).
        self.init_W1 = self.net1.weight
        self.init_W2 = self.fc2.weight
        # ipdb.set_trace()
    def forward(self, _q, k, v,mask=None):
        """Project ``_q`` to the embedding space; returns ``(embedding, None)``.

        The second return value is ``None`` to mirror the ``(output, attn)``
        shape of the attention modules.
        """
        M = self.net1(_q)
        # M = self.relu01(M)
        if self.mlp_mha == 6:
            # In mode 6 the second projection is frozen (no gradient flows).
            with torch.no_grad():
                M = self.fc2(M)
        else:
            M = self.fc2(M)
        return M, None
# NOTE(review): this class is a byte-identical duplicate of the
# PositionwiseFeedForward defined earlier in this module and shadows it at
# import time; one of the two definitions should eventually be removed.
class PositionwiseFeedForward(nn.Module):
    ''' A two-feed-forward-layer module '''
    def __init__(self, d_in, d_hid, dropout=0.1):
        super().__init__()
        self.w_1 = nn.Linear(d_in, d_hid) # position-wise
        self.w_2 = nn.Linear(d_hid, d_in) # position-wise
        self.layer_norm = nn.LayerNorm(d_in, eps=1e-6)
        self.dropout = nn.Dropout(dropout)
    def forward(self, x):
        # FFN(x) = LayerNorm(dropout(W2 relu(W1 x)) + x)
        residual = x
        x = self.w_2(F.relu(self.w_1(x)))
        x = self.dropout(x)
        x += residual
        x = self.layer_norm(x)
        return x
| 34.360434 | 116 | 0.563373 |
df98c17da5d782d995b14636c7c8dbbf773c556b
| 675 |
py
|
Python
|
Natural language processing_Python/reading_book/readingBook.py
|
Sinchiguano/codingProblems_Python
|
195549824d94cdf773c174faad9d57aa6d6ddc2c
|
[
"BSD-2-Clause"
] | 2 |
2020-12-12T19:00:51.000Z
|
2020-12-17T03:32:27.000Z
|
Natural language processing_Python/reading_book/readingBook.py
|
Sinchiguano/codingProblems_Python
|
195549824d94cdf773c174faad9d57aa6d6ddc2c
|
[
"BSD-2-Clause"
] | null | null | null |
Natural language processing_Python/reading_book/readingBook.py
|
Sinchiguano/codingProblems_Python
|
195549824d94cdf773c174faad9d57aa6d6ddc2c
|
[
"BSD-2-Clause"
] | 1 |
2021-07-06T04:25:06.000Z
|
2021-07-06T04:25:06.000Z
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2018 CESAR SINCHIGUANO <[email protected]>
#
# Distributed under terms of the BSD license.
"""
"""
from collections import Counter
# Input text file consumed by readBook()/main(), relative to the working directory.
title='shakespeare.txt'
def readBook(title_path):
    """Read the text file at ``title_path`` and return its contents as one
    string with all newline and carriage-return characters removed."""
    with open(title_path, 'r') as book_file:
        contents = book_file.read()
    return contents.replace('\n', '').replace('\r', '')
def main():
    """Locate a known phrase in the book and print it with 100 characters of context."""
    print('Cesar SINCHIGUANO')
    text = readBook(title)
    phrase = 'The King is kind'
    position = text.find(phrase)  # -1 when the phrase is absent
    print(position)
    sample = text[position:position + 100]
    print(sample)
# Run the demo only when executed directly (not when imported as a module).
if __name__=='__main__':
    main()
| 15.697674 | 66 | 0.671111 |
89e1425c488474f7854d9bb9a4c3d55a1cd3f431
| 2,723 |
py
|
Python
|
glue/viewers/matplotlib/qt/toolbar.py
|
HPLegion/glue
|
1843787ccb4de852dfe103ff58473da13faccf5f
|
[
"BSD-3-Clause"
] | 550 |
2015-01-08T13:51:06.000Z
|
2022-03-31T11:54:47.000Z
|
glue/viewers/matplotlib/qt/toolbar.py
|
HPLegion/glue
|
1843787ccb4de852dfe103ff58473da13faccf5f
|
[
"BSD-3-Clause"
] | 1,362 |
2015-01-03T19:15:52.000Z
|
2022-03-30T13:23:11.000Z
|
glue/viewers/matplotlib/qt/toolbar.py
|
HPLegion/glue
|
1843787ccb4de852dfe103ff58473da13faccf5f
|
[
"BSD-3-Clause"
] | 142 |
2015-01-08T13:08:00.000Z
|
2022-03-18T13:25:57.000Z
|
from matplotlib.backends.backend_qt5 import NavigationToolbar2QT
from glue.config import viewer_tool
from glue.viewers.common.tool import CheckableTool, Tool
__all__ = ['MatplotlibTool', 'MatplotlibCheckableTool', 'HomeTool', 'SaveTool',
'PanTool', 'ZoomTool']
def _ensure_mpl_nav(viewer):
    """Lazily attach a hidden Matplotlib navigation toolbar to *viewer*."""
    if hasattr(viewer, '_mpl_nav'):
        return
    toolbar = NavigationToolbar2QT(viewer.central_widget.canvas, viewer)
    toolbar.hide()
    viewer._mpl_nav = toolbar
def _cleanup_mpl_nav(viewer):
    """Detach and drop the hidden toolbar created by ``_ensure_mpl_nav``."""
    nav = getattr(viewer, '_mpl_nav', None)
    if nav is None:
        return
    nav.setParent(None)
    try:
        # Clearing `parent` is best-effort: not all toolbar versions allow
        # assigning to this attribute.
        nav.parent = None
    except AttributeError:
        pass
    viewer._mpl_nav = None
class MatplotlibTool(Tool):
    """Base class for non-checkable viewer tools backed by the hidden
    Matplotlib navigation toolbar."""

    def __init__(self, viewer=None):
        super().__init__(viewer=viewer)
        _ensure_mpl_nav(viewer)

    def close(self):
        _cleanup_mpl_nav(self.viewer)
        super().close()
class MatplotlibCheckableTool(CheckableTool):
    """Base class for checkable (toggle) viewer tools backed by the hidden
    Matplotlib navigation toolbar."""

    def __init__(self, viewer=None):
        super().__init__(viewer=viewer)
        _ensure_mpl_nav(viewer)

    def close(self):
        _cleanup_mpl_nav(self.viewer)
        super().close()
@viewer_tool
class HomeTool(MatplotlibTool):
    """Tool resetting the viewer to its original zoom level."""

    tool_id = 'mpl:home'
    icon = 'glue_home'
    action_text = 'Home'
    tool_tip = 'Reset original zoom'
    shortcut = 'H'

    def activate(self):
        # Prefer the glue viewer state's own reset; fall back to matplotlib.
        state = getattr(self.viewer, 'state', None)
        if state is not None and hasattr(state, 'reset_limits'):
            state.reset_limits()
        else:
            self.viewer._mpl_nav.home()
@viewer_tool
class SaveTool(MatplotlibTool):
    """Tool that opens Matplotlib's "save figure" dialog."""

    tool_id = 'mpl:save'
    icon = 'glue_filesave'
    action_text = 'Save plot to file'
    tool_tip = 'Save the figure'

    def activate(self):
        # Delegates to the hidden matplotlib toolbar's save dialog.
        self.viewer._mpl_nav.save_figure()
@viewer_tool
class PanTool(MatplotlibCheckableTool):
    """Checkable pan tool delegating to the Matplotlib toolbar."""

    tool_id = 'mpl:pan'
    icon = 'glue_move'
    action_text = 'Pan'
    tool_tip = 'Pan axes with left mouse, zoom with right'
    shortcut = 'M'

    def activate(self):
        self.viewer._mpl_nav.pan()

    def deactivate(self):
        # BUG FIX: _cleanup_mpl_nav() sets _mpl_nav to None rather than
        # deleting it, so a plain hasattr() check did not protect against an
        # AttributeError when deactivating after the viewer was closed.
        nav = getattr(self.viewer, '_mpl_nav', None)
        if nav is not None:
            nav.pan()
@viewer_tool
class ZoomTool(MatplotlibCheckableTool):
    """Checkable rectangle-zoom tool delegating to the Matplotlib toolbar."""

    tool_id = 'mpl:zoom'
    icon = 'glue_zoom_to_rect'
    action_text = 'Zoom'
    tool_tip = 'Zoom to rectangle'
    shortcut = 'Z'

    def activate(self):
        self.viewer._mpl_nav.zoom()

    def deactivate(self):
        # BUG FIX: _cleanup_mpl_nav() sets _mpl_nav to None rather than
        # deleting it, so a plain hasattr() check did not protect against an
        # AttributeError when deactivating after the viewer was closed.
        nav = getattr(self.viewer, '_mpl_nav', None)
        if nav is not None:
            nav.zoom()
| 24.754545 | 88 | 0.666911 |
baa4901ee90edadf09e62750db635671430a0fcb
| 9,375 |
py
|
Python
|
lib/voice/pico/tts/configure.py
|
Gara64/Irma
|
a388d56447df3d587b7c2b9bcf9662a0bd4313f1
|
[
"BSD-3-Clause"
] | null | null | null |
lib/voice/pico/tts/configure.py
|
Gara64/Irma
|
a388d56447df3d587b7c2b9bcf9662a0bd4313f1
|
[
"BSD-3-Clause"
] | null | null | null |
lib/voice/pico/tts/configure.py
|
Gara64/Irma
|
a388d56447df3d587b7c2b9bcf9662a0bd4313f1
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################################
import os
import time
import sys
import subprocess
##############################################################################################
class ConfirugePicoPi(object):
    """Command-line configurator/builder for picoPi2.

    Parses command-line flags, shows the chosen settings, asks for
    confirmation and invokes ``make`` with the matching Makefile.
    (The misspelled class name is kept for backwards compatibility.)
    """

    def __init__(self, argv):
        """Initialize defaults from *argv* (``sys.argv``-style list) and run the flow."""
        self._VERSION = "v0.0.1"
        self._DEV_PAGE = "https://github.com/ch3ll0v3k/picoPi2"
        self._argv = " ".join(argv)
        self._argc = len(argv)
        self._app_w = 80  # total width of the pretty-printed menu body
        self._cmd_w = 24  # width of the left (option name) column
        # Defaults, possibly overridden by ParseArgs() below.
        self.MAKE = False
        self.LANG_TTS = "USA"
        self.LANG_BIN_PATH = "../lang/"
        self.LANGS = ["GBR", "USA", "FRA", "DEU", "SPA", "ITA"]
        self.TO_STD = True  # True -> STD. False -> .wav
        self._SYS_MSG = " Welcome to picoPi2"
        self.ParseArgs(self._argv)

    def Confiruge(self):
        # NOTE(review): apparently unused; kept because external code may call it.
        os.system("clear")

    def ParseArgs(self, _argv):
        """Validate command-line flags, then show the confirmation screen."""
        if "--help" in self._argv:
            self.PrintUsage()
        elif "--version" in self._argv:
            print(" Version: " + self._VERSION)
            exit()
        elif self._argc < 2:
            self.PrintUsage()
        if "--make" in self._argv:
            self.MAKE = True
        else:
            self._SYS_MSG = " ERROR: (--make) must be presented!"
            self.PrintUsage()
        if "--lang-tts" in self._argv:
            lang_tts_arg = self.GetCmdArg("--lang-tts")
            if lang_tts_arg[1].strip() in self.LANGS:
                self.LANG_TTS = lang_tts_arg[1].strip()
            else:
                self._SYS_MSG = " ERROR: Unknown TTS lang"
                self.PrintUsage()
        # BUG FIX: this branch previously tested for "--lang-bin" but parsed
        # "--lang-bin-dir", so passing the bare "--lang-bin" flag raised an
        # IndexError. The documented option is "--lang-bin-dir".
        if "--lang-bin-dir" in self._argv:
            path = os.path.abspath(self.GetCmdArg("--lang-bin-dir")[1].strip())
            if os.path.isdir(path):
                self.LANG_BIN_PATH = path
            else:
                self._SYS_MSG = " ERROR: Directory NOT-EXITST ->\n\t" + path
                self.PrintUsage()
        if "--2wav" in self._argv:
            self.TO_STD = False
        self.DisplayConfSetting()

    def PrintUsage(self):
        """Print the usage screen with the current status message, then exit."""
        tb = " "
        os.system("clear")
        self._line()
        print(" " + "\033[01;31m" + self._SYS_MSG + "\033[0m")
        self._line()
        self._print(" Usage: ./configure.py [ --make | --lang-tts | lang-bin ]")
        self._print("")
        self._print(tb + "--help:", "Display this help")
        self._print(tb + "--make:", "Compile")
        self._print(tb + "--lang-tts:", "Select TTS language " + tb + "(Default USA)")
        self._print(tb + " ", tb + "[GBR, USA, FRA, DEU, SPA, ITA]")
        self._print(tb + "--lang-bin-dir:", "Binary languages directory " + tb + '(Default "../lang/")')
        self._print(tb + "--2wav:", "Binary wil output sound to '.wav' file. (Default) ")
        self._print(tb + "--2std:", "Binary wil output sound to std.")
        self._print(tb + "--version:", "Print current Version")
        self._line()
        self._print(" Sample:")
        self._print("")
        self._print(tb + "[./configure.py --make ] -> Compile (default setting) ")
        self._print(tb + "[./configure.py --2wav ] picoPi2Wav -w file.wav 'this' && aplay file.wav ")
        self._print(tb + "[./configure.py ] picoPi2Stdout | aplay ")
        self._print(tb + "[./configure.py --make lang-tts SPA] -> Compile Spanish TTS ")
        self._print(tb + "[./configure.py --make lang-bin \"/path/to/\"] -> Force to use this dir ")
        self._line()
        self._print("")
        self._print(" Version: " + self._VERSION)
        self._print(" Dev-Page: " + self._DEV_PAGE)
        self._print("")
        self._line()
        print("")
        self._SYS_MSG = ""
        exit(0)

    def _ReadAnswer(self, prompt):
        """Read one line of user input; works on both Python 2 and 3."""
        try:
            reader = raw_input  # Python 2
        except NameError:
            reader = input      # Python 3
        return str(reader(prompt))

    def DisplayConfSetting(self):
        """Show the chosen settings, ask for confirmation and run the build."""
        tb = " "
        os.system("clear")
        self._line()
        self._print(" This SETTING wil be used:")
        self._print("")
        self._print(tb + "--make:", '"' + str(self.MAKE) + '"')
        self._print(tb + "--lang-tts:", '"' + self.LANG_TTS + '"')
        self._print(tb + "--lang-bin-dir:", '"' + self.LANG_BIN_PATH + '"')
        if self.TO_STD:
            self._print(tb + "--2std:", '"True"')
            self._print(tb + " Binary name:", '"./picoPi2Stdout"')
        else:
            self._print(tb + "--2wav:", '"True"')
            self._print(tb + " Binary name:", '"./picoPi2Wav"')
        self._print("")
        self._line()
        self._print("")
        ANSW = self._ReadAnswer(" Correct SETTINGS (Y/N) $> ")
        while ANSW != "Y" and ANSW != "N":
            ANSW = self._ReadAnswer(" " + "\033[01;31m Invalid option select (Y/N)\033[0m $> ")
        # BUG FIX: the abort check previously lived *inside* the retry loop,
        # so answering "N" at the very first prompt did not abort and the
        # build was started anyway. Checking after the loop covers both paths.
        if ANSW == "N":
            exit(0)
        self._print("")
        self._line()
        print("")
        print(" Compiling ... \n")
        if self.TO_STD:
            subprocess.call("make LANGTTS=" + self.LANG_TTS + " -f ./Makefile.picoPi2Std.mkf", shell=True)
        else:
            subprocess.call("make -f ./Makefile.picoPi2Wav.mkf", shell=True)

    def _print(self, _str_A="", _str_B=""):
        """Print one menu row: a full-width line, or a two-column option row."""
        if _str_B == "":
            print(str("|{0:" + str(self._app_w) + "}|").format(_str_A))
        elif _str_A != "" and _str_B != "":
            _STD = "|{0:" + str(self._cmd_w) + "}{1:" + str(self._app_w - self._cmd_w) + "}|"
            print(_STD.format(_str_A, _str_B))
        else:
            print("|{0:22}{1:22}| ".format(_str_A, _str_B))

    def GetCmdArg(self, _cmd):
        """Return ``(flag, value)`` where value is the token following *_cmd* in argv."""
        return (_cmd, self._argv.strip().split(_cmd)[1].strip().split(" ")[0].strip())

    def _line(self):
        """Print a horizontal separator spanning the menu width."""
        print("|" + ("-" * self._app_w) + "|")
# ===========================================================================
##############################################################################################
# Constructing the object immediately parses argv and runs the configure flow.
if __name__ == "__main__":
    _ConfirugePicoPi = ConfirugePicoPi(sys.argv);
| 40.409483 | 106 | 0.308693 |
3e5b7987f02e377f03496cae63db59308374e18e
| 1,562 |
py
|
Python
|
marketplace/shops/api/views.py
|
FMajesty/dalytics-testtask
|
433eea49f13c4140e08ca432fa8277e18123d1b3
|
[
"MIT"
] | null | null | null |
marketplace/shops/api/views.py
|
FMajesty/dalytics-testtask
|
433eea49f13c4140e08ca432fa8277e18123d1b3
|
[
"MIT"
] | null | null | null |
marketplace/shops/api/views.py
|
FMajesty/dalytics-testtask
|
433eea49f13c4140e08ca432fa8277e18123d1b3
|
[
"MIT"
] | null | null | null |
from django.contrib.auth import get_user_model
from rest_framework.decorators import action
from rest_framework.mixins import ListModelMixin
from rest_framework.mixins import RetrieveModelMixin
from rest_framework.mixins import UpdateModelMixin
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from marketplace.shops.api.permissions import ShopOwnerPermission
from marketplace.shops.api.serializers import ShopPriceInfoSerializer
from marketplace.shops.api.serializers import ShopSerializer
from marketplace.shops.models import Shop
User = get_user_model()
class ShopViewSet(RetrieveModelMixin, ListModelMixin, UpdateModelMixin, GenericViewSet):
    """Retrieve/list/update endpoints for shops plus an aggregated price-info listing."""

    permission_classes = [ShopOwnerPermission]
    serializer_class = ShopSerializer
    queryset = Shop.objects.all()
    lookup_field = "id"

    @action(detail=False, methods=["get"], url_path="price-info")
    def price_info(self, request):
        """Return price information for all shops, paginated when a paginator is configured."""
        shops = Shop.objects.all()
        page = self.paginate_queryset(shops)
        target = shops if page is None else page
        serializer = ShopPriceInfoSerializer(target, many=True)
        if page is None:
            return Response(serializer.data)
        return self.get_paginated_response(serializer.data)
| 42.216216 | 90 | 0.771447 |
548fc53a3b228e4472aad48dbc20ec16d5f36a35
| 4,072 |
py
|
Python
|
dev/Tools/build/waf-1.7.13/lmbrwaflib/compile_settings_android_armv8.py
|
CJoriginal/cjlumberyard
|
2e3184a7d8e59ba05e5707371b8cb6fe40b0ca60
|
[
"AML"
] | 2 |
2019-05-13T22:21:28.000Z
|
2019-05-24T22:52:01.000Z
|
dev/Tools/build/waf-1.7.13/lmbrwaflib/compile_settings_android_armv8.py
|
CJoriginal/cjlumberyard
|
2e3184a7d8e59ba05e5707371b8cb6fe40b0ca60
|
[
"AML"
] | null | null | null |
dev/Tools/build/waf-1.7.13/lmbrwaflib/compile_settings_android_armv8.py
|
CJoriginal/cjlumberyard
|
2e3184a7d8e59ba05e5707371b8cb6fe40b0ca60
|
[
"AML"
] | 5 |
2020-08-27T20:44:18.000Z
|
2021-08-21T22:54:11.000Z
|
#
# All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
# its licensors.
#
# For complete copyright and license terms please see the LICENSE at the root of this
# distribution (the "License"). All use of this software is governed by the License,
# or, if provided, by the license below or the license accompanying this file. Do not
# remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
import os
from cry_utils import append_to_unique_list
from waflib import Logs
from waflib.Configure import conf
################################################################
################################################################
@conf
def load_android_armv8_common_settings(conf):
    """
    Setup all compiler and linker settings shared over all android armv8 configurations
    """
    # remove the armv8 android build target if it doesn't meet the min API requirement.
    # letting the platform finish configuring is harmless.
    if (not conf.is_android_armv8_api_valid()) and ('android_armv8_clang' in conf.get_supported_platforms()):
        Logs.warn('[WARN] Attempting to configure Android ARMv8 with an API that is lower than the min spec: API 21. Disabling the Android ARMv8 build target.')
        conf.remove_platform_from_available_platforms('android_armv8_clang')

    env = conf.env

    env['ANDROID_ARCH'] = 'arm64-v8a'

    ndk_root = env['ANDROID_NDK_HOME']
    ndk_rev = env['ANDROID_NDK_REV_MAJOR']

    # NDK r19+ unified the sysroot/header layout, so the explicit sysroot,
    # include and library paths below are only required for older NDKs.
    is_ndk_19_plus = (ndk_rev >= 19)

    defines = [
        'LINUX64',
        '__ARM_NEON__',
    ]
    append_to_unique_list(env['DEFINES'], defines)

    if not is_ndk_19_plus:
        platform_root_compile = os.path.join(ndk_root, 'sysroot')
        platform_root_link = os.path.join(ndk_root, 'platforms', env['ANDROID_NDK_PLATFORM'], 'arch-arm64')

        env['INCLUDES'] += [
            os.path.join(platform_root_compile, 'usr', 'include'),
        ]

        common_flags = [
            '--sysroot={}'.format(platform_root_compile),
            '-isystem', os.path.join(platform_root_compile, 'usr', 'include', 'aarch64-linux-android'),
        ]

        env['CFLAGS'] += common_flags[:]
        env['CXXFLAGS'] += common_flags[:]

        env['LIBPATH'] += [
            os.path.join(platform_root_link, 'usr', 'lib')
        ]

        env['LINKFLAGS'] += [
            '--sysroot={}'.format(platform_root_link),
        ]
@conf
def load_debug_android_armv8_settings(conf):
    """
    Apply the android armv8 settings specific to the "debug" configuration,
    on top of the shared armv8 settings.
    """
    conf.load_android_armv8_common_settings()

    # Bundle gdbserver with the apk; it is only needed for debuggable builds.
    gdbserver_path = os.path.join(
        conf.env['ANDROID_NDK_HOME'], 'prebuilt', 'android-arm64', 'gdbserver', 'gdbserver')
    conf.env['EXT_LIBS'] += [conf.add_to_android_cache(gdbserver_path)]
@conf
def load_profile_android_armv8_settings(conf):
    """
    Apply the android armv8 settings specific to the "profile" configuration,
    on top of the shared armv8 settings.
    """
    conf.load_android_armv8_common_settings()

    # Bundle gdbserver with the apk; it is only needed for debuggable builds.
    gdbserver_path = os.path.join(
        conf.env['ANDROID_NDK_HOME'], 'prebuilt', 'android-arm64', 'gdbserver', 'gdbserver')
    conf.env['EXT_LIBS'] += [conf.add_to_android_cache(gdbserver_path)]
@conf
def load_performance_android_armv8_settings(conf):
    """
    Apply the android armv8 settings specific to the "performance"
    configuration; nothing is needed beyond the shared armv8 settings.
    """
    conf.load_android_armv8_common_settings()
@conf
def load_release_android_armv8_settings(conf):
    """
    Apply the android armv8 settings specific to the "release" configuration;
    nothing is needed beyond the shared armv8 settings.
    """
    conf.load_android_armv8_common_settings()
| 34.218487 | 161 | 0.674607 |
f6a6462760fdb07b08a1d4e3ce1c414e634e4c28
| 3,515 |
py
|
Python
|
tensorflow/python/ops/numpy_ops/np_random.py
|
TOT0RoKR/tensorflow
|
12c2babf7dccc00c13d6e297c0f792f89f7408aa
|
[
"Apache-2.0"
] | 10 |
2021-05-25T17:43:04.000Z
|
2022-03-08T10:46:09.000Z
|
tensorflow/python/ops/numpy_ops/np_random.py
|
CaptainGizzy21/tensorflow
|
3457a2b122e50b4d44ceaaed5a663d635e5c22df
|
[
"Apache-2.0"
] | 1,056 |
2019-12-15T01:20:31.000Z
|
2022-02-10T02:06:28.000Z
|
tensorflow/python/ops/numpy_ops/np_random.py
|
CaptainGizzy21/tensorflow
|
3457a2b122e50b4d44ceaaed5a663d635e5c22df
|
[
"Apache-2.0"
] | 6 |
2016-09-07T04:00:15.000Z
|
2022-01-12T01:47:38.000Z
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Random functions."""
# pylint: disable=g-direct-tensorflow-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as onp
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.numpy_ops import np_array_ops
from tensorflow.python.ops.numpy_ops import np_dtypes
from tensorflow.python.ops.numpy_ops import np_utils
# TODO(agarwal): deprecate this.
DEFAULT_RANDN_DTYPE = onp.float32
@np_utils.np_doc('random.seed')
def seed(s):
  """Sets the seed for the random number generator.

  Uses `tf.set_random_seed`.

  Args:
    s: an integer.

  Raises:
    ValueError: if `s` cannot be converted to an integer.
  """
  try:
    s = int(s)
  except (TypeError, ValueError):
    # int() raises TypeError for unsupported types and ValueError for values
    # (e.g. non-numeric strings) that do not parse as an integer; both mean
    # the caller did not supply a usable seed. The original code caught only
    # TypeError, letting a raw ValueError escape with a confusing message.
    # TODO(wangpeng): support this?
    raise ValueError('np.seed currently only support integer arguments.')
  random_seed.set_seed(s)
@np_utils.np_doc('random.randn')
def randn(*args):
  """Samples from a standard normal distribution.

  Thin wrapper over `standard_normal`; uses `tf.random_normal` underneath.

  Args:
    *args: Dimensions of the output array, given as separate arguments.

  Returns:
    An ndarray of shape `args` holding standard-normal samples.
  """
  return standard_normal(args)
@np_utils.np_doc('random.standard_normal')
def standard_normal(size=None):
  """Samples a tensor of shape `size` from the standard normal distribution."""
  # TODO(wangpeng): Use new stateful RNG
  if size is None:
    shape = ()
  elif np_utils.isscalar(size):
    shape = (size,)
  else:
    shape = size
  return random_ops.random_normal(shape, dtype=np_dtypes.default_float_type())
@np_utils.np_doc('random.uniform')
def uniform(low=0.0, high=1.0, size=None):
  """Draws samples uniformly from `[low, high)`.

  When `size` is None the output shape is the broadcast of the shapes of
  `low` and `high`.
  """
  dtype = np_dtypes.default_float_type()
  minval = np_array_ops.asarray(low, dtype=dtype)
  maxval = np_array_ops.asarray(high, dtype=dtype)
  shape = size
  if shape is None:
    shape = array_ops.broadcast_dynamic_shape(minval.shape, maxval.shape)
  return random_ops.random_uniform(
      shape=shape, minval=minval, maxval=maxval, dtype=dtype)
@np_utils.np_doc('random.poisson')
def poisson(lam=1.0, size=None):
  """Draws integer samples from a Poisson distribution with rate `lam`."""
  if size is None:
    shape = ()
  else:
    shape = (size,) if np_utils.isscalar(size) else size
  return random_ops.random_poisson(shape=shape, lam=lam, dtype=np_dtypes.int_)
@np_utils.np_doc('random.random')
def random(size=None):
  """Uniform samples in `[0, 1)`; equivalent to `uniform(0., 1., size)`."""
  return uniform(0.0, 1.0, size)
@np_utils.np_doc('random.rand')
def rand(*size):
  """Uniform samples in `[0, 1)` with shape given by the positional args."""
  return uniform(0.0, 1.0, size)
@np_utils.np_doc('random.randint')
def randint(low, high=None, size=None, dtype=int):  # pylint: disable=missing-function-docstring
  # Returns random integers in [low, high) (or [0, low) when `high` is None).
  #
  # Fix: the default dtype used to be `onp.int`, a deprecated alias for the
  # builtin `int` that was removed in NumPy 1.24 and made this module fail to
  # import. Using `int` directly is behavior-identical on every NumPy version.
  low = int(low)
  if high is None:
    # NumPy semantics: a single bound means the range is [0, low).
    high = low
    low = 0
  if size is None:
    size = ()
  elif isinstance(size, int):
    size = (size,)
  dtype = np_utils.result_type(dtype)
  if dtype not in (onp.int32, onp.int64):
    raise ValueError('Only np.int32 or np.int64 types are supported')
  return random_ops.random_uniform(
      shape=size, minval=low, maxval=high, dtype=dtype)
| 28.346774 | 100 | 0.715789 |
e86f3fdf236857df4d114a60f9f12c2fffc0248d
| 9,471 |
py
|
Python
|
GPflow/testing/test_variational.py
|
mlilab/Mixed-Effect-Composite-RNN-Gaussian-Process
|
dd7da89ce3c41d459a26ad1ce5ed2f40ab4ca85d
|
[
"Apache-2.0"
] | 24 |
2018-11-29T07:00:59.000Z
|
2021-04-22T19:12:31.000Z
|
GPflow/testing/test_variational.py
|
mlilab/Mixed-Effect-Composite-RNN-Gaussian-Process
|
dd7da89ce3c41d459a26ad1ce5ed2f40ab4ca85d
|
[
"Apache-2.0"
] | 1 |
2018-12-04T11:51:21.000Z
|
2018-12-04T11:51:21.000Z
|
GPflow/testing/test_variational.py
|
OpenXAIProject/Mixed-Effect-Composite-RNN-Gaussian-Process
|
dd7da89ce3c41d459a26ad1ce5ed2f40ab4ca85d
|
[
"Apache-2.0"
] | 12 |
2018-11-30T00:40:13.000Z
|
2019-10-30T16:09:52.000Z
|
# Copyright 2016 the GPflow authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.from __future__ import print_function
import gpflow
import tensorflow as tf
import numpy as np
import unittest
from .reference import referenceRbfKernel
from testing.gpflow_testcase import GPflowTestCase
def referenceUnivariateLogMarginalLikelihood(y, K, noiseVariance):
    # Log marginal likelihood of a single noisy observation of a zero-mean
    # 1-D GP: y ~ N(0, K + noiseVariance).
    total_variance = K + noiseVariance
    quadratic_term = -0.5 * y * y / total_variance
    log_det_term = -0.5 * np.log(total_variance)
    constant_term = -0.5 * np.log(np.pi * 2.)
    return quadratic_term + log_det_term + constant_term
def referenceUnivariatePosterior(y, K, noiseVariance):
    """Posterior over f for a zero-mean 1-D GP prior f ~ N(0, K) and one
    noisy observation y = f + eps, eps ~ N(0, noiseVariance).

    Returns (mean, variance) with
        mean     = K y / (K + noiseVariance)
        variance = K - K^2 / (K + noiseVariance)

    Fix: the previous code computed the variance as K - K / (K + noiseVariance),
    which only equals the correct posterior variance when K == 1 (the value
    used by the tests in this file).
    """
    total = K + noiseVariance
    mean = K * y / total
    variance = K - K * K / total
    return mean, variance
def referenceUnivariatePriorKL(meanA, meanB, varA, varB):
    """KL[ N(meanA, varA) || N(meanB, varB) ] for univariate Gaussians."""
    mean_diff = meanB - meanA
    log_var_ratio = np.log(varB) - np.log(varA)
    return 0.5 * (log_var_ratio - 1. + varA / varB + mean_diff * mean_diff / varB)
def referenceMultivariatePriorKL(meanA, covA, meanB, covB):
    """KL[ N(meanA, covA) || N(meanB, covB) ] for K-dim multivariate normals.

    Closed form:
        0.5 * ( Tr(covB^{-1} covA)
                + (meanB - meanA)^T covB^{-1} (meanB - meanA)
                - K + log det(covB) - log det(covA) )
    """
    dimension = covA.shape[0]
    trace_term = np.trace(np.linalg.solve(covB, covA))
    delta = meanB - meanA
    mahalanobis_term = np.dot(delta.T, np.linalg.solve(covB, delta))
    log_det_ratio = np.linalg.slogdet(covB)[1] - np.linalg.slogdet(covA)[1]
    return 0.5 * (trace_term + mahalanobis_term - dimension + log_det_ratio)
def kernel(kernelVariance=1, lengthScale=1.):
    """Build a one-dimensional RBF kernel with the given hyperparameters."""
    rbf = gpflow.kernels.RBF(1)
    rbf.variance = kernelVariance
    rbf.lengthscales = lengthScale
    return rbf
class VariationalUnivariateTest(GPflowTestCase):
    # Checks the SVGP variational implementation against closed-form results
    # for the simplest possible setting: a single datum at x=0, one latent
    # function, an RBF kernel with unit variance and a Gaussian likelihood.

    def setUp(self):
        self.y_real = 2.
        self.K = 1.
        self.noiseVariance = 0.5
        self.univariate = 1
        self.oneLatentFunction = 1
        self.meanZero = 0.
        self.X = np.atleast_2d(np.array([0.]))
        self.Y = np.atleast_2d(np.array([self.y_real]))
        self.Z = self.X.copy()
        self.lik = gpflow.likelihoods.Gaussian()
        self.lik.variance = self.noiseVariance
        # Closed-form posterior for the single observation; used both to set
        # the variational parameters and as the expected test result.
        self.posteriorMean, self.posteriorVariance = referenceUnivariatePosterior(
            y=self.y_real, K=self.K,
            noiseVariance=self.noiseVariance)
        self.posteriorStd = np.sqrt(self.posteriorVariance)

    def get_model(self, is_diagonal, is_whitened):
        # Build an SVGP whose variational posterior is pinned to the analytic
        # posterior, so the ELBO equals the log marginal likelihood and the
        # prior KL matches the reference value.
        m = gpflow.svgp.SVGP(X=self.X, Y=self.Y,
                             kern=kernel(kernelVariance=self.K),
                             likelihood=self.lik, Z=self.Z, q_diag=is_diagonal, whiten=is_whitened)
        if is_diagonal:
            m.q_sqrt = (np.ones((self.univariate, self.oneLatentFunction))
                        * self.posteriorStd)
        else:
            m.q_sqrt = (np.ones((self.univariate, self.univariate, self.oneLatentFunction))
                        * self.posteriorStd)
        m.q_mu = np.ones((self.univariate, self.oneLatentFunction)) * self.posteriorMean
        return m

    def test_prior_KL(self):
        with self.test_session():
            meanA = self.posteriorMean
            varA = self.posteriorVariance
            meanB = self.meanZero  # Assumes a zero-mean prior.
            varB = self.K
            referenceKL = referenceUnivariatePriorKL(meanA, meanB, varA, varB)
            # Exercise all four parameterizations of the variational posterior.
            for is_diagonal in [True, False]:
                for is_whitened in [True, False]:
                    m = self.get_model(is_diagonal, is_whitened)
                    # AutoFlow wraps the unbound method so the KL tensor can be
                    # evaluated outside of tf_mode.
                    test_prior_KL = gpflow.param.AutoFlow()(m.build_prior_KL.__func__)(m)
                    self.assertTrue(np.abs(referenceKL - test_prior_KL) < 1e-4)

    def test_build_likelihood(self):
        with self.test_session():
            # reference marginal likelihood
            log_marginal_likelihood = referenceUnivariateLogMarginalLikelihood(
                y=self.y_real, K=self.K, noiseVariance=self.noiseVariance)
            for is_diagonal in [True, False]:
                for is_whitened in [True, False]:
                    model = self.get_model(is_diagonal, is_whitened)
                    # With the variational posterior set to the exact posterior
                    # the ELBO is tight, i.e. equals the marginal likelihood.
                    model_likelihood = model.compute_log_likelihood()
                    self.assertTrue(
                        np.abs(model_likelihood - log_marginal_likelihood) < 1e-4)

    def testUnivariateConditionals(self):
        with self.test_session() as sess:
            for is_diagonal in [True, False]:
                for is_whitened in [True, False]:
                    m = self.get_model(is_diagonal, is_whitened)
                    # Placeholder feeding the model's free state into the graph.
                    free_vars = tf.placeholder(tf.float64)
                    m.make_tf_array(free_vars)
                    with m.tf_mode():
                        if is_whitened:
                            args = (self.X,
                                    self.Z,
                                    m.kern,
                                    m.q_mu,
                                    m.q_sqrt,
                                    self.oneLatentFunction)
                            fmean_func, fvar_func = gpflow.conditionals.gaussian_gp_predict_whitened(*args)
                        else:
                            args = (self.X,
                                    self.Z,
                                    m.kern,
                                    m.q_mu,
                                    m.q_sqrt,
                                    self.oneLatentFunction)
                            fmean_func, fvar_func = gpflow.conditionals.gaussian_gp_predict(*args)
                    # Predicting at the training input must reproduce the
                    # analytic posterior mean and variance.
                    mean_value = fmean_func.eval(
                        session=sess, feed_dict={free_vars: m.get_free_state()})[0, 0]
                    var_value = fvar_func.eval(
                        session=sess, feed_dict={free_vars: m.get_free_state()})[0, 0]
                    self.assertTrue(np.abs(mean_value - self.posteriorMean) < 1e-4)
                    self.assertTrue(np.abs(var_value - self.posteriorVariance) < 1e-4)
class VariationalMultivariateTest(GPflowTestCase):
    # Checks the SVGP prior KL against the closed-form multivariate Gaussian
    # KL on a small (3-point) random problem.

    def setUp(self):
        self.nDimensions = 3
        # Fixed seed so the random problem is reproducible across runs.
        self.rng = np.random.RandomState(1)
        self.Y = self.rng.randn(self.nDimensions, 1)
        self.X = self.rng.randn(self.nDimensions, 1)
        self.Z = self.X.copy()
        self.noiseVariance = 0.5
        self.signalVariance = 1.5
        self.lengthScale = 1.7
        self.oneLatentFunction = 1
        self.lik = gpflow.likelihoods.Gaussian()
        self.lik.variance = self.noiseVariance
        # Random variational parameters: a mean, a diagonal sqrt and a full
        # lower-triangular sqrt of the variational covariance.
        self.q_mean = self.rng.randn(self.nDimensions, self.oneLatentFunction)
        self.q_sqrt_diag = self.rng.rand(self.nDimensions, self.oneLatentFunction)
        self.q_sqrt_full = np.tril(self.rng.rand(self.nDimensions, self.nDimensions))

    def getModel(self, is_diagonal, is_whitened):
        model = gpflow.svgp.SVGP(
            X=self.X, Y=self.Y,
            kern=kernel(kernelVariance=self.signalVariance, lengthScale=self.lengthScale),
            likelihood=self.lik,
            Z=self.Z,
            q_diag=is_diagonal,
            whiten=is_whitened)
        if is_diagonal:
            model.q_sqrt = self.q_sqrt_diag
        else:
            model.q_sqrt = self.q_sqrt_full[:, :, None]
        model.q_mu = self.q_mean
        return model

    # NOTE(review): method name keeps the original "refrence" typo so that the
    # test id stays stable; the code itself is unchanged.
    def test_refrence_implementation_consistency(self):
        # The 1x1 multivariate KL must agree with the univariate formula.
        with self.test_session():
            rng = np.random.RandomState(10)
            qMean = rng.randn()
            qCov = rng.rand()
            pMean = rng.rand()
            pCov = rng.rand()
            univariate_KL = referenceUnivariatePriorKL(qMean, pMean, qCov, pCov)
            multivariate_KL = referenceMultivariatePriorKL(
                np.array([[qMean]]), np.array([[qCov]]),
                np.array([[pMean]]), np.array([[pCov]]))
            self.assertTrue(np.abs(univariate_KL - multivariate_KL) < 1e-4)

    def test_prior_KL_fullQ(self):
        with self.test_session():
            # Variational covariance implied by the full lower-triangular sqrt.
            covQ = np.dot(self.q_sqrt_full, self.q_sqrt_full.T)
            mean_prior = np.zeros((self.nDimensions, 1))
            for is_whitened in [True, False]:
                m = self.getModel(False, is_whitened)
                # Whitened models measure the KL against a standard normal;
                # unwhitened models measure it against the kernel prior.
                if is_whitened:
                    cov_prior = np.eye(self.nDimensions)
                else:
                    cov_prior = referenceRbfKernel(
                        self.X, self.lengthScale, self.signalVariance)
                referenceKL = referenceMultivariatePriorKL(
                    self.q_mean, covQ, mean_prior, cov_prior)
                # now get test KL.
                test_prior_KL = gpflow.param.AutoFlow()(m.build_prior_KL.__func__)(m)
                self.assertTrue(np.abs(referenceKL - test_prior_KL) < 1e-4)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| 42.855204 | 111 | 0.587583 |
210fc81c8fd84681439840f79afd20ceefa2c47b
| 9,770 |
py
|
Python
|
src/snc/agents/general_heuristics/custom_parameters_priority_agent.py
|
dmcnamee/snc
|
c2da8c1e9ecdc42c59b9de73224b3d50ee1c9786
|
[
"Apache-2.0"
] | 5 |
2021-03-24T16:23:10.000Z
|
2021-11-17T12:44:51.000Z
|
src/snc/agents/general_heuristics/custom_parameters_priority_agent.py
|
dmcnamee/snc
|
c2da8c1e9ecdc42c59b9de73224b3d50ee1c9786
|
[
"Apache-2.0"
] | 3 |
2021-03-26T01:16:08.000Z
|
2021-05-08T22:06:47.000Z
|
src/snc/agents/general_heuristics/custom_parameters_priority_agent.py
|
dmcnamee/snc
|
c2da8c1e9ecdc42c59b9de73224b3d50ee1c9786
|
[
"Apache-2.0"
] | 2 |
2021-03-24T17:20:06.000Z
|
2021-04-19T09:01:12.000Z
|
import numpy as np
from typing import Optional, Any
from snc.utils import snc_types as types
from snc.environments import controlled_random_walk as crw
from snc.agents import agents_utils
from snc.agents.agent_interface import AgentInterface
class CustomParametersPriorityAgent(AgentInterface):
    # Greedy non-idling heuristic: each resource repeatedly picks the activity
    # with the most negative "priority value" whose source buffers are all
    # non-empty. Priority values are a configurable mix of buffer state, buffer
    # cost and activity rate.

    def __init__(self, env: crw.ControlledRandomWalk, state_option: bool, cost_option: bool,
                 rate_option: bool, name: str, agent_seed: Optional[int] = None) -> None:
        """
        Non-idling policy for push models where each activity can be done only by one resource.
        Buffers on which to work and activities to performed are decided based on a custom
        combination of buffers state, buffer cost, and activity rate. Flags are used to specify
        which combination of such parameters is used to determine priority. In order to be able to
        work on a buffer, its state has to be higher than zero. If there are multiple options with
        the same value, each resource chooses among them randomly.

        :param env: the environment to stepped through.
        :param state_option: whether the current state is considered when computing the priority
            values.
        :param cost_option: whether the cost is considered when computing the priority values.
        :param rate_option: whether the activities rate is considered when computing the priority
            values.
        :param name: Agent identifier.
        :return: None.
        """
        # verify that at least an option has been selected
        assert any([state_option, cost_option, rate_option])
        # verify that each activity can be performed by only one resource
        assert agents_utils.has_orthogonal_rows(env.constituency_matrix), \
            "Constituency matrix must have orthogonal rows."
        # verify that the environment is a push model
        # (every resource must be constrained by at least one buffer)
        for resource_constraints in env.list_boundary_constraint_matrices:
            assert np.all(np.sum(resource_constraints, axis=1) >= 1)

        super().__init__(env, name, agent_seed)
        self._state_option = state_option
        self._cost_option = cost_option
        self._rate_option = rate_option
        self.env = env

    def compute_priority_values(self, state: types.StateSpace) -> np.ndarray:
        """
        Build a (num buffers x num activities) matrix of priority values.
        Entries are <= 0; more negative means higher priority, and exactly the
        (buffer, activity) pairs where the activity drains the buffer are
        non-zero.
        """
        # we care only about activities that can work on buffers, i.e., negative value in the
        # buffer_processing_matrix
        bpm = self.env.job_generator.buffer_processing_matrix
        priority_values = np.where(bpm < 0, -1, 0)
        if self._rate_option:
            # Divide by the (positive) drain rate so slower activities get a
            # more negative value.
            # positive elements in bpm are set to 1 to avoid division by zero.
            priority_values = \
                np.divide(priority_values, np.where(bpm < 0, -bpm, 1))
        if self._state_option:
            # Scale each row by the corresponding buffer's current length.
            priority_values = np.multiply(priority_values, state)
        if self._cost_option:
            # Scale each row by the corresponding buffer's holding cost.
            priority_values = np.multiply(priority_values, self.env.cost_per_buffer)
        return priority_values

    def map_state_to_actions(self, state: types.StateSpace, **override_args: Any) \
            -> types.ActionProcess:
        """
        Returns action such that buffers on which to work and activities to performed are
        decided based on the custom combination of parameters. If there are multiple options
        with the same value, each resource chooses among them randomly.

        :param state: Current state of the system.
        :param override_args: extra policy-specific arguments not needed for this heuristic.
        :return action: Action vector.
        """
        _, num_activities = self.env.job_generator.buffer_processing_matrix.shape
        action = np.zeros((num_activities, 1))
        priority_values = self.compute_priority_values(state)

        # For each resource
        for resource_constituency in self.constituency_matrix:
            activities_of_resource, = np.where(resource_constituency == 1)
            search_action = True
            while search_action:
                min_value = np.amin(priority_values[:, activities_of_resource])
                # check if there is no possible action
                # (all remaining candidates were zeroed out or never existed)
                if min_value >= 0:
                    search_action = False
                else:
                    # create a submatrix of priority_values with only the columns corresponding to
                    # the activities of the resource considered
                    restricted_priority_values = priority_values[:, activities_of_resource]
                    _, active_activity = \
                        np.where(restricted_priority_values == min_value)
                    # randomly select the active action if multiple have the same lowest value
                    i = self.np_random.choice(active_activity)
                    # Buffers this candidate activity drains; all must be
                    # non-empty for the activity to be allowed.
                    buffers_with_active_action = np.where(restricted_priority_values[:, i] < 0)
                    if all(state[buffers_with_active_action] > 0):
                        action[activities_of_resource[i]] = 1
                        search_action = False
                    else:
                        # at least one buffer on which the activity works is empty and thus
                        # such activity is prohibited; zero its column so the
                        # next iteration picks a different candidate
                        priority_values[:, activities_of_resource[i]] = 0
        return action
class PriorityState(CustomParametersPriorityAgent):
    def __init__(self, env: crw.ControlledRandomWalk, name: str = "PriorityState",
                 agent_seed: Optional[int] = None) -> None:
        """
        Non-idling heuristic that gives priority to activities draining the
        longest buffers.

        :param env: the environment to stepped through.
        :param name: Agent identifier.
        :param agent_seed: Agent random seed.
        :return: None.
        """
        super().__init__(
            env=env,
            state_option=True,
            cost_option=False,
            rate_option=False,
            name=name,
            agent_seed=agent_seed)
class PriorityCost(CustomParametersPriorityAgent):
    def __init__(self, env: crw.ControlledRandomWalk, name: str = "PriorityCost",
                 agent_seed: Optional[int] = None) -> None:
        """
        Non-idling heuristic that gives priority to activities draining the
        buffers with the highest holding cost.

        :param env: the environment to stepped through.
        :param name: Agent identifier.
        :param agent_seed: Agent random seed.
        :return: None.
        """
        super().__init__(
            env=env,
            state_option=False,
            cost_option=True,
            rate_option=False,
            name=name,
            agent_seed=agent_seed)
class PriorityRate(CustomParametersPriorityAgent):
    def __init__(self, env: crw.ControlledRandomWalk, name: str = "PriorityRate",
                 agent_seed: Optional[int] = None) -> None:
        """
        Non-idling heuristic that gives priority to the slowest activities
        (lower processing rate first).

        :param env: the environment to stepped through.
        :param name: Agent identifier.
        :param agent_seed: Agent random seed.
        :return: None.
        """
        super().__init__(
            env=env,
            state_option=False,
            cost_option=False,
            rate_option=True,
            name=name,
            agent_seed=agent_seed)
class PriorityStateCost(CustomParametersPriorityAgent):
    def __init__(self, env: crw.ControlledRandomWalk, name: str = "PriorityStateCost",
                 agent_seed: Optional[int] = None) -> None:
        """
        Non-idling heuristic that gives priority to activities draining the
        buffers with the highest (length * cost) product.

        :param env: the environment to stepped through.
        :param name: Agent identifier.
        :param agent_seed: Agent random seed.
        :return: None.
        """
        super().__init__(
            env=env,
            state_option=True,
            cost_option=True,
            rate_option=False,
            name=name,
            agent_seed=agent_seed)
class PriorityStateRate(CustomParametersPriorityAgent):
    def __init__(self, env: crw.ControlledRandomWalk, name: str = "PriorityStateRate",
                 agent_seed: Optional[int] = None) -> None:
        """
        Non-idling heuristic that gives priority to activities with the highest
        (buffer state / activity rate) ratio.

        :param env: the environment to stepped through.
        :param name: Agent identifier.
        :param agent_seed: Agent random seed.
        :return: None.
        """
        super().__init__(
            env=env,
            state_option=True,
            cost_option=False,
            rate_option=True,
            name=name,
            agent_seed=agent_seed)
class PriorityCostRate(CustomParametersPriorityAgent):
    def __init__(self, env: crw.ControlledRandomWalk, name: str = "PriorityCostRate",
                 agent_seed: Optional[int] = None) -> None:
        """
        Non-idling heuristic that gives priority to activities with the highest
        (buffer cost / activity rate) ratio.

        :param env: the environment to stepped through.
        :param name: Agent identifier.
        :param agent_seed: Agent random seed.
        :return: None.
        """
        super().__init__(
            env=env,
            state_option=False,
            cost_option=True,
            rate_option=True,
            name=name,
            agent_seed=agent_seed)
class PriorityStateCostRate(CustomParametersPriorityAgent):
    def __init__(self, env: crw.ControlledRandomWalk, name: str = "PriorityStateCostRate",
                 agent_seed: Optional[int] = None) -> None:
        """
        Non-idling heuristic that gives priority to activities with the highest
        (buffer length * buffer cost / activity rate) value.

        :param env: the environment to stepped through.
        :param name: Agent identifier.
        :param agent_seed: Agent random seed.
        :return: None.
        """
        super().__init__(
            env=env,
            state_option=True,
            cost_option=True,
            rate_option=True,
            name=name,
            agent_seed=agent_seed)
| 45.231481 | 99 | 0.65087 |
3d2dc9252507b36b016497d0088080342017edfc
| 3,649 |
py
|
Python
|
S4/S4 Library/simulation/narrative/narrative_open_street_director_mixin.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | 1 |
2021-05-20T19:33:37.000Z
|
2021-05-20T19:33:37.000Z
|
S4/S4 Library/simulation/narrative/narrative_open_street_director_mixin.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
S4/S4 Library/simulation/narrative/narrative_open_street_director_mixin.py
|
NeonOcean/Environment
|
ca658cf66e8fd6866c22a4a0136d415705b36d26
|
[
"CC-BY-4.0"
] | null | null | null |
from event_testing.test_events import TestEvent
from sims4.resources import Types
from sims4.tuning.tunable import TunableMapping, TunableReference, TunableSet
import services
NARRATIVE_LAYERS_TOKEN = 'narrative_layers'
class NarrativeOpenStreetDirectorMixin:
    # Mixin for open street directors that toggles conditional object layers
    # on and off as narratives become active/inactive.

    INSTANCE_TUNABLES = {'narrative_object_layers': TunableMapping(description='\n        If defined for a narrative, associated conditional layers will be\n        activated while that narrative is active.\n        \n        The layers should be exclusively owned by the narrative system\n        and should not be toggled on/off by any other means. \n        ', key_type=TunableReference(manager=services.get_instance_manager(Types.NARRATIVE), pack_safe=True), value_type=TunableSet(description='\n        List of conditional layers that should be active while the\n        specified narrative is active.\n        ', tunable=TunableReference(manager=services.get_instance_manager(Types.CONDITIONAL_LAYER), pack_safe=True)))}

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Conditional layers currently active on behalf of narratives.
        self._narrative_layers = set()

    def on_startup(self):
        super().on_startup()
        # Sync layers with the narratives that are already active, then listen
        # for subsequent narrative changes.
        self._handle_narratives_updated(from_startup=True)
        services.get_event_manager().register_single_event(self, TestEvent.NarrativesUpdated)

    def on_shutdown(self):
        services.get_event_manager().unregister_single_event(self, TestEvent.NarrativesUpdated)
        super().on_shutdown()

    def _load_custom_open_street_director(self, street_director_proto, reader):
        # Restore the set of narrative-driven layers from save data.
        self._narrative_layers.clear()
        if reader is not None:
            layer_tuning_mgr = services.get_instance_manager(Types.CONDITIONAL_LAYER)
            for layer_guid in reader.read_uint64s(NARRATIVE_LAYERS_TOKEN, ()):
                layer = layer_tuning_mgr.get(layer_guid)
                # Bug fix: the None check must come before dereferencing the
                # instance. The original code read layer.client_only first,
                # which raised AttributeError whenever a saved GUID no longer
                # resolved to a tuned layer (e.g. tuning removed between saves).
                if layer is None:
                    continue
                if layer.client_only:
                    continue
                self._narrative_layers.add(layer)
        super()._load_custom_open_street_director(street_director_proto, reader)

    def _save_custom_open_street_director(self, street_director_proto, writer):
        # Persist only server-side layers; client-only layers are rebuilt from
        # tuning and never saved.
        writer.write_uint64s(NARRATIVE_LAYERS_TOKEN, tuple(layer.guid64 for layer in self._narrative_layers if not layer.client_only))
        super()._save_custom_open_street_director(street_director_proto, writer)

    def handle_event(self, sim_info, event, resolver):
        if event == TestEvent.NarrativesUpdated:
            self._handle_narratives_updated()

    def _handle_narratives_updated(self, from_startup=False):
        # Diff the layers required by the currently active narratives against
        # the layers we have active, then tear down / bring up the difference.
        required_layers = set()
        active_narratives = services.narrative_service().active_narratives
        for narrative in (n for n in self.narrative_object_layers if n in active_narratives):
            required_layers.update(self.narrative_object_layers[narrative])
        current_layers = set(self._narrative_layers)
        shut_down_layers = current_layers - required_layers
        start_up_layers = required_layers - current_layers
        # On startup, load everything immediately; afterwards, stream objects
        # in gradually to avoid a hitch.
        load_layer_func = self.load_layer_immediately if from_startup else self.load_layer_gradually
        for layer in shut_down_layers:
            self.remove_layer_objects(layer)
        self._narrative_layers.update(start_up_layers)
        for layer in start_up_layers:
            load_layer_func(layer)

    def on_layer_objects_destroyed(self, conditional_layer):
        super().on_layer_objects_destroyed(conditional_layer)
        # Layer teardown completed; stop tracking it.
        if conditional_layer in self._narrative_layers:
            self._narrative_layers.remove(conditional_layer)
| 58.854839 | 774 | 0.724856 |
524fa349ae6026b924592e1be8ef9668462439ae
| 2,199 |
py
|
Python
|
fw_manager/migrations/0001_initial.py
|
monstrenyatko/butler-api
|
842cf16212ba9fdb6943b5ecd488bf0ca57acf84
|
[
"MIT"
] | null | null | null |
fw_manager/migrations/0001_initial.py
|
monstrenyatko/butler-api
|
842cf16212ba9fdb6943b5ecd488bf0ca57acf84
|
[
"MIT"
] | null | null | null |
fw_manager/migrations/0001_initial.py
|
monstrenyatko/butler-api
|
842cf16212ba9fdb6943b5ecd488bf0ca57acf84
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-20 02:23
from __future__ import unicode_literals
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import fw_manager.models
class Migration(migrations.Migration):
    # Initial schema for the firmware manager app, auto-generated by Django
    # 1.11 `makemigrations`. Do not hand-edit field definitions; create a new
    # migration instead.

    initial = True

    dependencies = [
        # Needed because FirmwareAssignmentModel references
        # settings.AUTH_USER_MODEL.
        ('auth', '0008_alter_user_username_max_length'),
    ]

    operations = [
        migrations.CreateModel(
            # One-to-one mapping from a user to the firmware assigned to them;
            # the user FK doubles as the primary key.
            name='FirmwareAssignmentModel',
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='firmware', serialize=False, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'firmware assignment',
                'verbose_name_plural': 'firmware assignments',
                'db_table': 'firmware_assignment',
            },
        ),
        migrations.CreateModel(
            # A firmware image: unique name, target hardware, uploaded binary.
            name='FirmwareModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50, unique=True, validators=[django.core.validators.RegexValidator('^[0-9a-zA-Z\\-_]*$', 'Only Alphanumeric characters, Hyphen and Underscore symbols are allowed')])),
                ('hardware', models.CharField(choices=[('ESP8266-4MB', 'ESP8266 with 4MB flash')], max_length=50)),
                ('description', models.TextField(blank=True, max_length=250)),
                ('file', models.FileField(upload_to=fw_manager.models.get_firmware_file_name)),
                ('upload_date', models.DateTimeField(auto_now=True)),
            ],
            options={
                'verbose_name': 'firmware',
                'verbose_name_plural': 'firmwares',
                'db_table': 'firmware',
            },
        ),
        migrations.AddField(
            # PROTECT: a firmware cannot be deleted while assignments point at it.
            model_name='firmwareassignmentmodel',
            name='value',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assignments', to='fw_manager.FirmwareModel'),
        ),
    ]
| 40.722222 | 220 | 0.615734 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.