# --- Extraction artifact: dataset table-header residue, not Python code ---
# id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1 value
# ---|---|---|
# 11265677 |
import time
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from django.test import tag
from django.urls import reverse
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.ui import WebDriverWait
from course_flow.models import (
Activity,
Course,
Discipline,
Favourite,
ObjectPermission,
Outcome,
OutcomeHorizontalLink,
OutcomeNode,
OutcomeOutcome,
OutcomeWorkflow,
Program,
Project,
Week,
Workflow,
WorkflowProject,
)
from course_flow.utils import get_model_from_str
from .utils import get_author, login
timeout = 10
def action_hover_click(selenium, hover_item, click_item):
    """Build (but do not perform) an action chain that moves the mouse
    over ``hover_item`` and then clicks ``click_item``.

    Returns the ActionChains object; the caller invokes ``.perform()``.
    """
    chain = ActionChains(selenium)
    chain.move_to_element(hover_item)
    chain.click(click_item)
    return chain
@tag("selenium")
class SeleniumRegistrationTestCase(StaticLiveServerTestCase):
    """End-to-end test of the user registration flow."""

    def setUp(self):
        # Use an explicit chromedriver path when one is configured (e.g.
        # in CI); otherwise rely on chromedriver being on PATH.
        if settings.CHROMEDRIVER_PATH is not None:
            self.selenium = webdriver.Chrome(settings.CHROMEDRIVER_PATH)
        else:
            self.selenium = webdriver.Chrome()
        super().setUp()

    def tearDown(self):
        # Quit the browser before standard Django live-server teardown.
        self.selenium.quit()
        super().tearDown()

    def test_register_user(self):
        """Fill in the registration form and verify redirect to /home/."""
        selenium = self.selenium
        selenium.get(self.live_server_url + "/register/")
        first_name = selenium.find_element_by_id("id_first_name")
        last_name = selenium.find_element_by_id("id_last_name")
        username = selenium.find_element_by_id("id_username")
        email = selenium.find_element_by_id("id_email")
        password1 = selenium.find_element_by_id("id_password1")
        password2 = selenium.find_element_by_id("id_password2")
        username_text = "test_user1"
        password_text = "<PASSWORD>"
        first_name.send_keys("test")
        last_name.send_keys("user")
        username.send_keys(username_text)
        email.send_keys("<EMAIL>")
        password1.send_keys(password_text)
        password2.send_keys(password_text)
        selenium.find_element_by_id("register-button").click()
        # Successful registration should land the user on the home page.
        self.assertEqual(self.live_server_url + "/home/", selenium.current_url)
class SeleniumWorkflowsTestCase(StaticLiveServerTestCase):
def setUp(self):
chrome_options = webdriver.chrome.options.Options()
if settings.CHROMEDRIVER_PATH is not None:
self.selenium = webdriver.Chrome(settings.CHROMEDRIVER_PATH)
else:
self.selenium = webdriver.Chrome()
super(SeleniumWorkflowsTestCase, self).setUp()
selenium = self.selenium
selenium.maximize_window()
self.user = login(self)
selenium.get(self.live_server_url + "/home/")
username = selenium.find_element_by_id("id_username")
password = selenium.find_element_by_id("id_password")
username.send_keys("testuser1")
password.send_keys("<PASSWORD>")
selenium.find_element_by_css_selector("button[type=Submit]").click()
def tearDown(self):
self.selenium.quit()
super(SeleniumWorkflowsTestCase, self).tearDown()
    def test_create_project_and_workflows(self):
        """Create a project and templates through the UI, then for each
        workflow type create a workflow inside the project and exercise
        its edit link, duplication, and deletion."""
        selenium = self.selenium
        wait = WebDriverWait(selenium, timeout=10)
        selenium.get(self.live_server_url + "/home/")
        home = selenium.current_url
        # Create a project
        selenium.get(self.live_server_url + "/myprojects/")
        selenium.find_element_by_css_selector(
            ".section-project .menu-create"
        ).click()
        selenium.find_element_by_css_selector(
            ".section-project .create-dropdown.active a:first-child"
        ).click()
        title = selenium.find_element_by_id("id_title")
        description = selenium.find_element_by_id("id_description")
        project_title = "test project title"
        project_description = "test project description"
        title.send_keys(project_title)
        description.send_keys(project_description)
        selenium.find_element_by_id("save-button").click()
        # Saving redirects to the new project's page; verify title and
        # description are rendered.
        assert (
            project_title in selenium.find_element_by_id("workflowtitle").text
        )
        assert (
            project_description
            in selenium.find_element_by_css_selector(
                ".project-header .workflow-description"
            ).text
        )
        project_url = selenium.current_url
        # Create templates
        selenium.get(self.live_server_url + "/mytemplates/")
        templates = selenium.current_url
        for template_type in ["activity", "course"]:
            # Course templates live on the second tab.
            if template_type == "course":
                selenium.find_element_by_css_selector(
                    'a[href="#tabs-1"]'
                ).click()
            selenium.find_element_by_css_selector(
                ".section-" + template_type + " .menu-create"
            ).click()
            selenium.find_element_by_css_selector(
                ".section-"
                + template_type
                + " .create-dropdown.active a:first-child"
            ).click()
            title = selenium.find_element_by_id("id_title")
            description = selenium.find_element_by_id("id_description")
            project_title = "test project title"
            project_description = "test project description"
            title.send_keys(project_title)
            description.send_keys(project_description)
            selenium.find_element_by_id("save-button").click()
            assert (
                project_title
                in selenium.find_element_by_id("workflowtitle").text
            )
            assert (
                project_description
                in selenium.find_element_by_css_selector(
                    ".workflow-header .workflow-description"
                ).text
            )
            selenium.get(templates)
        selenium.get(project_url)
        for i, workflow_type in enumerate(["activity", "course", "program"]):
            # Create the workflow
            selenium.find_element_by_css_selector(
                'a[href="#tabs-' + str(i + 1) + '"]'
            ).click()
            selenium.find_element_by_css_selector(
                ".section-" + workflow_type + " .menu-create"
            ).click()
            selenium.find_element_by_css_selector(
                ".section-"
                + workflow_type
                + " .create-dropdown.active a:first-child"
            ).click()
            title = selenium.find_element_by_id("id_title")
            description = selenium.find_element_by_id("id_description")
            project_title = "test " + workflow_type + " title"
            project_description = "test " + workflow_type + " description"
            title.send_keys(project_title)
            description.send_keys(project_description)
            selenium.find_element_by_id("save-button").click()
            assert (
                project_title
                in selenium.find_element_by_class_name("workflow-title").text
            )
            assert (
                project_description
                in selenium.find_element_by_css_selector(
                    ".workflow-header .workflow-description"
                ).text
            )
            selenium.get(project_url)
            # edit link
            selenium.find_element_by_css_selector(
                ".workflow-for-menu." + workflow_type + " .workflow-title"
            ).click()
            assert (
                project_title
                in selenium.find_element_by_id("workflowtitle").text
            )
            selenium.get(project_url)
            # Duplicate the workflow and wait until a second title of this
            # type appears in the list.
            selenium.find_element_by_css_selector(
                ".workflow-for-menu."
                + workflow_type
                + " .workflow-duplicate-button"
            ).click()
            wait.until(
                lambda driver: len(
                    driver.find_elements_by_css_selector(
                        ".section-" + workflow_type + " .workflow-title"
                    )
                )
                > 1
            )
            assert (
                project_title
                in selenium.find_elements_by_css_selector(
                    ".section-workflow .workflow-title"
                )[1].text
            )
            # Exactly one copy (it records its parent workflow).
            self.assertEqual(
                get_model_from_str(workflow_type)
                .objects.exclude(parent_workflow=None)
                .count(),
                1,
            )
            # Delete the duplicate; deletion pops a confirmation alert.
            selenium.find_elements_by_css_selector(
                ".section-workflow ."
                + workflow_type
                + " .workflow-delete-button"
            )[0].click()
            alert = wait.until(expected_conditions.alert_is_present())
            selenium.switch_to.alert.accept()
            time.sleep(2)
            # Only the original non-strategy workflow should remain.
            self.assertEqual(
                get_model_from_str(workflow_type)
                .objects.filter(is_strategy=False)
                .count(),
                1,
            )
def test_edit_project_details(self):
selenium = self.selenium
wait = WebDriverWait(selenium, timeout=10)
project = Project.objects.create(author=self.user)
discipline = Discipline.objects.create(title="discipline")
discipline2 = Discipline.objects.create(title="discipline2")
discipline3 = Discipline.objects.create(title="discipline3")
selenium.get(
self.live_server_url
+ reverse("course_flow:project-update", args=[project.pk])
)
time.sleep(1)
selenium.find_element_by_id("edit-project-button").click()
selenium.find_element_by_id("project-title-input").send_keys(
"new title"
)
selenium.find_element_by_id("project-description-input").send_keys(
"new description"
)
selenium.find_elements_by_css_selector("#disciplines_all option")[
0
].click()
selenium.find_element_by_css_selector("#add-discipline").click()
selenium.find_element_by_id("project-publish-input").click()
alert = wait.until(expected_conditions.alert_is_present())
selenium.switch_to.alert.accept()
time.sleep(1)
selenium.find_element_by_id("save-changes").click()
assert (
"new title"
in selenium.find_element_by_css_selector("#workflowtitle div").text
)
assert (
"new description"
in selenium.find_element_by_css_selector(
".project-header .workflow-description"
).text
)
project = Project.objects.first()
self.assertEqual(project.title, "new title")
self.assertEqual(project.description, "new description")
self.assertEqual(project.published, True)
self.assertEqual(project.disciplines.first(), discipline)
    def test_import_favourite(self):
        """Favourite another user's published project and workflows, then
        import them three ways: the whole project at once, workflow by
        workflow from the favourites tabs, and through a project's own
        import menu."""
        selenium = self.selenium
        wait = WebDriverWait(selenium, timeout=10)
        author = get_author()
        # Another author's published project containing one workflow of
        # each type.
        project = Project.objects.create(
            author=author, published=True, title="published project"
        )
        WorkflowProject.objects.create(
            workflow=Activity.objects.create(author=author, published=True),
            project=project,
        )
        WorkflowProject.objects.create(
            workflow=Course.objects.create(author=author, published=True),
            project=project,
        )
        WorkflowProject.objects.create(
            workflow=Program.objects.create(author=author, published=True),
            project=project,
        )
        # The test user favourites the project and each workflow.
        Favourite.objects.create(user=self.user, content_object=project)
        Favourite.objects.create(
            user=self.user, content_object=Activity.objects.first()
        )
        Favourite.objects.create(
            user=self.user, content_object=Course.objects.first()
        )
        Favourite.objects.create(
            user=self.user, content_object=Program.objects.first()
        )
        # View the favourites
        selenium.get(
            self.live_server_url + reverse("course_flow:my-favourites")
        )
        favourites = selenium.current_url
        # Tab 0 ("all") shows all four favourites; each typed tab shows
        # exactly one.
        assert (
            len(
                selenium.find_elements_by_css_selector(
                    "#tabs-0 .workflow-title"
                )
            )
            == 4
        )
        assert (
            len(
                selenium.find_elements_by_css_selector(
                    "#tabs-1 .workflow-title"
                )
            )
            == 1
        )
        assert (
            len(
                selenium.find_elements_by_css_selector(
                    "#tabs-2 .workflow-title"
                )
            )
            == 1
        )
        assert (
            len(
                selenium.find_elements_by_css_selector(
                    "#tabs-3 .workflow-title"
                )
            )
            == 1
        )
        assert (
            len(
                selenium.find_elements_by_css_selector(
                    "#tabs-4 .workflow-title"
                )
            )
            == 1
        )
        # Import the project
        selenium.find_element_by_css_selector(
            "#tabs-0 .project .workflow-duplicate-button"
        ).click()
        time.sleep(2)
        new_project = Project.objects.get(parent_project=project)
        # Each contained workflow should have been copied into the new
        # project, authored by the test user and linked to its parent.
        for workflow_type in ["activity", "course", "program"]:
            assert WorkflowProject.objects.get(
                workflow=get_model_from_str(workflow_type).objects.get(
                    author=self.user,
                    parent_workflow=get_model_from_str(
                        workflow_type
                    ).objects.get(author=author),
                ),
                project=new_project,
            )
        # Create a project, then import the favourites one at a time
        my_project1 = Project.objects.create(author=self.user)
        selenium.find_element_by_css_selector("a[href='#tabs-2']").click()
        selenium.find_element_by_css_selector(
            "#tabs-2 .workflow-duplicate-button"
        ).click()
        time.sleep(0.5)
        # Pick the target project (index 1) in the popup's project list.
        selenium.find_elements_by_css_selector(
            "#popup-container #tabs-0 .workflow-for-menu"
        )[1].click()
        selenium.find_element_by_css_selector("#set-linked-workflow").click()
        time.sleep(1)
        selenium.find_element_by_css_selector("a[href='#tabs-3']").click()
        selenium.find_element_by_css_selector(
            "#tabs-3 .workflow-duplicate-button"
        ).click()
        time.sleep(0.5)
        selenium.find_elements_by_css_selector(
            "#popup-container #tabs-0 .workflow-for-menu"
        )[1].click()
        selenium.find_element_by_css_selector("#set-linked-workflow").click()
        time.sleep(1)
        selenium.find_element_by_css_selector("a[href='#tabs-4']").click()
        selenium.find_element_by_css_selector(
            "#tabs-4 .workflow-duplicate-button"
        ).click()
        time.sleep(0.5)
        selenium.find_elements_by_css_selector(
            "#popup-container #tabs-0 .workflow-for-menu"
        )[1].click()
        selenium.find_element_by_css_selector("#set-linked-workflow").click()
        time.sleep(1)
        # The most recent copy of each workflow should now live in
        # my_project1.
        for workflow_type in ["activity", "course", "program"]:
            assert WorkflowProject.objects.get(
                workflow=get_model_from_str(workflow_type)
                .objects.filter(
                    author=self.user,
                    parent_workflow=get_model_from_str(
                        workflow_type
                    ).objects.get(author=author),
                )
                .last(),
                project=my_project1,
            )
        # Import the workflows from a project rather than from the favourites menu
        my_project = Project.objects.create(author=self.user)
        selenium.get(
            self.live_server_url
            + reverse("course_flow:project-update", args=[my_project.pk])
        )
        selenium.find_element_by_css_selector("#tabs-0 .menu-create").click()
        selenium.find_element_by_css_selector(
            ".create-dropdown.active a:last-child"
        ).click()
        time.sleep(0.5)
        # The popup's tab 2 lists the user's favourites; import each of
        # the three favourite workflows in turn.
        selenium.find_element_by_css_selector(
            "#popup-container a[href='#tabs-2']"
        ).click()
        selenium.find_elements_by_css_selector(
            "#popup-container #tabs-2 .workflow-for-menu"
        )[0].click()
        selenium.find_element_by_css_selector("#set-linked-workflow").click()
        time.sleep(1)
        selenium.find_element_by_css_selector("#tabs-0 .menu-create").click()
        selenium.find_element_by_css_selector(
            ".create-dropdown.active a:last-child"
        ).click()
        time.sleep(0.5)
        selenium.find_element_by_css_selector(
            "#popup-container a[href='#tabs-2']"
        ).click()
        selenium.find_elements_by_css_selector(
            "#popup-container #tabs-2 .workflow-for-menu"
        )[1].click()
        selenium.find_element_by_css_selector("#set-linked-workflow").click()
        time.sleep(1)
        selenium.find_element_by_css_selector("#tabs-0 .menu-create").click()
        selenium.find_element_by_css_selector(
            ".create-dropdown.active a:last-child"
        ).click()
        time.sleep(0.5)
        selenium.find_element_by_css_selector(
            "#popup-container a[href='#tabs-2']"
        ).click()
        selenium.find_elements_by_css_selector(
            "#popup-container #tabs-2 .workflow-for-menu"
        )[2].click()
        selenium.find_element_by_css_selector("#set-linked-workflow").click()
        time.sleep(1)
        # The project page should now show all three imported workflows.
        assert (
            len(
                selenium.find_elements_by_css_selector(
                    "#tabs-0 .workflow-title"
                )
            )
            == 3
        )
        assert (
            len(
                selenium.find_elements_by_css_selector(
                    "#tabs-1 .workflow-title"
                )
            )
            == 1
        )
        assert (
            len(
                selenium.find_elements_by_css_selector(
                    "#tabs-2 .workflow-title"
                )
            )
            == 1
        )
        assert (
            len(
                selenium.find_elements_by_css_selector(
                    "#tabs-3 .workflow-title"
                )
            )
            == 1
        )
        for workflow_type in ["activity", "course", "program"]:
            assert WorkflowProject.objects.get(
                workflow=get_model_from_str(workflow_type)
                .objects.filter(
                    author=self.user,
                    parent_workflow=get_model_from_str(
                        workflow_type
                    ).objects.get(author=author),
                )
                .last(),
                project=my_project,
            )
    def test_workflow_read_only(self):
        """A workflow owned by another user renders read-only: no action
        buttons, and clicking an object does not open the edit panel."""
        selenium = self.selenium
        wait = WebDriverWait(selenium, timeout=10)
        author = get_author()
        project = Project.objects.create(author=author, published=True)
        for workflow_type in ["activity", "course", "program"]:
            workflow = get_model_from_str(workflow_type).objects.create(
                author=author
            )
            WorkflowProject.objects.create(workflow=workflow, project=project)
            workflow.weeks.first().nodes.create(
                author=self.user, column=workflow.columns.first()
            )
            selenium.get(
                self.live_server_url
                + reverse("course_flow:workflow-update", args=[workflow.pk])
            )
            time.sleep(2)
            # No edit affordances should be rendered for a non-owner.
            self.assertEqual(
                len(selenium.find_elements_by_css_selector(".action-button")),
                0,
            )
            selenium.find_elements_by_css_selector(".week")[0].click()
            time.sleep(0.3)
            # Clicking a week must not open the right-hand edit panel.
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        "#edit-menu .right-panel-inner"
                    )
                ),
                0,
            )
    def test_workflow_editing(self):
        """For each workflow type: insert a sibling column, week, and node
        through the hover menus, then delete one of each, checking the
        rendered counts after every step."""
        selenium = self.selenium
        wait = WebDriverWait(selenium, timeout=10)
        project = Project.objects.create(author=self.user)
        for workflow_type in ["activity", "course", "program"]:
            workflow = get_model_from_str(workflow_type).objects.create(
                author=self.user
            )
            WorkflowProject.objects.create(workflow=workflow, project=project)
            workflow.weeks.first().nodes.create(
                author=self.user, column=workflow.columns.first()
            )
            selenium.get(
                self.live_server_url
                + reverse("course_flow:workflow-update", args=[workflow.pk])
            )
            num_columns = workflow.columns.all().count()
            num_weeks = workflow.weeks.all().count()
            num_nodes = 1
            # Baseline: the page renders exactly the DB counts.
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .column"
                    )
                ),
                num_columns,
            )
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .week"
                    )
                ),
                num_weeks,
            )
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .node"
                    )
                ),
                num_nodes,
            )
            # Insert a sibling column, week, and node via hover buttons.
            hover_item = selenium.find_element_by_css_selector(
                ".workflow-details .column"
            )
            click_item = selenium.find_element_by_css_selector(
                ".column .insert-sibling-button img"
            )
            action_hover_click(selenium, hover_item, click_item).perform()
            hover_item = selenium.find_element_by_css_selector(
                ".workflow-details .week"
            )
            click_item = selenium.find_element_by_css_selector(
                ".week .insert-sibling-button img"
            )
            # Close the sidebar so it does not obscure the week's buttons.
            selenium.find_element_by_css_selector(
                "#sidebar .window-close-button"
            ).click()
            time.sleep(0.5)
            action_hover_click(selenium, hover_item, click_item).perform()
            hover_item = selenium.find_element_by_css_selector(
                ".workflow-details .node"
            )
            click_item = selenium.find_element_by_css_selector(
                ".node .insert-sibling-button img"
            )
            action_hover_click(selenium, hover_item, click_item).perform()
            time.sleep(2)
            # One more of each should now be rendered.
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .column"
                    )
                ),
                num_columns + 1,
            )
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .week"
                    )
                ),
                num_weeks + 1,
            )
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .node"
                    )
                ),
                num_nodes + 1,
            )
            # Deleting
            hover_item = selenium.find_element_by_css_selector(
                ".workflow-details .node"
            )
            click_item = selenium.find_element_by_css_selector(
                ".node .delete-self-button img"
            )
            action_hover_click(selenium, hover_item, click_item).perform()
            # Each deletion pops a confirmation alert to accept.
            alert = wait.until(expected_conditions.alert_is_present())
            selenium.switch_to.alert.accept()
            time.sleep(1)
            hover_item = selenium.find_element_by_css_selector(
                ".workflow-details .column"
            )
            click_item = selenium.find_element_by_css_selector(
                ".column .delete-self-button img"
            )
            action_hover_click(selenium, hover_item, click_item).perform()
            alert = wait.until(expected_conditions.alert_is_present())
            selenium.switch_to.alert.accept()
            time.sleep(1)
            hover_item = selenium.find_element_by_css_selector(
                ".workflow-details .week"
            )
            click_item = selenium.find_element_by_css_selector(
                ".week .delete-self-button img"
            )
            # selenium.find_element_by_css_selector(
            # "#sidebar .window-close-button"
            # ).click()
            time.sleep(0.5)
            action_hover_click(selenium, hover_item, click_item).perform()
            alert = wait.until(expected_conditions.alert_is_present())
            selenium.switch_to.alert.accept()
            time.sleep(1)
            # Back to baseline counts; the only node was deleted.
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .column"
                    )
                ),
                num_columns,
            )
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .week"
                    )
                ),
                num_weeks,
            )
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .node"
                    )
                ),
                0,
            )
    def test_workflow_duplication(self):
        """For each workflow type, duplicate a column, week, and node via
        the hover menus and check the rendered counts afterwards.

        Duplicating the week also duplicates its node, hence the final
        node count of ``num_nodes * 2 + 1``.
        """
        selenium = self.selenium
        wait = WebDriverWait(selenium, timeout=10)
        project = Project.objects.create(author=self.user)
        for workflow_type in ["activity", "course", "program"]:
            workflow = get_model_from_str(workflow_type).objects.create(
                author=self.user
            )
            WorkflowProject.objects.create(workflow=workflow, project=project)
            workflow.weeks.first().nodes.create(
                author=self.user, column=workflow.columns.first()
            )
            selenium.get(
                self.live_server_url
                + reverse("course_flow:workflow-update", args=[workflow.pk])
            )
            num_columns = workflow.columns.all().count()
            num_weeks = workflow.weeks.all().count()
            num_nodes = 1
            # Baseline: the page renders exactly the DB counts.
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .column"
                    )
                ),
                num_columns,
            )
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .week"
                    )
                ),
                num_weeks,
            )
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .node"
                    )
                ),
                num_nodes,
            )
            # Duplicate the first column.
            hover_item = selenium.find_element_by_css_selector(
                ".workflow-details .column"
            )
            click_item = selenium.find_element_by_css_selector(
                ".column .duplicate-self-button img"
            )
            action_hover_click(selenium, hover_item, click_item).perform()
            time.sleep(1)
            # Duplicate the first week (its own button, not a node's).
            hover_item = selenium.find_element_by_css_selector(
                ".workflow-details .week"
            )
            click_item = selenium.find_element_by_css_selector(
                ".week > .mouseover-container-bypass > .mouseover-actions > .duplicate-self-button img"
            )
            # Close the sidebar so it does not obscure the week's buttons.
            selenium.find_element_by_css_selector(
                "#sidebar .window-close-button"
            ).click()
            time.sleep(0.5)
            action_hover_click(selenium, hover_item, click_item).perform()
            time.sleep(1)
            # Duplicate the first node.
            hover_item = selenium.find_element_by_css_selector(
                ".workflow-details .node"
            )
            click_item = selenium.find_element_by_css_selector(
                ".node .duplicate-self-button img"
            )
            action_hover_click(selenium, hover_item, click_item).perform()
            time.sleep(1)
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .column"
                    )
                ),
                num_columns + 1,
            )
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .week"
                    )
                ),
                num_weeks + 1,
            )
            self.assertEqual(
                len(
                    selenium.find_elements_by_css_selector(
                        ".workflow-details .node"
                    )
                ),
                num_nodes * 2 + 1,
            )
    def test_outcome_editing(self):
        """Exercise outcome editing in a course: insert/duplicate/delete
        child outcomes under a base outcome, then add and manipulate
        top-level outcomes, checking DOM and DB state at each step."""
        selenium = self.selenium
        wait = WebDriverWait(selenium, timeout=10)
        project = Project.objects.create(author=self.user)
        workflow = Course.objects.create(author=self.user)
        WorkflowProject.objects.create(workflow=workflow, project=project)
        base_outcome = Outcome.objects.create(author=self.user)
        OutcomeWorkflow.objects.create(outcome=base_outcome, workflow=workflow)
        selenium.get(
            self.live_server_url
            + reverse("course_flow:workflow-update", args=[workflow.pk])
        )
        # Switch to the outcome editing view.
        selenium.find_element_by_css_selector("a[href='#outcome-bar']").click()
        selenium.find_element_by_css_selector("#edit-outcomes-button").click()
        time.sleep(1)
        # Insert a child under the base outcome.
        hover_item = selenium.find_element_by_css_selector(
            ".workflow-details .outcome"
        )
        click_item = selenium.find_element_by_css_selector(
            ".outcome .insert-child-button img"
        )
        action_hover_click(selenium, hover_item, click_item).perform()
        time.sleep(1)
        self.assertEqual(
            len(
                selenium.find_elements_by_css_selector(
                    ".workflow-details .outcome .outcome"
                )
            ),
            1,
        )
        self.assertEqual(
            OutcomeOutcome.objects.filter(parent=base_outcome).count(), 1
        )
        # Insert a sibling next to the new child.
        hover_item = selenium.find_element_by_css_selector(
            ".workflow-details .outcome .outcome"
        )
        click_item = selenium.find_element_by_css_selector(
            ".outcome .outcome .insert-sibling-button img"
        )
        action_hover_click(selenium, hover_item, click_item).perform()
        time.sleep(1)
        self.assertEqual(
            len(
                selenium.find_elements_by_css_selector(
                    ".workflow-details .outcome .outcome"
                )
            ),
            2,
        )
        self.assertEqual(
            OutcomeOutcome.objects.filter(parent=base_outcome).count(), 2
        )
        # Delete one child (pops a confirmation alert).
        hover_item = selenium.find_element_by_css_selector(
            ".workflow-details .outcome .outcome"
        )
        click_item = selenium.find_element_by_css_selector(
            ".outcome .outcome .delete-self-button img"
        )
        action_hover_click(selenium, hover_item, click_item).perform()
        alert = wait.until(expected_conditions.alert_is_present())
        selenium.switch_to.alert.accept()
        time.sleep(1)
        self.assertEqual(
            len(
                selenium.find_elements_by_css_selector(
                    ".workflow-details .outcome .outcome"
                )
            ),
            1,
        )
        self.assertEqual(
            OutcomeOutcome.objects.filter(parent=base_outcome).count(), 1
        )
        # Add a child via the "create child" button under the list.
        selenium.find_element_by_css_selector(
            ".children-block:not(:empty)+.outcome-create-child"
        ).click()
        time.sleep(1)
        self.assertEqual(
            len(
                selenium.find_elements_by_css_selector(
                    ".workflow-details .outcome .outcome"
                )
            ),
            2,
        )
        # Duplicate a child outcome.
        hover_item = selenium.find_element_by_css_selector(
            ".workflow-details .outcome .outcome"
        )
        click_item = selenium.find_element_by_css_selector(
            ".outcome .outcome .duplicate-self-button img"
        )
        action_hover_click(selenium, hover_item, click_item).perform()
        time.sleep(1)
        self.assertEqual(
            len(
                selenium.find_elements_by_css_selector(
                    ".workflow-details .outcome .outcome"
                )
            ),
            3,
        )
        self.assertEqual(
            OutcomeOutcome.objects.filter(parent=base_outcome).count(), 3
        )
        # Add a second top-level outcome.
        selenium.find_element_by_css_selector("#add-new-outcome").click()
        time.sleep(2)
        self.assertEqual(Outcome.objects.filter(depth=0).count(), 2)
        self.assertEqual(
            OutcomeWorkflow.objects.filter(workflow=workflow).count(), 2
        )
        # Insert a sibling of a top-level outcome.
        hover_item = selenium.find_element_by_css_selector(
            ".workflow-details .outcome-workflow > .outcome"
        )
        click_item = selenium.find_element_by_css_selector(
            ".workflow-details .outcome-workflow > .outcome > .mouseover-actions .insert-sibling-button img"
        )
        action_hover_click(selenium, hover_item, click_item).perform()
        time.sleep(1)
        self.assertEqual(
            len(
                selenium.find_elements_by_css_selector(
                    ".workflow-details .outcome-workflow"
                )
            ),
            3,
        )
        self.assertEqual(
            OutcomeWorkflow.objects.filter(workflow=workflow).count(), 3
        )
        # Duplicate a top-level outcome.
        hover_item = selenium.find_element_by_css_selector(
            ".workflow-details .outcome-workflow > .outcome"
        )
        click_item = selenium.find_element_by_css_selector(
            ".workflow-details .outcome-workflow > .outcome > .mouseover-actions .duplicate-self-button img"
        )
        action_hover_click(selenium, hover_item, click_item).perform()
        time.sleep(1)
        self.assertEqual(
            len(
                selenium.find_elements_by_css_selector(
                    ".workflow-details .outcome-workflow"
                )
            ),
            4,
        )
        self.assertEqual(
            OutcomeWorkflow.objects.filter(workflow=workflow).count(), 4
        )
    def test_edit_menu(self):
        # Note that we don't test ALL parts of the edit menu, and we test only for nodes. This will catch the vast majority of potential issues. Linked workflows are tested in a different test
        """For each workflow type, open a node's edit panel and verify
        title editing; for activity/course also verify the context and
        task classification dropdowns (programs render neither)."""
        selenium = self.selenium
        wait = WebDriverWait(selenium, timeout=10)
        project = Project.objects.create(author=self.user)
        for i, workflow_type in enumerate(["activity", "course", "program"]):
            workflow = get_model_from_str(workflow_type).objects.create(
                author=self.user
            )
            WorkflowProject.objects.create(workflow=workflow, project=project)
            workflow.weeks.first().nodes.create(
                author=self.user,
                column=workflow.columns.first(),
                title="test node",
                node_type=i,
            )
            selenium.get(
                self.live_server_url
                + reverse("course_flow:workflow-update", args=[workflow.pk])
            )
            # Clicking the node opens the edit panel.
            selenium.find_element_by_css_selector(
                ".workflow-details .node"
            ).click()
            time.sleep(1)
            title = selenium.find_element_by_id("title-editor")
            assert "test node" in title.get_attribute("value")
            # Retitle the node; the change auto-saves after a short delay.
            title.clear()
            title.send_keys("new title")
            time.sleep(2.5)
            assert (
                "new title"
                in selenium.find_element_by_css_selector(
                    ".workflow-details .node .node-title"
                ).text
            )
            self.assertEqual(
                workflow.weeks.first().nodes.first().title, "new title"
            )
            # Context classification: present for activity/course only.
            # Stored values are offset by 100 per workflow type.
            if i < 2:
                context = selenium.find_element_by_id("context-editor")
                context.click()
                selenium.find_elements_by_css_selector(
                    "#context-editor option"
                )[2].click()
                time.sleep(2.5)
                self.assertEqual(
                    workflow.weeks.first()
                    .nodes.first()
                    .context_classification,
                    2 + 100 * i,
                )
            else:
                self.assertEqual(
                    len(
                        selenium.find_elements_by_css_selector(
                            "#context-editor"
                        )
                    ),
                    0,
                )
            # Task classification behaves the same way.
            if i < 2:
                context = selenium.find_element_by_id("task-editor")
                context.click()
                selenium.find_elements_by_css_selector("#task-editor option")[
                    2
                ].click()
                time.sleep(2.5)
                self.assertEqual(
                    workflow.weeks.first().nodes.first().task_classification,
                    2 + 100 * i,
                )
            else:
                self.assertEqual(
                    len(
                        selenium.find_elements_by_css_selector("#task-editor")
                    ),
                    0,
                )
def test_project_return(self):
selenium = self.selenium
wait = WebDriverWait(selenium, timeout=10)
project = Project.objects.create(
author=self.user, title="project title"
)
for i, workflow_type in enumerate(["activity", "course", "program"]):
workflow = get_model_from_str(workflow_type).objects.create(
author=self.user
)
WorkflowProject.objects.create(workflow=workflow, project=project)
workflow.weeks.first().nodes.create(
author=self.user,
column=workflow.columns.first(),
title="test node",
node_type=i,
)
selenium.get(
self.live_server_url
+ reverse("course_flow:workflow-update", args=[workflow.pk])
)
selenium.find_element_by_id("project-return").click()
assert (
"project title"
in selenium.find_element_by_css_selector(
"#workflowtitle div"
).text
)
def test_strategy_convert(self):
selenium = self.selenium
wait = WebDriverWait(selenium, timeout=10)
project = Project.objects.create(
author=self.user, title="project title"
)
for i, workflow_type in enumerate(["activity", "course"]):
workflow = get_model_from_str(workflow_type).objects.create(
author=self.user
)
WorkflowProject.objects.create(workflow=workflow, project=project)
workflow.weeks.first().nodes.create(
author=self.user,
column=workflow.columns.first(),
title="test node",
node_type=i,
)
selenium.get(
self.live_server_url
+ reverse("course_flow:workflow-update", args=[workflow.pk])
)
selenium.find_element_by_css_selector(
".workflow-details .week"
).click()
time.sleep(1)
title = selenium.find_element_by_id("title-editor").send_keys(
"new strategy"
)
time.sleep(2.5)
selenium.find_element_by_id("toggle-strategy-editor").click()
time.sleep(2)
selenium.find_element_by_css_selector(
"a[href='#strategy-bar']"
).click()
assert (
"new strategy"
in selenium.find_element_by_css_selector(
".strategy-bar-strategy div"
).text
)
selenium.get(
self.live_server_url + reverse("course_flow:my-templates")
)
selenium.find_element_by_css_selector(
"a[href='#tabs-" + str(i) + "']"
).click()
selenium.find_element_by_css_selector(".workflow-title").click()
assert (
"new strategy"
in selenium.find_element_by_css_selector(
"#workflowtitle a"
).text
)
self.assertEqual(
Workflow.objects.filter(is_strategy=True).count(), 1
)
self.assertEqual(
Workflow.objects.get(is_strategy=True)
.weeks.get(is_strategy=True)
.parent_week,
workflow.weeks.first(),
)
Workflow.objects.get(is_strategy=True).delete()
def test_outcome_view(self):
selenium = self.selenium
wait = WebDriverWait(selenium, timeout=10)
project = Project.objects.create(
author=self.user, title="project title"
)
for i, workflow_type in enumerate(["activity", "course", "program"]):
workflow = get_model_from_str(workflow_type).objects.create(
author=self.user
)
WorkflowProject.objects.create(workflow=workflow, project=project)
base_outcome = Outcome.objects.create(author=self.user)
OutcomeWorkflow.objects.create(
outcome=base_outcome, workflow=workflow
)
OutcomeOutcome.objects.create(
parent=base_outcome,
child=Outcome.objects.create(author=self.user),
)
OutcomeOutcome.objects.create(
parent=base_outcome,
child=Outcome.objects.create(author=self.user),
)
workflow.weeks.first().nodes.create(
author=self.user,
column=workflow.columns.first(),
title="test node",
node_type=i,
)
workflow.weeks.first().nodes.create(
author=self.user,
column=workflow.columns.first(),
title="test node",
node_type=i,
)
selenium.get(
self.live_server_url
+ reverse("course_flow:workflow-update", args=[workflow.pk])
)
selenium.find_element_by_css_selector(".other-views").click()
selenium.find_element_by_css_selector(
"#button_outcometable"
).click()
time.sleep(1)
base_outcome_row_select = ".outcome-table > div > .outcome > .outcome-row > .outcome-cells"
outcome1_row_select = ".outcome .outcome-outcome:first-of-type .outcome > .outcome-row"
outcome2_row_select = ".outcome .outcome-outcome+.outcome-outcome .outcome > .outcome-row"
base_cell = (
base_outcome_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell"
)
base_cell2 = (
base_outcome_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell+.table-cell"
)
base_input = (
base_outcome_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell input"
)
base_input2 = (
base_outcome_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell+.table-cell input"
)
base_img = (
base_outcome_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell img"
)
base_img2 = (
base_outcome_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell+.table-cell img"
)
base_total_img = (
base_outcome_row_select
+ " .table-cell.total-cell:not(.grand-total-cell) img"
)
base_grandtotal_img = (
base_outcome_row_select + " .table-cell.grand-total-cell img"
)
base_toggle = action_hover_click(
selenium,
selenium.find_element_by_css_selector(base_cell),
selenium.find_element_by_css_selector(base_input),
)
outcome1_cell = (
outcome1_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell"
)
outcome1_cell2 = (
outcome1_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell+.table-cell"
)
outcome1_input = (
outcome1_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell input"
)
outcome1_input2 = (
outcome1_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell+.table-cell input"
)
outcome1_img = (
outcome1_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell img"
)
outcome1_img2 = (
outcome1_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell+.table-cell img"
)
outcome1_total_img = (
outcome1_row_select
+ " .table-cell.total-cell:not(.grand-total-cell) img"
)
outcome1_grandtotal_img = (
outcome1_row_select + " .table-cell.grand-total-cell img"
)
outcome1_toggle = action_hover_click(
selenium,
selenium.find_element_by_css_selector(outcome1_cell),
selenium.find_element_by_css_selector(outcome1_input),
)
outcome2_cell = (
outcome2_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell"
)
outcome2_cell2 = (
outcome2_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell+.table-cell"
)
outcome2_input = (
outcome2_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell input"
)
outcome2_input2 = (
outcome2_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell+.table-cell input"
)
outcome2_img = (
outcome2_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell img"
)
outcome2_img2 = (
outcome2_row_select
+ " .table-group:first-of-type .blank-cell+.table-cell+.table-cell img"
)
outcome2_total_img = (
outcome2_row_select
+ " .table-cell.total-cell:not(.grand-total-cell) img"
)
outcome2_grandtotal_img = (
outcome2_row_select + " .table-cell.grand-total-cell img"
)
outcome2_toggle = action_hover_click(
selenium,
selenium.find_element_by_css_selector(outcome2_cell),
selenium.find_element_by_css_selector(outcome2_input),
)
def assert_image(element_string, string):
assert string in selenium.find_element_by_css_selector(
element_string
).get_attribute("src")
def assert_no_image(element_string):
self.assertEqual(
len(
selenium.find_elements_by_css_selector(element_string)
),
0,
)
# Toggle the base outcome. Check to make sure the children and totals columns behave as expected
base_toggle.perform()
time.sleep(1)
assert_image(base_img, "solid_check")
assert_image(base_total_img, "/check")
assert_image(base_grandtotal_img, "/check")
assert_image(outcome1_img, "/solid_check")
assert_image(outcome1_total_img, "/check")
assert_image(outcome1_grandtotal_img, "/check")
assert_image(outcome2_img, "/solid_check")
assert_image(outcome2_total_img, "/check")
assert_image(outcome2_grandtotal_img, "/check")
# Toggle one of the children. We expect to lose the top outcome to partial completion
outcome1_toggle.perform()
time.sleep(1)
assert_image(base_img, "/nocheck")
assert_image(base_total_img, "/nocheck")
assert_image(base_grandtotal_img, "/nocheck")
assert_no_image(outcome1_img)
assert_no_image(outcome1_total_img)
assert_no_image(outcome1_grandtotal_img)
assert_image(outcome2_img, "/solid_check")
assert_image(outcome2_total_img, "/check")
assert_image(outcome2_grandtotal_img, "/check")
# check that re-toggling outcome 1 adds the parent
outcome1_toggle.perform()
time.sleep(1)
assert_image(base_img, "solid_check")
assert_image(base_total_img, "/check")
assert_image(base_grandtotal_img, "/check")
assert_image(outcome1_img, "/solid_check")
assert_image(outcome1_total_img, "/check")
assert_image(outcome1_grandtotal_img, "/check")
assert_image(outcome2_img, "/solid_check")
assert_image(outcome2_total_img, "/check")
assert_image(outcome2_grandtotal_img, "/check")
# check that removing the base outcome clears all
base_toggle.perform()
time.sleep(1)
assert_no_image(base_img)
assert_no_image(base_total_img)
assert_no_image(base_grandtotal_img)
assert_no_image(outcome1_img)
assert_no_image(outcome1_total_img)
assert_no_image(outcome1_grandtotal_img)
assert_no_image(outcome2_img)
assert_no_image(outcome2_total_img)
assert_no_image(outcome2_grandtotal_img)
# check completion when not all children are toggled
outcome1_toggle.perform()
time.sleep(1)
assert_image(base_img, "/nocheck")
assert_image(base_total_img, "/nocheck")
assert_image(base_grandtotal_img, "/nocheck")
assert_image(outcome1_img, "solid_check")
assert_image(outcome1_total_img, "/check")
assert_image(outcome1_grandtotal_img, "/check")
assert_no_image(outcome2_img)
assert_no_image(outcome2_total_img)
assert_no_image(outcome2_grandtotal_img)
# check completion when children are toggled but in different nodes
action_hover_click(
selenium,
selenium.find_element_by_css_selector(outcome2_cell2),
selenium.find_element_by_css_selector(outcome2_input2),
).perform()
time.sleep(1)
assert_image(base_img, "/nocheck")
assert_image(base_img2, "/nocheck")
assert_image(base_total_img, "/check")
assert_image(base_grandtotal_img, "/check")
assert_image(outcome1_img, "solid_check")
assert_no_image(outcome1_img2)
assert_image(outcome1_total_img, "/check")
assert_image(outcome1_grandtotal_img, "/check")
assert_no_image(outcome2_img)
assert_image(outcome2_img2, "solid_check")
assert_image(outcome2_total_img, "/check")
assert_image(outcome2_grandtotal_img, "/check")
def test_horizontal_outcome_view(self):
    """Exercise the horizontal outcome table of a program workflow.

    Builds a project containing a course and a program, gives the
    program a base outcome with two children, links the course into a
    program node, then toggles completion checkboxes in the horizontal
    outcome table and asserts that the check / partial-check / no-check
    images for the parent, children, totals and grand totals update as
    expected after each toggle.
    """
    selenium = self.selenium
    # NOTE(review): `wait` and `response` below are assigned but never
    # used in this test.
    wait = WebDriverWait(selenium, timeout=10)
    project = Project.objects.create(
        author=self.user, title="project title"
    )
    course = Course.objects.create(author=self.user)
    program = Program.objects.create(author=self.user)
    WorkflowProject.objects.create(workflow=course, project=project)
    WorkflowProject.objects.create(workflow=program, project=project)
    # Program-level outcome with two child outcomes.
    base_outcome = Outcome.objects.create(author=self.user)
    OutcomeWorkflow.objects.create(outcome=base_outcome, workflow=program)
    OutcomeOutcome.objects.create(
        parent=base_outcome,
        child=Outcome.objects.create(author=self.user),
    )
    OutcomeOutcome.objects.create(
        parent=base_outcome,
        child=Outcome.objects.create(author=self.user),
    )
    course.outcomes.create(author=self.user)
    course.outcomes.create(author=self.user)
    # Program node that links the course workflow.
    node = program.weeks.first().nodes.create(
        author=self.user,
        linked_workflow=course,
        column=program.columns.first(),
    )
    response = self.client.post(
        reverse("course_flow:update-outcomenode-degree"),
        {"nodePk": node.id, "outcomePk": base_outcome.id, "degree": 1},
    )
    selenium.get(
        self.live_server_url
        + reverse("course_flow:workflow-update", args=[program.pk])
    )
    # Close the sidebar, then switch to the horizontal outcome table view.
    selenium.find_element_by_css_selector(
        "#sidebar .window-close-button"
    ).click()
    time.sleep(0.5)
    selenium.find_element_by_css_selector(".other-views").click()
    selenium.find_element_by_css_selector(
        "#button_horizontaloutcometable"
    ).click()
    time.sleep(5)
    # CSS selector fragments for the base outcome row and its two child
    # rows; suffix "2" variants address the second node's column.
    base_outcome_row_select = (
        ".outcome-table > div > .outcome > .outcome-row > .outcome-cells"
    )
    outcome1_row_select = (
        ".outcome .outcome-outcome:first-of-type .outcome > .outcome-row"
    )
    outcome2_row_select = ".outcome .outcome-outcome+.outcome-outcome .outcome > .outcome-row"
    base_cell = (
        base_outcome_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell"
    )
    base_cell2 = (
        base_outcome_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell+.table-cell"
    )
    base_input = (
        base_outcome_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell input"
    )
    base_input2 = (
        base_outcome_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell+.table-cell input"
    )
    base_img = (
        base_outcome_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell img"
    )
    base_img2 = (
        base_outcome_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell+.table-cell img"
    )
    base_total_img = (
        base_outcome_row_select
        + " .table-cell.total-cell:not(.grand-total-cell) img"
    )
    base_grandtotal_img = (
        base_outcome_row_select + " .table-cell.grand-total-cell img"
    )
    # Hover-click action that toggles the base outcome's checkbox.
    base_toggle = action_hover_click(
        selenium,
        selenium.find_element_by_css_selector(base_cell),
        selenium.find_element_by_css_selector(base_input),
    )
    outcome1_cell = (
        outcome1_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell"
    )
    outcome1_cell2 = (
        outcome1_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell+.table-cell"
    )
    outcome1_input = (
        outcome1_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell input"
    )
    outcome1_input2 = (
        outcome1_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell+.table-cell input"
    )
    outcome1_img = (
        outcome1_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell img"
    )
    outcome1_img2 = (
        outcome1_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell+.table-cell img"
    )
    outcome1_total_img = (
        outcome1_row_select
        + " .table-cell.total-cell:not(.grand-total-cell) img"
    )
    outcome1_grandtotal_img = (
        outcome1_row_select + " .table-cell.grand-total-cell img"
    )
    outcome1_toggle = action_hover_click(
        selenium,
        selenium.find_element_by_css_selector(outcome1_cell),
        selenium.find_element_by_css_selector(outcome1_input),
    )
    outcome2_cell = (
        outcome2_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell"
    )
    outcome2_cell2 = (
        outcome2_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell+.table-cell"
    )
    outcome2_input = (
        outcome2_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell input"
    )
    outcome2_input2 = (
        outcome2_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell+.table-cell input"
    )
    outcome2_img = (
        outcome2_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell img"
    )
    outcome2_img2 = (
        outcome2_row_select
        + " .table-group:first-of-type .blank-cell+.table-cell+.table-cell img"
    )
    outcome2_total_img = (
        outcome2_row_select
        + " .table-cell.total-cell:not(.grand-total-cell) img"
    )
    outcome2_grandtotal_img = (
        outcome2_row_select + " .table-cell.grand-total-cell img"
    )
    # NOTE(review): outcome2_toggle is built but never performed; the
    # second child is toggled later via its second-column cell instead.
    outcome2_toggle = action_hover_click(
        selenium,
        selenium.find_element_by_css_selector(outcome2_cell),
        selenium.find_element_by_css_selector(outcome2_input),
    )

    def assert_image(element_string, string):
        """Assert the img matched by element_string has `string` in its src."""
        assert string in selenium.find_element_by_css_selector(
            element_string
        ).get_attribute("src")

    def assert_no_image(element_string):
        """Assert that no img element matches element_string."""
        self.assertEqual(
            len(selenium.find_elements_by_css_selector(element_string)), 0,
        )

    # Toggle the base outcome. Check to make sure the children and totals columns behave as expected
    base_toggle.perform()
    time.sleep(1)
    assert_image(base_img, "solid_check")
    assert_image(base_total_img, "/check")
    assert_image(base_grandtotal_img, "/check")
    assert_image(outcome1_img, "/solid_check")
    assert_image(outcome1_total_img, "/check")
    assert_image(outcome1_grandtotal_img, "/check")
    assert_image(outcome2_img, "/solid_check")
    assert_image(outcome2_total_img, "/check")
    assert_image(outcome2_grandtotal_img, "/check")
    # Toggle one of the children. We expect to lose the top outcome to partial completion
    outcome1_toggle.perform()
    time.sleep(1)
    assert_image(base_img, "/nocheck")
    assert_image(base_total_img, "/nocheck")
    assert_image(base_grandtotal_img, "/nocheck")
    assert_no_image(outcome1_img)
    assert_no_image(outcome1_total_img)
    assert_no_image(outcome1_grandtotal_img)
    assert_image(outcome2_img, "/solid_check")
    assert_image(outcome2_total_img, "/check")
    assert_image(outcome2_grandtotal_img, "/check")
    # check that re-toggling outcome 1 adds the parent
    outcome1_toggle.perform()
    time.sleep(1)
    assert_image(base_img, "solid_check")
    assert_image(base_total_img, "/check")
    assert_image(base_grandtotal_img, "/check")
    assert_image(outcome1_img, "/solid_check")
    assert_image(outcome1_total_img, "/check")
    assert_image(outcome1_grandtotal_img, "/check")
    assert_image(outcome2_img, "/solid_check")
    assert_image(outcome2_total_img, "/check")
    assert_image(outcome2_grandtotal_img, "/check")
    # check that removing the base outcome clears all
    base_toggle.perform()
    time.sleep(1)
    assert_no_image(base_img)
    assert_no_image(base_total_img)
    assert_no_image(base_grandtotal_img)
    assert_no_image(outcome1_img)
    assert_no_image(outcome1_total_img)
    assert_no_image(outcome1_grandtotal_img)
    assert_no_image(outcome2_img)
    assert_no_image(outcome2_total_img)
    assert_no_image(outcome2_grandtotal_img)
    # check completion when not all children are toggled
    outcome1_toggle.perform()
    time.sleep(1)
    assert_image(base_img, "/nocheck")
    assert_image(base_total_img, "/nocheck")
    assert_image(base_grandtotal_img, "/nocheck")
    assert_image(outcome1_img, "solid_check")
    assert_image(outcome1_total_img, "/check")
    assert_image(outcome1_grandtotal_img, "/check")
    assert_no_image(outcome2_img)
    assert_no_image(outcome2_total_img)
    assert_no_image(outcome2_grandtotal_img)
    # check completion when children are toggled but in different nodes
    action_hover_click(
        selenium,
        selenium.find_element_by_css_selector(outcome2_cell2),
        selenium.find_element_by_css_selector(outcome2_input2),
    ).perform()
    time.sleep(1)
    assert_image(base_img, "/nocheck")
    assert_image(base_img2, "/nocheck")
    assert_image(base_total_img, "/check")
    assert_image(base_grandtotal_img, "/check")
    assert_image(outcome1_img, "solid_check")
    assert_no_image(outcome1_img2)
    assert_image(outcome1_total_img, "/check")
    assert_image(outcome1_grandtotal_img, "/check")
    assert_no_image(outcome2_img)
    assert_image(outcome2_img2, "solid_check")
    assert_image(outcome2_total_img, "/check")
    assert_image(outcome2_grandtotal_img, "/check")
def test_outcome_analytics(self):
    """Check the alignment-analysis view of a program workflow.

    Sets up a program whose single node links a course, attaches a
    parent outcome with two children, aligns each course outcome to one
    child, then opens the alignment analysis view and verifies the
    rendered week title and the node/outcome counts.
    """
    selenium = self.selenium
    wait = WebDriverWait(selenium, timeout=10)
    # Project containing one course and one program.
    project = Project.objects.create(
        author=self.user, title="project title"
    )
    course = Course.objects.create(author=self.user)
    program = Program.objects.create(author=self.user)
    for workflow in (course, program):
        WorkflowProject.objects.create(workflow=workflow, project=project)
    # Program-level outcome with two children.
    base_outcome = Outcome.objects.create(author=self.user)
    OutcomeWorkflow.objects.create(outcome=base_outcome, workflow=program)
    child_links = [
        OutcomeOutcome.objects.create(
            parent=base_outcome,
            child=Outcome.objects.create(author=self.user),
        )
        for _ in range(2)
    ]
    course_outcomes = [
        course.outcomes.create(author=self.user) for _ in range(2)
    ]
    node = program.weeks.first().nodes.create(
        author=self.user,
        linked_workflow=course,
        column=program.columns.first(),
    )
    response = self.client.post(
        reverse("course_flow:update-outcomenode-degree"),
        {"nodePk": node.id, "outcomePk": base_outcome.id, "degree": 1},
    )
    # Align each course outcome with one child of the program outcome.
    for course_outcome, child_link in zip(course_outcomes, child_links):
        OutcomeHorizontalLink.objects.create(
            outcome=course_outcome, parent_outcome=child_link.child
        )
    selenium.get(
        self.live_server_url
        + reverse("course_flow:workflow-update", args=[program.pk])
    )
    selenium.find_element_by_css_selector(".other-views").click()
    selenium.find_element_by_css_selector(
        "#button_alignmentanalysis"
    ).click()
    time.sleep(5)

    def count_of(selector):
        """Number of elements currently matching the CSS selector."""
        return len(selenium.find_elements_by_css_selector(selector))

    self.assertEqual(
        selenium.find_element_by_css_selector(".week .title-text").text,
        "Term 1",
    )
    self.assertEqual(count_of(".week .node"), 1)
    self.assertEqual(count_of(".week .node .child-outcome"), 2)
    self.assertEqual(
        count_of(".week .node .child-outcome .half-width>.outcome"), 2
    )
    self.assertEqual(
        count_of(".week .node .child-outcome .alignment-row .outcome"), 2
    )
def test_outcome_csv_output(self):
    """Smoke-test the competency matrix view and its export control.

    Builds the same program/course fixture as the analytics test, opens
    the competency matrix view, and clicks the export button; no
    assertion follows, so the click completing without error is the
    check.
    """
    selenium = self.selenium
    wait = WebDriverWait(selenium, timeout=10)
    project = Project.objects.create(
        author=self.user, title="project title"
    )
    course = Course.objects.create(author=self.user)
    program = Program.objects.create(author=self.user)
    for workflow in (course, program):
        WorkflowProject.objects.create(workflow=workflow, project=project)
    base_outcome = Outcome.objects.create(author=self.user)
    OutcomeWorkflow.objects.create(outcome=base_outcome, workflow=program)
    # Two children under the program outcome, then two course outcomes.
    for _ in range(2):
        OutcomeOutcome.objects.create(
            parent=base_outcome,
            child=Outcome.objects.create(author=self.user),
        )
    for _ in range(2):
        course.outcomes.create(author=self.user)
    node = program.weeks.first().nodes.create(
        author=self.user,
        linked_workflow=course,
        column=program.columns.first(),
    )
    self.client.post(
        reverse("course_flow:update-outcomenode-degree"),
        {"nodePk": node.id, "outcomePk": base_outcome.id, "degree": 1},
    )
    selenium.get(
        self.live_server_url
        + reverse("course_flow:workflow-update", args=[program.pk])
    )
    selenium.find_element_by_css_selector(".other-views").click()
    selenium.find_element_by_css_selector(
        "#button_competencymatrix"
    ).click()
    time.sleep(1)
    selenium.find_element_by_css_selector(".menu-create").click()
def test_grid_view(self):
    """The grid view button must render the workflow grid."""
    selenium = self.selenium
    wait = WebDriverWait(selenium, timeout=10)
    project = Project.objects.create(
        author=self.user, title="project title"
    )
    program = Program.objects.create(author=self.user)
    WorkflowProject.objects.create(workflow=program, project=project)
    program.weeks.first().nodes.create(
        author=self.user, column=program.columns.first(),
    )
    workflow_url = self.live_server_url + reverse(
        "course_flow:workflow-update", args=[program.pk]
    )
    selenium.get(workflow_url)
    selenium.find_element_by_css_selector(".other-views").click()
    selenium.find_element_by_css_selector("#button_grid").click()
    time.sleep(1)
    grid_elements = selenium.find_elements_by_css_selector(
        ".workflow-grid"
    )
    self.assertGreater(len(grid_elements), 0)
def test_linked_workflow(self):
    """Exercise linking and unlinking workflows through a node.

    Creates one workflow of each type (activity, course, program), each
    with a single node. For each workflow after the first, links its
    node to the previously-created workflow type (course -> activity,
    program -> course), verifies the link via double-click navigation,
    then removes the link and verifies it is gone.
    """
    selenium = self.selenium
    # NOTE(review): `wait` is created but never used in this test.
    wait = WebDriverWait(selenium, timeout=10)
    project = Project.objects.create(
        author=self.user, title="project title"
    )
    workflow_types = ["activity", "course", "program"]
    for i, workflow_type in enumerate(workflow_types):
        workflow = get_model_from_str(workflow_type).objects.create(
            author=self.user, title=workflow_type
        )
        WorkflowProject.objects.create(workflow=workflow, project=project)
        workflow.weeks.first().nodes.create(
            author=self.user,
            column=workflow.columns.first(),
            title="test node",
            node_type=i,
        )
        selenium.get(
            self.live_server_url
            + reverse("course_flow:workflow-update", args=[workflow.pk])
        )
        this_url = selenium.current_url
        # The activity is created first and has no preceding workflow
        # type to link to, so only its creation is exercised.
        if workflow_type == "activity":
            continue
        selenium.find_element_by_css_selector(
            ".workflow-details .node .node-title"
        ).click()
        time.sleep(2)
        selenium.find_element_by_id("linked-workflow-editor").click()
        time.sleep(2)
        # Pick the workflow created on the previous loop iteration
        # (workflow_types[i - 1]) as the link target.
        selenium.find_element_by_css_selector(
            ".section-" + workflow_types[i - 1] + " .workflow-for-menu"
        ).click()
        selenium.find_element_by_id("set-linked-workflow").click()
        time.sleep(1)
        self.assertEqual(
            workflow.weeks.first().nodes.first().linked_workflow.id,
            get_model_from_str(workflow_types[i - 1]).objects.first().id,
        )
        # Double-clicking the node navigates to the linked workflow.
        ActionChains(selenium).double_click(
            selenium.find_element_by_css_selector(
                ".workflow-details .node"
            )
        ).perform()
        assert (
            workflow_types[i - 1]
            in selenium.find_element_by_css_selector(
                "#workflowtitle a"
            ).text
        )
        # Navigate back and clear the link again.
        selenium.get(this_url)
        selenium.find_element_by_css_selector(
            ".workflow-details .node .node-title"
        ).click()
        selenium.find_element_by_id("linked-workflow-editor").click()
        time.sleep(2)
        selenium.find_element_by_css_selector(
            ".section-" + workflow_types[i - 1] + " .workflow-for-menu"
        ).click()
        selenium.find_element_by_id("set-linked-workflow-none").click()
        time.sleep(2)
        self.assertEqual(
            workflow.weeks.first().nodes.first().linked_workflow, None
        )
        # With no link, double-click stays on the current workflow.
        ActionChains(selenium).double_click(
            selenium.find_element_by_css_selector(
                ".workflow-details .node"
            )
        ).perform()
        assert (
            workflow_type
            in selenium.find_element_by_css_selector(
                "#workflowtitle a"
            ).text
        )
def create_many_items(self, author, published, disciplines):
    """Create ten projects, activities, courses and programs.

    All items are owned by *author*, share the *published* flag and are
    tagged with *disciplines*; each title is the type name followed by
    its index ("project0" ... "program9").
    """
    type_names = ("project", "activity", "course", "program")
    for type_name in type_names:
        model = get_model_from_str(type_name)
        for index in range(10):
            created = model.objects.create(
                author=author,
                published=published,
                title=type_name + str(index),
            )
            created.disciplines.set(disciplines)
def test_explore(self):
    """Exercise search, pagination, result sizing and favouriting on explore.

    Another author publishes 10 items of each of the 4 types (40 total).
    With the default page size of 10 this yields 4 page buttons; the
    test walks pages, changes the results-per-page option, filters by
    title, and toggles favourites on the filtered results.
    """
    selenium = self.selenium
    # NOTE(review): `wait` is created but never used in this test.
    wait = WebDriverWait(selenium, timeout=10)
    author = get_author()
    discipline = Discipline.objects.create(title="Discipline1")
    self.create_many_items(author, True, disciplines=[discipline])
    selenium.get(self.live_server_url + reverse("course_flow:explore"))
    # Enable every workflow-type filter, then search.
    for checkbox in selenium.find_elements_by_css_selector(
        "#search-type input[type='checkbox']"
    ):
        checkbox.click()
    selenium.find_element_by_id("submit").click()
    time.sleep(1)
    # 40 published items / 10 per page = 4 page buttons.
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".page-button")), 4
    )
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".workflow-title")), 10
    )
    # Jump to the third page directly.
    selenium.find_elements_by_css_selector(".page-button")[2].click()
    time.sleep(1)
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".page-button")), 4
    )
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".workflow-title")), 10
    )
    assert "active" in selenium.find_elements_by_css_selector(
        ".page-button"
    )[2].get_attribute("class")
    # Next-page moves the active marker from index 2 to 3.
    selenium.find_element_by_css_selector("#next-page-button").click()
    time.sleep(1)
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".page-button")), 4
    )
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".workflow-title")), 10
    )
    assert "active" in selenium.find_elements_by_css_selector(
        ".page-button"
    )[3].get_attribute("class")
    # Prev-page moves the active marker back to index 2.
    selenium.find_element_by_css_selector("#prev-page-button").click()
    time.sleep(1)
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".page-button")), 4
    )
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".workflow-title")), 10
    )
    assert "active" in selenium.find_elements_by_css_selector(
        ".page-button"
    )[2].get_attribute("class")
    # Enabling the discipline filter keeps all 40 results (all items
    # carry Discipline1).
    for checkbox in selenium.find_elements_by_css_selector(
        "#search-discipline input[type='checkbox']"
    ):
        checkbox.click()
    selenium.find_element_by_id("submit").click()
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".workflow-title")), 10
    )
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".page-button")), 4
    )
    # Switch the results-per-page select to its second option: 20 per
    # page -> 2 page buttons.
    selenium.find_element_by_css_selector("select[name='results']").click()
    time.sleep(0.5)
    selenium.find_elements_by_css_selector(
        "select[name='results'] option"
    )[1].click()
    time.sleep(0.5)
    selenium.find_element_by_id("submit").click()
    time.sleep(1)
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".workflow-title")), 20
    )
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".page-button")), 2
    )
    # Third option: 40 per page -> single page.
    selenium.find_element_by_css_selector("select[name='results']").click()
    selenium.find_elements_by_css_selector(
        "select[name='results'] option"
    )[2].click()
    selenium.find_element_by_id("submit").click()
    time.sleep(1)
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".workflow-title")), 40
    )
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".page-button")), 1
    )
    # Title filter "1" matches one item per type ("project1", ...) = 4.
    selenium.find_element_by_id("search-title").send_keys("1")
    selenium.find_element_by_id("submit").click()
    time.sleep(1)
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".workflow-title")), 4
    )
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".page-button")), 1
    )
    # Favourite each of the 4 filtered results: one per content type.
    for button in selenium.find_elements_by_css_selector(
        ".workflow-toggle-favourite"
    ):
        button.click()
        time.sleep(0.5)
    self.assertEqual(
        Favourite.objects.filter(
            user=self.user,
            content_type=ContentType.objects.get_for_model(Project),
        ).count(),
        1,
    )
    self.assertEqual(
        Favourite.objects.filter(
            user=self.user,
            content_type=ContentType.objects.get_for_model(Activity),
        ).count(),
        1,
    )
    self.assertEqual(
        Favourite.objects.filter(
            user=self.user,
            content_type=ContentType.objects.get_for_model(Course),
        ).count(),
        1,
    )
    self.assertEqual(
        Favourite.objects.filter(
            user=self.user,
            content_type=ContentType.objects.get_for_model(Program),
        ).count(),
        1,
    )
    # Back to the first page-size option; title filter still applies.
    selenium.find_element_by_css_selector("select[name='results']").click()
    selenium.find_elements_by_css_selector(
        "select[name='results'] option"
    )[0].click()
    selenium.find_element_by_id("submit").click()
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".workflow-title")), 4
    )
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".page-button")), 1
    )
def test_explore_no_publish(self):
    """Unpublished items must never appear in explore search results."""
    selenium = self.selenium
    wait = WebDriverWait(selenium, timeout=10)
    other_author = get_author()
    discipline = Discipline.objects.create(title="Discipline1")
    # published=False: none of these should be visible to self.user.
    self.create_many_items(other_author, False, disciplines=[discipline])
    selenium.get(self.live_server_url + reverse("course_flow:explore"))
    type_filters = selenium.find_elements_by_css_selector(
        "#search-type input[type='checkbox']"
    )
    for type_filter in type_filters:
        type_filter.click()
    selenium.find_element_by_id("submit").click()
    for selector in (".page-button", ".workflow-title"):
        self.assertEqual(
            len(selenium.find_elements_by_css_selector(selector)), 0
        )
def test_explore_disciplines(self):
    """Check discipline filtering on the explore page.

    Creates 40 items per discipline combination (discipline 1 only,
    discipline 2 only, and both), then verifies the page-button count
    as the two discipline checkboxes are toggled.
    """
    selenium = self.selenium
    wait = WebDriverWait(selenium, timeout=10)
    author = get_author()
    first_discipline = Discipline.objects.create(title="Discipline1")
    second_discipline = Discipline.objects.create(title="Discipline2")
    self.create_many_items(author, True, disciplines=[first_discipline])
    self.create_many_items(author, True, disciplines=[second_discipline])
    self.create_many_items(
        author, True, disciplines=[first_discipline, second_discipline]
    )
    selenium.get(self.live_server_url + reverse("course_flow:explore"))

    def toggle_discipline(index):
        """Click the discipline filter checkbox at the given index."""
        selenium.find_elements_by_css_selector(
            "#search-discipline input[type='checkbox']"
        )[index].click()

    def submit_and_check(page_buttons, titles):
        """Submit the search and verify pagination and result counts."""
        selenium.find_element_by_id("submit").click()
        time.sleep(1)
        self.assertEqual(
            len(selenium.find_elements_by_css_selector(".page-button")),
            page_buttons,
        )
        self.assertEqual(
            len(
                selenium.find_elements_by_css_selector(".workflow-title")
            ),
            titles,
        )

    for checkbox in selenium.find_elements_by_css_selector(
        "#search-type input[type='checkbox']"
    ):
        checkbox.click()
    # No discipline filter: all 120 items -> 12 pages.
    submit_and_check(12, 10)
    # Discipline 1 only: 80 items -> 8 pages.
    toggle_discipline(0)
    submit_and_check(8, 10)
    # Discipline 2 only: 80 items -> 8 pages.
    toggle_discipline(0)
    toggle_discipline(1)
    submit_and_check(8, 10)
    # Both disciplines: back to all 120 items -> 12 pages.
    toggle_discipline(0)
    submit_and_check(12, 10)
def test_share_edit_view(self):
    """Exercise the project sharing dialog: add, downgrade, remove a user.

    Adds a second user as an editor, re-adds them as a viewer (which
    replaces the edit permission), then removes them entirely and
    checks the ObjectPermission rows after each step.
    """
    selenium = self.selenium
    wait = WebDriverWait(selenium, timeout=10)
    user2 = get_author()
    project = Project.objects.create(author=self.user)
    selenium.get(
        self.live_server_url
        + reverse("course_flow:project-update", args=[project.pk])
    )
    selenium.find_element_by_id("share-button").click()
    # One input/button pair per permission level (edit first, view second).
    inputs = selenium.find_elements_by_css_selector(".user-add input")
    adds = selenium.find_elements_by_css_selector(".user-add button")
    # NOTE(review): "<PASSWORD>" looks like a redaction artifact; this
    # presumably typed user2's username into the autocomplete — confirm
    # against the original repository.
    inputs[0].send_keys("<PASSWORD>")
    time.sleep(2)
    # Pick the first autocomplete suggestion, then add as editor.
    selenium.find_elements_by_css_selector(".ui-autocomplete li")[
        0
    ].click()
    adds[0].click()
    time.sleep(1)
    self.assertEqual(
        ObjectPermission.objects.filter(
            user=user2,
            permission_type=ObjectPermission.PERMISSION_EDIT,
            content_type=ContentType.objects.get_for_model(project),
            object_id=project.id,
        ).count(),
        1,
    )
    self.assertEqual(
        ObjectPermission.objects.filter(
            user=user2,
            permission_type=ObjectPermission.PERMISSION_VIEW,
            content_type=ContentType.objects.get_for_model(project),
            object_id=project.id,
        ).count(),
        0,
    )
    # Add the same user through the view-only row; this should replace
    # the edit permission with a view permission.
    inputs[1].send_keys("<PASSWORD>")
    time.sleep(2)
    selenium.find_elements_by_css_selector(".ui-autocomplete li")[
        1
    ].click()
    adds[1].click()
    time.sleep(1)
    self.assertEqual(
        ObjectPermission.objects.filter(
            user=user2,
            permission_type=ObjectPermission.PERMISSION_EDIT,
            content_type=ContentType.objects.get_for_model(project),
            object_id=project.id,
        ).count(),
        0,
    )
    self.assertEqual(
        ObjectPermission.objects.filter(
            user=user2,
            permission_type=ObjectPermission.PERMISSION_VIEW,
            content_type=ContentType.objects.get_for_model(project),
            object_id=project.id,
        ).count(),
        1,
    )
    # Remove the user; a confirmation alert pops up and is accepted.
    selenium.find_element_by_css_selector(
        ".user-label .window-close-button"
    ).click()
    alert = wait.until(expected_conditions.alert_is_present())
    selenium.switch_to.alert.accept()
    time.sleep(2)
    self.assertEqual(
        ObjectPermission.objects.filter(
            user=user2,
            content_type=ContentType.objects.get_for_model(project),
            object_id=project.id,
        ).count(),
        0,
    )
    # Close the share dialog and confirm it is gone.
    selenium.find_element_by_css_selector(
        ".message-wrap > .window-close-button"
    ).click()
    self.assertEqual(
        len(selenium.find_elements_by_css_selector(".message-wrap")), 0
    )
|
StarcoderdataPython
|
1734717
|
# Copyright 2019 Jetperch LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from PySide2 import QtGui, QtCore, QtWidgets
from .signal_def import signal_def
from .signal import Signal
from .scrollbar import ScrollBar
from .xaxis import XAxis
from .settings_widget import SettingsWidget
from .font_resizer import FontResizer
from .ymarker_manager import YMarkerManager
from joulescope_ui.file_dialog import FileDialog
from joulescope.data_recorder import construct_record_filename
from joulescope_ui.preferences_def import FONT_SIZES
import pyqtgraph as pg
import pyqtgraph.exporters
from typing import Dict
import copy
import os
import logging
log = logging.getLogger(__name__)
SIGNAL_OFFSET_ROW = 2
class WaveformWidget(QtWidgets.QWidget):
"""Oscilloscope-style waveform view for multiple signals.
:param parent: The parent :class:`QWidget`.
"""
def __init__(self, parent, cmdp, state_preference):
    """Construct the waveform widget.

    :param parent: The parent :class:`QWidget`.
    :param cmdp: The command/preferences processor providing
        ``subscribe``, ``register`` and ``invoke``.
    :param state_preference: NOTE(review): accepted but not referenced
        in this constructor — confirm whether it is used elsewhere or
        kept for interface compatibility.
    """
    QtWidgets.QWidget.__init__(self, parent=parent)
    self._cmdp = cmdp
    self._x_limits = [0.0, 30.0]  # [min, max] x-axis range in seconds
    self._mouse_pos = None        # last scene mouse position (sigMouseMoved)
    self._clipboard_image = None  # holds the last image copied to clipboard
    self._shortcuts = {}          # key -> [key, callback, QShortcut]
    # Single graphics layout widget fills the whole widget.
    self.layout = QtWidgets.QHBoxLayout(self)
    self.layout.setSpacing(0)
    self.layout.setContentsMargins(0, 0, 0, 0)
    self.win = pg.GraphicsLayoutWidget(parent=self, show=True, title="Oscilloscope layout")
    self.win.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
    self.win.sceneObj.sigMouseClicked.connect(self._on_mouse_clicked_event)
    self.win.sceneObj.sigMouseMoved.connect(self._on_mouse_moved_event)
    self.layout.addWidget(self.win)
    self._signals_def = {}
    self._signals: Dict[str, Signal] = {}
    self.config = {
        'show_min_max': True,
        'grid_x': 128,
        'grid_y': 128,
        'trace_width': 1,
    }
    self._dataview_data_pending = 0
    self._ymarker_mgr = YMarkerManager(cmdp, self._signals)
    # Layout grid: settings widget at (0, 0), scrollbar at (0, 1),
    # x-axis at (1, 1); signals are added below SIGNAL_OFFSET_ROW.
    self._settings_widget = SettingsWidget(self._cmdp)
    self.win.addItem(self._settings_widget, row=0, col=0)
    self._scrollbar = ScrollBar(parent=None, cmdp=cmdp)
    self._scrollbar.regionChange.connect(self.on_scrollbarRegionChange)
    self.win.addItem(self._scrollbar, row=0, col=1)
    self._x_axis = XAxis(self._cmdp)
    self.win.addItem(self._x_axis, row=1, col=1)
    self._x_axis.add_to_scene()
    self._x_axis.setGrid(128)
    self._x_axis.sigMarkerMoving.connect(self._on_marker_moving)
    # Column 1 (the plot area) takes essentially all horizontal space.
    self.win.ci.layout.setRowStretchFactor(0, 1)
    self.win.ci.layout.setRowStretchFactor(1, 1)
    self.win.ci.layout.setColumnStretchFactor(0, 1)
    self.win.ci.layout.setColumnStretchFactor(1, 1000)
    self.win.ci.layout.setColumnAlignment(0, QtCore.Qt.AlignRight)
    self.win.ci.layout.setColumnAlignment(1, QtCore.Qt.AlignLeft)
    self.win.ci.layout.setColumnAlignment(2, QtCore.Qt.AlignLeft)
    self.win.ci.layout.setColumnStretchFactor(2, -1)
    self.signal_configure()
    # Initial view: 30 s limits, showing the last 5 s.
    self.set_xlimits(0.0, 30.0)
    self.set_xview(25.0, 30.0)
    self._statistics_font_resizer = FontResizer()
    cmdp.subscribe('Widgets/Waveform/Statistics/font', self._statistics_font_resizer.on_font, update_now=True)
    self._marker_font_resizer = FontResizer()
    cmdp.subscribe('Widgets/Waveform/Statistics/font', self._marker_font_resizer.on_font, update_now=True)
    # Wire up preference/state topics and commands.
    c = self._cmdp
    c.subscribe('DataView/#data', self._on_data, update_now=True)
    c.subscribe('Device/#state/source', self._on_device_state_source, update_now=True)
    c.subscribe('Device/#state/play', self._on_device_state_play, update_now=True)
    c.subscribe('Device/#state/name', self._on_device_state_name, update_now=True)
    c.subscribe('Widgets/Waveform/Markers/_state/instances/', self._on_marker_instance_change,
                update_now=True)
    c.subscribe('Widgets/Waveform/#requests/refresh_markers', self._on_refresh_markers, update_now=True)
    c.subscribe('Widgets/Waveform/#statistics_over_range_resp', self._on_statics_over_range_resp,
                update_now=True)
    c.subscribe('Device/#state/x_limits', self._on_device_state_limits, update_now=True)
    c.subscribe('Widgets/Waveform/Statistics/font-size', self._on_statistics_settings)
    c.subscribe('Widgets/Waveform/_signals', self._on_signals_active, update_now=True)
    c.register('!Widgets/Waveform/Signals/add', self._cmd_waveform_signals_add,
               brief='Add a signal to the waveform.',
               detail='value is list of signal name string and position. -1 inserts at end')
    c.register('!Widgets/Waveform/Signals/remove', self._cmd_waveform_signals_remove,
               brief='Remove a signal from the waveform by name.',
               detail='value is signal name string.')
    cmdp.subscribe('Appearance/__index__', self._on_colors, update_now=True)
    # Keyboard shortcuts for navigation/zoom and marker clearing.
    shortcuts = [
        [QtCore.Qt.Key_Asterisk, self._on_x_axis_zoom_all],
        [QtCore.Qt.Key_Delete, self._on_markers_clear],
        [QtCore.Qt.Key_Backspace, self._on_markers_clear],
        [QtCore.Qt.Key_Left, self._on_left],
        [QtCore.Qt.Key_Right, self._on_right],
        [QtCore.Qt.Key_Up, self._on_zoom_in],
        [QtCore.Qt.Key_Down, self._on_zoom_out],
        [QtCore.Qt.Key_Plus, self._on_zoom_in],
        [QtCore.Qt.Key_Minus, self._on_zoom_out],
    ]
    self._shortcuts_activate(shortcuts)
def _shortcuts_activate(self, shortcuts):
    """Create and connect a QShortcut for each (key, callback) pair.

    :param shortcuts: list of [key, callback] entries where key is a
        QtCore.Qt.Key_* value and callback takes no arguments.
    """
    for key, cbk in shortcuts:
        shortcut = QtWidgets.QShortcut(QtGui.QKeySequence(key), self)
        shortcut.activated.connect(cbk)
        # Keep references so the shortcuts are not garbage collected.
        self._shortcuts[key] = [key, cbk, shortcut]
def _on_colors(self, topic, value):
    """Apply the active color theme to the waveform display.

    :param topic: the subscription topic (unused).
    :param value: dict containing a 'colors' mapping of named colors.
    """
    colors = value['colors']
    self.win.setBackground(colors['waveform_background'])
    # Also update pyqtgraph's global defaults so newly created items match.
    pyqtgraph.setConfigOption('background', colors['waveform_background'])
    pyqtgraph.setConfigOption('foreground', colors['waveform_font_color'])
def _on_mouse_moved_event(self, pos):
    """Record the latest mouse scene position for the keyboard shortcuts."""
    self._mouse_pos = pos

def _on_mouse_clicked_event(self, ev):
    """Show the context menu on right-click if no item handled the event."""
    if ev.isAccepted():
        return
    if ev.button() & QtCore.Qt.RightButton:
        pos = ev.screenPos().toPoint()
        self._context_menu(pos)
def _context_menu(self, pos):
    """Construct and display the waveform context menu.

    :param pos: the global screen position (QPoint) at which to show the menu.
    """
    log.debug('_context_menu')
    menu = QtGui.QMenu('Waveform menu', self)
    save_image = menu.addAction('Save image')
    save_image.triggered.connect(self.on_save_image)
    copy_image = menu.addAction('Copy image to clipboard')
    copy_image.triggered.connect(self.on_copy_image_to_clipboard)
    # Fix: the two export actions previously shared the single variable name
    # export_data, silently discarding the reference to the first action.
    export_visible = menu.addAction('Export visible data')
    export_visible.triggered.connect(self.on_export_visible_data)
    export_all = menu.addAction('Export all data')
    export_all.triggered.connect(self.on_export_all_data)
    menu.exec_(pos)
def on_export_visible_data(self):
    """Export the data within the current view range via the RangeTool."""
    lo, hi = sorted(self._scrollbar.get_xview())
    self._cmdp.invoke('!RangeTool/run',
                      {'name': 'Export data', 'x_start': lo, 'x_stop': hi})

def on_export_all_data(self):
    """Export the full buffered data range via the RangeTool."""
    lo, hi = sorted(self._scrollbar.get_xlimits())
    self._cmdp.invoke('!RangeTool/run',
                      {'name': 'Export data', 'x_start': lo, 'x_stop': hi})
def _export_as_image(self):
    """Render the waveform scene to an image.

    :return: the exported image, scaled for the device pixel ratio.
    """
    r = QtWidgets.QApplication.desktop().devicePixelRatio()
    w = self.win.sceneObj.getViewWidget()
    # Map the widget rectangle through the inverse viewport transform so
    # the export width matches the on-screen geometry.
    k = w.viewportTransform().inverted()[0].mapRect(w.rect())
    exporter = pg.exporters.ImageExporter(self.win.sceneObj)
    exporter.parameters()['width'] = k.width() * r
    return exporter.export(toBytes=True)
def on_save_image(self):
    """Prompt for a filename and save the waveform as a PNG image."""
    filter_str = 'png (*.png)'
    # Default to the data directory using the standard record filename.
    filename = construct_record_filename()
    filename = os.path.splitext(filename)[0] + '.png'
    path = self._cmdp['General/data_path']
    filename = os.path.join(path, filename)
    dialog = FileDialog(self, 'Save Joulescope Data', filename, 'any')
    dialog.setNameFilter(filter_str)
    filename = dialog.exec_()
    if not bool(filename):
        return  # user cancelled the dialog
    png = self._export_as_image()
    png.save(filename)

def on_copy_image_to_clipboard(self):
    """Copy the waveform image to the system clipboard."""
    # Hold a reference on self; the clipboard does not take ownership.
    self._clipboard_image = self._export_as_image()
    QtWidgets.QApplication.clipboard().setImage(self._clipboard_image)
def _mouse_as_x(self):
x = None
if self._mouse_pos:
x = self._x_axis.linkedView().mapSceneToView(self._mouse_pos).x()
return x
def keyPressEvent(self, ev):
    """Handle the waveform keyboard shortcuts.

    * S: add a single marker at the mouse position.
    * D: add a dual-marker pair centered on the mouse position.
    * Delete/Backspace: clear all markers.
    * 1-8: zoom to the corresponding marker(s).
    """
    key = ev.key()
    if key == QtCore.Qt.Key_S:
        self._cmdp.invoke('!Widgets/Waveform/Markers/single_add', self._mouse_as_x())
    elif key == QtCore.Qt.Key_D:
        x = self._mouse_as_x()
        if x is None:
            # Fix: mouse is not over the waveform; the arithmetic below
            # would raise TypeError on None.
            return
        x_min, x_max = self._x_axis.range
        # Place the dual markers 1/10 of the view width apart.
        w2 = (x_max - x_min) / 10
        self._cmdp.invoke('!Widgets/Waveform/Markers/dual_add', [x - w2, x + w2])
    elif key == QtCore.Qt.Key_Delete or key == QtCore.Qt.Key_Backspace:
        self._cmdp.invoke('!Widgets/Waveform/Markers/clear', None)
    elif QtCore.Qt.Key_1 <= key <= QtCore.Qt.Key_8:
        self._markers_show(key - QtCore.Qt.Key_1 + 1)
def _markers_show(self, idx):
"""Show the markers
:param idx: The marker index, starting from 1.
"""
n = chr(ord('1') + idx - 1)
names = [n, f'{n}a', f'{n}b']
m = [self._x_axis.marker_get(name) for name in names]
m = [k for k in m if k is not None]
if len(m) == 1:
self._scrollbar.zoom_to_point(m[0].get_pos())
elif len(m) == 2:
self._scrollbar.zoom_to_range(m[0].get_pos(), m[1].get_pos())
@QtCore.Slot(bool)
def _on_markers_clear(self):
    """Shortcut handler: remove all markers."""
    self._cmdp.invoke('!Widgets/Waveform/Markers/clear', None)

@QtCore.Slot(bool)
def _on_x_axis_zoom_all(self):
    """Shortcut handler: zoom out to the full x-axis extents."""
    self._cmdp.invoke('!Widgets/Waveform/x-axis/zoom_all', None)

def _on_left(self):
    """Shortcut handler: pan the view one step towards earlier time."""
    self._cmdp.invoke('!Widgets/Waveform/x-axis/pan', -1)

def _on_right(self):
    """Shortcut handler: pan the view one step towards later time."""
    self._cmdp.invoke('!Widgets/Waveform/x-axis/pan', 1)

def _on_zoom_in(self):
    """Shortcut handler: zoom in by one step."""
    self._cmdp.invoke('!Widgets/Waveform/x-axis/zoom', 1)

def _on_zoom_out(self):
    """Shortcut handler: zoom out by one step."""
    self._cmdp.invoke('!Widgets/Waveform/x-axis/zoom', -1)

def _on_statistics_settings(self, topic, value):
    """Force a relayout when a statistics display setting changes."""
    self.win.ci.layout.invalidate()
def _cmd_waveform_signals_add(self, topic, value):
if value in self._signals:
return None
signals = list(self._signals.keys()) + [value]
self._cmdp['Widgets/Waveform/_signals'] = signals
return '!Widgets/Waveform/Signals/remove', value
def _cmd_waveform_signals_remove(self, topic, value):
if value not in self._signals:
return None
signals = list(self._signals.keys())
signals.remove(value)
self._cmdp['Widgets/Waveform/_signals'] = signals
return '!Widgets/Waveform/Signals/add', value
def _on_signals_active(self, topic, value):
# must be safe to call repeatedly
log.debug('_on_signals_active: %s', value)
signals_previous = list(self._signals.keys())
signals_next = value
for signal in signals_previous:
if signal not in signals_next:
self.signal_remove(signal)
for signal in signals_next:
if signal not in signals_previous:
self._on_signalAdd(signal)
def _on_device_state_limits(self, topic, value):
    """Apply new x-axis limits from the device and reset the view to match."""
    if value is not None:
        self.set_xlimits(*value)
        self.set_xview(*value)

def _on_device_state_name(self, topic, value):
    """Clear data and markers when the data source name becomes empty."""
    if not value:
        # disconnected from data source
        self.data_clear()
        self.markers_clear()

def _on_device_state_play(self, topic, value):
    """Switch between realtime streaming and buffered display."""
    if value:
        self.set_display_mode('realtime')
    else:
        self.set_display_mode('buffer')

def _on_device_state_source(self, topic, value):
    """Select the display mode from the data source type."""
    if value == 'USB':
        # Live device: stream in realtime and request fresh x-range data.
        if self.set_display_mode('realtime'):
            self.request_x_change()
    else:
        self.set_display_mode('buffer')
def set_display_mode(self, mode):
    """Configure the display mode.

    :param mode: The oscilloscope display mode which is one of:
        * 'realtime': Display realtime data, and do not allow x-axis time
          scrolling away from present time.
        * 'buffer': Display stored data, either from a file or a buffer,
          with a fixed x-axis range.
    :return: the scrollbar's set_display_mode result.

    Use :meth:`set_xview` and :meth:`set_xlimits` to configure the current
    view and the total allowed range.
    """
    return self._scrollbar.set_display_mode(mode)

def set_sampling_frequency(self, freq):
    """Set the sampling frequency.

    :param freq: The sampling frequency in Hz.

    This value is used to request appropriate x-axis ranges.
    """
    self._scrollbar.set_sampling_frequency(freq)
def set_xview(self, x_min, x_max):
    """Set the current view extents for the time x-axis.

    :param x_min: The minimum value to display on the current view in seconds.
    :param x_max: The maximum value to display on the current view in seconds.
    """
    self._scrollbar.set_xview(x_min, x_max)

def set_xlimits(self, x_min, x_max):
    """Set the allowable view extents for the time x-axis.

    :param x_min: The minimum value in seconds.
    :param x_max: The maximum value in seconds.
    """
    self._x_limits = [x_min, x_max]
    self._scrollbar.set_xlimits(x_min, x_max)
    # Propagate the limits to every displayed signal.
    for signal in self._signals.values():
        signal.set_xlimits(x_min, x_max)
def signal_configure(self, signals=None):
    """Configure the available signals.

    :param signals: The list of signal definitions.  Each definition is a
        dict with keys:
        * name: The signal name [required].
        * units: The optional SI units for the signal.
        * y_limit: The list of [min, max].
        * y_log_min: The minimum log value. None (default) disables
          logarithmic scale.
        * show: True to show. Not shown by default.
        When None, the module-level signal_def defaults are used.
    """
    definitions = signal_def if signals is None else signals
    for definition in definitions:
        entry = copy.deepcopy(definition)
        entry.setdefault('display_name', entry['name'])
        self._signals_def[entry['name']] = entry
def _on_signalAdd(self, name):
    """Add the signal *name* using its stored definition."""
    signal = self._signals_def[name]
    self.signal_add(signal)

def signal_add(self, signal):
    """Create a Signal from its definition and add it to the layout.

    :param signal: the signal definition dict (see signal_configure).
    :return: the new Signal instance.
    """
    if signal['name'] in self._signals:
        # Replace any existing signal with the same name.
        self.signal_remove(signal['name'])
    s = Signal(parent=self, cmdp=self._cmdp,
               statistics_font_resizer=self._statistics_font_resizer,
               marker_font_resizer=self._marker_font_resizer,
               **signal)
    s.addToLayout(self.win, row=self.win.ci.layout.rowCount())
    s.markers = self._x_axis.markers
    # Route wheel zoom and pan gestures to the shared x-axis scrollbar.
    s.vb.sigWheelZoomXEvent.connect(self._scrollbar.on_wheelZoomX)
    s.vb.sigPanXEvent.connect(self._scrollbar.on_panX)
    self._signals[signal['name']] = s
    self._vb_relink()  # Linking to last axis makes grid draw correctly
    s.y_axis.setGrid(self.config['grid_y'])
    return s
def signal_remove(self, name):
    """Remove the signal *name* and compact the remaining layout rows."""
    signal = self._signals.get(name)
    if signal is None:
        log.warning('signal_remove(%s) but not found', name)
        return
    self._ymarker_mgr.clear(name)
    signal = self._signals.pop(name, None)
    signal.vb.sigWheelZoomXEvent.disconnect()
    signal.vb.sigPanXEvent.disconnect()
    row = signal.removeFromLayout(self.win)
    # Shift every row below the removed one up by one to fill the gap.
    for k in range(row + 1, self.win.ci.layout.rowCount()):
        for j in range(3):
            i = self.win.getItem(k, j)
            if i is not None:
                self.win.removeItem(i)
                self.win.addItem(i, row=k - 1, col=j)
    self._vb_relink()
def _vb_relink(self):
    """Re-link all signal view boxes after signals are added or removed.

    The x-axis is linked to the bottom-most signal's view box and every
    other view box x-links to it, keeping pan/zoom synchronized across
    all displayed signals.
    """
    if len(self._signals) <= 0:
        self._x_axis.unlinkFromView()
    else:
        row = SIGNAL_OFFSET_ROW + len(self._signals) - 1
        vb = self.win.ci.layout.itemAt(row, 1)
        self._x_axis.linkToView(vb)
        for p in self._signals.values():
            if p.vb == vb:
                p.vb.setXLink(None)
            else:
                p.vb.setXLink(vb)
    # Let the settings widget reflect the available/visible signal sets.
    self._settings_widget.on_signalsAvailable(list(self._signals_def.values()),
                                              visible=list(self._signals.keys()))
def values_column_hide(self):
    """Collapse the right-hand statistics value column to zero width."""
    layout = self.win.ci.layout
    for row in range(layout.rowCount()):
        cell = layout.itemAt(row, 2)
        if cell is None:
            continue
        cell.hide()
        cell.setMaximumWidth(0)

def values_column_show(self):
    """Restore the right-hand statistics value column."""
    layout = self.win.ci.layout
    for row in range(layout.rowCount()):
        cell = layout.itemAt(row, 2)
        if cell is None:
            continue
        cell.show()
        cell.setMaximumWidth(16777215)  # QWIDGETSIZE_MAX
def _on_data(self, topic, data):
    """Handle a new data frame from the DataView.

    :param data: dict with 'time', 'state' and 'signals' entries, or a
        falsy value to clear the display.
    """
    if not self.isVisible():
        return
    if data is None or not bool(data):
        self.data_clear()
        return
    self._dataview_data_pending += 1
    x_limits = data['time']['limits']['value']
    if x_limits is not None and x_limits != self._x_limits:
        self.set_xlimits(*x_limits)
    self.set_display_mode(data['state']['source_type'])
    x = data['time']['x']['value']
    for name, value in data['signals'].items():
        s = self._signals.get(name)
        if s is None:
            # Signal is not currently displayed.
            continue
        s.update(x, value)
    # Refresh marker statistics against the new data.
    self._markers_single_update_all()
    self._markers_dual_update_all()
def _markers_single_update_all(self):
    """Refresh the statistics for every single marker on all signals."""
    markers = [(m.name, m.get_pos()) for m in self._x_axis.markers_single()]
    for s in self._signals.values():
        s.update_markers_single_all(markers)

def _markers_single_update(self, marker_name):
    """Refresh the statistics for one single marker on all signals."""
    marker = self._x_axis.marker_get(marker_name)
    if marker.is_single:
        for s in self._signals.values():
            s.update_markers_single_one(marker.name, marker.get_pos())

def _on_data_frame_done(self):
    """Mark the current data frame as fully processed and request the next."""
    self._dataview_data_pending = 0
    self._cmdp.publish('Widgets/Waveform/#requests/data_next', None)
def _markers_dual_update_all(self):
    """Request statistics over each dual-marker time range.

    The response arrives asynchronously on the
    'Widgets/Waveform/#statistics_over_range_resp' topic.  When no dual
    markers exist, the per-signal dual-marker statistics are cleared
    immediately and the frame is completed.
    """
    ranges = []
    markers = []
    for m1, m2 in self._x_axis.markers_dual():
        t1 = m1.get_pos()
        t2 = m2.get_pos()
        if t1 > t2:
            # Normalize so the range is always (min, max).
            t1, t2 = t2, t1
        ranges.append((t1, t2, m2.name))
        markers.append((m2.name, m2.get_pos()))
    if len(ranges):
        request = {
            'ranges': ranges,
            'source_id': 'Waveform._markers_dual_update_all',
            'markers': markers,
            'reply_topic': 'Widgets/Waveform/#statistics_over_range_resp'
        }
        self._cmdp.publish('DataView/#service/range_statistics', request)
    else:
        for s in self._signals.values():
            s.update_markers_dual_all([])
        self._on_data_frame_done()
def _on_statics_over_range_resp(self, topic, value):
    """Handle the asynchronous range-statistics response for dual markers.

    NOTE(review): the method name looks like a typo for "statistics"; it
    is bound by the subscription string, so renaming requires updating
    the subscriber registration as well.
    """
    if value is not None:
        show_dt = self._cmdp['Widgets/Waveform/dual_markers_Δt']
        req = value['request']
        rsp = value['response']
        if rsp is None:
            # Request failed: clear the displayed statistics.
            rsp = [None] * len(req['markers'])
        for s in self._signals.values():
            y = []
            for (name, pos), stat in zip(req['markers'], rsp):
                if stat is not None:
                    dt = stat['time']['delta']
                    # Keep only this signal's statistics from the response.
                    stat = stat['signals'].get(s.name, {})
                    if show_dt:
                        stat['Δt'] = dt
                y.append((name, pos, stat))
            s.update_markers_dual_all(y)
    self._on_data_frame_done()
def _on_marker_instance_change(self, topic, value):
    """Refresh statistics when a marker instance's state changes.

    The marker name is the second-to-last element of the topic path.
    """
    marker_name = topic.split('/')[-2]
    marker = self._x_axis.marker_get(marker_name)
    if marker is None:
        return  # marker still being created
    elif marker.is_single:
        self._markers_single_update(marker_name)
    else:
        self._markers_dual_update_all()  # todo : just update one

@QtCore.Slot(str, float)
def _on_marker_moving(self, marker_name, marker_pos):
    """Live-update each signal's marker graphics while a marker is dragged."""
    for s in self._signals.values():
        s.marker_move(marker_name, marker_pos)
def _on_refresh_markers(self, topic, value):
    """Refresh all marker statistics (value lists marker names, unused)."""
    # keep it simple for now, just refresh everything
    self._markers_single_update_all()
    self._markers_dual_update_all()

def data_clear(self):
    """Clear the displayed data for every signal."""
    for s in self._signals.values():
        s.data_clear()

def markers_clear(self):
    """Remove all markers and refresh the (now empty) marker statistics."""
    self._x_axis.markers_clear()
    self._markers_single_update_all()
    self._markers_dual_update_all()
def x_state_get(self):
    """Get the x-axis state.

    :return: The dict of x-axis state including:
        * length: The current length in pixels (integer)
        * x_limits: The tuple of (x_min: float, x_max: float) view limits.
        * x_view: The tuple of (x_min: float, x_max: float) for the
          current view range.
    """
    # Use the x-axis row's geometry for the pixel width of the plot area.
    length = self.win.ci.layout.itemAt(0, 1).geometry().width()
    length = int(length)
    return {
        'length': length,
        'x_limits': tuple(self._x_limits),
        'x_view': self._scrollbar.get_xview(),
    }
@QtCore.Slot(float, float, float)
def on_scrollbarRegionChange(self, x_min, x_max, x_count):
    """Apply a new scrollbar view region and request matching data.

    :param x_min: the left edge of the view in seconds.
    :param x_max: the right edge of the view in seconds.
    :param x_count: the number of samples to request for the region.
    """
    row_count = self.win.ci.layout.rowCount()
    if x_min > x_max:
        # Guard against an inverted range; collapse to a zero-width view.
        x_min = x_max
    if (row_count > SIGNAL_OFFSET_ROW) and len(self._signals):
        row = SIGNAL_OFFSET_ROW + len(self._signals) - 1
        log.info('on_scrollbarRegionChange(%s, %s, %s)', x_min, x_max, x_count)
        vb = self.win.ci.layout.itemAt(row, 1)
        vb.setXRange(x_min, x_max, padding=0)
    else:
        log.info('on_scrollbarRegionChange(%s, %s, %s) with no ViewBox', x_min, x_max, x_count)
    self._cmdp.publish('DataView/#service/x_change_request', [x_min, x_max, x_count])

def request_x_change(self):
    """Ask the scrollbar to re-issue the current x-range request."""
    self._scrollbar.request_x_change()
def widget_register(cmdp):
    """Define the waveform widget's preferences and registration info.

    :param cmdp: the application command processor used to define the
        widget's preference topics.
    :return: the widget registration dict for the plugin framework.
    """
    cmdp.define('Widgets/Waveform/', 'Waveform display settings')
    cmdp.define(
        topic='Widgets/Waveform/show_min_max',
        brief='Display the minimum and maximum for ease of finding short events.',
        dtype='str',
        options={
            'off': {'brief': 'Hide the min/max indicators'},
            'lines': {'brief': 'Display minimum and maximum lines'},
            'fill': {'brief': 'Fill the region between min and max, but may significantly reduce performance.'}},
        default='lines')
    cmdp.define(
        topic='Widgets/Waveform/grid_x',
        brief='Display the x-axis grid',
        dtype='bool',
        default=True)
    cmdp.define(
        topic='Widgets/Waveform/grid_y',
        brief='Display the y-axis grid',
        dtype='bool',
        default=True)
    cmdp.define(
        topic='Widgets/Waveform/trace_width',
        brief='The trace width in pixels',
        detail='Increasing trace width SIGNIFICANTLY degrades performance',
        dtype='str',
        options=['1', '2', '4', '6', '8'],
        default='1')
    cmdp.define(
        topic='Widgets/Waveform/Statistics/font',
        brief='The font for the statistics text on the right-hand side of the waveform display.',
        dtype='font',
        default='Lato,10,-1,5,87,0,0,0,0,0,Black')
    cmdp.define(
        topic='Widgets/Waveform/dual_markers_Δt',
        brief='Show the Δt statistics with dual markers.',
        dtype='bool',
        default=True)
    cmdp.define(
        topic='Widgets/Waveform/_signals',
        brief='The signal configurations.',
        dtype='obj',
        default=['current', 'voltage'])
    # Internal (non-preference) topics used by the widget at runtime.
    cmdp.define('Widgets/Waveform/#requests/refresh_markers', dtype=object)  # list of marker names
    cmdp.define('Widgets/Waveform/#requests/data_next', dtype='none')
    cmdp.define('Widgets/Waveform/#statistics_over_range_resp', dtype=object)
    return {
        'name': 'Waveform',
        'brief': 'Display waveforms of values over time.',
        'class': WaveformWidget,
        'location': QtCore.Qt.RightDockWidgetArea,
        'singleton': True,
        'sizePolicy': ['expanding', 'expanding'],
    }
|
StarcoderdataPython
|
1908745
|
<gh_stars>0
#!/usr/bin/env python
#===========================================================================
#
# Produce plots for cov_to_mom output
#
#===========================================================================
from __future__ import print_function
import os
import sys
import subprocess
from optparse import OptionParser
import numpy as np
from numpy import convolve
from numpy import linalg, array, ones
import matplotlib.pyplot as plt
from matplotlib import dates
import math
import datetime
import contextlib
def main():
    """Parse command-line options, read both input files, and render the plot."""

    # globals
    global options
    global debug
    global startTime
    global endTime

    # parse the command line
    usage = "usage: %prog [options]"
    parser = OptionParser(usage)
    parser.add_option('--debug',
                      dest='debug', default=False,
                      action="store_true",
                      help='Set debugging on')
    parser.add_option('--verbose',
                      dest='verbose', default=False,
                      action="store_true",
                      help='Set verbose debugging on')
    parser.add_option('--c2m_file',
                      dest='c2mFilePath',
                      default='../data/pecan/cov_to_mom.spol.qc.txt',
                      help='File path for bias results')
    parser.add_option('--cp_file',
                      dest='cpFilePath',
                      default='../data/pecan/cp_analysis.spol.txt',
                      help='CP results file path')
    parser.add_option('--title',
                      dest='title',
                      default='COV_TO_MOM status',
                      help='Title for plot')
    parser.add_option('--width',
                      dest='figWidthMm',
                      default=400,
                      help='Width of figure in mm')
    parser.add_option('--height',
                      dest='figHeightMm',
                      default=250,
                      help='Height of figure in mm')
    parser.add_option('--lenMean',
                      dest='lenMean',
                      default=1,
                      help='Len of moving mean filter')
    parser.add_option('--start',
                      dest='startTime',
                      default='1970 01 01 00 00 00',
                      help='Start time for XY plot')
    parser.add_option('--end',
                      dest='endTime',
                      default='1970 01 01 01 00 00',
                      help='End time for XY plot')

    (options, args) = parser.parse_args()

    # verbose implies debug
    if (options.verbose == True):
        options.debug = True

    # parse the start/end times, given as "YYYY MM DD HH MM SS"
    year, month, day, hour, minute, sec = options.startTime.split()
    startTime = datetime.datetime(int(year), int(month), int(day),
                                  int(hour), int(minute), int(sec))

    year, month, day, hour, minute, sec = options.endTime.split()
    endTime = datetime.datetime(int(year), int(month), int(day),
                                int(hour), int(minute), int(sec))

    if (options.debug == True):
        print("Running %prog", file=sys.stderr)
        print(" c2mFilePath: ", options.c2mFilePath, file=sys.stderr)
        print(" cpFilePath: ", options.cpFilePath, file=sys.stderr)
        print(" startTime: ", startTime, file=sys.stderr)
        print(" endTime: ", endTime, file=sys.stderr)

    # read in column headers for c2m results
    iret, c2mHdrs, c2mData = readColumnHeaders(options.c2mFilePath)
    if (iret != 0):
        sys.exit(-1)

    # read in data for c2m results
    c2mData, c2mTimes = readInputData(options.c2mFilePath, c2mHdrs, c2mData)

    # read in column headers for CP results
    iret, cpHdrs, cpData = readColumnHeaders(options.cpFilePath)
    if (iret != 0):
        sys.exit(-1)

    # read in data for CP results
    cpData, cpTimes = readInputData(options.cpFilePath, cpHdrs, cpData)

    # render the plot
    doPlot(c2mData, c2mTimes, cpData, cpTimes)

    sys.exit(0)
########################################################################
# Read columm headers for the data
# this is in the first line
def readColumnHeaders(filePath):
    """Read the whitespace-delimited column names from the first line of a file.

    The first line must be a comment starting with '#'.

    :param filePath: path of the data file to read.
    :return: (iret, colHeaders, colData) where iret is 0 on success or -1
        on error, colHeaders is the list of column names, and colData maps
        each name to an empty list ready to receive column values.
    """
    colHeaders = []
    colData = {}

    with open(filePath, 'r') as fp:
        line = fp.readline()

    if line.find("#") != 0:
        print("ERROR - readColumnHeaders", file=sys.stderr)
        print(" First line does not start with #", file=sys.stderr)
        return -1, colHeaders, colData

    colHeaders = line.lstrip("# ").rstrip("\n").split()
    if (options.debug == True):
        print("colHeaders: ", colHeaders, file=sys.stderr)

    colData = {name: [] for name in colHeaders}
    return 0, colHeaders, colData
########################################################################
# Read in the data
def readInputData(filePath, colHeaders, colData):
    """Read whitespace-delimited data rows from a file into colData.

    Lines containing '#' are skipped, as are rows whose field count does
    not match the header.  Time-related columns are parsed as int and all
    other columns as float.

    :param filePath: path of the data file to read.
    :param colHeaders: list of column names (from readColumnHeaders).
    :param colData: dict mapping each column name to its value list.
    :return: (colData, obsTimes) where obsTimes is the list of
        datetime.datetime observation times built from the
        year/month/day/hour/min/sec columns.
    """
    intColumns = ('count', 'year', 'month', 'day', 'hour', 'min', 'sec',
                  'unix_time')

    with open(filePath, 'r') as fp:
        for line in fp:
            if line.find("#") >= 0:
                continue
            fields = line.strip().split()
            if len(fields) != len(colHeaders):
                continue
            for index, var in enumerate(colHeaders):
                if var in intColumns:
                    colData[var].append(int(fields[index]))
                else:
                    colData[var].append(float(fields[index]))

    # Assemble the observation time for each row.
    obsTimes = []
    for ii in range(len(colData['year'])):
        obsTimes.append(datetime.datetime(
            colData['year'][ii], colData['month'][ii], colData['day'][ii],
            colData['hour'][ii], colData['min'][ii], colData['sec'][ii]))

    return colData, obsTimes
########################################################################
# Moving average filter
def movingAverage(values, window):
    """Return the centered moving average of *values* over *window* samples.

    :param values: the input sequence.
    :param window: the averaging window length; values are returned
        unchanged when window < 2.
    :return: numpy array of the smoothed values (mode 'same', so edges are
        averaged over a partial window).
    """
    if window < 2:
        # Nothing to smooth.
        return values
    kernel = np.full(window, 1.0 / window)
    return np.convolve(values, kernel, 'same')
########################################################################
# Plot
def doPlot(c2mData, c2mTimes, cpData, cpTimes):
    """Render the noise time-series plots for the cov_to_mom results.

    Top panel: per-volume Hc/Vc noise.  Bottom panel: daily-mean noise for
    all four channels.  Site-temperature and ZDR overlays are retained in
    commented-out form for optional use.
    """
    fileName = options.c2mFilePath
    titleStr = "File: " + fileName
    hfmt = dates.DateFormatter('%y/%m/%d')
    lenMeanFilter = int(options.lenMean)

    # set up arrays for c2m
    c2mtimes = np.array(c2mTimes).astype(datetime.datetime)

    # Smooth each noise channel and track where values are finite.
    noiseHc = np.array(c2mData["meanNoiseDbmHc"]).astype(np.double)
    noiseHc = movingAverage(noiseHc, lenMeanFilter)
    noiseHcValid = np.isfinite(noiseHc)

    noiseVc = np.array(c2mData["meanNoiseDbmVc"]).astype(np.double)
    noiseVc = movingAverage(noiseVc, lenMeanFilter)
    noiseVcValid = np.isfinite(noiseVc)

    noiseHx = np.array(c2mData["meanNoiseDbmHx"]).astype(np.double)
    noiseHx = movingAverage(noiseHx, lenMeanFilter)
    noiseHxValid = np.isfinite(noiseHx)

    noiseVx = np.array(c2mData["meanNoiseDbmVx"]).astype(np.double)
    noiseVx = movingAverage(noiseVx, lenMeanFilter)
    noiseVxValid = np.isfinite(noiseVx)

    validNoiseHcTimes = c2mtimes[noiseHcValid]
    validNoiseHcVals = noiseHc[noiseHcValid]
    validNoiseVcTimes = c2mtimes[noiseVcValid]
    validNoiseVcVals = noiseVc[noiseVcValid]
    validNoiseHxTimes = c2mtimes[noiseHxValid]
    validNoiseHxVals = noiseHx[noiseHxValid]
    validNoiseVxTimes = c2mtimes[noiseVxValid]
    validNoiseVxVals = noiseVx[noiseVxValid]

    # daily stats
    (dailyTimeNoiseHc, dailyValNoiseHc) = computeDailyStats(validNoiseHcTimes, validNoiseHcVals)
    (dailyTimeNoiseVc, dailyValNoiseVc) = computeDailyStats(validNoiseVcTimes, validNoiseVcVals)
    (dailyTimeNoiseHx, dailyValNoiseHx) = computeDailyStats(validNoiseHxTimes, validNoiseHxVals)
    (dailyTimeNoiseVx, dailyValNoiseVx) = computeDailyStats(validNoiseVxTimes, validNoiseVxVals)

    # site temp, vert pointing and sun scan results
    ctimes = np.array(cpTimes).astype(datetime.datetime)
    ZdrmVert = np.array(cpData["ZdrmVert"]).astype(np.double)
    validZdrmVert = np.isfinite(ZdrmVert)
    SunscanZdrm = np.array(cpData["SunscanZdrm"]).astype(np.double)
    validSunscanZdrm = np.isfinite(SunscanZdrm)
    cptimes = np.array(cpTimes).astype(datetime.datetime)
    tempSite = np.array(cpData["TempSite"]).astype(np.double)
    validTempSite = np.isfinite(tempSite)

    # set up plots (figure size given in mm, converted to inches)
    widthIn = float(options.figWidthMm) / 25.4
    htIn = float(options.figHeightMm) / 25.4

    fig1 = plt.figure(1, (widthIn, htIn))
    ax1a = fig1.add_subplot(2,1,1,xmargin=0.0)
    ax1b = fig1.add_subplot(2,1,2,xmargin=0.0)
    #ax1c = fig1.add_subplot(3,1,3,xmargin=0.0)

    # Pad the x-axis by one day on each side.
    oneDay = datetime.timedelta(1.0)
    ax1a.set_xlim([c2mtimes[0] - oneDay, c2mtimes[-1] + oneDay])
    ax1a.set_title("Noise Hc and Vc by radar volume (dBm)")
    ax1b.set_xlim([c2mtimes[0] - oneDay, c2mtimes[-1] + oneDay])
    ax1b.set_title("Daily noise mean (dBm)")
    #ax1c.set_xlim([c2mtimes[0] - oneDay, c2mtimes[-1] + oneDay])
    #ax1c.set_title("Site temperature (C)")

    # Top panel: per-volume noise as both points and connecting lines.
    ax1a.plot(validNoiseHcTimes, validNoiseHcVals, \
              "o", label = 'NoiseHc (dBm)', color='blue')
    ax1a.plot(validNoiseHcTimes, validNoiseHcVals, \
              label = 'NoiseHc', linewidth=1, color='blue')

    ax1a.plot(validNoiseVcTimes, validNoiseVcVals, \
              "o", label = 'NoiseVc (dBm)', color='red')
    ax1a.plot(validNoiseVcTimes, validNoiseVcVals, \
              label = 'NoiseVc', linewidth=1, color='red')

    #ax1a.plot(ctimes[validSunscanZdrm], SunscanZdrm[validSunscanZdrm], \
    #          linewidth=2, label = 'Zdrm Sun/CP (dB)', color = 'green')
    #ax1a.plot(ctimes[validZdrmVert], ZdrmVert[validZdrmVert], \
    #          "^", markersize=10, linewidth=1, label = 'Zdrm Vert (dB)', \
    #          color = 'orange')

    # Bottom panel: daily means for all four channels.
    ax1b.plot(dailyTimeNoiseHc, dailyValNoiseHc, \
              label = 'NoiseHc Daily', linewidth=1, color='blue')
    ax1b.plot(dailyTimeNoiseHc, dailyValNoiseHc, \
              "^", label = 'NoiseHc Daily', color='blue', markersize=10)

    ax1b.plot(dailyTimeNoiseVc, dailyValNoiseVc, \
              label = 'NoiseVc Daily', linewidth=1, color='red')
    ax1b.plot(dailyTimeNoiseVc, dailyValNoiseVc, \
              "^", label = 'NoiseVc Daily', color='red', markersize=10)

    ax1b.plot(dailyTimeNoiseHx, dailyValNoiseHx, \
              label = 'NoiseHx Daily', linewidth=1, color='cyan')
    ax1b.plot(dailyTimeNoiseHx, dailyValNoiseHx, \
              "^", label = 'NoiseHx Daily', color='cyan', markersize=10)

    ax1b.plot(dailyTimeNoiseVx, dailyValNoiseVx, \
              label = 'NoiseVx Daily', linewidth=1, color='green')
    ax1b.plot(dailyTimeNoiseVx, dailyValNoiseVx, \
              "^", label = 'NoiseVx Daily', color='green', markersize=10)

    #ax1c.plot(cptimes[validTempSite], tempSite[validTempSite], \
    #          linewidth=1, label = 'Site Temp', color = 'red')

    #configDateAxis(ax1a, -9999, 9999, "ZDR C2m (dB)", 'upper right')
    configDateAxis(ax1a, -9999, -9999, "Noise by vol (dBm)", 'upper right')
    configDateAxis(ax1b, -9999, -9999, "Mean daily noise (dBm)", 'lower right')
    #configDateAxis(ax1c, -9999, 9999, "Temp (C)", 'upper right')

    fig1.autofmt_xdate()
    fig1.tight_layout()
    fig1.subplots_adjust(bottom=0.05, left=0.06, right=0.94, top=0.95)
    plt.show()
########################################################################
# initialize legends etc
def configDateAxis(ax, miny, maxy, ylabel, legendLoc):
    """Configure legend, labels, grid, limits and date formatting for an axis.

    :param ax: the matplotlib axis to configure.
    :param miny: lower y limit; pass -9999 to keep autoscaling.
    :param maxy: upper y limit; pass -9999 to keep autoscaling.
    :param ylabel: the y-axis label text.
    :param legendLoc: the legend location string (e.g. 'upper right').
    """
    legend = ax.legend(loc=legendLoc, ncol=6)
    for label in legend.get_texts():
        label.set_fontsize('x-small')
    ax.set_xlabel("Date")
    ax.set_ylabel(ylabel)
    ax.grid(True)
    # -9999 is the sentinel for "leave the axis autoscaled".
    if (miny > -9990 and maxy > -9990):
        ax.set_ylim([miny, maxy])
    hfmt = dates.DateFormatter('%y/%m/%d')
    ax.xaxis.set_major_locator(dates.DayLocator())
    ax.xaxis.set_major_formatter(hfmt)
    for tick in ax.xaxis.get_major_ticks():
        tick.label.set_fontsize(8)
########################################################################
# compute daily stats for a variable
def computeDailyStats(times, vals):
    """Compute per-calendar-day mean values and mean observation times.

    :param times: sequence of datetime.datetime observation times,
        in ascending order.
    :param vals: numeric values parallel to *times*; non-finite entries
        are ignored.
    :return: (dailyTimes, dailyMeans) where each entry holds the mean
        observation time and the mean value for one day.  Days with 5 or
        fewer valid observations are skipped.  Empty input yields ([], []).
    """
    dailyTimes = []
    dailyMeans = []

    nptimes = np.array(times).astype(datetime.datetime)
    npvals = np.array(vals).astype(np.double)
    if len(nptimes) == 0:
        # Fix: the original crashed on empty input (nptimes[0]).
        return (dailyTimes, dailyMeans)

    # Keep only finite values.
    validFlag = np.isfinite(npvals)
    timesValid = nptimes[validFlag]
    valsValid = npvals[validFlag]

    startTime = nptimes[0]
    endTime = nptimes[-1]
    startDate = datetime.datetime(startTime.year, startTime.month, startTime.day, 0, 0, 0)
    endDate = datetime.datetime(endTime.year, endTime.month, endTime.day, 0, 0, 0)
    oneDay = datetime.timedelta(1)

    thisDate = startDate
    while (thisDate < endDate + oneDay):
        nextDate = thisDate + oneDay
        # Accumulate values and time offsets falling within this day.
        # (Renamed from "sum" to avoid shadowing the builtin; the unused
        # "result" list the original built and sorted has been removed.)
        total = 0.0
        sumDeltaTime = datetime.timedelta(0)
        count = 0.0
        for ii, val in enumerate(valsValid, start=0):
            thisTime = timesValid[ii]
            if (thisTime >= thisDate and thisTime < nextDate):
                total = total + val
                sumDeltaTime = sumDeltaTime + (thisTime - thisDate)
                count = count + 1
        # Require more than 5 observations for a meaningful daily mean.
        if (count > 5):
            mean = total / count
            meanDeltaTime = datetime.timedelta(0, sumDeltaTime.total_seconds() / count)
            dailyMeans.append(mean)
            dailyTimes.append(thisDate + meanDeltaTime)
        thisDate = thisDate + oneDay

    return (dailyTimes, dailyMeans)
########################################################################
# Run a command in a shell, wait for it to complete
def runCommand(cmd):
    """Run *cmd* in a shell, wait for completion, and log the outcome.

    NOTE(review): shell=True executes the string through the shell; only
    pass trusted command strings to avoid shell injection.

    :param cmd: the shell command string to execute.
    """
    if (options.debug == True):
        print("running cmd:",cmd, file=sys.stderr)

    try:
        retcode = subprocess.call(cmd, shell=True)
        if retcode < 0:
            print("Child was terminated by signal: ", -retcode, file=sys.stderr)
        else:
            if (options.debug == True):
                print("Child returned code: ", retcode, file=sys.stderr)
    except OSError as e:
        print("Execution failed:", e, file=sys.stderr)
########################################################################
# Run - entry point
# Script entry point.
if __name__ == "__main__":
    main()
|
StarcoderdataPython
|
102900
|
<filename>prog.py
import sys


def main():
    """Prompt for the user's name on stdin and echo it back."""
    # Fix: the original used Python 2 print statements and raw_input(),
    # which are syntax/name errors under Python 3.
    print('what is your name?')
    sys.stdout.flush()
    name = input()
    print('your name is ' + name)
    sys.stdout.flush()


if __name__ == '__main__':
    main()
|
StarcoderdataPython
|
9708774
|
<reponame>marc-ortuno/Vocal-Percussion-Classification-for-Real-Time-Context
import numpy as np
from interfaces import pre_processing, activity_detection, feature_extraction, classificator
import pandas as pd
# noinspection INSPECTION_NAME
def init_pre_processing(by_pass=False, bands=8):
    """Configure the pre-processing stage.

    :param by_pass: True to skip pre-processing entirely.
    :param bands: number of filter-bank sub-bands to split the signal into.
    """
    global pre_processing_by_pass
    global n_bands
    pre_processing_by_pass = by_pass
    n_bands = bands
def init_activity_detection(func_type=1, by_pass=False):
    """Reset the activity-detection (onset) state.

    Requires init_pre_processing() to have set n_bands beforehand.

    :param func_type: activity-detection algorithm selector.
    :param by_pass: True to skip onset detection.
    """
    global onset_location, last_onset, hfc
    global activity_detection_type, activiy_detection_by_pass
    global previous_hfc, previous_th
    activity_detection_type = func_type
    activiy_detection_by_pass = by_pass
    onset_location = []
    last_onset = False
    hfc = 0
    # One history row per sub-band; extended via np.vstack while processing.
    previous_hfc = np.zeros(n_bands)
    previous_th = np.zeros(n_bands)
def init_feature_extraction(func_type="mfcc", n_mfcc_arg=20, by_pass=False, norm_file=[]):
    """Reset the feature-extraction state.

    :param func_type: feature type selector (e.g. "mfcc").
    :param n_mfcc_arg: number of MFCC coefficients to extract.
    :param by_pass: True to skip feature extraction.
    :param norm_file: normalization values applied to the features.
    """
    global active_signal, features, n_mfcc
    global feature_extraction_by_pass, normalization_values
    global feature_extraction_type
    feature_extraction_type = func_type
    feature_extraction_by_pass = by_pass
    normalization_values = norm_file
    n_mfcc = n_mfcc_arg
    active_signal = []
    features = []
def init_classificator(knn_model=[], by_pass=False):
    """Reset the classifier state.

    :param knn_model: pre-trained model used by the classificator block.
    :param by_pass: True to skip classification.
    """
    global model, predicted, classificator_by_pass
    classificator_by_pass = by_pass
    model = knn_model
    predicted = []
# Init audio process variables
def init(sr, b_len, audio_len=0):
    """Initialize the per-run audio processing state.

    Requires init_pre_processing() to have set n_bands beforehand.

    :param sr: sample rate in Hz.
    :param b_len: processing block (buffer) length in samples.
    :param audio_len: total audio length in samples (0 if unknown).
    """
    global samp_freq, buffer_len
    global onset_timeout, onset_duration
    global audio_size, execution_time, highest_peak
    samp_freq = sr
    buffer_len = b_len
    # Average event duration of ~100 ms, expressed as a buffer count.
    avg_duration = 0.100  # in seconds
    onset_duration = int(avg_duration / (b_len / sr))
    onset_timeout = onset_duration
    audio_size = audio_len
    execution_time = 0
    # Adaptive per-band peak estimate, seeded at 80% of the buffer length.
    highest_peak = np.full(n_bands, b_len * 0.8)
# the process function!
def process(input_buffer, output_buffer):
    """Process one audio buffer through the detection/classification chain.

    Pipeline: pre-processing -> activity (onset) detection -> feature
    extraction -> classification.  Relies on the module-level state set up
    by the init_* functions and init().

    :param input_buffer: the audio samples for this block.
    :param output_buffer: the previous output buffer (not modified here).
    :return: (n_signal, features, hfc, predicted, onset_location,
        threshold, class_type).
        NOTE(review): n_signal, hfc and threshold are only assigned when
        the corresponding stage is not by-passed; with a stage by-passed
        the return statement can raise NameError/UnboundLocalError —
        confirm the intended by-pass usage.
    """
    global last_onset
    global active_signal
    global onset_timeout
    global highest_peak
    global execution_time
    global previous_hfc
    global previous_th

    features = []
    activity_detected = False
    class_type = ""

    if not pre_processing_by_pass:
        # Pre-Processing Block: split the buffer into sub-band signals.
        subband_signal, n_signal = pre_processing(input_buffer, samp_freq, n_bands)

    if not activiy_detection_by_pass:
        # activity_detection_evaluation Block
        onset, hfc, threshold, highest_peak = activity_detection(activity_detection_type, subband_signal, samp_freq,
                                                                 buffer_len, previous_hfc, highest_peak)
        # Append this buffer's per-band HFC and threshold to the history.
        previous_hfc = np.vstack((previous_hfc, hfc))
        previous_th = np.vstack((previous_th, threshold))

        # To prevent repeated reporting of an
        # onset (and thus producing numerous false positive detections), an
        # onset is only reported if no onsets have been detected in the previous three frames (30 ms aprox).
        mHfc = previous_hfc[-2:].sum(axis=1)
        mTh = previous_th[-2:].sum(axis=1)

        if last_onset is True and onset is False:
            if onset_timeout > 0:
                # Hold the onset active until the expected event duration elapses.
                onset = True
                onset_timeout -= 1
                activity_detected = False
            else:
                onset = False
                onset_timeout = onset_duration
                activity_detected = True

        # Falling HFC against a non-rising threshold ends the event early.
        if len(mHfc) > 1 and int(mHfc[1]) < int(mHfc[0]) and len(mTh) > 1 and int(mTh[1]) <= int(mTh[0]) and\
                int(mHfc[0]) - int(mHfc[1]) < (4 * buffer_len):
            onset = False
            onset_timeout = onset_duration
            activity_detected = True

        if onset:
            # Accumulate the active sound until the offset, for later
            # feature extraction and classification.
            active_signal.extend(input_buffer)
            onset_location.extend(np.ones(buffer_len))  # Onset location for visual analysis
        else:
            onset_location.extend(np.zeros(buffer_len))

        # Offset detected
        if activity_detected:  # or (int(execution_time*samp_freq) + buffer_len >= audio_size):
            if not feature_extraction_by_pass:
                # Feature Extraction Block
                features = feature_extraction(feature_extraction_type, active_signal, samp_freq, n_mfcc, buffer_len,
                                              normalization_values)
                active_signal = []  # Clean active signal buffer
            if not classificator_by_pass:
                # Classificator Block
                class_type = classificator(features, model)
                predicted.append(class_type)
        last_onset = onset

    # Update execution time
    execution_time += (buffer_len / samp_freq)
    return n_signal, features, hfc, predicted, onset_location, threshold, class_type
def main(audio, buffer_len=512):
    """Run the block-based audio processing pipeline over a whole recording.

    Args:
        audio: object exposing ``waveform`` (1-D sample array) and
            ``sample_rate`` attributes.
        buffer_len: number of samples processed per block (default 512).

    Returns:
        dict with the processed signal, onset-location trace, per-sample HFC
        and threshold traces, class predictions and the last feature vector
        batch, keyed by upper-case names.
    """
    # Signal details
    signal = audio.waveform
    samp_freq = audio.sample_rate
    n_buffers = len(signal) // buffer_len  # trailing partial block is dropped
    # Initialise the module-level processing state used by process()
    init(samp_freq, buffer_len, n_buffers * buffer_len)
    data_type = signal.dtype
    # allocate input and output buffers
    input_buffer = np.zeros(buffer_len, dtype=data_type)
    output_buffer = np.zeros(buffer_len, dtype=data_type)
    onset_location = []
    total_features = []
    total_hfc = []
    total_th = []
    # simulate block based processing
    signal_proc = np.zeros(n_buffers * buffer_len, dtype=data_type)
    for k in range(n_buffers):
        # index the appropriate samples
        input_buffer = signal[k * buffer_len:(k + 1) * buffer_len]
        output_buffer, features, hfc, predicted, onset_location, threshold, _ = process(input_buffer, output_buffer)
        signal_proc[k * buffer_len:(k + 1) * buffer_len] = output_buffer
        total_features.extend(features)
        # np.sum() accepts both scalars and ndarrays, so no type dispatch is
        # needed here (the original if/else branches were byte-identical).
        total_hfc.extend([np.sum(hfc)] * output_buffer.size)
        total_th.extend([np.sum(threshold)] * output_buffer.size)
    # return in a dictionary
    return {'SIGNAL_PROCESSED': signal_proc, 'ONSET_LOCATIONS': onset_location, 'HFC': total_hfc, 'THRESHOLD': total_th,
            'PREDICTION': predicted, 'FEATURES': total_features}
|
StarcoderdataPython
|
4979617
|
<reponame>martinmcbride/python-imaging-book-examples
# Author: <NAME>
# Created: 2021-05-14
# Copyright (C) 2021, <NAME>
# License: MIT
# Colorize a greyscale image
from PIL import Image, ImageOps

# Demonstrate ImageOps.colorize on a grayscale photo.  Each entry maps the
# image's dark and light tones (and optionally mid tones / black-white
# points) to a colour pair and writes the result to its own output file.
source = Image.open('boat-small-grayscale.jpg')

variants = [
    # (output filename, keyword arguments for ImageOps.colorize)
    ('imageops-colorize-blue.jpg', dict(black='darkblue', white='white')),
    ('imageops-colorize-yellow.jpg', dict(black='darkblue', white='yellow')),
    ('imageops-colorize-purple.jpg', dict(black='purple', white='white', mid='mediumslateblue')),
    ('imageops-colorize-blue-bp.jpg', dict(black='darkblue', white='white', blackpoint=64, whitepoint=192)),
]

for out_name, kwargs in variants:
    ImageOps.colorize(source, **kwargs).save(out_name)
|
StarcoderdataPython
|
5120061
|
<reponame>naotohori/cafysis<filename>dat_hb_cor_from_con.py<gh_stars>1-10
#!/usr/bin/env python
# Compute per-condition correlation matrices of hydrogen-bond contact
# formation from trajectory output files (hbcon.out) found under dir_search,
# writing one .cor file per (ion concentration, force) simulation condition.
#
# Column layout of the HB definition file (e.g. bwyv.hb):
# IDX TYP CG1 NT1 CG2 NT2 DST REF
# 1 CAN 11 C03B 56 G18B 5.746 5.655
CLM_HB_IDX = 1 - 1
#CLM_HB_TYP = 2 - 1
CLM_HB_CG1 = 3 - 1
CLM_HB_NT1 = 4 - 1
CLM_HB_CG2 = 5 - 1
CLM_HB_NT2 = 6 - 1
#CLM_HB_DST = 7 - 1
CLM_HB_REF = 8 - 1
#CLM_HB_2ND = 9 - 1
# Contact distance cutoffs (declared but not used in this script).
CUTOFF_CONTACT_LOW = 0.8
CUTOFF_CONTACT_HIG = 1.2
import sys
import os
import glob
# Command line: the optional [step_final] argument caps how many frames
# are read per replica; [step_ignore] frames are skipped as equilibration.
if len(sys.argv) == 6:
    step_final = int(sys.argv[4])
    flg_final = True
elif len(sys.argv) == 5:
    flg_final = False
else:
    print('Usage: SCRIPT [HB file (bwyv.hb)] [dir_search] [step_ignore] [dir_out]')
    print(' or : SCRIPT [HB file (bwyv.hb)] [dir_search] [step_ignore] [step_final] [dir_out]')
    sys.exit(2)
filepath_hb = sys.argv[1]
dir_search = sys.argv[2]
step_ignore = int(sys.argv[3])
dir_out = sys.argv[-1]
# Read the hydrogen-bond contact definitions into parallel column lists.
f_hb = open(filepath_hb, 'r')
hbs = []
idx = []
cg1 = []
cg2 = []
nt1 = []
nt2 = []
ref = []
num_con = 0
for l in f_hb:
    l = l.split()
    idx.append(int(l[CLM_HB_IDX]))
    cg1.append(int(l[CLM_HB_CG1]))
    cg2.append(int(l[CLM_HB_CG2]))
    nt1.append(l[CLM_HB_NT1])
    nt2.append(l[CLM_HB_NT2])
    ref.append(float(l[CLM_HB_REF]))
    num_con = num_con + 1
f_hb.close()
''' Collect dirnames and detect Ion conc. and Forces. '''
dirs = glob.glob(dir_search)
cM_values = []
frc_values = []
rnds = {}
for d in dirs:
    # Directory names appear to end in ..._<cM>_<frc>_<rnd>/ where rnd is a
    # replica label (inferred from the split below — TODO confirm layout).
    d_sp = d.split('/')[-2].split('_')
    cM = d_sp[-3]
    frc = d_sp[-2]
    rnd = d_sp[-1]
    if cM not in cM_values:
        cM_values.append(cM)
    if frc not in frc_values:
        frc_values.append(frc)
    sim = (cM,frc)
    if sim in rnds:
        rnds[sim].append(rnd)
    else:
        rnds[sim] = [rnd,]
cM_values.sort()
frc_values.sort()
''' Loop for each simulation set. '''
orig_dir = os.getcwd()
for cM in cM_values:
    for frc in frc_values:
        sim = (cM,frc)
        if sim not in rnds:
            continue
        con = []  # one 0/1 contact vector per retained frame, all replicas pooled
        for rnd in rnds[sim]:
            os.chdir('cM%s/%s_%s_%s' % (cM, cM, frc, rnd))
            try:
                f_con = open('hbcon.out','r')
            except:
                print(('Skip %s' % (os.getcwd(),) ))
                os.chdir(orig_dir)
                continue
            step = 0
            for l in f_con:
                if (l.find('#') != -1):
                    continue
                step = step + 1
                if step < step_ignore:
                    continue
                # NOTE(review): this iterates the *characters* of the
                # stripped line, so rows must be contiguous 0/1 digits with
                # no separators between contacts — confirm hbcon.out format.
                con.append([int(x) for x in l.strip()])
                if flg_final and step == step_final:
                    break
            os.chdir(orig_dir)
        n = len(con)
        if n == 0:
            continue
        # Per-contact mean formation value over all retained frames.
        avg = []
        for i in range(num_con):
            avg.append(0.0)
            for j in range(n):
                avg[-1] += con[j][i]
            avg[-1] /= float(n)
        # Per-contact (population) variance.
        var = []
        for i in range(num_con):
            var.append(0.0)
            for j in range(n):
                var[-1] += (con[j][i] - avg[i])**2
            var[-1] /= float(n)
        std = []
        for v in var:
            std.append( pow(v, 0.5) )
        # Upper-triangular covariance between contact pairs.
        cov = []
        for _ in range(num_con):
            cov.append([0.0] * num_con)
        for i in range(num_con):
            for j in range(i+1, num_con):
                covsum = 0.0
                for ii in range(n):
                    covsum += (con[ii][i] - avg[i]) * (con[ii][j] - avg[j])
                cov[i][j] = covsum / float(n)
        # Pearson correlation; pairs involving a zero-variance contact stay 0.
        cor = []
        for _ in range(num_con):
            cor.append([0.0] * num_con)
        for i in range(num_con):
            if std[i] == 0.0:
                continue
            for j in range(i+1, num_con):
                if std[j] == 0.0:
                    continue
                cor[i][j] = cov[i][j] / (std[i] * std[j])
        # Write the full matrix (1-based indices, blank line between rows).
        f_out = open('%s/%s_%s.cor' % (dir_out, cM, frc), 'w')
        for i in range(num_con):
            for j in range(num_con):
                f_out.write('%i %i %f\n' % (i+1,j+1,cor[i][j]))
            f_out.write('\n')
        f_out.close()
|
StarcoderdataPython
|
8024209
|
from django import forms
from .models import (
Book,
Category,
Shelf,
Author
)
class BookCreationAddForm(forms.ModelForm):
    """ModelForm for creating a Book with its core catalogue fields."""
    class Meta:
        model = Book
        fields = ('name', 'author', 'category',
                  'amount', 'price', 'image', 'shelf', )
class CategoryCreationForm(forms.ModelForm):
    """ModelForm for creating a Category (name only)."""
    class Meta:
        model = Category
        fields = ('name',)
class CategoryUpdateForm(forms.ModelForm):
    """ModelForm for renaming an existing Category.

    Bug fix: ``Meta.model`` previously pointed at ``Shelf`` (copy-paste from
    the shelf forms below), so the form validated and saved against the
    wrong model.
    """
    name = forms.CharField(max_length=120, label="Category Name", widget=forms.TextInput(
        attrs={'placeholder': 'Enter new category name'}))

    class Meta:
        model = Category
        fields = ('name',)
class ShelfCreationForm(forms.ModelForm):
    """ModelForm for creating a Shelf (name only)."""
    class Meta:
        model = Shelf
        fields = ('name',)
class ShelfUpdateForm(forms.ModelForm):
    """ModelForm for renaming a Shelf and toggling its active flag."""
    name = forms.CharField(max_length=120, label="Shelf Name", widget=forms.TextInput(
        attrs={'placeholder': 'Enter new shelf name'}))
    class Meta:
        model = Shelf
        fields = ('name', 'active')
class AuthorCreationForm(forms.ModelForm):
    """ModelForm for creating an Author with biographic fields."""
    class Meta:
        model = Author
        fields = ('first_name', 'last_name', 'born', 'died', 'image', )
|
StarcoderdataPython
|
3202742
|
from collections import deque
import collections
import copy
def loadGraphFromFile(filename):
    """Parse a tab-separated RDD lineage dump into three lookup tables.

    Each line holds: rddId, comma-separated parent ids (may be empty), name,
    an unused column, recompute count, total used count.

    Returns:
        rddId2Info: rddId -> [name, recomputeCount, totalUsedCount]
        src2dest: parent rddId -> list of child rddIds
        dest2src: child rddId -> list of parent rddIds
    """
    rddId2Info, src2dest, dest2src = {}, {}, {}
    with open(filename) as f:
        for line in f:
            fields = line.strip().split("\t")
            rddId = int(fields[0])
            parentIDList = [int(p) for p in fields[1].split(",")] if fields[1] else []
            rddId2Info[rddId] = [fields[2], int(fields[4]), int(fields[5])]
            for pid in parentIDList:
                # setdefault replaces the original if-not-in-then-create dance.
                src2dest.setdefault(pid, []).append(rddId)
                dest2src.setdefault(rddId, []).append(pid)
    return rddId2Info, src2dest, dest2src
def get_ancesters(rddId2Info, src2dest, dest2src):
    """Compute, for every RDD reachable from a leaf, its ancestor set.

    A leaf is an RDD that has parents but no children.  Each node's ancestor
    set includes the node itself.  ``rddId2Info`` is accepted for interface
    compatibility but is not consulted.

    Returns:
        dict mapping rddId -> set of ancestor rddIds (self included).
    """
    # Leaves: appear as a destination but never as a source.
    leafNode = set(dest2src.keys()).difference(set(src2dest.keys()))
    rdd2AncestorNodes = {}

    def dfs(node):
        # Memoised depth-first walk up the lineage graph.
        if node in rdd2AncestorNodes:
            return rdd2AncestorNodes[node]
        rdd2AncestorNodes[node] = set([node])
        for parentRDD in dest2src.get(node, []):
            dfs(parentRDD)
            rdd2AncestorNodes[node] |= rdd2AncestorNodes[parentRDD]
        return rdd2AncestorNodes[node]

    # (An unused `queue = deque(leafNode)` was removed — dead code.)
    for node in leafNode:
        dfs(node)
    return rdd2AncestorNodes
# Build the lineage tables and ancestor sets for the CovidInfo job trace
# (no-persist run).  These module-level globals are read by the
# recommendation functions below.
rddId2Info, src2dest, dest2src = loadGraphFromFile("CovidInfo_Nopersist.txt")
rdd2AncestorNodes = get_ancesters(rddId2Info, src2dest, dest2src)
def simpleThresholdRecommendation(threshold, rddId2Info):
    """Return ids of RDDs whose recompute count is at least *threshold*."""
    return [rdd_id for rdd_id, info in rddId2Info.items() if info[1] >= threshold]
def computationCostRecommendation(threshold, rddId2Info):
    """Return ids whose recompute count times ancestor-set size reaches *threshold*.

    Relies on the module-level ``rdd2AncestorNodes`` mapping built above.
    """
    recommended = []
    for rdd_id, info in rddId2Info.items():
        # Cost model: recompute count weighted by lineage depth (fan-in).
        cost = info[1] * len(rdd2AncestorNodes[rdd_id])
        if cost >= threshold:
            recommended.append(rdd_id)
    return recommended
def onlyChild(threshold, method, rddId2Info):
    """Greedy persist recommendation: persist one RDD per iteration.

    On each iteration, only the candidate with the largest rdd id (the
    deepest child) among those selected by *method* is persisted; the
    child's recompute count is then subtracted from each of its ancestors
    before candidates are recomputed, so work already saved by persisting
    the child is not counted again.  Relies on the module-level
    ``rdd2AncestorNodes`` mapping.

    Args:
        threshold: cutoff forwarded to *method*.
        method: selection function, e.g. simpleThresholdRecommendation or
            computationCostRecommendation.
        rddId2Info: rddId -> [name, recomputeCount, totalUsedCount]; a deep
            copy is taken so the caller's mapping is left untouched.
    """
    # (A stale commented-out draft of this logic was removed.)
    rddId2Info = copy.deepcopy(rddId2Info)
    rdds_threshold = method(threshold, rddId2Info)
    rdds_threshold.sort()
    while rdds_threshold:
        child = rdds_threshold.pop()  # largest id among remaining candidates
        parents = rdd2AncestorNodes[child]
        for parent in parents:
            # Persisting `child` saves its recompute count for every ancestor.
            val = rddId2Info[parent][1] - rddId2Info[child][1]
            if val >= 0:
                rddId2Info[parent][1] = val
        rddId2Info[child][1] = 0
        rdds_threshold = method(threshold, rddId2Info)
        rdds_threshold.sort()
        print(f"Persist rdd id {child} and Name {rddId2Info[child][0]}")
if __name__ == "__main__":
    # Demo run over the CovidInfo trace loaded at module import time:
    # compare the plain-threshold and cost-weighted strategies, then their
    # iterative "only child" refinements.
    rddIds = simpleThresholdRecommendation(2, rddId2Info)
    nameList = [rddId2Info[child][0] for child in rddIds]
    print(f"Persist with method 1: {rddIds}, {nameList}")
    rddIds = computationCostRecommendation(5, rddId2Info)
    nameList = [rddId2Info[child][0] for child in rddIds]
    print(f"Persist with method 2: {rddIds}, {nameList}")
    print("Persist with method 1 with improvement:")
    onlyChild(2, simpleThresholdRecommendation, rddId2Info)
    print("Persist with method 2 with improvement:")
    onlyChild(5, computationCostRecommendation, rddId2Info)
|
StarcoderdataPython
|
12849188
|
#!/usr/bin/env python
# coding: utf-8
# Notebook export: train and evaluate a Random Forest classifier that
# separates RA (rheumatoid arthritis) patients from healthy donors using
# bulk RNA-seq PBMC expression of a pre-selected gene signature.
# In[5]:
import pandas as pd
import numpy as np
import glob,os
from glob import iglob
#import scanpy as sc
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import RocCurveDisplay
from sklearn.datasets import load_wine
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.model_selection import StratifiedShuffleSplit
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import roc_auc_score
from sklearn.metrics import accuracy_score
import matplotlib.pyplot as plt
import matplotlib as mpl
from sklearn import metrics
from sklearn.model_selection import KFold
from sklearn.model_selection import StratifiedKFold
import joblib
import time
import random
import matplotlib as mpl
# Type 42 (TrueType) fonts keep text editable in exported PDF/PS figures.
mpl.rcParams['pdf.fonttype']=42
mpl.rcParams['ps.fonttype']=42
# # RA PBMC data for machine learning
# In[6]:
### training data import
# Rows are genes, columns are samples (transposed to samples x genes below).
ra=pd.read_csv('../RNA_seq_for_autoimmune_disease/RA_bulk/GSE90081/GSE90081_ra_part.csv',index_col=0)
hd=pd.read_csv('../RNA_seq_for_autoimmune_disease/RA_bulk/GSE90081/GSE90081_hd_part.csv',index_col=0)
hd1=pd.read_csv('../RNA_seq_for_autoimmune_disease/health_bulk/GSE183204_HC_fpkm.csv',sep=',',index_col=0)
# In[7]:
### feature import
# Keep only signature genes present in every dataset; 'patient' is appended
# as the label column name.
features=pd.read_csv('../script4paper2/combined_gene_for_machine_learning.csv',index_col=1).index.values
features=np.append(features,'patient')
features=[i for i in features if i in ra.index.values]
features=[i for i in features if i in hd1.index.values ]
# # remove unwanted gene
# In[8]:
### remove unwanted gene from validation data
hd1=hd1.loc[features,:].T
ra_part=ra.loc[features,:].T
hd_part=hd.loc[features,:].T
# # label data
# In[9]:
### label training data
# Binary target: 1 = RA patient, 0 = healthy donor.
ra_part['patient']=1
hd_part['patient']=0
hd1['patient']=0
# # machine learning data training
# In[39]:
### merge training data
df=pd.concat([ra_part,hd_part,hd1],axis=0)
### get data labels
label=df.patient.values
### split data with ratio 30% for test and 70% for training
Xtrain, Xtest, Ytrain, Ytest = train_test_split(df.drop(columns=['patient']),label,test_size=0.3)
### rf model initialization
rfc = RandomForestClassifier(random_state=43,class_weight='balanced',oob_score=True)
rfc = rfc.fit(Xtrain,Ytrain)
### document model score
score_r = rfc.score(Xtest,Ytest)
### save feature importance
ra_pbmc=pd.DataFrame(rfc.feature_importances_)
ra_pbmc['feature_importance']=features
ra_pbmc.to_csv('./model/ra_pbmc_feature_importance_bulk.csv')
### print F score and Out of bag score
print("Random Forest:{}".format(score_r))
print("OOB score:",rfc.oob_score_)
# # Figure 7A
# In[40]:
### Generating ROC curve
fig = plt.figure(figsize=(8, 8))
ax = plt.gca()
rfc_disp = RocCurveDisplay.from_estimator(rfc, Xtest, Ytest, ax=ax, alpha=0.8)
plt.legend(loc=4,prop={'size': 10})
plt.xlabel('False Positive Rate', fontsize=18)
plt.ylabel('True Positive Rate', fontsize=16)
ax.plot([0, 1], [0, 1], ls="--", c=".3")
mpl.rcParams['pdf.fonttype']=42
mpl.rcParams['ps.fonttype']=42
# NOTE(review): plt.savefig has no width/height parameters; figure size is
# set by figsize above — confirm these kwargs are intentional.
plt.savefig('./figure6_and_7/7a_ra_pbmc_bulk_auc.pdf',width=4,height=5)
# # save/load best performance model
# In[24]:
### save the best performance model
#joblib.dump(rfc, './model/ra_synovial_bulk_best.model')
### load model
#rfc=joblib.load('./model/sle_best.model')
# In[19]:
### 10-fold cross validation
print(cross_val_score(rfc,df.drop(columns=['patient']),label,cv=10).mean())
print(cross_val_score(rfc,df.drop(columns=['patient']),label,cv=10).var())
# # Figure 7D
# In[42]:
# Bar chart of per-gene feature importances saved earlier.
ra_feature=pd.read_csv('./model/ra_pbmc_feature_importance_bulk.csv')
fig, ax = plt.subplots(figsize=(15, 5))
ax.bar(x=ra_feature['feature_importance'], height=ra_feature['0'])
ax.set_title("Feature importance for RA bulk RNA PBMC model", fontsize=15)
plt.xticks(rotation = 90)
mpl.rcParams['pdf.fonttype']=42
mpl.rcParams['ps.fonttype']=42
plt.savefig('./figure6_and_7/7d_ra_pbmc_bulk.pdf',width=15,height=5)
# # Hyper-parameter adjust
# In[795]:
# Coarse scan of n_estimators (step 10), timing the sweep.
data=df.drop(columns=['patient'])
label=df.patient.values
start=time.time()
scorel = []
for i in range(0,200,10): # loop for 0-200 decision trees
    rfc = RandomForestClassifier(n_estimators=i+1,n_jobs=-1,random_state=0)
    score = cross_val_score(rfc,data,label,cv=10).mean()
    scorel.append(score)
print(max(scorel),(scorel.index(max(scorel))*10)+1)
end=time.time()
print('Running time: %s Seconds'%(end-start))
plt.figure(figsize=[20,5])
plt.plot(range(1,201,10),scorel)
plt.show()
# In[801]:
# Fine scan of n_estimators around the best coarse value.
scorel = []
for i in range(185,205):
    rfc = RandomForestClassifier(n_estimators=i+1,n_jobs=-1,random_state=0)
    score = cross_val_score(rfc,data,label,cv=10).mean()
    scorel.append(score)
print(max(scorel),([*range(185,205)][scorel.index(max(scorel))]))
plt.figure(figsize=[20,5])
plt.plot(range(185,205),scorel)
plt.show()
# In[802]:
# Grid search over max_depth at the chosen n_estimators.
start=time.time()
param_grid = {'max_depth':np.arange(1, 90,2)}
alg = RandomForestClassifier(n_estimators=190,random_state=0)
GS = GridSearchCV(alg,param_grid,cv=10)
GS.fit(data,label)
print(GS.best_params_)
print(GS.best_score_)
end=time.time()
print('Running time: %s Seconds'%(end-start))
# In[803]:
# Grid search over max_features at the chosen n_estimators.
start=time.time()
param_grid = {'max_features':np.arange(5,80,1)}
rfc = RandomForestClassifier(n_estimators=190,random_state=0)
GS = GridSearchCV(rfc,param_grid,cv=10)
GS.fit(data,label)
print(GS.best_params_)
print(GS.best_score_)
end=time.time()
print('Running time: %s Seconds'%(end-start))
# # 100 loop of 10-fold cross validation
# In[35]:
# Repeated stratified 10-fold CV to estimate AUC/accuracy variability.
df_n=df.drop(columns=['patient'])
rfc_l = []
fpr_l=[]
tpr_l=[]
acc_l=[]
skf =StratifiedKFold(n_splits=10)
for i in range(100):
    for train_index, test_index in skf.split(df_n,label):
        rfc = RandomForestClassifier(random_state=0,class_weight="balanced",oob_score=True)
        rfc = rfc.fit(df_n.iloc[train_index],label[train_index])
        rfc_l.append(roc_auc_score(label[test_index], rfc.predict_proba(df_n.iloc[test_index])[:, 1]))
        acc_l.append(accuracy_score(label[test_index], rfc.predict(df_n.iloc[test_index])))
# In[36]:
### average AUC and its standard deviation error
print(np.mean(rfc_l))
print(np.std(rfc_l))
|
StarcoderdataPython
|
5121490
|
<reponame>spacemanspiff2007/aerich<filename>aerich/models.py
from tortoise import Model, fields
# Upper bound for a migration version string stored in the tracking table.
MAX_VERSION_LENGTH = 255
class Aerich(Model):
    """Tortoise ORM model tracking applied migrations (one row per revision)."""
    # Migration version identifier (e.g. a filename stem).
    version = fields.CharField(max_length=MAX_VERSION_LENGTH)
    # Name of the Tortoise app the migration belongs to.
    app = fields.CharField(max_length=20)
    # JSON payload attached to the revision — schema not visible here; see aerich docs.
    content = fields.JSONField()
    class Meta:
        # Newest migration first when querying.
        ordering = ["-id"]
|
StarcoderdataPython
|
1641352
|
""" The signals module provides classes to build buy/sell signals
Notes
------
All strategies should inherit from BaseSignal, and provide a request_historical
method. For details of this method see docstring of base/BaseSignal or the
request_historical method in ZeroCrossBuyUpSellDown in this module.
"""
#from abc import ABC,abstractmethod
import copy
import numpy as np
import pandas as pd
from .base import BaseSignal
class ZeroCrossBuyUpSellDown(BaseSignal):
    """ Signal that checks for indicator crossing zero

    This signal gives a buy signal for a positive gradient crossing, and a
    sell signal for a negative gradient crossing.
    """
    def __init__(self,indicator,filter,extra_param=0.0):
        """ Signal initialised with an indicator and a filter, and other params

        Args:
         - indicator: a models.indicator object to collect indicator for
                      signal to base its decisions on
         - filter: a models.filter object for ticker selection, or None to
                   consider all tickers
         - extra_param: an extra parameter of this signal
        """
        self.extra_param=extra_param
        super().__init__(indicator,filter)
    def request_historical(self,stocks_df,signal_name='signal'):
        """ use historical data to get a dictionary of signals

        Args:
         - stocks_df: pandas dataframe of tickers over time
         - signal_name: a name to give this signal as output column
        Returns:
         - signal_dict: a dictionary with keys being the tickers that the
                        signal considers (selected by this signal's filter),
                        and values being dataframes, indexed by times at which
                        signals are seen, and a column named by argument
                        'signal_name', with +/-1 for a buy/sell signal.
        Raises:
         - TypeError: if signal_name is not a str or stocks_df is not a
                      pandas DataFrame.
        """
        if not isinstance(signal_name,str):
            raise TypeError("signal_name must be a string")
        if not isinstance(stocks_df,pd.DataFrame):
            # Bug fix: this branch previously raised with the signal_name
            # message (copy-paste error).
            raise TypeError("stocks_df must be a pandas DataFrame")
        if self.filter is not None:
            stock_df = self.filter.apply_in(stocks_df) # new df, not overwritten
            in_to_out_dict = self.filter.output_map()
        else:
            stock_df = stocks_df
            # Bug fix: with no filter, output_map() used to be called on
            # None below; map every ticker to itself instead.
            in_to_out_dict = {c: [c] for c in stock_df.columns.to_list()}
        indi = self.indicator.get_indicator(stock_df)
        signal_dict = dict()
        # loop over tickers
        for c in stock_df.columns.to_list():
            indi_comp_0 = indi[c].values*indi[c].shift().values # <zero at cross
            indi_comp_0[0] = 1.0 # instead of NaN - make >0 - supresses warnings
            indi_comp_g = np.sign(indi[c].values-indi[c].shift().values) #grad - up or down
            cross_inds = np.where(indi_comp_0<0.0) # indices of crossing
            # for this ticker, dataframe of all crossing times, and whether indicator
            # was growing or falling
            mydf = pd.DataFrame(index=stock_df.iloc[cross_inds].index,
                                data={signal_name:indi_comp_g[cross_inds]})
            # append dataframe into dict of signals
            for tick_out in in_to_out_dict[c]:
                signal_dict[tick_out] = mydf
        return signal_dict
|
StarcoderdataPython
|
6623065
|
from unittest import TestCase
import simplejson as json
from mock import patch, PropertyMock, Mock
from pyqrllib.pyqrllib import bin2hstr
from qrl.core.misc import logger
from qrl.core.AddressState import AddressState
from qrl.core.ChainManager import ChainManager
from qrl.core.TransactionInfo import TransactionInfo
from qrl.core.txs.Transaction import Transaction
from qrl.core.txs.TransferTransaction import TransferTransaction
from tests.core.txs.testdata import test_json_Simple, test_signature_Simple
from tests.misc.helper import get_alice_xmss, get_bob_xmss, get_slave_xmss, replacement_getTime
logger.initialize_default()
@patch('qrl.core.txs.Transaction.logger')
class TestSimpleTransaction(TestCase):
    def __init__(self, *args, **kwargs):
        # Build shared XMSS fixtures once per test instance; Alice is pinned
        # to OTS index 10 so signature-related expectations are predictable.
        super(TestSimpleTransaction, self).__init__(*args, **kwargs)
        self.alice = get_alice_xmss()
        self.bob = get_bob_xmss()
        self.slave = get_slave_xmss()
        self.alice.set_ots_index(10)
        self.maxDiff = None
    def setUp(self):
        # Canonical transaction under test: Alice sends 100 to Bob, fee 1.
        # Individual tests sign it when they need a valid signature.
        self.tx = TransferTransaction.create(
            addrs_to=[self.bob.address],
            amounts=[100],
            fee=1,
            xmss_pk=self.alice.pk
        )
    def test_create(self, m_logger):
        # Alice sending coins to Bob
        tx = TransferTransaction.create(addrs_to=[self.bob.address],
                                        amounts=[100],
                                        fee=1,
                                        xmss_pk=self.alice.pk)
        self.assertTrue(tx)
    def test_create_negative_amount(self, m_logger):
        # Negative transfer amounts must be rejected at creation time.
        with self.assertRaises(ValueError):
            TransferTransaction.create(addrs_to=[self.bob.address],
                                       amounts=[-100],
                                       fee=1,
                                       xmss_pk=self.alice.pk)
    def test_create_negative_fee(self, m_logger):
        # Negative fees must also be rejected at creation time.
        with self.assertRaises(ValueError):
            TransferTransaction.create(addrs_to=[self.bob.address],
                                       amounts=[-100],
                                       fee=-1,
                                       xmss_pk=self.alice.pk)
    def test_to_json(self, m_logger):
        # Serialization must match the known-good fixture JSON exactly.
        tx = TransferTransaction.create(addrs_to=[self.bob.address],
                                        amounts=[100],
                                        fee=1,
                                        xmss_pk=self.alice.pk)
        txjson = tx.to_json()
        self.assertEqual(json.loads(test_json_Simple), json.loads(txjson))
    def test_from_json(self, m_logger):
        # Deserialization must restore both the common Transaction fields
        # and the TransferTransaction-specific content.
        tx = Transaction.from_json(test_json_Simple)
        tx.sign(self.alice)
        self.assertIsInstance(tx, TransferTransaction)
        # Test that common Transaction components were copied over.
        self.assertEqual(0, tx.nonce)
        self.assertEqual('010300a1da274e68c88b0ccf448e0b1916fa789b01eb2ed4e9ad565ce264c9390782a9c61ac02f',
                         bin2hstr(tx.addr_from))
        self.assertEqual('01030038ea6375069f8272cc1a6601b3c76c21519455603d370036b97c779ada356'
                         '5854e3983bd564298c49ae2e7fa6e28d4b954d8cd59398f1225b08d6144854aee0e',
                         bin2hstr(tx.PK))
        self.assertEqual('554f546305d4aed6ec71c759942b721b904ab9d65eeac3c954c08c652181c4e8', bin2hstr(tx.txhash))
        self.assertEqual(10, tx.ots_key)
        self.assertEqual(test_signature_Simple, bin2hstr(tx.signature))
        # Test that specific content was copied over.
        self.assertEqual('0103001d65d7e59aed5efbeae64246e0f3184d7c42411421eb385ba30f2c1c005a85ebc4419cfd',
                         bin2hstr(tx.addrs_to[0]))
        self.assertEqual(100, tx.total_amount)
        self.assertEqual(1, tx.fee)
    def test_validate_tx(self, m_logger):
        # If we change amount, fee, addr_from, addr_to, (maybe include xmss stuff) txhash should change.
        # Here we use the tx already defined in setUp() for convenience.
        # We must sign the tx before validation will work.
        self.tx.sign(self.alice)
        # We have not touched the tx: validation should pass.
        self.assertTrue(self.tx.validate_or_raise())
    def test_validate_tx2(self, m_logger):
        # Tampering with the transaction hash after signing must invalidate.
        tx = TransferTransaction.create(
            addrs_to=[self.bob.address],
            amounts=[100],
            fee=1,
            xmss_pk=self.alice.pk
        )
        tx.sign(self.alice)
        self.assertTrue(tx.validate_or_raise())
        tx._data.transaction_hash = b'abc'
        # Should fail, as we have modified with invalid transaction_hash
        with self.assertRaises(ValueError):
            tx.validate_or_raise()
    @patch('qrl.core.txs.Transaction.config')
    def test_validate_tx_invalid(self, m_config, m_logger):
        # Test all the things that could make a TransferTransaction invalid
        self.tx.sign(self.alice)
        # Validation in creation, Protobuf, type conversion etc. gets in our way all the time!
        # So to get dirty data to the validate() function, we need PropertyMocks
        with patch('qrl.core.txs.TransferTransaction.TransferTransaction.amounts',
                   new_callable=PropertyMock) as m_amounts:
            # TX amount of 0 shouldn't be allowed.
            m_amounts.return_value = [0]
            with self.assertRaises(ValueError):
                self.tx.validate_or_raise()
        with patch('qrl.core.txs.TransferTransaction.TransferTransaction.fee', new_callable=PropertyMock) as m_fee:
            m_fee.return_value = -1
            with self.assertRaises(ValueError):
                self.tx.validate_or_raise()
        with patch('qrl.core.txs.TransferTransaction.TransferTransaction.addrs_to',
                   new_callable=PropertyMock) as m_addrs_to:
            with patch('qrl.core.txs.TransferTransaction.TransferTransaction.amounts',
                       new_callable=PropertyMock) as m_amounts:
                # Validation could fail because len(m_addrs_to) != len(m_amounts),
                # or if len(m_addrs_to) > transaction_multi_output_limit.
                # This second patch level is to make sure the only the latter case happens.
                m_amounts = [100, 100, 100, 100]
                m_config.dev.transaction_multi_output_limit = 3
                m_addrs_to.return_value = [2, 2, 2, 2]
                with self.assertRaises(ValueError):
                    self.tx.validate_or_raise()
        with patch('qrl.core.txs.TransferTransaction.TransferTransaction.addrs_to',
                   new_callable=PropertyMock) as m_addrs_to:
            # len(addrs_to) must equal len(amounts)
            m_addrs_to.return_value = [2, 2]
            with self.assertRaises(ValueError):
                self.tx.validate_or_raise()
        with patch('qrl.core.txs.TransferTransaction.TransferTransaction.addr_from',
                   new_callable=PropertyMock) as m_addr_from:
            m_addr_from.return_value = b'If this isnt invalid Ill eat my shoe'
            with self.assertRaises(ValueError):
                self.tx.validate_or_raise()
        with patch('qrl.core.txs.TransferTransaction.TransferTransaction.addrs_to',
                   new_callable=PropertyMock) as m_addrs_to:
            with patch('qrl.core.txs.TransferTransaction.TransferTransaction.amounts',
                       new_callable=PropertyMock) as m_amounts:
                m_amounts.return_value = [100, 100]
                m_addrs_to.return_value = [self.bob.address, b'If this isnt invalid Ill eat my shoe']
                with self.assertRaises(ValueError):
                    self.tx.validate_or_raise()
    def test_validate_extended(self, m_logger):
        """
        validate_extended() handles these parts of the validation:
        1. Master/slave
        2. balance, amount + fee from AddressState
        3. OTS key reuse from AddressState
        :return:
        """
        m_addr_state = Mock(autospec=AddressState, balance=200)
        m_addr_from_pk_state = Mock(autospec=AddressState)
        m_addr_from_pk_state.ots_key_reuse.return_value = False
        self.tx.validate_slave = Mock(autospec=Transaction.validate_slave, return_value=True)
        self.tx.sign(self.alice)
        # Happy path: enough balance, no OTS reuse, slave check passes.
        result = self.tx.validate_extended(m_addr_state, m_addr_from_pk_state)
        self.assertTrue(result)
        # Suppose there was ots key reuse. The function should then return false.
        m_addr_from_pk_state.ots_key_reuse.return_value = True
        result = self.tx.validate_extended(m_addr_state, m_addr_from_pk_state)
        self.assertFalse(result)
        # Reset conditions from above
        m_addr_from_pk_state.ots_key_reuse.return_value = False
        # Suppose the slave XMSS address does not have permission for this type of Transaction. It should return False.
        self.tx.validate_slave.return_value = False
        result = self.tx.validate_extended(m_addr_state, m_addr_from_pk_state)
        self.assertFalse(result)
        # Reset conditions from above
        self.tx.validate_slave.return_value = True
        # Suppose the address doesn't have enough coins.
        m_addr_state.balance = 99
        result = self.tx.validate_extended(m_addr_state, m_addr_from_pk_state)
        self.assertFalse(result)
    def test_validate_transaction_pool(self, m_logger):
        """
        Two TransferTransactions. Although they're the same, they are signed with different OTS indexes.
        Therefore they should not conflict when they are both in the TransactionPool.
        :return:
        """
        tx = self.tx
        tx2 = TransferTransaction.create(
            addrs_to=[self.bob.address],
            amounts=[100],
            fee=1,
            xmss_pk=self.alice.pk
        )
        # Signing twice with the same XMSS advances its OTS index, so the
        # two signatures use different indexes.
        tx.sign(self.alice)
        tx2.sign(self.alice)
        tx_info = Mock(autospec=TransactionInfo, transaction=tx)
        tx2_info = Mock(autospec=TransactionInfo, transaction=tx2)
        transaction_pool = [(replacement_getTime(), tx_info), (replacement_getTime(), tx2_info)]
        result = tx.validate_transaction_pool(transaction_pool)
        self.assertTrue(result)
    def test_validate_transaction_pool_reusing_ots_index(self, m_logger):
        """
        Two different TransferTransactions. They are signed with the same OTS indexe, from the same public key.
        Therefore they should conflict.
        :return:
        """
        tx = self.tx
        tx2 = TransferTransaction.create(
            addrs_to=[self.bob.address],
            amounts=[100],
            fee=5,
            xmss_pk=self.alice.pk
        )
        # alice_clone's OTS index is still at 10, while self.alice will be at 11 after signing.
        alice_clone = get_alice_xmss()
        alice_clone.set_ots_index(10)
        tx.sign(self.alice)
        tx2.sign(alice_clone)
        tx_info = Mock(autospec=TransactionInfo, transaction=tx)
        tx2_info = Mock(autospec=TransactionInfo, transaction=tx2)
        transaction_pool = [(replacement_getTime(), tx_info), (replacement_getTime(), tx2_info)]
        result = tx.validate_transaction_pool(transaction_pool)
        self.assertFalse(result)
    def test_validate_transaction_pool_different_pk_same_ots_index(self, m_logger):
        """
        Two TransferTransactions. They are signed with the same OTS indexes, but from different public keys.
        Therefore they should NOT conflict.
        :return:
        """
        tx = self.tx
        tx2 = TransferTransaction.create(
            addrs_to=[self.bob.address],
            amounts=[100],
            fee=1,
            xmss_pk=self.bob.pk
        )
        tx.sign(self.alice)
        tx2.sign(self.bob)
        tx_info = Mock(autospec=TransactionInfo, transaction=tx)
        tx2_info = Mock(autospec=TransactionInfo, transaction=tx2)
        transaction_pool = [(replacement_getTime(), tx_info), (replacement_getTime(), tx2_info)]
        result = tx.validate_transaction_pool(transaction_pool)
        self.assertTrue(result)
    def test_apply_state_changes(self, m_logger):
        """
        apply_state_changes() is the part that actually updates everybody's balances.
        Then it forwards the addresses_state to _apply_state_changes_for_PK(), which updates everybody's addresses's
        nonce, OTS key index, and associated TX hashes
        If there is no AddressState for a particular Address, nothing is done.
        """
        addresses_state = {
            self.alice.address: Mock(autospec=AddressState, name='alice AddressState', transaction_hashes=[],
                                     balance=200),
            self.bob.address: Mock(autospec=AddressState, name='bob AddressState', transaction_hashes=[], balance=0),
            self.slave.address: Mock(autospec=AddressState, name='slave AddressState', transaction_hashes=[], balance=0)
        }
        self.tx._apply_state_changes_for_PK = Mock(autospec=TransferTransaction._apply_state_changes_for_PK)
        self.tx.apply_state_changes(addresses_state)
        # Now Alice should have 99 coins left (200 - 100 - 1) and Bob should have 100 coins.
        self.assertEqual(99, addresses_state[self.alice.address].balance)
        self.assertEqual(100, addresses_state[self.bob.address].balance)
        self.tx._apply_state_changes_for_PK.assert_called_once()
        # If there are no AddressStates related to the Addresses in this transaction, do nothing.
        self.tx._apply_state_changes_for_PK.reset_mock()
        addresses_state_dummy = {
            b'a': 'ABC',
            b'b': 'DEF'
        }
        self.tx.apply_state_changes(addresses_state_dummy)
        self.assertEqual(addresses_state_dummy, {b'a': 'ABC', b'b': 'DEF'})
        self.tx._apply_state_changes_for_PK.assert_called_once()
    def test_apply_state_changes_tx_sends_to_self(self, m_logger):
        """
        If you send coins to yourself, you should only lose the fee for the Transaction.
        """
        addresses_state = {
            self.alice.address: Mock(autospec=AddressState, name='alice AddressState', transaction_hashes=[],
                                     balance=200),
            self.bob.address: Mock(autospec=AddressState, name='bob AddressState', transaction_hashes=[], balance=0),
            self.slave.address: Mock(autospec=AddressState, name='slave AddressState', transaction_hashes=[], balance=0)
        }
        tx = TransferTransaction.create(
            addrs_to=[self.alice.address],
            amounts=[100],
            fee=1,
            xmss_pk=self.alice.pk
        )
        tx._apply_state_changes_for_PK = Mock(autospec=TransferTransaction._revert_state_changes_for_PK)
        tx.apply_state_changes(addresses_state)
        # Only the fee is lost: 200 - 1 = 199.
        self.assertEqual(199, addresses_state[self.alice.address].balance)
        self.assertIn(tx.txhash, addresses_state[self.alice.address].transaction_hashes)
    def test_apply_state_changes_multi_send(self, m_logger):
        """
        Test that apply_state_changes() also works with multiple recipients.
        """
        addresses_state = {
            self.alice.address: Mock(autospec=AddressState, name='alice AddressState', transaction_hashes=[],
                                     balance=200),
            self.bob.address: Mock(autospec=AddressState, name='bob AddressState', transaction_hashes=[], balance=0),
            self.slave.address: Mock(autospec=AddressState, name='slave AddressState', transaction_hashes=[], balance=0)
        }
        tx_multisend = TransferTransaction.create(
            addrs_to=[self.bob.address, self.slave.address],
            amounts=[20, 20],
            fee=1,
            xmss_pk=self.alice.pk
        )
        tx_multisend._apply_state_changes_for_PK = Mock(autospec=TransferTransaction._apply_state_changes_for_PK)
        tx_multisend.apply_state_changes(addresses_state)
        # Now Alice should have 159 coins left (200 - 20 - 20 - 1) and Bob should have 100 coins.
        self.assertEqual(159, addresses_state[self.alice.address].balance)
        self.assertEqual(20, addresses_state[self.bob.address].balance)
        self.assertEqual(20, addresses_state[self.slave.address].balance)
        tx_multisend._apply_state_changes_for_PK.assert_called_once()
    def test_apply_state_changes_for_PK(self, m_logger):
        """
        This updates the node's AddressState database with which OTS index a particular address should be on, and what
        tx hashes is this address associated with.
        Curiously enough, if the TX was signed by a master XMSS tree, it doesn't add this tx's txhash to the list of
        txs that address is associated with.
        :return:
        """
        addr_state = {
            self.alice.address: Mock(autospec=AddressState)
        }
        # Capture the OTS index before signing; sign() consumes one slot.
        old_ots_index = self.alice.ots_index
        self.tx.sign(self.alice)
        self.tx._apply_state_changes_for_PK(addr_state)
        # The signer's nonce is bumped and the pre-signing OTS index marked used.
        addr_state[self.alice.address].increase_nonce.assert_called_once()
        addr_state[self.alice.address].set_ots_key.assert_called_once_with(old_ots_index)
    def test_apply_state_changes_for_PK_master_slave_XMSS(self, m_logger):
        """
        If the TX was signed by a slave XMSS, the slave XMSS's AddressState should be updated (not the master's).
        :return:
        """
        # The slave signs on behalf of the master (alice) via master_addr.
        tx = TransferTransaction.create(
            addrs_to=[self.bob.address],
            amounts=[100],
            fee=1,
            xmss_pk=self.slave.pk,
            master_addr=self.alice.address
        )
        addr_state = {
            self.alice.address: Mock(autospec=AddressState, name='alice AddressState'),
            self.slave.address: Mock(autospec=AddressState, name='slave AddressState')
        }
        old_ots_index = self.slave.ots_index
        tx.sign(self.slave)
        tx._apply_state_changes_for_PK(addr_state)
        # All PK bookkeeping lands on the slave's AddressState, not alice's.
        addr_state[self.slave.address].increase_nonce.assert_called_once()
        addr_state[self.slave.address].set_ots_key.assert_called_once_with(old_ots_index)
        # Unlike master-signed TXs, slave-signed TXs DO record the txhash here.
        addr_state[self.slave.address].transaction_hashes.append.assert_called_once()
    def test_revert_state_changes(self, m_logger):
        """
        Alice has sent 100 coins to Bob, using 1 as Transaction fee. Now we need to undo this.
        """
        # Post-transaction state: alice 99 (200 - 100 - 1), bob 100.
        addresses_state = {
            self.alice.address: Mock(autospec=AddressState, name='alice AddressState',
                                     transaction_hashes=[self.tx.txhash],
                                     balance=99),
            self.bob.address: Mock(autospec=AddressState, name='bob AddressState', transaction_hashes=[self.tx.txhash],
                                   balance=100),
            self.slave.address: Mock(autospec=AddressState, name='slave AddressState', transaction_hashes=[], balance=0)
        }
        unused_chain_manager_mock = Mock(autospec=ChainManager, name='unused ChainManager')
        # Stub out the PK bookkeeping; only its invocation is verified.
        self.tx._revert_state_changes_for_PK = Mock(autospec=TransferTransaction._revert_state_changes_for_PK)
        self.tx.revert_state_changes(addresses_state, unused_chain_manager_mock)
        # Balances and txhash lists return to their pre-transaction values.
        self.assertEqual(200, addresses_state[self.alice.address].balance)
        self.assertEqual(0, addresses_state[self.bob.address].balance)
        self.assertEqual([], addresses_state[self.alice.address].transaction_hashes)
        self.assertEqual([], addresses_state[self.bob.address].transaction_hashes)
        self.tx._revert_state_changes_for_PK.assert_called_once()
        # If there are no AddressStates related to the Addresses in this transaction, do nothing.
        self.tx._revert_state_changes_for_PK.reset_mock()
        addresses_state_dummy = {
            b'a': 'ABC',
            b'b': 'DEF'
        }
        self.tx.revert_state_changes(addresses_state_dummy, unused_chain_manager_mock)
        # The unrelated dict is untouched, yet PK bookkeeping still runs.
        self.assertEqual(addresses_state_dummy, {b'a': 'ABC', b'b': 'DEF'})
        self.tx._revert_state_changes_for_PK.assert_called_once()
    def test_revert_state_changes_multi_send(self, m_logger):
        """
        Alice has sent 20 coins to Bob and Slave each, using 1 as Transaction fee. Now we need to undo this.
        """
        # Post-transaction state: alice 159 (200 - 20 - 20 - 1), 20 each for bob/slave.
        # NOTE(review): the seeded txhash is self.tx.txhash while the reverted TX is
        # tx_multisend — presumably revert clears the hash lists regardless of the
        # specific hash; confirm against revert_state_changes' implementation.
        addresses_state = {
            self.alice.address: Mock(autospec=AddressState, name='alice AddressState',
                                     transaction_hashes=[self.tx.txhash],
                                     balance=159),
            self.bob.address: Mock(autospec=AddressState, name='bob AddressState', transaction_hashes=[self.tx.txhash],
                                   balance=20),
            self.slave.address: Mock(autospec=AddressState, name='slave AddressState',
                                     transaction_hashes=[self.tx.txhash], balance=20)
        }
        unused_chain_manager_mock = Mock(autospec=ChainManager, name='unused ChainManager')
        tx_multisend = TransferTransaction.create(
            addrs_to=[self.bob.address, self.slave.address],
            amounts=[20, 20],
            fee=1,
            xmss_pk=self.alice.pk
        )
        tx_multisend._revert_state_changes_for_PK = Mock(autospec=TransferTransaction._revert_state_changes_for_PK)
        tx_multisend.revert_state_changes(addresses_state, unused_chain_manager_mock)
        # Everything rolls back to the pre-transaction state.
        self.assertEqual(200, addresses_state[self.alice.address].balance)
        self.assertEqual(0, addresses_state[self.bob.address].balance)
        self.assertEqual(0, addresses_state[self.slave.address].balance)
        self.assertEqual([], addresses_state[self.alice.address].transaction_hashes)
        self.assertEqual([], addresses_state[self.bob.address].transaction_hashes)
        self.assertEqual([], addresses_state[self.slave.address].transaction_hashes)
        tx_multisend._revert_state_changes_for_PK.assert_called_once()
    def test_revert_state_changes_tx_sends_to_self(self, m_logger):
        """
        Alice sent coins to herself, but she still lost the Transaction fee. Undo this.
        """
        # Post-transaction state: alice 199 (only the fee of 1 was lost).
        addresses_state = {
            self.alice.address: Mock(autospec=AddressState, name='alice AddressState',
                                     transaction_hashes=[self.tx.txhash],
                                     balance=199),
            self.bob.address: Mock(autospec=AddressState, name='bob AddressState', transaction_hashes=[],
                                   balance=0),
            self.slave.address: Mock(autospec=AddressState, name='slave AddressState', transaction_hashes=[], balance=0)
        }
        unused_chain_manager_mock = Mock(autospec=ChainManager, name='unused ChainManager')
        # A self-send: alice pays alice 100, plus the fee of 1.
        tx = TransferTransaction.create(
            addrs_to=[self.alice.address],
            amounts=[100],
            fee=1,
            xmss_pk=self.alice.pk
        )
        tx._revert_state_changes_for_PK = Mock(autospec=TransferTransaction._revert_state_changes_for_PK)
        tx.revert_state_changes(addresses_state, unused_chain_manager_mock)
        # After revert alice is back to 200 and no txhashes remain recorded.
        self.assertEqual(200, addresses_state[self.alice.address].balance)
        self.assertEqual(0, addresses_state[self.bob.address].balance)
        self.assertEqual([], addresses_state[self.alice.address].transaction_hashes)
        self.assertEqual([], addresses_state[self.bob.address].transaction_hashes)
        tx._revert_state_changes_for_PK.assert_called_once()
    def test_revert_state_changes_for_PK(self, m_logger):
        """
        This is just an undo function.
        :return:
        """
        tx = TransferTransaction.create(
            addrs_to=[self.bob.address],
            amounts=[100],
            fee=1,
            xmss_pk=self.alice.pk
        )
        addr_state = {
            self.alice.address: Mock(autospec=AddressState)
        }
        tx.sign(self.alice)
        # Second argument (the state) is not inspected by this test.
        tx._revert_state_changes_for_PK(addr_state, Mock(name='unused State Mock'))
        # The exact inverse of apply: nonce decremented, OTS key slot freed.
        addr_state[self.alice.address].decrease_nonce.assert_called_once()
        addr_state[self.alice.address].unset_ots_key.assert_called_once()
    def test_revert_state_changes_for_PK_master_slave_XMSS(self, m_logger):
        """
        Reverting a slave-signed TX undoes the PK bookkeeping on the slave's
        AddressState (not the master's), including removal of the txhash.
        """
        tx = TransferTransaction.create(
            addrs_to=[self.bob.address],
            amounts=[100],
            fee=1,
            xmss_pk=self.slave.pk,
            master_addr=self.alice.address
        )
        addr_state = {
            self.alice.address: Mock(autospec=AddressState, name='alice AddressState'),
            self.slave.address: Mock(autospec=AddressState, name='slave AddressState')
        }
        tx.sign(self.slave)
        tx._revert_state_changes_for_PK(addr_state, Mock(name='unused State Mock'))
        # All undo operations target the slave's AddressState.
        addr_state[self.slave.address].decrease_nonce.assert_called_once()
        addr_state[self.slave.address].unset_ots_key.assert_called_once()
        addr_state[self.slave.address].transaction_hashes.remove.assert_called_once()
    def test_affected_address(self, m_logger):
        """set_affected_address collects every address touched by the TX."""
        # The default transaction params involve only two addresses.
        affected_addresses = set()
        self.tx.set_affected_address(affected_addresses)
        self.assertEqual(2, len(affected_addresses))
        # This transaction should involve 3 addresses.
        affected_addresses = set()
        tx = TransferTransaction.create(
            addrs_to=[self.bob.address, self.slave.address],
            amounts=[100, 100],
            fee=1,
            xmss_pk=self.alice.pk
        )
        tx.set_affected_address(affected_addresses)
        self.assertEqual(3, len(affected_addresses))
|
StarcoderdataPython
|
1909473
|
# Four equivalent ways of counting 0..9; each section ends with a marker line.

# Implicit start/step.
for n in range(10):
    print(n)
print('=====')

# Explicit start.
for n in range(0, 10):
    print(n)
print('=====')

# Explicit start, stop and step.
for n in range(0, 10, 1):
    print(n)
print('=====')

# Manual while-loop equivalent.
count = 0
while count < 10:
    print(count)
    count += 1
print('=====')
|
StarcoderdataPython
|
3481680
|
<reponame>claydodo/potty
class SessionStatus(object):
    """Lightweight record describing the liveness of a single session."""

    def __init__(self, session_id, is_alive=None, expire_dt=None, **kwargs):
        # Extra keyword arguments are accepted (forward compatibility)
        # but deliberately discarded.
        self.session_id, self.is_alive, self.expire_dt = (
            session_id, is_alive, expire_dt)
|
StarcoderdataPython
|
11344326
|
# -*- coding: utf-8 -*-
import random
class Tile:
    """A single cell of the world grid.

    A tile knows its coordinates, whether it hosts the hive, and whether it
    is a food source (decided randomly at construction: ~1% of tiles).
    """

    def __init__(self, coordinates=(0, 0)):
        self.location = coordinates
        self.hive = False
        # Is this tile a food source? (~1% chance)
        if random.random() > 0.99:
            self.foodSource = True
            # Food drawn from N(50, 10); fragrance starts equal to the food.
            self.availableFood = int(random.gauss(50, 10))
            self.fragrance = float(self.availableFood)
        else:
            self.foodSource = False
            self.availableFood = 0
            self.fragrance = 0.00
        # Remember the initial stock so depletion can be tracked.
        self.originalFood = self.availableFood

    # Getters
    def getX(self):
        return self.location[0]

    def getY(self):
        return self.location[1]

    def isFoodSource(self):
        return self.foodSource

    def hasHive(self):
        return self.hive

    def getFood(self):
        return self.availableFood

    def getFragrance(self):
        return self.fragrance

    def getCoordinates(self):
        return self.location

    # Modifiers
    def addHive(self):
        self.hive = True

    # adds to fragrance.
    def increaseFragrance(self, val):
        self.fragrance += val

    # removes from fragrance (could be done by passing a negative value, but this is more clear I feel)
    def reduceFragrance(self, val):
        self.fragrance -= val

    def foodReduce(self):
        """Consume one unit of food; the tile stops being a source when empty.

        Fixed: the original *incremented* availableFood (+= 1), contradicting
        both the method's name and the depletion check below.
        """
        self.availableFood -= 1
        if self.availableFood <= 0:
            self.foodSource = False

    # Display
    def displayInfo(self):
        """Print a human-readable summary of the tile's state."""
        print("Internal Information:")
        # Fixed: concatenating the tuple directly raised TypeError; str() it.
        print("Coordinates: " + str(self.location))
        print("Food Source: " + str(self.foodSource))
        print("Available Food: " + str(self.availableFood))
        print("Fragrance: " + str(self.fragrance))

    # OVERRIDES
    def print(self):
        print(self.availableFood)
|
StarcoderdataPython
|
37199
|
<filename>tools/check_encrypted_hash.py
"""Decrypt an encrypted hash and check a plain-text value against it.

Usage: check_encrypted_hash.py <plain_text> <encrypted_hash>
Prints True/False from check_hash.
"""
import sys

# Make the application package importable when run from the repo root.
sys.path.insert(0, './app/app')

from tools import decrypt, check_hash  # noqa

plain_text = sys.argv[1]
encrypted_hash = sys.argv[2]
print(check_hash(plain_text, decrypt(encrypted_hash)))
|
StarcoderdataPython
|
3553113
|
import xgboost as xgb
import time
import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.metrics import precision_score, recall_score, roc_auc_score, accuracy_score
from sklearn.externals import joblib
def load_data(file_path):
    """Read the CSV dataset at ``file_path`` into a DataFrame."""
    frame = pd.read_csv(file_path)
    return frame
def save_model(model, model_name=None):
    """Persist ``model`` to disk, auto-generating a timestamped
    'model/model-<epoch>' name when none is given."""
    target = model_name or 'model/model-' + str(int(time.time()))
    joblib.dump(model, target)
def load_model(model_path):
    """Inverse of save_model: load a previously persisted model."""
    model = joblib.load(model_path)
    return model
def evaluate(model, file_path='./test.csv'):
    """Print the model's accuracy on a labelled CSV test set.

    The 'label' column is the target; the remaining columns (with 'label'
    moved to the index) are the features.
    """
    frame = load_data(file_path=file_path)
    labels = frame['label']
    predictions = model.predict(frame.set_index('label'))
    print(accuracy_score(y_true=labels, y_pred=predictions))
def train(file_path='./train.csv',
          n_estimators=10,
          max_depth=10,
          learning_rate=0.1):
    """Fit a GradientBoostingClassifier on a labelled CSV training set.

    The 'label' column is the target; the remaining columns (with 'label'
    moved to the index) are the features. Returns the fitted classifier.
    """
    df = load_data(file_path=file_path)
    # Renamed from `train`: the local previously shadowed this function itself.
    features = df.set_index('label')
    gb = GradientBoostingClassifier(
        n_estimators=n_estimators,
        max_depth=max_depth,
        learning_rate=learning_rate,
        # n_jobs=-1,  # not supported by GradientBoostingClassifier
        verbose=True)
    gb.fit(features, df['label'])
    return gb
def predict(model, file_name='result.csv'):
    """Predict labels for ./predict.csv and write a Kaggle-style submission.

    Output format: 'ImageId,Label' header, then 1-based row id and label.
    """
    test = load_data(file_path='./predict.csv')
    # Renamed from `predict`: the local previously shadowed this function itself.
    predictions = model.predict(test)
    with open(file_name, 'w') as f:
        f.write('ImageId,Label\n')
        # enumerate replaces the original manual counter `k`.
        for image_id, label in enumerate(predictions, start=1):
            f.write(str(image_id) + ',' + str(label) + '\n')
# def main():
# print 'Training...'
# model = train(file_path='my_test.csv')
# print 'Training finished!'
#
# print 'Saving...'
# save_model(model=model)
#
# print 'Evaluating...'
# evaluate(model, file_path='my_test.csv')
# print 'Done!'
def get_result(model_name):
    """Load the persisted model ``model_name`` and write its predictions
    (result.csv, via predict's default)."""
    predict(load_model(model_name))
if __name__ == '__main__':
    # NOTE: the commented-out block below was a hyper-parameter grid search
    # driven by hyper_param.txt; kept for reference.
    # hyper_param_file = 'hyper_param.txt'
    # with open(hyper_param_file, 'r') as f:
    #     for line in f.readlines():
    #         param_list = line.strip().split()
    #         n_estimators = int(param_list[0])
    #         max_depth = int(param_list[1])
    #         learning_rate = float(param_list[2])
    #         model_name = 'model/model_' + str(n_estimators) + '_' + str(
    #             max_depth) + '_' + str(learning_rate) + '_' + str(
    #                 int(time.time()))
    #         print(model_name)
    #         model = train(
    #             file_path='my_train.csv',
    #             n_estimators=n_estimators,
    #             max_depth=max_depth,
    #             learning_rate=learning_rate)
    #         save_model(model=model, model_name=model_name)
    #         evaluate(model, file_path='my_test.csv')
    # Load one previously trained model and write its predictions.
    get_result(model_name='model/model_140_7_0.1_1528690818')
|
StarcoderdataPython
|
272569
|
<filename>normflowpy/flows/helpers.py<gh_stars>0
import torch
def safe_log(x: torch.Tensor, eps=1e-22) -> torch.Tensor:
    """Numerically safe elementwise log.

    Values below ``eps`` are clamped up to ``eps`` first, so the result is
    never -inf for zero (or tiny) non-negative inputs.
    """
    clamped = x.clamp(min=eps)
    return torch.log(clamped)
|
StarcoderdataPython
|
9735331
|
# coding: utf8
def get_caps_filename(norm_t1w):
    """Generate output CAPS filename from input CAPS filename

    Args:
        norm_t1w: T1w in Ixi549Space space
            (output from t1-volume-tissue-segmentation)

    Returns:
        Filename (skull-stripped T1w in Ixi549Space space) for t1-extensive pipeline
    """
    from nipype.utils.filemanip import split_filename

    # Directory part is irrelevant here; only basename + extension are kept.
    _, base, ext = split_filename(norm_t1w)

    # <base>: <participant_id>_<session_id>*_space-Ixi549Space_T1w
    plain_suffix = "_space-Ixi549Space_T1w"
    stripped_suffix = "_space-Ixi549Space_desc-SkullStripped_T1w"
    return base.replace(plain_suffix, stripped_suffix) + ext
def apply_binary_mask(input_img, binary_img, output_filename):
    """Apply binary mask to input_img.

    Args:
        input_img: Image with same header than binary_image
        binary_img: Binary image
        output_filename: Output filename

    Returns:
        Path (in the current working directory) to the masked image
        input_img * binary_img.
    """
    import os

    import nibabel as nib
    import numpy as np

    original_image = nib.load(input_img)
    mask = nib.load(binary_img)

    # np.asanyarray(img.dataobj) is the documented replacement for the
    # deprecated get_data() (removed in nibabel 5.0); unlike get_fdata(),
    # it preserves the stored dtype.
    data = np.asanyarray(original_image.dataobj) * np.asanyarray(mask.dataobj)

    masked_image_path = os.path.join(os.getcwd(), output_filename)
    masked_image = nib.Nifti1Image(
        data, original_image.affine, header=original_image.header
    )
    nib.save(masked_image, masked_image_path)
    return masked_image_path
def get_file_from_server(remote_file, cache_path=None):
    """
    Download file from server, using a local on-disk cache.

    Args:
        remote_file (str): RemoteFileStructure defined in clinica.utils.inputs
        cache_path (str): (default: ~/.cache/clinica/data)

    Returns:
        Path to downloaded file.

    Note:
        This function will be in Clinica.
    """
    import os
    from pathlib import Path

    from clinica.utils.inputs import fetch_file
    from clinica.utils.stream import cprint

    home = str(Path.home())

    # Resolve the cache directory (custom subfolder or the clinica default).
    if cache_path:
        cache_clinica = os.path.join(home, ".cache", cache_path)
    else:
        cache_clinica = os.path.join(home, ".cache", "clinica", "data")
    os.makedirs(cache_clinica, exist_ok=True)

    local_file = os.path.join(cache_clinica, remote_file.filename)
    if os.path.exists(local_file):
        # Cache hit: nothing to download.
        return local_file

    try:
        local_file = fetch_file(remote_file, cache_clinica)
    except IOError as err:
        # Best-effort: report the failure and fall through to return the
        # (non-existent) expected path, matching the original behavior.
        cprint(
            f"Unable to download {remote_file.filename} from {remote_file.url}: {err}"
        )
    return local_file
|
StarcoderdataPython
|
264956
|
import os
import traceback
from flask_restful import Resource
from flask_uploads import UploadNotAllowed
from flask import request, send_file
from flask_jwt_extended import jwt_required, get_jwt_identity, get_jwt_claims, jwt_optional
from marshmallow import ValidationError
from helpers import image_helper
from helpers.sirv import Sirv
from helpers.strings import get_text
from schemas.Image import ImageSchema
# Single module-level schema instance shared by all handlers.
image_schema = ImageSchema()

# Sirv setup — credentials come from the environment (None when unset).
client_id = os.environ.get('SIRV_CLIENT_ID', None)
client_secret = os.environ.get('SIRV_CLIENT_SECRET', None)
sirv_utils = Sirv(client_id, client_secret)
# Root folder on Sirv under which all uploads are stored.
SIRV_BASE_FOLDER_NAME = "MYANNime"
def authorized():
    '''Return True when the current JWT carries any admin role claim.'''
    return get_jwt_claims().get("role", None) is not None
class ImageUpload(Resource):
    @classmethod
    @jwt_required
    def post(cls):
        '''
        Used to upload an image file.
        Uses JWT to retrieve user info and then saves the image to the user's folder.
        If there is a filename conflict, it appends a number at the end.
        '''
        # Admin-only endpoint.
        if not authorized():
            return {
                "message": get_text('image_unauthorized')
            }, 401
        try:
            # request.files = {"form_fild_name" : 'FileStorage' from werkzeug}
            data = image_schema.load(request.files)
            user_id = get_jwt_identity()
            folder = f'admin_{user_id}'  # static/images/user_idxxx
            # Save locally first, then mirror the file to the Sirv CDN.
            image_path = image_helper.save_image(data['image'], folder=folder)
            basename = image_helper.get_basename(image_path)
            absolute_path = f'{os.getcwd()}/static/images/{folder}'
            image_url = sirv_utils.upload(
                basename, absolute_path, SIRV_BASE_FOLDER_NAME)
            return {
                "message": get_text('image_photo_uploaded').format(basename=basename),
                "image_url": image_url["image_path"]
            }, 201
        except UploadNotAllowed:
            # File extension is not in the allowed image set.
            extension = image_helper.get_extension(data['image'])
            return {
                "message": get_text('image_illegal_file_type').format(extension=extension)
            }, 400
        except ValidationError as error:
            # Schema rejected the multipart payload.
            return {"message": get_text('input_error_generic'), "info": error.messages}, 400
        except Exception as ex:
            # Catch-all: log the error and return an opaque 500.
            print(ex)
            return {
            }, 500
class Image(Resource):
    @classmethod
    @jwt_required
    def delete(cls, filename: str):
        '''Delete an uploaded image from the Sirv CDN. Admin only.'''
        if not authorized():
            return {
                "message": get_text('image_unauthorized')
            }, 401
        # Reject path-traversal / otherwise unsafe names before touching storage.
        if not image_helper.is_filename_safe(filename):
            return {
                "message": get_text('image_illegal_file_name')
            }, 400
        # NOTE(review): user_folder is computed but never used below —
        # presumably a leftover from a local-deletion code path.
        user_id = get_jwt_identity()
        user_folder = f'admin_{user_id}'
        try:
            sirv_utils.delete(filename, SIRV_BASE_FOLDER_NAME)
            return {
                "message": get_text('image_deletion_successful')
            }
        except Exception as ex:
            # CDN-side failure: log and report a generic deletion error.
            print(ex)
            return {"message": get_text('image_deletion_failed')}, 500
class AvatarGET(Resource):
    @classmethod
    @jwt_optional
    def get(cls, username: str):
        '''Serve the avatar image for ``username``; admins are looked up in a
        separate folder.'''
        AVATAR_FOLDER = 'avatars' if not authorized() else 'admin_avatars'
        try:
            # Avatars are stored as user_<username>.<ext>; the extension is
            # discovered by find_image_any_format.
            filename = f'user_{username}'
            avatar_path = image_helper.find_image_any_format(
                filename, AVATAR_FOLDER)
            if avatar_path:
                try:
                    return send_file(avatar_path)
                except FileNotFoundError:
                    # The file existed at lookup time but vanished before send.
                    return {
                        "message": get_text('image_file_not_found')
                    }, 404
            return {
                "message": get_text('image_file_not_found')
            }, 404
        except Exception as ex:
            # Fixed: was a bare `except:`, which also swallowed SystemExit /
            # KeyboardInterrupt and discarded the error silently; narrowed to
            # Exception and logged, matching the other handlers in this module.
            print(ex)
            return {
            }, 500
class AvatarPUT(Resource):
    @classmethod
    @jwt_required
    def put(cls):
        '''Upload or replace the current user's avatar (normalized and resized).'''
        try:
            data = image_schema.load(request.files)
            filename = f'user_{get_jwt_identity()}'
            folder = 'avatars' if not authorized() else 'admin_avatars'
            # Remove any existing avatar first, whatever its extension.
            avatar_path = image_helper.find_image_any_format(filename, folder)
            if avatar_path:
                try:
                    os.remove(avatar_path)
                except Exception as ex:
                    # Best-effort delete; a stale old file is not fatal.
                    print(ex)
            try:
                absolute_path = f'{os.getcwd()}/static/images/{folder}'
                ext = image_helper.get_extension(data['image'].filename)
                avatar = filename + ext
                image_helper.save_image(data["image"], folder, avatar)
                # Normalize the stored file's type and size after saving.
                image_helper.change_image_type_and_resize(
                    absolute_path, avatar)
                return {
                    "message": get_text('image_avatar_uploaded')
                }, 201
            except UploadNotAllowed:
                # Extension not in the allowed image set.
                extension = image_helper.get_extension(data['image'])
                return {
                    "message": get_text('image_illegal_file_type').format(extension=extension)
                }, 400
        except ValidationError as error:
            return {"message": get_text('input_error_generic'), "info": error.messages}, 400
        except Exception as ex:
            # Catch-all: log the error and return an opaque 500.
            print(ex)
            return {
            }, 500
|
StarcoderdataPython
|
3281959
|
<filename>backend/file_service.py
import uuid
from pydub import AudioSegment
from google.cloud import storage, speech
from google.cloud.speech import enums
from google.cloud.speech import types
from google.protobuf.json_format import MessageToJson
from oauth2client.client import GoogleCredentials
from googleapiclient import discovery
from store import get_file_metadata
import json
# Target bucket, and the URI prefix handed to the Speech API for that bucket.
GCS_BUCKET_NAME = "steno"
BASE_GCS_URI = "gs://steno/"
#BASE_GCS_URI = "https://storage.googleapis.com/steno/"
def get_metadata(file):
    """
    Returns an object containing the sampling rate and the duration
    (plus filename and channel count) of a WAV upload.
    """
    segment = AudioSegment.from_file(file, format="wav")
    return {
        "file_name": file.filename,
        "sampling_rate": segment.frame_rate,
        "channels": segment.channels,
        "duration": segment.duration_seconds,
    }
def upload_to_gcs(file):
    """
    Uploads file to Google Cloud Storage

    Returns
    -------
    string
        Google Cloud Storage URI
    """
    bucket = storage.Client().get_bucket(GCS_BUCKET_NAME)

    # Store under a fresh UUID, keeping the original file extension.
    extension = file.filename.split(".")[-1]
    blob_name = "{}.{}".format(uuid.uuid4(), extension)

    bucket.blob(blob_name).upload_from_file(file)
    return BASE_GCS_URI + blob_name
def async_transcribe(id):
    """
    Transcribe the given audio file asynchronously and output the word time
    offsets. Returns the long-running operation serialized as JSON.
    """
    file_metadata = json.loads(get_file_metadata(id))

    audio = types.RecognitionAudio(uri=file_metadata['uri'])
    # Recognition parameters come from the stored per-file metadata.
    config = types.RecognitionConfig(
        language_code='en-US',
        sample_rate_hertz=file_metadata['sampling_rate'],
        audio_channel_count=file_metadata['channels'],
        enable_word_time_offsets=True,
        enable_automatic_punctuation=True)

    operation = speech.SpeechClient().long_running_recognize(config, audio)
    return MessageToJson(operation.operation)
def poll_operation(name):
    """
    Polls the status of the long running operation; returns the final
    response when done, otherwise just the operation name.
    """
    credentials = GoogleCredentials.get_application_default()
    speech_service = discovery.build('speech', 'v1', credentials=credentials)

    response = speech_service.operations().get(name=name).execute()

    # Finished operations carry a 'done' flag plus the final payload.
    if 'done' in response:
        return response['response']
    return {"name": response['name']}
|
StarcoderdataPython
|
6507590
|
<filename>app/grandchallenge/challenges/admin.py
from django.contrib import admin
from grandchallenge.challenges.models import (
BodyRegion,
BodyStructure,
Challenge,
ChallengeSeries,
ExternalChallenge,
ImagingModality,
TaskType,
)
# Register every challenge-related model with the default ModelAdmin so all
# of them are editable through the Django admin site.
admin.site.register(Challenge)
admin.site.register(ExternalChallenge)
admin.site.register(ChallengeSeries)
admin.site.register(BodyRegion)
admin.site.register(BodyStructure)
admin.site.register(ImagingModality)
admin.site.register(TaskType)
|
StarcoderdataPython
|
11207062
|
<filename>external_tools/src/main/python/images/ValidateFileIntegrity.py
"""
Validate the integrity of image files
In data-release 7 and 8 we had issues with image files that were corrupt.
This was causing problems when Omero tried to upload them.
This script checks the filetypes specified and reports any that seem
corrupt. It does this by attempting to load them using the imread function
in matplotlib.pyplot
"""
import os
import sys
import argparse
import logging
# Import pyplot using 'agg' backend as there is no display on the server
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
# Use SimpleItk for dicom images use readDicom from below
import qc_helper as helper
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Run script to verify integrity of image files')
    parser.add_argument('-d', '--rootDir', dest='rootDir',
                        help='Root directory to start search for images')
    parser.add_argument('-t', '--filetypes', dest='filetypes',
                        default='jpg,jpeg,tif,tiff,png,dcm,bmp',
                        help='comma separated list of filetypes to verify')
    parser.add_argument('--logfile-path', dest='logfilePath', default=None,
                        help='path to save logfile')
    parser.add_argument('-f', '--filelist-path', dest='filelist_path',
                        help='path to file containing files to check')
    parser.add_argument('-o', '--output-path', dest='outputPath',
                        help='path to save list of corrupt images. If not supplied no list is saved but the paths to the corrupt images could be extracted from the log file')
    args = parser.parse_args()
    # Configure logger - if logging output file not specified create in this
    # directory with timestamp
    if args.logfilePath is None or args.logfilePath=="":
        import time
        import datetime
        t = time.time()
        tstamp = datetime.datetime.fromtimestamp(t).strftime('%Y%m%d_%H%M%S')
        logfile_path = "validate_file_integrity_" + tstamp + ".log"
    else:
        logfile_path = args.logfilePath
    # Log to file via basicConfig; mirror everything to the console through an
    # extra handler on the root logger.
    log_format = '%(asctime)s - %(name)s - %(levelname)s:%(message)s'
    logging.basicConfig(format=log_format, filename=logfile_path,
                        level=logging.INFO)
    log_formatter = logging.Formatter(log_format)
    logger = logging.getLogger('ValidateFileIntegrity')
    root_logger = logging.getLogger()
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(log_formatter)
    root_logger.addHandler(console_handler)
    logger.info("running main method to validate integrity of the following image types: " + args.filetypes)
    # List of filetypes to check - including '.'
    filetypes = args.filetypes.split(',')
    for i in range(len(filetypes)):
        if filetypes[i][0] != '.':
            filetypes[i] = "."+filetypes[i]
    nfs_file_list = []
    # If --filelist-path is preset simply use this list
    if args.filelist_path is not None:
        logger.info('loading list of files to check from "' + args.filelist_path + '"')
        with open(args.filelist_path,'rt') as fid:
            for f in fid.readlines():
                f2 = f.strip('\n')
                ext = os.path.splitext(f2)[-1]
                # Keep only files whose extension is in the allowed list;
                # list.index raises ValueError for non-members.
                try:
                    filetypes.index(ext)
                    nfs_file_list.append(f2)
                except ValueError:
                    continue
    elif args.rootDir is not None:
        logger.info('rootDir is "' + args.rootDir + '"')
        # Get the files in NFS
        file_tuple = os.walk(args.rootDir)
        nfs_file_list = []
        for ft in file_tuple:
            for f in ft[2]:
                ext = os.path.splitext(f)[-1]
                try:
                    filetypes.index(ext)
                    nfs_file_list.append(os.path.join(ft[0],f))
                except ValueError:
                    continue
    else:
        logger.error("At least one of --filelist-path or --rootDir must be supplied. Exiting")
        sys.exit(-1)
    logger.info("Number of files from NFS = " + str(len(nfs_file_list)))
    n_invalid = 0
    corrupt_files = []
    # Try to actually load each file; any exception (or empty image data)
    # marks the file as corrupt.
    for f in nfs_file_list:
        try:
            if f.endswith('dcm'):
                im = helper.readDicom(f)
            else:
                im = plt.imread(f)
            # If image is empty add it to corrupt files list
            # KB 28/06/2021 - this seems unintuitive, but in dr14 there were
            # two images with no data!
            if im.size < 1:
                raise Exception(f + " does not contain image data")
        except Exception as e:
            logger.error("Problem with " + f + ". Error was: " + str(e))
            n_invalid += 1
            corrupt_files.append(os.path.abspath(f)+'\n')
    logger.info("Number of invalid files: " + str(n_invalid))
    # Optionally persist the corrupt list, merging with any existing output
    # file, de-duplicating and sorting the combined paths.
    if n_invalid > 0 and args.outputPath is not None:
        if os.path.isfile(args.outputPath):
            try:
                with open(args.outputPath, 'rt') as fid:
                    fnames = [f.strip('\n') for f in fid.readlines()]
                    for f in fnames:
                        corrupt_files.append(os.path.abspath(f) + '\n')
            except Exception as e:
                print(args.outputPath + " already exists. However, " + \
                      "attempt to read it and merge current results " + \
                      "with it failed. Will therefore overwrite it. " + \
                      "Error was: " + str(e))
        set_corrupt_files = set(corrupt_files)
        corrupt_files = list(set_corrupt_files)
        corrupt_files.sort()
        try:
            with open(args.outputPath, 'wt') as fid:
                fid.writelines(corrupt_files)
            logger.info("Written paths of corrupt images to " + args.outputPath)
        except Exception as e:
            logger.error("Problem writing paths of corrupt images to " + args.outputPath + ". Error was: " + str(e))
StarcoderdataPython
|
46152
|
<gh_stars>0
from django.conf.urls import url
from . import views
from .views import HotDweetFeed, NewDweetFeed, RandomDweetFeed
from .views import TopWeekDweetFeed, TopMonthDweetFeed, TopYearDweetFeed, TopAllDweetFeed
from .views import NewHashtagFeed, TopHashtagFeed
urlpatterns = [
    # NOTE(review): /test/ duplicates the hot feed and is unnamed —
    # presumably a leftover debugging route.
    url(r'^test/', HotDweetFeed.as_view()),
    url(r'^$', HotDweetFeed.as_view(), name='root'),
    url(r'^hot$', HotDweetFeed.as_view(), name='hot_feed'),
    # Default top to top of the month
    url(r'^top$', TopMonthDweetFeed.as_view(), name='top_feed'),
    url(r'^top/week$', TopWeekDweetFeed.as_view(), name='top_feed_week'),
    url(r'^top/month$', TopMonthDweetFeed.as_view(), name='top_feed_month'),
    url(r'^top/year$', TopYearDweetFeed.as_view(), name='top_feed_year'),
    url(r'^top/all$', TopAllDweetFeed.as_view(), name='top_feed_all'),
    url(r'^new$', NewDweetFeed.as_view(), name='new_feed'),
    url(r'^random$', RandomDweetFeed.as_view(), name='random_feed'),
    # Hashtag feeds: newest-first by default, /top for ranked.
    url(r'^h/(?P<hashtag_name>[\w._]+)$', NewHashtagFeed.as_view(), name='hashtag_feed'),
    url(r'^h/(?P<hashtag_name>[\w._]+)/top$', TopHashtagFeed.as_view(), name='top_hashtag_feed'),
    # Single-dweet pages and actions (show / reply / delete / like).
    url(r'^d/(?P<dweet_id>\d+)$',
        views.dweet_show, name='dweet_show'),
    url(r'^d/(?P<dweet_id>\d+)/reply$',
        views.dweet_reply, name='dweet_reply'),
    url(r'^d/(?P<dweet_id>\d+)/delete$',
        views.dweet_delete, name='dweet_delete'),
    url(r'^d/(?P<dweet_id>\d+)/like$', views.like, name='like'),
    # Embeddable rendering of a single dweet.
    url(r'^e/(?P<dweet_id>\d+)$',
        views.dweet_embed, name='dweet_embed'),
    url(r'^dweet$', views.dweet, name='dweet'),
]
|
StarcoderdataPython
|
1846381
|
<filename>dependencies/vaticle/repositories.bzl
#
# Copyright (C) 2021 Vaticle
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def vaticle_dependencies():
    """Fetches the shared @vaticle_dependencies build-tooling repository (pinned by commit)."""
    git_repository(
        name = "vaticle_dependencies",
        remote = "https://github.com/vaticle/dependencies",
        commit = "e1e21118201b71855927062fb67f267b54f71017", # sync-marker: do not remove this comment, this is used for sync-dependencies by @vaticle_dependencies
    )
def vaticle_typedb_common():
    """Fetches @vaticle_typedb_common (pinned by release tag)."""
    git_repository(
        name = "vaticle_typedb_common",
        remote = "https://github.com/vaticle/typedb-common",
        tag = "2.1.1"
    )
def vaticle_typeql_lang_java():
    """Fetches @vaticle_typeql_lang_java (pinned by release tag)."""
    git_repository(
        name = "vaticle_typeql_lang_java",
        remote = "https://github.com/vaticle/typeql-lang-java",
        tag = "2.1.0"
    )
def vaticle_typedb_client_java():
    """Fetches @vaticle_typedb_client_java (pinned by release tag)."""
    git_repository(
        name = "vaticle_typedb_client_java",
        remote = "https://github.com/vaticle/typedb-client-java",
        tag = "2.1.1" # sync-marker: do not remove this comment, this is used for sync-dependencies by @vaticle_typedb_client_java
    )
def vaticle_typedb_client_python():
    """Fetches @vaticle_typedb_client_python (pinned by release tag)."""
    git_repository(
        name = "vaticle_typedb_client_python",
        remote = "https://github.com/vaticle/typedb-client-python",
        tag = "2.1.1" # sync-marker: do not remove this comment, this is used for sync-dependencies by @vaticle_typedb_client_python
    )
|
StarcoderdataPython
|
1829705
|
<filename>setup.py
from setuptools import setup, find_packages
# Distribution metadata and build configuration for the textextractor package.
setup(
    name="textextractor",
    version='0.1',
    description="Extracts relevant body of text from HTML page content.",
    keywords='textextractor',
    author='<NAME>',
    author_email="Use the github issues",
    url="https://github.com/prashanthellina/textextractor",
    license='MIT License',
    # lxml does the underlying HTML parsing.
    install_requires=[
        'lxml',
    ],
    package_dir={'textextractor': 'textextractor'},
    packages=find_packages('.'),
    include_package_data=True,
    # Installs the `textextractor` command-line entry point.
    entry_points = {
        'console_scripts': ['textextractor = textextractor:textextractor_command'],
    },
)
|
StarcoderdataPython
|
3453049
|
import logging
class Transformer():
    """Identity transformer that records debugging info about what it saw.

    predict() returns X unchanged while logging all inputs and stashing
    metadata into the tag dictionary exposed via tags().
    """

    def __init__(self):
        # Accumulated metadata returned by tags().
        self._tags = {}

    def predict(self, X, feature_names, meta):
        # Log every incoming payload, in the original order.
        for payload in (X, feature_names, meta):
            logging.warning(payload)
        self._tags.update(
            value_at_three=X.tolist(),
            current="three",
            three="yes",
        )
        return X

    def tags(self):
        return self._tags
|
StarcoderdataPython
|
1976600
|
# __future__ imports
from __future__ import print_function, unicode_literals
# Stdlib
import os
import sys
# External Libraries
import configobj
# Python 2 compatibility: make input() behave like Python 3's (i.e. raw_input).
if sys.version_info < (3, 0, 0):
    input = raw_input  # noqa pylint: disable=all
class ConfigGenerator:
    """class for config generation"""

    def get_tools(self):
        """Lets the user enter the tools he want to use"""
        tools = "flake8,pylint,vulture,pyroma,isort,yapf,black,safety,dodgy,pytest,pypi".split(
            ",")
        print("Available tools: {0}".format(",".join(tools)))
        answer = ask_list("What tools would you like to use?",
                          ["flake8", "pytest"])
        if any(tool not in tools for tool in answer):
            print("Invalid answer, retry.")
            # Fixed: the retry's result was previously discarded and the
            # invalid selection returned anyway; propagate the retry instead.
            return self.get_tools()
        return answer

    def flake8(self):
        """Configuring flake8"""
        pass

    def pylint(self):
        """Configuring pylint, will do nothing"""
        pass

    def vulture(self):
        """Configuring vulture"""
        pass

    def pyroma(self):
        """Configuring pyroma"""
        pass

    def isort(self):
        """Configuring isort"""
        pass

    def yapf(self):
        """Configuring yapf"""
        pass

    def safety(self):
        """Configuring safety"""
        pass

    def dodgy(self):
        """Configuring dodgy"""
        pass

    def pytest(self):
        """Configuring pytest"""
        pass

    def pypi(self):
        """Configuring pypi"""
        pass

    def main(self):
        """The main function for generating the config file"""
        path = ask_path("where should the config be stored?", ".snekrc")
        conf = configobj.ConfigObj()
        tools = self.get_tools()
        # Each tool's configure method returns its config section (currently
        # all stubs returning None).
        for tool in tools:
            conf[tool] = getattr(self, tool)()  # pylint: disable=assignment-from-no-return
        conf.filename = path
        conf.write()
        print("Written config file!")
        if "pylint" in tools:
            print(
                "Please also run `pylint --generate-rcfile` to complete setup")
def ask_bool(question, default=True):
    """Ask a yes/no question; empty input selects the default."""
    hint = "Y/n" if default else "y/N"
    reply = input("{0} [{1}]: ".format(question, hint)).lower()
    if not reply:
        return default
    return reply == "y"
def ask_int(question, default=None):
    """Ask for a non-negative integer; empty input selects the default.

    Re-prompts on non-numeric input, or on empty input when no default
    was configured.
    """
    default_q = (" [default: {0}]: ".format(default)
                 if default is not None else ': ')
    # BUG FIX: default_q already contains the brackets and trailing colon;
    # it was previously wrapped in "[...]: " a second time, producing
    # prompts like "question [ [default: 5]: ]: ".
    answer = input("{0}{1}".format(question, default_q))
    if not answer:
        if default is None:
            print("No default set, try again.")
            return ask_int(question, default)
        return default
    if any(x not in "1234567890" for x in answer):
        print("Please enter only numbers (0-9).")
        return ask_int(question, default)
    return int(answer)
def ask_path(question, default=None):
    """Ask for an existing directory path; empty input selects the default.

    Re-prompts while the entered path is not an existing directory.
    """
    default_q = (" [default: {0}]: ".format(default)
                 if default is not None else ': ')
    # BUG FIX: avoid double-wrapping the already-formatted default hint in
    # "[...]: " (previously garbled the prompt).
    answer = input("{0}{1}".format(question, default_q))
    if answer == '':
        # NOTE(review): the default (e.g. ".snekrc") is returned without the
        # isdir check below -- confirm that asymmetry is intended.
        return default
    if os.path.isdir(answer):
        return answer
    print(
        "No such directory: {answer}, please try again".format(answer=answer))
    return ask_path(question, default)
def ask_list(question, default=None):
    """Ask for a comma-separated list of strings; empty input -> default."""
    default_q = (" [default: {0}]: ".format(",".join(default))
                 if default is not None else ': ')
    # BUG FIX: avoid double-wrapping the already-formatted default hint in
    # "[...]: " (previously garbled the prompt).
    answer = input("{0}{1}".format(question, default_q))
    if answer == '':
        return default
    return [ans.strip() for ans in answer.split(",")]
def ask_str(question, default=None):
    """Ask for a simple string; empty input selects the default."""
    default_q = (" [default: {0}]: ".format(default)
                 if default is not None else ': ')
    # BUG FIX: avoid double-wrapping the already-formatted default hint in
    # "[...]: " (previously garbled the prompt).
    answer = input("{0}{1}".format(question, default_q))
    if answer == '':
        return default
    return answer
def generate():
    """Entry point: build and run the interactive config generator."""
    ConfigGenerator().main()
|
StarcoderdataPython
|
11220270
|
<reponame>HydAu/AzureSDKForPython<filename>azure-mgmt-logic/azure/mgmt/logic/models/workflow_version.py<gh_stars>0
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
# NOTE: AutoRest-generated model -- _validation/_attribute_map must stay in
# sync with the service schema; hand edits here will be lost on regeneration.
class WorkflowVersion(Resource):
    """WorkflowVersion
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :param id: Gets or sets the resource id.
    :type id: str
    :param name: Gets the resource name.
    :type name: str
    :param type: Gets the resource type.
    :type type: str
    :param location: Gets or sets the resource location.
    :type location: str
    :param tags: Gets or sets the resource tags.
    :type tags: dict
    :ivar created_time: Gets the created time.
    :vartype created_time: datetime
    :ivar changed_time: Gets the changed time.
    :vartype changed_time: datetime
    :param state: Gets or sets the state. Possible values include:
     'NotSpecified', 'Enabled', 'Disabled', 'Deleted', 'Suspended'
    :type state: str or :class:`WorkflowState
     <azure.mgmt.logic.models.WorkflowState>`
    :ivar version: Gets the version.
    :vartype version: str
    :ivar access_endpoint: Gets the access endpoint.
    :vartype access_endpoint: str
    :param sku: Gets or sets the sku.
    :type sku: :class:`Sku <azure.mgmt.logic.models.Sku>`
    :param definition_link: Gets or sets the link to definition.
    :type definition_link: :class:`ContentLink
     <azure.mgmt.logic.models.ContentLink>`
    :param definition: Gets or sets the definition.
    :type definition: object
    :param parameters_link: Gets or sets the link to parameters.
    :type parameters_link: :class:`ContentLink
     <azure.mgmt.logic.models.ContentLink>`
    :param parameters: Gets or sets the parameters.
    :type parameters: dict
    """

    # Fields populated only by the server; rejected if sent by the client.
    _validation = {
        'created_time': {'readonly': True},
        'changed_time': {'readonly': True},
        'version': {'readonly': True},
        'access_endpoint': {'readonly': True},
    }

    # Maps Python attribute -> wire key and msrest (de)serialization type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'created_time': {'key': 'properties.createdTime', 'type': 'iso-8601'},
        'changed_time': {'key': 'properties.changedTime', 'type': 'iso-8601'},
        'state': {'key': 'properties.state', 'type': 'WorkflowState'},
        'version': {'key': 'properties.version', 'type': 'str'},
        'access_endpoint': {'key': 'properties.accessEndpoint', 'type': 'str'},
        'sku': {'key': 'properties.sku', 'type': 'Sku'},
        'definition_link': {'key': 'properties.definitionLink', 'type': 'ContentLink'},
        'definition': {'key': 'properties.definition', 'type': 'object'},
        'parameters_link': {'key': 'properties.parametersLink', 'type': 'ContentLink'},
        'parameters': {'key': 'properties.parameters', 'type': '{WorkflowParameter}'},
    }

    def __init__(self, id=None, name=None, type=None, location=None, tags=None, state=None, sku=None, definition_link=None, definition=None, parameters_link=None, parameters=None):
        super(WorkflowVersion, self).__init__(id=id, name=name, type=type, location=location, tags=tags)
        # Server-populated read-only properties start as None locally.
        self.created_time = None
        self.changed_time = None
        self.state = state
        self.version = None
        self.access_endpoint = None
        self.sku = sku
        self.definition_link = definition_link
        self.definition = definition
        self.parameters_link = parameters_link
        self.parameters = parameters
|
StarcoderdataPython
|
8053077
|
<reponame>RanulfoSoares/adminfuneraria<gh_stars>0
from kivymd.uix.screen import MDScreen
from kivy.storage.jsonstore import JsonStore
class JazigoCriarScreen(MDScreen):
    """
    Screen for registering a burial plot (jazigo); persists the form
    fields to a local JSON store keyed by the plot number.
    """
    # Shared JSON-backed store; the file is created at class definition time.
    dados = JsonStore('hello.json')

    def inserir_dados(self):
        """Read the form widgets and persist them; return True on success.

        Returns None implicitly if the record cannot be found after the
        put (kept from the original contract).
        """
        rua = self.ids.rua.text
        quadra = self.ids.quadra.text
        setor = self.ids.setor.text
        jazigo = self.ids.jazigo.text
        # BUG FIX: every field below previously read self.ids.jazigo.text
        # (copy-paste), so the plot number was stored in all columns.
        # NOTE(review): assumes the .kv layout declares ids matching these
        # names -- confirm against the kv file.
        jazigo_perpetuo_provisorio = self.ids.jazigo_perpetuo_provisorio.text
        comprador_jazigo = self.ids.comprador_jazigo.text
        jazigo_valor_pago = self.ids.jazigo_valor_pago.text
        sepultamento_valor_pago = self.ids.sepultamento_valor_pago.text
        valor_total_pago = self.ids.valor_total_pago.text
        recibo_pagto_numero = self.ids.recibo_pagto_numero.text
        observacao = self.ids.observacao.text
        self.dados.put(jazigo, jazigo=jazigo, rua=rua, quadra=quadra,
                       setor=setor, comprador_jazigo=comprador_jazigo,
                       jazigo_valor_pago=jazigo_valor_pago,
                       sepultamento_valor_pago=sepultamento_valor_pago,
                       valor_total_pago=valor_total_pago,
                       recibo_pagto_numero=recibo_pagto_numero,
                       observacao=observacao,
                       jazigo_perpetuo_provisorio=jazigo_perpetuo_provisorio)
        if self.dados.exists(jazigo):
            return True
|
StarcoderdataPython
|
11376966
|
<reponame>hickford/warehouse
# Copyright 2014 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pretend
from warehouse.fastly import FastlyKey, FastlyFormatter
class TestFastlyKey:
    """Behavioral tests for FastlyKey templating and its decorator forms."""

    def test_format_keys(self):
        key = FastlyKey("foo", "foo/{bar}", "foo/{bar!n}")
        expected = [
            "foo",
            "foo/WaT",
            "foo/wat",
        ]
        assert key.format_keys(bar="WaT") == expected

    def test_plain_decorator(self):
        key = FastlyKey("foo", "foo/{bar}", "foo/{bar!n}")

        @key
        def view(app, request, bar=None):
            return pretend.stub(headers={})

        response = view(None, None, bar="WaT")
        assert response.headers["Surrogate-Key"] == "foo foo/WaT foo/wat"

    def test_advanced_decorator(self):
        key = FastlyKey("foo", "foo/{bar}", "foo/{bar!n}")

        @key(not_bar="bar")
        def view(app, request, not_bar=None):
            return pretend.stub(headers={})

        response = view(None, None, not_bar="WaT")
        assert response.headers["Surrogate-Key"] == "foo foo/WaT foo/wat"
def test_fastly_formatter():
    """Plain formatting is unchanged; the custom !n conversion lowercases."""
    formatter = FastlyFormatter()
    assert formatter.format("{0}", "Foo") == "Foo"
    assert formatter.format("{0!n}", "Foo") == "foo"
|
StarcoderdataPython
|
4944483
|
# Practice script: list creation, mutation, sorting, iteration, and
# copy-vs-alias semantics. User-facing strings are Portuguese
# ("Digite um valor" = "enter a value").
lista = [int(input(f'Digite um valor: ')) for c in range(0, 5)]  # could also start from: lista = list()
lista.append(4)
lista.append(45)
lista.append(56)
lista.append(41)
print(lista)
lista[2] = 2
print(lista)
num = [4, 12, 5, 9, 1]
num[3] = 3
print(num)
num.append(11)
print(num)
num.sort()
print(num)
num.sort(reverse=True)
print(num)
print(f'Essa lista tem {len(num)} elementos.')
num.insert(2, 0)
print(num)
num.pop()
print(num)
num.pop(2)
print(num)
num.insert(6, 11)
num.remove(11)
print(num)
# Membership test before remove() avoids a ValueError for missing items.
if 24 in num:
    num.remove(24)
else:
    print('Não achei o numero 24')
print(num, '\n\n')
valor = []
valor.append(4)
valor.append(35)
valor.append(5)
for c in range(0,5):
    valor.append(int(input('Digite um valor: ')))
print(valor)
for v in valor:
    print(f'{v} ... ', end='')
print('')
# NOTE(review): this loop prints the WHOLE list once per element; it
# probably was meant to interpolate {v} rather than {valor} -- confirm.
for v in valor:
    print(f'{valor} ;;; ', end='')
print('')
for c, v in enumerate(valor):
    print(f'Na posição {c} encontrei o valor {v}!')
valor.sort()
for c, v in enumerate(valor):
    print(f'Na posição {c} temos em ordem {v}!!')
print('')
a = [2, 3, 7, 5, 9]
c = a[:]  # slice copy: receives only the elements, independent of the original
b = a  # plain assignment aliases the same list: every change is shared
b[2] = 15
print(f'A lista A: {a}')
print(f'A lista B: {b}')
print(f'A lista C: {c}')
|
StarcoderdataPython
|
6564357
|
import asyncio
from concurrent.futures import ThreadPoolExecutor
from typing import Optional
import pytest
from mock import MagicMock, PropertyMock, call
from serial import Serial # type: ignore[import]
from opentrons.drivers.asyncio.communication import AsyncSerial
@pytest.fixture
def mock_timeout_prop() -> PropertyMock:
    """Property mock standing in for the serial port's read ``timeout``."""
    return PropertyMock()
@pytest.fixture
def mock_write_timeout_prop() -> PropertyMock:
    """Property mock standing in for the serial port's ``write_timeout``."""
    return PropertyMock()
@pytest.fixture
def mock_serial(
    mock_timeout_prop: PropertyMock, mock_write_timeout_prop: PropertyMock
) -> MagicMock:
    """Mock Serial with its timeout attributes replaced by property mocks
    so tests can observe get/set call sequences."""
    m = MagicMock(spec=Serial)
    # Property mocks must be attached to the *type*, not the instance.
    type(m).timeout = mock_timeout_prop
    type(m).write_timeout = mock_write_timeout_prop
    return m
@pytest.fixture
async def subject(mock_serial: MagicMock) -> AsyncSerial:
    """The test subject: an AsyncSerial wrapping the mocked serial port."""
    return AsyncSerial(
        serial=mock_serial,
        executor=ThreadPoolExecutor(),
        loop=asyncio.get_running_loop(),
        # Do not clear the input buffer on each write in these tests.
        reset_buffer_before_write=False,
    )
@pytest.mark.parametrize(
    argnames=["default", "override"],
    argvalues=[
        [None, 5],
        [5, 6],
    ],
)
async def test_write_timeout_override(
    subject: AsyncSerial,
    mock_write_timeout_prop: PropertyMock,
    default: Optional[int],
    override: Optional[int],
):
    """It should override the timeout and return to default after the call."""
    mock_write_timeout_prop.return_value = default
    async with subject.timeout_override("write_timeout", override):
        await subject.write(b"")
    # Three calls: read the current value, set the override, restore default.
    assert mock_write_timeout_prop.call_args_list == [
        call(),
        call(override),
        call(default),
    ]
@pytest.mark.parametrize(
    argnames=["default", "override"],
    argvalues=[
        [None, 5],
        [5, 6],
    ],
)
async def test_timeout_override(
    subject: AsyncSerial,
    mock_timeout_prop: PropertyMock,
    default: Optional[int],
    override: Optional[int],
):
    """It should override the timeout and return to default after the call."""
    mock_timeout_prop.return_value = default
    async with subject.timeout_override("timeout", override):
        await subject.read_until(b"")
    # Three calls: read the current value, set the override, restore default.
    assert mock_timeout_prop.call_args_list == [call(), call(override), call(default)]
@pytest.mark.parametrize(
    argnames=["default", "override"],
    argvalues=[
        # Override equal to the default, or None: no set should happen.
        [5, 5],
        [5, None],
        [None, None],
    ],
)
async def test_write_timeout_dont_override(
    subject: AsyncSerial,
    mock_write_timeout_prop: PropertyMock,
    default: Optional[int],
    override: Optional[int],
):
    """It should not override the timeout if not necessary."""
    mock_write_timeout_prop.return_value = default
    async with subject.timeout_override("write_timeout", override):
        await subject.write(b"")
    # Only the initial read of the property -- no setter calls.
    mock_write_timeout_prop.assert_called_once()
@pytest.mark.parametrize(
    argnames=["default", "override"],
    argvalues=[
        # Override equal to the default, or None: no set should happen.
        [5, 5],
        [5, None],
        [None, None],
    ],
)
async def test_read_timeout_dont_override(
    subject: AsyncSerial,
    mock_timeout_prop: PropertyMock,
    default: Optional[int],
    override: Optional[int],
):
    """It should not override the timeout if not necessary."""
    mock_timeout_prop.return_value = default
    async with subject.timeout_override("timeout", override):
        await subject.read_until(b"")
    # Only the initial read of the property -- no setter calls.
    mock_timeout_prop.assert_called_once()
def test_reset_input_buffer(mock_serial: MagicMock, subject: AsyncSerial):
    """It should call the underlying serial port's Reset function"""
    subject.reset_input_buffer()
    # Delegation only -- no wrapping behaviour expected.
    mock_serial.reset_input_buffer.assert_called_once()
|
StarcoderdataPython
|
5003215
|
import grokcore.view as grok
class Mammoth(grok.Context):
    """Application model/context object for this grok example."""
    pass
class Index(grok.View):
    """Default view; rendered by a page template registered under the
    matching module-level name ``index`` (grok naming convention)."""
    pass
# Inline page template; the name "index" associates it with the Index view
# by grok's naming convention. The tal:attributes expression presumably
# resolves "static/file.txt" against the package's static directory --
# TODO confirm against the package layout.
index = grok.PageTemplate("""\
<html>
<body>
<a tal:attributes="href static/file.txt">Some text in a file</a>
</body>
</html>""")
|
StarcoderdataPython
|
4868363
|
<reponame>atx/dellfan
#! /usr/bin/env python3
import argparse
import psutil
import subprocess
import sys
import time
def ipmi_raw(bytes_):
    """Send a raw IPMI command via ipmitool, one 0xNN argument per byte.

    Raises subprocess.CalledProcessError if ipmitool exits non-zero.
    """
    command = ["ipmitool", "raw"]
    command.extend("0x{:02x}".format(byte) for byte in bytes_)
    subprocess.check_call(command)
# Magic bytes from
# https://www.reddit.com/r/homelab/comments/7xqb11/dell_fan_noise_control_silence_your_poweredge/
def ipmi_disable_fan_control():
    """Switch the BMC to manual fan control (vendor-specific raw command)."""
    ipmi_raw([0x30, 0x30, 0x01, 0x00])
def ipmi_set_fan_speed(speed):
    """Set the fan duty cycle via a raw IPMI command.

    speed -- fraction in [0.0, 1.0]; values outside that range are clamped.
    """
    raw = int(round(speed * 100))
    # BUG FIX: clamp the lower bound too; a negative value would previously
    # be passed through and formatted as an invalid byte for ipmitool.
    raw = max(0, min(raw, 100))
    ipmi_raw([0x30, 0x30, 0x02, 0xff, raw])
def temperature_to_fan_speed(temperature):
    """Map a core temperature (deg C) to a fan duty-cycle fraction.

    Ad-hoc piecewise curve: off below 40, linear up to 0.5 approaching 70,
    full speed from 70 upward.
    """
    if temperature < 40:
        return 0.0
    if temperature < 70:
        return (temperature - 40) / 30 * 0.5
    return 1.0
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"-r", "--poll-rate",
type=float, default=10.,
)
parser.add_argument(
"--dump-curve",
action="store_true",
)
parser.add_argument(
"--print",
action="store_true",
)
parser.add_argument(
"-m", "--min-speed",
type=float, default=0.18
)
args = parser.parse_args()
if args.dump_curve:
# Useful for debugging
for t in range(10, 100):
print(t, temperature_to_fan_speed(t))
sys.exit()
# TODO: Systemd watchdog
# TODO: Trap kills and enable
print("Disabling automatic fan control")
ipmi_disable_fan_control()
print("Entering the feedback loop")
next_run = time.monotonic()
while True:
now = time.monotonic()
if now < next_run:
time.sleep(next_run - now)
next_run = time.monotonic() + args.poll_rate
# Get the temperatures and pick the maximal one
temperature = max(t.current for t in psutil.sensors_temperatures()["coretemp"])
# Adjust the fan speed
# TODO: Add some hysteresis here
# The minimum speed setting is to stop iDRAC complaining about failed
# fans. The server runs fine even with 0RPM in idle, but iDRAC
# complains.
fan_speed = max(args.min_speed, temperature_to_fan_speed(temperature))
ipmi_set_fan_speed(fan_speed)
if args.print:
print(temperature, fan_speed)
|
StarcoderdataPython
|
1712592
|
from datetime import timedelta
from oauth2_provider.oauth2_validators import (
OAuth2Validator,
GRANT_TYPE_MAPPING,
)
from oauth2_provider.models import AbstractApplication, get_grant_model
from oauth2_provider.settings import oauth2_settings
from oauth2_provider.scopes import get_scopes_backend
from django.utils import timezone
from .claims import get_claims_provider
from .jwt import get_jwt_builder
# Register "openid" as a response/grant alias handled by the standard
# authorization-code grant.
GRANT_TYPE_MAPPING["openid"] = (AbstractApplication.GRANT_AUTHORIZATION_CODE, )
# Resolve the configurable implementations once at import time.
ClaimsProvider = get_claims_provider()
JWTBuilder = get_jwt_builder()
Grant = get_grant_model()
class RequestValidator(OAuth2Validator):
    """OpenID-Connect-aware request validator.

    Extends django-oauth-toolkit's ``OAuth2Validator`` with id_token
    issuance, nonce-carrying authorization codes, and permissive scope
    filtering.
    """

    def validate_response_type(self,
                               client_id,
                               response_type,
                               client,
                               request,
                               *args,
                               **kwargs):
        """Allow "code"/"token" (incl. hybrid forms) per the client's grants."""
        # https://github.com/jazzband/django-oauth-toolkit/blob/master/oauth2_provider/oauth2_validators.py#L404
        # TODO restrict response_types to a set defined in `settings`
        if "code" in response_type:
            return client.allows_grant_type(
                AbstractApplication.GRANT_AUTHORIZATION_CODE
            )
        elif "token" in response_type:
            return client.allows_grant_type(AbstractApplication.GRANT_IMPLICIT)
        else:
            return False

    def validate_silent_login(self, request):
        # Silent (prompt=none) logins are not supported.
        return False

    def validate_silent_authorization(self, request):
        # Silent authorization is not supported.
        return False

    def validate_user_match(self, id_token_hint, scopes, claims, request):
        # Only valid when no id_token_hint is supplied; matching a hinted
        # user is not implemented.
        if id_token_hint is None:
            return True
        return False

    def get_id_token(self, token, token_handler, request):
        """Build the id_token JWT from the configured claims provider."""
        cp = ClaimsProvider(user=request.user, token=token, request=request)
        claims = cp.get_claims()
        return JWTBuilder().encode(claims)

    def save_bearer_token(self, token, request, *args, **kwargs):
        # Pure "id_token" responses carry no bearer token to persist.
        # Should also check that response_type was only "id_token".
        if request.response_type == "id_token":
            return
        # BUG FIX: previously called as ``super().save_bearer_token(token,
        # request, args, kwargs)``, passing the args tuple and kwargs dict
        # as two positional parameters instead of unpacking them.
        super().save_bearer_token(token, request, *args, **kwargs)

    def validate_code(self, client_id, code, client, request, *args, **kwargs):
        """Look up the grant for *code* and load scopes/user/nonce onto request."""
        try:
            grant = Grant.objects.get(code=code, application=client)
            if not grant.is_expired():
                request.scopes = grant.scope.split(" ")
                request.user = grant.user
                # Carried through so the id_token can echo the nonce.
                request.nonce = grant.nonce
                return True
            return False
        except Grant.DoesNotExist:
            return False

    def save_authorization_code(self,
                                client_id,
                                code,
                                request,
                                *args,
                                **kwargs):
        """Persist the authorization code, including the OIDC nonce."""
        expires = timezone.now() + timedelta(
            seconds=oauth2_settings.AUTHORIZATION_CODE_EXPIRE_SECONDS)
        g = Grant(
            application=request.client,
            user=request.user,
            code=code["code"],
            expires=expires,
            redirect_uri=request.redirect_uri,
            scope=" ".join(request.scopes),
            nonce=getattr(request, 'nonce', None)
        )
        g.save()

    def validate_scopes(self,
                        client_id,
                        scopes, client,
                        request,
                        *args,
                        **kwargs):
        """Keep only the scopes this application offers; never reject."""
        available_scopes = get_scopes_backend().get_available_scopes(
            application=client,
            request=request)
        request.scopes = list(set(available_scopes) & set(scopes))
        # https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest
        # unexpected scopes should be ignored
        return True
|
StarcoderdataPython
|
3308133
|
#
# ESG request manager Python interface
#
from Fnorb.orb import CORBA
RequestManagerUnavailable = 'Request manager is unavailable.'
InvalidRequestManager = 'Invalid request manager!'
class RequestManager:
    """
    ESG request manager singleton class. The first instance initializes CORBA
    and creates the server proxy object. Subsequent instances call the cached
    server object.
    """
    # Class-level cache shared by all instances (singleton behaviour).
    server = None

    def __init__(self, iorFile='Server.ref', orbArgs=None):
        """Connect to the request manager, caching the proxy on the class.

        iorFile -- file containing the server's stringified IOR.
        orbArgs -- optional argument list forwarded to CORBA.ORB_init.
        """
        # BUG FIX: orbArgs previously defaulted to a mutable list; use None
        # and substitute a fresh list. Also use "is None" identity checks.
        if orbArgs is None:
            orbArgs = []
        if self.server is None:
            # Initialise the ORB.
            orb = CORBA.ORB_init(orbArgs, CORBA.ORB_ID)
            # Read the server's stringified IOR from a file (this is just a
            # quick and dirty way of locating the server - a better method is
            # to use the naming or trader services).
            try:
                stringified_ior = open(iorFile).read()
            except (IOError, OSError):
                # NOTE(review): this module uses Python-2-era string
                # exceptions throughout; kept for interface compatibility.
                raise RequestManagerUnavailable
            # Convert the stringified IOR into an active object reference.
            server = orb.string_to_object(stringified_ior)
            # Make sure that the server is not a 'nil object reference'
            # (represented in Python by the value 'None').
            if server is None:
                raise 'Nil object reference!'
            # Make sure that the object implements the expected interface!
            try:
                server_check = server._is_a('IDL:RequestManager:1.0')
            except:  # noqa: E722 -- CORBA may raise non-Exception types here
                raise RequestManagerUnavailable
            if not server_check:
                raise InvalidRequestManager
            RequestManager.server = server

    def request(self, userid, requestList):
        """
        server.request(userid, requestList)
        creates a request for files to be transferred. The function returns a tuple
        (result, token), where <result> is an error indicator, and <token> is the
        string request token.
        <userid> is a string user ID.
        <requestList> is a list of REQUESTs, where:
        A REQUEST is created by REQUEST(source, target, spec, flag);
        <source> and target are FILE_LOCATIONs: FILE_LOCATION(dataset, file);
        <dataset> and <file> are strings;
        <spec> is a SLABSPEC: SLABSPEC(variableName, datatype, selection);
        <variableName> and <datatype> are strings;
        <selection> is a list of TUPLES: [TUPLE(start, stop, stride), TUPLE(...), ...]
        <flag> is true (1) iff a replica catalog should be searched for the actual source file.
        Example:
        lc = "lc=B04.10.atm, rc=PCMDI Replica Catalog, o=Argonne National Laboratory, o=Globus, c=US"
        targetdir = "file:/pcmdi/drach/ngi"
        source1 = FILE_LOCATION(lc, "B04.10.atm.0049.nc")
        target1 = FILE_LOCATION(targetdir, "B04.10.atm.0049.nc")
        selection1 = [TUPLE(0,12,1),TUPLE(0,18,1),TUPLE(0,64,1),TUPLE(0,128,1)]
        specs11 = SLABSPEC("U", "Float", selection1)
        specs12 = SLABSPEC("V", "Float", selection1)
        specs1 = [specs11, specs12]
        req1 = REQUEST(source1, target1, specs1, 1)
        source2 = FILE_LOCATION(lc, "B04.10.atm.0050.nc")
        target2 = FILE_LOCATION(targetdir, "B04.10.atm.0050.nc")
        selection2 = [TUPLE(0,12,1),TUPLE(0,18,1),TUPLE(0,64,1),TUPLE(0,128,1)]
        specs21 = SLABSPEC("U", "Float", selection2)
        specs22 = SLABSPEC("V", "Float", selection2)
        specs2 = [specs21, specs22]
        req2 = REQUEST(source2, target2, specs2, 1)
        requests = [req1, req2]
        result, token = server.request("u17374", requests)
        """
        return self.server.request(userid, requestList)

    def requestFile(self, userid, dataset, sourcefile, targetfile, useReplica=1):
        """
        server.requestFile(self, dataset, sourcefile, targetfile, useReplica=1)
        creates a request for a file to be tranferred. The function returns a tuple
        (result, token), where <result> is an error indicator, and <token> is the
        string request token.
        <dataset> is the string name of the source dataset.
        <sourcefile> is the path of the source file.
        <targetfile> is the path of the target file.
        <useReplica> is true, iff <dataset> is a logical collection name in
        the replica catalog. If false, <dataset> is a URL.
        """
        # Imported lazily: the IDL-generated module only exists once the
        # CORBA stubs have been built.
        from _GlobalIDL import FILE_LOCATION, REQUEST
        source = FILE_LOCATION(dataset, sourcefile)
        target = FILE_LOCATION("", targetfile)
        requests = [REQUEST(source, target, [], useReplica)]
        return self.request(userid, requests)

    def execute(self, token):
        """
        server.execute(token)
        executes a request.
        <token> is the request token returned by a request call.
        """
        return self.server.execute(token)

    def cancel(self, token):
        """
        server.cancel(token)
        cancels a request.
        <token> is the request token returned by a request call.
        """
        return self.server.cancel(token)

    def estimate(self, token):
        """
        server.estimate(token)
        returns an estimate of time remaining.
        <token> is the request token returned by a request call.
        """
        return self.server.estimate(token)

    def status(self, token):
        """
        server.status(token)
        returns the status of a file transfer, as a list of FILE_STATUS objects.
        <token> is the request token returned by a request call.
        """
        return self.server.status(token)

    def staged(self, token, filename):
        """
        server.staged(token, filename)
        return true iff the file has been staged.
        <token> is the request token returned by a request call.
        """
        return self.server.staged(token, filename)

    def stageFailed(self, token, filename):
        """
        server.stageFailed(token, filename)
        return true iff the file stage operation failed.
        <token> is the request token returned by a request call.
        """
        return self.server.stageFailed(token, filename)
|
StarcoderdataPython
|
42532
|
def validate_positive_integer(param):
    """Validate that *param* is a positive integer.

    Returns None on success; raises ValueError otherwise. ``bool`` is
    rejected explicitly even though it subclasses ``int``: previously
    ``True`` slipped through as a "positive integer".
    """
    if isinstance(param, int) and not isinstance(param, bool) and param > 0:
        return None
    raise ValueError(
        "Invalid value, expected positive integer, got {0}".format(param))
|
StarcoderdataPython
|
11344660
|
from keras_cv_attention_models.model_surgery.model_surgery import (
SAMModel,
DropConnect,
add_l2_regularizer_2_model,
convert_to_mixed_float16,
convert_mixed_float16_to_float32,
convert_to_fused_conv_bn_model,
get_actual_survival_probabilities,
get_actual_drop_connect_rates,
replace_ReLU,
replace_add_with_drop_connect,
replace_add_with_stochastic_depth,
replace_stochastic_depth_with_add,
)
|
StarcoderdataPython
|
291523
|
<filename>tests/models_builder_result/h_record.py
# generated by ModelBuilder
from scielo_migration.isisdb.base_h_record import BaseArticleRecord
# generated by ModelBuilder
class ArticleRecord(BaseArticleRecord):
    """ISIS "h" (article) record model; thin wrapper over the generated base.

    All parsing/lookup behaviour lives in BaseArticleRecord; this subclass
    exists so hand-written extensions can be added without touching
    generated code.
    """
    def __init__(
            self, record, multi_val_tags=None,
            data_dictionary=None):
        super().__init__(
            record, multi_val_tags, data_dictionary)
|
StarcoderdataPython
|
290889
|
#Documentation: https://sefiks.com/2019/02/13/apparent-age-and-gender-prediction-in-keras/
import numpy as np
import cv2
from keras.models import Model, Sequential
from keras.layers import Input, Convolution2D, ZeroPadding2D, MaxPooling2D, Flatten, Dense, Dropout, Activation
from PIL import Image
from keras.preprocessing.image import load_img, save_img, img_to_array
from keras.applications.imagenet_utils import preprocess_input
from keras.preprocessing import image
from keras.models import model_from_json
import matplotlib.pyplot as plt
from os import listdir
#-----------------------
#you can find male and female icons here: https://github.com/serengil/tensorflow-101/tree/master/dataset
# Overlay icons shown next to detected faces; both scaled to 40x40 px.
# NOTE(review): cv2.imread returns None if the file is missing -- the
# resize below would then fail; confirm male.jpg/female.jpg are present.
enableGenderIcons = True
male_icon = cv2.imread("male.jpg")
male_icon = cv2.resize(male_icon, (40, 40))
female_icon = cv2.imread("female.jpg")
female_icon = cv2.resize(female_icon, (40, 40))
#-----------------------
# Haar cascade for frontal-face detection (XML expected in the working dir).
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
def preprocess_image(image_path):
    """Load an image file and return a (1, 224, 224, 3) batch ready for VGG."""
    pixels = img_to_array(load_img(image_path, target_size=(224, 224)))
    batch = np.expand_dims(pixels, axis=0)
    return preprocess_input(batch)
def loadVggFaceModel():
    """Build the VGG-Face architecture (2622-way softmax, 224x224x3 input).

    Layer sequence is identical to the canonical hand-unrolled version:
    five ZeroPadding/Conv blocks of (2x64, 2x128, 3x256, 3x512, 3x512)
    each followed by 2x2 max-pooling, then the convolutionalized
    fully-connected head.
    """
    model = Sequential()
    conv_blocks = [(2, 64), (2, 128), (3, 256), (3, 512), (3, 512)]
    first_layer = True
    for n_convs, filters in conv_blocks:
        for _ in range(n_convs):
            if first_layer:
                # Only the very first layer declares the input shape.
                model.add(ZeroPadding2D((1, 1), input_shape=(224, 224, 3)))
                first_layer = False
            else:
                model.add(ZeroPadding2D((1, 1)))
            model.add(Convolution2D(filters, (3, 3), activation='relu'))
        model.add(MaxPooling2D((2, 2), strides=(2, 2)))
    # Fully-connected head expressed as convolutions.
    model.add(Convolution2D(4096, (7, 7), activation='relu'))
    model.add(Dropout(0.5))
    model.add(Convolution2D(4096, (1, 1), activation='relu'))
    model.add(Dropout(0.5))
    model.add(Convolution2D(2622, (1, 1)))
    model.add(Flatten())
    model.add(Activation('softmax'))
    return model
def ageModel():
    """Build the age-estimation network.

    VGG-Face trunk with its classification head replaced by a 101-way
    softmax (one class per year, 0-100); loads pre-trained weights from
    age_model_weights.h5 in the working directory.
    """
    model = loadVggFaceModel()
    # FIX: removed a dead `base_model_output = Sequential()` assignment
    # that was immediately overwritten.
    base_model_output = Convolution2D(101, (1, 1), name='predictions')(model.layers[-4].output)
    base_model_output = Flatten()(base_model_output)
    base_model_output = Activation('softmax')(base_model_output)
    age_model = Model(inputs=model.input, outputs=base_model_output)
    #you can find the pre-trained weights for age prediction here: https://drive.google.com/file/d/1YCox_4kJ-BYeXq27uUbasu--yz28zUMV/view?usp=sharing
    age_model.load_weights("age_model_weights.h5")
    return age_model
def genderModel():
    """Build the gender-classification network.

    VGG-Face trunk with a 2-way softmax head (index 0 = female,
    1 = male per the caller's mapping); loads pre-trained weights from
    gender_model_weights.h5 in the working directory.
    """
    model = loadVggFaceModel()
    # FIX: removed a dead `base_model_output = Sequential()` assignment
    # that was immediately overwritten.
    base_model_output = Convolution2D(2, (1, 1), name='predictions')(model.layers[-4].output)
    base_model_output = Flatten()(base_model_output)
    base_model_output = Activation('softmax')(base_model_output)
    gender_model = Model(inputs=model.input, outputs=base_model_output)
    #you can find the pre-trained weights for gender prediction here: https://drive.google.com/file/d/1wUXRVlbsni2FN9-jkS_f4UTUrm1bRLyk/view?usp=sharing
    gender_model.load_weights("gender_model_weights.h5")
    return gender_model
# Main loop: capture webcam frames, detect faces, predict apparent age and
# gender per face, and draw annotated overlays until 'q' is pressed.
age_model = ageModel()
gender_model = genderModel()
#age model has 101 outputs and its outputs will be multiplied by its index label. sum will be apparent age
output_indexes = np.array([i for i in range(0, 101)])
#------------------------
cap = cv2.VideoCapture(0) #capture webcam
while(True):
    ret, img = cap.read()
    #img = cv2.resize(img, (640, 360))
    faces = face_cascade.detectMultiScale(img, 1.3, 5)
    for (x,y,w,h) in faces:
        if w > 130: #ignore small faces
            #mention detected face
            """overlay = img.copy(); output = img.copy(); opacity = 0.6
            cv2.rectangle(img,(x,y),(x+w,y+h),(128,128,128),cv2.FILLED) #draw rectangle to main image
            cv2.addWeighted(overlay, opacity, img, 1 - opacity, 0, img)"""
            cv2.rectangle(img,(x,y),(x+w,y+h),(128,128,128),1) #draw rectangle to main image
            #extract detected face
            detected_face = img[int(y):int(y+h), int(x):int(x+w)] #crop detected face
            # NOTE(review): bare except below hides real errors; numpy
            # slicing out of range does not raise, so it may never fire.
            try:
                #age gender data set has 40% margin around the face. expand detected face.
                margin = 30
                margin_x = int((w * margin)/100); margin_y = int((h * margin)/100)
                detected_face = img[int(y-margin_y):int(y+h+margin_y), int(x-margin_x):int(x+w+margin_x)]
            except:
                print("detected face has no margin")
            try:
                #vgg-face expects inputs (224, 224, 3)
                detected_face = cv2.resize(detected_face, (224, 224))
                img_pixels = image.img_to_array(detected_face)
                img_pixels = np.expand_dims(img_pixels, axis = 0)
                img_pixels /= 255
                #find out age and gender
                age_distributions = age_model.predict(img_pixels)
                # Apparent age = expectation over the 101 class bins.
                apparent_age = str(int(np.floor(np.sum(age_distributions * output_indexes, axis = 1))[0]))
                gender_distribution = gender_model.predict(img_pixels)[0]
                gender_index = np.argmax(gender_distribution)
                if gender_index == 0: gender = "F"
                else: gender = "M"
                #background for age gender declaration
                info_box_color = (46,200,255)
                #triangle_cnt = np.array( [(x+int(w/2), y+10), (x+int(w/2)-25, y-20), (x+int(w/2)+25, y-20)] )
                triangle_cnt = np.array( [(x+int(w/2), y), (x+int(w/2)-20, y-20), (x+int(w/2)+20, y-20)] )
                cv2.drawContours(img, [triangle_cnt], 0, info_box_color, -1)
                cv2.rectangle(img,(x+int(w/2)-50,y-20),(x+int(w/2)+50,y-90),info_box_color,cv2.FILLED)
                #labels for age and gender
                cv2.putText(img, apparent_age, (x+int(w/2), y - 45), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 111, 255), 2)
                if enableGenderIcons:
                    if gender == 'M': gender_icon = male_icon
                    else: gender_icon = female_icon
                    img[y-75:y-75+male_icon.shape[0], x+int(w/2)-45:x+int(w/2)-45+male_icon.shape[1]] = gender_icon
                else:
                    cv2.putText(img, gender, (x+int(w/2)-42, y - 45), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 111, 255), 2)
            except Exception as e:
                print("exception",str(e))
    cv2.imshow('img',img)
    if cv2.waitKey(1) & 0xFF == ord('q'): #press q to quit
        break
#kill open cv things
cap.release()
cv2.destroyAllWindows()
|
StarcoderdataPython
|
1918662
|
<filename>lib/xupdate/writers.py
########################################################################
# amara/xupdate/writers.py
"""
XUpdate output writers
"""
class text_writer(object):
    """XUpdate output writer that keeps only character data.

    All structural events (elements, attributes, PIs, comments) are
    ignored; ``get_result`` returns the concatenated text.
    """
    __slots__ = ('_data', '_write')

    def __init__(self):
        self._data = []
        # Bind the append method once; text() calls it directly.
        self._write = self._data.append

    def get_result(self):
        """Return all text received so far as a single unicode string."""
        return u''.join(self._data)

    def text(self, data):
        """Accumulate character data."""
        self._write(data)

    # Structural events contribute nothing to a text result.
    def start_element(self, namespace, name, attributes=()):
        pass

    def end_element(self, namespace, name):
        pass

    def attribute(self, namespace, name, value):
        pass

    def processing_instruction(self, target, data):
        pass

    def comment(self, data):
        pass
class node_writer(object):
    """XUpdate output writer that collects DOM nodes.

    Nodes are appended to an internal list which ``get_result`` returns;
    new nodes are created through the document ("factory") that owns
    ``parent``.
    """
    # BUGFIX: __slots__ was an empty tuple, so every attribute assignment in
    # __init__ raised AttributeError and the class was unusable.
    __slots__ = ('_parent', '_factory', '_nodes', '_last')

    def __init__(self, parent, refnode=None):
        self._parent = parent
        #FIXME: Use regular constructor. No more DOM factory
        self._factory = parent.rootNode
        self._nodes = []
        # last node touched; used to merge adjacent text and hang attributes
        self._last = parent

    def get_result(self):
        """Return the list of top-level nodes written so far."""
        return self._nodes

    def start_element(self, namespace, name, attributes=()):
        pass

    def end_element(self, namespace, name):
        # NOTE(review): fetches the last node but does nothing with it --
        # element construction looks unimplemented; confirm intended behavior.
        node = self._nodes[-1]

    def attribute(self, namespace, name, value):
        try:
            self._last.xml_attributes[namespace, name] = value
        except AttributeError:
            # attributes may only be hung off an element node
            # NOTE(review): XUpdateError is not imported in this module -- confirm.
            raise XUpdateError(XUpdateError.ATTRIBUTE_NOT_ALLOWED)

    def text(self, data):
        # merge consecutive text output into a single text node
        # NOTE(review): Node is not imported in this module -- confirm.
        if self._last.nodeType == Node.TEXT_NODE:
            self._last.appendData(data)
        else:
            node = self._last = self._factory.createTextNode(data)
            self._nodes.append(node)

    def processing_instruction(self, target, data):
        pass

    def comment(self, data):
        pass
|
StarcoderdataPython
|
6671805
|
# ---
# jupyter:
#   jupytext:
#     formats: ipynb,py:percent
#     text_representation:
#       extension: .py
#       format_name: percent
#       format_version: '1.3'
#     jupytext_version: 1.13.6
#   kernelspec:
#     display_name: Python 3
#     language: python
#     name: python3
# ---

# %% [md]
# # Predicting customer subscription
# ## Training ML Models

# %%
# Load the preprocessed data produced by the preprocessing notebook.
import pandas as pd

data = pd.read_pickle("data/preprocessed_data.pkl")
data.head()

# %%
# Since our dataset is imbalanced, calculate a majority baseline
# (DummyClassifier predicts the most frequent class).
from sklearn.dummy import DummyClassifier

SEED = 42  # fixed seed so every model / CV split is reproducible
majority = DummyClassifier(random_state=SEED)

# %%
# Use SVM to train the model.
# SVM typically leads to near-optimal results in linearly separable problems.
from sklearn.svm import LinearSVC

svm = LinearSVC(dual=False, random_state=SEED)

# %%
# Use kNN to train the model.
# kNN may work well when a general function cannot be learned.
from sklearn.neighbors import KNeighborsClassifier

knn = KNeighborsClassifier()

# %%
# Use a random forest to train the model.
# Random forests may offer explainable solutions.
from sklearn.ensemble import RandomForestClassifier

rf = RandomForestClassifier(n_estimators=10, random_state=SEED)

# %%
# Use logistic regression to train the model.
# Logistic regression is a strong baseline for binary classification problems
# and also provides an explainable model.
from sklearn.linear_model import LogisticRegression

lr = LogisticRegression(random_state=SEED, max_iter=1000)

# %%
# Use a boosting algorithm to train the model.
# Boosting may generalize better on test data.
from sklearn.ensemble import GradientBoostingClassifier
# HistGradientBoostingClassifier(categorical_features=[0])
gb = GradientBoostingClassifier(random_state=SEED)

# %%
# Drop columns that were only used for visualization in earlier notebooks.
data = data.drop(['y_yes', 'age_group', 'duration_min_group'], axis=1)

# %%
# One-hot encode categorical data.
dummies = pd.get_dummies(data)
dummies.head()

# %%
# Create the feature matrix and target vector; the y_no/y_yes dummies are
# the encoded target, not features.
X_train = dummies.drop(['y_no', 'y_yes'], axis=1)
y_train = dummies['y_yes']

# %%
# Persist data for reuse by later notebooks.
X_train.to_pickle("data/X_train.pkl")
y_train.to_pickle("data/y_train.pkl")

# %%
# Iterate over classifiers to generate reports.
from sklearn.model_selection import cross_val_score, cross_val_predict
from sklearn.metrics import classification_report
from notebooks.util import plot_confusion_matrix

# Macro F1 weighs both classes equally -- appropriate for the imbalanced target.
SCORING = 'f1_macro'
classifiers = {
    'Majority': majority,
    'SVM': svm,
    'kNN': knn,
    'Random Forest': rf,
    'LR': lr,
    'Gradient Boosting': gb
}
results = pd.DataFrame(columns=[SCORING, "model"])
for k, v in classifiers.items():
    # Cross-validate the dataset.
    clf_scores = cross_val_score(v, X_train, y_train, scoring=SCORING, cv=10)
    results = pd.concat([results, pd.DataFrame({SCORING: clf_scores, 'model': k})])
    print(f"{k}: {clf_scores.mean()} +- {clf_scores.std()}")
    # Generate confusion matrix.
    pred = cross_val_predict(v, X_train, y_train, cv=10)
    plot_confusion_matrix(y_train, pred, k)
    # Display classification report.
    print(classification_report(y_train, pred))

# %%
# Compare overall results using a boxplot of the per-fold scores.
from matplotlib import pyplot as plt

results.boxplot(column=[SCORING], by='model',  # positions=[1, 3, 2],
                showmeans=True, figsize=(7, 5))
plt.ylim([0.4, 1.0])

# %%
# Random Forest: plot one decision tree to explain the model.
from sklearn.tree import plot_tree

rf = RandomForestClassifier(n_estimators=10, random_state=SEED)
print(cross_val_score(rf, X_train, y_train, scoring=SCORING, cv=10).mean())
rf.fit(X_train, y_train)
ESTIMATOR = 0  # index of the tree within the forest to visualize
fig = plt.figure(figsize=(150, 100))
plot_tree(rf.estimators_[ESTIMATOR], max_depth=3, feature_names=X_train.columns,
          class_names=["no", "yes"], filled=True, proportion=True, rounded=True)

# %%
# Feature importance for the tree-based models.
import numpy as np

classifiers = {
    'Random Forest': rf,
    'Gradient Boosting': gb
}
for name, clf in classifiers.items():
    clf.fit(X_train, y_train)
    importance = pd.DataFrame({'feature': X_train.columns, 'importance': clf.feature_importances_})
    importance = importance.sort_values('importance', ascending=False)
    top = importance.head(10)
    top.plot.bar()
    plt.xticks(np.arange(len(top)), labels=top['feature'])
    plt.title(f"Top {len(top)} features for {name}")
    plt.show()

# %%
# # Predict on test data
test = pd.read_excel("data/test_file.xlsx")
print(test.dtypes)
test.head()

# %%
# Encode test data the same as train data: reindexing to the training columns
# drops categories unseen at train time and zero-fills the missing ones.
test_dummies = pd.get_dummies(test).reindex(columns=X_train.columns, fill_value=0)
test_dummies.head()

# %%
# Predict with the gradient-boosting model and save the results.
test['y_pred'] = gb.predict(test_dummies)
test.to_excel("data/test_file_pred.xlsx")
|
StarcoderdataPython
|
11389986
|
<filename>superannotate/input_converters/converters/sagemaker_converters/sagemaker_to_sa_vector.py
import os
import json
from glob import glob
import numpy as np
def _create_classes(classes_map):
classes_loader = []
for key, value in classes_map.items():
color = np.random.choice(range(256), size=3)
hexcolor = "#%02x%02x%02x" % tuple(color)
sa_classes = {
'id': int(key),
'name': value,
'color': hexcolor,
'attribute_groups': []
}
classes_loader.append(sa_classes)
return classes_loader
def sagemaker_object_detection_to_sa_vector(data_path, main_key):
    """Convert SageMaker object-detection ground truth to SuperAnnotate vector format.

    :param data_path: directory containing ``output.manifest`` and the
        consolidated-annotation ``*.json`` files.
    :param main_key: labeling-job name, i.e. the key under
        ``consolidatedAnnotation['content']`` holding the annotations;
        ``'<main_key>-metadata'`` holds the class map.
    :return: ``(sa_jsons, sa_classes, None)`` where ``sa_jsons`` maps
        ``'<image name>___objects.json'`` to a list of SA bbox annotations and
        ``sa_classes`` is built from every class id seen.
    :raises Exception: if ``output.manifest`` cannot be read or a json entry
        has no ``consolidatedAnnotation`` key (original contract preserved).
    """
    sa_jsons = {}
    dataset_manifest = []
    manifest_path = os.path.join(data_path, 'output.manifest')
    try:
        # BUGFIX: use a context manager so the manifest handle is closed
        # deterministically (it was previously left open).
        with open(manifest_path) as img_map_file:
            # each manifest line is an independent JSON document
            for line in img_map_file:
                dataset_manifest.append(json.loads(line))
    except OSError as e:
        # keep the original error type and message, but chain the cause
        raise Exception("'output.manifest' file doesn't exist") from e

    json_list = glob(os.path.join(data_path, '*.json'))
    classes_ids = {}
    for json_file in json_list:
        # BUGFIX: the per-file handle was also leaked
        with open(json_file) as fp:
            data_json = json.load(fp)
        for img in data_json:
            if 'consolidatedAnnotation' not in img.keys():
                print('Wrong json files')
                raise Exception
            # manifest line this consolidated annotation refers to
            manifest = dataset_manifest[int(img['datasetObjectId'])]
            file_name = os.path.basename(
                manifest['source-ref']
            ) + '___objects.json'
            # accumulate the class map across all annotation files
            classes = img['consolidatedAnnotation']['content'][
                main_key + '-metadata']['class-map']
            for key, value in classes.items():
                if key not in classes_ids:
                    classes_ids[key] = value
            annotations = img['consolidatedAnnotation']['content'][main_key][
                'annotations']
            sa_loader = []
            for annotation in annotations:
                # SageMaker boxes are (left, top, width, height); SA wants corners.
                points = {
                    'x1': annotation['left'],
                    'y1': annotation['top'],
                    'x2': annotation['left'] + annotation['width'],
                    'y2': annotation['top'] + annotation['height']
                }
                sa_loader.append({
                    'type': 'bbox',
                    'points': points,
                    'className': classes[str(annotation['class_id'])],
                    'classId': int(annotation['class_id']),
                    'attributes': [],
                    'probability': 100,
                    'locked': False,
                    'visible': True,
                    'groupId': 0
                })
            sa_jsons[file_name] = sa_loader
    sa_classes = _create_classes(classes_ids)
    return sa_jsons, sa_classes, None
|
StarcoderdataPython
|
8006317
|
<filename>tests/DjangoTest/DjangoTest/view_401_settings.py
from .settings import *

# Override for the 401-error-view tests: disable the permission gate.
# NOTE(review): consumers of APP_ERROR_VIEW_PERMISSION appear to treat None
# as "no permission required" -- confirm against the error-views app.
APP_ERROR_VIEW_PERMISSION = None
|
StarcoderdataPython
|
8010112
|
<filename>pyethapp/tests/test_app.py
from builtins import str
import os
import pytest
from pyethapp import app
from pyethapp import config
from click.testing import CliRunner
genesis_json = {
"nonce": "0x00000000000000ff",
"difficulty": "0xff0000000",
"mixhash": "0xff00000000000000000000000000000000000000000000000000000000000000",
"coinbase": "0xff00000000000000000000000000000000000000",
"timestamp": "0xff",
"parentHash": "0xff00000000000000000000000000000000000000000000000000000000000000",
"extraData": "0x11bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82fa",
"gasLimit": "0xffff",
"alloc": {
"ffffffffffffffffffffffffffffffffffffffff": {"balance": "9876543210"},
"0000000000000000000000000000000000000000": {"balance": "1234567890"}
}
}
genesis_yaml = """
eth:
genesis: {}
""".format(genesis_json)
def test_show_usage():
    """Invoking the CLI with no arguments prints the usage banner."""
    result = CliRunner().invoke(app.app, [])
    assert "Usage: app " in result.output, result.output
def test_no_such_option():
    """An unknown option is rejected with a clear error message."""
    result = CliRunner().invoke(app.app, ['--WTF'])
    assert 'no such option: --WTF' in result.output, result.output
def test_no_such_command():
    """An unknown subcommand is rejected with a clear error message."""
    result = CliRunner().invoke(app.app, ['eat'])
    assert 'Error: No such command "eat"' in result.output, result.output
@pytest.mark.parametrize('content', ['', '<html/>', 'print "hello world"'])
def test_non_dict_yaml_as_config_file(content):
    """Config files whose YAML payload is not a mapping are rejected."""
    runner = CliRunner()
    with runner.isolated_filesystem():
        with open('config.yaml', 'w') as config_file:
            config_file.write(content)
        result = runner.invoke(app.app, ['-C', 'config.yaml'])
    assert 'content of config should be an yaml dictionary' in result.output, result.output
@pytest.mark.parametrize('param', [('--Config', 'myconfig.yaml'),
                                   ('-C', 'myconfig.yaml'),
                                   ('-c', 'mygenesis.json'),
                                   ('-c', 'dict')])
def test_custom_config_file(param):
    # A custom genesis can be supplied three ways: as a YAML config file
    # (--Config/-C), as a JSON genesis file (-c <file>.json), or inline as a
    # single 'key=value' assignment (-c with a dict literal).
    runner = CliRunner()
    with runner.isolated_filesystem():
        opt, arg = param
        if arg.endswith('.yaml'):
            # full config file: YAML that wraps the genesis dict
            with open(arg, 'w') as text_file:
                text_file.write(genesis_yaml)
        elif arg.endswith('.json'):
            # genesis-only file: the dict's repr written verbatim
            with open(arg, 'w') as text_file:
                text_file.write(str(genesis_json))
        else:
            # inline: strip whitespace and pass as 'eth.genesis=<dict>'
            arg = str(genesis_json).replace('\n', '').replace(' ', '')
            if opt == '-c':
                arg = 'eth.genesis={}'.format(arg)
        result = runner.invoke(app.app, [opt, arg, 'config'])
        if arg.endswith('.json'):
            # a genesis *file* appears in the dumped config by its path
            patterns = ['genesis: {}'.format(param[1])]
        else:
            # inline/yaml genesis is expanded into individual keys
            patterns = ["{}: '{}'".format(k, v) for k, v in list(genesis_json.items()) if k != 'alloc']
        for pat in patterns:
            assert pat in result.output, '`{}` not found'.format(pat)
        # the alloc table is dumped with addresses and balances either way
        for k, v in list(genesis_json['alloc'].items()):
            assert k in result.output
            assert v['balance'] in result.output
def test_config_from_datadir(tmpdir):
    """Test, that when given a `--data-dir`, the app
    reads the config from the '`--data-dir`/config.yaml'.
    """
    data_dir = "datadir"
    runner = CliRunner()
    with runner.isolated_filesystem():
        os.mkdir(data_dir)
        # first run creates the default config inside the data dir
        runner.invoke(app.app, ["--data-dir", data_dir, "config"])
        # overwrite it with a recognizable setting
        with open(os.path.join(data_dir, config.CONFIG_FILE_NAME), "w") as configfile:
            configfile.write("p2p:\n max_peers: 9000")
        result = runner.invoke(app.app, ["--data-dir", data_dir, "config"])
        assert "max_peers: 9000" in result.output
if __name__ == '__main__':
    # Run the suite directly (without pytest); the parametrized cases are
    # expanded by hand.  test_config_from_datadir is skipped because it
    # declares the pytest tmpdir fixture.
    test_show_usage()
    test_no_such_option()
    test_no_such_command()
    for yaml_content in ('', '<html/>', 'print "hello world"'):
        test_non_dict_yaml_as_config_file(yaml_content)
    for cli_param in (('--Config', 'myconfig.yaml'),
                      ('-C', 'myconfig.yaml'),
                      ('-c', 'mygenesis.json'),
                      ('-c', 'dict')):
        test_custom_config_file(cli_param)
|
StarcoderdataPython
|
8042170
|
import os
from django.apps import AppConfig
class UnitDataConfig(AppConfig):
    """Django application config for the unit_data app."""
    # Dotted name Django uses to register this application.
    name = 'unit_data'

    def ready(self):
        """Startup processing.

        - Creates the database indices for the models.
        """
        # Guard against double startup under the development auto-reloader:
        # RUN_MAIN is only set in the reloader's child process.
        if not os.environ.get('RUN_MAIN'): return
        from . import models
        models.ensure_indices()
|
StarcoderdataPython
|
6577128
|
<reponame>ncsl/virtual_cortical_stim_epilepsy
import os
import warnings
import matplotlib as mp
import matplotlib.colors as colors
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.axes_grid1 import make_axes_locatable
from natsort import index_natsorted, order_by_index
try:
import brewer2mpl
except ImportError as e:
print(e)
from cortstim.base.utils.data_structures_utils import ensure_list
from cortstim.base.utils.data_structures_utils import generate_region_labels
from cortstim.edv.base.config.config import FiguresConfig
# outcome_dabest.mean_diff.plot()
# dabest.TwoGroupsEffectSize
# set the colormap and centre the colorbar
class MidpointNormalize(colors.Normalize):
    """Colormap normalization with a movable midpoint.

    Maps ``vmin`` -> 0, ``midpoint`` -> 0.5 and ``vmax`` -> 1 so a diverging
    colormap stays centered on ``midpoint`` even for an asymmetric data
    range, e.g.::

        im = ax.imshow(arr, norm=MidpointNormalize(midpoint=0., vmin=-100, vmax=100))
    """

    def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
        self.midpoint = midpoint
        colors.Normalize.__init__(self, vmin, vmax, clip)

    def __call__(self, value, clip=None):
        # Piecewise-linear map through (vmin, 0), (midpoint, 0.5), (vmax, 1).
        # NaNs come back masked; masked inputs and clipping are intentionally
        # not handled (simple implementation, as in the original).
        anchors_x = [self.vmin, self.midpoint, self.vmax]
        anchors_y = [0, 0.5, 1]
        return np.ma.masked_array(np.interp(value, anchors_x, anchors_y),
                                  np.isnan(value))
class AbstractBasePlot():
    """
    Abstract base plotting class that houses all logistical settings for a
    figure.

    Does not provide any plotting functionality itself; it helps with
    formatting a figure, setting font sizes, showing it and saving it.
    Subclasses are expected to provide ``figure_dir`` and ``figure_format``
    attributes (see BasePlotter).
    """

    @staticmethod
    def figure_filename(fig=None, figure_name=None):
        """Derive a filesystem-safe name for a figure.

        Defaults to the current matplotlib figure and its label, then strips
        characters that are awkward in file names.
        """
        if fig is None:
            fig = plt.gcf()
        if figure_name is None:
            figure_name = fig.get_label()
        # replace all unnecessary characters
        figure_name = figure_name.replace(
            ": ", "_").replace(
            " ", "_").replace(
            "\t", "_").replace(
            ",", "")
        return figure_name

    def save_vid(self, figdir, outdir, figname, prefix='img'):
        """Stitch numbered '<prefix>%01d.png' frames in figdir into an mpeg4 video.

        NOTE(review): shells out via os.system with unquoted interpolation;
        paths containing spaces or shell metacharacters will break (or worse).
        Consider subprocess.run with a list argument.
        """
        # build the command once so the printed command always matches what runs
        command = (
            "ffmpeg -r 1 -i {figdir}/{prefix}%01d.png -vcodec mpeg4 -y "
            "{outdir}/{figname}".format(
                prefix=prefix, figdir=figdir, outdir=outdir, figname=figname))
        print("Command: " + command)
        os.system(command)

    def _check_show(self):
        """Show or close the current figure, controlled by FiguresConfig.SHOW_FLAG."""
        if FiguresConfig.SHOW_FLAG:
            # NOTE(review): switching backends after pyplot is in use only
            # works for some backends; kept as in the original.
            mp.use('TkAgg')
            plt.ion()
            plt.show()
        else:
            mp.use('Agg')
            plt.ioff()
            plt.close()

    def format_figure(self, fig, axes, fontsize):
        """Shrink tick-label fonts on every axis to half of ``fontsize``."""
        axes = ensure_list(axes)
        for ax in axes:
            for tick in ax.yaxis.get_major_ticks():
                tick.label.set_fontsize(fontsize / 2.0)
            for tick in ax.xaxis.get_major_ticks():
                tick.label.set_fontsize(fontsize / 2.0)

    def _nat_order_labels(self, mat, labels):
        """Reorder rows of ``mat`` (and ``labels``) into natural sort order."""
        # get the natural order indices
        natindices = index_natsorted(labels)
        # order the matrix and the labels with the same index permutation
        ordered_mat = np.array(order_by_index(mat, natindices))
        ordered_labels = np.array(order_by_index(labels, natindices))
        return ordered_mat, ordered_labels

    def save_figure(self, fig, figure_name):
        """Save ``fig`` under self.figure_dir when FiguresConfig.SAVE_FLAG is set.

        An extension embedded in ``figure_name`` wins over self.figure_format.
        """
        if FiguresConfig.SAVE_FLAG:
            if "." in figure_name:
                # BUGFIX: rsplit keeps dots inside the base name intact;
                # str.split('.') raised ValueError for names like 'a.b.png'.
                figure_name, ext = figure_name.rsplit('.', 1)
            else:
                ext = self.figure_format
            # sanitize the name and cap its length at 100 characters
            figure_name = self.figure_filename(fig, figure_name)
            figure_name = figure_name[:np.min(
                [100, len(figure_name)])]
            # BUGFIX: makedirs creates missing intermediate directories and
            # avoids the race when the directory appears concurrently.
            os.makedirs(self.figure_dir, exist_ok=True)
            outputfilepath = os.path.join(self.figure_dir, figure_name + f".{ext}")
            # BUGFIX: the savefig keyword is bbox_inches; 'box_inches' was a typo.
            plt.savefig(outputfilepath,
                        bbox_inches='tight')

    def set_colorbar(self, img, axes, cbarlabel):
        """Attach a colorbar for ``img`` in a new axis to the right of ``axes``.

        :param cbarlabel: kept for interface compatibility (callers label the
            returned colorbar themselves).
        :return: (colorbar, colorbar_axis)
        """
        divider = make_axes_locatable(axes)
        cax1 = divider.append_axes("right", size="5%", pad=0.05)
        cbar = plt.colorbar(img, cax=cax1)
        return cbar, cax1
class BasePlotter(AbstractBasePlot):
    """
    Concrete plotting helper.

    Adds figure setup plus heatmap, time-series and vector plotting on top of
    the saving/formatting utilities in AbstractBasePlot.
    """

    def __init__(self, figure_dir):
        """
        :param figure_dir: directory that save_figure() writes into.
        """
        self.figure_dir = figure_dir
        self.figure_format = FiguresConfig.FIG_FORMAT
        # highlighting cursor: default to a no-op fallback
        self.HighlightingDataCursor = lambda *args, **kwargs: None
        # mpldatacursor only makes sense on an interactive backend
        if mp.get_backend() in mp.rcsetup.interactive_bk:
            try:
                from mpldatacursor import HighlightingDataCursor
                self.HighlightingDataCursor = HighlightingDataCursor
            except ImportError:
                warnings.warn(
                    "Importing mpldatacursor failed! No highlighting functionality in plots!")
        else:
            warnings.warn(
                "Noninteractive matplotlib backend! No highlighting functionality in plots!")

    def setup_figure(self, nrow=1, ncol=1, figure_size=FiguresConfig.VERY_LARGE_SIZE):
        """Create an nrow x ncol grid of subplots.

        :return: (figure, axes) where axes is always a flat 1-D numpy array,
            regardless of the grid shape.
        """
        fig, axs = plt.subplots(nrows=nrow, ncols=ncol,
                                figsize=figure_size)
        axs = np.array(axs)
        axs = axs.reshape(-1)
        return fig, axs

    def plotvertlines(self, ax, time, color='k', label=None):
        """
        Plot vertical dashed line(s) on an axis.

        :param ax: (Axes) object to plot on
        :param time: scalar time, or a list of times, to mark
        :param color: color of the vertical line(s)
        :param label: optional legend label
        :return: (Axes) object
        """
        if isinstance(time, list):
            # BUGFIX: draw one line per entry.  The original popped a single
            # element (mutating the caller's list) and then passed the
            # remaining *list* to axvline, which expects a scalar.
            for t in time:
                ax = self.plotvertlines(ax, t, color=color, label=label)
            return ax
        # plot vertical lines of 'predicted' onset/offset
        ax.axvline(time,
                   color=color,
                   linestyle='dashed',
                   linewidth=10, label=label)
        return ax

    @staticmethod
    def plot_heatmap_overtime(mat, subplot, titlestr,
                              ylabels=None, xlabels=None,
                              ax=None,
                              show_y_labels=True, show_x_labels=False,
                              indicecolors=None, colors=None,
                              sharey=None,
                              fontsize=FiguresConfig.LARGE_FONT_SIZE,
                              cbarlabel="",
                              cmapname='inferno'):
        """
        Plot a 2-D matrix (rows x time) as a heatmap with a colorbar.

        :param mat: 2-D array to image (rows become y, columns x/time)
        :param subplot: subplot spec used when `ax` is not supplied
        :param titlestr: axes title
        :param ylabels: per-row labels (used when show_y_labels)
        :param xlabels: per-column labels, indexable by an int array
            (used when show_x_labels)
        :param ax: existing Axes to draw on (optional)
        :param indicecolors: list of index lists; each gets the matching
            entry of `colors` applied to its y tick labels
        :param sharey: axis to share y with when creating a subplot
        :param fontsize: base font size
        :param cbarlabel: colorbar label
        :param cmapname: 'inferno' (default), 'OrRd' (via brewer2mpl), or any
            matplotlib colormap name
        :return: the Axes drawn on
        """
        # mutable-default hygiene: fresh lists per call (defaults changed
        # from [] to None; call-compatible)
        ylabels = [] if ylabels is None else ylabels
        xlabels = [] if xlabels is None else xlabels
        indicecolors = [] if indicecolors is None else indicecolors
        colors = [] if colors is None else colors
        assert len(indicecolors) == len(colors)
        if ax is None:
            ax = plt.subplot(subplot, sharey=sharey)  # initialize ax
        ax.set_title(titlestr, fontsize=fontsize)
        # get the size of the matrix to plot
        mat_size = mat.shape[0]
        time_size = mat.shape[1]
        # set the yticks
        y_ticks = np.arange(mat_size).astype(int)
        # resolve the colormap
        if cmapname == 'OrRd':
            bmap = brewer2mpl.get_map("OrRd", 'Sequential', 9, reverse=False)
            cmap = bmap.mpl_colormap
        elif cmapname == 'inferno':
            cmap = 'inferno'
        else:
            cmap = cmapname
        img = ax.imshow(mat,
                        origin='lower',
                        cmap=cmap,
                        aspect='auto',
                        interpolation='nearest',
                        alpha=0.3,
                        )
        # set a grid on the plot
        ax.grid(True, color='grey')
        if show_x_labels:
            # label roughly every tenth of the time axis
            x_ticks = np.array(
                np.arange(0, time_size, time_size / 10), dtype=np.int32)
            ax.set_xticks(x_ticks)
            ax.set_xticklabels(xlabels[x_ticks])
        if show_y_labels:
            # prefix each label with its row index
            region_labels = np.array(
                ["%d. %s" % l for l in zip(range(mat_size), ylabels)])
            ax.set_yticks(y_ticks)
            ax.set_yticklabels(region_labels, fontsize=fontsize / 1.5)
            # apply per-index tick-label colors
            ticklabels = ax.get_yticklabels(minor=False)
            for inds, color in zip(indicecolors, colors):
                for idx in inds:
                    ticklabels[idx].set_color(color)
            ax.set_yticklabels(ticklabels)
        else:
            ax.set_yticklabels([])
        # shrink tick-label fonts along both axes
        for tick in ax.yaxis.get_major_ticks():
            tick.label.set_fontsize(fontsize / 1.5)
        for tick in ax.xaxis.get_major_ticks():
            tick.label.set_fontsize(fontsize / 1.5)
        ax.autoscale(tight=True)
        # make a color bar
        # NOTE(review): set_colorbar is an instance method invoked with the
        # class as `self`; it works because `self` is unused there.
        cbar, cax1 = BasePlotter.set_colorbar(BasePlotter, img, ax, cbarlabel)
        cbar.set_label(cbarlabel, rotation=270,
                       fontsize=fontsize, labelpad=60)
        cax1.tick_params(labelsize=fontsize)
        return ax

    def _plot_ts(self, data, labels, ax,
                 show_ylabels=True,
                 offset=0.0,
                 special_idx=None,
                 errors_list=None,
                 fontsize=FiguresConfig.LARGE_FONT_SIZE):
        """
        Plot a set of time series stacked on a single axis.

        :param data: (np.ndarray) time series (samples x time); a 1-D array is
            treated as one sample.  Each trace is normalized by its own max
            (in place!) and offset vertically by its row index.
        :param labels: (list) of per-sample labels
        :param ax: (Axes) object to plot on
        :param show_ylabels: label the y axis with `labels`
        :param offset: number of initial time points to drop
        :param special_idx: sample indices drawn in red
        :param errors_list: optional error traces for shaded bands -- assumes
            one entry per sample; TODO confirm against callers
        :param fontsize: base font size for tick labels
        :return: (Axes) object
        """
        special_idx = [] if special_idx is None else special_idx
        errors_list = [] if errors_list is None else errors_list
        if data.ndim == 1:
            data = data[np.newaxis, :]
        # apply offset setting onto the data
        offset = int(offset)
        data = data[:, offset:]
        nsamples, ntimes = data.shape
        nTS = 1
        def_alpha = 1.0
        # generate ylabels for the plot
        labels = generate_region_labels(nsamples, labels)
        # set plotting parameters: alpha_ratio, colors, alphas
        alpha_ratio = 1.0 / nsamples
        colors = np.array(['k'] * nTS)
        alphas = np.maximum(np.array(
            [def_alpha] *
            nTS) *
            alpha_ratio,
            1.0)
        colors[special_idx] = 'r'
        alphas[special_idx] = np.maximum(alpha_ratio, 0.1)
        # normalize each trace by its own maximum (mutates `data`)
        for i in range(nsamples):
            data[i, :] = data[i, :] / np.nanmax(data[i, :])
        # plot each trace, stacked by row index
        x = np.arange(ntimes)
        for itrace in range(nTS):
            for i in range(nsamples):
                y = data[i, :] + np.r_[i]
                ax.plot(x, y,
                        color=colors[itrace],
                        label=labels[itrace],
                        alpha=alphas[itrace])
                # plot error bands
                if errors_list:
                    # BUGFIX: 'errors_list[error]' referenced an undefined
                    # name; index by the sample currently drawn.
                    error = errors_list[i]
                    # BUGFIX: fill_between's keyword is 'alpha', not 'alphas'.
                    ax.fill_between(x, y - error, y + error,
                                    color=colors[itrace],
                                    alpha=alphas[itrace])
        if show_ylabels:
            y_ticks = np.arange(len(labels))
            ax.set_yticks(y_ticks)
            ax.set_yticklabels(labels, fontsize=fontsize / 1.5)
        for tick in ax.yaxis.get_major_ticks():
            tick.label.set_fontsize(fontsize / 1.5)
        for tick in ax.xaxis.get_major_ticks():
            tick.label.set_fontsize(fontsize / 1.5)
        return ax

    @staticmethod
    def plot_vector(vector, subplot, title, labels,
                    flipy=False,
                    sharey=None,
                    fontsize=FiguresConfig.VERY_LARGE_FONT_SIZE):
        """
        Plot a 1-D vector (e.g. a time series) into a subplot.

        :param vector: values to plot
        :param subplot: subplot spec to draw into
        :param title: axes title
        :param labels: kept for interface compatibility (currently unused)
        :param flipy: plot values on x against index on y, top-down
        :param sharey: axis to share y with
        :param fontsize: title font size
        :return: the Axes drawn on
        """
        ax = plt.subplot(subplot, sharey=sharey)
        plt.title(title, fontsize=fontsize)
        color = 'black'
        if flipy:
            ax.plot(vector, np.arange(0, len(vector)),
                    color=color, linestyle='-')
            ax.invert_yaxis()
        else:
            ax.plot(vector, color=color, linestyle='-')
        ax.autoscale(tight=True)
        return ax
|
StarcoderdataPython
|
6540693
|
from numpy import nan
from .check_nd_array_for_bad import check_nd_array_for_bad
def apply_function_on_2_1d_arrays(
    _1d_array_0,
    _1d_array_1,
    function,
    n_required=None,
    raise_for_n_less_than_required=True,
    raise_for_bad=True,
    use_only_good=True,
):
    """Apply ``function`` to a pair of 1-D arrays, optionally masking bad values.

    :param _1d_array_0: first 1-D array.
    :param _1d_array_1: second 1-D array (same length as the first).
    :param function: callable taking the two (possibly filtered) arrays.
    :param n_required: minimum number of mutually good positions; a value
        <= 1 is interpreted as a fraction of the array length.
    :param raise_for_n_less_than_required: if False, return nan instead of
        raising when too few good positions remain.
    :param raise_for_bad: forwarded to check_nd_array_for_bad.
    :param use_only_good: if True, drop positions that are bad in either
        array before calling ``function``.
    :return: ``function(_1d_array_0, _1d_array_1)``, or nan (see above).
    :raises ValueError: when fewer than n_required good positions exist and
        raise_for_n_less_than_required is True.
    """
    is_good_0 = ~check_nd_array_for_bad(_1d_array_0, raise_for_bad=raise_for_bad)
    is_good_1 = ~check_nd_array_for_bad(_1d_array_1, raise_for_bad=raise_for_bad)
    if use_only_good:
        # a position is usable only if it is good in both arrays
        is_good = is_good_0 & is_good_1
        if n_required is not None:
            if n_required <= 1:
                # fractional n_required: scale by the array length
                n_required *= is_good.size
            n_good = is_good.sum()
            if n_good < n_required:
                if raise_for_n_less_than_required:
                    # BUGFIX: the message claimed '<=' although the test is '<'
                    raise ValueError(
                        "{} < n_required ({})".format(n_good, n_required))
                else:
                    return nan
        _1d_array_0 = _1d_array_0[is_good]
        _1d_array_1 = _1d_array_1[is_good]
    return function(_1d_array_0, _1d_array_1)
|
StarcoderdataPython
|
1774861
|
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
"""
import unittest
from gremlin_python.structure.graph import Path, Edge, Vertex
from gremlin_python.process.traversal import T
from graph_notebook.network.EventfulNetwork import EVENT_ADD_NODE
from graph_notebook.network.gremlin.GremlinNetwork import GremlinNetwork
class TestGremlinNetwork(unittest.TestCase):
def test_add_vertex_with_callback(self):
vertex = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
reached_callback = {}
expected_data = {
'data': {
'group': 'airport',
'label': 'airport',
'properties': {
T.id: '1234',
T.label: 'airport',
'code': 'SEA',
'runways': '4',
'type': 'Airport'},
'title': 'airport'},
'node_id': '1234'}
def add_node_callback(network, event_name, data):
self.assertEqual(event_name, EVENT_ADD_NODE)
self.assertEqual(expected_data, data)
reached_callback[event_name] = True
gn = GremlinNetwork(callbacks={EVENT_ADD_NODE: [add_node_callback]})
gn.add_vertex(vertex)
self.assertTrue(reached_callback[EVENT_ADD_NODE])
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(expected_data['data']['properties'], node['properties'])
def test_add_explicit_type_vertex_without_node_property(self):
vertex = Vertex(id='1')
gn = GremlinNetwork()
gn.add_vertex(vertex)
node = gn.graph.nodes.get('1')
self.assertEqual(node['label'], 'vertex')
def test_add_explicit_type_vertex_with_invalid_node_property_label(self):
vertex = Vertex(id='1')
gn = GremlinNetwork(display_property='foo')
gn.add_vertex(vertex)
node = gn.graph.nodes.get('1')
self.assertEqual(node['label'], 'vertex')
def test_add_explicit_type_vertex_with_node_property_label(self):
vertex = Vertex(id='1')
gn = GremlinNetwork(display_property='label')
gn.add_vertex(vertex)
node = gn.graph.nodes.get('1')
self.assertEqual(node['label'], 'vertex')
def test_add_explicit_type_vertex_with_node_property_id(self):
vertex = Vertex(id='1')
gn = GremlinNetwork(display_property='id')
gn.add_vertex(vertex)
node = gn.graph.nodes.get('1')
self.assertEqual(node['label'], '1')
def test_add_explicit_type_vertex_with_node_property_json(self):
vertex1 = Vertex(id='1')
gn = GremlinNetwork(display_property='{"vertex":"id"}')
gn.add_vertex(vertex1)
node1 = gn.graph.nodes.get('1')
self.assertEqual(node1['label'], '1')
def test_add_explicit_type_vertex_with_node_property_json_invalid_json(self):
vertex1 = Vertex(id='1')
gn = GremlinNetwork(display_property='{"vertex":id}')
gn.add_vertex(vertex1)
node1 = gn.graph.nodes.get('1')
self.assertEqual(node1['label'], 'vertex')
def test_add_explicit_type_vertex_with_node_property_json_invalid_key(self):
vertex1 = Vertex(id='1')
gn = GremlinNetwork(display_property='{"foo":"id"}')
gn.add_vertex(vertex1)
node1 = gn.graph.nodes.get('1')
self.assertEqual(node1['label'], 'vertex')
def test_add_explicit_type_vertex_with_node_property_json_invalid_value(self):
vertex1 = Vertex(id='1')
gn = GremlinNetwork(display_property='{"vertex":"code"}')
gn.add_vertex(vertex1)
node1 = gn.graph.nodes.get('1')
self.assertEqual(node1['label'], 'vertex')
def test_add_explicit_type_multiple_vertex_with_node_property_string(self):
vertex1 = Vertex(id='1')
vertex2 = Vertex(id='2')
gn = GremlinNetwork(display_property='id')
gn.add_vertex(vertex1)
gn.add_vertex(vertex2)
node1 = gn.graph.nodes.get('1')
node2 = gn.graph.nodes.get('2')
self.assertEqual(node1['label'], '1')
self.assertEqual(node2['label'], '2')
def test_add_explicit_type_multiple_vertex_with_node_property_json(self):
vertex1 = Vertex(id='1')
vertex2 = Vertex(id='2')
gn = GremlinNetwork(display_property='{"vertex":"id"}')
gn.add_vertex(vertex1)
gn.add_vertex(vertex2)
node1 = gn.graph.nodes.get('1')
node2 = gn.graph.nodes.get('2')
self.assertEqual(node1['label'], '1')
self.assertEqual(node2['label'], '2')
def test_add_vertex_without_node_property(self):
vertex = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
gn = GremlinNetwork()
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'airport')
def test_add_vertex_with_node_property_string(self):
vertex = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
gn = GremlinNetwork(display_property='code')
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'SEA')
def test_add_vertex_with_node_property_string_invalid(self):
vertex = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
gn = GremlinNetwork(display_property='desc')
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'airport')
def test_add_vertex_with_node_property_json(self):
vertex = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
gn = GremlinNetwork(display_property='{"airport":"code"}')
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'SEA')
def test_add_vertex_with_node_property_json_invalid_json(self):
vertex = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
gn = GremlinNetwork(display_property='{"airport":code}')
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'airport')
def test_add_vertex_with_node_property_json_invalid_key(self):
vertex = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
gn = GremlinNetwork(display_property='{"country":"code"}')
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'airport')
def test_add_vertex_with_node_property_json_invalid_value(self):
vertex = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
gn = GremlinNetwork(display_property='{"airport":"desc"}')
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'airport')
def test_add_vertex_multiple_with_node_property_string(self):
vertex1 = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
vertex2 = {
T.id: '2345',
T.label: 'country',
'type': 'Country',
'continent': 'NA',
'code': 'USA'
}
gn = GremlinNetwork(display_property='code')
gn.add_vertex(vertex1)
gn.add_vertex(vertex2)
node1 = gn.graph.nodes.get(vertex1[T.id])
node2 = gn.graph.nodes.get(vertex2[T.id])
self.assertEqual(node1['label'], 'SEA')
self.assertEqual(node2['label'], 'USA')
def test_add_vertex_multiple_with_multiple_node_properties(self):
vertex1 = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
vertex2 = {
T.id: '2345',
T.label: 'country',
'type': 'Country',
'continent': 'NA',
'code': 'USA'
}
gn = GremlinNetwork(display_property='{"airport":"code","country":"continent"}')
gn.add_vertex(vertex1)
gn.add_vertex(vertex2)
node1 = gn.graph.nodes.get(vertex1[T.id])
node2 = gn.graph.nodes.get(vertex2[T.id])
self.assertEqual(node1['label'], 'SEA')
self.assertEqual(node2['label'], 'NA')
def test_add_vertex_with_label_length(self):
vertex = {
T.id: '1234',
T.label: 'Seattle-Tacoma International Airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
gn = GremlinNetwork(label_max_length=15)
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'Seattle-Taco...')
def test_add_vertex_with_bracketed_label_and_label_length(self):
vertex = {
T.id: '1234',
T.label: "['Seattle-Tacoma International Airport']",
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
gn = GremlinNetwork(label_max_length=15)
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'Seattle-Taco...')
def test_add_vertex_with_label_length_less_than_3(self):
vertex = {
T.id: '1234',
T.label: 'Seattle-Tacoma International Airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA'
}
gn = GremlinNetwork(label_max_length=-50)
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], '...')
def test_add_vertex_with_node_property_string_and_label_length(self):
vertex = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA',
'desc': 'Seattle-Tacoma International Airport'
}
gn = GremlinNetwork(display_property='{"airport":"desc"}', label_max_length=15)
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'Seattle-Taco...')
def test_add_vertex_with_node_property_json_and_label_length(self):
vertex = {
T.id: '1234',
T.label: 'airport',
'type': 'Airport',
'runways': '4',
'code': 'SEA',
'desc': 'Seattle-Tacoma International Airport'
}
gn = GremlinNetwork(display_property='desc', label_max_length=15)
gn.add_vertex(vertex)
node = gn.graph.nodes.get(vertex[T.id])
self.assertEqual(node['label'], 'Seattle-Taco...')
def test_add_explicit_type_single_edge_without_edge_property(self):
    # No edge_display_property configured: an Edge object's displayed label
    # falls back to its Gremlin label.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='route')
    gn = GremlinNetwork()
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')

def test_add_explicit_type_single_edge_with_invalid_edge_property(self):
    # A property name Edge objects don't have ('length') is ignored -> fallback.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='route')
    gn = GremlinNetwork(edge_display_property='length')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')

def test_add_explicit_type_single_edge_with_edge_property_string_label(self):
    # edge_display_property='label' explicitly selects the label attribute.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='route')
    gn = GremlinNetwork(edge_display_property='label')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')

def test_add_explicit_type_single_edge_with_edge_property_string_id(self):
    # edge_display_property='id' displays the edge id instead of its label.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='route')
    gn = GremlinNetwork(edge_display_property='id')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], '1')

def test_add_explicit_type_single_edge_with_edge_property_json_single_label(self):
    # JSON mapping {edge label -> attribute}: 'route' edges display inV,
    # rendered with gremlin-python's Vertex repr 'v[<id>]'.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='route')
    gn = GremlinNetwork(edge_display_property='{"route":"inV"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'v[2]')

def test_add_explicit_type_single_edge_with_edge_property_malformed_json_single_label(self):
    # Unquoted JSON value makes the mapping unparseable -> label fallback.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='route')
    gn = GremlinNetwork(edge_display_property='{"route":inV}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')

def test_add_explicit_type_single_edge_with_edge_property_json_invalid_key_single_label(self):
    # Mapping keyed by a label no edge carries ('road') -> label fallback.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='route')
    gn = GremlinNetwork(edge_display_property='{"road":"inV"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')

def test_add_explicit_type_single_edge_with_edge_property_json_invalid_value_single_label(self):
    # Mapped attribute Edge objects lack ('distance') -> label fallback.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='route')
    gn = GremlinNetwork(edge_display_property='{"route":"distance"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')
def test_add_explicit_type_multiple_edges_with_edge_property_string(self):
    # A single string property ('id') applies to every edge regardless of label.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    vertex3 = Vertex(id='3')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='airway')
    edge2 = Edge(id='2', outV=vertex2, inV=vertex3, label='road')
    gn = GremlinNetwork(edge_display_property='id')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    gn.add_path_edge(edge2)
    edge_route = gn.graph.get_edge_data('1', '2')
    edge_path = gn.graph.get_edge_data('2', '3')
    self.assertEqual(edge_route['1']['label'], '1')
    self.assertEqual(edge_path['2']['label'], '2')

def test_add_explicit_type_multiple_edges_with_edge_property_json_single_label(self):
    # A one-label JSON mapping applies to every edge carrying that label.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    vertex3 = Vertex(id='3')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='route')
    edge2 = Edge(id='2', outV=vertex2, inV=vertex3, label='route')
    gn = GremlinNetwork(edge_display_property='{"route":"inV"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    gn.add_path_edge(edge2)
    edge_route = gn.graph.get_edge_data('1', '2')
    edge_path = gn.graph.get_edge_data('2', '3')
    self.assertEqual(edge_route['1']['label'], 'v[2]')
    self.assertEqual(edge_path['2']['label'], 'v[3]')

def test_add_explicit_type_multiple_edges_with_edge_property_json_multiple_labels(self):
    # A multi-label JSON mapping gives each edge label its own display attribute.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    vertex3 = Vertex(id='3')
    edge1 = Edge(id='1', outV=vertex1, inV=vertex2, label='airway')
    edge2 = Edge(id='2', outV=vertex2, inV=vertex3, label='road')
    gn = GremlinNetwork(edge_display_property='{"airway":"inV","road":"id"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1)
    gn.add_path_edge(edge2)
    edge_route = gn.graph.get_edge_data('1', '2')
    edge_path = gn.graph.get_edge_data('2', '3')
    self.assertEqual(edge_route['1']['label'], 'v[2]')
    self.assertEqual(edge_path['2']['label'], '2')
def test_add_single_edge_without_edge_property(self):
    # Dict-form edge (elementMap-style, with T tokens): default display is T.label.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'route', 'outV': 'v[1]', 'inV': 'v[2]'}
    gn = GremlinNetwork()
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')

def test_add_single_edge_with_invalid_edge_property(self):
    # Property absent from the edge dict ('distance') -> label fallback.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'route', 'outV': 'v[1]', 'inV': 'v[2]'}
    gn = GremlinNetwork(edge_display_property='distance')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')

def test_add_single_edge_with_edge_property_string_label(self):
    # For dict edges the T tokens are addressed by name: 'T.label'.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'route', 'outV': 'v[1]', 'inV': 'v[2]'}
    gn = GremlinNetwork(edge_display_property='T.label')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')

def test_add_single_edge_with_edge_property_string_id(self):
    # 'T.id' selects the edge's id token as the displayed label.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'route', 'outV': 'v[1]', 'inV': 'v[2]'}
    gn = GremlinNetwork(edge_display_property='T.id')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], '1')

def test_add_single_edge_with_edge_property_json(self):
    # JSON mapping picks a plain dict key ('inV') for 'route' edges.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'route', 'outV': 'v[1]', 'inV': 'v[2]'}
    gn = GremlinNetwork(edge_display_property='{"route":"inV"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'v[2]')

def test_add_single_edge_with_edge_property_invalid_json(self):
    # Unquoted JSON value -> mapping unparseable -> label fallback.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'route', 'outV': 'v[1]', 'inV': 'v[2]'}
    gn = GremlinNetwork(edge_display_property='{"route":inV}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')

def test_add_single_edge_with_edge_property_json_invalid_key(self):
    # Mapping keyed by a label the edge doesn't carry -> label fallback.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'route', 'outV': 'v[1]', 'inV': 'v[2]'}
    gn = GremlinNetwork(edge_display_property='{"distance":"inV"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')

def test_add_single_edge_with_edge_property_json_invalid_value(self):
    # Mapped key absent from the edge dict ('foo') -> label fallback.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'route', 'outV': 'v[1]', 'inV': 'v[2]'}
    gn = GremlinNetwork(edge_display_property='{"route":"foo"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], 'route')
def test_add_multiple_edges_with_edge_property_string(self):
    # One string property applies to every dict-form edge.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'route', 'outV': 'v[1]', 'inV': 'v[2]'}
    edge2 = {T.id: '2', T.label: 'route', 'outV': 'v[2]', 'inV': 'v[3]'}
    gn = GremlinNetwork(edge_display_property='inV')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    gn.add_path_edge(edge2, from_id='2', to_id='3')
    edge1_data = gn.graph.get_edge_data('1', '2')
    edge2_data = gn.graph.get_edge_data('2', '3')
    self.assertEqual(edge1_data['1']['label'], 'v[2]')
    self.assertEqual(edge2_data['2']['label'], 'v[3]')

def test_add_multiple_edges_with_edge_property_json_single_label(self):
    # One-label JSON mapping applied to every matching dict-form edge.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'route', 'outV': 'v[1]', 'inV': 'v[2]'}
    edge2 = {T.id: '2', T.label: 'route', 'outV': 'v[2]', 'inV': 'v[3]'}
    gn = GremlinNetwork(edge_display_property='{"route":"inV"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    gn.add_path_edge(edge2, from_id='2', to_id='3')
    edge1_data = gn.graph.get_edge_data('1', '2')
    edge2_data = gn.graph.get_edge_data('2', '3')
    self.assertEqual(edge1_data['1']['label'], 'v[2]')
    self.assertEqual(edge2_data['2']['label'], 'v[3]')

def test_add_multiple_edges_with_edge_property_json_multiple_labels(self):
    # Per-label mapping; note 'T.id' addresses the id token of a dict edge.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    edge1 = {T.id: '1', T.label: 'airway', 'outV': 'v[1]', 'inV': 'v[2]'}
    edge2 = {T.id: '2', T.label: 'road', 'outV': 'v[2]', 'inV': 'v[3]'}
    gn = GremlinNetwork(edge_display_property='{"airway":"outV","road":"T.id"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    gn.add_path_edge(edge1, from_id='1', to_id='2')
    gn.add_path_edge(edge2, from_id='2', to_id='3')
    edge1_data = gn.graph.get_edge_data('1', '2')
    edge2_data = gn.graph.get_edge_data('2', '3')
    self.assertEqual(edge1_data['1']['label'], 'v[1]')
    self.assertEqual(edge2_data['2']['label'], '2')
def test_add_path_with_integer(self):
    # Non-string path members (here an integer id) must still each become a node.
    hops = ['ANC', 3030, 'DFW']
    trip = Path([], hops)
    network = GremlinNetwork()
    network.add_results([trip])
    self.assertEqual(len(network.graph.nodes), len(trip))
def test_group_with_groupby(self):
    # group_by_property='code' puts the node in the group named by its code value.
    airport = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA',
    }
    network = GremlinNetwork(group_by_property='code')
    network.add_vertex(airport)
    rendered = network.graph.nodes.get('1234')
    self.assertEqual('SEA', rendered['group'])
def test_group_with_groupby_multiple_labels_with_same_property(self):
    # A plain-string group_by_property applies across labels sharing that property.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'name': 'Seattle-Tacoma International Airport',
        'runways': '4',
        'code': 'SEA'
    }
    vertex2 = {
        T.id: '2345',
        T.label: 'country',
        'type': 'Country',
        'name': 'Austria',
        'continent': 'Europe',
    }
    gn = GremlinNetwork(group_by_property='name')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    node2 = gn.graph.nodes.get(vertex2[T.id])
    self.assertEqual(node1['group'], 'Seattle-Tacoma International Airport')
    self.assertEqual(node2['group'], 'Austria')

def test_group_with_groupby_properties_json_single_label(self):
    # JSON {label: property} mapping form with a single label.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    gn = GremlinNetwork(group_by_property='{"airport":"code"}')
    gn.add_vertex(vertex1)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    self.assertEqual(node1['group'], 'SEA')

def test_group_with_groupby_properties_json_multiple_labels(self):
    # Each label grouped by its own mapped property.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    vertex2 = {
        T.id: '2345',
        T.label: 'country',
        'type': 'Country',
        'name': 'Austria',
        'continent': 'Europe'
    }
    gn = GremlinNetwork(group_by_property='{"airport":"code","country":"continent"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    node2 = gn.graph.nodes.get(vertex2[T.id])
    self.assertEqual(node1['group'], 'SEA')
    self.assertEqual(node2['group'], 'Europe')
def test_group_with_groupby_invalid_json_single_label(self):
    # {"code"} is a set literal, not valid JSON -> group left empty.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    gn = GremlinNetwork(group_by_property='{"airport":{"code"}}')
    gn.add_vertex(vertex1)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    self.assertEqual(node1['group'], '')

def test_group_with_groupby_invalid_json_multiple_labels(self):
    # One malformed entry invalidates the whole mapping: both groups empty.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    vertex2 = {
        T.id: '2345',
        T.label: 'country',
        'type': 'Country',
        'name': 'Austria',
        'continent': 'Europe'
    }
    gn = GremlinNetwork(group_by_property='{"airport":{"code"},"country":{"groupby":"continent"}}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    node2 = gn.graph.nodes.get(vertex2[T.id])
    self.assertEqual(node1['group'], '')
    self.assertEqual(node2['group'], '')
def test_group_nonexistent_groupby(self):
    # Grouping property absent from the vertex -> empty group.
    vertex = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    gn = GremlinNetwork(group_by_property='foo')
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get(vertex[T.id])
    self.assertEqual(node['group'], '')

def test_group_nonexistent_groupby_properties_json_single_label(self):
    # NOTE(review): this uses the nested {"groupby": ...} form, unlike the flat
    # {"label":"property"} form used elsewhere in this suite — both resolve to
    # an empty group here; confirm which form the current API documents.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    gn = GremlinNetwork(group_by_property='{"airport":{"groupby":"foo"}}')
    gn.add_vertex(vertex1)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    self.assertEqual(node1['group'], '')

def test_group_nonexistent_groupby_properties_json_multiple_labels(self):
    # Only the label whose mapped property is missing gets an empty group.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    vertex2 = {
        T.id: '2345',
        T.label: 'country',
        'type': 'Country',
        'name': 'Austria',
        'continent': 'Europe'
    }
    gn = GremlinNetwork(group_by_property='{"airport":"foo","country":"continent"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    node2 = gn.graph.nodes.get(vertex2[T.id])
    self.assertEqual(node1['group'], '')
    self.assertEqual(node2['group'], 'Europe')

def test_group_nonexistent_label_properties_json_single_label(self):
    # NOTE(review): the mapping string is missing its closing '}' — it exercises
    # the invalid-JSON path as much as the unknown-label path; result is the
    # same empty group either way.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    gn = GremlinNetwork(group_by_property='{"air_port":{"groupby":"code"}')
    gn.add_vertex(vertex1)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    self.assertEqual(node1['group'], '')

def test_group_nonexistent_label_properties_json_multiple_labels(self):
    # Unknown label key only affects the vertex carrying that label.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    vertex2 = {
        T.id: '2345',
        T.label: 'country',
        'type': 'Country',
        'name': 'Austria',
        'continent': 'Europe'
    }
    gn = GremlinNetwork(group_by_property='{"airport":"code","a_country":"continent"}')
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    node2 = gn.graph.nodes.get(vertex2[T.id])
    self.assertEqual(node1['group'], 'SEA')
    self.assertEqual(node2['group'], '')
def test_group_without_groupby(self):
    # Default grouping falls back to the vertex's T.label.
    vertex = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    gn = GremlinNetwork()
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get(vertex[T.id])
    self.assertEqual(node['group'], 'airport')

def test_group_without_groupby_multiple_labels(self):
    # Each label becomes its own default group.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    vertex2 = {
        T.id: '2345',
        T.label: 'country',
        'type': 'Country',
        'name': 'Austria',
        'continent': 'Europe'
    }
    gn = GremlinNetwork()
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    node2 = gn.graph.nodes.get(vertex2[T.id])
    self.assertEqual(node1['group'], 'airport')
    self.assertEqual(node2['group'], 'country')

def test_group_valueMap_true(self):
    # NOTE(review): byte-identical to test_group_without_groupby above — the
    # name suggests it was meant to use valueMap-style (list-valued)
    # properties like the *_list tests below; confirm intent.
    vertex = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    gn = GremlinNetwork()
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get(vertex[T.id])
    self.assertEqual(node['group'], 'airport')
def test_group_without_groupby_list(self):
    # valueMap-style list property present, but default grouping still uses T.label.
    vertex = {
        T.id: '1234',
        T.label: 'airport',
        'code': ['SEA']
    }
    gn = GremlinNetwork()
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get(vertex[T.id])
    self.assertEqual(node['group'], 'airport')

def test_group_without_groupby_choose_label(self):
    # 'T.label' explicitly selects the label token for grouping.
    vertex = {
        T.id: '1234',
        T.label: 'airport',
        'code': ['SEA']
    }
    gn = GremlinNetwork(group_by_property='T.label')
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get(vertex[T.id])
    self.assertEqual(node['group'], 'airport')

def test_group_with_groupby_list(self):
    # A list-valued property is stringified verbatim as the group name.
    vertex = {
        T.id: '1234',
        T.label: 'airport',
        'code': ['SEA']
    }
    gn = GremlinNetwork(group_by_property='code')
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get(vertex[T.id])
    self.assertEqual(node['group'], "['SEA']")

def test_group_with_groupby_list_properties_json(self):
    # Same list-stringification behaviour through the JSON mapping form.
    vertex = {
        T.id: '1234',
        T.label: 'airport',
        'code': ['SEA']
    }
    gn = GremlinNetwork(group_by_property='{"airport":"code"}')
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get(vertex[T.id])
    self.assertEqual(node['group'], "['SEA']")

def test_group_notokens_groupby(self):
    # Vertex without T tokens: node id is the library's deterministic hash of
    # the property dict, but grouping by a present property still works.
    vertex = {
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    gn = GremlinNetwork(group_by_property='code')
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('graph_notebook-ed8fddedf251d3d5745dccfd53edf51d')
    self.assertEqual(node['group'], 'SEA')

def test_group_notokens_without_groupby(self):
    # No T.label to fall back to -> empty group.
    vertex = {
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    gn = GremlinNetwork()
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('graph_notebook-ed8fddedf251d3d5745dccfd53edf51d')
    self.assertEqual(node['group'], '')
def test_add_path_with_edge_property_string(self):
    # Edge objects embedded in a Path honour a string edge_display_property.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    path = Path([], [vertex1, Edge(id='1', outV=vertex1, inV=vertex2, label='route'), vertex2])
    gn = GremlinNetwork(edge_display_property='id')
    gn.add_results([path])
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], '1')

def test_add_path_with_edge_property_json(self):
    # Same, through the JSON {label: attribute} mapping form.
    vertex1 = Vertex(id='1')
    vertex2 = Vertex(id='2')
    path = Path([], [vertex1, Edge(id='1', outV=vertex1, inV=vertex2, label='route'), vertex2])
    gn = GremlinNetwork(edge_display_property='{"route":"id"}')
    gn.add_results([path])
    edge = gn.graph.get_edge_data('1', '2')
    self.assertEqual(edge['1']['label'], '1')
def test_add_path_without_groupby(self):
    # valueMap-style path results default to grouping by T.label.
    path = Path([], [{'country': ['US'], 'code': ['SEA'], 'longest': [11901], 'city': ['Seattle'],
                      T.label: 'airport', 'lon': [-122.30899810791], 'type': ['airport'], 'elev': [432],
                      T.id: '22', 'icao': ['KSEA'], 'runways': [3], 'region': ['US-WA'],
                      'lat': [47.4490013122559], 'desc': ['Seattle-Tacoma']},
                     {'country': ['US'], 'code': ['ATL'], 'longest': [12390], 'city': ['Atlanta'],
                      T.label: 'airport', 'lon': [-84.4281005859375], 'type': ['airport'], 'elev': [1026],
                      T.id: '1', 'icao': ['KATL'], 'runways': [5], 'region': ['US-GA'],
                      'lat': [33.6366996765137], 'desc': ['Hartsfield - Jackson Atlanta International Airport']}])
    gn = GremlinNetwork()
    gn.add_results([path])
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], "airport")

def test_add_path_with_groupby(self):
    # Grouping by a list-valued property stringifies the list.
    path = Path([], [{'country': ['US'], 'code': ['SEA'], 'longest': [11901], 'city': ['Seattle'],
                      T.label: 'airport', 'lon': [-122.30899810791], 'type': ['airport'], 'elev': [432],
                      T.id: '22', 'icao': ['KSEA'], 'runways': [3], 'region': ['US-WA'],
                      'lat': [47.4490013122559], 'desc': ['Seattle-Tacoma']},
                     {'country': ['US'], 'code': ['ATL'], 'longest': [12390], 'city': ['Atlanta'],
                      T.label: 'airport', 'lon': [-84.4281005859375], 'type': ['airport'], 'elev': [1026],
                      T.id: '1', 'icao': ['KATL'], 'runways': [5], 'region': ['US-GA'],
                      'lat': [33.6366996765137], 'desc': ['Hartsfield - Jackson Atlanta International Airport']}])
    gn = GremlinNetwork(group_by_property="code")
    gn.add_results([path])
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], "['ATL']")

def test_add_path_with_groupby_multiple_labels(self):
    # A plain-string property applies to every label present in the path.
    path = Path([], [{'country': ['US'], 'code': ['SEA'], 'longest': [11901], 'city': ['Seattle'],
                      T.label: 'airport', 'lon': [-122.30899810791], 'type': ['airport'], 'elev': [432],
                      T.id: '22', 'icao': ['KSEA'], 'runways': [3], 'region': ['US-WA'],
                      'lat': [47.4490013122559], 'desc': ['Seattle-Tacoma']},
                     {'country': ['US'], 'code': ['ATL'], 'longest': [12390], 'city': ['Atlanta'],
                      T.label: 'airport', 'lon': [-84.4281005859375], 'type': ['airport'], 'elev': [1026],
                      T.id: '1', 'icao': ['KATL'], 'runways': [5], 'region': ['US-GA'],
                      'lat': [33.6366996765137], 'desc': ['Hartsfield - Jackson Atlanta International Airport']},
                     {'code': ['AN'], T.label: 'continent', T.id: '3741', 'desc': ['Antarctica']}])
    gn = GremlinNetwork(group_by_property='code')
    gn.add_results([path])
    node1 = gn.graph.nodes.get('1')
    node2 = gn.graph.nodes.get('3741')
    self.assertEqual(node1['group'], "['ATL']")
    self.assertEqual(node2['group'], "['AN']")

def test_add_path_with_groupby_properties_json(self):
    # JSON mapping form over a path result.
    path = Path([], [{'country': ['US'], 'code': ['SEA'], 'longest': [11901], 'city': ['Seattle'],
                      T.label: 'airport', 'lon': [-122.30899810791], 'type': ['airport'], 'elev': [432],
                      T.id: '22', 'icao': ['KSEA'], 'runways': [3], 'region': ['US-WA'],
                      'lat': [47.4490013122559], 'desc': ['Seattle-Tacoma']},
                     {'country': ['US'], 'code': ['ATL'], 'longest': [12390], 'city': ['Atlanta'],
                      T.label: 'airport', 'lon': [-84.4281005859375], 'type': ['airport'], 'elev': [1026],
                      T.id: '1', 'icao': ['KATL'], 'runways': [5], 'region': ['US-GA'],
                      'lat': [33.6366996765137], 'desc': ['Hartsfield - Jackson Atlanta International Airport']}])
    gn = GremlinNetwork(group_by_property='{"airport":"code"}')
    gn.add_results([path])
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], "['ATL']")

def test_add_path_with_groupby_properties_json_multiple_labels(self):
    # Per-label mapping over a path with two distinct labels.
    path = Path([], [{'country': ['US'], 'code': ['SEA'], 'longest': [11901], 'city': ['Seattle'],
                      T.label: 'airport', 'lon': [-122.30899810791], 'type': ['airport'], 'elev': [432],
                      T.id: '22', 'icao': ['KSEA'], 'runways': [3], 'region': ['US-WA'],
                      'lat': [47.4490013122559], 'desc': ['Seattle-Tacoma']},
                     {'country': ['US'], 'code': ['ATL'], 'longest': [12390], 'city': ['Atlanta'],
                      T.label: 'airport', 'lon': [-84.4281005859375], 'type': ['airport'], 'elev': [1026],
                      T.id: '1', 'icao': ['KATL'], 'runways': [5], 'region': ['US-GA'],
                      'lat': [33.6366996765137], 'desc': ['Hartsfield - Jackson Atlanta International Airport']},
                     {'code': ['AN'], T.label: 'continent', T.id: '3741', 'desc': ['Antarctica']}])
    gn = GremlinNetwork(group_by_property='{"airport":"region","continent":"code"}')
    gn.add_results([path])
    node1 = gn.graph.nodes.get('1')
    node2 = gn.graph.nodes.get('3741')
    self.assertEqual(node1['group'], "['US-GA']")
    self.assertEqual(node2['group'], "['AN']")
def test_ignore_group(self):
    # ignore_groups=True forces an empty group — with or without group_by_property.
    vertex = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    gn = GremlinNetwork(ignore_groups=True)
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get(vertex[T.id])
    self.assertEqual(node['group'], '')
    # group_by_property must still be overridden by ignore_groups.
    gn = GremlinNetwork(group_by_property="code", ignore_groups=True)
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get(vertex[T.id])
    self.assertEqual(node['group'], '')

def test_ignore_group_properties_json(self):
    # ignore_groups also overrides the JSON mapping form of group_by_property.
    vertex1 = {
        T.id: '1234',
        T.label: 'airport',
        'type': 'Airport',
        'runways': '4',
        'code': 'SEA'
    }
    vertex2 = {
        T.id: '2345',
        T.label: 'country',
        'type': 'Country',
        'name': 'Austria',
        'continent': 'Europe'
    }
    gn = GremlinNetwork(ignore_groups=True)
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    node2 = gn.graph.nodes.get(vertex2[T.id])
    self.assertEqual(node1['group'], '')
    self.assertEqual(node2['group'], '')
    gn = GremlinNetwork(group_by_property='{"airport":"code","country":"continent"}',
                        ignore_groups=True)
    gn.add_vertex(vertex1)
    gn.add_vertex(vertex2)
    node1 = gn.graph.nodes.get(vertex1[T.id])
    node2 = gn.graph.nodes.get(vertex2[T.id])
    self.assertEqual(node1['group'], '')
    self.assertEqual(node2['group'], '')
def test_group_returnvertex_groupby_notspecified(self):
    # A bare gremlin-python Vertex defaults to the group 'vertex'.
    vertex = Vertex(id='1')
    gn = GremlinNetwork()
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], 'vertex')

def test_group_returnvertex_groupby_label(self):
    # Both 'label' and 'T.label' spellings select the Vertex label attribute.
    vertex = Vertex(id='1')
    gn = GremlinNetwork(group_by_property="label")
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], 'vertex')
    gn = GremlinNetwork(group_by_property="T.label")
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], 'vertex')

def test_group_returnvertex_groupby_label_properties_json(self):
    # Same two spellings via the JSON mapping form.
    vertex = Vertex(id='1')
    gn = GremlinNetwork(group_by_property='{"vertex":"label"}')
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], 'vertex')
    gn = GremlinNetwork(group_by_property='{"vertex":"T.label"}')
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], 'vertex')

def test_group_returnvertex_groupby_id(self):
    # Both 'id' and 'T.id' spellings select the Vertex id attribute.
    vertex = Vertex(id='1')
    gn = GremlinNetwork(group_by_property="id")
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], '1')
    gn = GremlinNetwork(group_by_property="T.id")
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], '1')

def test_group_returnvertex_groupby_id_properties_json(self):
    # Same two spellings via the JSON mapping form.
    vertex = Vertex(id='1')
    gn = GremlinNetwork(group_by_property='{"vertex":"id"}')
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], '1')
    gn = GremlinNetwork(group_by_property='{"vertex":"T.id"}')
    gn.add_vertex(vertex)
    node = gn.graph.nodes.get('1')
    self.assertEqual(node['group'], '1')
# Allow running this test module directly as a script as well as via a runner.
if __name__ == '__main__':
    unittest.main()
|
StarcoderdataPython
|
1663887
|
<reponame>EkremBayar/bayar
import pandas as pd
from plotnine import ggplot, aes, geom_point, labs, theme
# Shared theme: widen the right margin so the legend fits the baseline image.
_theme = theme(subplots_adjust={'right': 0.80})

# Minimal two-point dataset with a categorical column to drive the colour scale.
df = pd.DataFrame({
    'x': [1, 2],
    'y': [3, 4],
    'cat': ['a', 'b']
})
def test_labelling_with_colour():
    # labs(colour=...) must retitle the colour legend; the `==` against a string
    # triggers plotnine's image comparison with the stored baseline of that name.
    p = (ggplot(df, aes('x', 'y', color='cat'))
         + geom_point()
         + labs(colour='Colour Title')
         )
    assert p + _theme == 'labelling_with_colour'
|
StarcoderdataPython
|
3483458
|
<filename>conanfile.py
# The MIT License (MIT)
#
# Copyright (c) 2016 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from conans import ConanFile, CMake, tools
import re
def get_version():
    """Parse the semantic version from the VERSION clause of src/CMakeLists.txt.

    Returns None when the file cannot be loaded or no VERSION clause is
    present (e.g. when the recipe is consumed without exported sources).
    """
    try:
        content = tools.load("src/CMakeLists.txt")
        # re.search(...) returns None when no VERSION clause matches; the
        # resulting AttributeError is deliberately swallowed by the broad
        # except below, which doubles as the missing-file handler.
        version = re.search(r"project\([^\)]+VERSION (\d+\.\d+\.\d+)[^\)]*\)", content).group(1)
        return version.strip()
    except Exception:
        return None
class NewProjectConan(ConanFile):
    """Conan recipe for the new-project template library.

    Two build modes are supported, switched by the CONAN_RUN_TESTS env var:
    developer mode (full tree: unit tests, examples) and consumer mode
    (library sources only).
    """
    name = "new-project"
    # Version is read from src/CMakeLists.txt at export time; may be None.
    version = get_version()
    author = "<NAME>"
    license = "https://github.com/mpusz/new-project-template/blob/master/LICENSE"
    url = "https://github.com/mpusz/new-project-template"
    description = "A template to quickly start a new project"
    exports = ["LICENSE.md"]
    settings = "os", "compiler", "build_type", "arch"
    requires = ()
    options = { # remove for a header-only library
        "shared": [True, False],
        "fPIC": [True, False]
    }
    default_options = {
        "shared": False, # remove for a header-only library
        "fPIC": True, # remove for a header-only library
        "gtest:shared": False
    }
    scm = {
        "type": "git",
        "url": "auto",
        "revision": "auto",
        "submodule": "recursive"
    }
    generators = "cmake"

    @property
    def _run_tests(self):
        # True when CONAN_RUN_TESTS is set -> developer mode.
        return tools.get_env("CONAN_RUN_TESTS", False)

    def _configure_cmake(self, folder="src"):
        # Build a CMake helper configured for the current mode.
        cmake = CMake(self)
        if self.settings.compiler == "Visual Studio" and self.options.shared:
            cmake.definitions["CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS"] = True
        if self._run_tests:
            # developer's mode (unit tests, examples, restrictive compilation warnings, ...)
            cmake.configure()
        else:
            # consumer's mode (library sources only)
            cmake.configure(source_folder=folder, build_folder=folder)
        return cmake

    def configure(self):
        # Require at least C++11 from the consumer's profile.
        tools.check_min_cppstd(self, "11")

    def config_options(self):
        if self.settings.os == 'Windows':
            # fPIC is meaningless on Windows.
            del self.options.fPIC  # remove for a header-only library

    def build_requirements(self):
        # gtest is only needed when the test suite is built.
        if self._run_tests:
            self.build_requires("gtest/1.10.0")

    def build(self):
        cmake = self._configure_cmake()
        cmake.build()
        if self._run_tests:
            self.run("ctest -VV -C %s" % cmake.build_type, run_environment=True)

    def package(self):
        # Bundle the license file; cmake/common holds third-party licenses.
        self.copy(pattern="*license*", dst="licenses", excludes="cmake/common/*", ignore_case=True, keep_path=False)
        cmake = self._configure_cmake()
        cmake.install()

    def package_info(self):
        self.cpp_info.libs = ['new-project']

    # uncomment for a header-only library
    # def package_id(self):
    #     self.info.header_only()
|
StarcoderdataPython
|
3343547
|
<reponame>happy-jihye/Cartoon-StyleGAN
import os
import torch
import argparse
from argparse import Namespace
def prepare_data(dataset_folder, zip_file=None, target_size=256):
    """Optionally unzip a dataset archive, then build an LMDB dataset via prepare_data.py.

    NOTE(review): the paths are interpolated into shell commands unquoted and
    unsanitized (shell-injection risk for untrusted input) — prefer
    subprocess.run with an argument list. Also, `-d "/{zip_file}"` extracts
    into a root-level directory named after the archive path, which looks
    unintended — confirm against the original notebook.
    """
    # Unzip
    if zip_file is not None:
        os.system(f'unzip {zip_file} -d "/{zip_file}"')
        os.system(f'rm {zip_file}')
    # prepare data
    os.system(f'python prepare_data.py --out {dataset_folder}/LMDB --size {target_size} {dataset_folder}')
def download_pretrained_model(DownLoad_All=True, file=''):
    """Download pretrained checkpoints via utils.download_pretrained_model.

    Args:
        DownLoad_All: when True, fetch every available checkpoint.
        file: checkpoint filename to fetch when DownLoad_All is False.
            Defaults to 'ffhq256.pt' when empty, preserving the previous
            hard-coded behaviour.

    Fix: the `file` parameter was previously accepted but ignored —
    "ffhq256.pt" was always downloaded in the single-file branch.
    """
    # Alias the helper so it does not shadow this wrapper's own name.
    from utils import download_pretrained_model as _download
    if DownLoad_All:
        _download()
    else:
        _download(False, file if file else "ffhq256.pt")
def project(encoder = True, img='00006.jpg'):
    """Project a sample FFHQ image into latent space with projector_factor.py.

    When `encoder` is True the pretrained encoder checkpoint is supplied to
    initialise the projection; otherwise optimization starts from scratch.
    """
    if encoder:
        os.system(f'python projector_factor.py --ckpt=/networks/ffhq256.pt --e_ckpt=/networks/encoder_ffhq.pt \
                    --files=/asset/ffhq-sample/{img}')
    else:
        os.system(f'python projector_factor.py --ckpt=/networks/ffhq256.pt \
                    --files=/asset/ffhq-sample/{img}')
def generate_using_styleclip(description, seed=100,
                             network1="/networks/ffhq256.pt",
                             network2="networks/ffhq256.pt",
                             latent_path=None, optimization_steps=300, truncation = 0.7,
                             l2_lambda = 0.004, result_dir = "asset/results_styleclip",
                             device = 'cuda',
                             number_of_step = 5 ,
                             strength = 1.5,
                             swap = True,
                             swap_layer_num = 1):
    """Run StyleCLIP text-guided optimization, then interpolate the result
    through two generators (source + stylized), layer-swapping between them.

    Returns a single image tensor: a horizontal strip per generator, stacked
    vertically. NOTE(review): network2's default lacks the leading slash that
    network1 has — confirm which path is correct.
    """
    # -----------------------------
    # Arguments for run_optimization.main (StyleCLIP latent optimization).
    args = {
        "seed" : seed,
        "description": description,
        "ckpt": network1,
        "ckpt2": network2,
        "stylegan_size": 256,
        "latent_dim" : 14,
        "lr_rampup": 0.05,
        "lr": 0.1,
        "step": optimization_steps,
        "l2_lambda": l2_lambda,
        "latent_path": latent_path,
        "truncation": truncation,
        "device" : device,
        "results_dir": result_dir,
    }
    from run_optimization import main
    # latent_init1/latent_fin1: latents before/after CLIP-guided optimization.
    final_result, latent_init1, latent_fin1 = main(Namespace(**args))
    # ---------------
    # Generator
    # ---------------
    from model import Generator
    # Generator1
    network1 = torch.load(network1)
    generator1 = Generator(256, 512, 8, channel_multiplier=2).to(device)
    generator1.load_state_dict(network1["g_ema"], strict=False)
    trunc1 = generator1.mean_latent(4096)
    # Generator2
    network2 = torch.load(network2)
    generator2 = Generator(256, 512, 8, channel_multiplier=2).to(device)
    generator2.load_state_dict(network2["g_ema"], strict=False)
    trunc2 = generator2.mean_latent(4096)
    # ---------------
    # Interpolation
    # ---------------
    # Linearly interpolate (scaled by `strength`) between the initial and
    # optimized latents, then render each step through both generators.
    latent_interp = torch.zeros(number_of_step, latent_init1.shape[1], latent_init1.shape[2]).to(device)
    with torch.no_grad():
        for j in range(number_of_step):
            latent_interp[j] = latent_init1 + strength * (latent_fin1-latent_init1) * float(j/(number_of_step-1))
        # Generator1 also returns the activations at swap_layer_num, which are
        # fed into generator2 so both share coarse structure.
        imgs_gen1, save_swap_layer = generator1([latent_interp],
                                                input_is_latent=True,
                                                truncation=0.7,
                                                truncation_latent=trunc1,
                                                swap=swap, swap_layer_num=swap_layer_num,
                                                randomize_noise=False)
        imgs_gen2, _ = generator2([latent_interp],
                                  input_is_latent=True,
                                  truncation=0.7,
                                  swap=swap, swap_layer_num=swap_layer_num, swap_layer_tensor=save_swap_layer,
                                  truncation_latent=trunc2)
    # Concatenate each generator's frames horizontally, then stack the two rows.
    im1 = torch.cat([img_gen for img_gen in imgs_gen1], dim=2)
    im2 = torch.cat([img_gen for img_gen in imgs_gen2], dim=2)
    result = torch.cat([im1, im2], dim=1)
    return result # if you want to show image :: `imshow(tensor2image(result))`
def generate_using_latent_mixing(seed1=100, seed2=200,
                                 network1="/networks/ffhq256.pt",
                                 network2="networks/ffhq256.pt",
                                 latent_mixing1=10, latent_mixing2=10,
                                 latent_path=None, optimization_steps=300, truncation=0.7,
                                 l2_lambda=0.004, result_dir="asset/results_styleclip",
                                 device='cuda',
                                 number_of_step=5,
                                 strength=1.5,
                                 swap=True,
                                 swap_layer_num=1):
    """Style-mix two random latents and render with source/target generators.

    Two seeded random latents are combined two ways: by splitting along the
    style-layer axis (``latent_mixing1``) and along the channel axis
    (``latent_mixing2``).  The four latents (two originals + two mixes) are
    rendered by both generators; returns the two 4-image strips stacked
    vertically.

    NOTE(review): several parameters (latent_path, optimization_steps,
    l2_lambda, result_dir, number_of_step, strength) are accepted but unused
    here — presumably copied from `generate_using_styleclip`; confirm.
    NOTE(review): weights are loaded from key "g" while
    `generate_using_styleclip` uses "g_ema" — confirm which is intended.
    """
    # ----------------------
    # Source Images (FFHQ)
    # ----------------------
    from model import Generator

    # Genearator1 (source domain).
    network1 = torch.load(network1)
    generator1 = Generator(256, 512, 8, channel_multiplier=2).to(device)
    generator1.load_state_dict(network1["g"], strict=False)
    trunc1 = generator1.mean_latent(4096)

    # latent1: seeded random draw mapped through the generator's style network.
    torch.manual_seed(seed1)
    r_latent1 = torch.randn(1, 14, 512, device=device)
    latent1 = generator1.get_latent(r_latent1)

    # latent2
    torch.manual_seed(seed2)
    r_latent2 = torch.randn(1, 14, 512, device=device)
    latent2 = generator1.get_latent(r_latent2)

    # latent mixing: split along style layers (dim 1) and channels (dim 2).
    latent3 = torch.cat([latent1[:, :latent_mixing1, :], latent2[:, latent_mixing1:, :]], dim=1)
    latent4 = torch.cat([latent1[:, :, :latent_mixing2], latent2[:, :, latent_mixing2:]], dim=2)

    # Batch all four latents.
    latent = torch.cat([latent1, latent2, latent3, latent4], dim=0)

    # generate image (keep the swap-layer feature map for generator2)
    img1, save_swap_layer = generator1(
        [latent],
        input_is_latent=True,
        truncation=truncation,
        truncation_latent=trunc1,
        swap=swap, swap_layer_num=swap_layer_num,
    )
    # =================================================
    # ----------------------
    # Target Images (Cartoon)
    # ----------------------
    # Genearator2
    network2 = torch.load(network2)
    generator2 = Generator(256, 512, 8, channel_multiplier=2).to(device)
    generator2.load_state_dict(network2["g"], strict=False)
    trunc2 = generator2.mean_latent(4096)

    # generate image
    # NOTE(review): truncation_latent is trunc1 (not trunc2) here — confirm.
    img2, _ = generator2(
        [latent],
        input_is_latent=True,
        truncation=truncation,
        truncation_latent=trunc1,
        swap=swap, swap_layer_num=swap_layer_num, swap_layer_tensor=save_swap_layer,
    )

    # Horizontal strips of the four renders, then stack source over target.
    ffhq = torch.cat([img1[0], img1[1], img1[2], img1[3]], dim=2)
    cartoon = torch.cat([img2[0], img2[1], img2[2], img2[3]], dim=2)
    return torch.cat([ffhq, cartoon], dim=1)
def generate_using_sefa(network1="/networks/ffhq256.pt",
                        network2="/networks/ffhq256.pt",
                        factor='factor.pt',
                        index=7, degree=14, seed=116177, n_sample=5,
                        result_dir='asset/results-sefa'):
    """Apply a SeFa factorization direction by shelling out to apply_factor.py.

    ``factor`` is the precomputed eigenvector file; ``index``/``degree``
    select which semantic direction to move along and how far.  Also renders
    a video of the edit (``--video``).
    """
    os.system(f'python apply_factor.py --index={index} --degree={degree} --seed={seed} --n_sample={n_sample} \
                --ckpt={network1} --ckpt2={network2} \
                --factor={factor} --outdir={result_dir} --video')
if __name__ == "__main__":
    # CLI entry point: currently only the data-preparation path is wired up.
    parser = argparse.ArgumentParser(description="")
    # parser.add_argument("path", type=str, help="path to the lmdb dataset")
    parser.add_argument("--prepare_data", type=str, default=None)
    parser.add_argument("--zip", type=str, default=None)
    parser.add_argument("--size", type=int, default=256)
    # NOTE(review): --gif is parsed but never used below, and its help text
    # looks copy-pasted from another option — confirm intent.
    parser.add_argument("--gif", action="store_true", help="path to the lmdb dataset")

    args = parser.parse_args()

    if args.prepare_data is not None:
        prepare_data(dataset_folder=args.prepare_data, zip_file=args.zip, target_size=args.size)
|
StarcoderdataPython
|
3381720
|
# CMSSW configuration: builds flat ntuples from MiniAOD for the Bristol
# top-quark analysis.  Statement order matters throughout — modules are
# loaded and then patched in place.
from __future__ import print_function
import FWCore.ParameterSet.Config as cms
from FWCore.ParameterSet.VarParsing import VarParsing
from BristolAnalysis.NTupleTools.options import CMSSW_MAJOR_VERSION, registerOptions, is2015, is2016
import sys

# Parse command-line options into convenience flags.
options = registerOptions(VarParsing('python'))
isData = options.isData
isMC = not isData
isTTbarMC = options.isTTbarMC
isReHLT = options.isReHLT

# Define the CMSSW process
process = cms.Process("Ntuples")

# Load the standard set of configuration modules
process.load('Configuration.StandardSequences.Services_cff')
process.load('Configuration.StandardSequences.GeometryDB_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_cff')

# Message Logger settings
process.load("FWCore.MessageService.MessageLogger_cfi")
# process.MessageLogger.cout.FwkReport.reportEvery = 1000
process.MessageLogger.cerr.FwkReport.reportEvery = 1000
#process.MessageLogger.detailedInfo.threshold = 'DEBUG'

# Maximum Number of Events (-1 = all events in the input)
process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(-1))

# Set the process options -- Display summary at the end, enable
# unscheduled execution
process.options = cms.untracked.PSet(
    allowUnscheduled=cms.untracked.bool(True),
    wantSummary=cms.untracked.bool(False),
)

# print event content
process.printEventContent = cms.EDAnalyzer("EventContentAnalyzer")

# Source: default test file; normally overridden by the job submission tool.
process.source = cms.Source("PoolSource",
    fileNames=cms.untracked.vstring(
        'root://xrootd.unl.edu//store/data/Run2015C_25ns/SingleMuon/MINIAOD/16Dec2015-v1/00000/002C24D4-E1AF-E511-AE8E-001E673971CA.root',
    )
)

# If you would like to change the Global Tag e.g. for JEC
# Conditions global tags keyed by data/MC and CMSSW major version
# (7 = 2015 / 76X, 8 = 2016 / 80X).
# NOTE(review): dict keys are ints, but the version check further down
# compares CMSSW_MAJOR_VERSION against the string '7' — verify its type.
globalTags = {
    'data': {
        7: '76X_dataRun2_16Dec2015_v0',  # ReReco+Prompt JECv6
        8: '80X_dataRun2_2016SeptRepro_v7',
    },
    'MC': {
        7: '76X_mcRun2_asymptotic_RunIIFall15DR76_v1',  # 25ns MC
        8: '80X_mcRun2_asymptotic_2016_TrancheIV_v8',
    }
}
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_condDBv2_cff")
globaltag = ''
if (isData):
    globaltag = globalTags['data'][CMSSW_MAJOR_VERSION]
else:
    globaltag = globalTags['MC'][CMSSW_MAJOR_VERSION]
process.GlobalTag.globaltag = cms.string(globaltag)

process.load('JetMETCorrections.Configuration.DefaultJEC_cff')
# process.load('JetMETCorrections.Configuration.CorrectedJetProducersDefault_cff')

if CMSSW_MAJOR_VERSION == '7':
    print("Running on 2015 Data")
else:
    print("Running on 2016 Data")
print("Using Global Tag:", globaltag)
# TT Gen Event configuration (only meaningful for ttbar MC samples)
if isTTbarMC:
    from BristolAnalysis.NTupleTools.ttGenConfig_cff import setupTTGenEvent
    setupTTGenEvent(process, cms)

# Particle level definitions
from BristolAnalysis.NTupleTools.pseudoTopConfig_cff import setupPseudoTop
setupPseudoTop(process, cms)

# Overwrite JEC/JER if useJECFromFile is true
# if options.useJECFromFile:
from BristolAnalysis.NTupleTools.Jets_Setup_cff import setup_jets
setup_jets(process, cms, options)

# Rerun MET
from BristolAnalysis.NTupleTools.MET_Setup_cff import setup_MET
setup_MET(process, cms, options)

# Electron Regression, Smearing and VID
from BristolAnalysis.NTupleTools.Electron_Setup_cff import setup_electrons
setup_electrons(process, cms, options)

# Bad Muon Filters
from BristolAnalysis.NTupleTools.Muon_Setup_cff import setup_muons
setup_muons(process, cms, options)

# Load the selection filters and the selection analyzers
process.load('BristolAnalysis.NTupleTools.muonSelections_cff')
process.load('BristolAnalysis.NTupleTools.electronSelections_cff')
process.load('BristolAnalysis.NTupleTools.SelectionCriteriaAnalyzer_cfi')

# In tag-and-probe mode the e+jets selections run with jet requirements in
# tagging mode only.
if options.tagAndProbe:
    process.topPairEPlusJetsSelection.tagAndProbeStudies = cms.bool(True)
    process.topPairEPlusJetsSelectionTagging.tagAndProbeStudies = cms.bool(
        True)
    process.topPairEPlusJetsSelection.jetSelectionInTaggingMode = cms.bool(
        True)
    process.topPairEPlusJetsSelectionTagging.jetSelectionInTaggingMode = cms.bool(
        True)

# Maximum Number of Events
# NOTE(review): duplicates the earlier maxEvents assignment; this later one wins.
process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(-1))

from BristolAnalysis.NTupleTools.NTupler_cff import setup_ntupler
setup_ntupler(process, cms)
process.nTupleGenEventInfo.isTTbarMC = cms.bool(isTTbarMC)

# mapping between MiniAOD collections and our object selections
process.load('BristolAnalysis.NTupleTools.indices_cff')
# adds process.eventUserDataSequence
process.load('BristolAnalysis.NTupleTools.userdata.EventUserData_cff')
# Main path: the ttbar-MC variant additionally runs the ttGenEvent and
# pseudo-top producers; otherwise the two paths are identical.
if isTTbarMC:
    process.makingNTuples = cms.Path(
        # process.metFilters *
        process.badMuonTagger *
        process.processedElectrons *
        # process.reapplyJEC *
        process.electronSelectionAnalyzerSequence *
        process.muonSelectionAnalyzerSequence *
        process.qcdMuonSelectionAnalyzerSequence *
        process.qcdElectronSelectionAnalyzerSequence *
        process.ttGenEvent *
        process.selectionCriteriaAnalyzer *
        process.makePseudoTop *
        process.indexSequence *
        process.eventUserDataSequence *
        process.printEventContent *
        process.nTuples *
        process.nTupleTree
    )
else:
    process.makingNTuples = cms.Path(
        # process.metFilters *
        process.badMuonTagger *
        process.processedElectrons *
        # process.reapplyJEC *
        process.electronSelectionAnalyzerSequence *
        process.muonSelectionAnalyzerSequence *
        process.qcdMuonSelectionAnalyzerSequence *
        process.qcdElectronSelectionAnalyzerSequence *
        process.selectionCriteriaAnalyzer *
        process.indexSequence *
        process.eventUserDataSequence *
        process.printEventContent *
        process.nTuples *
        process.nTupleTree
    )

# Always keep the per-selection decision flags, object indices and event
# user data in the output tree.
process.nTupleTree.outputCommands.extend(
    [
        'keep uint*_topPairMuPlusJetsSelectionTagging_*_*',
        'keep uint*_topPairMuPlusJetsQCDSelectionTagging1_*_*',
        'keep uint*_topPairMuPlusJetsQCDSelectionTagging2_*_*',
        'keep uint*_topPairEPlusJetsSelectionTagging_*_*',
        'keep uint*_topPairEPlusJetsQCDSelectionTagging_*_*',
        'keep uint*_topPairEPlusJetsConversionSelectionTagging_*_*',
        'keep bool_topPairMuPlusJetsSelectionTagging_*_*',
        'keep bool_topPairMuPlusJetsQCDSelectionTagging1_*FullSelection*_*',
        'keep bool_topPairMuPlusJetsQCDSelectionTagging2_*FullSelection*_*',
        'keep bool_topPairEPlusJetsSelectionTagging_*_*',
        'keep bool_topPairEPlusJetsQCDSelectionTagging_*FullSelection*_*',
        'keep bool_topPairEPlusJetsConversionSelectionTagging_*FullSelection*_*',
        'keep uint*_*Indices*_*_*',
        'keep double_eventUserData*_*_*',
    ]
)
# NOTE(review): indentation was lost in this copy of the file; the nesting
# below (in particular the `if not isTTbarMC:` block and the metFilters
# assignment being at top level) should be verified against upstream.
if isMC:
    # Remove Data Triggers
    process.triggerSequence.remove(process.nTupleTriggerEle32erWPTightGsf)
    process.triggerSequence.remove(process.nTupleTriggerIsoMu24)
    process.triggerSequence.remove(process.nTupleTriggerIsoTkMu24)
    process.triggerSequence.remove(process.nTupleTrigger)
    # Delete the removed modules so unscheduled execution cannot run them.
    del process.nTupleTriggerEle32erWPTightGsf
    del process.nTupleTriggerIsoMu24, process.nTupleTriggerIsoTkMu24
    del process.nTupleTrigger

if isData:
    # Use cleaned MET collection in data
    process.nTupleMET.InputTag = cms.InputTag('slimmedMETsMuEGClean')
    process.eventUserDataTopPairElectronPlusJetsSelection.metInputTag = cms.InputTag('slimmedMETsMuEGClean')
    process.eventUserDataTopPairElectronPlusJetsConversionSelection.metInputTag = cms.InputTag('slimmedMETsMuEGClean')
    process.eventUserDataTopPairElectronPlusJetsNonIsoSelection.metInputTag = cms.InputTag('slimmedMETsMuEGClean')
    process.eventUserDataTopPairMuonPlusJetsSelection.metInputTag = cms.InputTag('slimmedMETsMuEGClean')
    process.eventUserDataTopPairMuonPlusJetsQCD1Selection.metInputTag = cms.InputTag('slimmedMETsMuEGClean')
    process.eventUserDataTopPairMuonPlusJetsQCD2Selection.metInputTag = cms.InputTag('slimmedMETsMuEGClean')

    # Remove MC Triggers
    process.triggerSequence.remove(process.nTupleTriggerEle32erWPTightGsfMC)
    process.triggerSequence.remove(process.nTupleTriggerIsoMu24MC)
    process.triggerSequence.remove(process.nTupleTriggerIsoTkMu24MC)
    process.triggerSequence.remove(process.nTupleTrigger)
    del process.nTupleTriggerEle32erWPTightGsfMC
    del process.nTupleTriggerIsoMu24MC, process.nTupleTriggerIsoTkMu24MC
    del process.nTupleTrigger

    # Remove PseudoTop and MC Gen Variables
    process.makingNTuples.remove(process.makePseudoTop)
    process.nTuples.remove(process.pseudoTopSequence)
    process.nTuples.remove(process.nTupleGenMET)
    process.nTuples.remove(process.nTupleGenJets)
    process.nTuples.remove(process.nTupleGenEventInfo)
    process.nTuples.remove(process.nTupleGenParticles)

    # Do not keep Gen branches
    process.nTupleTree.outputCommands.append('drop *_nTuplePFJets_*Gen*_*')

    # Delete removed modules and sequences (So they do not run on unscheduled)
    del process.makePseudoTop, process.pseudoTopSequence, process.pseudoTop
    del process.nTuplePseudoTopJets, process.nTuplePseudoTopLeptons, process.nTuplePseudoTopNeutrinos, process.nTuplePseudoTops
    del process.nTupleGenMET, process.nTupleGenJets, process.nTupleGenEventInfo, process.nTupleGenParticles

if not isTTbarMC:
    print('Not a ttbar MC - removing TTbar specific modules')
    process.selectionCriteriaAnalyzer.genSelectionCriteriaInput = cms.VInputTag()

# 76X datasets are all ReReco so far
process.nTupleEvent.metFiltersInputTag = cms.InputTag('TriggerResults', '', 'PAT')

if not options.printEventContent:
    process.makingNTuples.remove(process.printEventContent)

# Output ROOT file for the ntuple tree.
process.TFileService = cms.Service(
    "TFileService",
    fileName=cms.string('ntuple.root')
)
# Per-object "user data" producers: embed derived flags (isGood, isLoose, ...)
# as userInts on the PAT objects so the selectors below can cut on them.
process.load('BristolAnalysis.NTupleTools.userdata.ElectronUserData_cfi')
process.load('BristolAnalysis.NTupleTools.userdata.MuonUserData_cfi')
process.load('BristolAnalysis.NTupleTools.userdata.JetUserData_cfi')
if options.useJECFromFile:
    process.jetUserData.jetCollection = cms.InputTag("patJetsReapplyJEC")

###############################################################################
# Here we define the objects we want to work with. As an example we have
# 3 types of muons:
# - our signal muons with tight isolation
# - our non-isolated muons for control region 1
# - our non-isolated muons for control region 2
# for each muon we have 1 jet collection and therefore 1 b-jet collection
# which leads to a total of 6 jet collections and 3 muon collections.
###############################################################################
cleaningDeltaR = 0.4
from PhysicsTools.PatAlgos.cleaningLayer1.jetCleaner_cfi import cleanPatJets
from PhysicsTools.PatAlgos.selectionLayer1.muonSelector_cfi import selectedPatMuons
from PhysicsTools.PatAlgos.selectionLayer1.electronSelector_cfi import selectedPatElectrons

# Muon collections, selected on the userInt flags embedded above.
process.globalOrTrackerMuons = selectedPatMuons.clone(
    src='muonUserData',
    cut='userInt("isGlobalOrTrackerMuon")',
)
process.goodMuons = selectedPatMuons.clone(
    src='muonUserData',
    cut='userInt("isGood")',
)
process.vetoMuons = selectedPatMuons.clone(
    src='muonUserData',
    cut='userInt("isLoose")',
)
process.goodNonIsoR1Muons = process.goodMuons.clone(
    cut='userInt("isGoodNonIsoR1")',
)
process.goodNonIsoR2Muons = process.goodMuons.clone(
    cut='userInt("isGoodNonIsoR2")',
)

# Electron collections (signal, veto, conversion and non-iso control regions).
process.goodElectrons = selectedPatElectrons.clone(
    src='electronUserData',
    cut=cms.string('userInt("isGood")'),
    lazyParser=cms.untracked.bool(True),
)
process.vetoElectrons = process.goodElectrons.clone(
    cut=cms.string('userInt("isVeto")'),
)
process.goodConversionElectrons = process.goodElectrons.clone(
    cut='userInt("isGoodConversion")',
)
process.goodNonIsoElectrons = process.goodElectrons.clone(
    cut='userInt("isGoodNonIso")',
)
# Signal jets: pass pt/ID userInt flags and do not overlap (deltaR < 0.4)
# with a signal electron or muon.
process.goodJets = cleanPatJets.clone(
    src=cms.InputTag("jetUserData"),
    preselection='userInt("passesPt") && userInt("isGood")',
    checkOverlaps=cms.PSet(
        electrons=cms.PSet(
            src=cms.InputTag("goodElectrons"),
            algorithm=cms.string("byDeltaR"),
            preselection=cms.string(""),
            deltaR=cms.double(cleaningDeltaR),
            # don't check if they share some AOD object ref
            checkRecoComponents=cms.bool(False),
            pairCut=cms.string(""),
            requireNoOverlaps=cms.bool(True),
        ),
        muons=cms.PSet(
            src=cms.InputTag("goodMuons"),
            algorithm=cms.string("byDeltaR"),
            preselection=cms.string(""),
            deltaR=cms.double(cleaningDeltaR),
            # don't check if they share some AOD object ref
            checkRecoComponents=cms.bool(False),
            pairCut=cms.string(""),
            requireNoOverlaps=cms.bool(True),
        ),
    )
)

# Per-control-region jet collections: same cleaning but against the
# corresponding lepton collection.
process.goodJetsEConversionRegion = process.goodJets.clone()
process.goodJetsEConversionRegion.checkOverlaps.electrons.src = 'goodConversionElectrons'
process.goodJetsENonIsoRegion = process.goodJets.clone()
process.goodJetsENonIsoRegion.checkOverlaps.electrons.src = 'goodNonIsoElectrons'
process.goodJetsMuNonIsoR1Region = process.goodJets.clone()
process.goodJetsMuNonIsoR1Region.checkOverlaps.muons.src = 'goodNonIsoR1Muons'
process.goodJetsMuNonIsoR2Region = process.goodJets.clone()
process.goodJetsMuNonIsoR2Region.checkOverlaps.muons.src = 'goodNonIsoR2Muons'

# goodBJets = goodJets + Btag (medium working point)
process.goodBJets = cms.EDFilter(
    "PATJetSelector",
    src=cms.InputTag('goodJets'),
    cut=cms.string('userInt("passesMediumBtagWP")')
)
process.goodTightBJets = cms.EDFilter(
    "PATJetSelector",
    src=cms.InputTag('goodJets'),
    cut=cms.string('userInt("passesTightBtagWP")')
)

# Medium-WP b-jets per control region.
process.goodBJetsEConversionRegion = process.goodBJets.clone(
    src='goodJetsEConversionRegion')
process.goodBJetsENonIsoRegion = process.goodBJets.clone(
    src='goodJetsENonIsoRegion')
process.goodBJetsMuNonIsoR1Region = process.goodBJets.clone(
    src='goodJetsMuNonIsoR1Region')
process.goodBJetsMuNonIsoR2Region = process.goodBJets.clone(
    src='goodJetsMuNonIsoR2Region')

# tight b-tag WP
process.goodTightBJetsEConversionRegion = process.goodTightBJets.clone(
    src='goodJetsEConversionRegion')
process.goodTightBJetsENonIsoRegion = process.goodTightBJets.clone(
    src='goodJetsENonIsoRegion')
process.goodTightBJetsMuNonIsoR1Region = process.goodTightBJets.clone(
    src='goodJetsMuNonIsoR1Region')
process.goodTightBJetsMuNonIsoR2Region = process.goodTightBJets.clone(
    src='goodJetsMuNonIsoR2Region')
###############################################################################
# End of analysis object definition
###############################################################################

###############################################################################
# Begin selection
###############################################################################
# steps from
# https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookPATExampleTopQuarks
from PhysicsTools.PatAlgos.selectionLayer1.jetCountFilter_cfi import countPatJets

# Jet-multiplicity filters used as selection steps (>=1..4 jets, >=1,2 b-jets).
process.step6a = countPatJets.clone(src='goodJets', minNumber=1)
process.step6b = countPatJets.clone(src='goodJets', minNumber=2)
process.step6c = countPatJets.clone(src='goodJets', minNumber=3)
process.step7 = countPatJets.clone(src='goodJets', minNumber=4)
process.step8a = countPatJets.clone(src='goodBJets', minNumber=1)
process.step8b = countPatJets.clone(src='goodBJets', minNumber=2)
###############################################################################
# end selection
###############################################################################

# ntuple output: read objects from the user-data-enriched collections.
process.nTupleElectrons.InputTag = 'electronUserData'
process.nTupleMuons.InputTag = 'muonUserData'
process.nTuplePFJets.InputTag = 'jetUserData'

# EDM NTuples (disabled alternative output kept for reference)
# process.load('BristolAnalysis.NTupleTools.content')
#
# process.ntuples = cms.OutputModule(
#     "PoolOutputModule",
#     fileName=cms.untracked.string('ntuple.root'),
#     outputCommands=cms.untracked.vstring(
#         "drop *",
#         "keep *_electrons_*_*",
#     ),
#     dropMetaData=cms.untracked.string('ALL'),
# )
#
# process.endPath = cms.EndPath(process.ntuples)
|
StarcoderdataPython
|
9776981
|
# Generated by Django 3.2.8 on 2021-11-06 11:31
from django.db import migrations


class Migration(migrations.Migration):
    """Rename quizz model fields to shorter names and drop redundant columns.

    Renames *_title / *_text / lg_key fields to `name` / `context`, then
    removes the now-redundant key/title fields.
    """

    dependencies = [
        ('quizz', '0001_initial'),
    ]

    operations = [
        # Category.category_title -> Category.name
        migrations.RenameField(
            model_name='category',
            old_name='category_title',
            new_name='name',
        ),
        # Choice.choice_text -> Choice.context
        migrations.RenameField(
            model_name='choice',
            old_name='choice_text',
            new_name='context',
        ),
        # LanguageProgramming.lg_key -> LanguageProgramming.name
        migrations.RenameField(
            model_name='languageprogramming',
            old_name='lg_key',
            new_name='name',
        ),
        # Question.question_text -> Question.context
        migrations.RenameField(
            model_name='question',
            old_name='question_text',
            new_name='context',
        ),
        # Drop fields made redundant by the renames above.
        migrations.RemoveField(
            model_name='category',
            name='category_key',
        ),
        migrations.RemoveField(
            model_name='languageprogramming',
            name='lg_title',
        ),
    ]
|
StarcoderdataPython
|
4921703
|
from directory import Directory
from courses import CourseCatalog
|
StarcoderdataPython
|
3525444
|
"""
A collection of base classes to use to shortcut having to import things.
This is used to provide a base class which can be imported into two locations
to identify a type, so that either location doesn't have to import the other.
"""
class DatabaseEntryType:
    """
    A base class for `sunpy.database.tables.DatabaseEntry`.

    This class should not be used directly; it exists only so other modules
    can identify database entries by type without importing the real class.
    """
    # This is currently used to prevent `sunpy.map` having to import
    # `sunpy.database`.
|
StarcoderdataPython
|
16460
|
<gh_stars>1-10
#! /usr/bin/env python
"""
usage: demoselsim.py outfilename popn h pct
"""
import numpy
import sys
def pnext(AC, N, Nout, h, s):
    """Sample the next-generation allele count under selection.

    Args:
        AC: current derived-allele count in a diploid population of size N.
        N: current population size; Nout: size of the offspring generation.
        h: dominance coefficient; s: selection coefficient (w11 = 1+s).

    Returns:
        A binomial draw of the derived-allele count among 2*Nout chromosomes.
    """
    # Explicit float casts keep the arithmetic correct under Python 2's
    # integer division as well.
    AC = float(AC); N = float(N); h = float(h); s = float(s)
    p = AC / (2 * N)
    w11 = 1 + s; w12 = 1 + h * s; w22 = 1
    # Mean fitness and the deterministic post-selection frequency.
    wbar = ((p ** 2) * w11) + (2 * p * (1 - p) * w12) + (((1 - p) ** 2) * w22)
    pdet = p * (p * w11 + (1 - p) * w12) / wbar
    x = numpy.random.binomial(2 * Nout, pdet)
    return x


def trajecvec(h, s, popn, pct):
    """Simulate an allele-count trajectory through a two-bottleneck demography.

    The trajectory runs at size N, bottlenecks to NB1, recovers to N, and
    finally bottlenecks to Nfinal at generation tAdmix.  If the allele fixes
    or is lost, the last element is replaced by 'FIXED:<count>' and the
    trajectory is returned early.

    Args:
        h, s: dominance and selection coefficients (see pnext).
        popn: "EUR" or "ASN" — selects the demographic parameters.
        pct: starting derived-allele frequency in percent.

    Returns:
        List of per-generation allele counts; last element may be a
        'FIXED:<count>' string if the allele fixed or was lost.

    Fixes relative to the original:
      * the final bottleneck draw now uses the latest count (was the stale
        AC[t], one generation behind);
      * the final fixation check now tests the newly drawn count against
        2*Nfinal (was comparing the pre-bottleneck count);
      * a segregating trajectory now returns AC (the original fell off the
        end and returned None, crashing summary()).
    """
    if popn == "EUR":  # initialize EUR
        tNcurr = 620
        tB1 = 720
        tAdmix = 1900
        N = 10085
        NB1 = 549
        Nfinal = 85
    elif popn == "ASN":  # initialize ASN
        N = 10063
        tNcurr = 540
        NB1 = 407
        tAdmix = 1900
        tB1 = 640
        Nfinal = 97
    else:
        # Previously an unknown population crashed later with NameError.
        raise ValueError('unknown population: {0}'.format(popn))

    # get interval points
    first_point = tAdmix - tB1
    second_point = tAdmix - tNcurr

    AC = [int(round(N * 2 * (pct / 100.)))]

    def step(n_parent, n_out):
        """Advance one generation; True if the allele fixed or was lost."""
        AC.append(pnext(AC[-1], n_parent, n_out, h, s))
        if (AC[-1] == 0) or (AC[-1] == 2 * n_out):
            AC[-1] = 'FIXED:{0}'.format(AC[-1])
            return True
        return False

    # current population
    for t in range(0, first_point):
        if step(N, N):
            return AC
    # bottleneck into NB1
    if step(N, NB1):
        return AC
    for t in range(first_point + 1, second_point):
        if step(NB1, NB1):
            return AC
    # recovery back to N
    if step(NB1, N):
        return AC
    for t in range(second_point + 1, tAdmix):
        if step(N, N):
            return AC
    # final bottleneck to Nfinal at admixture
    if step(N, Nfinal):
        return AC
    return AC
def checkint(value):
    """Return True if *value* can be parsed as an int.

    Fix: the original parameter was named `str`, shadowing the builtin.
    Callers invoke it positionally, so the rename is safe.
    """
    try:
        int(value)
        return True
    except ValueError:
        return False


def summary(AC):
    """Collapse a trajectory from trajecvec() into a 'STATE:count' string.

    Returns:
        'SEG:<count>'  — still segregating; count is the final allele count.
        'LOST:<time>'  — lost (trajectory ends in 'FIXED:0'); time = length.
        'FIXED:<time>' — fixed; time = trajectory length.
    """
    last = AC[-1]
    if checkint(last):
        return 'SEG:{0}'.format(last)
    elif last == 'FIXED:0':
        return 'LOST:{0}'.format(len(AC))
    else:
        return 'FIXED:{0}'.format(len(AC))
def main():
    """CLI driver: run `niter` selection-simulation replicates and summarize.

    Usage: demoselsim.py outfilename popn h pct
    Writes one line — "popn h s wfreq segfreq" — to outfilename.
    """
    outfilename = sys.argv[1]
    popn = sys.argv[2]
    h = float(sys.argv[3])
    pct = int(sys.argv[4])
    niter = 1000000

    results_vec = []
    # Fix: `xrange` is Python-2-only; `range` works on both 2 and 3.
    for i in range(niter):
        # Draw a selection coefficient per replicate from a gamma
        # distribution, rescaled by 2 * N_ancestral (25636).
        s = numpy.random.gamma(0.184, 8200)
        s = s / (2. * 25636)
        results_vec.append(summary(trajecvec(h, s, popn, pct)))
        sys.stdout.write('{0}\r'.format(i))  # progress indicator

    # Tally outcomes; SEG keeps final allele counts, LOST/FIXED keep times.
    lost_count = 0
    fixed_count = 0
    seg_count = 0
    seg_vec = []
    lost_vec = []
    fixed_vec = []
    for result in results_vec:
        state, count = result.split(':')
        if state == 'LOST':
            lost_count += 1
            lost_vec.append(int(count))
        elif state == 'SEG':
            seg_count += 1
            seg_vec.append(int(count))
        elif state == 'FIXED':
            fixed_count += 1
            fixed_vec.append(int(count))

    # Final (post-admixture) population size.
    if popn == "EUR":  # initialize EUR
        N = 85
    elif popn == "ASN":  # initialize ASN
        N = 97

    # NOTE(review): seg_vec holds allele counts while fixed replicates
    # contribute 1 (not 2*N) — confirm the intended weighting of wfreq.
    wfreq = numpy.mean([0] * lost_count + seg_vec + [1] * fixed_count) / (2. * N)
    if seg_vec != []:
        segfreq = numpy.mean(seg_vec) / (2. * N)
    else:
        segfreq = 0
    s = "dist"  # label: s was drawn from a distribution, not a fixed value
    outstr = '{0} {1} {2} {3} {4}'.format(popn, h, s, wfreq, segfreq)
    # Fix: the original opened the file at the start and could leak the
    # handle on an exception; `with` guarantees it is closed.
    with open(outfilename, 'w') as outfile:
        outfile.write(outstr)
|
StarcoderdataPython
|
4875326
|
<filename>app/types.py<gh_stars>0
from __future__ import annotations
import json
from typing import Optional, Dict, List, Union, TypeVar
from functools import cached_property # type: ignore
from datetime import datetime
from pathlib import Path
from pydantic import BaseModel, Field, HttpUrl
from fastapi.responses import JSONResponse
class Metadata(BaseModel):
    """Bookkeeping for the cached namespace data: source page, refresh time,
    and local cache file locations."""
    # Page the registered-namespace data is fetched from.
    webpage: HttpUrl = Field("https://igsn.github.io/registered/")
    # When the local copy was last refreshed; None = never fetched.
    last_updated: Optional[datetime] = None
    # Local cache files for this metadata and for the namespace data itself.
    metadata_file: Path = Path("./igsn_namespace_meta.json")
    data_file: Path = Path("./igsn_namespace.json")
class Namespace(BaseModel):
    """A single registered IGSN namespace: its prefix, governing agent,
    fully-resolved handle prefix and creation date."""
    namespace: str = Field(
        ...,
        description="The namespace name (same as the IGSN prefix)",
        regex="[A-Z0-9]+",
        example="AU",
    )
    owner: str = Field(
        ...,
        description="The Allocating Agent which governs this namespace",
        example="GEOAUS",
    )
    handle_prefix: str = Field(
        ...,
        description="The prefix for the fully-resolved handle",
        regex="10273/[A-Z0-9]+",
        example="10273/AU",
    )
    date_created: Optional[str] = Field(
        None,
        description="The date that the namespace was created, in ISO format",
        example="2015-02-12T08:23:28+01:00",
    )
class Namespaces:
    """In-memory collection of all IGSN namespaces with lazy lookup indexes.

    Indexes (by namespace and by agent) are built once on first access via
    `cached_property`, so `items` should not be mutated afterwards.
    """

    def __init__(self, items: List[Namespace]):
        self.items = items

    def json(self):
        "Serialize to JSON"
        # NOTE(review): each ns.json() is already a JSON *string*, so this
        # produces a JSON array of strings (double-encoded) — confirm that
        # consumers expect this rather than an array of objects.
        return json.dumps([ns.json() for ns in self.items])

    @cached_property
    def namespaces(self) -> Dict[str, Namespace]:
        "Index by namespace"
        # Upper-cased keys make by_namespace() case-insensitive.
        return {ns.namespace.upper(): ns for ns in self.items}

    @cached_property
    def agents(self) -> Dict[str, Agent]:
        "Index by agent"
        # Construct by_agent index (Agent is defined later in this module;
        # the annotation works thanks to `from __future__ import annotations`).
        index: Dict[str, Agent] = {}
        for ns in self.items:
            try:
                index[ns.owner.upper()].namespaces.append(ns)
            except KeyError:
                # Make a new Agent instance with this namespace
                index[ns.owner.upper()] = Agent(name=ns.owner, namespaces=[ns])
        return index

    def by_agent(self, agent: str) -> Optional[Agent]:
        "Search by agent"
        return self.agents.get(agent.upper(), None)

    def by_namespace(self, namespace: str) -> Optional[Namespace]:
        "Search by namespace"
        return self.namespaces.get(namespace.upper(), None)
class Agent(BaseModel):
    """An IGSN Allocating Agent together with the namespaces it governs."""
    name: str = Field(
        ...,
        title="Allocating agent",
        description="The name of the allocating agent",
        example="GEOAUS",
    )
    # All namespaces governed by this agent.
    namespaces: List[Namespace]
# A return type for app errors: either the successful payload (T) or a
# JSONResponse carrying the error.
T = TypeVar("T")
HTTPResponse = Union[T, JSONResponse]


# A class for error messages
class HTTPErrorMessage(BaseModel):
    # Human-readable error description (FastAPI's conventional "detail" key).
    detail: str
|
StarcoderdataPython
|
11369873
|
#!/usr/bin/env python3
# Copyright Catch2 Authors
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# https://www.boost.org/LICENSE_1_0.txt)
# SPDX-License-Identifier: BSL-1.0
from ConfigureTestsCommon import configure_and_build, run_and_return_output
import os
import re
import sys

"""
Tests the CMake configure option for CATCH_CONFIG_DEFAULT_REPORTER

Requires 2 arguments, path folder where the Catch2's main CMakeLists.txt
exists, and path to where the output files should be stored.
"""

if len(sys.argv) != 3:
    print('Wrong number of arguments: {}'.format(len(sys.argv)))
    print('Usage: {} catch2-top-level-dir base-build-output-dir'.format(sys.argv[0]))
    # sys.exit instead of the site-provided exit(): works without the site
    # module and is the conventional form in scripts.
    sys.exit(1)

catch2_source_path = os.path.abspath(sys.argv[1])
build_dir_path = os.path.join(os.path.abspath(sys.argv[2]), 'CMakeConfigTests', 'DefaultReporter')

# Build Catch2 with the compact reporter compiled in as the default.
configure_and_build(catch2_source_path,
                    build_dir_path,
                    [("CATCH_CONFIG_DEFAULT_REPORTER", "compact")])

stdout, _ = run_and_return_output(os.path.join(build_dir_path, 'tests'), 'SelfTest', ['[approx][custom]'])

# This matches the summary line made by compact reporter, console reporter's
# summary line does not match the regex.
# Raw string: '\d' in a plain literal is an invalid escape sequence
# (SyntaxWarning on Python 3.12+).
summary_regex = r'Passed \d+ test case with \d+ assertions.'
if not re.match(summary_regex, stdout):
    print("Could not find '{}' in the stdout".format(summary_regex))
    print('stdout: "{}"'.format(stdout))
    sys.exit(2)
|
StarcoderdataPython
|
5026278
|
<gh_stars>1-10
def is_leap_year(year):
    """True for Gregorian leap years: divisible by 4, except centuries
    that are not divisible by 400."""
    if year % 400 == 0:
        return True
    return year % 4 == 0 and year % 100 != 0


def get_year_status(year):
    """Return a Russian label: leap ('Високосный') or common ('Обычный')."""
    if is_leap_year(year):
        return 'Високосный'
    return 'Обычный'
print(get_year_status(int(input())))
|
StarcoderdataPython
|
1672540
|
"""Editor related to `Shapes`.
As you can easily assumes, `editor` is a high-level api, so
* This sub-module can call other more premitive api freely.
* On contrary, the more premitive sub-modules should not call this.
"""
import numpy as np
import _ctypes
from pywintypes import com_error
from fairypptx import constants
from fairypptx.shape import Shape, Shapes
from fairypptx.shape import Box
from fairypptx.table import Table
from fairypptx.shape import Shape, Shapes
from fairypptx.object_utils import is_object
from typing import Sequence
def _to_shapes(arg):
    """Coerce *arg* (Shapes/Shape wrapper, sequence, or raw COM object)
    into a `Shapes` instance."""
    if isinstance(arg, Shapes):
        return arg
    if isinstance(arg, Sequence):
        return Shapes(arg)
    if isinstance(arg, Shape):
        return Shapes([arg])
    # Raw COM objects: either a Shapes collection or a single Shape.
    if is_object(arg, "Shapes") or is_object(arg, "Shape"):
        return Shapes(arg)
    raise ValueError(f"Cannot interpret `{arg}`.")
class ShapesEncloser:
    """Enclose the specified shapes.

    Callable: draws a rectangle around the shapes' circumscribed box (with
    configurable margins), sends it to the back, and returns the original
    shapes plus the new rectangle as a `Shapes`.
    """

    def __init__(self,
                 line=3,
                 fill=None,
                 linecolor=(0, 0, 0),
                 *,
                 margin=0.10,
                 left_margin=None,
                 top_margin=None,
                 right_margin=None,
                 bottom_margin=None,
                 ):
        # Formatting forwarded to the enclosing shape.
        self.line = line
        self.fill = fill
        self.linecolor = linecolor
        # Default margin for all four sides; per-side values override it.
        # Floats with |value| < 1.0 are treated as ratios of the box size.
        self.margin = margin
        self.left_margin = left_margin
        self.top_margin = top_margin
        self.right_margin = right_margin
        self.bottom_margin = bottom_margin

    def _margin_solver(self, c_box):
        """Solves the margin of the circumscribed box.

        it returns the actual pixel(float) of margin. (i.e. not ratio)
        (left_margin, top_margin, right_margin, bottom_margin).
        """
        def _to_pixel(margin, length):
            # Small floats are ratios of the side length; anything else is
            # taken as an absolute pixel value.
            if isinstance(margin, float) and abs(margin) < 1.0:
                return length * margin
            else:
                return margin

        def _solve_margin(first_value, length):
            # Per-side value wins; fall back to the shared default.
            value = first_value
            if value is None:
                value = self.margin
            assert value is not None
            return _to_pixel(value, length)

        left_margin = _solve_margin(self.left_margin, c_box.x_length)
        top_margin = _solve_margin(self.top_margin, c_box.y_length)
        right_margin = _solve_margin(self.right_margin, c_box.x_length)
        bottom_margin = _solve_margin(self.bottom_margin, c_box.y_length)
        return (left_margin, top_margin, right_margin, bottom_margin)

    def __call__(self, shapes):
        # Nothing to enclose.
        if not shapes:
            return None
        shapes = _to_shapes(shapes)
        c_box = shapes.circumscribed_box
        left_margin, top_margin, right_margin, bottom_margin = self._margin_solver(c_box)
        width = c_box.width + (left_margin + right_margin)
        height = c_box.height + (top_margin + bottom_margin)
        # NOTE(review): Shape.make(1) — presumably a rectangle autoshape;
        # confirm against fairypptx's Shape factory.
        shape = Shape.make(1)
        shape.api.Top = c_box.top - top_margin
        shape.api.Left = c_box.left - left_margin
        shape.api.Width = width
        shape.api.Height = height
        shape.line = self.line
        shape.fill = self.fill
        if self.linecolor:
            shape.line = self.linecolor
        # Put the enclosure behind the enclosed shapes.
        shape.api.Zorder(constants.msoSendToBack)
        return Shapes(list(shapes) + [shape])
class TitleProvider:
    """Callable that adds a title text box just above the given shapes.

    The title is placed at the top-left corner of the shapes' circumscribed
    box and returns the newly created title Shape.
    """

    def __init__(self,
                 title,
                 fontsize=None,
                 fontcolor=(0, 0, 0),
                 fill=None,
                 line=None,
                 bold=True,
                 underline=False,
                 ):
        # title: the text to display; fontsize=None means "derive from the
        # target shapes" (see _yield_fontsize).
        self.title = title
        self.fontsize = fontsize
        self.fontcolor = fontcolor
        self.fill = fill
        self.line = line
        self.bold = bold
        self.underline = underline

    def __call__(self, shapes):
        shapes = _to_shapes(shapes)
        c_box = shapes.circumscribed_box
        # NOTE(review): Shape.make(1) — presumably a rectangle/text autoshape;
        # confirm against fairypptx's Shape factory.
        shape = Shape.make(1)
        shape.fill = self.fill
        shape.line = self.line
        shape.textrange.text = self.title
        shape.textrange.font.bold = self.bold
        shape.textrange.font.underline = self.underline
        shape.textrange.font.size = self._yield_fontsize(self.fontsize, shapes)
        shape.textrange.font.color = self.fontcolor
        # Shrink the box to its text, then park it on the top-left corner.
        shape.tighten()
        shape.api.Top = c_box.top - shape.height
        shape.api.Left = c_box.left
        return shape

    def _yield_fontsize(self, fontsize, shapes):
        """Explicit fontsize wins; otherwise use the largest font found in
        the shapes, falling back to 18."""
        if fontsize is not None:
            return fontsize
        fontsizes = []
        for shape in shapes:
            fontsizes.append(shape.textrange.font.size)
        if fontsizes:
            return max(fontsizes)
        else:
            return 18
class ShapesResizer:
"""Shapes Resizer.
This class resize the given shapes to the equivalent size.
Related Class.
-----------
`shapes.BoundingResizer`: the bounding box of the shapes is resized.
"""
def __init__(self, size="max"):
self.size = size
def _yield_size(self, shapes):
"""Determine the return of size,
based on the given parameters.
"""
size = self.size
if isinstance(size, (list, tuple)):
width, height = size
elif isinstance(size, Shape):
shape = size
width, height = shape.width, shape.height
elif isinstance(size, str):
if size == "max":
width = max(shape.width for shape in shapes)
height = max(shape.height for shape in shapes)
else:
raise NotImplementedError("This error message must be displayed in `__init``. ")
return width, height
def __call__(self, shapes):
width, height = self._yield_size(shapes)
for shape in shapes:
shape.width = width
shape.height = height
return shapes
class BoundingResizer:
    """Resize the bounding box of `Shapes`.

    All shapes are translated and scaled together (relative to the
    top-left corner of their common bounding box) so that the box
    reaches the requested size; run font sizes are scaled by the mean
    of the horizontal and vertical ratios.

    Args:
        size: 2-tuple ``(width, height)`` with the expected extent, or a
            scalar scale factor applied to the current box.
        fontsize: (float)
            The expected minimum font size over the shapes after
            resizing; takes priority over ``size``.
    """
    def __init__(self, size=None, *, fontsize=None):
        self.size = size
        self.fontsize = fontsize

    def _to_minimum_fontsize(self, textrange):
        # Smallest font size over the non-empty runs of *textrange*,
        # or None when it holds no text.
        fontsizes = set()
        for run in textrange.runs:
            if run.text:
                fontsizes.add(run.font.size)
        if fontsizes:
            return min(fontsizes)
        else:
            return None

    def _get_minimum_fontsize(self, shapes):
        # Minimum font size across *shapes*; tables are walked cell by cell.
        fontsizes = set()
        for shape in shapes:
            if shape.is_table():
                table = Table(shape)
                for row in table.rows:
                    for cell in row:
                        textrange = cell.shape.textrange
                        fontsize = self._to_minimum_fontsize(textrange)
                        if fontsize:
                            fontsizes.add(fontsize)
            else:
                try:
                    fontsize = self._to_minimum_fontsize(shape.textrange)
                except com_error as e:
                    # Shapes without a text frame raise a COM error;
                    # they simply contribute no font size.
                    pass
                else:
                    if fontsize:
                        fontsizes.add(fontsize)
        if fontsizes:
            return min(fontsizes)
        else:
            return None

    def _set_fontsize(self, textrange, ratio):
        # Scale each run's font size, rounding to a whole point size.
        for run in textrange.runs:
            run.api.Font.Size = round(run.font.size * ratio)

    def _yield_size(self, shapes):
        """Resolve the target ``(width, height)``.

        Priority:
            1. ``fontsize``
            2. ``size``
        When neither is given, the slide-wide minimum font size
        (default 12) is used as the ``fontsize`` target.
        """
        size = self.size
        fontsize = self.fontsize
        # For fallback.
        if size is None and fontsize is None:
            fontsize = self._get_minimum_fontsize(shapes.slide.shapes)
            if fontsize is None:
                fontsize = 12
        if fontsize is not None:
            c_box = shapes.circumscribed_box
            c_fontsize = self._get_minimum_fontsize(shapes)
            # NOTE(review): c_fontsize may be None when no shape carries
            # text, which would raise TypeError here — confirm callers
            # only pass text-bearing shapes.
            ratio = fontsize / c_fontsize
            size = ratio
        if isinstance(size, (int, float)):
            # Scalar: scale the current circumscribed box uniformly.
            c_box = shapes.circumscribed_box
            c_width = c_box.x_length
            c_height = c_box.y_length
            n_width = c_width * size
            n_height = c_height * size
        elif isinstance(size, (list, tuple)):
            n_width, n_height = size
        else:
            raise ValueError("Invalid size.", size)
        return n_width, n_height

    def __call__(self, shapes):
        """Perform `resize` for all the shapes.

        Not only does it change the size of each `Shape`, it also
        changes the size of each `Font` proportionally.

        Note:
            It works only for shapes whose rotation is 0.
        """
        # If the given is a single `Shape`, a `Shape` is returned.
        if isinstance(shapes, Shape):
            is_shape = True
        else:
            is_shape = False
        if not shapes:
            return shapes
        shapes = _to_shapes(shapes)
        n_width, n_height = self._yield_size(shapes)
        c_box = shapes.circumscribed_box
        width, height = c_box.x_length, c_box.y_length
        pivot = (c_box.top, c_box.left)  # [y_min, x_min]
        ratios = (n_height / height, n_width / width)
        # Font scale: mean of the vertical and horizontal ratios.
        ratio = np.mean(ratios)
        for shape in shapes:
            # Processings for all the shapes: translate and scale
            # relative to the top-left pivot.
            shape.api.Left = (shape.api.Left - pivot[1]) * ratios[1] + pivot[1]
            shape.api.Width = shape.api.Width * ratios[1]
            shape.api.Top = (shape.api.Top - pivot[0]) * ratios[0] + pivot[0]
            shape.api.Height = shape.api.Height * ratios[0]
            # For Table.
            if shape.is_table():
                table = Table(shape)
                for row in table.rows:
                    for cell in row:
                        self._set_fontsize(cell.shape.textrange, ratio)
            else:
                try:
                    self._set_fontsize(shape.textrange, ratio)
                except com_error as e:
                    # Shapes without text frames are skipped silently.
                    pass
        if not is_shape:
            return Shapes(shapes)
        else:
            return shapes[0]
# Library module: no command-line behavior is defined.
if __name__ == "__main__":
    pass
|
StarcoderdataPython
|
5044182
|
# Generated by Django 3.1.2 on 2021-11-10 10:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add education fields to ``WxUser``.

    Adds ``edu`` (degree, nullable CharField), ``enroll`` (enrollment
    year, nullable IntegerField, default 1) and ``grad`` (graduation
    year, nullable IntegerField, default 1).
    """

    dependencies = [
        ('app_api', '0014_wxuser'),
    ]

    operations = [
        migrations.AddField(
            model_name='wxuser',
            name='edu',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='学历'),
        ),
        migrations.AddField(
            model_name='wxuser',
            name='enroll',
            field=models.IntegerField(blank=True, default=1, null=True, verbose_name='入学年份'),
        ),
        migrations.AddField(
            model_name='wxuser',
            name='grad',
            field=models.IntegerField(blank=True, default=1, null=True, verbose_name='毕业年份'),
        ),
    ]
|
StarcoderdataPython
|
283450
|
# flake8: noqa
# import apis into api package
from xero_python.payrollnz.api.payroll_nz_api import PayrollNzApi
|
StarcoderdataPython
|
249147
|
from .worker import NB201Worker
|
StarcoderdataPython
|
3429008
|
<reponame>paulzzh/mahjong
# -*- coding: utf-8 -*-
import unittest
from mahjong.constants import EAST, SOUTH, WEST, NORTH, FIVE_RED_SOU
from mahjong.hand_calculating.hand import HandCalculator
from mahjong.hand_calculating.hand_config import HandConfig, OptionalRules
from mahjong.hand_calculating.yaku_config import YakuConfig
from mahjong.meld import Meld
from mahjong.tests_mixin import TestMixin
from mahjong.tile import TilesConverter
class YakuCalculationTestCase(unittest.TestCase, TestMixin):
def setUp(self):
    # Fresh yaku configuration for every test.
    self.config = YakuConfig()
def test_hands_calculation(self):
    """
    Group of hands that were not properly calculated on tenhou replays.
    Fixes were made and the hands are kept here as regression tests, to
    be sure the bugs stay fixed.
    """
    hand = HandCalculator()
    player_wind = EAST

    tiles = self._string_to_136_array(pin='112233999', honors='11177')
    win_tile = self._string_to_136_tile(pin='9')
    melds = [
        self._make_meld(Meld.PON, honors='111'),
        self._make_meld(Meld.CHI, pin='123'),
        self._make_meld(Meld.CHI, pin='123'),
    ]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.fu, 30)

    # we had a bug with multiple dora indicators and honor sets
    # this test is working with this situation
    tiles = self._string_to_136_array(pin='22244456799', honors='444')
    win_tile = self._string_to_136_tile(pin='2')
    dora_indicators = [self._string_to_136_tile(sou='3'), self._string_to_136_tile(honors='3')]
    melds = [self._make_meld(Meld.KAN, honors='4444')]
    result = hand.estimate_hand_value(tiles, win_tile, dora_indicators=dora_indicators, melds=melds)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 6)
    self.assertEqual(result.fu, 50)
    self.assertEqual(len(result.yaku), 2)

    # if we can't add pinfu to the hand
    # we can add 2 fu to make the hand more expensive
    tiles = self._string_to_136_array(sou='678', man='11', pin='123345', honors='666')
    win_tile = self._string_to_136_tile(pin='3')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_tsumo=True))
    self.assertEqual(result.fu, 40)

    tiles = self._string_to_136_array(man='234789', pin='12345666')
    win_tile = self._string_to_136_tile(pin='6')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.fu, 30)

    tiles = self._string_to_136_array(sou='678', pin='34555789', honors='555')
    win_tile = self._string_to_136_tile(pin='5')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_tsumo=True))
    self.assertEqual(result.fu, 40)

    tiles = self._string_to_136_array(sou='123345678', man='678', pin='88')
    win_tile = self._string_to_136_tile(sou='3')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 30)
    self.assertEqual(len(result.yaku), 1)

    tiles = self._string_to_136_array(sou='12399', man='123456', pin='456')
    win_tile = self._string_to_136_tile(sou='1')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 30)
    self.assertEqual(len(result.yaku), 1)

    tiles = self._string_to_136_array(sou='111123666789', honors='11')
    win_tile = self._string_to_136_tile(sou='1')
    melds = [self._make_meld(Meld.PON, sou='666')]
    dora_indicators = [self._string_to_136_tile(honors='4')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds,
                                      dora_indicators=dora_indicators,
                                      config=self._make_hand_config(player_wind=player_wind))
    self.assertEqual(result.fu, 40)
    self.assertEqual(result.han, 4)

    tiles = self._string_to_136_array(pin='12333', sou='567', honors='666777')
    win_tile = self._string_to_136_tile(pin='3')
    melds = [self._make_meld(Meld.PON, honors='666'), self._make_meld(Meld.PON, honors='777')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.fu, 30)
    self.assertEqual(result.han, 2)

    tiles = self._string_to_136_array(pin='12367778', sou='678', man='456')
    win_tile = self._string_to_136_tile(pin='7')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_riichi=True))
    self.assertEqual(result.fu, 40)
    self.assertEqual(result.han, 1)

    tiles = self._string_to_136_array(man='11156677899', honors='777')
    win_tile = self._string_to_136_tile(man='7')
    melds = [
        self._make_meld(Meld.KAN, honors='7777'),
        self._make_meld(Meld.PON, man='111'),
        self._make_meld(Meld.CHI, man='678'),
    ]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.fu, 40)
    self.assertEqual(result.han, 3)

    tiles = self._string_to_136_array(man='122223777888', honors='66')
    win_tile = self._string_to_136_tile(man='2')
    melds = [self._make_meld(Meld.CHI, man='123'), self._make_meld(Meld.PON, man='777')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.fu, 30)
    self.assertEqual(result.han, 2)

    tiles = self._string_to_136_array(pin='11144678888', honors='444')
    win_tile = self._string_to_136_tile(pin='8')
    melds = [
        self._make_meld(Meld.PON, honors='444'),
        self._make_meld(Meld.PON, pin='111'),
        self._make_meld(Meld.PON, pin='888'),
    ]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.fu, 30)
    self.assertEqual(result.han, 2)

    tiles = self._string_to_136_array(sou='67778', man='345', pin='999', honors='222')
    win_tile = self._string_to_136_tile(sou='7')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_tsumo=True))
    self.assertEqual(result.fu, 40)
    self.assertEqual(result.han, 1)

    tiles = self._string_to_136_array(sou='33445577789', man='345')
    win_tile = self._string_to_136_tile(sou='7')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_tsumo=True))
    self.assertEqual(result.fu, 30)
    self.assertEqual(result.han, 2)

    tiles = self._string_to_136_array(pin='112233667788', honors='22')
    win_tile = self._string_to_136_tile(pin='3')
    melds = [self._make_meld(Meld.CHI, pin='123')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.fu, 30)
    self.assertEqual(result.han, 2)

    tiles = self._string_to_136_array(sou='345', man='12333456789')
    win_tile = self._string_to_136_tile(man='3')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.fu, 40)
    self.assertEqual(result.han, 2)

    tiles = self._string_to_136_array(sou='11123456777888')
    melds = [
        self._make_meld(Meld.CHI, sou='123'),
        self._make_meld(Meld.PON, sou='777'),
        self._make_meld(Meld.PON, sou='888'),
    ]
    win_tile = self._string_to_136_tile(sou='4')
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds, config=self._make_hand_config(is_tsumo=True))
    self.assertEqual(result.fu, 30)
    self.assertEqual(result.han, 5)

    tiles = self._string_to_136_array(sou='112233789', honors='55777')
    melds = [self._make_meld(Meld.CHI, sou='123')]
    win_tile = self._string_to_136_tile(sou='2')
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.fu, 40)
    self.assertEqual(result.han, 4)

    tiles = self._string_to_136_array(pin='234777888999', honors='22')
    melds = [self._make_meld(Meld.CHI, pin='234'), self._make_meld(Meld.CHI, pin='789')]
    win_tile = self._string_to_136_tile(pin='9')
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.fu, 30)
    self.assertEqual(result.han, 2)

    tiles = self._string_to_136_array(pin='77888899', honors='777', man='444')
    melds = [self._make_meld(Meld.PON, honors='777'), self._make_meld(Meld.PON, man='444')]
    win_tile = self._string_to_136_tile(pin='8')
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds, config=self._make_hand_config(is_tsumo=True))
    self.assertEqual(result.fu, 30)
    self.assertEqual(result.han, 1)

    tiles = self._string_to_136_array(pin='12333345', honors='555', man='567')
    win_tile = self._string_to_136_tile(pin='3')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.fu, 40)
    self.assertEqual(result.han, 1)

    tiles = self._string_to_136_array(pin='34567777889', honors='555')
    win_tile = self._string_to_136_tile(pin='7')
    melds = [self._make_meld(Meld.CHI, pin='345')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.fu, 30)
    self.assertEqual(result.han, 3)

    tiles = self._string_to_136_array(pin='567', sou='333444555', honors='77')
    win_tile = self._string_to_136_tile(sou='3')
    melds = [self._make_meld(Meld.KAN, is_open=False, sou='4444')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds, config=self._make_hand_config(is_riichi=True))
    self.assertEqual(result.fu, 60)
    self.assertEqual(result.han, 1)
def test_is_riichi(self):
    """Riichi: 1 han on a closed hand; invalid with an open meld."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='123444', man='234456', pin='66')
    win_tile = self._string_to_136_tile(sou='4')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_riichi=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
    # riichi is not possible with an open hand
    melds = [self._make_meld(Meld.CHI, sou='123')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds, config=self._make_hand_config(is_riichi=True))
    self.assertNotEqual(result.error, None)
def test_is_tsumo(self):
    """Menzen tsumo: 1 han closed; yields no yaku on an open hand."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='123444', man='234456', pin='66')
    win_tile = self._string_to_136_tile(sou='4')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_tsumo=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 30)
    self.assertEqual(len(result.yaku), 1)
    # with open hand tsumo not giving yaku
    melds = [self._make_meld(Meld.CHI, sou='123')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds, config=self._make_hand_config(is_tsumo=True))
    self.assertNotEqual(result.error, None)
def test_is_ippatsu(self):
    """Ippatsu: valid only together with riichi."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='123444', man='234456', pin='66')
    win_tile = self._string_to_136_tile(sou='4')
    result = hand.estimate_hand_value(tiles, win_tile,
                                      config=self._make_hand_config(is_riichi=True, is_ippatsu=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 2)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 2)
    # without riichi ippatsu is not possible
    result = hand.estimate_hand_value(tiles, win_tile,
                                      config=self._make_hand_config(is_riichi=False, is_ippatsu=True))
    self.assertNotEqual(result.error, None)
def test_is_rinshan(self):
    """Rinshan kaihou: 1 han, 40 fu on this hand."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='123444', man='234456', pin='66')
    win_tile = self._string_to_136_tile(sou='4')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_rinshan=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
def test_is_chankan(self):
    """Chankan (robbing a kan): 1 han, 40 fu on this hand."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='123444', man='234456', pin='66')
    win_tile = self._string_to_136_tile(sou='4')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_chankan=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
def test_is_haitei(self):
    """Haitei (tsumo on the last draw): 1 han, 40 fu on this hand."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='123444', man='234456', pin='66')
    win_tile = self._string_to_136_tile(sou='4')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_haitei=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
def test_is_houtei(self):
    """Houtei (ron on the last discard): 1 han, 40 fu on this hand."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='123444', man='234456', pin='66')
    win_tile = self._string_to_136_tile(sou='4')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_houtei=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
def test_is_renhou(self):
    """Renhou: counted as 5 han under these optional rules."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='123444', man='234456', pin='66')
    win_tile = self._string_to_136_tile(sou='4')
    result = hand.estimate_hand_value(tiles, win_tile, config=self._make_hand_config(is_renhou=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 5)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
def test_is_daburu_riichi(self):
    """Double riichi: 2 han (requires the riichi flag as well)."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='123444', man='234456', pin='66')
    win_tile = self._string_to_136_tile(sou='4')
    result = hand.estimate_hand_value(tiles, win_tile,
                                      config=self._make_hand_config(is_daburu_riichi=True, is_riichi=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 2)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
def test_is_nagashi_mangan(self):
    """Nagashi mangan: scored as 5 han / 30 fu without a win tile."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='13579', man='234456', pin='66')
    result = hand.estimate_hand_value(tiles, None, config=self._make_hand_config(is_nagashi_mangan=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 5)
    self.assertEqual(result.fu, 30)
    self.assertEqual(len(result.yaku), 1)
def test_is_chitoitsu_hand(self):
    """Chiitoitsu (seven pairs): condition check plus 2 han / 25 fu value."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='113355', man='113355', pin='11')
    self.assertTrue(self.config.chiitoitsu.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_34_array(sou='2299', man='2299', pin='1199', honors='44')
    self.assertTrue(self.config.chiitoitsu.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_136_array(sou='113355', man='113355', pin='11')
    win_tile = self._string_to_136_tile(pin='1')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 2)
    self.assertEqual(result.fu, 25)
    self.assertEqual(len(result.yaku), 1)
def test_is_tanyao(self):
    """Tanyao: condition check, closed value, and the open-tanyao rule."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='234567', man='234567', pin='22')
    self.assertTrue(self.config.tanyao.is_condition_met(self._hand(tiles)))
    # terminals break tanyao
    tiles = self._string_to_34_array(sou='123456', man='234567', pin='22')
    self.assertFalse(self.config.tanyao.is_condition_met(self._hand(tiles)))
    # honors break tanyao
    tiles = self._string_to_34_array(sou='234567', man='234567', honors='22')
    self.assertFalse(self.config.tanyao.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_136_array(sou='234567', man='234567', pin='22')
    win_tile = self._string_to_136_tile(man='7')
    result = hand.estimate_hand_value(tiles, win_tile,
                                      config=self._make_hand_config(is_tsumo=False, is_riichi=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 3)
    self.assertEqual(result.fu, 30)
    self.assertEqual(len(result.yaku), 3)
    # open tanyao allowed by rule option
    tiles = self._string_to_136_array(sou='234567', man='234567', pin='22')
    win_tile = self._string_to_136_tile(man='7')
    melds = [self._make_meld(Meld.CHI, sou='234')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds,
                                      config=self._make_hand_config(has_open_tanyao=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 30)
    self.assertEqual(len(result.yaku), 1)
    # open tanyao rejected when the rule option is off
    tiles = self._string_to_136_array(sou='234567', man='234567', pin='22')
    win_tile = self._string_to_136_tile(man='7')
    melds = [self._make_meld(Meld.CHI, sou='234')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds,
                                      config=self._make_hand_config(has_open_tanyao=False))
    self.assertNotEqual(result.error, None)
def test_is_pinfu_hand(self):
    """Pinfu: valid two-sided wait vs. all the disqualifying shapes."""
    player_wind, round_wind = EAST, WEST
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='123456', man='123456', pin='55')
    win_tile = self._string_to_136_tile(man='6')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 30)
    self.assertEqual(len(result.yaku), 1)
    # waiting in two pairs
    tiles = self._string_to_136_array(sou='123456', man='123555', pin='55')
    win_tile = self._string_to_136_tile(man='5')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertNotEqual(result.error, None)
    # contains pon or kan
    tiles = self._string_to_136_array(sou='111456', man='123456', pin='55')
    win_tile = self._string_to_136_tile(man='6')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertNotEqual(result.error, None)
    # penchan waiting
    tiles = self._string_to_136_array(sou='123456', man='123456', pin='55')
    win_tile = self._string_to_136_tile(sou='3')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertNotEqual(result.error, None)
    # kanchan waiting
    tiles = self._string_to_136_array(sou='123567', man='123456', pin='55')
    win_tile = self._string_to_136_tile(sou='6')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertNotEqual(result.error, None)
    # tanki waiting
    tiles = self._string_to_136_array(man='22456678', pin='123678')
    win_tile = self._string_to_136_tile(man='2')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertNotEqual(result.error, None)
    # valued pair
    tiles = self._string_to_136_array(sou='123678', man='123456', honors='11')
    win_tile = self._string_to_136_tile(sou='6')
    result = hand.estimate_hand_value(tiles, win_tile,
                                      config=self._make_hand_config(player_wind=player_wind, round_wind=round_wind))
    self.assertNotEqual(result.error, None)
    # not valued pair
    tiles = self._string_to_136_array(sou='123678', man='123456', honors='22')
    win_tile = self._string_to_136_tile(sou='6')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 30)
    self.assertEqual(len(result.yaku), 1)
    # open hand
    tiles = self._string_to_136_array(sou='12399', man='123456', pin='456')
    win_tile = self._string_to_136_tile(man='1')
    melds = [self._make_meld(Meld.CHI, sou='123')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertNotEqual(result.error, None)
def test_is_iipeiko(self):
    """Iipeiko: condition check, closed value, and open-hand rejection."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='112233', man='123', pin='23444')
    self.assertTrue(self.config.iipeiko.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_136_array(sou='112233', man='333', pin='12344')
    win_tile = self._string_to_136_tile(man='3')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
    # iipeiko requires a closed hand
    melds = [self._make_meld(Meld.CHI, sou='123')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertNotEqual(result.error, None)
def test_is_ryanpeiko(self):
    """Ryanpeiko: condition checks, 3 han closed, open-hand rejection."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='112233', man='22', pin='223344')
    self.assertTrue(self.config.ryanpeiko.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_34_array(sou='111122223333', man='22')
    self.assertTrue(self.config.ryanpeiko.is_condition_met(self._hand(tiles, 1)))
    # a single iipeiko does not count
    tiles = self._string_to_34_array(sou='112233', man='123', pin='23444')
    self.assertFalse(self.config.ryanpeiko.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_136_array(sou='112233', man='33', pin='223344')
    win_tile = self._string_to_136_tile(pin='3')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 3)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
    # ryanpeiko requires a closed hand
    melds = [self._make_meld(Meld.CHI, sou='123')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertNotEqual(result.error, None)
def test_is_sanshoku(self):
    """Sanshoku doujun: 2 han closed, downgraded to 1 han when open."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='123', man='123', pin='12345677')
    self.assertTrue(self.config.sanshoku.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_34_array(sou='123456', man='23455', pin='123')
    self.assertFalse(self.config.sanshoku.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_136_array(sou='123456', man='12399', pin='123')
    win_tile = self._string_to_136_tile(man='2')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 2)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
    melds = [self._make_meld(Meld.CHI, sou='123')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 30)
    self.assertEqual(len(result.yaku), 1)
def test_is_sanshoku_douko(self):
    """Sanshoku doukou: condition check plus 2 han on an open hand."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='111', man='111', pin='11145677')
    self.assertTrue(self.config.sanshoku_douko.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_34_array(sou='111', man='222', pin='33344455')
    self.assertFalse(self.config.sanshoku_douko.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_136_array(sou='222', man='222', pin='22245699')
    melds = [self._make_meld(Meld.CHI, sou='222')]
    win_tile = self._string_to_136_tile(pin='9')
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 2)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
def test_is_toitoi(self):
    """Toitoi: condition checks plus 2 han / 40 fu on open hands."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='111333', man='333', pin='44555')
    self.assertTrue(self.config.toitoi.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_34_array(sou='777', pin='777888999', honors='44')
    self.assertTrue(self.config.toitoi.is_condition_met(self._hand(tiles, 1)))
    tiles = self._string_to_136_array(sou='111333', man='333', pin='44555')
    melds = [self._make_meld(Meld.PON, sou='111'), self._make_meld(Meld.PON, sou='333')]
    win_tile = self._string_to_136_tile(pin='5')
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 2)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
    tiles = self._string_to_136_array(sou='777', pin='777888999', honors='44')
    melds = [self._make_meld(Meld.PON, sou='777')]
    win_tile = self._string_to_136_tile(pin='9')
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 2)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
def test_is_sankantsu(self):
    """Sankantsu (three kans): condition check plus hand value."""
    hand = HandCalculator()
    tiles = self._string_to_136_array(sou='111333', man='123', pin='44666')
    melds = [
        self._make_meld(Meld.KAN, sou='1111'),
        self._make_meld(Meld.KAN, sou='3333'),
        self._make_meld(Meld.KAN, pin='6666'),
    ]
    self.assertTrue(self.config.sankantsu.is_condition_met(hand, melds))
    # a chankan meld also counts towards the three kans
    melds = [
        self._make_meld(Meld.CHANKAN, sou='1111'),
        self._make_meld(Meld.KAN, sou='3333'),
        self._make_meld(Meld.KAN, pin='6666'),
    ]
    win_tile = self._string_to_136_tile(man='3')
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 2)
    self.assertEqual(result.fu, 60)
    self.assertEqual(len(result.yaku), 1)
def test_is_honroto(self):
    """Honroutou: condition checks plus values for regular and chiitoitsu shapes."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='111999', man='111', honors='11222')
    self.assertTrue(self.config.honroto.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_34_array(pin='11', honors='22334466', man='1199')
    self.assertTrue(self.config.honroto.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_136_array(sou='111999', man='111', honors='11222')
    win_tile = self._string_to_136_tile(honors='2')
    melds = [self._make_meld(Meld.PON, sou='111')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 4)
    self.assertEqual(result.fu, 50)
    self.assertEqual(len(result.yaku), 2)
    # honroutou combined with chiitoitsu
    tiles = self._string_to_136_array(pin='11', honors='22334466', man='1199')
    win_tile = self._string_to_136_tile(man='1')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.fu, 25)
    self.assertEqual(result.han, 4)
def test_is_sanankou(self):
    """Sanankou: concealed-triplet counting rules (open melds and ron/tsumo)."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='111444', man='333', pin='44555')
    win_tile = self._string_to_136_tile(sou='4')
    melds = [
        self._make_meld(Meld.PON, sou='444'),
        self._make_meld(Meld.PON, sou='111')
    ]
    self.assertFalse(self.config.sanankou.is_condition_met(self._hand(tiles), win_tile, melds, False))
    melds = [
        self._make_meld(Meld.PON, sou='111')
    ]
    # the third triplet completed by ron does not count; by tsumo it does
    self.assertFalse(self.config.sanankou.is_condition_met(self._hand(tiles), win_tile, melds, False))
    self.assertTrue(self.config.sanankou.is_condition_met(self._hand(tiles), win_tile, melds, True))
    tiles = self._string_to_34_array(pin='444789999', honors='22333')
    win_tile = self._string_to_136_tile(pin='9')
    self.assertTrue(self.config.sanankou.is_condition_met(self._hand(tiles), win_tile, [], False))
    melds = [
        self._make_meld(Meld.CHI, pin='456')
    ]
    tiles = self._string_to_34_array(pin='222456666777', honors='77')
    win_tile = self._string_to_136_tile(pin='6')
    self.assertFalse(self.config.sanankou.is_condition_met(self._hand(tiles), win_tile, melds, False))
    tiles = self._string_to_136_array(sou='123444', man='333', pin='44555')
    melds = [
        self._make_meld(Meld.CHI, sou='123')
    ]
    win_tile = self._string_to_136_tile(pin='5')
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds, config=self._make_hand_config(is_tsumo=True))
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 2)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
def test_is_shosangen(self):
    """Shousangen: condition check plus 4 han / 50 fu with its yakuhai."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='123', man='345', honors='55666777')
    self.assertTrue(self.config.shosangen.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_136_array(sou='123', man='345', honors='55666777')
    win_tile = self._string_to_136_tile(honors='7')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 4)
    self.assertEqual(result.fu, 50)
    self.assertEqual(len(result.yaku), 3)
def test_is_chanta(self):
    """Chanta: 2 han closed, downgraded to 1 han when open."""
    hand = HandCalculator()
    tiles = self._string_to_34_array(sou='123', man='123789', honors='22333')
    self.assertTrue(self.config.chanta.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_34_array(sou='111', man='111999', honors='22333')
    self.assertFalse(self.config.chanta.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_34_array(sou='111999', man='111999', pin='11999')
    self.assertFalse(self.config.chanta.is_condition_met(self._hand(tiles)))
    tiles = self._string_to_136_array(sou='123', man='123789', honors='22333')
    win_tile = self._string_to_136_tile(honors='3')
    result = hand.estimate_hand_value(tiles, win_tile)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 2)
    self.assertEqual(result.fu, 40)
    self.assertEqual(len(result.yaku), 1)
    melds = [self._make_meld(Meld.CHI, sou='123')]
    result = hand.estimate_hand_value(tiles, win_tile, melds=melds)
    self.assertEqual(result.error, None)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 30)
    self.assertEqual(len(result.yaku), 1)
def test_is_junchan(self):
    """Junchan (terminal in every set): detection plus closed/open valuation."""
    calculator = HandCalculator()

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    # qualifying hand
    self.assertTrue(self.config.junchan.is_condition_met(
        self._hand(self._string_to_34_array(sou='789', man='123789', pin='12399'))))
    # hands that must not qualify
    self.assertFalse(self.config.junchan.is_condition_met(
        self._hand(self._string_to_34_array(sou='111', man='111999', honors='22333'))))
    self.assertFalse(self.config.junchan.is_condition_met(
        self._hand(self._string_to_34_array(sou='111999', man='111999', pin='11999'))))

    tiles = self._string_to_136_array(sou='789', man='123789', pin='12399')
    win_tile = self._string_to_136_tile(man='2')
    # closed hand
    assert_result(calculator.estimate_hand_value(tiles, win_tile), 3, 40, 1)
    # opening the hand costs one han
    melds = [self._make_meld(Meld.CHI, sou='789')]
    assert_result(calculator.estimate_hand_value(tiles, win_tile, melds=melds), 2, 30, 1)
def test_is_honitsu(self):
    """Honitsu (half flush): detection plus closed/open valuation."""
    calculator = HandCalculator()

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    # one suit plus honors qualifies
    self.assertTrue(self.config.honitsu.is_condition_met(
        self._hand(self._string_to_34_array(man='123456789', honors='11122'))))
    # a second suit disqualifies
    self.assertFalse(self.config.honitsu.is_condition_met(
        self._hand(self._string_to_34_array(man='123456789', pin='123', honors='22'))))
    # a single suit with no honors is not honitsu
    self.assertFalse(self.config.honitsu.is_condition_met(
        self._hand(self._string_to_34_array(man='12345666778899'))))

    tiles = self._string_to_136_array(man='123455667', honors='11122')
    win_tile = self._string_to_136_tile(honors='2')
    # closed hand
    assert_result(calculator.estimate_hand_value(tiles, win_tile), 3, 40, 1)
    # opening the hand costs one han
    melds = [self._make_meld(Meld.CHI, man='123')]
    assert_result(calculator.estimate_hand_value(tiles, win_tile, melds=melds), 2, 30, 1)
def test_is_chinitsu(self):
    """Chinitsu (full flush): detection plus closed/open valuation."""
    calculator = HandCalculator()

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    # pure single-suit hand qualifies
    self.assertTrue(self.config.chinitsu.is_condition_met(
        self._hand(self._string_to_34_array(man='12345666778899'))))
    # an honor pair disqualifies
    self.assertFalse(self.config.chinitsu.is_condition_met(
        self._hand(self._string_to_34_array(man='123456778899', honors='22'))))

    tiles = self._string_to_136_array(man='11234567677889')
    win_tile = self._string_to_136_tile(man='1')
    # closed hand
    assert_result(calculator.estimate_hand_value(tiles, win_tile), 6, 40, 1)
    # opening the hand costs one han
    melds = [self._make_meld(Meld.CHI, man='678')]
    assert_result(calculator.estimate_hand_value(tiles, win_tile, melds=melds), 5, 30, 1)
def test_is_ittsu(self):
    """Ittsu (pure straight 1-9): detection plus closed/open valuation."""
    calculator = HandCalculator()

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    # straight present in one suit
    self.assertTrue(self.config.ittsu.is_condition_met(
        self._hand(self._string_to_34_array(man='123456789', sou='123', honors='22'))))
    # straight present even with duplicated sequences
    self.assertTrue(self.config.ittsu.is_condition_met(
        self._hand(self._string_to_34_array(man='112233456789', honors='22'))))
    # no complete 123/456/789 partition
    self.assertFalse(self.config.ittsu.is_condition_met(
        self._hand(self._string_to_34_array(man='122334567789', honors='11'))))

    tiles = self._string_to_136_array(man='123456789', sou='123', honors='22')
    win_tile = self._string_to_136_tile(sou='3')
    # closed hand
    assert_result(calculator.estimate_hand_value(tiles, win_tile), 2, 40, 1)
    # opening the hand costs one han
    melds = [self._make_meld(Meld.CHI, man='123')]
    assert_result(calculator.estimate_hand_value(tiles, win_tile, melds=melds), 1, 30, 1)
def test_is_haku(self):
    """Haku (white dragon) triplet: one yakuhai han."""
    calculator = HandCalculator()

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    tiles_34 = self._string_to_34_array(sou='234567', man='23422', honors='555')
    self.assertTrue(self.config.haku.is_condition_met(self._hand(tiles_34)))

    tiles = self._string_to_136_array(sou='234567', man='23422', honors='555')
    win_tile = self._string_to_136_tile(honors='5')
    assert_result(
        calculator.estimate_hand_value(
            tiles, win_tile,
            config=self._make_hand_config(is_tsumo=False, is_riichi=False)),
        1, 40, 1)
def test_is_hatsu(self):
    """Hatsu (green dragon) triplet: one yakuhai han."""
    calculator = HandCalculator()

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    tiles_34 = self._string_to_34_array(sou='234567', man='23422', honors='666')
    self.assertTrue(self.config.hatsu.is_condition_met(self._hand(tiles_34)))

    tiles = self._string_to_136_array(sou='234567', man='23422', honors='666')
    win_tile = self._string_to_136_tile(honors='6')
    assert_result(
        calculator.estimate_hand_value(
            tiles, win_tile,
            config=self._make_hand_config(is_tsumo=False, is_riichi=False)),
        1, 40, 1)
def test_is_chun(self):
    """Chun (red dragon) triplet: one yakuhai han."""
    calculator = HandCalculator()

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    tiles_34 = self._string_to_34_array(sou='234567', man='23422', honors='777')
    self.assertTrue(self.config.chun.is_condition_met(self._hand(tiles_34)))

    tiles = self._string_to_136_array(sou='234567', man='23422', honors='777')
    win_tile = self._string_to_136_tile(honors='7')
    assert_result(
        calculator.estimate_hand_value(
            tiles, win_tile,
            config=self._make_hand_config(is_tsumo=False, is_riichi=False)),
        1, 40, 1)
def test_is_east(self):
    """East triplet: seat-wind yakuhai, counted twice when it is also the round wind."""
    calculator = HandCalculator()
    player_wind, round_wind = EAST, WEST

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    tiles_34 = self._string_to_34_array(sou='234567', man='23422', honors='111')
    self.assertTrue(self.config.east.is_condition_met(self._hand(tiles_34), player_wind, round_wind))

    tiles = self._string_to_136_array(sou='234567', man='23422', honors='111')
    win_tile = self._string_to_136_tile(honors='1')

    def value_hand():
        # reads player_wind/round_wind from the enclosing scope at call time
        return calculator.estimate_hand_value(
            tiles,
            win_tile,
            config=self._make_hand_config(
                is_tsumo=False,
                is_riichi=False,
                player_wind=player_wind,
                round_wind=round_wind,
            ))

    # seat wind only
    assert_result(value_hand(), 1, 40, 1)
    # seat wind and round wind coincide -> two yaku
    round_wind = EAST
    assert_result(value_hand(), 2, 40, 2)
def test_is_south(self):
    """South triplet: seat-wind yakuhai, counted twice when it is also the round wind."""
    calculator = HandCalculator()
    player_wind, round_wind = SOUTH, EAST

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    tiles_34 = self._string_to_34_array(sou='234567', man='23422', honors='222')
    self.assertTrue(self.config.south.is_condition_met(self._hand(tiles_34), player_wind, round_wind))

    tiles = self._string_to_136_array(sou='234567', man='23422', honors='222')
    win_tile = self._string_to_136_tile(honors='2')

    def value_hand():
        # reads player_wind/round_wind from the enclosing scope at call time
        return calculator.estimate_hand_value(
            tiles,
            win_tile,
            config=self._make_hand_config(
                is_tsumo=False,
                is_riichi=False,
                player_wind=player_wind,
                round_wind=round_wind,
            ))

    # seat wind only
    assert_result(value_hand(), 1, 40, 1)
    # seat wind and round wind coincide -> two yaku
    round_wind = SOUTH
    assert_result(value_hand(), 2, 40, 2)
def test_is_west(self):
    """West triplet: seat-wind yakuhai, counted twice when it is also the round wind."""
    calculator = HandCalculator()
    player_wind, round_wind = WEST, EAST

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    tiles_34 = self._string_to_34_array(sou='234567', man='23422', honors='333')
    self.assertTrue(self.config.west.is_condition_met(self._hand(tiles_34), player_wind, round_wind))

    tiles = self._string_to_136_array(sou='234567', man='23422', honors='333')
    win_tile = self._string_to_136_tile(honors='3')

    def value_hand():
        # reads player_wind/round_wind from the enclosing scope at call time
        return calculator.estimate_hand_value(
            tiles,
            win_tile,
            config=self._make_hand_config(
                is_tsumo=False,
                is_riichi=False,
                player_wind=player_wind,
                round_wind=round_wind,
            ))

    # seat wind only
    assert_result(value_hand(), 1, 40, 1)
    # seat wind and round wind coincide -> two yaku
    round_wind = WEST
    assert_result(value_hand(), 2, 40, 2)
def test_is_north(self):
    """North triplet: seat-wind yakuhai, counted twice when it is also the round wind."""
    calculator = HandCalculator()
    player_wind, round_wind = NORTH, EAST

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    tiles_34 = self._string_to_34_array(sou='234567', man='23422', honors='444')
    self.assertTrue(self.config.north.is_condition_met(self._hand(tiles_34), player_wind, round_wind))

    tiles = self._string_to_136_array(sou='234567', man='23422', honors='444')
    win_tile = self._string_to_136_tile(honors='4')

    def value_hand():
        # reads player_wind/round_wind from the enclosing scope at call time
        return calculator.estimate_hand_value(
            tiles,
            win_tile,
            config=self._make_hand_config(
                is_tsumo=False,
                is_riichi=False,
                player_wind=player_wind,
                round_wind=round_wind,
            ))

    # seat wind only
    assert_result(value_hand(), 1, 40, 1)
    # seat wind and round wind coincide -> two yaku
    round_wind = NORTH
    assert_result(value_hand(), 2, 40, 2)
def test_dora_in_hand(self):
    """Dora raise the han count but never supply the mandatory yaku."""
    calculator = HandCalculator()

    def assert_result(result, han, fu, yaku_count):
        self.assertIsNone(result.error)
        self.assertEqual(result.han, han)
        self.assertEqual(result.fu, fu)
        self.assertEqual(len(result.yaku), yaku_count)

    # hand without yaku, but with dora, must be rejected as invalid
    tiles = self._string_to_136_array(sou='345678', man='456789', honors='55')
    win_tile = self._string_to_136_tile(sou='5')
    dora_indicators = [self._string_to_136_tile(sou='5')]
    melds = [self._make_meld(Meld.CHI, sou='678')]
    result = calculator.estimate_hand_value(tiles, win_tile, dora_indicators=dora_indicators, melds=melds)
    self.assertIsNotNone(result.error)

    # dora stacked on top of a real yaku
    tiles = self._string_to_136_array(sou='123456', man='123456', pin='33')
    win_tile = self._string_to_136_tile(man='6')
    dora_indicators = [self._string_to_136_tile(pin='2')]
    assert_result(calculator.estimate_hand_value(tiles, win_tile, dora_indicators=dora_indicators), 3, 30, 2)

    tiles = self._string_to_136_array(man='22456678', pin='123678')
    win_tile = self._string_to_136_tile(man='2')
    dora_indicators = [self._string_to_136_tile(man='1'), self._string_to_136_tile(pin='2')]
    assert_result(
        calculator.estimate_hand_value(
            tiles, win_tile, dora_indicators=dora_indicators,
            config=self._make_hand_config(is_tsumo=True)),
        4, 30, 2)

    # the same indicator twice doubles the dora count
    tiles = self._string_to_136_array(man='678', pin='34577', sou='123345')
    win_tile = self._string_to_136_tile(pin='7')
    dora_indicators = [self._string_to_136_tile(sou='4'), self._string_to_136_tile(sou='4')]
    assert_result(
        calculator.estimate_hand_value(
            tiles, win_tile, dora_indicators=dora_indicators,
            config=self._make_hand_config(is_tsumo=True)),
        3, 30, 2)

    # doubled indicator pointing at honor tiles
    tiles = self._string_to_136_array(man='678', pin='345', sou='123345', honors='66')
    win_tile = self._string_to_136_tile(pin='5')
    dora_indicators = [self._string_to_136_tile(honors='5'), self._string_to_136_tile(honors='5')]
    assert_result(
        calculator.estimate_hand_value(
            tiles, win_tile, dora_indicators=dora_indicators,
            config=self._make_hand_config(is_riichi=True)),
        5, 40, 2)

    # doubled indicator plus a red five
    tiles = self._string_to_136_array(sou='12346', man='123678', pin='44')
    win_tile = self._string_to_136_tile(pin='4')
    tiles.append(FIVE_RED_SOU)
    dora_indicators = [self._string_to_136_tile(pin='2'), self._string_to_136_tile(pin='2')]
    assert_result(
        calculator.estimate_hand_value(
            tiles, win_tile, dora_indicators=dora_indicators,
            config=self._make_hand_config(is_tsumo=True, has_aka_dora=True)),
        2, 30, 2)

    # dora indicator pointing into a closed kan
    tiles = self._string_to_136_array(man='777', pin='34577', sou='123345')
    win_tile = self._string_to_136_tile(pin='7')
    melds = [self._make_meld(Meld.KAN, is_open=False, man='7777')]
    dora_indicators = [self._string_to_136_tile(man='6')]
    assert_result(
        calculator.estimate_hand_value(
            tiles, win_tile, dora_indicators=dora_indicators, melds=melds,
            config=self._make_hand_config(is_tsumo=True)),
        5, 40, 2)
def test_is_agari_and_closed_kan(self):
    """Regression: closed kan tiles must be counted by the agari check.

    There was a bug where the tiles locked in a closed kan were not counted,
    so an incomplete hand was wrongly treated as a winning one.
    """
    calculator = HandCalculator()
    tiles = self._string_to_136_array(man='45666777', pin='111', honors='222')
    win_tile = self._string_to_136_tile(man='4')
    melds = [
        self._make_meld(Meld.PON, pin='111'),
        self._make_meld(Meld.KAN, man='6666', is_open=False),
        self._make_meld(Meld.PON, man='777'),
    ]
    result = calculator.estimate_hand_value(tiles, win_tile, melds)
    # the hand is not complete, so an error is the correct answer
    self.assertIsNotNone(result.error)
def test_kazoe_settings(self):
    """kazoe_limit decides how a 13+ han (kazoe) hand is paid out."""
    calculator = HandCalculator()
    tiles = self._string_to_136_array(man='22244466677788')
    win_tile = self._string_to_136_tile(man='7')
    melds = [
        self._make_meld(Meld.KAN, man='2222', is_open=False),
    ]
    # four identical indicators -> a mountain of dora on top of riichi
    dora_indicators = [self._string_to_136_tile(man='1') for _ in range(4)]

    expectations = [
        (HandConfig.KAZOE_LIMITED, 32000),    # capped at yakuman
        (HandConfig.KAZOE_SANBAIMAN, 24000),  # capped at sanbaiman
        (HandConfig.KAZOE_NO_LIMIT, 64000),   # counted without a cap
    ]
    for kazoe_limit, expected_cost in expectations:
        config = HandConfig(is_riichi=True, options=OptionalRules(kazoe_limit=kazoe_limit))
        result = calculator.estimate_hand_value(tiles, win_tile, melds, dora_indicators, config)
        self.assertEqual(result.han, 28)
        self.assertEqual(result.cost['main'], expected_cost)
def test_open_hand_without_additional_fu(self):
    """With fu_for_open_pinfu disabled, an open pinfu-shaped hand stays at 20 fu."""
    calculator = HandCalculator()
    tiles = self._string_to_136_array(sou='234678', man='234567', pin='22')
    win_tile = self._string_to_136_tile(sou='6')
    melds = [self._make_meld(Meld.CHI, sou='234')]
    config = HandConfig(options=OptionalRules(has_open_tanyao=True, fu_for_open_pinfu=False))
    result = calculator.estimate_hand_value(tiles, win_tile, melds, config=config)
    self.assertEqual(result.han, 1)
    self.assertEqual(result.fu, 20)
    self.assertEqual(result.cost['main'], 700)
def test_aka_dora(self):
    """Each red five ('r' in the notation) adds one han when aka dora are enabled."""
    calculator = HandCalculator()
    win_tile = TilesConverter.string_to_136_array(man='9')[0]
    hand_config = HandConfig(
        is_tsumo=True,
        options=OptionalRules(has_aka_dora=True)
    )

    def assert_han(tiles, expected_han):
        calculation = calculator.estimate_hand_value(tiles, win_tile, config=hand_config)
        self.assertIsNone(calculation.error)
        self.assertEqual(calculation.han, expected_han)

    # three red old style, but not that useful
    assert_han(TilesConverter.string_to_136_array(sou='345', pin='456', man='12355599', has_aka_dora=False), 4)
    # zero red
    assert_han(TilesConverter.string_to_136_array(sou='345', pin='456', man='12355599', has_aka_dora=True), 1)
    # one red
    assert_han(TilesConverter.string_to_136_array(sou='34r', pin='456', man='12355599', has_aka_dora=True), 2)
    # two red
    assert_han(TilesConverter.string_to_136_array(sou='34r', pin='4r6', man='12355599', has_aka_dora=True), 3)
    # three red
    assert_han(TilesConverter.string_to_136_array(sou='34r', pin='4r6', man='123r5599', has_aka_dora=True), 4)
    # four red
    assert_han(TilesConverter.string_to_136_array(sou='34r', pin='4r6', man='123rr599', has_aka_dora=True), 5)
    # five+ red (technically not legal in mahjong but not the fault of evaluator, really)
    assert_han(TilesConverter.string_to_136_array(sou='34r', pin='4r6', man='123rrr99', has_aka_dora=True), 6)
|
StarcoderdataPython
|
11221517
|
<filename>saka/serializers.py
from rest_framework import serializers
from . import models
from django.utils import timezone
class Cash_serializers(serializers.ModelSerializer):
    """Serializer exposing every field of the Cash model.

    ``update`` preserves the identifying fields (laboratoryid, analyteid,
    department) so a PUT/PATCH cannot reassign a Cash row to a different
    laboratory, analyte or department.
    """

    class Meta:
        model = models.Cash
        fields = '__all__'
        # The intent below is enforced manually in update() instead:
        # read_only_fields = ("laboratoryid", 'analyteid', 'department')

    def update(self, instance, validated_data):
        # Snapshot the identifier fields before the generic update.
        old_laboratoryid = instance.laboratoryid
        # BUG FIX: original read `instance.old_analyteid`, an attribute that
        # does not exist on the model instance (AttributeError at runtime);
        # the intended source is `instance.analyteid`.
        old_analyteid = instance.analyteid
        old_department = instance.department
        obj = super().update(instance, validated_data)
        # Restore the protected fields regardless of the submitted payload.
        obj.laboratoryid = old_laboratoryid
        obj.analyteid = old_analyteid
        obj.department = old_department
        obj.save()
        return obj
class Analyte_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Analyte model."""

    class Meta:
        model = models.Analyte
        fields = '__all__'
class Device_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Device model."""

    class Meta:
        model = models.Device
        fields = '__all__'
class Tmethod_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Tmethod model.

    NOTE(review): a second, identical ``Tmethod_serializers`` is defined
    further down this module and shadows this one at import time — one of
    the two definitions should be removed.
    """

    class Meta:
        model = models.Tmethod
        fields = '__all__'
class Devicecompany_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Devicecompany model."""

    class Meta:
        model = models.Devicecompany
        fields = '__all__'
class Facttable_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Facttable model."""

    class Meta:
        model = models.Facttable
        fields = '__all__'
class Info_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Info model."""

    class Meta:
        model = models.Info
        fields = '__all__'
class InfoDevice_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the InfoDevice model."""

    class Meta:
        model = models.InfoDevice
        fields = '__all__'
class InfoTD_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the InfoTD model."""

    class Meta:
        model = models.InfoTD
        fields = '__all__'
class InfoTmethod_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the InfoTmethod model."""

    class Meta:
        model = models.InfoTmethod
        fields = '__all__'
class Laboratory_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Laboratory model."""

    class Meta:
        model = models.Laboratory
        fields = '__all__'
class Latnumber_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Latnumber model."""

    class Meta:
        model = models.Latnumber
        fields = '__all__'
class Loc_serializers(serializers.ModelSerializer):
    """ModelSerializer for the Loc model, restricted to an explicit
    subset of fields (unlike the other serializers in this module)."""

    class Meta:
        model = models.Loc
        fields = ['form','labid','loc']
# NOTE(review): duplicate definition — this redefines (and shadows) the
# Tmethod_serializers class declared earlier in this module. Remove one.
class Tmethod_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Tmethod model."""

    class Meta:
        model = models.Tmethod
        fields = '__all__'
class Singup_serializers(serializers.ModelSerializer):
    """ModelSerializer exposing every field of the Singup model.

    NOTE(review): "Singup" is presumably a typo for "Signup", but the name
    matches the model and is part of the public API, so it is kept.
    """

    class Meta:
        model = models.Singup
        fields = '__all__'
|
StarcoderdataPython
|
335976
|
<gh_stars>0
# from emuBot import
from definintions import *
import time
class orderSet(list):
    """A list used as an insertion-ordered set, supporting ``-`` (difference).

    ``a - b`` removes one occurrence of each element of *b* from a copy of
    *a*, preserving order.
    """

    def __sub__(self, y):
        """Return a new orderSet: self minus the elements of y.

        BUG FIX: the original implementation did ``x = self`` (an alias,
        not a copy) and removed items from it, silently mutating the left
        operand — including adjacency lists stored elsewhere. We now build
        the result on a copy, keeping the element-by-element removal
        semantics of the original.
        """
        result = orderSet(self)
        for item in y:
            if item in result:
                result.remove(item)
        return result
class Reader(object):
    """Base maze solver: load a maze, build an adjacency list over walkable
    cells, find the goal with BFS and translate the result into robot
    movement commands.

    Grid encoding (populated by subclasses in process_file):
        0 = walkable cell, 1 = wall, 2 = goal cell.
    Coordinates are (row, col) tuples; rows grow "south", cols grow "east".
    """

    def __init__(self, filename):
        # Initialises the file to be read and processed
        # Terminates class instance if file cannot be found
        # NOTE(review): on IOError only a message is printed; self.file is
        # left unset and later attribute access will raise AttributeError.
        self.filename = filename
        try:
            self.file = open(filename)
        except IOError:
            print('Given file not found.')
    '''
    def __del__(self):
        # Deletes the instance and prints exit message
        print('Read Object was removed with name ' + self.filename)
    '''
    def process_file(self):
        # Subclasses must override this and populate self.terrain.
        self.terrain = None
        raise NotImplementedError
    def set_adj_list(self):
        # Creates an adjacency list to allow the BFS to work
        if self.terrain is None:
            raise NotImplementedError
        grid = self.terrain
        print()
        new = {}
        for x in range(len(grid)):
            for y in range(len(grid[x])):
                # only walkable (0) and goal (2) cells become graph nodes
                if grid[x][y] == 0 or grid[x][y]==2:
                    c = []
                    def make_c(a, b):
                        # Append (a, b) to c when it is walkable/goal.
                        # Negative indices are rejected explicitly so
                        # Python's negative indexing cannot wrap around.
                        if a < 0 or b < 0:
                            return False
                        try:
                            if grid[a][b] == 0 or grid[a][b]==2:
                                c.append((a,b))
                        # NOTE(review): bare except — IndexError (off the
                        # bottom/right edge) is the intended case.
                        except:
                            return False
                    # the four orthogonal neighbours
                    make_c(x+1, y)
                    make_c(x-1, y)
                    make_c(x, y+1)
                    make_c(x, y-1)
                    new[(x,y)] = orderSet(c)
        for x in new: print(x)
        print()
        self.adj_list = new
    def bfs(self, start=(0,0)):
        # Finds a path through the maze
        # NOTE(review): if no goal cell (2) is reachable, self.path is never
        # assigned and a later relay_action() call fails with AttributeError.
        # Also note: "path" here is the full BFS visit order, not a shortest
        # walk — relay_action compensates by emitting backward moves.
        grid = self.terrain
        visited, queue = orderSet(), [start]
        while queue:
            #print('a' + str(queue))
            vertex = queue.pop(0)
            if vertex not in visited:
                visited.append(vertex)
                if grid[vertex[0]][vertex[1]] == 2:
                    # goal reached: record the visit order and stop
                    self.path = visited
                    return
                # enqueue unvisited neighbours of this vertex
                queue.extend(self.adj_list[vertex] - visited)
    def starting_direction(self, a, b):
        # Initialises a direction to begin the relay_action function
        # (row, col) semantics: larger col = further east, larger row = further south.
        if a[1] < b[1]:
            return 'east'
        elif a[0] < b[0]:
            return 'south'
        elif a[1] > b[1]:
            return 'west'
        elif a[0] > b[0]:
            return 'north'
    def relay_action(self):
        # Creates a list of directions to traverse the maze
        print(self.path)
        actions = []
        direction = self.starting_direction(self.path[0], self.path[1])
        for x in range(len(self.path)-1):
            a, b = self.path[x], self.path[x+1]
            # columns of the current (a) and next (b) cell
            xval1 = a[1]
            xval2 = b[1]
            # NOTE(review): the row values are taken in the opposite order
            # (yval1 from b, yval2 from a) to the column values above —
            # the branches below are written against that convention.
            yval1 = b[0]
            yval2 = a[0]
            # For each heading: move along the axis we face, or turn 90
            # degrees (plus one forward step) when the step is lateral.
            if direction == "east":
                if xval1 > xval2:
                    actions.append("backward")
                elif xval2 > xval1:
                    actions.append("forward")
                elif yval1 > yval2:
                    actions.append("right")
                    actions.append("forward")
                    direction = "south"
                elif yval2 > yval1:
                    actions.append("left")
                    actions.append("forward")
                    direction = "north"
            elif direction == "south":
                if yval1 > yval2:
                    actions.append("forward")
                elif yval2 > yval1:
                    actions.append("backward")
                elif xval1 > xval2:
                    actions.append("right")
                    actions.append("forward")
                    direction = "west"
                elif xval2 > xval1:
                    actions.append("left")
                    actions.append("forward")
                    direction = "east"
            elif direction == "north":
                if yval1 < yval2:
                    actions.append("forward")
                elif yval2 < yval1:
                    actions.append("backward")
                elif xval1 < xval2:
                    actions.append("right")
                    actions.append("forward")
                    direction = "east"
                elif xval2 < xval1:
                    actions.append("left")
                    actions.append("forward")
                    direction = "west"
            elif direction == "west":
                if xval1 < xval2:
                    actions.append("backward")
                elif xval2 < xval1:
                    actions.append("forward")
                elif yval1 < yval2:
                    actions.append("right")
                    actions.append("forward")
                    direction = "north"
                elif yval2 < yval1:
                    actions.append("left")
                    actions.append("forward")
                    direction = "south"
        self.actions = actions
        print(self.actions)
        print(len(actions),len(self.path))
    def act(self):
        # Communicates with servos to traverse path in real life
        # (LWheels/RWheels come from the `definintions` module.)
        print(self.actions)
        for action in self.actions:
            # start the wheels for this action...
            if action == 'forward':
                LWheels(200)
                RWheels(-200)
            elif action == 'left':
                RWheels(-530)
                LWheels(0)
            elif action == 'right':
                LWheels(530)
                RWheels(0)
            elif action == 'backward':
                LWheels(-200)
                RWheels(200)
            # ...let it run for a fixed 3 s, then stop both wheels
            time.sleep(3)
            LWheels(0)
            RWheels(0)
class ImageReader(Reader):
    """Reader that derives the maze grid from an RGB image.

    Colour mapping in generate_terrain (each channel matched +/-10):
        black -> wall (1), white -> open (0), green -> goal (2),
        blue  -> open start cell (0).
    """

    def __init__(self, filename):
        # NOTE(review): does not call Reader.__init__, so self.filename and
        # self.file are never set. scipy.misc.imread was removed in modern
        # SciPy versions (imageio.imread is the replacement) — presumably an
        # old SciPy is pinned; confirm.
        from scipy.misc import imread
        self.img = imread(filename)
    '''
    def process_file(self):
        # Processes the image into a np.ndarray
        try:
            from scipy.misc import imread
        except ImportError:
            print('Module not installed on system.')
            self.__del__()
        self.terrain = imread(self.file)
    '''
    def look_rgb(self, val):
        # Returns a tuple of the co-ordinates of all pixels that
        # matches the rgb value given. Returns False if not found
        # NOTE(review): looks dead/broken — it calls self.process_file(),
        # which this class does not implement (the base version raises
        # NotImplementedError), and j is never reset per row so the column
        # index drifts. Verify before relying on it.
        i = 0
        j = 0
        matches = []
        for row in self.process_file():
            for pixel in row:
                if list(pixel) == val:
                    matches.append((i, j))
                j += 1
            i += 1
        if matches:
            return matches
        return False
    def generate_terrain(self):
        # Build self.terrain from self.img and return the start position
        # (the last blue pixel seen), or an empty tuple if none was found.
        terrain = []
        start = tuple()
        i = j = 0
        for row in self.img:
            terrain.append([])
            for pixel in row:
                # print(pixel)
                if ImageReader.look_colour(0, pixel):
                    # print('black')
                    terrain[len(terrain)-1].append(1)
                elif ImageReader.look_colour(255, pixel):
                    # print('white')
                    terrain[len(terrain)-1].append(0)
                elif ImageReader.look_colour2(255, 1, pixel):
                    # print('Green')
                    terrain[len(terrain)-1].append(2)
                elif ImageReader.look_colour2(255, 2, pixel):
                    # print('Blue')
                    terrain[len(terrain)-1].append(0)
                    # NOTE(review): j is never reset per row, so it keeps
                    # accumulating across the whole image — this column
                    # index is only correct on the first row. Verify.
                    start = (i,j)
                j += 1
            i += 1
        self.terrain = terrain
        return start
    @staticmethod
    def look_colour(colour, pixel):
        # True when every channel of pixel lies within +/-10 of colour.
        r = list(range(colour - 10, colour + 11))
        for p in pixel:
            if p not in r:
                return False
        return True
    @staticmethod
    def look_colour2(colour, index, pixel):
        # True when the single channel at `index` lies within +/-10 of colour.
        r = list(range(colour - 10, colour + 11))
        return pixel[index] in r
class FileReader(Reader):
    """Reader that parses a text maze: one row per line, one digit per cell."""

    def process_file(self):
        """Parse self.file into a 2-D grid of ints and echo it."""
        grid = []
        for line in self.file:
            grid.append([int(ch) for ch in line.strip()])
        self.terrain = grid
        print(self.terrain)
class TraverseFile:
    """Drive the full pipeline for a text maze: parse, solve, confirm, act."""

    def __init__(self, m, start):
        reader = FileReader(m + '.txt')
        reader.process_file()
        reader.set_adj_list()
        reader.bfs(start=start)
        reader.relay_action()
        # ask the operator before moving real motors
        answer = input('Are you sure you want to proceed? ')
        if answer == 'yes':
            reader.act()
if __name__ == '__main__':
    # x = TraverseFile('maze', (0, 1))
    # Demo run: solve the bundled image maze from cell (10, 0) and print
    # the BFS visit order (no motor actions are issued here).
    y = ImageReader('Maze 1.png')
    y.generate_terrain()
    y.set_adj_list()
    y.bfs(start=(10, 0))
    print(y.path)
    '''x = ImageReader('13x13.jpg')
    s = x.generate_terrain()
    #for i in x.terrain: print(i)
    x.set_adj_list()
    #print(x.adj_list)
    for i in x.adj_list: print(i)
    x.bfs(start=s)
    x.relay_action()
    x.act()
    '''
|
StarcoderdataPython
|
1913054
|
<gh_stars>0
# coding: utf-8
import numpy as np
# Sample sin/cos over [0, 6) in steps of 0.1 (60 points).
x = np.arange(0, 6, 0.1)
y1 = np.sin(x)
y2 = np.cos(x)
def print_array(data):
    """Print *data* rounded to three decimals, normalising negative zero.

    Values whose three-decimal rounding is zero are emitted as ``0.0``
    instead of ``-0.0``. The rounded list is also returned so callers can
    reuse it (backward compatible: the original returned None implicitly
    and no caller uses the return value).
    """
    rounded = []
    for value in data:
        magnitude = float("%.3f" % abs(value))
        if magnitude == 0:
            # avoid printing -0.0 for tiny negative values
            rounded.append(magnitude)
        else:
            rounded.append(float("%.3f" % value))
    print(rounded)
    return rounded
# Dump each series preceded by its length (x, sin(x), cos(x)).
print(len(x))
print_array(list(x))
print(len(y1))
print_array(list(y1))
print(len(y2))
print_array(list(y2))
|
StarcoderdataPython
|
292967
|
<filename>data/external/repositories_2to3/120243/tradeshift-text-classification-master/src/ensemble.py<gh_stars>0
import pandas as pd  # NOTE(review): unused here; kept in case later pipeline stages rely on it
import subprocess, sys, os, time

start = time.time()

# Output directory for the averaged submissions, passed straight to each script.
sub_dir = sys.argv[1]

# All seven averaging scripts are invoked identically — loop instead of the
# original copy-pasted call per script (same commands, same order).
# NOTE(review): the command goes through the shell (shell=True) with an
# argv-supplied path; fine for a trusted pipeline, but do not expose this
# to untrusted input.
for script in ('ave41', 'ave58', 'ave91', 'ave92', 'ave93', 'ave98', 'ave99'):
    cmd = 'pypy src/ensemble/' + script + '.py ' + sub_dir
    subprocess.call(cmd, shell=True)
|
StarcoderdataPython
|
4975471
|
#!/usr/bin/env python
import os
import pytest
import gitkup
def test_setup_logging():
    """The bundled logging.json configuration loads successfully."""
    result = gitkup.setup_logging()
    assert result is True
def test_setup_logging_missing():
    """A missing logging config makes the program exit with an instructive message."""
    with pytest.raises(SystemExit) as exc_info:
        gitkup.setup_logging(filepath="logging.test")
    message = str(exc_info.value)
    assert 'Create logging.json config file and restart.' in message
|
StarcoderdataPython
|
9731128
|
from abc import abstractmethod
from typing import TypeVar, Union, Tuple
from ..sheet import IRow, ISheet
from ... import ex
# import ex.relational
class IRelationalRow(IRow):
    """A sheet row that supports relational (cross-sheet) data access."""

    @property
    @abstractmethod
    def sheet(self) -> 'IRelationalSheet':
        # The relational sheet this row belongs to.
        pass
    @property
    @abstractmethod
    def default_value(self) -> object:
        # Fallback value for columns without data — confirm exact semantics
        # against concrete implementations.
        pass
    @abstractmethod
    def __getitem__(self, item: str) -> object:
        # Column access by column name.
        pass
    @abstractmethod
    def get_raw(self, column_name: str, **kwargs) -> object:
        # Column access by name; presumably returns the unconverted cell
        # value (no relational resolution) — confirm with implementations.
        pass
# Row type produced by a relational sheet.
T = TypeVar('T', bound=IRelationalRow)
class IRelationalSheet(ISheet[T]):
    """A sheet whose rows support relational lookups across sheets."""

    @property
    @abstractmethod
    def header(self) -> 'ex.relational.RelationalHeader':
        # Header describing the sheet's columns/relations.
        pass
    @property
    @abstractmethod
    def collection(self) -> 'ex.relational.RelationalExCollection':
        # The collection this sheet belongs to.
        pass
    @abstractmethod
    def __getitem__(self,
                    item: Union[int, Tuple[int, str]]) -> \
            Union[T, IRelationalRow, object]:
        # Either a row by integer key, or (row key, column name) -> value.
        pass
    @abstractmethod
    def indexed_lookup(self, index: str, key: int) -> IRelationalRow:
        # presumably resolves a row through the named index column — confirm.
        pass
|
StarcoderdataPython
|
128452
|
"""
Main entry point of zserio pip module.
"""
import sys
import zserio.compiler
def main() -> int:
    """
    Main entry point of zserio pip module.

    Invokes the zserio compiler with the command-line arguments. Called when
    the zserio pip module is executed on the command line
    ('python3 -m zserio').

    :returns: Exit code of the zserio compiler.
    """
    completed_process = zserio.run_compiler(sys.argv[1:], capture_output = False)
    # FIX: the annotation and docstring promise an int return, but the
    # original called sys.exit() here and never returned. Returning lets
    # programmatic callers (and console-script wrappers) get the code.
    return completed_process.returncode

if __name__ == "__main__":
    sys.exit(main())
|
StarcoderdataPython
|
4922225
|
import unittest
from g1.asyncs import kernels
from g1.asyncs.bases import queues
class QueuesTest(unittest.TestCase):
def test_queue_without_kernel(self):
self.assertIsNone(kernels.get_kernel())
checks = [
(queues.Queue, [1, 2, 3], [1, 2, 3]),
(queues.PriorityQueue, [1, 3, 2], [1, 2, 3]),
(queues.LifoQueue, [1, 2, 3], [3, 2, 1]),
]
for cls, test_input, expect in checks:
with self.subTest(check=cls.__name__):
queue = cls()
self.assertFalse(queue)
for item in test_input:
queue.put_nonblocking(item)
self.assertTrue(queue)
self.assertEqual(len(queue), len(test_input))
self.assertFalse(queue.is_full())
self.assertFalse(queue.is_closed())
actual = []
while queue:
actual.append(queue.get_nonblocking())
self.assertEqual(actual, expect)
@kernels.with_kernel
def test_queue(self):
checks = [
(queues.Queue, [1, 2, 3], [1, 2, 3]),
(queues.PriorityQueue, [1, 3, 2], [1, 2, 3]),
(queues.LifoQueue, [1, 2, 3], [3, 2, 1]),
]
for cls, test_input, expect in checks:
with self.subTest(check=cls.__name__):
queue = cls()
self.assertFalse(queue)
for item in test_input:
kernels.run(queue.put(item))
self.assertTrue(queue)
self.assertEqual(len(queue), len(test_input))
self.assertFalse(queue.is_full())
self.assertFalse(queue.is_closed())
actual = []
while queue:
actual.append(kernels.run(queue.get()))
self.assertEqual(actual, expect)
@kernels.with_kernel
def test_capacity(self):
queue = queues.Queue(3)
self.assertFalse(queue.is_closed())
for x in (42, 43, 44):
self.assertFalse(queue.is_full())
kernels.run(queue.put(x))
self.assertTrue(queue.is_full())
with self.assertRaises(queues.Full):
queue.put_nonblocking(45)
self.assertTrue(queue.is_full())
with self.assertRaises(kernels.KernelTimeout):
kernels.run(queue.put(45), timeout=0)
self.assertTrue(queue.is_full())
self.assertEqual(kernels.run(queue.get()), 42)
self.assertFalse(queue.is_full())
kernels.run()
actual = []
while queue:
actual.append(kernels.run(queue.get()))
self.assertEqual(actual, [43, 44, 45])
@kernels.with_kernel
def test_close(self):
queue = queues.Queue()
self.assertFalse(queue.is_closed())
for x in (42, 43, 44):
kernels.run(queue.put(x))
self.assertEqual(queue.close(), [])
self.assertTrue(queue.is_closed())
self.assertTrue(queue)
self.assertEqual(queue.close(), [])
with self.assertRaises(queues.Closed):
kernels.run(queue.put(45))
with self.assertRaises(queues.Closed):
queue.put_nonblocking(45)
actual = []
while queue:
actual.append(kernels.run(queue.get()))
self.assertEqual(actual, [42, 43, 44])
with self.assertRaises(queues.Closed):
kernels.run(queue.get())
with self.assertRaises(queues.Closed):
queue.get_nonblocking()
@kernels.with_kernel
def test_close_not_graceful(self):
queue = queues.Queue()
self.assertFalse(queue.is_closed())
for x in (42, 43, 44):
kernels.run(queue.put(x))
self.assertEqual(queue.close(False), [42, 43, 44])
self.assertTrue(queue.is_closed())
self.assertFalse(queue)
    @kernels.with_kernel
    def test_close_repeatedly(self):
        """A graceful close followed by a non-graceful one drains the queue."""
        queue = queues.Queue()
        self.assertFalse(queue.is_closed())
        for x in (42, 43, 44):
            kernels.run(queue.put(x))
        # Graceful close keeps the items queued.
        self.assertEqual(queue.close(True), [])
        self.assertTrue(queue.is_closed())
        # Non-graceful close after a graceful one still drops the items.
        self.assertEqual(queue.close(False), [42, 43, 44])
        self.assertTrue(queue.is_closed())
        # Further closes of either kind are idempotent no-ops.
        self.assertEqual(queue.close(True), [])
        self.assertTrue(queue.is_closed())
        self.assertEqual(queue.close(False), [])
        self.assertTrue(queue.is_closed())
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
|
StarcoderdataPython
|
8180608
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import os
import uvicorn
from _bareasgi import Application
from bareasgi_static import add_static_file_provider
from engine import knowledge, message
here = os.path.abspath(os.path.dirname(__file__))
app = Application()
app.http_router.add({'GET'}, '/knowledge', knowledge)
app.http_router.add({'POST'}, '/message', message)
add_static_file_provider(app, os.path.join(here, 'static'), index_filename='index.html')
port = int(os.getenv('PORT', '80'))
uvicorn.run(app, host='0.0.0.0', port=port)
|
StarcoderdataPython
|
6524184
|
# local modules
# One import per statement (PEP 8). numpy and dill are not referenced here;
# they are imported for their import-time availability check / re-export.
import numpy
import dill
from . import operators
from . import basis
from . import tools

# Public package version and the submodules exported by `from pkg import *`.
__version__ = "0.3.4"
__all__ = ["basis", "operators", "tools"]
|
StarcoderdataPython
|
4912205
|
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# BUG FIX: `sys`, `qiskit` (used as `qiskit.compiler.transpile` below) and
# `np` were used in this file but never explicitly imported; the star imports
# are not guaranteed to provide them. Import them explicitly.
import sys
import numpy as np
import qiskit
import scipy
from qiskit import *
from qiskit.quantum_info import *
from qiskit.providers.aer.noise import *
from qiskit.providers.aer.utils import insert_noise
sys.path.append("..")
from json_tools import *
from basis_ops import *
from decomposition import *
from channels import *
from stinespring import stinespring_algorithm
from variational_approximation import get_approx_circuit, get_varform_circuit

# Target channel: a single CNOT on two qubits.
qc = QuantumCircuit(2)
qc.cx(0,1)
target_unitary = Operator(qc).data
target_choi = Choi(qc).data
n_qubits = 2
# Calibration-derived noise model; reuse the device's cx errors for the
# extra couplings (0,2) and (2,0) used by the ancilla circuits.
noise_model = NoiseModel.from_dict(json_from_file("2020_04_08.json"))
noise_model.add_quantum_error(noise_model._local_quantum_errors['cx']['2,3'], 'cx', [0,2])
noise_model.add_quantum_error(noise_model._local_quantum_errors['cx']['3,2'], 'cx', [2,0])
depth = 6
cfac_budget = None
full_connectivity = False
# Transpiled circuits produced by noise_oracle, exported to QASM at the end.
saved_circuits = list()
def noise_oracle(U, num_anc):
    """Return the Choi matrix of the noisy implementation of unitary ``U``.

    Supports ``num_anc`` of 0 (direct transpilation) or 1 (variational
    approximation on n_qubits+1 qubits, expanded/traced back down).
    As a side effect the transpiled circuit is appended to the module-level
    ``saved_circuits`` list.
    """
    if num_anc == 0:
        qc = QuantumCircuit(n_qubits)
        qc.unitary(U, list(range(n_qubits)))
        qc = qiskit.compiler.transpile(qc, basis_gates=noise_model.basis_gates,
                coupling_map=[[0,1]])
        saved_circuits.append(qc)
        qc_noisy = insert_noise(qc, noise_model)
        return Choi(qc_noisy).data
    elif num_anc == 1:
        exp = channel_expand(n_qubits, num_anc)
        tr = channel_trace(n_qubits, num_anc)
        _,params = get_approx_circuit(U, n_qubits+num_anc, depth, full_connectivity)
        qc = get_varform_circuit(params, n_qubits+num_anc, depth, full_connectivity)
        coupling_map = [[0,1],[1,2],[0,2]] if full_connectivity else [[0,1],[1,2]]
        qc = qiskit.compiler.transpile(qc, basis_gates=noise_model.basis_gates,
                coupling_map=coupling_map)
        saved_circuits.append(qc)
        qc_noisy = insert_noise(qc, noise_model)
        qc_noisy = SuperOp(qc_noisy)
        return Choi( exp.compose(qc_noisy.compose(tr)) ).data
    else:
        # BUG FIX: a bare `raise` outside an except block raises a confusing
        # "No active exception to re-raise" RuntimeError; raise a descriptive
        # error instead.
        raise ValueError("num_anc must be 0 or 1, got {}".format(num_anc))
# Stinespring algorithm: decompose the target CNOT into noisy basis ops.
fixed_unitaries, fixed_choi, coeffs = stinespring_algorithm(target_unitary, n_qubits, noise_oracle, disp=True, cfac_tol=1.2, bm_ops=8, cfac_budget=cfac_budget)
# The 1-norm of the coefficients is the sampling-overhead figure of merit.
print("STINESPRING:", np.sum(np.abs(coeffs)))
np.savez("data/final_cnot.npz", fixed_unitaries, fixed_choi, coeffs)
# Export every circuit captured by noise_oracle for later simulation.
for i in range(len(saved_circuits)):
    saved_circuits[i].qasm(filename="data/final_cnot_sim_circ{}.qasm".format(i))
# Endo basis reference
endo_ops = list()
basis_ops = get_basis_ops(endo_unitaries=True, endo_projections=True)
def noisy_unitary(u, n_q=1):
    """Return the Choi representation of unitary ``u`` run under the
    module-level ``noise_model`` on ``n_q`` qubits."""
    qc = QuantumCircuit(n_q)
    qc.unitary(u, list(range(n_q)))
    qc_noisy = insert_noise(qc, noise_model, transpile=True)
    return Choi(qc_noisy)
# Build the noisy Choi matrices of the Endo reference basis: plain unitaries,
# and projective ops expressed as u1 -> project -> (optional) u2.
for i in range(len(basis_ops)):
    op = basis_ops[i]
    if op.is_unitary:
        endo_ops.append(noisy_unitary(op.u).data)
    else:
        ch = noisy_unitary(op.u1)
        ch = ch.compose(channel_project(1, 0))
        if op.u2 is not None:
            ch = ch.compose(noisy_unitary(op.u2))
        endo_ops.append(Choi(ch).data)
# Lift the single-qubit basis to two qubits and add the noisy target itself.
endo_ops = gen_two_qubit_basis(endo_ops )
endo_ops.append(noisy_unitary(target_unitary, n_q=2).data)
print(len(endo_ops))
# Quasi-probability decomposition of the target against the reference basis.
coeffs,_ = qpd(target_choi, endo_ops, 2)
print("ENDO REF:", np.sum(np.abs(coeffs)))
|
StarcoderdataPython
|
318117
|
<reponame>ParspooyeshFanavar/pyibsng<filename>ibsng/handler/util/run_debug_code.py
"""Run debug code API method."""
from ibsng.handler.handler import Handler
class runDebugCode(Handler):
    """Handler for the "run debug code" API method."""

    def setup(self, command):
        """Record the debug command to run.

        :param str command: command
        :return: None
        :rtype: None
        """
        self.command = command

    def control(self):
        """Validate the parameters captured by :meth:`setup`.

        :return: None
        :rtype: None
        """
        self.is_valid(self.command, str)
|
StarcoderdataPython
|
11350494
|
# Read a phrase and report statistics about the letter "a" in it.
# strip() removes surrounding whitespace; lower() makes the count case-insensitive.
frase = str(input('Insira uma frase: ')).strip().lower()
print('A letra "A" aparece {} vezes.'.format(frase.count('a')))
# find/rfind return 0-based indices (-1 if absent); +1 converts to 1-based
# position for display (prints 0 when the letter does not occur).
print('A primeira letra "A" apareceu na posição {}.'.format(frase.find('a') + 1))
print('A ultima letra "A" aparece na posição {}'.format(frase.rfind('a') + 1))
|
StarcoderdataPython
|
9772579
|
# Random agent for the Lis-v2 gym environment: runs 20 episodes of at most
# 100 steps each, printing per-step rewards and the per-episode reward sum.
import gym
reward_sum = 0
total_episode = 20
episode_count = 1
env = gym.make('Lis-v2')
while episode_count <= total_episode:
    observation = env.reset()
    for t in range(100):
        action = env.action_space.sample() #take a random action
        observation, reward, end_episode, info =env.step(action)
        reward_sum += reward
        print(" episode: "+str(episode_count)+" , step: "+str(t+1)+" , reward: "+str(reward))
        if end_episode:
            print("Episode finished after {} timesteps".format(t+1)+" , reward sum: "+str(reward_sum))
            episode_count += 1
            # Reset the accumulator for the next episode.
            reward_sum = 0
            break
|
StarcoderdataPython
|
1733313
|
from __future__ import annotations
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from .utils import GTextToolbar
from PyQt5.QtWidgets import QWidget, QHBoxLayout, QVBoxLayout, QFrame, QComboBox, \
QCheckBox, QLineEdit, QPushButton, QLabel, QGridLayout, \
QComboBox
from ..answer import Answer, DragItem, DragText, ClozeItem
from ..enums import ClozeFormat, Format, Grading, ShowUnits
from ..wrappers import CombinedFeedback, FText, Hint, MultipleTries, UnitHandling
from .utils import GTextEditor, action_handler
from ..questions import QCrossWord
import logging
log = logging.getLogger(__name__)
class GAnswer(QGridLayout):
    """Grid layout editing a single :class:`Answer`: rich text, feedback and grade."""

    def __init__(self, toolbar: GTextToolbar, **kwargs) -> None:
        super(QGridLayout, self).__init__(**kwargs)
        # Wrapped Answer instance; set by from_obj / created lazily by to_obj.
        self.obj = None
        self._text = GTextEditor(toolbar)
        self.addWidget(self._text, 0, 0, 2, 1)
        self.addWidget(QLabel("Feedback"), 0, 1)
        self._feedback = GTextEditor(toolbar)
        self._feedback.setFixedHeight(30)
        self.addWidget(self._feedback, 0, 2)
        self.addWidget(QLabel("Grade"), 1, 1)
        self._grade = QLineEdit()
        self._grade.setFixedWidth(50)
        self.addWidget(self._grade, 1, 2)

    def __del__(self):
        # Schedule child widgets for deletion when the layout goes away.
        for i in range(self.count()):
            self.itemAt(i).widget().deleteLater()

    def from_obj(self, obj: Answer) -> None:
        """Populate the editors from an existing Answer object."""
        self._grade.setText(str(obj.fraction))
        self._text.text_format = obj.formatting
        # Load the text with the setter matching its declared format.
        if obj.formatting == Format.MD:
            self._text.setMarkdown(obj.text)
        elif obj.formatting == Format.HTML:
            self._text.setHtml(obj.text)
        elif obj.formatting == Format.PLAIN:
            self._text.setPlainText(obj.text)
        self.obj = obj

    def to_obj(self) -> Answer:
        """Read the widgets back into the wrapped Answer (creating it if needed)."""
        fraction = float(self._grade.text())
        formatting = self._text.text_format
        if formatting == Format.MD:
            text = self._text.toMarkdown()
        elif formatting == Format.HTML:
            text = self._text.toHtml()
        else:
            text = self._text.toPlainText()
        feedback = self._feedback.getFText()
        if self.obj is not None:
            # Update the existing object in place so external references stay valid.
            self.obj.fraction = fraction
            self.obj.text = text
            self.obj.feedback = feedback
            self.obj.formatting = formatting
        else:
            self.obj = Answer(fraction, text, feedback, formatting)
        return self.obj

    def setVisible(self, visible: bool) -> None:
        # QGridLayout has no visibility of its own; forward to the children.
        for child in self.children():
            child.setVisible(visible)
# ----------------------------------------------------------------------------------------
class GCloze(QGridLayout):
    """Single-row editor for a :class:`ClozeItem`: position, grade, format and
    the list of answer options (each with fraction, text and feedback)."""

    def __init__(self, **kwargs) -> None:
        super(QGridLayout, self).__init__(**kwargs)
        # Wrapped ClozeItem; set by from_obj / created lazily by to_obj.
        self.obj: ClozeItem = None
        # Answer options; shared with self.obj.opts after from_obj.
        self.opts = []
        self._pos = QLineEdit()
        self._pos.setFixedWidth(40)
        self._pos.setToolTip("Position in the plain text")
        self.addWidget(self._pos, 0, 0)
        self._grade = QLineEdit()
        self._grade.setFixedWidth(25)
        self._grade.setToolTip("Grade for the given answer")
        self.addWidget(self._grade, 0, 1)
        self._form = QComboBox()
        self._form.addItems([a.value for a in ClozeFormat])
        self._form.setToolTip("Cloze format")
        self._form.setStyleSheet("margin:0px 15px 0px 0px")
        self.addWidget(self._form, 0, 2)
        self._opts = QComboBox()
        self._opts.setFixedWidth(140)
        self._opts.currentIndexChanged.connect(self.__changed_opt)
        self.addWidget(self._opts, 0, 3)
        self._frac = QLineEdit()
        self._frac.setFixedWidth(35)
        self._frac.setToolTip("Fraction of the total grade (in percents)")
        self.addWidget(self._frac, 0, 4)
        self._text = QLineEdit()
        self._text.setToolTip("Answer text")
        self.addWidget(self._text, 0, 5)
        self._fdbk = QLineEdit()
        self._fdbk.setToolTip("Answer feedback")
        self.addWidget(self._fdbk, 0, 6)
        self._add = QPushButton("Add")
        self._add.setFixedWidth(30)
        self._add.clicked.connect(self.add_opts)
        self.addWidget(self._add, 0, 7)
        self._pop = QPushButton("Pop")
        self._pop.setFixedWidth(30)
        self._pop.clicked.connect(self.pop_opts)
        self.addWidget(self._pop, 0, 8)

    def __del__(self):
        # Schedule child widgets for deletion when the layout goes away.
        for i in range(self.count()):
            self.itemAt(i).widget().deleteLater()

    def __changed_opt(self, index):
        # Mirror the newly selected option into the detail editors.
        self._frac.setText(str(self.opts[index].fraction))
        self._text.setText(self.opts[index].text)
        self._fdbk.setText(self.opts[index].feedback.text)

    @action_handler
    def add_opts(self, stat: bool):
        """Append a new plain-text option built from the detail editors."""
        text = self._text.text()
        frac = float(self._frac.text())
        fdbk = FText(self._fdbk.text(), Format.PLAIN)
        self.opts.append(Answer(frac, text, fdbk, Format.PLAIN))
        self._opts.addItem(text)

    @action_handler
    def pop_opts(self, stat: bool):
        """Remove the last option from both the list and the combo box."""
        self.opts.pop()
        self._opts.removeItem(self._opts.count()-1)

    def from_obj(self, obj: ClozeItem) -> None:
        """Populate the editors from an existing ClozeItem."""
        self._pos.setText(str(obj.start))
        self._form.setCurrentText(str(obj.cformat.value))
        self._grade.setText(str(obj.grade))
        # NOTE: self.opts now aliases obj.opts, so option edits apply directly.
        self.opts = obj.opts
        self._opts.addItems([a.text for a in self.opts])

    def to_obj(self) -> None:
        """Read the widgets back into the wrapped ClozeItem (creating it if needed)."""
        pos = int(self._pos.text())
        grade = float(self._grade.text())
        cform = ClozeFormat(self._form.currentText())
        if self.obj is not None:
            self.obj.start = pos
            self.obj.grade = grade
            self.obj.cformat = cform
            # Self opts and obj opts are already the same list
        else:
            self.obj = ClozeItem(pos, grade, cform, self.opts)
        return self.obj

    def setVisible(self, visible: bool) -> None:
        # QGridLayout has no visibility of its own; forward to the children.
        for child in self.children():
            child.setVisible(visible)
# ----------------------------------------------------------------------------------------
class GDrag(QGridLayout):
    """This class works from both DragText and DragItem.
    I hope someday people from moodle unify these 2.
    Args:
        QGridLayout ([type]): [description]
    """
    TYPES = ["Image", "Text"]

    def __init__(self, only_text: bool, **kwargs) -> None:
        super(QGridLayout, self).__init__(**kwargs)
        self.addWidget(QLabel("Text"), 0, 0)
        self.text = QLineEdit()
        self.addWidget(self.text, 0, 1)
        self.addWidget(QLabel("Group"), 0, 2)
        self.group = QLineEdit()
        self.group.setFixedWidth(20)
        self.addWidget(self.group, 0, 3)
        self.addWidget(QLabel("Type"), 0, 4)
        self.itype = QComboBox()
        self.itype.addItems(self.TYPES)
        self.itype.setFixedWidth(55)
        self.addWidget(self.itype, 0, 5)
        self.unlimited = QCheckBox("Unlimited")
        self.addWidget(self.unlimited, 0, 6)
        if only_text:
            # DragText items are always textual: lock the type selector.
            self.itype.setCurrentIndex(1)
            self.itype.setEnabled(False)
        else:
            self.imagem = QPushButton("Imagem")
            self.addWidget(self.imagem, 1, 2)
        self.only_text = only_text
        # Image payload for DragItem; never populated here.
        # NOTE(review): the "Imagem" button has no clicked handler, so self.img
        # stays None — confirm whether image selection is implemented elsewhere.
        self.img = None
        self.obj = None

    def __del__(self):
        # Schedule child widgets for deletion when the layout goes away.
        for i in range(self.count()):
            self.itemAt(i).widget().deleteLater()

    def from_obj(self, obj):
        """Populate the editors from an existing DragText/DragItem."""
        self.text.setText(obj.text)
        self.group.setText(str(obj.group))
        self.unlimited.setChecked(obj.unlimited)
        self.obj = obj

    def setVisible(self, visible: bool) -> None:
        # QGridLayout has no visibility of its own; forward to the children.
        for child in self.children():
            child.setVisible(visible)

    def to_obj(self) -> DragItem:
        """Read the widgets back into the wrapped object (creating it if needed)."""
        if self.obj is not None:
            self.obj.text = self.text.text()
            # NOTE(review): group is stored as a string here but from_obj
            # stringifies it, suggesting it may be an int upstream — confirm.
            self.obj.group = self.group.text()
            self.obj.unlimited = self.unlimited.isChecked()
            if not self.only_text: self.obj.image = self.img
        else:
            if self.only_text:
                self.obj = DragText(self.text.text(), self.group.text(),
                                    self.unlimited.isChecked())
            else:
                self.obj = DragItem(0, self.text.text(), self.unlimited.isChecked(),
                                    self.group.text(), self.img)
        return self.obj
# ----------------------------------------------------------------------------------------
class GDropZone(QGridLayout):
    """Single-row editor for a drag-and-drop drop zone (type, group, text).

    Serialization (from_obj / to_obj) is not implemented yet.
    """
    TYPES = ["Image", "Text"]

    def __init__(self, **kwargs) -> None:
        # BUG FIX: this previously called super(GAnswer, self).__init__, which
        # raises TypeError because GDropZone is not a GAnswer subclass. Use the
        # same super pattern as the sibling layout classes.
        super(QGridLayout, self).__init__(**kwargs)
        self.addWidget(QLabel("Type"), 0, 0)
        self.itype = QComboBox()
        # BUG FIX: addItem() expects a single string; addItems() takes a list.
        self.itype.addItems(self.TYPES)
        self.addWidget(self.itype, 0, 1)
        self.group = QLineEdit()
        self.addWidget(QLabel("Group"), 0, 2)
        self.addWidget(self.group, 0, 3)
        self.text = QLineEdit()
        self.addWidget(QLabel("Text"), 0, 4)
        self.addWidget(self.text, 0, 5)

    def __del__(self):
        # Schedule child widgets for deletion when the layout goes away.
        for i in range(self.count()):
            self.itemAt(i).widget().deleteLater()

    def from_obj(self, obj: DragItem):
        """Populate from an existing object. TODO: not implemented."""
        pass

    def setVisible(self, visible: bool) -> None:
        # QGridLayout has no visibility of its own; forward to the children.
        for child in self.children():
            child.setVisible(visible)

    def to_obj(self) -> DragItem:
        """Serialize back to an object. TODO: not implemented."""
        pass
# ----------------------------------------------------------------------------------------
class GOptions(QVBoxLayout):
    """Vertical list of per-option editor rows (GAnswer/GDrag/GCloze), all of
    one type at a time, synchronized with a list of model objects."""

    def __init__(self, toolbar, **kwargs) -> None:
        super(QVBoxLayout, self).__init__(**kwargs)
        self.visible = True
        self.toolbar = toolbar
        # Model type currently being edited (Answer, DragText, DragItem or ClozeItem).
        self.__ctype = None

    def _soft_clear(self, new_size=0, new_type=None):
        # Remove rows that will not be reused: all of them when the model type
        # changes, otherwise only the surplus beyond new_size.
        if len(self.children()) == 0: return
        to_rem = 0
        if new_type and new_type != self.__ctype: to_rem = self.count()
        elif self.count() > new_size: to_rem = self.count() - new_size
        for i in reversed(range(to_rem)): self.itemAt(i).layout().deleteLater()

    def add(self, obj):
        """Append a row editing *obj*; obj must match the current model type."""
        if not isinstance(obj, self.__ctype):
            raise ValueError(f"Objects in this Layout can only be of type {self.__ctype}.")
        item = self.add_default()
        item.from_obj(obj)

    def add_default(self):
        """Append and return an empty editor row for the current model type."""
        if self.__ctype is Answer: item = GAnswer(self.toolbar)
        elif self.__ctype is DragText: item = GDrag(True)
        elif self.__ctype is DragItem: item = GDrag(False)
        elif self.__ctype is ClozeItem: item = GCloze()
        else: raise TypeError(f"Type {self.__ctype} is not implemented")
        self.addLayout(item)
        return item

    def addLayout(self, layout, stretch: int = 0) -> None:
        # NOTE(review): this check does not accept GCloze, so rows created by
        # add_default for ClozeItem log a spurious warning (the layout is still
        # added below) — confirm whether GCloze should be whitelisted here.
        if not isinstance(layout, GAnswer) and not isinstance(layout, GDrag):
            log.warning(f"Attempted adding non-valid layout {type(layout)} to GOptions.")
        return super().addLayout(layout, stretch=stretch)

    def from_obj(self, objs:list) -> None:
        """Rebuild the rows to mirror *objs*, reusing existing rows when possible."""
        self._soft_clear(len(objs), None if not objs else type(objs[0]))
        if not objs: return
        self.__ctype = type(objs[0])
        for obj, child in zip(objs, self.children()):
            if hasattr(child, "from_obj"): child.from_obj(obj)
        # Append rows for any objects beyond the reused ones.
        if self.count() < len(objs):
            for obj in objs[self.count():]: self.add(obj)

    def pop(self) -> None:
        """Remove the last row, if any."""
        if not self.count(): return
        self.itemAt(self.count()-1).layout().deleteLater()

    def setVisible(self, visible: bool) -> None:
        if self.visible == visible: return
        for child in self.children():
            child.setVisible(visible)

    def to_obj(self):
        """Serialize every row back to its model object."""
        return [child.to_obj() for child in self.children()]
# ----------------------------------------------------------------------------------------
class GCFeedback(QFrame):
    """Frame editing a :class:`CombinedFeedback`: correct / incomplete /
    incorrect feedback texts plus the show-number-of-correct flag."""

    def __init__(self, toolbar: GTextToolbar, **kwargs) -> None:
        super().__init__(**kwargs)
        self.setStyleSheet(".GCFeedback{border:1px solid rgb(41, 41, 41); background-color: #e4ebb7}")
        self._correct = GTextEditor(toolbar)
        self._incomplete = GTextEditor(toolbar)
        self._incorrect = GTextEditor(toolbar)
        self._show = QCheckBox("Show the number of correct responses once the question has finished")
        _content = QGridLayout(self)
        _content.addWidget(QLabel("Feedback for correct answer"), 0, 0)
        _content.addWidget(self._correct, 1, 0)
        _content.addWidget(QLabel("Feedback for incomplete answer"), 0, 1)
        _content.addWidget(self._incomplete, 1, 1)
        _content.addWidget(QLabel("Feedback for incorrect answer"), 0, 2)
        _content.addWidget(self._incorrect, 1, 2)
        _content.addWidget(self._show, 2, 0, 1, 3)
        _content.setColumnStretch(3,1)

    def from_obj(self, obj: CombinedFeedback) -> None:
        """Populate the three feedback editors from an existing object."""
        self._correct.setFText(obj.correct)
        self._incomplete.setFText(obj.incomplete)
        self._incorrect.setFText(obj.incorrect)

    def to_obj(self) -> None:
        """Build a new CombinedFeedback from the current widget state."""
        correct = self._correct.getFText()
        incomplete = self._incomplete.getFText()
        incorrect = self._incorrect.getFText()
        return CombinedFeedback(correct, incomplete, incorrect, self._show.isChecked())
# ----------------------------------------------------------------------------------------
class GHint(QFrame):
    """Frame editing a :class:`Hint`: hint text plus the three display flags."""

    def __init__(self, toolbar: GTextToolbar, **kwargs) -> None:
        super().__init__(**kwargs)
        self.setStyleSheet(".GHint{border:1px solid rgb(41, 41, 41); background-color: #e4ebb7}")
        self._text = GTextEditor(toolbar)
        self._show = QCheckBox("Show the number of correct responses")
        self._state = QCheckBox("State which markers are incorrectly placed")
        self._clear = QCheckBox("Move incorrectly placed markers back to default start position")
        _content = QVBoxLayout(self)
        _content.addWidget(self._text)
        _content.addWidget(self._show)
        _content.addWidget(self._state)
        _content.addWidget(self._clear)

    def from_obj(self, obj: Hint) -> None:
        """Populate the editor from an existing Hint."""
        self._show.setChecked(obj.show_correct)
        self._clear.setChecked(obj.clear_wrong)
        self._state.setChecked(obj.state_incorrect)
        self._text.text_format = obj.formatting
        # Load the text with the setter matching its declared format.
        if obj.formatting == Format.MD:
            self._text.setMarkdown(obj.text)
        elif obj.formatting == Format.HTML:
            self._text.setHtml(obj.text)
        elif obj.formatting == Format.PLAIN:
            self._text.setPlainText(obj.text)

    def to_obj(self):
        """Build a new Hint from the current widget state."""
        formatting = self._text.text_format
        if formatting == Format.MD:
            text = self._text.toMarkdown()
        elif formatting == Format.HTML:
            text = self._text.toHtml()
        else:
            text = self._text.toPlainText()
        return Hint(formatting, text, self._show.isChecked(),
                    self._clear.isChecked(), self._state.isChecked())
# ----------------------------------------------------------------------------------------
class GMultipleTries(QVBoxLayout):
    """Layout editing a :class:`MultipleTries`: a per-try penalty plus a
    dynamic list of GHint widgets."""

    def __init__(self, toolbar: GTextToolbar, **kwargs) -> None:
        super().__init__(**kwargs)
        self._penalty = QLineEdit()
        self._penalty.setText("0")
        add = QPushButton("Add Hint")
        add.clicked.connect(lambda: self.addWidget(GHint(toolbar)))
        # NOTE(review): the "Remove Last" button has no clicked handler
        # connected, so it does nothing — confirm intended behavior.
        rem = QPushButton("Remove Last")
        _header = QHBoxLayout()
        _header.addWidget(QLabel("Penalty for each try"))
        _header.addWidget(self._penalty)
        _header.addWidget(add)
        _header.addWidget(rem)
        self.addLayout(_header)
        self._toolbar = toolbar

    def from_obj(self, obj: MultipleTries) -> None:
        """Populate the penalty and hint widgets, growing/shrinking as needed."""
        self._penalty.setText(str(obj.penalty))
        # Item 0 is the header row; the remaining items are GHint widgets.
        if len(obj.hints) > self.count()-1:
            for _ in range(len(obj.hints)-self.count()):
                self.addWidget(GHint(self._toolbar))
        elif len(obj.hints)+1 < self.count():
            for i in reversed(range(self.count()-len(obj.hints))):
                self.itemAt(i).layout().deleteLater()
        # NOTE(review): hints are read here from index num+2, but to_obj reads
        # from num+1 — one of the two offsets looks off by one; confirm.
        for num in range(len(obj.hints)):
            self.itemAt(num+2).from_obj(obj.hints[num])

    def to_obj(self) -> None:
        """Build a new MultipleTries from the current widget state."""
        penalty = float(self._penalty.text())
        hints = []
        for num in range(self.count()-1):
            hints.append(self.itemAt(num+1).to_obj())
        return MultipleTries(penalty, hints)
# ----------------------------------------------------------------------------------------
class GUnitHadling(QFrame):
    """Frame editing a :class:`UnitHandling`: grading mode, penalty, how units
    are shown, and whether units go on the left."""

    # Maps combo-box labels to the Grading / ShowUnits enum member names.
    GRADE = {"Ignore": "IGNORE", "Fraction of reponse": "RESPONSE",
             "Fraction of question": "QUESTION"}
    SHOW_UNITS = {"Text input": "TEXT", "Multiple choice": "MC",
                  "Drop-down": "DROP_DOWN", "Not visible": "NONE"}

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.setStyleSheet(".GUnitHadling{border:1px solid; background-color: #e4ebb7}")
        self._grading = QComboBox()
        self._grading.addItems(["Ignore", "Fraction of reponse", "Fraction of question"])
        self._penalty = QLineEdit()
        self._penalty.setFixedWidth(70)
        self._penalty.setText("0")
        self._show = QComboBox()
        self._show.addItems(["Text input", "Multiple choice", "Drop-down", "Not visible"])
        self._left = QCheckBox("Put units on the left")
        _content = QGridLayout(self)
        _content.addWidget(QLabel("Grading"), 0, 0)
        _content.addWidget(self._grading, 0, 1)
        _content.addWidget(QLabel("Penalty"), 0, 2)
        _content.addWidget(self._penalty, 0, 3)
        _content.addWidget(QLabel("Show units"), 1, 0)
        _content.addWidget(self._show, 1, 1)
        _content.addWidget(self._left, 1, 2, 1, 2)
        _content.setContentsMargins(5,2,5,1)
        self.setFixedSize(300, 55)

    def from_obj(self, obj: UnitHandling) -> None:
        """Populate the widgets from an existing UnitHandling."""
        for k, v in self.GRADE.items():
            if obj.grading_type.value == v:
                self._grading.setCurrentText(k)
        for k, v in self.SHOW_UNITS.items():
            if obj.show.value == v:
                # BUG FIX: k is the combo-box label (a string); the original
                # called setCurrentIndex(k), which requires an int and raises
                # TypeError. Select by text, mirroring the grading combo above.
                self._show.setCurrentText(k)
        self._penalty.setText(str(obj.penalty))

    def to_obj(self) -> UnitHandling:
        """Build a new UnitHandling from the current widget state."""
        grade = Grading[self.GRADE[self._grading.currentText()]]
        penalty = float(self._penalty.text())
        show = ShowUnits[self.SHOW_UNITS[self._show.currentText()]]
        return UnitHandling(grade, penalty, show, self._left.isChecked())
# ----------------------------------------------------------------------------------------
class GCrossWord(QWidget):
    """Widget for editing a crossword question. Mostly a stub."""

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)

    def setAnswerString(self, x, y, direction, value):
        """
        Set answered string.
        """
        # NOTE(review): neither self.active nor self._puzzle is defined
        # anywhere in this class — this method raises AttributeError as
        # written; confirm where the puzzle backend is supposed to be attached.
        if not self.active():
            raise ValueError("puzzle is inactive")
        self._puzzle.setAnswerString(x, y, direction, value)

    def from_obj(self, obj: QCrossWord) -> None:
        """Populate from an existing QCrossWord. TODO: not implemented."""
        pass

    def to_obj(self) -> None:
        """Serialize back to a QCrossWord. TODO: not implemented."""
        pass

    def verify(self) -> None:
        """
        Iterate over the object list to verify if it is valid.
        """
        pass
# ----------------------------------------------------------------------------------------
|
StarcoderdataPython
|
3500782
|
# Generated by Django 2.2.10 on 2020-04-10 16:25
# Auto-generated migration: makes the game's blue/red clue-giver foreign keys
# optional (blank/null allowed). Do not hand-edit generated operations.
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    dependencies = [
        ('core', '0019_game_max_word_length'),
    ]
    operations = [
        migrations.AlterField(
            model_name='game',
            name='blue_giver',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='blue_giver', to='core.Player'),
        ),
        migrations.AlterField(
            model_name='game',
            name='red_giver',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='red_giver', to='core.Player'),
        ),
    ]
|
StarcoderdataPython
|
6499956
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#- Author : (DEK) <NAME>
# program100:
# Write a program to solve a classic ancient Chinese puzzle:
# We count 35 heads and 94 legs among the chickens and rabbits
# in a farm. How many rabbits and how many chickens do we have?
# Hint:
# Use for loop to iterate all possible solutions.
def main(total, numLegs):
    """Solve the chickens-and-rabbits puzzle by exhaustive search.

    Given *total* heads and *numLegs* legs, return the unique
    ``(chickens, rabbits)`` pair, or ``(None, None)`` if no non-negative
    integer solution exists.
    """
    for num_rabbits in range(total + 1):
        num_chickens = total - num_rabbits
        legs = 2 * num_chickens + 4 * num_rabbits
        if legs == numLegs:
            return num_chickens, num_rabbits
    return None, None
# NOTE: Python 2 entry point (raw_input and print-statement syntax).
if __name__ == '__main__':
    try:
        numHeads = int(raw_input("Input number of heads: "))
        numLegs = int(raw_input("Input number of legs: "))
        result = main(numHeads, numLegs)
        # If main() found no solution it returns (None, None), and the %d
        # formatting below raises TypeError, which is caught here.
        print "Number of chickens are %d and rabbits are %d." % result
    except TypeError:
        print 'TypeError: invalid input'
|
StarcoderdataPython
|
3511644
|
# Demo of multiple Flask routes, including one personalized by the URL path.
# Note: flask is not in the standard library; install it first (pip install flask).
from flask import Flask
# Create the web application "app" (Flask-based) named after this module (__name__).
app = Flask(__name__)
# Handler for the default route "/" (i.e. no extra path segment).
@app.route("/")
def index():
    return( "PRIMERA PAGINA WEB CON FLASK!!!!!")
# Custom route: any string typed after "/" is captured as `nombre`,
# and the server responds with a greeting for that name.
@app.route("/<string:nombre>")
def saludar(nombre):
    nombre = nombre.upper()
    return("BUENOS DIAS, " + nombre + " <3")
|
StarcoderdataPython
|
5008483
|
from ..commandparser import Time
from ..discordbot import mute_user
import time
from forumsweats import db
# Command metadata used by the bot's command loader.
name = 'gulag'
args = '[time]'
async def run(message, length_time: Time = Time(60)):
	'Puts you in gulag for one minute (or however much time you specified). You cannot be rocked during this time.'
	# Remaining seconds on any existing mute; positive means already muted.
	mute_remaining = int((await db.get_mute_end(message.author.id)) - time.time())
	if mute_remaining > 0:
		return await message.send('You are already in gulag')
	# Clamp the requested duration to [60s, 15min].
	if length_time < 60:
		return await message.send('You must be in gulag for at least 60 seconds')
	if length_time > 60 * 15:
		return await message.send('You can only be in gulag for up to 15 minutes')
	length = int(length_time)
	# Report in minutes when the stay is longer than one minute.
	if length // 60 > 1:
		await message.send(f'You have entered gulag for {length // 60} minutes.')
	else:
		await message.send(f'You have entered gulag for {length} seconds.')
	await mute_user(
		message.author,
		length,
		message.guild.id if message.guild else None,
		rock_immune=True  # cannot be "rocked" while in gulag
	)
|
StarcoderdataPython
|
7816
|
<reponame>headlessme/yotta
#!/usr/bin/env python
# Copyright 2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import unittest
# internal modules:
from . import util
from . import cli
Test_Outdated = {
'module.json':'''{
"name": "test-outdated",
"version": "0.0.0",
"description": "Test yotta outdated",
"author": "<NAME> <<EMAIL>>",
"license": "Apache-2.0",
"dependencies":{
"test-testing-dummy": "*"
}
}''',
'source/foo.c':'''#include "stdio.h"
int foo(){
printf("foo!\\n");
return 7;
}''',
# test-testing-dummy v0.0.1 (a newer version is available from the registry,
# and will be installed by yt up)
'yotta_modules/test-testing-dummy/module.json':'''{
"name": "test-testing-dummy",
"version": "0.0.1",
"description": "Test yotta's compilation of tests.",
"author": "<NAME> <<EMAIL>>",
"license": "Apache-2.0"
}
'''
}
class TestCLIOutdated(unittest.TestCase):
    """CLI tests for `yotta outdated` using the Test_Outdated fixture."""

    def test_outdated(self):
        """`outdated` exits non-zero and names the stale dependency."""
        path = util.writeTestFiles(Test_Outdated, True)

        stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', 'outdated'], cwd=path)
        self.assertNotEqual(statuscode, 0)
        self.assertIn('test-testing-dummy', stdout + stderr)

        util.rmRf(path)

    def test_notOutdated(self):
        """After `up` installs the newer version, `outdated` reports nothing."""
        path = util.writeTestFiles(Test_Outdated, True)

        # Update dependencies first so nothing is outdated afterwards.
        stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', 'up'], cwd=path)
        self.assertEqual(statuscode, 0)

        stdout, stderr, statuscode = cli.run(['-t', 'x86-linux-native', 'outdated'], cwd=path)
        self.assertEqual(statuscode, 0)
        self.assertNotIn('test-testing-dummy', stdout + stderr)

        util.rmRf(path)
|
StarcoderdataPython
|
5070153
|
<filename>results/fig_01/main_get_cfgs.py
import numpy as np
def main(f_in, f_out):
    """Extract the final configuration from an observables file *f_in* and
    save it (with the final h and m values) compressed to *f_out*."""
    def fetch_cfg(f_name):
        # Load system size, field/magnetization histories and final config.
        data = np.load(f_name)
        N = data['N']
        h = data['h']
        m = data['m']
        cfg = data['cfg_fin']
        return N, h, m, cfg
    N, h_, m_, psi = fetch_cfg(f_in)
    # Keep only the last h and m values alongside the final configuration.
    np.savez_compressed(f_out, N=N, h=h_[-1], m=m_[-1], cfg_fin=psi)
main('./obs_DOP853_ECPN_J01.200_chi1.000_N16_tmax500000.000000_Nt100000_h0-0.500000.npz', 'cfg_01.npz')
main('./obs_DOP853_ECPN_J01.200_chi1.000_N16_tmax500000.000000_Nt100000_h00.900000.npz', 'cfg_02.npz')
|
StarcoderdataPython
|
5001284
|
<reponame>nevooronni/collabstudio<filename>collabstudio/tests.py<gh_stars>0
from django.test import TestCase
from django.contrib.auth.models import User
from .models import Profile,Tags,Project,Follow,Comments
class ProfileTestClass(TestCase):
    '''
    Test case for the Profile model.
    '''
    def setUp(self):
        '''
        Create a fresh (unsaved) Profile instance before every test.
        '''
        self.profile_one = Profile(bio = 'all in')#create an instance of the profile class for every test
    def test_instance(self):
        '''
        The setUp object is a Profile instance.
        '''
        self.assertTrue(isinstance(self.profile_one,Profile))
        #User.objects.all().delete()
    def test_retrieve_profiles(self):
        '''
        retrieve_profiles() returns the same number of rows as objects.all().
        '''
        all_profiles = Profile.retrieve_profiles()
        profiles = Profile.objects.all()
        self.assertTrue(len(all_profiles) == len(profiles))
class ProjectTestClass(TestCase):
    '''
    Test case for the Project model.
    '''
    def setUp(self):
        '''
        Create a fresh (unsaved) Project instance before every test.
        '''
        self.project_one = Project(caption = 'Trap back jumping!')
    def test_instance(self):
        '''
        The setUp object is a Project instance.
        '''
        self.assertTrue(isinstance(self.project_one,Project))
    def test_retrieve_profile_projects(self):
        '''
        retrieve_profile_projects() for a user matches all projects in the db.
        '''
        # NOTE(review): new_profile and new_post are never .save()d, so both
        # query results are empty and the comparison is trivially true — confirm.
        self.neville = User(username = 'nevooronni')#create new user
        self.neville.save()#save new user
        self.chelsea = User(username = 'chelsea')#create another user
        self.chelsea.save()
        self.new_profile = Profile(user=self.neville,bio = 'bla bla blab bla')#create profile for user neville
        self.new_post = Project(user=self.neville,caption = 'bla bla bal bla')#create post for nevill profile
        retrieve_profile = Project.retrieve_profile_projects(self.neville.id)#get the profile post for profile neville
        profiles = Project.objects.all()#get all posts
        self.assertTrue(len(retrieve_profile) == len(profiles))#since only profile exists(one we created) should be equla in lenghth retreived profile method
    def test_retrieve_projects(self):
        '''
        retrieve_projects() returns the same number of rows as objects.all().
        '''
        all_projects = Project.retrieve_projects()
        projects = Project.objects.all()
        self.assertTrue(len(all_projects) == len(projects))
class TagsTestClass(TestCase):
    '''
    Test case for the Tags model.
    '''
    def setUp(self):
        '''
        Create a fresh (unsaved) Tags instance before every test.
        '''
        self.tag_one = Tags(title = 'new')
    def test_instance(self):
        '''
        The setUp object is a Tags instance.
        '''
        self.assertTrue(isinstance(self.tag_one,Tags))
    def test_save_tag(self):
        '''
        save_tag() persists the tag to the db.
        '''
        self.tag_one.save_tag()
        all_tags = Tags.objects.all()
        self.assertTrue(len(all_tags) > 0)
    def test_delete_tag(self):
        '''
        delete_tag() removes the tag from the db.
        '''
        self.tag_one.save_tag()
        # NOTE(review): this relies on QuerySet laziness — all_tags is only
        # evaluated (via len) after the delete below; confirm this is intended.
        all_tags = Tags.objects.all()
        self.tag_one.delete_tag()
        self.assertTrue(len(all_tags) == 0)
    def test_retrieve_tags(self):
        '''
        retrieve_tags() returns the same number of rows as objects.all().
        '''
        self.tag_one.save_tag()
        db_tags = Tags.retrieve_tags()
        all_tags = Tags.objects.all()
        self.assertTrue(len(db_tags) == len(all_tags))
class FollowTestClass(TestCase):
    '''
    Test case for the Follow model.
    '''
    def test_instance(self):
        '''
        A Follow built from a user and profile is a Follow instance.
        '''
        self.neville = User(username = 'nevooronni')
        self.neville.save()
        self.chelsea = User(username = 'chelsea')
        self.chelsea.save()
        self.new_profile = Profile(user=self.neville,bio='bla bla blab bla')
        self.follow = Follow(user=self.neville,profile=self.new_profile)
        self.assertTrue(isinstance(self.follow, Follow))
    def test_retrieve_following(self):
        '''
        retrieve_following() for a user matches all Follow rows in the db.
        '''
        # NOTE(review): the Follow instance is never .save()d, so both query
        # results are empty and the comparison is trivially true — confirm.
        self.neville = User(username = 'nevooronni')
        self.neville.save()
        self.chelsea = User(username = 'chelsea')
        self.chelsea.save()
        self.new_profile = Profile(user=self.neville,bio='bla bla blab bla')
        self.new_project = Project(user=self.neville,caption='bla bla bla bla bla')
        self.follow = Follow(user=self.neville,profile=self.new_profile)
        get_following = Follow.retrieve_following(self.neville.id)
        following = Follow.objects.all()
        self.assertTrue(len(get_following) == len(following))
class CommentsTestClass(TestCase):
    '''
    Test case for the Comments model.
    '''
    def setUp(self):
        '''
        Create a fresh (unsaved) Comments instance before every test.
        '''
        self.comment = Comments(comment = 'bla bla bla bla bla bla')
    def test_instance(self):
        '''
        The setUp object is a Comments instance.
        '''
        self.assertTrue(isinstance(self.comment, Comments))
    def test_retrieve_project_comments(self):
        '''
        retrieve_project_comments() for a project matches all comments in the db.
        '''
        # NOTE(review): the comment/post are never .save()d, so both query
        # results are empty and the comparison is trivially true — confirm.
        self.neville = User(username = 'nevooronni')
        self.neville.save()
        self.chelsea = User(username = 'chelsea')
        self.chelsea.save()
        self.new_profile = Profile(user=self.neville,bio='bla bla blab bla')
        self.new_post = Project(user=self.neville,caption='bla bla bla bla bla')
        self.comment = Comments(project=self.new_post,comment='bla bla bla')
        get_comments = Comments.retrieve_project_comments(self.new_post.id)
        comments = Comments.objects.all()
        self.assertTrue(len(get_comments) == len(comments))
|
StarcoderdataPython
|
3221719
|
<reponame>BuildJet/distdl
import numpy as np
import torch
import torch.nn.functional as F
from distdl.nn.halo_exchange import HaloExchange
from distdl.nn.mixins.halo_mixin import HaloMixin
from distdl.nn.mixins.pooling_mixin import PoolingMixin
from distdl.nn.module import Module
from distdl.utilities.slicing import assemble_slices
from distdl.utilities.torch import TensorStructure
class DistributedPoolBase(Module, HaloMixin, PoolingMixin):
    r"""A feature-space partitioned distributed pooling layer.

    This class provides the user interface to a distributed pooling
    layer, where the input (and output) tensors are partitioned in
    feature-space only.

    The base unit of work is given by the input/output tensor partition. This
    class requires the following of the tensor partitions:

    1. :math:`P_x` over input tensor :math:`x` has shape :math:`1 \times
       1 \times P_{d-1} \times \dots \times P_0`.

    The output partition, :math:`P_y`, is assumed to be the same as the
    input partition.

    The first dimension of the input/output partitions is the batch
    dimension, the second is the channel dimension, and remaining dimensions
    are feature dimensions.

    There are no learnable parameters.

    All inputs to this base class are passed through to the underlying PyTorch
    pooling layer.

    Parameters
    ----------
    P_x :
        Partition of input tensor.
    kernel_size :
        (int or tuple)
        The size of the window to take a max over.
    stride :
        (int or tuple, optional)
        Stride of the convolution. Default: kernel_size
    padding :
        (int or tuple, optional)
        Zero-padding added to both sides of the input. Default: 0
    dilation :
        (int or tuple, optional)
        A parameter that controls the stride of elements in the window. Default: 1

        .. warning::
           Dilation is only supported on MaxPooling layers.
    buffer_manager :
        (BufferManager, optional)
        DistDL BufferManager. Default: None
    """

    # Pooling class for base unit of work; set by concrete subclasses.
    TorchPoolType = None  # noqa F821
    # Number of dimensions of a feature; set by concrete subclasses.
    num_dimensions = None

    def __init__(self,
                 P_x,
                 kernel_size,
                 stride=1,
                 padding=0,
                 dilation=1,
                 buffer_manager=None):
        super(DistributedPoolBase, self).__init__()

        # P_x is 1 x 1 x P_d-1 x ... x P_0
        self.P_x = P_x

        self.is_avg = self.TorchPoolType in [torch.nn.AvgPool1d, torch.nn.AvgPool2d, torch.nn.AvgPool3d]

        # Back-end specific buffer manager for economic buffer allocation
        if buffer_manager is None:
            buffer_manager = self._distdl_backend.BufferManager()
        elif type(buffer_manager) is not self._distdl_backend.BufferManager:
            raise ValueError("Buffer manager type does not match backend.")
        self.buffer_manager = buffer_manager

        # Inactive workers hold no data and need no further setup.
        if not self.P_x.active:
            return

        dims = len(self.P_x.shape)

        self.kernel_size = self._expand_parameter(kernel_size)
        self.stride = self._expand_parameter(stride)
        self.padding = self._expand_parameter(padding)
        self.dilation = self._expand_parameter(dilation)

        if self.is_avg and not all(x == 1 for x in self.dilation):
            raise ValueError('dilation is only supported for MaxPooling layers.')

        # PyTorch does not support dilation for AvgPooling layers
        if self.is_avg:
            self.pool_layer = self.TorchPoolType(kernel_size=self.kernel_size,
                                                 stride=self.stride,
                                                 padding=0)
        else:
            self.pool_layer = self.TorchPoolType(kernel_size=self.kernel_size,
                                                 stride=self.stride,
                                                 padding=0,
                                                 dilation=self.dilation)

        # We will be using global padding to compute local padding,
        # so expand it to a numpy array
        global_padding = np.pad(self.padding,
                                pad_width=(dims-len(self.padding), 0),
                                mode='constant',
                                constant_values=0)
        self.global_padding = global_padding

        # FIX: use the builtin ``int`` dtype -- ``np.int`` was deprecated in
        # NumPy 1.20 and removed in NumPy 1.24, where it raises AttributeError.
        pad_left_right = self.global_padding.reshape((dims, 1)) + np.zeros((dims, 2), dtype=int)
        self.local_padding = self._compute_local_padding(pad_left_right)

        # We need to be able to remove some data from the input to the conv
        # layer.
        self.needed_slices = None

        # For the halo layer we also defer construction, so that we can have
        # the halo shape for the input. The halo will allocate its own
        # buffers, but it needs this information at construction to be able
        # to do this in the pre-forward hook.
        self.halo_layer = None

        # Variables for tracking input changes and buffer construction
        self._distdl_is_setup = False
        self._input_tensor_structure = TensorStructure()

    def _expand_parameter(self, param):
        # If the given input is not of size num_dimensions, expand it so.
        # If not possible, raise an exception.
        param = np.atleast_1d(param)
        if len(param) == 1:
            param = np.ones(self.num_dimensions, dtype=int) * param[0]
        elif len(param) == self.num_dimensions:
            pass
        else:
            raise ValueError('Invalid parameter: ' + str(param))
        return tuple(param)

    def _distdl_module_setup(self, input):
        r"""Distributed (feature) pooling module setup function.

        This function is called every time something changes in the input
        tensor structure. It should not be called manually.

        Parameters
        ----------
        input :
            Tuple of forward inputs. See
            `torch.nn.Module.register_forward_pre_hook` for more details.
        """
        self._distdl_is_setup = True
        self._input_tensor_structure = TensorStructure(input[0])

        if not self.P_x.active:
            return

        # To compute the halo regions, we need the global tensor shape. This
        # is not available until when the input is provided.
        x_global_structure = \
            self._distdl_backend.assemble_global_tensor_structure(input[0], self.P_x)
        x_local_structure = TensorStructure(input[0])
        x_global_shape = x_global_structure.shape
        x_local_shape = x_local_structure.shape
        x_global_shape_after_pad = x_global_shape + 2*self.global_padding
        x_local_shape_after_pad = x_local_shape + np.sum(self.local_padding, axis=1, keepdims=False)
        x_local_structure_after_pad = TensorStructure(input[0])
        x_local_structure_after_pad.shape = x_local_shape_after_pad

        # We need to compute the halos with respect to the explicit padding.
        # So, we assume the padding is already added, then compute the halo regions.
        compute_subtensor_shapes_unbalanced = \
            self._distdl_backend.tensor_decomposition.compute_subtensor_shapes_unbalanced
        subtensor_shapes = \
            compute_subtensor_shapes_unbalanced(x_local_structure_after_pad, self.P_x)

        # Using that information, we can get the rest of the halo information
        exchange_info = self._compute_exchange_info(x_global_shape_after_pad,
                                                    self.kernel_size,
                                                    self.stride,
                                                    self._expand_parameter(0),
                                                    self.dilation,
                                                    self.P_x.active,
                                                    self.P_x.shape,
                                                    self.P_x.index,
                                                    subtensor_shapes=subtensor_shapes)
        halo_shape = exchange_info[0]
        recv_buffer_shape = exchange_info[1]
        send_buffer_shape = exchange_info[2]
        needed_ranges = exchange_info[3]

        self.halo_shape = halo_shape

        # We can also set up part of the halo layer.
        self.halo_layer = HaloExchange(self.P_x,
                                       halo_shape,
                                       recv_buffer_shape,
                                       send_buffer_shape,
                                       buffer_manager=self.buffer_manager)

        # We have to select out the "unused" entries. Sometimes there can
        # be "negative" halos.
        self.needed_slices = assemble_slices(needed_ranges[:, 0],
                                             needed_ranges[:, 1])

    def _distdl_module_teardown(self, input):
        r"""Distributed (channel) pooling module teardown function.

        This function is called every time something changes in the input
        tensor structure. It should not be called manually.

        Parameters
        ----------
        input :
            Tuple of forward inputs. See
            `torch.nn.Module.register_forward_pre_hook` for more details.
        """
        # Reset all sub_layers
        self.needed_slices = None
        self.halo_layer = None

        # Reset any info about the input
        self._distdl_is_setup = False
        self._input_tensor_structure = TensorStructure()

    def _distdl_input_changed(self, input):
        r"""Determine if the structure of inputs has changed.

        Parameters
        ----------
        input :
            Tuple of forward inputs. See
            `torch.nn.Module.register_forward_pre_hook` for more details.
        """
        new_tensor_structure = TensorStructure(input[0])
        return self._input_tensor_structure != new_tensor_structure

    def _to_torch_padding(self, pad):
        r"""
        Accepts a NumPy ndarray describing the padding, and produces the torch F.pad format:
            [[a_0, b_0], ..., [a_n, b_n]] -> (a_n, b_n, ..., a_0, b_0)
        """
        return tuple(np.array(list(reversed(pad)), dtype=int).flatten())

    def _compute_local_padding(self, padding):
        r"""
        Computes the amount of explicit padding required on the current rank,
        given the global padding.
        """
        # Only ranks on the boundary of the partition apply explicit padding.
        should_pad_left = [k == 0 for k in self.P_x.index]
        should_pad_right = [k == d-1 for k, d in zip(self.P_x.index, self.P_x.shape)]
        should_pad = np.stack((should_pad_left, should_pad_right), axis=1)
        local_padding = np.where(should_pad, padding, 0)
        return local_padding

    def forward(self, input):
        r"""Forward function interface.

        Parameters
        ----------
        input :
            Input tensor to be broadcast.
        """
        if not self.P_x.active:
            return input.clone()

        # Compute the total padding and convert to PyTorch format
        total_padding = self.local_padding + self.halo_shape
        torch_padding = self._to_torch_padding(total_padding)

        if total_padding.sum() == 0:
            input_padded = input
        else:
            input_padded = F.pad(input, pad=torch_padding, mode='constant', value=self.default_pad_value)

        input_exchanged = self.halo_layer(input_padded)
        input_needed = input_exchanged[self.needed_slices]
        pool_output = self.pool_layer(input_needed)
        return pool_output
class DistributedAvgPool1d(DistributedPoolBase):
    r"""A feature-partitioned distributed 1d average pooling layer."""

    # Local pooling operation applied on each worker.
    TorchPoolType = torch.nn.AvgPool1d
    # Pooling acts over 1 feature dimension.
    num_dimensions = 1
    # Average pooling pads with zeros.
    default_pad_value = 0
class DistributedAvgPool2d(DistributedPoolBase):
    r"""A feature-partitioned distributed 2d average pooling layer."""

    # Local pooling operation applied on each worker.
    TorchPoolType = torch.nn.AvgPool2d
    # Pooling acts over 2 feature dimensions.
    num_dimensions = 2
    # Average pooling pads with zeros.
    default_pad_value = 0
class DistributedAvgPool3d(DistributedPoolBase):
    r"""A feature-partitioned distributed 3d average pooling layer."""

    # Local pooling operation applied on each worker.
    TorchPoolType = torch.nn.AvgPool3d
    # Pooling acts over 3 feature dimensions.
    num_dimensions = 3
    # Average pooling pads with zeros.
    default_pad_value = 0
class DistributedMaxPool1d(DistributedPoolBase):
    r"""A feature-partitioned distributed 1d max pooling layer."""

    # Local pooling operation applied on each worker.
    TorchPoolType = torch.nn.MaxPool1d
    # Pooling acts over 1 feature dimension.
    num_dimensions = 1
    # Pad with -inf so padded cells never win the max.
    # See https://github.com/pytorch/pytorch/issues/33384 for default `value`
    default_pad_value = -float('inf')
class DistributedMaxPool2d(DistributedPoolBase):
    r"""A feature-partitioned distributed 2d max pooling layer."""

    # Local pooling operation applied on each worker.
    TorchPoolType = torch.nn.MaxPool2d
    # Pooling acts over 2 feature dimensions.
    num_dimensions = 2
    # Pad with -inf so padded cells never win the max.
    # See https://github.com/pytorch/pytorch/issues/33384 for default `value`
    default_pad_value = -float('inf')
class DistributedMaxPool3d(DistributedPoolBase):
    r"""A feature-partitioned distributed 3d max pooling layer."""

    # Local pooling operation applied on each worker.
    TorchPoolType = torch.nn.MaxPool3d
    # Pooling acts over 3 feature dimensions.
    num_dimensions = 3
    # Pad with -inf so padded cells never win the max.
    # See https://github.com/pytorch/pytorch/issues/33384 for default `value`
    default_pad_value = -float('inf')
|
StarcoderdataPython
|
1729322
|
# __BEGIN_LICENSE__
# Copyright (C) 2008-2010 United States Government as represented by
# the Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# __END_LICENSE__
import traceback
import sys
import json
from django.shortcuts import render
from django.template import RequestContext
from django.conf import settings
from django.middleware.csrf import get_token
import zerorpc
from geocamPycroraptor2 import status as statuslib
def getPyraptordClient(clientName='pyraptord'):
    """Return a zerorpc client connected to the named service's RPC port."""
    # FIX: use open() in a context manager instead of the Python-2-only
    # file() builtin, and close the handle deterministically.
    with open(settings.ZEROMQ_PORTS, 'r') as f:
        ports = json.loads(f.read())
    rpcPort = ports[clientName]['rpc']
    client = zerorpc.Client(rpcPort)
    return client
def commandButton(cmd, svcName, disabled=False):
    """Return the HTML markup for one dashboard command button.

    NOTE(review): cmd/svcName are interpolated unescaped -- assumed to be
    trusted internal identifiers, not user input.
    """
    attrs = ' disabled="disabled"' if disabled else ''
    return '<button type="submit" name="cmd" value="{0}.{1}"{2}>{0}</button>'.format(
        cmd, svcName, attrs)
def renderDashboard(request, pyraptord=None, cmd=None, response=None):
    """Render the service-manager dashboard page.

    If *cmd*/*response* are given, a banner reporting the command outcome
    is shown at the top of the page.
    """
    if pyraptord is None:
        pyraptord = getPyraptordClient()
    logDir = getattr(settings, 'SERVICES_LOG_DIR_URL', None)
    status = pyraptord.getStatusAll()
    serviceConfig = pyraptord.getConfig('SERVICES')
    # FIX: dict.items() returns a view in Python 3 (no .sort()); sorted()
    # works on both Python 2 and 3.
    configItems = sorted(serviceConfig.items())
    tb = []
    tb.append('<h1 style="font-weight: bold;">Service Manager</h1>')
    if cmd is not None:
        tb.append('<div style="margin: 0.5em; font-size: 1.2em; background-color: #ccc;"><i>command:</i> %s <i>response:</i> %s</div>'
                  % (cmd, response))
    tb.append('<div style="margin: 0.5em; font-size: 1.2em; "><a href="." style="font-size: 1.2em;">refresh</a></div>')
    tb.append('<form method="post" action=".">')
    tb.append('<input type="hidden" name="csrfmiddlewaretoken" value="%s"/>' % get_token(request))
    tb.append('<table>')
    for name, _cfg in configItems:
        procStatus = status.get(name, {'status': 'notStarted'})
        procMode = procStatus.get('status')
        procColor = statuslib.getColor(procMode)
        tb.append('<tr>')
        tb.append('<td>%s</td>' % name)
        tb.append('<td style="background-color: %s;">%s</td>' % (procColor, procMode))
        tb.append('<td>%s</td>' % commandButton('start', name, disabled=not statuslib.isStartable(procMode)))
        tb.append('<td>%s</td>' % commandButton('stop', name, disabled=not statuslib.isActive(procMode)))
        tb.append('<td>%s</td>' % commandButton('restart', name))
        if logDir:
            tb.append('<td><a href="%s%s_latest.txt">latest log</a></td>'
                      % (logDir, name))
            tb.append('<td><a href="%s%s_previous.txt">previous log</a></td>'
                      % (logDir, name))
        tb.append('</tr>')
    tb.append('<tr>')
    if logDir:
        tb.append('<td style="font-weight: bold;">pyraptord</td>')
        tb.append('<td colspan="4"></td>')
        tb.append('<td><a href="%spyraptord_latest.txt">latest log</a></td>'
                  % logDir)
        tb.append('<td><a href="%spyraptord_previous.txt">previous log</a></td>'
                  % logDir)
    tb.append('</tr>')
    tb.append('</table>')
    if logDir:
        # FIX: previously this link was emitted unconditionally and rendered
        # href="None" when SERVICES_LOG_DIR_URL was unset.
        tb.append('<div style="margin-top: 0.5em;"><a href="%s">all logs</a></div>' % logDir)
    tb.append('</form>')
    return render(request,
                  'geocamPycroraptor2/dashboard.html',
                  {'html': ''.join(tb)},
                  )
def runCommandInternal(pyraptord, cmd, svcName):
    """Invoke *cmd* on the pyraptord client and return (summary, response).

    The response is 'ok' on success, otherwise a 'module.ExcName: message'
    string describing the exception raised by the call.
    """
    cmdSummary = '%s("%s")' % (cmd, svcName)
    try:
        pyraptord(cmd, svcName)
    except BaseException:  # deliberately broad: report the failure, never propagate
        excType, excValue, _excTb = sys.exc_info()
        response = ('%s.%s: %s'
                    % (excType.__module__,
                       excType.__name__,
                       str(excValue)))
    else:
        response = 'ok'
    return (cmdSummary, response)
def runCommand(request, cmd, svcName):
    """Run a service command via pyraptord and render the dashboard with its outcome."""
    client = getPyraptordClient()
    summary, response = runCommandInternal(client, cmd, svcName)
    return renderDashboard(request,
                           pyraptord=client,
                           cmd=summary,
                           response=response)
def dashboard(request):
    """Render the dashboard; on POST, run the requested service command.

    The POST parameter 'cmd' has the form '<command>.<serviceName>'.
    """
    if request.method == 'POST':
        cmdPair = request.POST.get('cmd', None)
        # Guard the split so a malformed value can't raise an unpack error.
        if cmdPair and '.' in cmdPair:
            cmd, svcName = cmdPair.split('.', 1)
            # FIX: validate explicitly instead of assert -- asserts are
            # stripped under `python -O` and this value is untrusted POST data.
            if cmd not in ('start', 'stop', 'restart'):
                raise ValueError('invalid command: %r' % cmd)
            if cmd in ('start', 'stop'):
                cmd = cmd + 'Service'
            return runCommand(request, cmd, svcName)
    return renderDashboard(request)
def stopPyraptordServiceIfRunning(pyraptord, svcName):
    """Stop *svcName* via pyraptord, ignoring the error if it is not running."""
    try:
        pyraptord.stopService(svcName)
    except zerorpc.RemoteError:
        # Most likely the service was not running; log and carry on.
        traceback.print_exc()
|
StarcoderdataPython
|
388882
|
# -*- coding: utf-8 -*-
"""
Copyright 2019 <NAME> S.r.l.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""Utils module for rlog_generator."""
import datetime
import logging
import random
import socket
import struct
import sys
import yaml
# Module-level logger; callers may reconfigure output via custom_log().
log = logging.getLogger(__name__)
def load_config(yaml_file):
    """Parse *yaml_file* and return the resulting Python object.

    Arguments:
        yaml_file {str} -- path of the YAML file to load

    Returns:
        obj -- Python object parsed from the YAML file
    """
    with open(yaml_file, 'r') as stream:
        log.debug(f"Loading file {yaml_file}")
        return yaml.load(stream, Loader=yaml.FullLoader)
def randint(min_value, max_value):
    """Return a random integer drawn uniformly from [min_value, max_value].

    Both end points are included; arguments may be numeric strings.

    Arguments:
        min_value {int} -- lower bound (inclusive)
        max_value {int} -- upper bound (inclusive)

    Returns:
        int -- random integer in [min_value, max_value]
    """
    lo, hi = int(min_value), int(max_value)
    return random.randint(lo, hi)
def randip():
    """Return a random IPv4 address in dotted-quad notation.

    Returns:
        str -- IP address
    """
    packed = struct.pack('>I', random.randint(1, 0xffffffff))
    return socket.inet_ntoa(packed)
def get_function(function_str, module=sys.modules[__name__]):
    """Return the function named by *function_str* minus its 'func_' prefix.

    Example: 'func_randint' resolves to the callable named 'randint'.

    Arguments:
        function_str {str} -- name of function preceded by 'func_'

    Keyword Arguments:
        module {module obj} -- module object with the function
            (default: {sys.modules[__name__]})

    Returns:
        obj function -- function of module
    """
    # FIX: split only on the FIRST underscore so target names that
    # themselves contain underscores (e.g. 'func_my_gen') resolve correctly;
    # split("_")[1] would truncate them.
    function_str = function_str.split("_", 1)[1]
    return getattr(module, function_str)
def exec_function_str(function_str):
    """Evaluate a 'func_...' command string, passing trailing tokens as args.

    Example: the string 'func_randint 1 10' runs randint('1', '10').

    Arguments:
        function_str {str} -- complete string function

    Returns:
        any -- value returned by the resolved function
    """
    name, *args = function_str.split()
    return get_function(name)(*args)
def get_random_value(field_value):
    """Produce a random value for one field of a pattern configuration.

    A string is treated as a 'func_...' command; a list yields one of its
    elements chosen at random.

    Arguments:
        field_value {str/list} -- value of field in pattern configuration

    Raises:
        ValueError: if *field_value* is neither a string nor a list

    Returns:
        any -- random value
    """
    if isinstance(field_value, list):
        return random.choice(field_value)
    if isinstance(field_value, str):
        return exec_function_str(field_value)
    raise ValueError('field value can be a string or a list')
def get_template_log(template, fields):
    """Render one random log line from a Python format-string template.

    Positional argument 0 of the template is the current timestamp; the
    keyword arguments are randomized values for each configured field.

    Arguments:
        template {str} -- template string in Python formatting string
        fields {dict} -- field spec from the pattern configuration file

    Returns:
        str -- random log generated from template
    """
    rendered = {key: get_random_value(spec) for key, spec in fields.items()}
    timestamp = datetime.datetime.now()
    return template.format(timestamp, **rendered)
def custom_log(level="WARNING", name=None):  # pragma: no cover
    """Return a logger that writes pipe-separated records to stdout."""
    logger = logging.getLogger(name) if name else logging.getLogger()
    logger.setLevel(level)
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(logging.Formatter(
        "%(asctime)s | "
        "%(name)s | "
        "%(module)s | "
        "%(funcName)s | "
        "%(levelname)s | "
        "%(message)s"))
    logger.addHandler(handler)
    return logger
|
StarcoderdataPython
|
6624044
|
"""
https://www.practicepython.org
Exercise 6: String Lists
2 chilis
Ask the user for a string and print out whether this string is a
palindrome or not. (A palindrome is a string that reads the same
forwards and backwards.)
"""
def palindrome_checker(s1):
    """Return True if *s1* reads the same forwards and backwards.

    Empty and single-character strings count as palindromes. Case is NOT
    normalized here; callers lowercase the input first if they want that.
    """
    # Idiomatic: compare against the reversed string instead of index-pair
    # juggling with range(len(s1)//2).
    return s1 == s1[::-1]
# Prompt the user, lowercase the input for the check, and report the result
# (the original casing is echoed back in the message).
s1 = input("Enter some text: ")
if palindrome_checker(s1.lower()):
    print("'" + s1 + "' is a palindrome")
else:
    print("'" + s1 + "' is not a palindrome")
|
StarcoderdataPython
|
8077231
|
# Copyright 2018 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import shutil
import tempfile
import time
from mock import mock
import bossimage.core as bc
# Scratch directory shared by every test in this module; removed in teardown().
tempdir = tempfile.mkdtemp()
def setup():
    # Nose-style module setup: monkeypatch bossimage.core so the tests make
    # no AWS calls, perform no real waits beyond brief sleeps, and touch only
    # the scratch `tempdir`.
    bc.create_working_dir = create_working_dir
    bc.instance_files = instance_files
    bc.ec2_connect = probe(ec2_connect)
    bc.wait_for_connection = wait_for_connection
    bc.wait_for_image = probe(wait_for_image)
    bc.run_ansible = probe(run_ansible)
    # Wrap selected real functions so tests can observe that they were called.
    bc.create_keypair = probe(bc.create_keypair)
    bc.create_instance = probe(bc.create_instance)
    bc.write_playbook = probe(bc.write_playbook)
def teardown():
    # Nose-style module teardown: remove the shared scratch directory.
    shutil.rmtree(tempdir)
def probe(func):
    """
    Decorator to wrap a function with the ability to be probed.

    When the function is called, its name will be added to a list,
    which is stored as an attribute on the probe function
    (see reset_probes for `probe.watch` / `probe.called`).
    """
    from functools import wraps

    # FIX: preserve __name__/__doc__ on the wrapper so probed functions stay
    # introspectable and can themselves be re-wrapped by name.
    @wraps(func)
    def wrapper(*args, **kwargs):
        if func.__name__ in probe.watch:
            probe.called.append(func.__name__)
        return func(*args, **kwargs)
    return wrapper
def reset_probes(watch=None):
    """
    Clears the list of functions which have been probed. The `watch`
    argument is a list of function names that should be watched for.
    """
    probe.called = []
    # FIX: avoid the shared mutable-default pitfall -- a caller appending to
    # probe.watch would otherwise mutate the default list across calls.
    probe.watch = [] if watch is None else watch
def create_working_dir():
    # Test stub for bossimage.core.create_working_dir: no directory is
    # created; the tests use the module-level `tempdir` instead.
    pass
def wait_for_connection(a, b, c, d, e):
    # Test stub standing in for the connection wait; a short sleep keeps the
    # caller's timing assumptions realistic without touching the network.
    time.sleep(1)
def wait_for_image(a):
    # Test stub standing in for the AMI-availability wait.
    time.sleep(1)
def instance_files(instance):
    """Return the per-instance working file paths, keyed by role.

    Test replacement for bossimage.core.instance_files that keeps every
    file inside the module-level scratch `tempdir`.
    """
    prefix = '{}/{}'.format(tempdir, instance)
    return {
        'state': prefix + '-state.yml',
        'keyfile': prefix + '.pem',
        'inventory': prefix + '.inventory',
        'playbook': prefix + '-playbook.yml',
    }
def ec2_connect():
    # Test double for bossimage.core.ec2_connect: returns the mocked EC2
    # resource instead of a boto connection.
    return mock_ec2()
def run_ansible(a, b, c, d, e):
    # No-op stand-in for bossimage.core.run_ansible in tests.
    pass
def mock_ec2():
    # Build a mock EC2 resource object exposing just the calls the code under
    # test makes: create_key_pair, create_tags, create_instances,
    # images.filter and Instance.

    def create_key_pair(KeyName=''):
        # Fake keypair with canned key material.
        keypair = mock.Mock()
        keypair.key_material = 'thiskeyistotallyvalid'
        keypair.key_name = KeyName
        return keypair

    def create_tags(Resources=None, Tags=[]):
        # Tagging is a no-op in tests.
        pass

    def create_instances(ImageId='', InstanceType='', MinCount='', MaxCount='',
                         KeyName='', NetworkInterfaces=[],
                         BlockDeviceMappings=[], UserData='',
                         IamInstanceProfile=''):
        # Return a single fake running instance with fixed id/addresses.
        instance = mock.Mock()
        instance.id = 'i-00000001'
        instance.private_ip_address = '10.20.30.40'
        instance.public_ip_address = '20.30.40.50'
        instance.load = lambda: None
        instance.reload = lambda: None
        instance.wait_until_running = lambda: time.sleep(1)
        return [instance]

    def images_filter(ImageIds='', Filters=[]):
        # Yield one fake AMI regardless of the filter.
        image = mock.Mock()
        image.id = 'ami-00000002'
        yield image

    def Instance(id=''):
        def create_image(Name=''):
            # Fake AMI creation: immediately 'available'.
            image = mock.Mock()
            image.id = 'ami-00000001'
            image.state = 'available'
            image.reload = lambda: None
            return image
        instance = mock.Mock()
        instance.architecture = 'x86_64'
        instance.hypervisor = 'xen'
        instance.virtualization_type = 'hvm'
        instance.create_image = create_image
        instance.load = lambda: None
        # NOTE(review): key and value look like anonymized placeholders from
        # the original source -- confirm the expected password_data shape.
        instance.password_data = lambda: {'Password<PASSWORD>': '<PASSWORD>'}
        return instance

    m = mock.Mock()
    m.create_key_pair = create_key_pair
    # Probe-wrapped so tests can assert these were invoked.
    m.create_tags = probe(create_tags)
    m.create_instances = probe(create_instances)
    m.images.filter = images_filter
    m.Instance = Instance
    return m
|
StarcoderdataPython
|
8059496
|
<reponame>TobiasLundby/UAS-route-plan-optimization<filename>data_sources/weather/ibm.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
2018-01-05 TL First version
"""
"""
Description:
None
see: http://2017.compciv.org/guide/topics/python-standard-library/csv.html
see: https://docs.scipy.org/doc/numpy-1.13.0/user/basics.creation.html
For treating empy cells (usable for SDU data): https://www.youtube.com/watch?v=yQsOFWqpjkE
License: BSD 3-Clause
"""
### Import start
import csv
import numpy as np
from bokeh.io import output_file, show
from bokeh.layouts import gridplot, column, widgetbox, layout
from bokeh.plotting import figure
from bokeh.models import ColumnDataSource
from bokeh.models.widgets import Div
from datetime import datetime
import pandas as pd
### Import end
### Define start
MaxOperationWindSpeed_def = 12  # unit: m/s
MaxOperationWindGusts_def = MaxOperationWindSpeed_def+5.14 # 10kts more than wind speed based on the tower at HCA Airport
OperationMinTemperature_def = -20  # unit: degrees Celsius
OperationMaxTemperature_def = 45  # unit: degrees Celsius
icing_temp_diff_tolerance = 2.7 # the allowed difference between the surface temperature and dewpoint temperature
ICING_MAX_TEMP = 1 # just a bit over 0 degrees Celcius
ICING_MIN_TEMP = -15 # unit: degrees Celcius
MAX_PRECIPITATION = 0.76 # unit: cm
MAX_SNOWFALL = 0.5 # unit: cm
# Feature toggles used by the check_conditions_* methods below.
use_apparent_temp = False # for the condition checking
use_wind_gusts = True # for the condition checking
use_icing_test = True # for the condition checking
use_precipitation_test = True  # for the condition checking
use_snowfall_test = True  # for the condition checking
### Define end
### Class start
class ibm_weather_csv():
    def __init__(self, inFileName, inCity, inYear, inMaxOperationWindSpeed, inMaxOperationWindGusts, inOperationMinTemperature, inOperationMaxTemperature, debug = False):
        # Data-source configuration and operational limits.
        self.fileName = inFileName;
        self.city = inCity
        self.year = inYear
        self.maxOperationWindSpeed = inMaxOperationWindSpeed
        self.maxOperationWindGusts = inMaxOperationWindGusts
        self.minOperationTemperature = inOperationMinTemperature
        self.maxOperationTemperature = inOperationMaxTemperature
        # Raw hourly data series, filled by loadCSV(); one entry per sample.
        self.DateSGMT = []
        self.WindSpeedKphS = []
        self.SurfaceWindGustsKphS = []
        # Derived series in m/s, computed from the kph series in loadCSV().
        self.WindSpeedMpsS = []
        self.SurfaceWindGustsMpsS = []
        self.WindDirectionDegreesS = []
        self.SurfaceTempCS = []
        self.ApparentTemperatureCS = []
        self.WindChillTemperatureCS = []
        self.SurfaceDewpointTempCS = [] # SurfaceDewpointTemperatureCelsius
        self.PrecipitationPreviousHourCmS = []
        self.SnowfallCmS = []
        # Sample/day counters, set by loadCSV().
        self.samples = 0
        # Default bokeh figure dimensions.
        self.plotWidth = 800
        self.plotHeight = 400
        self.debugText = debug
        self.days = 0
def reset(self):
self.fileName = '';
self.city = ''
self.year = np.nan
self.maxOperationWindSpeed = np.nan
self.minOperationTemperature = np.nan
self.DateSGMT = []
self.WindSpeedKphS = []
self.SurfaceWindGustsKphS = []
self.SurfaceTempCS = []
self.ApparentTemperatureCS = []
self.WindChillTemperatureCS = []
self.SurfaceDewpointTempCS = []
self.PrecipitationPreviousHourCmS = []
self.SnowfallCmS = []
self.samples = 0
self.days = 0
    def loadCSV(self):
        # Load the hourly IBM weather export named in self.fileName and fill
        # the per-field sample lists. (Python 2 code: print statements.)
        if self.debugText:
            print 'Attempting to open data file'
        with open(self.fileName) as csvfile:
            #with open('DronePlanning/sec.csv') as csvfile:
            if self.debugText:
                print 'Data file opened, attempting data load'
            readCSV = csv.DictReader(csvfile, delimiter=',')
            for row in readCSV:
                # Date load - format 01/01/2016 00:00:00
                DateGMT = datetime.strptime(row['DateHrGmt'], '%m/%d/%Y %H:%M:%S')
                self.DateSGMT.append(DateGMT)

                # Wind speed load (km/h in the file; converted to m/s below)
                WindSpeedKph = float(row['WindSpeedKph'])
                self.WindSpeedKphS.append(WindSpeedKph)
                SurfaceWindGustsKph = float(row['SurfaceWindGustsKph'])
                self.SurfaceWindGustsKphS.append(SurfaceWindGustsKph)
                WindDirectionDegrees = float(row['WindDirectionDegrees'])
                self.WindDirectionDegreesS.append(WindDirectionDegrees)

                # Temperature load (degrees Celsius)
                SurfaceTempC = float(row['SurfaceTemperatureCelsius'])
                self.SurfaceTempCS.append(SurfaceTempC)
                ApparentTemperatureC = float(row['ApparentTemperatureCelsius'])
                self.ApparentTemperatureCS.append(ApparentTemperatureC)
                WindChillTemperatureC = float(row['WindChillTemperatureCelsius'])
                self.WindChillTemperatureCS.append(WindChillTemperatureC)
                #SurfaceDewpointTemperatureCelsius
                SurfaceDewpointTemperatureC = float(row['SurfaceDewpointTemperatureCelsius'])
                self.SurfaceDewpointTempCS.append(SurfaceDewpointTemperatureC)

                # Precipitation and snowfall (cm)
                PrecipitationPreviousHourCm = float(row['PrecipitationPreviousHourCentimeters'])
                self.PrecipitationPreviousHourCmS.append(PrecipitationPreviousHourCm)
                SnowfallCm = float(row['SnowfallCentimeters'])
                self.SnowfallCmS.append(SnowfallCm)
        if self.debugText:
            print 'Data loaded'
        #x = np.arange(len(SurfaceTempS))
        # One sample per row; days assumes 24 hourly samples per day.
        self.samples = len(self.DateSGMT)
        self.days = int(round(len(self.DateSGMT)/24))
        if self.debugText:
            print 'Samples:', str(self.samples)
            print 'Days:', str(self.days)

        # convert to the more used m/s
        self.WindSpeedMpsS = np.divide(self.WindSpeedKphS, 3.6)
        self.SurfaceWindGustsMpsS = np.divide(self.SurfaceWindGustsKphS, 3.6)
        #WindSpeedMpsS = [x / 3.6 for x in WindSpeedKphS] #note that the list contains floats, use calc above
        #print(WindSpeedMpsS)
def convert_time_to_index(self, year, month, date, hour):
# Since I only have data from 2016 the year is just ignored
date_obj = datetime.strptime('%02d/%02d/%04d %02d' % (date, month, year, hour), '%m/%d/%Y %H')
for i in range(len(self.DateSGMT)):
if self.DateSGMT[i] == date_obj:
return i
return None
    ## Simple accessors; *index* is an hourly sample number from loadCSV().
    def get_wind_speed(self, index):
        # Wind speed in m/s (converted from km/h in loadCSV).
        return self.WindSpeedMpsS[index]
    def get_wind_gust(self, index):
        # Wind gust speed in m/s.
        return self.SurfaceWindGustsMpsS[index]
    def get_wind_direction(self, index):
        # Wind direction in degrees.
        return self.WindDirectionDegreesS[index]
    def get_temp(self, index):
        # Surface temperature in degrees Celsius.
        return self.SurfaceTempCS[index]
    def get_temp_dewpoint(self, index):
        # Surface dewpoint temperature in degrees Celsius.
        return self.SurfaceDewpointTempCS[index]
    def get_precipitation(self, index):
        # Precipitation during the previous hour, in cm.
        return self.PrecipitationPreviousHourCmS[index]
    def get_snowfall(self, index):
        # Snowfall in cm.
        return self.SnowfallCmS[index]
## Check condition methods
def check_conditions_wind(self, sample_nr):
# true = sattisfies wind conditions
# self.WindSpeedMpsS[i] > self.maxOperationWindSpeed or self.SurfaceWindGustsMpsS[i] > self.maxOperationWindSpeed
if self.WindSpeedMpsS[sample_nr] > self.maxOperationWindSpeed:
#print "WIND exceed"
return False
if use_wind_gusts:
if self.SurfaceWindGustsMpsS[sample_nr] > self.maxOperationWindGusts:
#print "GUST exceed"
return False
return True
def check_conditions_temp(self, sample_nr):
# true = sattisfies temp conditions
if use_apparent_temp:
if self.ApparentTemperatureCS[sample_nr] < self.minOperationTemperature:
return False
if self.ApparentTemperatureCS[sample_nr] > self.maxOperationTemperature:
return False
return True
else:
if self.SurfaceTempCS[sample_nr] < self.minOperationTemperature:
return False
if self.SurfaceTempCS[sample_nr] > self.maxOperationTemperature:
return False
return True
def check_conditions_icing(self, sample_nr):
# true = no icing, false = icing possiblity
if use_icing_test:
diff_SurfaceTem_SurfaceDewpointTemp = abs(self.SurfaceTempCS[sample_nr] - self.SurfaceDewpointTempCS[sample_nr])
# if self.SurfaceTempCS[sample_nr] < 0:
# print "Icing: date ", self.DateSGMT[sample_nr], "diff:", diff_SurfaceTem_SurfaceDewpointTemp, "temp:", self.SurfaceTempCS[sample_nr]
if diff_SurfaceTem_SurfaceDewpointTemp < icing_temp_diff_tolerance and (self.SurfaceTempCS[sample_nr] < ICING_MAX_TEMP and self.SurfaceTempCS[sample_nr] > ICING_MIN_TEMP):
return False
else:
return True
else:
return True
def check_conditions_precipitation(self, sample_nr):
# true = can fly, false = cannot fly due to rain
if use_precipitation_test:
if self.PrecipitationPreviousHourCmS[sample_nr] > MAX_PRECIPITATION:
return False
else:
return True
else:
return True
def check_conditions_snowfall(self, sample_nr):
# true = can fly, false = cannot fly due to rain
if use_snowfall_test:
if self.SnowfallCmS[sample_nr] > MAX_SNOWFALL:
return False
else:
return True
else:
return True
def check_conditions_all(self, sample_nr):
if self.check_conditions_wind(sample_nr) and self.check_conditions_temp(sample_nr) and self.check_conditions_icing(sample_nr) and self.check_conditions_precipitation(sample_nr) and self.check_conditions_snowfall(sample_nr):
return True
else:
return False
    def check_conditions_all_with_type(self, sample_nr):
        # Classify the sample: 0 = flyable, otherwise the first failing check
        # in the elif chain below determines the reported reason -- the
        # ordering is a deliberate precedence (all > wind > icing > rain >
        # snow > temp), so a sample failing several checks reports only the
        # highest-precedence one.
        # no_fly = [ [ [0,2,0], [2,14,1], [14,24,2] ],[ [0,13,0], [13,15,2], [15,24,1] ], [ [0,5,0], [5,7,1], [7,24,2] ], [ [0,13,0], [13,17,2], [17,24,1] ], [ [0,2,0], [2,14,1], [14,24,2] ],[ [0,13,0], [13,15,2], [15,24,1] ], [ [0,5,0], [5,7,1], [7,24,2] ], [ [0,13,0], [13,17,2], [17,24,1] ] ]
        condition_type = 0
        if self.check_conditions_all(sample_nr):
            condition_type = 0 # within conditions
        elif self.check_conditions_wind(sample_nr) == False and self.check_conditions_temp(sample_nr) == False and self.check_conditions_icing(sample_nr) == False:
            condition_type = 1 # all exceeding
        elif self.check_conditions_wind(sample_nr) == False:
            condition_type = 2 # wind exceeding
        elif self.check_conditions_icing(sample_nr) == False:
            condition_type = 3 # icing risk
        elif self.check_conditions_precipitation(sample_nr) == False:
            condition_type = 4 # rain exceeding
        elif self.check_conditions_snowfall(sample_nr) == False:
            condition_type = 5 # snowfall exceeding
        elif self.check_conditions_temp(sample_nr) == False:
            condition_type = 6 # temp exceeding
        #print condition_type
        return condition_type
## Plot methods
def createTimePlot(self, inTitle, inXAxis, inYAxis):
    # Build a Bokeh time-series figure with the standard tool set and the
    # plot dimensions configured on this reader instance.
    plot_kwargs = dict(
        tools="pan,box_zoom,reset,save,hover",  # hover
        title="%s" % inTitle,
        x_axis_type='datetime',
        x_axis_label='%s' % inXAxis,
        y_axis_label='%s' % inYAxis,
        plot_height=self.plotHeight,
        plot_width=self.plotWidth,
    )
    return figure(**plot_kwargs)
## Analyzer methods
def analyseWind(self):
    """Analyse how often and how long wind exceeds the operation limits.

    Returns [periods, hoursOfWind]:
      periods     - length of each consecutive run of samples exceeding the
                    wind limit (run length is used directly as hours, i.e.
                    assumes hourly samples - TODO confirm sampling rate).
      hoursOfWind - 25-bin day histogram: index h counts days with h samples
                    exceeding the limit (only days with >= 2 such hours are
                    binned; index 24 covers a fully unflyable day).
    """
    # Calculate percentage statistics - simple how many samples are over the limit
    print "\nWind analysis"
    aboveThreshold = 0
    for i in range(self.samples):
        if self.check_conditions_wind(i) == False:
            aboveThreshold += 1
        #print self.DateSGMT[i], self.WindSpeedMpsS[i]
    if self.debugText:
        print 'Number of samples above %d m/s = ' % (self.maxOperationWindSpeed), aboveThreshold
        print 'Percentage of samples above %d m/s = ' % (self.maxOperationWindSpeed), aboveThreshold/(self.samples * 1.0)*100.0, '%'
    # Calculate consecutive periods with max conditions
    per_1h_count = 0
    per_2h_count = 0
    per_3h_count = 0
    per_4h_count = 0
    periodsAbove = []  # runs longer than 4 hours (not covered by the buckets)
    periods = []
    in_period_count = 0  # length of the run currently being tracked
    for i in range(self.samples):
        if self.check_conditions_wind(i) == False:
            in_period_count += 1
        else:
            if in_period_count > 0:
                # A run just ended: record its length and bucket it.
                hours = in_period_count
                periods.append (hours)
                if hours <= 1.0:
                    per_1h_count += 1
                elif hours <= 2.0:
                    per_2h_count += 1
                elif hours <= 3.0:
                    per_3h_count += 1
                elif hours <= 4.0:
                    per_4h_count += 1
                else:
                    periodsAbove.append (hours)
                in_period_count = 0
    # NOTE(review): a run still open at the very last sample is never
    # recorded - confirm whether that is intended.
    if self.debugText:
        print 'Number of periods with reports above %d m/s = ' % (self.maxOperationWindSpeed), len(periods)
        print '0-1 hour : ', per_1h_count
        print '1-2 hours: ', per_2h_count
        print '2-3 hours: ', per_3h_count
        print '3-4 hours: ', per_4h_count
        print "> 4 hours: ",len(periodsAbove)
    #
    noWindDays = 0
    # init array
    hoursOfWind = []
    for i in range(25): # 25 instead of 24 since 'nothing' should also be included
        hoursOfWind.append(0)
    # loop through the data
    for day in range(self.days): # itr days
        extreme_hour_count = 0
        for sample in range(24): # itr samples
            if self.check_conditions_wind(day*24+sample) == False:
                extreme_hour_count += 1
        if extreme_hour_count >= 2:
            hoursOfWind[extreme_hour_count] +=1
        elif extreme_hour_count == 0:
            noWindDays += 1
    #print hoursOfWind
    return [periods, hoursOfWind]
def analyseTemperature(self):
    """Analyse how often and how long temperature falls below the limit.

    Returns ``periods``: the length of each consecutive run of samples
    failing the temperature test (run length is used directly as hours,
    i.e. assumes hourly samples - TODO confirm sampling rate).
    """
    # Calculate percentage statistics - simple how many samples are over the limit
    print "\nTemperature analysis"
    belowThreshold = 0
    for i in range(self.samples):
        if self.check_conditions_temp(i) == False:
            belowThreshold += 1
        #print self.DateSGMT[i], self.ApparentTemperatureCS[i]
    if self.debugText:
        print 'Number of samples below %d °C = ' % (self.minOperationTemperature), belowThreshold
        print 'Percentage of samples below %d °C = ' % (self.minOperationTemperature), belowThreshold/(self.samples * 1.0)*100.0, '%'
    # Calculate consecutive periods with min conditions
    per_1h_count = 0
    per_2h_count = 0
    per_3h_count = 0
    per_4h_count = 0
    periodsAbove = []  # runs longer than 4 hours (not covered by the buckets)
    periods = []
    in_period_count = 0  # length of the run currently being tracked
    for i in range(self.samples):
        if self.check_conditions_temp(i) == False:
            in_period_count += 1
        else:
            if in_period_count > 0:
                # A run just ended: record its length and bucket it.
                hours = in_period_count
                periods.append (hours)
                if hours <= 1.0:
                    per_1h_count += 1
                elif hours <= 2.0:
                    per_2h_count += 1
                elif hours <= 3.0:
                    per_3h_count += 1
                elif hours <= 4.0:
                    per_4h_count += 1
                else:
                    periodsAbove.append (hours)
                in_period_count = 0
    if self.debugText:
        print 'Number of periods with reports below %d °C = ' % (self.minOperationTemperature), len(periods)
        print '0-1 hour : ', per_1h_count
        print '1-2 hours: ', per_2h_count
        print '2-3 hours: ', per_3h_count
        print '3-4 hours: ', per_4h_count
        print "> 4 hours: ",len(periodsAbove)
    return periods
def analyseCombined(self):
    """Analyse all weather conditions combined.

    Returns [periods, hoursExcedingConditions, combind_results]:
      periods                 - length of each consecutive run of samples
                                failing the combined test (assumes hourly
                                samples - TODO confirm sampling rate).
      hoursExcedingConditions - 25-bin day histogram: index h counts days
                                with h samples exceeding conditions (only
                                days with >= 2 such hours are binned).
      combind_results         - per-day list of [start_hour, end_hour, type]
                                intervals, where type is the code returned
                                by check_conditions_all_with_type().
    """
    # Calculate percentage statistics - simple how many samples are over the limit
    print "\nCombined analysis"
    excedingConditions = 0
    for i in range(self.samples):
        if self.check_conditions_all(i) == False:
            excedingConditions += 1
        #print self.DateSGMT[i], self.WindSpeedMpsS[i]
    if self.debugText:
        print 'Number of samples exceeding conditions = ', excedingConditions
        print 'Percentage of samples exceeding conditions = ', excedingConditions/(self.samples * 1.0)*100.0, '%'
    # Calculate consecutive periods with max conditions
    per_1h_count = 0
    per_2h_count = 0
    per_3h_count = 0
    per_4h_count = 0
    periodsAbove = []  # runs longer than 4 hours (not covered by the buckets)
    periods = []
    in_period_count = 0  # length of the run currently being tracked
    for i in range(self.samples):
        if self.check_conditions_all(i) == False:
            in_period_count += 1
            #print self.DateSGMT[i]
        else:
            if in_period_count > 0:
                #print in_period_count
                # A run just ended: record its length and bucket it.
                hours = in_period_count
                periods.append (hours)
                if hours <= 1.0:
                    per_1h_count += 1
                elif hours <= 2.0:
                    per_2h_count += 1
                elif hours <= 3.0:
                    per_3h_count += 1
                elif hours <= 4.0:
                    per_4h_count += 1
                else:
                    periodsAbove.append (hours)
                in_period_count = 0
    if self.debugText:
        print 'Number of periods with reports exceeding conditions = ', len(periods)
        print '0-1 hour : ', per_1h_count
        print '1-2 hours: ', per_2h_count
        print '2-3 hours: ', per_3h_count
        print '3-4 hours: ', per_4h_count
        print "> 4 hours: ",len(periodsAbove)
    #
    withinDays = 0
    # init array
    hoursExcedingConditions = []
    for i in range(25): # 25 instead of 24 since 'nothing' should also be included
        hoursExcedingConditions.append(0)
    # loop through the data
    for day in range(self.days): # itr days
        extreme_hour_count = 0
        for sample in range(24): # itr samples
            if self.check_conditions_all(day*24+sample) == False:
                extreme_hour_count += 1
        if extreme_hour_count >= 2:
            hoursExcedingConditions[extreme_hour_count] +=1
        # if extreme_hour_count == 24:
        #     print "day: ", day
        elif extreme_hour_count == 0:
            withinDays += 1
    #print hoursExcedingConditions
    #
    # Build per-day interval lists of [start_hour, end_hour, condition_type],
    # matching the consumer layout sketched below:
    # no_fly = [ [ [0,2,0], [2,14,1], [14,24,2] ],[ [0,13,0], [13,15,2], [15,24,1] ], [ [0,5,0], [5,7,1], [7,24,2] ], [ [0,13,0], [13,17,2], [17,24,1] ], [ [0,2,0], [2,14,1], [14,24,2] ],[ [0,13,0], [13,15,2], [15,24,1] ], [ [0,5,0], [5,7,1], [7,24,2] ], [ [0,13,0], [13,17,2], [17,24,1] ] ]
    combind_results = []
    day_result = []
    cur_start_hour = 0
    cur_hour = 0
    last_result = 0
    cur_result = 0
    #for day in range(self.days): # itr days
    for day in range(self.days): # itr days
        cur_start_hour = 0
        last_result = 0
        day_result = []
        for sample in range(24): # itr samples
            cur_hour = sample
            cur_result = self.check_conditions_all_with_type(day*24+sample)
            if cur_result != last_result:
                # Condition type changed: close the previous interval.
                day_result.append([cur_start_hour, cur_hour, last_result])
                last_result = cur_result
                cur_start_hour = cur_hour
                #print self.DateSGMT[sample]
            if sample == 23:
                # End of day: close the final open interval.
                day_result.append([cur_start_hour, cur_hour, last_result])
        combind_results.append(day_result)
    #print combind_results
    return [periods, hoursExcedingConditions, combind_results]
### Class end - Main start
if __name__ == '__main__':
    # Initialize and load data
    reader = ibm_weather_csv('DronePlanning/CleanedObservationsOdense.csv', 'Odense', 2016, MaxOperationWindSpeed_def, MaxOperationWindGusts_def, OperationMinTemperature_def, OperationMaxTemperature_def, debug = True)
    reader.loadCSV()
    # Output to static HTML file
    output_file("webpages/output.html", title="Drone planning using weather data")
    # %%%%%%%%%%%%%%%%%% WIND VISUALIZATION %%%%%%%%%%%%%%%%%%
    # %%%%%%%%% time plot of WIND SPEED and GUST - START %%%%%%%%%
    # create a new plot
    p1 = reader.createTimePlot('Wind', 'Date and time', 'Wind speed [m/s]')
    # Plot content: measured series plus dashed limit lines for reference.
    p1.line(reader.DateSGMT, reader.SurfaceWindGustsMpsS, legend="Wind gusts - %s" % reader.city, alpha=0.8, color="green")
    p1.line(reader.DateSGMT, reader.WindSpeedMpsS, legend="Wind speed - %s" % reader.city, alpha=0.8)
    p1.line([reader.DateSGMT[0], reader.DateSGMT[-1]], [reader.maxOperationWindSpeed, reader.maxOperationWindSpeed], legend="Wind speed limit = %0d m/s" % reader.maxOperationWindSpeed, line_color="red", line_dash="2 4")
    p1.line([reader.DateSGMT[0], reader.DateSGMT[-1]], [reader.maxOperationWindGusts, reader.maxOperationWindGusts], legend="Gust speed limit = %0d m/s" % reader.maxOperationWindGusts, line_color="red", line_dash="4 2")
    # %%%%%%%%% time plot of WIND SPEED and GUST - END %%%%%%%%%
    # %%%%%%%%% histogram of WIND SPEED - START %%%%%%%%%
    p8 = figure(title="Wind speed",tools="save",plot_width=reader.plotWidth/2,plot_height=reader.plotHeight)
    hist,bins=np.histogram(reader.WindSpeedMpsS,bins=20)
    p8.quad(top=hist, bottom=0, left=bins[:-1], right=bins[1:],line_color="blue")
    # Add labels
    p8.xaxis.axis_label = "Wind speed [m/s] - %s" % reader.city
    p8.yaxis.axis_label = 'Occurences'
    # %%%%%%%%% histogram of WIND SPEED - END %%%%%%%%%
    # %%%%%%%%% histogram of WIND GUST - START %%%%%%%%%
    p9 = figure(title="Wind gusts",tools="save",plot_width=reader.plotWidth/2,plot_height=reader.plotHeight)
    hist,bins=np.histogram(reader.SurfaceWindGustsMpsS,bins=20)
    p9.quad(top=hist, bottom=0, left=bins[:-1], right=bins[1:],line_color="blue")
    # Add labels
    p9.xaxis.axis_label = "Wind gusts [m/s] - %s" % reader.city
    p9.yaxis.axis_label = 'Occurences'
    # %%%%%%%%% histogram of WIND GUST - END %%%%%%%%%
    # %%%%%%%%%%%%%%%%%% TEMPERATURE VISUALIZATION %%%%%%%%%%%%%%%%%%
    # %%%%%%%%% time plot of TEMPERATURE (different types) - START %%%%%%%%%
    # create a new plot
    p2 = reader.createTimePlot('Temperature', 'Date and time', 'Temperature [°C]')
    # Plot content
    p2.line(reader.DateSGMT, reader.SurfaceTempCS, legend="Temperature - %s" % reader.city, alpha=0.8)
    #p2.line(reader.DateSGMT, reader.WindChillTemperatureCS, legend="Wind Chill Temperature - %s" % reader.city, alpha=0.8, color="green")
    #p2.line(reader.DateSGMT, reader.ApparentTemperatureCS, legend="Apparent Temperature - %s" % reader.city, alpha=0.8, color="orange")
    p2.line(reader.DateSGMT, reader.SurfaceDewpointTempCS, legend="Dewpoint Temperature - %s" % reader.city, alpha=0.8, color="green")
    # Draw illustrative lines
    p2.line([reader.DateSGMT[0], reader.DateSGMT[-1]], [reader.minOperationTemperature, reader.minOperationTemperature], legend="Temperature min = %0d °C" % reader.minOperationTemperature, line_color="red", line_dash="2 4")
    p2.line([reader.DateSGMT[0], reader.DateSGMT[-1]], [reader.maxOperationTemperature, reader.maxOperationTemperature], legend="Temperature max = %0d °C" % reader.maxOperationTemperature, line_color="red", line_dash="4 2")
    # %%%%%%%%% time plot of TEMPERATURE (different types) - END %%%%%%%%%
    # %%%%%%%%% histogram of TEMPERATURE - START %%%%%%%%%
    p10 = figure(title="Temperature",tools="save",plot_width=reader.plotWidth/2,plot_height=reader.plotHeight)
    hist,bins=np.histogram(reader.SurfaceTempCS,bins=20)
    p10.quad(top=hist, bottom=0, left=bins[:-1], right=bins[1:],line_color="blue")
    # Add labels
    p10.xaxis.axis_label = "Temperature [°C] - %s" % reader.city
    p10.yaxis.axis_label = 'Occurences'
    # %%%%%%%%% histogram of TEMPERATURE - END %%%%%%%%%
    # %%%%%%%%% histogram of Apparent TEMPERATURE - START %%%%%%%%%
    p11 = figure(title="Apparent Temperature",tools="save",plot_width=reader.plotWidth/2,plot_height=reader.plotHeight)
    hist,bins=np.histogram(reader.ApparentTemperatureCS,bins=20)
    p11.quad(top=hist, bottom=0, left=bins[:-1], right=bins[1:],line_color="blue")
    # Add labels
    p11.xaxis.axis_label = "Apparent Temperature [°C] - %s" % reader.city
    p11.yaxis.axis_label = 'Occurences'
    # %%%%%%%%% histogram of Apparent TEMPERATURE - END %%%%%%%%%
    # %%%%%%%%%%%%%%%%%% PRECIPITATION VISUALIZATION %%%%%%%%%%%%%%%%%%
    # %%%%%%%%% time plot of PRECIPITATION - START %%%%%%%%%
    # create a new plot
    p3 = reader.createTimePlot('Precipitation', 'Date and time', 'Precipitation [cm]')
    p3.line(reader.DateSGMT, reader.PrecipitationPreviousHourCmS, legend="Precipitation - %s" % reader.city, alpha=0.8)
    # %%%%%%%%% time plot of PRECIPITATION - END %%%%%%%%%
    # %%%%%%%%% time plot of PRECIPITATION:SNOWFALL - START %%%%%%%%%
    # create a new plot
    p4 = reader.createTimePlot('Snowfall', 'Date and time', 'Snowfall [cm]')
    p4.line(reader.DateSGMT, reader.SnowfallCmS, legend="Snowfall - %s" % reader.city, alpha=0.8)
    # %%%%%%%%% time plot of PRECIPITATION:SNOWFALL - END %%%%%%%%%
    # %%%%%%%%%%%%%%%%%% DATA ANALYSIS %%%%%%%%%%%%%%%%%%
    # %%%%%%%%% Analysis WIND %%%%%%%%%
    periods_wind, hoursOfWind = reader.analyseWind()
    #print periods_wind
    # %%%%%%%%% histogram of Consecutive WIND hours - START %%%%%%%%%
    p5 = figure(title="Wind analysis",tools="save",plot_width=reader.plotWidth/2,plot_height=reader.plotHeight)
    hist,bins=np.histogram(periods_wind,bins=30)
    p5.quad(top=hist, bottom=0, left=bins[:-1], right=bins[1:],line_color="blue")
    p5.xaxis.axis_label = 'Consequitive hours with wind velocity > %d m/s' % reader.maxOperationWindSpeed
    p5.yaxis.axis_label = 'Occurences'
    # %%%%%%%%% histogram of Consecutive WIND hours - END %%%%%%%%%
    # %%%%%%%%% WIND analysis plot - START %%%%%%%%%
    # Explanation: it shows how many days there have been ex. 4 hours of wind exceeding the conditions. In other words if there is occurences of hours with wind above conditions for more than 24 hours the whole day is unflyable.
    p7 = figure(
        tools="pan,box_zoom,reset,save,hover",
        title="Wind analysis: days with wind exceeding conditions, divided in time intervals",
        x_axis_label='Hours with wind velocity > %d m/s (capped below 2 hours)' % reader.maxOperationWindSpeed,
        y_axis_label='Days',
        plot_height = reader.plotHeight, plot_width = reader.plotWidth
    )
    p7.line(range(25), hoursOfWind, alpha=0.6)
    p7.circle(range(25), hoursOfWind, size=10, alpha=0.8)
    # %%%%%%%%% WIND analysis plot - END %%%%%%%%%
    # %%%%%%%%% Analysis TEMPERATURE %%%%%%%%%
    periods_temperature = reader.analyseTemperature()
    #print periods_temperature
    # %%%%%%%%% histogram of Consecutive TEMPERATURE hours - START %%%%%%%%%
    # p6 = figure(title="Temperature analysis - using apparent temperature",tools="save",plot_width=reader.plotWidth/2,plot_height=reader.plotHeight)
    p6 = figure(title="Temperature analysis",tools="save",plot_width=reader.plotWidth/2,plot_height=reader.plotHeight)
    hist,bins=np.histogram(periods_temperature,bins=30)
    p6.quad(top=hist, bottom=0, left=bins[:-1], right=bins[1:],line_color="blue")
    p6.xaxis.axis_label = 'Consequitive hours with temperature < %d °C' % reader.minOperationTemperature
    p6.yaxis.axis_label = 'Occurences'
    # %%%%%%%%% histogram of Consecutive TEMPERATURE hours - END %%%%%%%%%
    # %%%%%%%%% Analysis COMBINED %%%%%%%%%
    periods_combined, hoursExcedingConditions, results_combined_arr = reader.analyseCombined()
    # %%%%%%%%% histogram of Consecutive COMBINED hours - START %%%%%%%%%
    p12 = figure(title="Combined analysis",tools="save",plot_width=reader.plotWidth,plot_height=reader.plotHeight)
    hist,bins=np.histogram(periods_combined,bins=30)
    p12.quad(top=hist, bottom=0, left=bins[:-1], right=bins[1:],line_color="blue")
    p12.xaxis.axis_label = 'Consequitive hours exceeding conditions'
    p12.yaxis.axis_label = 'Occurences'
    # %%%%%%%%% histogram of Consecutive COMBINED hours - END %%%%%%%%%
    # %%%%%%%%% COMBINED analysis plot - START %%%%%%%%%
    p13 = figure(
        tools="pan,box_zoom,reset,save,hover",
        title="Combined analysis: days with conditions exceeding limits, divided in time intervals",
        x_axis_label='Hours exceeding conditions (capped below 2 hours)',
        y_axis_label='Days',
        plot_height = reader.plotHeight, plot_width = reader.plotWidth
    )
    p13.line(range(25), hoursExcedingConditions, alpha=0.6)
    p13.circle(range(25), hoursExcedingConditions, size=10, alpha=0.8)
    # %%%%%%%%% COMBINED analysis plot - END %%%%%%%%%
    # %%%%%%%%% Illustrative COMBINED analysis plot - START %%%%%%%%%
    # Day-by-hour calendar view: one column per day, colored per-interval by
    # the condition type reported by check_conditions_all_with_type().
    start_time = reader.DateSGMT[0].strftime('%m/%d/%Y')
    #print start_time
    rng = pd.date_range(start=start_time, periods=reader.days+1, freq='D' ) # reader.days+1
    #print rng
    interval = pd.DataFrame({'start' : rng })
    interval['end'] = interval['start'] + pd.Timedelta(hours=24)#pd.Timedelta(hours=23,minutes=59,seconds=59)
    #print interval,"\n\n"
    p14 = figure(x_axis_type='datetime', plot_height=1,plot_width=3, tools="box_zoom,reset,save", x_range=(interval['start'][0], interval['end'][reader.days]), y_range=(0, 23)) # the plot format/size is set by heigh and width and the sizing_mode makes it reponsive
    # formatting
    p14.yaxis.minor_tick_line_color = None
    p14.ygrid[0].ticker.desired_num_ticks = 1
    for date_itr in range(len(results_combined_arr)):
        for internal_itr in range(len(results_combined_arr[date_itr])):
            if results_combined_arr[date_itr][internal_itr][2] == 0: # 0 = palegreen color = within conditions
                p14.quad(left=interval['start'][date_itr],right=interval['end'][date_itr],bottom=results_combined_arr[date_itr][internal_itr][0], top=results_combined_arr[date_itr][internal_itr][1], color="palegreen")
            if results_combined_arr[date_itr][internal_itr][2] == 1: # 1 = red color = all exceeding
                p14.quad(left=interval['start'][date_itr],right=interval['end'][date_itr],bottom=results_combined_arr[date_itr][internal_itr][0], top=results_combined_arr[date_itr][internal_itr][1], color="red")
            if results_combined_arr[date_itr][internal_itr][2] == 2: # 2 = orange color = wind exceeding
                p14.quad(left=interval['start'][date_itr],right=interval['end'][date_itr],bottom=results_combined_arr[date_itr][internal_itr][0], top=results_combined_arr[date_itr][internal_itr][1], color="orange")
            if results_combined_arr[date_itr][internal_itr][2] == 3: # 3 = brown color = icing risk
                p14.quad(left=interval['start'][date_itr],right=interval['end'][date_itr],bottom=results_combined_arr[date_itr][internal_itr][0], top=results_combined_arr[date_itr][internal_itr][1], color="brown")
            if results_combined_arr[date_itr][internal_itr][2] == 4: # 4 = hotpink color = rain risk
                p14.quad(left=interval['start'][date_itr],right=interval['end'][date_itr],bottom=results_combined_arr[date_itr][internal_itr][0], top=results_combined_arr[date_itr][internal_itr][1], color="hotpink")
            if results_combined_arr[date_itr][internal_itr][2] == 5: # 5 = magenta color = snowfall risk
                p14.quad(left=interval['start'][date_itr],right=interval['end'][date_itr],bottom=results_combined_arr[date_itr][internal_itr][0], top=results_combined_arr[date_itr][internal_itr][1], color="magenta")
            if results_combined_arr[date_itr][internal_itr][2] == 6: # 6 = royalblue color = temp exceeding
                p14.quad(left=interval['start'][date_itr],right=interval['end'][date_itr],bottom=results_combined_arr[date_itr][internal_itr][0], top=results_combined_arr[date_itr][internal_itr][1], color="royalblue")
    # Add axis labels
    p14.xaxis.axis_label = "Date"
    p14.yaxis.axis_label = "Time"
    # Add lines to illustrate daylight. The times are based on the spring mean day https://www.dmi.dk/nyheder/arkiv/nyheder-2012/daglaengden-dykker-under-ti-timer/
    p14.line([interval['start'][0], interval['end'][reader.days]], [8.15, 8.15], line_color="white", line_dash="2 4")
    p14.line([interval['start'][0], interval['end'][reader.days]], [20.15, 20.15], line_color="white", line_dash="2 4")
    # %%%%%%%%% Illustrative COMBINED analysis plot - END %%%%%%%%%
    # %%%%%%%%%%%%%%%%%% Bokeh %%%%%%%%%%%%%%%%%%
    # Make legends clickable
    p1.legend.click_policy = "hide"
    p2.legend.click_policy = "hide"
    p3.legend.click_policy = "hide"
    p4.legend.click_policy = "hide"
    p7.legend.click_policy = "hide"
    # %%%%%%%%% TEXT elements - START %%%%%%%%%
    # divTemplate = Div(text="", width = 800)
    divHeader = Div(text="""<center><h1>Drone planning using weather data</h1><br /><h2>Data: IBM, %s, %0d</h2><p><i>Data visualzation and analysis by <a href="https://github.com/TobiasLundby" target="_blank"><NAME></a>, 2018</i></p></center>""" % (reader.city, reader.year)) # , width=200, height=100
    divVisualization = Div(text="""<h2>Visualization</h2>""") # , width=200, height=100
    divWind = Div(text="""<h3>Wind</h3>""") # , width=200, height=100
    divTemp = Div(text="""<h3>Temperature</h3>""") # , width=200, height=100
    divPrecipitation = Div(text="""<h3>Precipitation</h3>""") # , width=200, height=100
    divIndividual = Div(text="""<h3>Individual analysis</h3>""") # , width=200, height=100
    divCombined = Div(text="""<h3>Combined analysis</h3><p>Wind and temperature excluding percipitation and snow</p>""", width = reader.plotWidth) # , width=200, height=100
    divAnalysis = Div(text="""<h2>Data analysis</h2>""") # , width=200, height=100
    divP14Title = Div(text="<h3>Illustrative weather analysis - combined wind and temperature</h3>")
    divExplanationP14 = Div(text="""<p><center>light green = flying OK<br>red = flying not OK; wind, icing, precipitation, snowfall, and temperature exceeding limits<br>orange = flying disencouraged, wind exceeding limit<br>brown = flying disencouraged, icing risk<br>pink = flying disencouraged, rain exceeding limit<br>magenta = flying disencouraged, snowfall exceeding limit<br>blue = flying disencouraged, temperature exceeding limit<br><i>The dashed white lines represents the avg spring daytime (08:09-20:09); source dmi.dk</i></center></p>""")
    # %%%%%%%%% TEXT elements - END %%%%%%%%%
    # %%%%%%%%% Generate layout %%%%%%%%%
    #p = column(widgetbox(divExplanation),s1,s2)
    p = layout(children=[
        [widgetbox(divHeader)],
        [widgetbox(divVisualization)],
        [widgetbox(divWind)],
        [p1],
        [p8,p9],
        [widgetbox(divTemp)],
        [p2],
        [p10,p11],
        [widgetbox(divPrecipitation)],
        [p3],
        [p4],
        [widgetbox(divAnalysis)],
        [widgetbox(divIndividual)],
        [p5,p6],
        [p7],
        [widgetbox(divCombined)],
        [p12],
        [p13],
        [divP14Title],
        [p14],
        [divExplanationP14]
    ], sizing_mode='scale_width') # None for no show and , sizing_mode='stretch_both'
    # p = layout(children=[[p1],
    #                      [p14]],
    #            sizing_mode='scale_width') # None for no show and , sizing_mode='stretch_both'
    # show the results
    show(p)
### Main end
|
StarcoderdataPython
|
12803604
|
<reponame>Elzei/show-off
from struct import pack, unpack, calcsize
dane = open('dane.dat', 'w')
toWrite = pack('c5shhl', 'w', 'ala ',
2, 4, 12)
dane.write(toWrite)
dane.close()
dane = open('dane.dat', 'r')
toRead = unpack('c5shhl', dane.read())
print toRead
|
StarcoderdataPython
|
5179266
|
# The following script contains classes with necessary Blender operators
# for the Neural material approach in material generation and editing.
#
# Code from Neural Material paper is stored in neuralmaterial directory
# and is available on the following repository: https://github.com/henzler/neuralmaterial
import os
import subprocess
import sys
import shutil
from pathlib import Path
import time
import bpy
from bpy.types import Operator
from bpy_extras.io_utils import ImportHelper
# Directory containing this script, resolved so bundled resources are found
# regardless of Blender's current working directory.
base_script_path = Path(__file__).parent.resolve()
# Python interpreter inside the bundled virtualenv (Windows venv layout).
PYTHON_EXE = os.path.join(str(base_script_path), 'venv\\Scripts\\python.exe')
def check_remove_img(name):
    # Drop a previously loaded image datablock of this name so a fresh
    # load does not create a numbered duplicate (e.g. "name.001").
    image = bpy.data.images.get(name)
    if image is not None:
        bpy.data.images.remove(image)
# Function for updating textures during material generation.
def update_neural(obj, base_path):
    """Refresh the neural material's texture nodes from images in *base_path*.

    If *obj* is given, a per-object copy of the template "neural_mat"
    material is created (or reused); otherwise the template itself is
    updated. Images are only (re)loaded when ``albedo.png`` exists in
    *base_path*, i.e. when a generation pass has produced output.
    """
    if obj:
        base_name = f"{obj.name}_neural_mat"
        if base_name not in bpy.data.materials:
            mat = bpy.data.materials["neural_mat"].copy()
            mat.name = base_name
        else:
            mat = bpy.data.materials[base_name]
    else:
        base_name = "base_neural"
        mat = bpy.data.materials["neural_mat"]
    nodes = mat.node_tree.nodes
    # Map name -> image-texture node; node names follow the template's
    # creation order (albedo, specular, rough, normal).
    texture_nodes = {
        'albedo': nodes.get("Image Texture"),
        'specular': nodes.get("Image Texture.001"),
        'rough': nodes.get("Image Texture.002"),
        'normal': nodes.get("Image Texture.003"),
    }
    if not os.path.isfile(os.path.join(base_path, 'albedo.png')):
        return
    # The render preview is loaded for display but not attached to a node.
    _load_neural_image(base_path, base_name, 'render')
    for map_name, node in texture_nodes.items():
        node.image = _load_neural_image(base_path, base_name, map_name)


def _load_neural_image(base_path, base_name, map_name):
    # Replace any stale datablock, then load the new image under a
    # deterministic per-material name and return it.
    img_name = f'{base_name}_{map_name}.png'
    check_remove_img(img_name)
    img = bpy.data.images.load(os.path.join(base_path, f'{map_name}.png'))
    img.name = img_name
    return img
def replace_file(src_path, dst_path, retries=10, sleep=0.1):
    """Replace *dst_path* with *src_path*, retrying transient failures.

    On Windows, ``os.replace`` can fail while another process still holds
    the destination file open, so the call is retried ``retries`` times
    with a ``sleep``-second pause between attempts.

    Args:
        src_path: file to move into place.
        dst_path: destination path (overwritten if it exists).
        retries: maximum number of attempts.
        sleep: seconds to wait between attempts.

    Raises:
        OSError: if the replace still fails after the final attempt.
    """
    for attempt in range(retries):
        try:
            os.replace(src_path, dst_path)
        except OSError:
            # Out of retries: surface the real error instead of silently
            # continuing with the file not replaced.
            if attempt == retries - 1:
                raise
            time.sleep(sleep)
        else:
            return
class MAT_OT_NEURAL_GetInterpolations(Operator):
    """Launch a background job that generates latent-space interpolations
    for the current neural material."""
    bl_idname = "neural.get_interpolations"
    bl_label = "Get interpolations"
    bl_description = "Generate interpolations in discovered directions"

    # Handle to the running interpolation subprocess; polled by other
    # operators to serialize jobs.
    _popen = None

    @classmethod
    def poll(self, context):
        # Enabled only once a material has been generated (progress text
        # contains "Material").
        return "Material" in bpy.context.scene.neural_properties.progress

    def execute(self, context):
        neural = bpy.context.scene.neural_properties
        in_dir = neural.directory
        weight_dir = os.path.join(neural.directory, 'out', 'weights.ckpt')
        model_path = './trainings/Neuralmaterial'  # relative to the neuralmaterial cwd below
        # Cap the interpolation resolution at 1024 on each axis.
        max_w = min(neural.w_res, 1024)
        max_h = min(neural.h_res, 1024)
        # Call to generate texture maps
        process = subprocess.Popen([PYTHON_EXE, '-u', './scripts/get_interpolations.py',
                        '--model', model_path,
                        '--input_path', in_dir,
                        '--weight_path', weight_dir,
                        '--h', str(max_h),
                        '--w', str(max_w)], stdout=subprocess.PIPE, cwd=str(Path(base_script_path, 'neuralmaterial')))
        MAT_OT_NEURAL_GetInterpolations._popen = process
        neural.progress = 'Generating interpolations ...'
        # NOTE(review): modal_status_updater is defined elsewhere; it
        # presumably monitors the subprocess and updates progress - confirm.
        bpy.ops.wm.modal_status_updater()
        return {'FINISHED'}
class MAT_OT_NEURAL_FileBrowser(Operator, ImportHelper):
    """File browser operator"""
    bl_idname = "neural.file_browser"
    bl_label = "Selects folder with data"
    filename_ext = ""

    def invoke(self, context, event):
        # Open the file selector at the previously chosen directory.
        self.filepath = bpy.context.scene.neural_properties.directory
        context.window_manager.fileselect_add(self)
        return {'RUNNING_MODAL'}

    def execute(self, context):
        props = bpy.context.scene.neural_properties
        chosen_dir = os.path.dirname(self.properties.filepath)
        props.directory = chosen_dir
        active_obj = bpy.context.active_object
        if active_obj:
            # Store base material path for later saving
            active_obj["Neural_Path"] = chosen_dir
        # A previously generated material lives in an 'out' subfolder.
        out_dir = os.path.join(chosen_dir, 'out')
        if os.path.isdir(out_dir):
            props.progress = "Material found."
            update_neural(active_obj, out_dir)
        else:
            props.progress = "Ready to generate."
        return {'FINISHED'}
class MAT_OT_NEURAL_Generator(Operator):
    """Launch the training job that generates the base neural material
    from the selected flash images."""
    bl_idname = "neural.generator"
    bl_label = "Generate neural material"
    bl_description = "Generate base material from flash images"

    # Handle to the running training subprocess; shared gate used by
    # poll() here and by the stop operator.
    _popen = None

    @classmethod
    def poll(self, context):
        # Only one generation/interpolation job may run at a time.
        return MAT_OT_NEURAL_Generator._popen is None and MAT_OT_NEURAL_GetInterpolations._popen is None

    def execute(self, context):
        neural = bpy.context.scene.neural_properties
        in_dir = neural.directory
        out_dir = os.path.join(neural.directory, 'out')
        model_path = './trainings/Neuralmaterial'  # relative to the neuralmaterial cwd below
        N = neural.num_rend  # NOTE(review): unused in this method - confirm before removing
        epochs = str(neural.epochs)
        # Call to generate texture maps
        process = subprocess.Popen([PYTHON_EXE, '-u', './scripts/test.py',
                        '--model', model_path,
                        '--input_path', in_dir,
                        '--output_path', out_dir,
                        '--epochs', epochs,
                        '--h', str(neural.h_res),
                        '--w', str(neural.w_res)], stdout=subprocess.PIPE, cwd=str(Path(base_script_path, 'neuralmaterial')))
        MAT_OT_NEURAL_Generator._popen = process
        neural.progress = 'Epoch: [{}/{}] Loss: 0.0 0.0'.format(1, neural.epochs)
        # NOTE(review): modal_status_updater is defined elsewhere; it
        # presumably tracks subprocess output and progress - confirm.
        bpy.ops.wm.modal_status_updater()
        return {'FINISHED'}
class MAT_OT_NEURAL_Reseed(Operator):
    """Regenerate the learned material with a new random seed."""
    bl_idname = "neural.reseed"
    bl_label = "Neural material reseed"
    bl_description = "Generate new learned material with new seed"

    @classmethod
    def poll(self, context):
        # Enabled only once a material has been generated.
        return "Material" in bpy.context.scene.neural_properties.progress

    def execute(self, context):
        neural = bpy.context.scene.neural_properties
        in_dir = neural.directory
        out_dir = os.path.join(neural.directory, 'out')
        # Reseeding requires the previously trained weights on disk.
        if not Path(out_dir, 'weights.ckpt').is_file():
            neural.progress = 'Material not generated or corrupted.'
            return {'FINISHED'}
        model_path = './trainings/Neuralmaterial'  # relative to the neuralmaterial cwd below
        epochs = str(neural.epochs)
        # Call to generate texture maps
        process = subprocess.Popen([PYTHON_EXE, '-u', './scripts/test.py',
                        '--model', model_path,
                        '--input_path', in_dir,
                        '--output_path', out_dir,
                        '--epochs', epochs,
                        '--h', str(neural.h_res),
                        '--w', str(neural.w_res),
                        '--seed', str(neural.seed),
                        '--reseed'], stdout=subprocess.PIPE, cwd=str(Path(base_script_path, 'neuralmaterial')))
        # Stored on the Generator class (not this one): presumably so the
        # shared poll()/stop operators also cover reseed jobs - confirm.
        MAT_OT_NEURAL_Generator._popen = process
        neural.progress = 'Reseeding material with {}'.format(neural.seed)
        bpy.ops.wm.modal_status_updater()
        return {'FINISHED'}
class MAT_OT_NEURAL_EditMove(Operator):
    bl_idname = "neural.edit_move"
    bl_label = "Move material in desired material directions."
    bl_description = "Finds 9 neighbouring materials in latent space directions from the newly chosen one."

    # Which interpolation direction was picked in the UI (used as a file
    # name prefix below).
    direction: bpy.props.StringProperty(default="")

    @classmethod
    def poll(self, context):
        # Enabled only once a material has been generated.
        return "Material" in bpy.context.scene.neural_properties.progress

    def preprocess(self, context):
        # Unlink the currently loaded image datablocks so the swapped-in
        # files reload cleanly.
        if bpy.context.active_object:
            name = f"{bpy.context.active_object.name}_neural"
        else:
            name = "neural"
        # First unlink files
        check_remove_img(f'{name}_render.png')
        check_remove_img(f'{name}_albedo.png')
        check_remove_img(f'{name}_rough.png')
        check_remove_img(f'{name}_specular.png')
        check_remove_img(f'{name}_normal.png')

    def execute(self, context):
        # Promote the chosen interpolated material to the current one:
        # back up the current files as old_*, move the chosen interpolation
        # files into place, then kick off a new interpolation job around
        # the new point.
        gan = bpy.context.scene.neural_properties
        interp_dir = os.path.join(gan.directory, 'interps')
        self.preprocess(context)
        # Paths of the chosen interpolated material (prefix encodes the
        # direction pair selected in the UI).
        new_weight_path = os.path.join(interp_dir, f'{self.direction}_{gan.direction}_weights.ckpt')
        new_render_path = os.path.join(interp_dir, f'{self.direction}_{gan.direction}_render.png')
        new_albedo_path = os.path.join(interp_dir, f'{self.direction}_{gan.direction}_albedo.png')
        new_rough_path = os.path.join(interp_dir, f'{self.direction}_{gan.direction}_rough.png')
        new_specular_path = os.path.join(interp_dir, f'{self.direction}_{gan.direction}_specular.png')
        new_normal_path = os.path.join(interp_dir, f'{self.direction}_{gan.direction}_normal.png')
        # Rename old files (kept as old_* so the revert operator can undo
        # this move).
        out = os.path.join(gan.directory, 'out')
        old_weight_path = os.path.join(out, 'weights.ckpt')
        replace_file(old_weight_path, os.path.join(out, 'old_weights.ckpt'))
        old_render_path = os.path.join(out, 'render.png')
        replace_file(old_render_path, os.path.join(out, 'old_render.png'))
        old_albedo_path = os.path.join(out, 'albedo.png')
        replace_file(old_albedo_path, os.path.join(out, 'old_albedo.png'))
        old_rough_path = os.path.join(out, 'rough.png')
        replace_file(old_rough_path, os.path.join(out, 'old_rough.png'))
        old_specular_path = os.path.join(out, 'specular.png')
        replace_file(old_specular_path, os.path.join(out, 'old_specular.png'))
        old_normal_path = os.path.join(out, 'normal.png')
        replace_file(old_normal_path, os.path.join(out, 'old_normal.png'))
        # Copy and replace old files
        shutil.move(new_weight_path, old_weight_path)
        shutil.move(new_render_path, old_render_path)
        shutil.move(new_albedo_path, old_albedo_path)
        shutil.move(new_rough_path, old_rough_path)
        shutil.move(new_specular_path, old_specular_path)
        shutil.move(new_normal_path, old_normal_path)
        # Update material textures
        update_neural(bpy.context.active_object, out)
        in_dir = os.path.join(gan.directory)
        weight_dir = os.path.join(gan.directory, 'out', 'weights.ckpt')
        model_path = './trainings/Neuralmaterial'  # relative to the neuralmaterial cwd below
        # Cap the interpolation resolution at 1024 on each axis.
        max_h = min(gan.h_res, 1024)
        max_w = min(gan.w_res, 1024)
        # Call to generate texture maps
        process = subprocess.Popen([PYTHON_EXE, '-u', './scripts/get_interpolations.py',
                        '--model', model_path,
                        '--input_path', in_dir,
                        '--weight_path', weight_dir,
                        '--h', str(max_h),
                        '--w', str(max_w)], stdout=subprocess.PIPE, cwd=str(Path(base_script_path, 'neuralmaterial')))
        MAT_OT_NEURAL_GetInterpolations._popen = process
        gan.progress = 'Generating interpolations ...'
        bpy.ops.wm.modal_status_updater()
        return {'FINISHED'}
class MAT_OT_NEURAL_StopGenerator(Operator):
    """Terminate the running material-generation subprocess."""
    bl_idname = "neural.stop_generator"
    bl_label = "Stop generator material"
    bl_description = "Stop generate base material from flash images."

    @classmethod
    def poll(self, context):
        # Only meaningful while a generator subprocess handle exists.
        return MAT_OT_NEURAL_Generator._popen

    def execute(self, context):
        generator_process = MAT_OT_NEURAL_Generator._popen
        generator_process.terminate()
        return {'FINISHED'}
class MAT_OT_NEURAL_RevertMaterial(Operator):
    """Restore the previous material iteration from its old_* backups."""
    bl_idname = "neural.revert_material"
    bl_label = "Revert edited material"
    bl_description = "Trys to revert a material to older iteration if possible"

    # Files managed per material iteration, in restore order.
    _MANAGED_FILES = ('weights.ckpt', 'render.png', 'albedo.png',
                      'rough.png', 'specular.png', 'normal.png')

    @classmethod
    def poll(self, context):
        # Enabled only when a material exists and a backup is on disk.
        props = bpy.context.scene.neural_properties
        return "Material" in props.progress and \
            os.path.isfile(os.path.join(props.directory, 'out', 'old_render.png'))

    def preprocess(self, context):
        # Unlink current image datablocks so the restored files reload cleanly.
        if bpy.context.active_object:
            name = f"{bpy.context.active_object.name}_neural"
        else:
            name = "neural"
        for map_name in ('render', 'albedo', 'rough', 'specular', 'normal'):
            check_remove_img(f'{name}_{map_name}.png')

    def execute(self, context):
        gan = bpy.context.scene.neural_properties
        self.preprocess(context)
        out = os.path.join(gan.directory, 'out')
        # Move each old_* backup over the current file.
        for filename in self._MANAGED_FILES:
            backup_path = os.path.join(out, 'old_' + filename)
            current_path = os.path.join(out, filename)
            shutil.move(backup_path, current_path)
        # Update material textures
        update_neural(bpy.context.active_object, out)
        gan.progress = 'Material reverted'
        return {'FINISHED'}
|
StarcoderdataPython
|
4879906
|
<filename>pythia/learned/bonds.py
import tensorflow.keras as keras
import tensorflow.keras.backend as K
import numpy as np
import tensorflow as tf
@tf.custom_gradient
def _custom_eigvecsh(x):
    """Return the eigenvectors of symmetric matrix batch `x` with a sanitized gradient."""
    # increase the stability of the eigh calculation by removing nans
    # and infs (nans occur when there are two identical eigenvalues,
    # for example) and clipping the gradient magnitude
    (evals, evecs) = tf.linalg.eigh(x)
    def grad(dvecs):
        # Zero out non-finite entries of the upstream gradient, then clip
        # each component to [-1, 1].
        # NOTE(review): this passes the cleaned upstream gradient straight
        # through rather than computing the analytic eigh gradient — it
        # appears to be a deliberate stability hack; confirm before changing.
        dvecs = tf.where(tf.math.is_finite(dvecs), dvecs, tf.zeros_like(dvecs))
        dvecs = K.clip(dvecs, -1, 1)
        return dvecs
    # Eigenvalues are discarded; only eigenvectors are exposed.
    return evecs, grad
@tf.custom_gradient
def _ignore_nan_gradient(x):
    """Identity in the forward pass; zeroes non-finite upstream gradients."""
    forward = tf.identity(x)

    def grad(upstream):
        finite_mask = tf.math.is_finite(upstream)
        return tf.where(finite_mask, upstream, tf.zeros_like(upstream))

    return forward, grad
def _diagonalize(xyz, mass):
    """Rotate neighborhoods into the principal-axis frame of their inertia tensor.

    Returns ``(rotated_xyz, I_s, rotations)``: the rotated coordinates, the
    per-neighborhood inertia tensors, and the rotation matrices used.
    """
    # Squared distance of each point from the origin, shaped for broadcasting
    # against the (3, 3) inertia-tensor terms below.
    rsq = K.expand_dims(K.sum(xyz**2, axis=-1, keepdims=True), -1)
    # xyz::(..., num_neighbors, 3)
    # f1, f2::(..., num_neighbors, 3, 3)
    f1 = np.eye(3)*rsq
    f2 = K.expand_dims(xyz, -2)*K.expand_dims(xyz, -1)
    # mass::(..., num_neighbors)
    expanded_mass = K.expand_dims(K.expand_dims(mass, -1), -1)
    # I_s::(..., 3, 3)
    # Mass-weighted inertia tensor: sum_i m_i * (r_i^2 * I - r_i r_i^T).
    I_s = K.sum((f1 - f2)*expanded_mass, -3)
    # rotations::(..., 3, 3)
    # Eigenvectors of the inertia tensor (NaN-safe custom gradient above).
    rotations = _custom_eigvecsh(I_s)
    # swap z for -z when an inversion occurs
    # (eigh can return an improper rotation, det == -1; flipping the last
    # column restores det == +1).
    det_sign = tf.linalg.det(rotations)
    inversions = K.stack(
        [K.ones_like(det_sign), K.ones_like(det_sign), det_sign], axis=-1)
    rotations = rotations*K.expand_dims(inversions, -2)
    # Apply the rotation to every point in each neighborhood.
    rotated_xyz = K.sum(K.expand_dims(xyz, -1)*K.expand_dims(rotations, -3), -2)
    return rotated_xyz, I_s, rotations
class InertiaRotation(keras.layers.Layer):
    """Generate rotation-invariant point clouds by orienting via principal axes of inertia
    `InertiaRotation` takes an array of neighborhood points
    (shape `(..., num_neighbors, 3)`) and outputs one or more copies
    which have been rotated according to the principal axes of inertia
    of the neighborhood. It does this using masses that can be varied
    for each point and each rotation.
    For an input of shape `(..., num_neighbors, 3)`,
    `InertiaRotation` produces an output of shape `(...,
    num_rotations, num_neighbors, 3)`.
    Before computing the inertia tensor, points can optionally be
    centered via the `center` argument. A value of `True` centers the
    points as if all masses were 1, a value of `"com"` centers the
    points using the learned masses, and a value of `False` (the
    default) does not center at all.
    :param num_rotations: number of rotations to produce
    :param initial_mass_variance: Variance of the initial mass distribution (mean 1)
    :param center: Center the mass points before computing the inertia tensor (see description above)
    """ # noqa
    def __init__(self, num_rotations=1, initial_mass_variance=.25,
                 center=False, **kwargs):
        self.num_rotations = num_rotations
        self.initial_mass_variance = float(initial_mass_variance)
        # Validate eagerly so a bad value fails at construction, not in call().
        if center not in (False, True, 'com'):
            msg = ('Center argument {} must be a bool or "com" (to '
                   'center using the mass stored in this layer)'.format(center)) # noqa
            raise ValueError(msg)
        self.center = center
        super().__init__(**kwargs)
    def build(self, input_shape):
        # One learned (non-negative) mass per rotation copy and per neighbor.
        mass_shape = [self.num_rotations] + list(input_shape[-2:-1])
        self.mass = self.add_weight(
            'mass', mass_shape, trainable=True,
            initializer=keras.initializers.RandomNormal(1., self.initial_mass_variance),
            constraint=keras.constraints.NonNeg())
        super().build(input_shape)
    def call(self, neighborhood_xyz):
        # Normalize masses so each rotation copy's masses sum to 1; a zero
        # mass sum yields NaNs, whose gradients are suppressed below.
        norm_mass = self.mass/K.sum(self.mass, -1, keepdims=True)
        norm_mass = _ignore_nan_gradient(norm_mass)
        # neighborhood_xyz: (..., num_neighbors, 3) -> (..., num_rotations, num_neighbors, 3)
        # (broadcast against the num_rotations axis of the masses; the old
        # unused `repeats` tiling computation was dead code and is removed)
        neighborhood_xyz = K.expand_dims(neighborhood_xyz, -3)
        if self.center == 'com':
            # mass for each neighborhood is already normalized to sum to 1
            center_of_mass = K.sum(
                neighborhood_xyz*K.expand_dims(norm_mass, -1), -2, keepdims=True)
            neighborhood_xyz = neighborhood_xyz - center_of_mass
        elif self.center:
            # center == True: unweighted centroid (all masses treated as 1).
            neighborhood_xyz = neighborhood_xyz - K.mean(neighborhood_xyz, -2, keepdims=True)
        # Saved as attributes so callers can inspect the intermediate tensors.
        (self.diagonal_xyz, self.inertia_tensors, self.rotations) = \
            _diagonalize(neighborhood_xyz, norm_mass)
        return self.diagonal_xyz
    def compute_output_shape(self, input_shape):
        # (..., num_neighbors, 3)->(..., num_rotations, num_neighbors, 3)
        shape = list(input_shape)
        shape.insert(-2, self.num_rotations)
        return tuple(shape)
    def get_config(self):
        """Return the constructor arguments for layer serialization."""
        config = super().get_config()
        config.update(dict(num_rotations=self.num_rotations,
                           initial_mass_variance=self.initial_mass_variance,
                           center=self.center))
        return config
|
StarcoderdataPython
|
5127441
|
<filename>hyperion/bin_deprec/eval-elbo-ubm.py
#!/usr/bin/env python
"""
Copyright 2018 Johns Hopkins University (Author: <NAME>)
Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""
"""
Evaluate the likelihood of the ubm on some data
"""
import sys
import os
import argparse
import time
import logging
import numpy as np
from hyperion.hyp_defs import float_cpu, config_logger
from hyperion.helpers import SequenceReader as SR
from hyperion.transforms import TransformList
from hyperion.pdfs import DiagGMM
def eval_elbo(
    seq_file, file_list, model_file, preproc_file, output_file, ubm_type, **kwargs
):
    """Evaluate the ELBO of a diagonal-GMM UBM on a set of sequences.

    Reads every sequence from ``seq_file``/``file_list``, accumulates the
    per-sequence ELBO and frame counts, and writes the total and the
    frame-normalized ELBO to ``output_file``.
    """
    sr_args = SR.filter_eval_args(**kwargs)

    # Optional feature pre-processing chain.
    preproc = TransformList.load(preproc_file) if preproc_file is not None else None

    reader = SR(
        seq_file,
        file_list,
        batch_size=1,
        shuffle_seqs=False,
        preproc=preproc,
        **sr_args
    )

    t_start = time.time()

    # Load the UBM either from hyperion's native format or from a Kaldi model.
    if ubm_type == "diag-gmm":
        model = DiagGMM.load(model_file)
    else:
        model = DiagGMM.load_from_kaldi(model_file)
    model.initialize()

    elbo = np.zeros((reader.num_seqs,), dtype=float_cpu())
    num_frames = np.zeros((reader.num_seqs,), dtype=int)
    keys = []
    for i in range(reader.num_seqs):
        feats, key = reader.read_next_seq()
        keys.append(key)
        elbo[i] = model.elbo(feats)
        num_frames[i] = feats.shape[0]

    num_total_frames = np.sum(num_frames)
    total_elbo = np.sum(elbo)
    total_elbo_norm = total_elbo / num_total_frames
    logging.info("Extract elapsed time: %.2f" % (time.time() - t_start))
    s = "Total ELBO: %f\nELBO_NORM %f" % (total_elbo, total_elbo_norm)
    logging.info(s)
    with open(output_file, "w") as f:
        f.write(s)
if __name__ == "__main__":
    # Command-line entry point: build the argument parser, configure
    # logging, and run the ELBO evaluation.
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        fromfile_prefix_chars="@",
        description="Evaluate UBM ELBO",
    )

    parser.add_argument("--seq-file", dest="seq_file", required=True)
    parser.add_argument("--file-list", dest="file_list", required=True)
    parser.add_argument("--preproc-file", dest="preproc_file", default=None)
    parser.add_argument("--model-file", dest="model_file", required=True)
    parser.add_argument("--output-file", dest="output_file", required=True)
    # "diag-gmm" loads a hyperion-native model; "kaldi-diag-gmm" imports one
    # exported from Kaldi.
    parser.add_argument(
        "--ubm-type",
        dest="ubm_type",
        default="diag-gmm",
        choices=["diag-gmm", "kaldi-diag-gmm"],
    )
    # Sequence-reader options (splicing, context, etc.) are added by the
    # SequenceReader helper itself.
    SR.add_argparse_eval_args(parser)
    parser.add_argument(
        "-v", "--verbose", dest="verbose", default=1, choices=[0, 1, 2, 3], type=int
    )
    args = parser.parse_args()
    config_logger(args.verbose)
    # verbose only configures logging; drop it before forwarding the rest.
    del args.verbose

    eval_elbo(**vars(args))
|
StarcoderdataPython
|
225853
|
<gh_stars>0
import random
import mediapipe as mp
import cv2
import torch
# Module-level MediaPipe handles shared by all Model instances.
mpDraw = mp.solutions.drawing_utils
mpPose = mp.solutions.pose
# Instantiated once at import time; process() is called per frame below.
pose = mpPose.Pose()
mpHolistic = mp.solutions.holistic
holistic = mpHolistic.Holistic()
mp_drawing_styles = mp.solutions.drawing_styles
class Model:
    """Wraps three detectors: MediaPipe pose, MediaPipe holistic, and YOLOv5.

    Each ``*_detect`` method takes a BGR image (OpenCV convention), draws its
    detections onto the image in place, and returns ``(metadata, image)``.
    """

    def __init__(self) -> None:
        # Downloads/loads the pretrained small YOLOv5 model via torch hub.
        self.yolo_model = torch.hub.load('ultralytics/yolov5', 'yolov5s', pretrained=True)

    @staticmethod
    def _landmarks_to_metadata(pose_landmarks):
        """Convert a MediaPipe landmark list to ``[{"x": ..., "y": ..., "z": ...}, ...]``."""
        return [{"x": lm.x, "y": lm.y, "z": lm.z} for lm in pose_landmarks.landmark]

    def posenet_detect(self, image):
        """Run pose detection; returns (landmark metadata, annotated image)."""
        imgRGB = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        results = pose.process(imgRGB)
        metadata = []
        if results.pose_landmarks:
            mpDraw.draw_landmarks(image, results.pose_landmarks, mpPose.POSE_CONNECTIONS)
            metadata = self._landmarks_to_metadata(results.pose_landmarks)
        return metadata, image

    def holistic_detect(self, image):
        """Run holistic detection (face + pose); returns (pose metadata, annotated image)."""
        imgRGB = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        results = holistic.process(imgRGB)
        metadata = []
        # Only annotate when both face and pose were found, as before.
        if results.pose_landmarks and results.face_landmarks:
            mpDraw.draw_landmarks(
                image,
                results.face_landmarks,
                mpHolistic.FACEMESH_CONTOURS,
                landmark_drawing_spec=None,
                connection_drawing_spec=mp_drawing_styles
                .get_default_face_mesh_contours_style())
            mpDraw.draw_landmarks(
                image,
                results.pose_landmarks,
                mpHolistic.POSE_CONNECTIONS,
                landmark_drawing_spec=mp_drawing_styles
                .get_default_pose_landmarks_style())
            metadata = self._landmarks_to_metadata(results.pose_landmarks)
        return metadata, image

    def yolov5_detect(self, image):
        """Run YOLOv5 detection; draws boxes/labels, returns (labels, annotated image)."""
        results = self.yolo_model(image)
        labels = list(results.pandas().xyxy[0]['name'])
        boxes = results.xyxy[0].cpu().detach().numpy()
        for bound_rect, label in zip(boxes, labels):
            # randint bounds are inclusive: the upper bound must be 255, not
            # 256, for valid 8-bit color channels (bug fix).
            color = (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))
            top_left = (int(bound_rect[0]), int(bound_rect[1]))
            bottom_right = (int(bound_rect[2]), int(bound_rect[3]))
            image = cv2.rectangle(image, top_left, bottom_right, color, 2)
            cv2.putText(image, label, (top_left[0], top_left[1] - 10),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.9, (36, 255, 12), 2)
        return labels, image
|
StarcoderdataPython
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.