metadata | text
---|---
{
"source": "AABur/python-pip-how-to",
"score": 4
} |
#### File: python-pip-how-to/brain_games/engine.py
```python
import prompt
GAME_STEPS = 3
def run_game(game_module):
"""Brain Games Engine.
Executing game process
Args:
game_module (function): game engine module
"""
say_welcome(game_module.GAME_DESCRIPTION)
user_name = get_user_name()
for step in range(GAME_STEPS):
(game_task, game_answer) = game_module.game_engine()
user_answer = get_user_answer(game_task)
if user_answer != game_answer:
notify_wrong_answer(user_answer, game_answer, user_name)
return
confirm_correct_answer()
congratulate(user_name)
def get_user_name():
"""Get user name dialog.
Returns:
(str): User Name
"""
name = prompt.string("May I have your name? ")
print("Hello, {}!\n".format(name))
return name
def get_user_answer(task):
"""Q&A dialog.
Show the task and request user answer
Args:
task (str): task example
Returns:
(str): User answer
"""
return prompt.string("Question: {}\nYour answer: ".format(task))
def say_welcome(task_msg):
"""Say welcome and print game rules."""
print("Welcome to the Brain Games!\n{}\n".format(task_msg))
def congratulate(user_name):
"""User congratilations.
Args:
user_name (str): User name
"""
print("Congratulations, {}".format(user_name))
def notify_wrong_answer(user_answer, correct_answer, user_name):
"""Wrong answer info fo user.
Inform user about wrong answer
Args:
user_answer (str): User Answer
correct_answer (str): Correct Answer
user_name (str): User Name
"""
print("'{}' is wrong answer ;(. Correct answer was '{}'.".format(
user_answer, correct_answer))
print("Let's try again, {}!".format(user_name))
def confirm_correct_answer():
"""Just confirm correct answer."""
print("Correct!")
```
#### File: brain_games/games/progression.py
```python
import random
GAME_DESCRIPTION = "What number is missing in the progression?"
START_MIN = 1
START_MAX = 20
STEP_MIN = 1
STEP_MAX = 10
PROGRESSION_LENGTH = 10
def game_engine():
"""Progression game Q&A generation.
    It forms an arithmetic progression,
    replacing one of its numbers with two dots ('..').
    Returns:
        task (str): arithmetic progression with one hidden number;
        answer (str): the hidden number
"""
start = random.randint(START_MIN, START_MAX)
step = random.randint(STEP_MIN, STEP_MAX)
    a_progression = [(start + (ind * step))
                     for ind in range(PROGRESSION_LENGTH)]
    hidden = random.randint(1, PROGRESSION_LENGTH)
answer = str(a_progression[hidden - 1])
a_progression[hidden - 1] = ".."
task = " ".join(str(ind) for ind in a_progression)
return (task, answer)
```
#### File: brain_games/scripts/brain_progression.py
```python
from brain_games.engine import run_game
from brain_games.games import progression
def main():
"""Progression game script."""
run_game(progression)
if __name__ == "__main__":
main()
``` |
{
"source": "AABur/python-project-lvl2",
"score": 2
} |
#### File: gendiff/formaters/json.py
```python
import json
def format_json(diff):
return json.dumps(diff)
```
#### File: python-project-lvl2/tests/test_loader.py
```python
import pytest
from gendiff.loader import GendiffFileError, collect_data
@pytest.mark.parametrize(
'file_path',
[
('tests/fixtures/wrong_ext.ttt'),
('tests/fixtures/wrong_json.json'),
('tests/fixtures/wrong_yaml.yaml'),
('file_not_exists'),
],
)
def test_wrong_file(file_path):
    with pytest.raises(GendiffFileError):
        collect_data(file_path)
``` |
{
"source": "AABur/python-project-lvl3",
"score": 3
} |
#### File: python-project-lvl3/page_loader/cli.py
```python
import argparse
def arg_parser():
"""Create a parser for command line arguments .
Returns:
ArgumentParser
"""
parser = argparse.ArgumentParser(
        description='CLI utility to download pages locally from the provided URL.',
)
parser.add_argument(
'-V',
'--version',
action='version',
version='%(prog)s 0.1.0', # noqa: WPS323
)
parser.add_argument(
'-o',
'--output',
type=str,
dest='output_dir',
metavar='[dir]',
default='./',
help='output dir (default: "./")',
)
parser.add_argument(
'page_url',
type=str,
help='page url to download',
)
return parser
```
#### File: python-project-lvl3/page_loader/resources.py
```python
import logging
from pathlib import Path
from urllib.parse import urlparse
import requests
from progress.bar import IncrementalBar
from requests.exceptions import RequestException
from page_loader.exceptions import PLIOError, PLNetworkError
logger = logging.getLogger(__name__)
TAGS = ('link', 'script', 'img')
def fetch_resources(resources: dict, resources_local_dir: Path) -> None:
logger.debug('Start downloading resources')
Path(resources_local_dir).mkdir()
with IncrementalBar(
'Downloading',
max=len(resources),
suffix='%(percent).1f%% [%(elapsed)ds]', # noqa:WPS323
) as bar:
for res_url, res_local in resources.items():
try:
download_file(res_url, res_local, resources_local_dir)
except RequestException:
                logger.error('Failed to access resource', exc_info=True)
raise PLNetworkError
except IOError:
                logger.error('Failed to write resource file', exc_info=True)
raise PLIOError
bar.next() # noqa:B305
logger.debug('Finish downloading resources')
def is_local_resource(page_url, full_resource_url):
return urlparse(full_resource_url).netloc == urlparse(page_url).netloc
def download_file(url: str, local: str, local_dir: Path):
"""Download a file from a URL.
Args:
url (str): file url
local (str): file local name
local_dir (Path): target directory
"""
    response = requests.get(url, stream=True)
    # Surface HTTP errors (4xx/5xx) as RequestException to the caller
    response.raise_for_status()
with open(Path(local_dir, local), 'wb') as file:
for chunk in response.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
file.write(chunk)
``` |
{
"source": "aacaldwell/TWLight",
"score": 2
} |
#### File: TWLight/emails/tests.py
```python
from datetime import datetime, timedelta
from djmail.template_mail import MagicMailBuilder, InlineCSSTemplateMail
from unittest.mock import patch
from django_comments import get_form_target
from django_comments.models import Comment
from django_comments.signals import comment_was_posted
from django.contrib.auth import signals
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core import mail
from django.core.management import call_command
from django.urls import reverse
from django.test import TestCase, RequestFactory
from TWLight.applications.factories import ApplicationFactory
from TWLight.applications.models import Application
from TWLight.resources.factories import PartnerFactory
from TWLight.resources.models import Partner
from TWLight.resources.tests import EditorCraftRoom
from TWLight.users.factories import EditorFactory, UserFactory
from TWLight.users.groups import get_coordinators
from TWLight.users.models import Authorization
# We need to import these in order to register the signal handlers; if we don't,
# when we test that those handler functions have been called, we will get
# False even when they work in real life.
from .tasks import (
send_comment_notification_emails,
send_approval_notification_email,
send_rejection_notification_email,
send_user_renewal_notice_emails,
contact_us_emails,
)
class ApplicationCommentTest(TestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.editor = EditorFactory(user__email="<EMAIL>").user
coordinators = get_coordinators()
cls.coordinator1 = EditorFactory(
user__email="<EMAIL>", user__username="c1"
).user
cls.coordinator2 = EditorFactory(
user__email="<EMAIL>", user__username="c2"
).user
coordinators.user_set.add(cls.coordinator1)
coordinators.user_set.add(cls.coordinator2)
cls.partner = PartnerFactory()
def _create_comment(self, app, user):
CT = ContentType.objects.get_for_model
comm = Comment.objects.create(
content_type=CT(Application),
object_pk=app.pk,
user=user,
user_name=user.username,
comment="Content!",
site=Site.objects.get_current(),
)
comm.save()
return comm
def _set_up_email_test_objects(self):
app = ApplicationFactory(editor=self.editor.editor, partner=self.partner)
factory = RequestFactory()
request = factory.post(get_form_target())
return app, request
def test_comment_email_sending_1(self):
"""
A coordinator posts a comment to an Editor's application and an email
        is sent to that Editor. An email is not sent to the coordinator.
"""
app, request = self._set_up_email_test_objects()
request.user = UserFactory()
self.assertEqual(len(mail.outbox), 0)
comment1 = self._create_comment(app, self.coordinator1)
comment_was_posted.send(sender=Comment, comment=comment1, request=request)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [self.editor.email])
def test_comment_email_sending_2(self):
"""
After a coordinator posts a comment, the Editor posts an additional
comment. An email is sent to the coordinator who posted the earlier
comment. An email is not sent to the editor.
"""
app, request = self._set_up_email_test_objects()
request.user = UserFactory()
self.assertEqual(len(mail.outbox), 0)
_ = self._create_comment(app, self.coordinator1)
comment2 = self._create_comment(app, self.editor)
comment_was_posted.send(sender=Comment, comment=comment2, request=request)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [self.coordinator1.email])
def test_comment_email_sending_3(self):
"""
After the editor and coordinator post a comment, an additional
coordinator posts a comment. One email is sent to the first coordinator,
and a distinct email is sent to the editor.
"""
app, request = self._set_up_email_test_objects()
request.user = UserFactory()
self.assertEqual(len(mail.outbox), 0)
_ = self._create_comment(app, self.coordinator1)
_ = self._create_comment(app, self.editor)
comment3 = self._create_comment(app, self.coordinator2)
comment_was_posted.send(sender=Comment, comment=comment3, request=request)
self.assertEqual(len(mail.outbox), 2)
# Either order of email sending is fine.
try:
self.assertEqual(mail.outbox[0].to, [self.coordinator1.email])
self.assertEqual(mail.outbox[1].to, [self.editor.email])
except AssertionError:
self.assertEqual(mail.outbox[1].to, [self.coordinator1.email])
self.assertEqual(mail.outbox[0].to, [self.editor.email])
def test_comment_email_sending_4(self):
"""
A comment made on an application that's any further along the process
than PENDING (i.e. a coordinator has taken some action on it) should
fire an email to the coordinator who took the last action on it.
"""
app, request = self._set_up_email_test_objects()
request.user = UserFactory()
self.assertEqual(len(mail.outbox), 0)
# Create a coordinator with a test client session
coordinator = EditorCraftRoom(self, Terms=True, Coordinator=True)
self.partner.coordinator = coordinator.user
self.partner.save()
        # Move the application to the QUESTION (under discussion) status
url = reverse("applications:evaluate", kwargs={"pk": app.pk})
response = self.client.post(
url, data={"status": Application.QUESTION}, follow=True
)
comment4 = self._create_comment(app, self.editor)
comment_was_posted.send(sender=Comment, comment=comment4, request=request)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [coordinator.user.email])
def test_comment_email_sending_5(self):
"""
A comment from the applying editor made on an application that
has had no actions taken on it and no existing comments should
not fire an email to anyone.
"""
app, request = self._set_up_email_test_objects()
request.user = UserFactory()
self.assertEqual(len(mail.outbox), 0)
comment5 = self._create_comment(app, self.editor)
comment_was_posted.send(sender=Comment, comment=comment5, request=request)
self.assertEqual(len(mail.outbox), 0)
def test_comment_email_sending_6(self):
"""
        If the coordinator for a Partner changes, the previous
        coordinator should no longer receive comment notification
        emails; the new coordinator should receive them instead.
"""
app, request = self._set_up_email_test_objects()
request.user = UserFactory()
self.assertEqual(len(mail.outbox), 0)
# Setting up coordinator1 as coordinator for partner
self.partner.coordinator = self.coordinator1
self.partner.save()
# Coordinator posts a comment, then Editor posts an additional comment
# An email is sent to the coordinator who posted the earlier comment
_ = self._create_comment(app, self.coordinator1)
comment1 = self._create_comment(app, self.editor)
comment_was_posted.send(sender=Comment, comment=comment1, request=request)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [self.coordinator1.email])
# Create a coordinator with a test client session
# and set it as the coordinator for partner
coordinator = EditorCraftRoom(self, Terms=True, Coordinator=True)
self.partner.coordinator = coordinator.user
self.partner.save()
# Evaluate the application
url = reverse("applications:evaluate", kwargs={"pk": app.pk})
response = self.client.post(
url, data={"status": Application.QUESTION}, follow=True
)
# Editor makes another comment
# Now the New Coordinator will receive the Email
comment2 = self._create_comment(app, self.editor)
comment_was_posted.send(sender=Comment, comment=comment2, request=request)
self.assertEqual(mail.outbox[1].to, [coordinator.user.email])
# We'd like to mock out send_comment_notification_emails and test that
# it is called when comment_was_posted is fired, but we can't; the signal
# handler is attached to the real send_comment_notification_emails, not
# the mocked one.
class ApplicationStatusTest(TestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.coordinator = EditorFactory().user
coordinators = get_coordinators()
coordinators.user_set.add(cls.coordinator)
@patch("TWLight.emails.tasks.send_approval_notification_email")
def test_approval_calls_email_function(self, mock_email):
app = ApplicationFactory(status=Application.PENDING)
app.status = Application.APPROVED
app.save()
self.assertTrue(mock_email.called)
@patch("TWLight.emails.tasks.send_approval_notification_email")
def test_reapproval_does_not_call_email_function(self, mock_email):
"""
Saving an Application with APPROVED status, when it already had an
APPROVED status, should not re-send the email.
"""
app = ApplicationFactory(status=Application.PENDING)
app.status = Application.APPROVED
app.save()
app.save()
self.assertEqual(mock_email.call_count, 1)
@patch("TWLight.emails.tasks.send_rejection_notification_email")
def test_rejection_calls_email_function(self, mock_email):
app = ApplicationFactory(status=Application.PENDING)
app.status = Application.NOT_APPROVED
app.save()
self.assertTrue(mock_email.called)
@patch("TWLight.emails.tasks.send_rejection_notification_email")
def test_rerejection_does_not_call_email_function(self, mock_email):
app = ApplicationFactory(status=Application.PENDING)
app.status = Application.NOT_APPROVED
app.save()
app.save()
self.assertEqual(mock_email.call_count, 1)
def test_pending_does_not_call_email_function(self):
"""
Applications saved with a PENDING status should not generate email.
"""
orig_outbox = len(mail.outbox)
_ = ApplicationFactory(status=Application.PENDING)
self.assertEqual(len(mail.outbox), orig_outbox)
def test_question_does_not_call_email_function(self):
"""
Applications saved with a QUESTION status should not generate email.
"""
orig_outbox = len(mail.outbox)
_ = ApplicationFactory(status=Application.QUESTION)
self.assertEqual(len(mail.outbox), orig_outbox)
def test_sent_does_not_call_email_function(self):
"""
Applications saved with a SENT status should not generate email.
"""
orig_outbox = len(mail.outbox)
ApplicationFactory(status=Application.SENT, sent_by=self.coordinator)
self.assertEqual(len(mail.outbox), orig_outbox)
@patch("TWLight.emails.tasks.send_waitlist_notification_email")
def test_waitlist_calls_email_function(self, mock_email):
partner = PartnerFactory(status=Partner.WAITLIST)
app = ApplicationFactory(status=Application.PENDING, partner=partner)
self.assertTrue(mock_email.called)
partner.delete()
app.delete()
@patch("TWLight.emails.tasks.send_waitlist_notification_email")
def test_nonwaitlist_does_not_call_email_function(self, mock_email):
partner = PartnerFactory(status=Partner.AVAILABLE)
app = ApplicationFactory(status=Application.PENDING, partner=partner)
self.assertFalse(mock_email.called)
partner.delete()
app.delete()
partner = PartnerFactory(status=Partner.NOT_AVAILABLE)
app = ApplicationFactory(status=Application.PENDING, partner=partner)
self.assertFalse(mock_email.called)
partner.delete()
app.delete()
@patch("TWLight.emails.tasks.send_waitlist_notification_email")
def test_waitlisting_partner_calls_email_function(self, mock_email):
"""
Switching a Partner to WAITLIST status should call the email function
for apps to that partner with open statuses.
"""
partner = PartnerFactory(status=Partner.AVAILABLE)
app = ApplicationFactory(status=Application.PENDING, partner=partner)
self.assertFalse(mock_email.called)
partner.status = Partner.WAITLIST
partner.save()
self.assertTrue(mock_email.called)
mock_email.assert_called_with(app)
@patch("TWLight.emails.tasks.send_waitlist_notification_email")
def test_waitlisting_partner_does_not_call_email_function(self, mock_email):
"""
Switching a Partner to WAITLIST status should NOT call the email
function for apps to that partner with closed statuses.
"""
partner = PartnerFactory(status=Partner.AVAILABLE)
app = ApplicationFactory(status=Application.APPROVED, partner=partner)
app = ApplicationFactory(status=Application.NOT_APPROVED, partner=partner)
app = ApplicationFactory(
status=Application.SENT, partner=partner, sent_by=self.coordinator
)
self.assertFalse(mock_email.called)
partner.status = Partner.WAITLIST
partner.save()
self.assertFalse(mock_email.called)
class ContactUsTest(TestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.editor = EditorFactory(user__email="<EMAIL>").user
@patch("TWLight.emails.tasks.contact_us_emails")
def test_contact_us_emails(self, mock_email):
factory = RequestFactory()
request = factory.post(get_form_target())
request.user = UserFactory()
editor = EditorFactory()
reply_to = ["<EMAIL>"]
cc = ["<EMAIL>"]
self.assertEqual(len(mail.outbox), 0)
mail_instance = MagicMailBuilder(template_mail_cls=InlineCSSTemplateMail)
email = mail_instance.contact_us_email(
"<EMAIL>",
{"editor_wp_username": editor.wp_username, "body": "This is a test email"},
)
email.extra_headers["Reply-To"] = ", ".join(reply_to)
email.extra_headers["Cc"] = ", ".join(cc)
email.send()
self.assertEqual(len(mail.outbox), 1)
def test_user_submit_contact_us_emails(self):
EditorCraftRoom(self, Terms=True, Coordinator=False)
self.assertEqual(len(mail.outbox), 0)
contact_us_url = reverse("contact")
contact_us = self.client.get(contact_us_url, follow=True)
contact_us_form = contact_us.context["form"]
data = contact_us_form.initial
data["email"] = "<EMAIL>"
data["message"] = "This is a test"
data["cc"] = True
data["submit"] = True
self.client.post(contact_us_url, data)
self.assertEqual(len(mail.outbox), 1)
def test_not_logged_in_user_submit_contact_us_emails(self):
self.assertEqual(len(mail.outbox), 0)
contact_us_url = reverse("contact")
contact_us = self.client.get(contact_us_url, follow=True)
contact_us_form = contact_us.context["form"]
data = contact_us_form.initial
data["email"] = "<EMAIL>"
data["message"] = "This is a test"
data["submit"] = True
data["cc"] = True
self.client.post(contact_us_url, data)
self.assertEqual(len(mail.outbox), 0)
class UserRenewalNoticeTest(TestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
editor = EditorFactory(user__email="<EMAIL>")
cls.user = editor.user
cls.coordinator = EditorFactory().user
coordinators = get_coordinators()
coordinators.user_set.add(cls.coordinator)
cls.partner = PartnerFactory()
cls.authorization = Authorization()
cls.authorization.user = cls.user
cls.authorization.authorizer = cls.coordinator
cls.authorization.date_expires = datetime.today() + timedelta(weeks=1)
cls.authorization.save()
cls.authorization.partners.add(cls.partner)
def test_single_user_renewal_notice(self):
"""
        Given one authorization that expires in one week, ensure
that our email task sends an email to that user.
"""
call_command("user_renewal_notice")
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [self.user.email])
def test_user_renewal_notice_disabled(self):
"""
Users have the option to disable renewal notices. If users have
disabled emails, we shouldn't send them one.
"""
self.user.userprofile.send_renewal_notices = False
self.user.userprofile.save()
call_command("user_renewal_notice")
self.assertEqual(len(mail.outbox), 0)
def test_user_renewal_notice_doesnt_duplicate(self):
"""
If we run the command a second time, the same user shouldn't receive
a second email.
"""
call_command("user_renewal_notice")
self.assertEqual(len(mail.outbox), 1)
call_command("user_renewal_notice")
self.assertEqual(len(mail.outbox), 1)
def test_user_renewal_notice_past_date(self):
"""
If the authorization expired before today, the user shouldn't
receive a notice.
"""
self.authorization.date_expires = datetime.today() - timedelta(weeks=1)
self.authorization.save()
call_command("user_renewal_notice")
self.assertEqual(len(mail.outbox), 0)
def test_user_renewal_notice_future_date(self):
"""
If the authorization doesn't expire for months, the user
shouldn't receive a notice.
"""
self.authorization.date_expires = datetime.today() + timedelta(weeks=8)
self.authorization.save()
call_command("user_renewal_notice")
self.assertEqual(len(mail.outbox), 0)
def test_user_renewal_notice_future_date_1(self):
"""
If we have multiple authorizations to send emails for, let's make
sure we send distinct emails to the right places.
"""
editor2 = EditorFactory(user__email="<EMAIL>")
authorization2 = Authorization()
authorization2.user = editor2.user
authorization2.authorizer = self.coordinator
authorization2.date_expires = datetime.today() + timedelta(weeks=1)
authorization2.save()
authorization2.partners.add(self.partner)
call_command("user_renewal_notice")
self.assertEqual(len(mail.outbox), 2)
# Make sure that the two emails went to the two expected
# email addresses.
# This looks a little complicated because mail.outbox[0].to is a
# (one element) list, and we need to compare sets to ensure we've
# got 1 of each email.
self.assertEqual(
{mail.outbox[0].to[0], mail.outbox[1].to[0]},
{"<EMAIL>", "<EMAIL>"},
)
def test_user_renewal_notice_after_renewal(self):
"""
If a user renews their authorization, we want to remind
them again when it runs out.
"""
call_command("user_renewal_notice")
self.assertEqual(len(mail.outbox), 1)
self.authorization.refresh_from_db()
self.assertTrue(self.authorization.reminder_email_sent)
        # We already have an authorization, so let's set up
# an application that 'corresponds' to it.
application = ApplicationFactory(
editor=self.user.editor,
sent_by=self.coordinator,
partner=self.partner,
status=Application.SENT,
requested_access_duration=1,
)
application.save()
# File a renewal, approve it, and send it.
self.partner.renewals_available = True
self.partner.save()
renewed_app = application.renew()
renewed_app.status = application.APPROVED
renewed_app.save()
renewed_app.status = application.SENT
renewed_app.sent_by = self.coordinator
renewed_app.save()
        # Approving and sending the renewal will have emailed the
        # user, so we expect 2 emails now.
self.assertEqual(len(mail.outbox), 2)
# We've correctly marked reminder_email_sent as False
self.authorization.refresh_from_db()
self.assertFalse(self.authorization.reminder_email_sent)
# And calling the command should send a third email.
call_command("user_renewal_notice")
self.assertEqual(len(mail.outbox), 3)
class CoordinatorReminderEmailTest(TestCase):
@classmethod
def setUpTestData(cls):
super().setUpTestData()
editor = EditorFactory()
cls.user = editor.user
editor2 = EditorFactory()
cls.user2 = editor2.user
cls.coordinator = EditorFactory(user__email="<EMAIL>").user
coordinators = get_coordinators()
coordinators.user_set.add(cls.coordinator)
cls.partner = PartnerFactory(coordinator=cls.coordinator)
cls.partner2 = PartnerFactory(coordinator=cls.coordinator)
def test_send_coordinator_reminder_email(self):
ApplicationFactory(
partner=self.partner, status=Application.PENDING, editor=self.user.editor
)
# Coordinator only wants reminders for apps under discussion
self.coordinator.userprofile.pending_app_reminders = False
self.coordinator.userprofile.approved_app_reminders = False
self.coordinator.userprofile.save()
call_command("send_coordinator_reminders")
self.assertEqual(len(mail.outbox), 0)
ApplicationFactory(
partner=self.partner2, status=Application.QUESTION, editor=self.user2.editor
)
call_command("send_coordinator_reminders")
self.assertEqual(len(mail.outbox), 1)
# We include the count for all waiting (PENDING, QUESTION,
# APPROVED) apps whenever we send an email, but trigger
# emails only based on preferences i.e. if a coordinator
# has enabled reminders only for QUESTION, we send a
# reminder only when we have an app of status: QUESTION,
# but include info on all apps in the email.
self.assertNotIn("1 pending application", mail.outbox[0].body)
self.assertIn("1 under discussion application", mail.outbox[0].body)
self.assertNotIn("1 approved application", mail.outbox[0].body)
ApplicationFactory(
partner=self.partner, status=Application.APPROVED, editor=self.user2.editor
)
ApplicationFactory(
partner=self.partner2,
status=Application.SENT,
editor=self.user.editor,
sent_by=self.coordinator,
)
# Clear mail outbox since approvals send emails
mail.outbox = []
        # Coordinator now wants reminders for pending and approved apps too
self.coordinator.userprofile.pending_app_reminders = True
self.coordinator.userprofile.approved_app_reminders = True
self.coordinator.userprofile.save()
call_command("send_coordinator_reminders")
self.assertEqual(len(mail.outbox), 1)
self.assertIn("1 pending application", mail.outbox[0].body)
self.assertIn("1 under discussion application", mail.outbox[0].body)
self.assertIn("1 approved application", mail.outbox[0].body)
``` |
{
"source": "aacanakin/glim-extensions",
"score": 3
} |
#### File: glim_extensions/db/db.py
```python
from glim.core import Facade
from glim import Log
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
class Database(object):
"""
The class that holds & manipulates database connections
using SQLAlchemy's DB API.
Attributes
----------
config (dict): The dictionary to hold the configuration of
connections.
        connections (dict): The dict of sqlalchemy connections.
        sessions (dict): The dict of sqlalchemy sessions that are
            used by the orm.
        engines (dict): The dict of sqlalchemy engines.
active (string): The active connection alias.
Usage
-----
db = Database(config)
sql = "INSERT INTO users (full_name, title)
VALUES ('%s','%s')" % (full_name, title))"
db.execute(sql)
db.connection('name').execute(sql)
"""
def __init__(self, config):
self.active = 'default'
self.config = config
self.connections = {}
self.sessions = {}
self.engines = {}
for k, config in self.config.items():
try:
                # Build a SQLAlchemy URL of the form driver://user:password@host/schema.
                # The original format string dropped the host and put the password in
                # its place; a 'host' key is assumed to exist in the connection config.
                cstring = '%s://%s:%s@%s/%s' % (
                    config['driver'],
                    config['user'],
                    config['password'],
                    config['host'],
                    config['schema']
                )
engine = create_engine(cstring)
connection = engine.connect()
Session = sessionmaker(bind=engine)
session = Session()
self.engines[k] = engine
self.sessions[k] = session
self.connections[k] = connection
except Exception as e:
Log.error(e)
def __getattr__(self, attr):
return getattr(self.connections[self.active], attr)
def session(self):
"""
Function returns the session object of active connection.
Returns
-------
session (sqlalchemy.orm.session.Session): The active
session that is used by Orm layer.
"""
return self.sessions[self.active]
def engine(self, key=None):
"""
Function returns the active engine object.
Args
----
key (string): a string based key to represent
connection dict.
Returns
-------
engine (sqlalchemy.engine.Engine): The active
engine for db connection.
"""
if key:
return self.engines[key]
else:
return self.engines[self.active]
def connection(self, key=None):
"""
Function sets the active connection and
returns the self for chaining connections.
Args
----
key (string): The connection alias.
Returns
-------
db (glim.db.Database): self for method
chaining.
"""
if key:
self.active = key
else:
self.active = 'default'
return self
def get(self, key=None):
"""
        Function returns the connection object,
        optionally for a given key.
Args
----
key (string): The connection alias.
Returns
-------
connection (sqlalchemy.engine.Connection):
the created connection from active engine.
"""
if key:
return self.connections[key]
else:
return self.connections[self.active]
def close(self):
"""Function closes the database connections."""
        for connection in self.connections.values():
            connection.close()
class DatabaseFacade(Facade):
accessor = Database
# an alias of sqlalchemy.ext.declarative.declarative_base
Model = declarative_base()
class Orm(object):
"""
This class is responsible for handling orm operations
using SQLAlchemy.
Attributes
----------
        engines (dict): a dict of sqlalchemy engines.
Usage
-----
user = User(full_name, title) # a sqlalchemy model
ORM.add(user) # adds the user object into session
ORM.commit() # commits the transaction
"""
def __init__(self, engines):
self.active = 'default'
self.engines = engines
self.sessions = {}
DBSession = sessionmaker()
for k, engine in engines.items():
DBSession.configure(bind=engine)
self.sessions[k] = DBSession()
def __getattr__(self, attr):
return getattr(self.sessions[self.active], attr)
    def session(self, key='default'):
"""
Function sets the active orm session and returns
self for method chaining.
Args
----
key (string): String based alias of session.
Returns
-------
orm (glim.db.Orm): self.
"""
self.active = key
return self
class OrmFacade(Facade):
accessor = Orm
```
#### File: glim_extensions/db/models.py
```python
import datetime
from . import Model
from sqlalchemy import Column, String, DateTime, Text
class Migration(Model):
__tablename__ = 'glim_migrations'
id = Column(String(255), primary_key=True)
name = Column(String(255))
created_at = Column(DateTime, default=datetime.datetime.utcnow)
description = Column(Text)
def __repr__(self):
return "<Migration(id=%s, name=%s)>" % (self.id, self.name)
```
#### File: glim_extensions/job/commands.py
```python
import os
import traceback
from glim.command import Command
from glim import Log
from .queue import JobQueue  # package-local modules, not the stdlib queue
from .exception import FailedJobError
import glim.paths as paths
class ConsumeCommand(Command):
name = 'consume'
description = 'consumes jobs given jobs list'
def configure(self):
self.add_argument('--name', help = 'enter jobs list name', default = 'jobs')
def run(self):
Log.info("Listening for jobs..")
while(True):
name = self.args.name
job = JobQueue.pop()
if job:
try:
job.run()
Log.info('job %s is consumed' % job.id)
except FailedJobError as e:
Log.error(e)
JobQueue.push_failed(job)
except Exception as e:
Log.error("An unknown exception has occured!!")
Log.error(traceback.format_exc())
class ProduceCommand(Command):
name = 'produce'
description = 'produces a job given parameters and job name'
def configure(self):
pass
def run(self):
pass
class InitCommand(Command):
name = 'init'
description = 'initializes the jobs extension'
def configure(self):
self.add_argument('--name', help = 'enter jobs file name', default = 'jobs')
    # touch the jobs file in the app folder
def run(self):
jobs_path = os.path.join(paths.APP_PATH, '%s.py' % self.args.name)
fhandle = open(jobs_path, 'a')
try:
os.utime(jobs_path, None)
Log.info("app/jobs.py created successfully")
except Exception as e:
Log.error(e)
finally:
fhandle.close()
# class CreateCommand(Command):
# name = 'create'
# description = 'appends a job on your jobs file'
# def configure(self):
# self.add_argument('name', help = 'enter job name')
# # appends a new job given name
# def run(self):
# pass
```
#### File: glim_extensions/jslint/jslint.py
```python
import subprocess
import os
from glim.core import Facade
from glim import Log
from glim import paths
DEFAULT_CONFIG = {
'source': os.path.join(paths.APP_PATH, 'assets/js'),
}
class JSLint(object):
def __init__(self, config):
        # Copy the defaults so per-instance overrides do not mutate the module-level dict
        self.config = dict(DEFAULT_CONFIG)
        for key, value in config.items():
            self.config[key] = value
        Log.debug(self.config)
def check(self):
try:
command = 'jslint'
arguments = '%s%s' % (self.config['source'], '/*')
Log.debug("command: %s" % command)
Log.debug("arguments: %s" % arguments)
# find ./public/javascripts/ -name '*.js' -print0 | xargs -0 jslint
cmd = "find %s -name '*.js' -print0 | xargs -0 jslint" % self.config['source']
# cmd = '%s %s' % (command, arguments)
Log.debug("cmd: %s" % cmd)
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
out, err = p.communicate()
Log.info("Linting javascript..")
Log.write(out)
Log.error(err)
except Exception as e:
Log.error(e)
class JSLintFacade(Facade):
accessor = JSLint
```
#### File: glim_extensions/memcached/memcached.py
```python
from glim.core import Facade
from glim.ext import Extension
from glim import Log
from pymemcache.client import Client, MemcacheError
class MemcachedExtension(Extension):
def __init__(self, config):
self.config = config
self.active = 'default'
self.connections = {}
for key, config in self.config.items():
self.connections[key] = self.connect(config)
def __getattr__(self, attr):
try:
return getattr(self.connections[self.active], attr)
except MemcacheError as e:
Log.error(e)
def connection(self, key = None):
if key:
self.active = key
else:
self.active = 'default'
def connect(self, config):
try:
connection = Client((
config['host'], config['port']
))
return connection
except MemcacheError as e:
Log.error(e)
class Cache(Facade):
accessor = MemcachedExtension
``` |
{
"source": "aaccomazzi/adsabs-pyingest",
"score": 2
} |
#### File: extractors/grobid/process_pdf.py
```python
import os
import requests
import codecs
import logging
import argparse
import multiprocessing
import time
GROBID_SERVER = 'http://localhost:8081'
GROBID_HANDLER = 'processFulltextDocument'
DEFAULT_THREADS = max(multiprocessing.cpu_count() // 2, 1)
DEFAULT_TIMEOUT = 60 # timeout on connection after this delay
DEFAULT_MAX_RETRIES = 3 # try to reconnect these many times to grobid server
DEFAULT_SLEEP_DELAY = 10 # give server enough time to restart grobid
class GrobidError(Exception):
pass
class ConnectionError(Exception):
pass
class GrobidProcessor(object):
"""
    Needed to take advantage of multiprocessing.Pool
"""
def __init__(self, service, destdir=None, force=None, timeout=DEFAULT_TIMEOUT,
max_retries=DEFAULT_MAX_RETRIES, sleep_delay=DEFAULT_SLEEP_DELAY):
self.service = service
self.destdir = destdir
self.force = force
self.timeout = timeout
self.max_retries = max_retries
self.sleep_delay = sleep_delay
def __call__(self, file):
try:
fp = open(file, 'r')
        except IOError as error:
logging.error("error opening file %s: %s" % (file, error))
return None
if self.destdir:
out_file = os.path.join(self.destdir, os.path.basename(file)) + '.xml'
else:
out_file = file + '.xml'
logging.debug("considering source file %s" % file)
if os.path.exists(out_file):
if os.path.getmtime(out_file) > os.path.getmtime(file):
if self.force:
logging.debug("forcing reprocessing of source file %s (target is %s)" %(file, out_file))
else:
logging.debug("target file %s is up-to-date" % out_file)
return out_file
else:
logging.debug("recreating stale target file %s" % out_file)
else:
logging.debug("creating target file %s" % out_file)
logging.info("processing file %s" % file)
        xml = None
        retry = self.max_retries
        while retry > 0:
            try:
                xml = self.send_to_grobid(fp)
            except ConnectionError as error:
                retry = retry - 1
                logging.info("ran into connection error: '%s'" % error)
                if retry > 0:
                    logging.info("retrying in %d seconds" % self.sleep_delay)
                    time.sleep(self.sleep_delay)
            except GrobidError as error:
                logging.error("error processing file %s: %s" % (file, error))
                return None
            else:
                retry = 0
        if xml is None:
            # all retries exhausted without a response; avoid writing undefined xml
            logging.error("giving up on file %s after %d attempts" % (file, self.max_retries))
            return None
try:
fp = codecs.open(out_file, 'w', 'utf-8')
        except IOError as error:
logging.error("error opening file %s: %s" % (out_file, error))
return None
fp.write(xml)
logging.info("written output file %s" % out_file)
return out_file
def send_to_grobid(self, filehandle):
try:
response = requests.post(url=self.service, files={'input': filehandle}, timeout=self.timeout)
except requests.exceptions.Timeout:
logging.debug("timeout from requests")
raise ConnectionError("request timeout after %d seconds" % self.timeout)
except requests.exceptions.RequestException as e:
raise ConnectionError("request exception: %s" % e)
if response.status_code == 200:
logging.debug("successful response from grobid server (%d bytes)" % len(response.content))
return response.text
else:
raise GrobidError("HTTP %d - %s: %s" % (response.status_code, response.reason, response.text))
def parse_arguments():
argp = argparse.ArgumentParser()
argp.add_argument(
'--debug',
default=False,
action='store_true',
dest='debug',
help='turn on debugging'
)
argp.add_argument(
'--force',
default=False,
action='store_true',
dest='force',
help='force recreation of all target files'
)
argp.add_argument(
'--server',
type=str,
default=GROBID_SERVER,
dest='server',
help='specify server to use (default is %s)' % GROBID_SERVER
)
argp.add_argument(
'--handler',
type=str,
default=GROBID_HANDLER,
dest='handler',
help='specify handler to use (default is %s)' % GROBID_HANDLER
)
argp.add_argument(
'--threads',
type=int,
default=DEFAULT_THREADS,
dest='threads',
help='specify number of threads to use (default is %d)' % DEFAULT_THREADS
)
argp.add_argument(
'--destdir',
type=str,
default=None,
dest='destdir',
help='specify output directory for extracted files'
)
argp.add_argument('files', nargs='+')
return argp.parse_args()
if __name__ == "__main__":
args = parse_arguments()
if args.debug:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
service = os.path.join(args.server, args.handler)
threads = min(args.threads, len(args.files))
logging.info("allocating %d threads for processing %d files" %(threads, len(args.files)))
# avoid the overhead of multiprocessing unless necessary
if threads > 1:
p = multiprocessing.Pool(threads)
p.map(GrobidProcessor(service, destdir=args.destdir, force=args.force), args.files)
    else:
        # map() is lazy under Python 3, so iterate explicitly
        processor = GrobidProcessor(service, destdir=args.destdir, force=args.force)
        for pdf_file in args.files:
            processor(pdf_file)
``` |
{
"source": "aacebedo/bitwarden-pyro",
"score": 2
} |
#### File: bitwarden-pyro/bitwarden_pyro/bwpyro.py
```python
from time import sleep
import re
import sys
import logging
import shlex
from collections import namedtuple
from bitwarden_pyro.util.logger import ProjectLogger
from bitwarden_pyro.util.arguments import parse_arguments
from bitwarden_pyro.settings import NAME, VERSION
from bitwarden_pyro.view.rofi import Rofi
from bitwarden_pyro.controller.session import Session, SessionException
from bitwarden_pyro.controller.autotype import AutoType, AutoTypeException
from bitwarden_pyro.controller.clipboard import Clipboard, ClipboardException
from bitwarden_pyro.controller.vault import Vault, VaultException
from bitwarden_pyro.model.actions import ItemActions, WindowActions
from bitwarden_pyro.util.formatter import ItemFormatter, create_converter
from bitwarden_pyro.util.notify import Notify
from bitwarden_pyro.util.config import ConfigLoader, ConfigException
from bitwarden_pyro.controller.cache import CacheException
from bitwarden_pyro.controller.focus import Focus, FocusException
class FlowException(Exception):
"""Exceptions raised during the main loop"""
class BwPyro:
"""
Start and control the execution of the program
"""
def __init__(self):
self._rofi = None
self._session = None
self._vault = None
self._clipboard = None
self._autotype = None
self._notify = None
self._config = None
self._focus = None
self._args = parse_arguments()
self._logger = ProjectLogger(
self._args.verbose, not self._args.no_logging
).get_logger()
def start(self):
"""Start the execution of the program"""
if self._args.version:
print(f"{NAME} v{VERSION}")
sys.exit()
elif self._args.lock:
self.__lock()
elif self._args.dump_config:
self.__dump_config()
else:
self.__launch_ui()
def __dump_config(self):
try:
self._logger.setLevel(logging.ERROR)
self._config = ConfigLoader(self._args)
dump = self._config.dump()
print(dump)
except ConfigException:
self._logger.exception("Failed to dump config")
def __lock(self):
try:
self._logger.info("Locking vault and deleting session")
self._session = Session()
self._session.lock()
except SessionException:
self._logger.exception("Failed to lock session")
self._rofi = Rofi([], None, None)
self._rofi.show_error("Failed to lock and delete session")
def __unlock(self, force=False):
self._logger.info("Unlocking bitwarden vault")
if force or not self._session.has_key():
pwd = self._rofi.get_password()
if pwd is not None:
self._session.unlock(pwd)
else:
self._logger.info("Unlocking aborted")
sys.exit(0)
k = self._session.get_key()
self._vault.set_key(k)
def __show_items(self, prompt):
items = self._vault.get_items()
# Convert items to \n separated strings
formatted = ItemFormatter.unique_format(items)
selected_name, event = self._rofi.show_items(formatted, prompt)
self._logger.debug("User selected login: %s", selected_name)
# Rofi dialog has been closed
if selected_name is None:
self._logger.debug("Item selection has been aborted")
return (None, None)
        # Make sure the selection is really an item group, and not a single
        # item whose name happens to start with the deduplication marker
if selected_name.startswith(ItemFormatter.DEDUP_MARKER) and \
len(self._vault.get_by_name(selected_name)) == 0:
self._logger.debug("User selected item group")
group_name = selected_name[len(ItemFormatter.DEDUP_MARKER):]
selected_items = self._vault.get_by_name(group_name)
if isinstance(event, ItemActions):
event = WindowActions.GROUP
return (event, selected_items)
# A single item has been selected
self._logger.debug("User selected single item")
selected_item = self._vault.get_by_name(selected_name)
return (event, selected_item)
def __show_indexed_items(self, prompt, items=None, fields=None,
ignore=None):
if items is None:
items = self._vault.get_items()
converter = create_converter(fields, ignore)
indexed, formatted = ItemFormatter.group_format(items, converter)
selected_name, event = self._rofi.show_items(formatted, prompt)
# Rofi has been closed
if selected_name is None:
self._logger.debug("Group item selection has been aborted")
return (None, None)
# An item has been selected
regex = r"^#([0-9]+): .*"
match = re.search(regex, selected_name)
selected_index = int(match.group(1)) - 1
selected_item = indexed[selected_index]
return (event, selected_item)
def __show_folders(self, prompt):
items = self._vault.get_folders()
formatted = ItemFormatter.unique_format(items)
selected_name, event = self._rofi.show_items(formatted, prompt)
self._logger.info("User selected folder: %s", selected_name)
if selected_name is None:
self._logger.debug("Folder selection has been aborted")
return (None, None)
folder = [i for i in items if i['name'] == selected_name][0]
if folder['name'] == 'No Folder':
self._logger.debug("Clearing vault folder filter")
self._vault.set_filter(None)
else:
self._vault.set_filter(folder)
if isinstance(event, ItemActions):
event = WindowActions.NAMES
return (event, None)
def __load_items(self, use_cache=True):
try:
# First attempt at loading items
self._vault.load_items(use_cache)
except VaultException:
self._logger.warning(
"First attempt at loading vault items failed"
)
self.__unlock(force=True)
self._vault.load_items(use_cache)
def __set_keybinds(self):
keybinds = {
'type_password': ItemActions.PASSWORD,
'type_all': ItemActions.ALL,
'copy_totp': ItemActions.TOTP,
'mode_uris': WindowActions.URIS,
'mode_names': WindowActions.NAMES,
'mode_logins': WindowActions.LOGINS,
'mode_folders': WindowActions.FOLDERS,
'sync': WindowActions.SYNC
}
for name, action in keybinds.items():
self._rofi.add_keybind(
self._config.get(f'keyboard.{name}.key'),
action,
self._config.get(f'keyboard.{name}.hint'),
self._config.get(f'keyboard.{name}.show'),
)
def __init_ui(self):
try:
self._config = ConfigLoader(self._args)
self._session = Session(
self._config.get_int('security.timeout'))
RofiArgs = namedtuple('RofiArgs',
'main_window_args password_window_args additional_args')
rofi_args = RofiArgs(shlex.split(self._args.main_window_rofi_args),
shlex.split(self._args.password_window_rofi_args),
self._args.rofi_args)
self._rofi = Rofi(rofi_args,
self._config.get_itemaction('keyboard.enter'),
self._config.get_boolean('interface.hide_mesg'))
self._clipboard = Clipboard(
self._config.get_int('security.clear'))
self._autotype = AutoType()
self._vault = Vault(self._config.get_int('security.cache'))
self._notify = Notify()
self._focus = Focus(
self._config.get_boolean('autotype.select_window'),
self._config.get('autotype.slop_args')
)
self.__set_keybinds()
except (ClipboardException, AutoTypeException, CacheException,
SessionException, VaultException, ConfigException):
self._logger.exception("Failed to initialise application")
sys.exit(1)
def __display_windows(self):
action = self._config.get_windowaction('interface.window_mode')
while action is not None and isinstance(action, WindowActions):
self._logger.info("Switch window mode to %s", action)
prompt = 'Bitwarden'
if self._vault.has_filter():
prompt = self._vault.get_filter()['name']
            # Show the deduplicated list of item names
if action == WindowActions.NAMES:
action, item = self.__show_items(
prompt=prompt
)
elif action == WindowActions.GROUP:
action, item = self.__show_indexed_items(
prompt=item[0]['name'],
items=item,
fields=['login.username']
)
elif action == WindowActions.URIS:
action, item = self.__show_indexed_items(
prompt=prompt,
fields=['login.uris.uri'],
ignore=['http://', 'https://', 'None']
)
elif action == WindowActions.LOGINS:
action, item = self.__show_indexed_items(
prompt=prompt,
fields=['name', 'login.username']
)
elif action == WindowActions.SYNC:
self._vault.sync()
self.__load_items(use_cache=False)
action, item = self.__show_items(
prompt=prompt
)
elif action == WindowActions.FOLDERS:
action, item = self.__show_folders(
prompt='Folders'
)
return action, item
def __delay_type(self):
# Delay typing, allowing correct window to be focused
if self._focus.is_enabled():
okay = self._focus.select_window()
if not okay:
self._logger.warning("Focus has been cancelled")
sys.exit(0)
else:
start_delay = self._config.get_int('autotype.start_delay')
focus_notification = self._config.get_boolean(
'autotype.delay_notification'
)
if focus_notification:
self._notify.send(
message=f"Waiting {start_delay} second(s) for window to refocus",
timeout=start_delay * 1000 # Convert to ms
)
sleep(start_delay)
def __execute_action(self, action, item):
if action == ItemActions.COPY:
self._logger.info("Copying password to clipboard")
# Get item with password
item = self._vault.get_item_full(item)
self._notify.send(
message="Login password copied to clipboard",
timeout=self._clipboard.clear * 1000 # convert to ms
)
self._clipboard.set(item['login']['password'])
elif action == ItemActions.ALL:
self._logger.info("Auto tying username and password")
# Get item with password
item = self._vault.get_item_full(item)
self.__delay_type()
self._notify.send(
message="Auto typing username and password"
)
tab_delay = self._config.get_float('autotype.tab_delay')
self._autotype.string(item['login']['username'])
sleep(tab_delay)
self._autotype.key('Tab')
sleep(tab_delay)
self._autotype.string(item['login']['password'])
elif action == ItemActions.PASSWORD:
self._logger.info("Auto typing password")
# Get item with password
item = self._vault.get_item_full(item)
self.__delay_type()
self._notify.send(
message="Auto typing password"
)
self._autotype.string(item['login']['password'])
elif action == ItemActions.TOTP:
self._logger.info("Copying TOTP to clipboard")
totp = self._vault.get_item_topt(item)
self._notify.send(
message="TOTP is copied to the clipboard",
timeout=self._clipboard.clear * 1000 # convert to ms
)
self._clipboard.set(totp)
else:
self._logger.error("Unknown action received: %s", action)
def __launch_ui(self):
self._logger.info("Application has been launched")
self.__init_ui()
try:
self.__unlock()
self.__load_items()
action, item = self.__display_windows()
# Selection has been aborted
if action is None:
self._logger.info("Exiting. Login selection has been aborted")
sys.exit(0)
self.__execute_action(action, item)
except (AutoTypeException, ClipboardException,
SessionException, VaultException, FocusException) as exc:
self._logger.exception("Application has received a critical error")
self._rofi.show_error(f"An error has occurred. {exc}")
def run():
"""Initialise the program controller"""
bw_pyro = BwPyro()
bw_pyro.start()
``` |
{
"source": "aacecandev/core-project-one",
"score": 3
} |
#### File: dashboard/app/signup.py
```python
import os
import time
from typing import Dict
import streamlit as st
from hydralit import HydraHeadApp
class SignUpApp(HydraHeadApp):
"""
    This is an example signup application used to secure access within a
    HydraApp Streamlit application. It demonstrates an app that is allowed
    to run from the login screen without requiring authentication.
"""
def __init__(self, title="", **kwargs):
self.__dict__.update(kwargs)
self.title = title
def run(self) -> None:
"""
Application entry point.
"""
st.markdown(
"<h1 style='text-align: center;'>Secure Hydralit Signup</h1>",
unsafe_allow_html=True,
)
c1, c2, c3 = st.columns([2, 2, 2])
c3.image(
"./static/images/lock.png",
width=100,
)
c3.image(
"./static/images/hydra.png",
width=100,
)
pretty_btn = """
<style>
div[class="row-widget stButton"] > button {
width: 100%;
}
</style>
<br><br>
"""
c2.markdown(pretty_btn, unsafe_allow_html=True)
if "MSG" in os.environ.keys():
st.info(os.environ["MSG"])
form_data = self._create_signup_form(c2)
pretty_btn = """
<style>
div[class="row-widget stButton"] > button {
width: 100%;
}
</style>
<br><br>
"""
c2.markdown(pretty_btn, unsafe_allow_html=True)
if form_data["submitted"]:
self._do_signup(form_data, c2)
def _create_signup_form(self, parent_container) -> Dict:
login_form = parent_container.form(key="login_form")
form_state = {}
form_state["username"] = login_form.text_input("Username")
form_state["password"] = login_form.text_input("Password", type="password")
form_state["password2"] = login_form.text_input(
"Confirm Password", type="password"
)
form_state["access_level"] = login_form.selectbox(
"Example Access Level", (1, 2)
)
form_state["submitted"] = login_form.form_submit_button("Sign Up")
if parent_container.button("Login", key="loginbtn"):
            # reset the access level so the parent kicks back to the unsecure login app
self.set_access(0, None)
# Do the kick to the signup app
self.do_redirect()
return form_state
def _do_signup(self, form_data, msg_container) -> None:
if form_data["submitted"] and (form_data["password"] != form_data["<PASSWORD>"]):
st.error("Passwords do not match, please try again.")
else:
with st.spinner("🤓 now redirecting to login...."):
self._save_signup(form_data)
time.sleep(2)
                # Access control uses an int value to allow permission levels per
                # user, which can then be checked within each app separately.
self.set_access(0, None)
# Do the kick back to the login screen
self.do_redirect()
def _save_signup(self, signup_data):
        # Get the user details from the form and save them somewhere;
        # signup_data is the submitted data. Here we just show what we captured.
what_we_got = f"""
captured signup details: \n
username: {signup_data['username']} \n
password: {signup_data['password']} \n
access level: {signup_data['access_level']} \n
"""
st.write(what_we_got)
```
#### File: dashboard/utils/graphs.py
```python
from utils.common import *
from utils.requests import (
get_all_vehicles_grouped_by_month,
get_all_vehicles_grouped_by_weekday,
get_all_victims_grouped_by_month,
get_all_victims_grouped_by_weekday,
)
def victims_graph(query_time):
if query_time == "Grouped by month":
data = get_all_victims_grouped_by_month()
columns = list(list(data)[0].keys())
df = build_df(data, columns)
elif query_time == "Grouped by weekday":
data = get_all_victims_grouped_by_weekday()
columns = list(list(data)[0].keys())
df = build_df(data, columns)
return df
def vehicles_graph(query_time):
if query_time == "Grouped by month":
data = get_all_vehicles_grouped_by_month()
columns = list(list(data)[0].keys())
df = build_df(data, columns)
elif query_time == "Grouped by weekday":
data = get_all_vehicles_grouped_by_weekday()
columns = list(list(data)[0].keys())
df = build_df(data, columns)
return df
```
#### File: dashboard/utils/pdf_manager.py
```python
import base64
import re
import pdfkit
import streamlit as st
from bs4 import BeautifulSoup
from mailjet_rest import Client
def scrape_html_file():
with open("./streamlit_raw.html") as f:
html = f.read()
    return BeautifulSoup(html, "html.parser")  # explicit parser avoids a bs4 warning
def write_pdf_to_disk(soup):
with open("/app/streamlit.html", "w+") as f:
f.write(str(soup))
f.close()
options = {
"page-size": "A4",
"margin-top": "0.75in",
"margin-right": "0.75in",
"margin-bottom": "0.75in",
"margin-left": "0.75in",
}
pdfkit.from_file("./streamlit.html", "./streamlit.pdf", options=options)
def create_pdf(victims_data):
victims_average_list = [item[1] for item in victims_data["average"].items()]
soup = scrape_html_file()
for index, td in enumerate(soup.select("tbody > tr > td:nth-child(2)")):
td.string = str(victims_average_list[index])
write_pdf_to_disk(soup)
def email_validator(email_address: str):
    if re.fullmatch(r"^[^@]+@[^@]+\.[^@]+$", email_address):
return True
else:
return False
def email_sender(email_address):
with open("./streamlit.pdf", "rb") as pdf:
data = pdf.read()
        pdf_encoded = base64.b64encode(data).decode()
mailjet = Client(
auth=(st.secrets["api_key"], st.secrets["api_secret"]), version="v3.1"
)
data = {
"Messages": [
{
"From": {
"Email": st.secrets["sender_email"],
"Name": st.secrets["sender_name"],
},
"To": [
{
"Email": email_address,
}
],
"Subject": "Core Project One Report",
"TextPart": "Enjoy your report!",
"Attachments": [
{
"ContentType": "application/pdf",
"Filename": "report.pdf",
"Base64Content": pdf_enconded,
}
],
}
]
}
return mailjet.send.create(data=data)
def email_manager(email_address):
try:
if email_validator(email_address):
sent = email_sender(email_address)
if sent.status_code == 200:
return "Success!"
else:
return "Error!"
else:
raise ValueError("Invalid email address")
except ValueError as e:
print(e)
return "Invalid email address"
```
#### File: dashboard/utils/requests.py
```python
import json
import requests
import streamlit as st
################
### FIND ALL ###
################
# /accidents
def get_all():
res = requests.get(st.secrets["url"] + "/accidents")
return res.json()
def get_all_victims_grouped_by_month():
res = requests.get(st.secrets["url"] + "/eda/victims-grouped-month")
return res.json()
def get_all_victims_grouped_by_weekday():
res = requests.get(st.secrets["url"] + "/eda/victims-grouped-weekday")
return res.json()
def get_all_coordinates():
res = requests.get(st.secrets["url"] + "/eda/coordinates")
return res.json()
def get_all_vehicles_grouped_by_month():
res = requests.get(st.secrets["url"] + "/eda/vehicles-grouped-month")
return res.json()
def get_all_vehicles_grouped_by_weekday():
res = requests.get(st.secrets["url"] + "/eda/vehicles-grouped-weekday")
return res.json()
################
### FIND ONE ###
################
# /accidents/{id}
def find_accident(data):
res = requests.get(st.secrets["url"] + "/accidents/" + data["id"]).json()
return res
##############
### CREATE ###
##############
# /accidents/create
# requests.put(url, params={key: value}, args)
def create_accident(data):
payload = {
"victims": data["victims"],
"vehicles_involved": data["vehicles"],
"date": data["date"],
"location": {
"coordinates": [2.125624418258667, 41.34004592895508],
"type": "Point",
},
}
headers = {"Content-Type": "application/json", "Accept": "application/json"}
res = requests.put(
st.secrets["url"] + "/accidents/create",
data=json.dumps(payload),
headers=headers,
)
return res
##############
### UPDATE ###
##############
# /accidents/update
# requests.patch(url, params={key: value}, args)
def update_accident(data):
payload = {
"id": data["id"],
"victims": data["victims"],
"vehicles_involved": data["vehicles"],
"date": data["date"],
"location": {
"coordinates": [2.125624418258667, 41.34004592895508],
"type": "Point",
},
}
headers = {"Content-Type": "application/json", "Accept": "application/json"}
res = requests.patch(
st.secrets["url"] + "/accidents/update",
data=json.dumps(payload),
headers=headers,
)
return res
##############
### DELETE ###
##############
# /accidents/delete
def delete_accident(data):
headers = {"Content-Type": "application/json", "Accept": "application/json"}
res = requests.delete(st.secrets["url"] + "/accidents/delete/" + data["id"]).json()
return res
``` |
{
"source": "AAChartModel/AACharts-PyQt",
"score": 2
} |
#### File: aacharts/aachartcreator/AAChartModel.py
```python
from aacharts.aaenum.AAEnum import *
from aacharts.aaoptionsmodel.AAScrollablePlotArea import AAScrollablePlotArea
from aacharts.aaoptionsmodel.AAStyle import AAStyle
from aacharts.aaoptionsmodel.AAYAxis import AAYAxis
from aacharts.aaoptionsmodel.AALabels import AALabels
from aacharts.aaoptionsmodel.AAXAxis import AAXAxis
from aacharts.aaoptionsmodel.AADataLabels import AADataLabels
from aacharts.aaoptionsmodel.AAMarker import AAMarker
from aacharts.aaoptionsmodel.AASeries import AASeries
from aacharts.aaoptionsmodel.AASubtitle import AASubtitle
from aacharts.aaoptionsmodel.AAOptions import AAOptions
from aacharts.aaoptionsmodel.AATitle import AATitle
from aacharts.aaoptionsmodel.AAChart import AAChart
from aacharts.aaoptionsmodel.AATooltip import AATooltip
from aacharts.aaoptionsmodel.AAPlotOptions import AAPlotOptions
from aacharts.aaoptionsmodel.AAAnimation import AAAnimation
from aacharts.aaoptionsmodel.AALegend import AALegend
from aacharts.aaoptionsmodel.AACredits import AACredits
class AAChartModel:
animationType: AAChartAnimationType = AAChartAnimationType.bounce # The type of chart animation
animationDuration: int = None # The chart rendering animation duration
title: str = "" # The chart title
titleStyle: AAStyle = None # The chart title style
subtitle: str = None # The chart subtitle
subtitleAlign: AAChartAlignType = None # The chart subtitle text align style
subtitleStyle: AAStyle = None # The chart subtitle style
chartType: AAChartType = AAChartType.column # The default series type for the chart. Can be any of the chart types listed under `AAChartType`. Defaults to column
stacking: AAChartStackingType = AAChartStackingType.none # Whether to stack the values of each series on top of each other. Possible values are null to disable, "normal" to stack by value or "percent". When stacking is enabled, data must be sorted in ascending X order
markerSymbol: AAChartSymbolType = AAChartSymbolType.circle # A predefined shape or symbol for the marker. When null, the symbol is pulled from options.symbols. Other possible values are "circle", "square", "diamond", "triangle" and "triangle-down"
markerSymbolStyle: AAChartSymbolStyleType = None
zoomType: AAChartZoomType = None # Decides in what dimensions the user can zoom by dragging the mouse. Can be one of x, y or xy
inverted: bool = None # Whether to invert the axes so that the x axis is vertical and y axis is horizontal. When true, the x axis is reversed by default. If a bar series is present in the chart, it will be inverted automatically.Inverting the chart doesn't have an effect if there are no cartesian series in the chart, or if the chart is polar.Defaults to false
xAxisReversed: bool = None # Whether to reverse the axis so that the highest number is closest to the origin. If the chart is inverted, the x axis is reversed by default. Defaults to false
yAxisReversed: bool = None # Whether to reverse the axis so that the highest number is closest to the origin. If the chart is inverted, the x axis is reversed by default. Defaults to false
crosshairs: bool = None # Enable or disable the crosshairs
polar: bool = None # When true, cartesian charts like line, spline, area and column are transformed into the polar coordinate system. Requires `AAHighchartsMore.js`. Defaults to false
margin: list = None
dataLabelsEnabled: bool = None # Enable or disable the data labels. Defaults to false
dataLabelsStyle: AAStyle = None # The data labels style
xAxisLabelsEnabled: bool = None # Enable or disable the axis labels. Defaults to true
xAxisLabelsStyle: AAStyle = None # The x axis labels style
categories: list = None # Set new categories for the axis
xAxisGridLineWidth: float = None # The width of the grid lines extending the ticks across the plot area.Defaults to 0
xAxisVisible: bool = None # Show the x axis or not
xAxisTickinterval: float = None # Custom x axis tick interval,It is useful when the x categories array is too long to show all of them
yAxisVisible: bool = None # Show the y axis or not
yAxisLabelsEnabled: bool = None # Enable or disable the axis labels. Defaults to true
yAxisLabelsStyle: AAStyle = None # The y axis labels style
yAxisTitle: str = None # The actual text of the axis title
xAxisTitle: str = None # The actual text of the axis title
yAxisLineWidth: float = None # The width of y axis line
yAxisGridLineWidth: float = None # The width of the grid lines extending the ticks across the plot area. Defaults to 1
yAxisMin: float = None # The y axis mini value
yAxisMax: float = None # The y axis max value
yAxisAllowDecimals: bool = None # The y axis values label allow decimals or not
tooltipEnabled: bool = None # Show the tooltip or not
tooltipValueSuffix: str = None # Custom tooltip value unit suffix
colorsTheme: list = None # An array containing the default colors for the chart's series. When all colors are used, new colors are pulled from the start again. Defaults to: ["#bb250c","#f67210","#fde680","#257679","#f1c6c5"]
series: list = None # An array of all the chart's series
legendEnabled: bool = None # Enable or disable the legend. Defaults to true
backgroundColor: str = None # The background color or gradient for the outer chart area. Defaults to #FFFFFF
borderRadius: float = None # The corner radius of the outer chart border. Defaults to 0
markerRadius: float = None # The radius of the point marker. Defaults to 4
touchEventEnabled: bool = None # Support touch event call back or not
scrollablePlotArea: AAScrollablePlotArea = None # Scroll properties if supported
def animationTypeSet(self, prop: AAChartAnimationType):
self.animationType = prop
return self
def animationDurationSet(self, prop: int):
self.animationDuration = prop
return self
def titleSet(self, prop: str):
self.title = prop
return self
def titleStyleSet(self, prop: AAStyle):
self.titleStyle = prop
return self
def subtitleSet(self, prop: str):
self.subtitle = prop
return self
def subtitleAlignSet(self, prop: AAChartAlignType):
self.subtitleAlign = prop
return self
def subtitleStyleSet(self, prop: AAStyle):
self.subtitleStyle = prop
return self
def chartTypeSet(self, prop: AAChartType):
self.chartType = prop
return self
def stackingSet(self, prop: AAChartStackingType):
self.stacking = prop
return self
def markerRadiusSet(self, prop: float):
self.markerRadius = prop
return self
def markerSymbolSet(self, prop: AAChartSymbolType):
self.markerSymbol = prop
return self
def markerSymbolStyleSet(self, prop: AAChartSymbolStyleType):
self.markerSymbolStyle = prop
return self
def zoomTypeSet(self, prop: AAChartZoomType):
self.zoomType = prop
return self
def invertedSet(self, prop: bool):
self.inverted = prop
return self
def xAxisReversedSet(self, prop: bool):
self.xAxisReversed = prop
return self
def yAxisReversedSet(self, prop: bool):
self.yAxisReversed = prop
return self
def tooltipEnabledSet(self, prop: bool):
self.tooltipEnabled = prop
return self
def tooltipValueSuffixSet(self, prop: str):
self.tooltipValueSuffix = prop
return self
def polarSet(self, prop: bool):
self.polar = prop
return self
def marginSet(self, top: float = 0, right: float = 0, bottom: float = 0, left: float = 0):
self.margin = [top, right, bottom, left]
return self
def dataLabelsEnabledSet(self, prop: bool):
self.dataLabelsEnabled = prop
return self
def dataLabelsStyleSet(self, prop: AAStyle):
self.dataLabelsStyle = prop
return self
def xAxisLabelsEnabledSet(self, prop: bool):
self.xAxisLabelsEnabled = prop
return self
def xAxisLabelsStyleSet(self, prop: AAStyle):
self.xAxisLabelsStyle = prop
return self
def categoriesSet(self, prop: list):
self.categories = prop
return self
def xAxisGridLineWidthSet(self, prop: float):
self.xAxisGridLineWidth = prop
return self
def xAxisVisibleSet(self, prop: bool):
self.xAxisVisible = prop
return self
def xAxisTickintervalSet(self, prop: float):
self.xAxisTickinterval = prop
return self
def yAxisVisibleSet(self, prop: bool):
self.yAxisVisible = prop
return self
def yAxisLabelsEnabledSet(self, prop: bool):
self.yAxisLabelsEnabled = prop
return self
def yAxisLabelsStyleSet(self, prop: AAStyle):
self.yAxisLabelsStyle = prop
return self
def yAxisTitleSet(self, prop: str):
self.yAxisTitle = prop
return self
def xAxisTitleSet(self, prop: str):
self.xAxisTitle = prop
return self
def yAxisLineWidthSet(self, prop: float):
self.yAxisLineWidth = prop
return self
def yAxisMinSet(self, prop: float):
self.yAxisMin = prop
return self
def yAxisMaxSet(self, prop: float):
self.yAxisMax = prop
return self
def yAxisAllowDecimalsSet(self, prop: bool):
self.yAxisAllowDecimals = prop
return self
def yAxisGridLineWidthSet(self, prop: float):
self.yAxisGridLineWidth = prop
return self
def colorsThemeSet(self, prop: list):
self.colorsTheme = prop
return self
def seriesSet(self, prop: list):
self.series = prop
return self
def legendEnabledSet(self, prop: bool):
self.legendEnabled = prop
return self
def backgroundColorSet(self, prop: str):
self.backgroundColor = prop
return self
def borderRadiusSet(self, prop: float):
self.borderRadius = prop
return self
def touchEventEnabledSet(self, prop: bool):
self.touchEventEnabled = prop
return self
def scrollablePlotAreaSet(self, prop: AAScrollablePlotArea):
self.scrollablePlotArea = prop
return self
def aa_toAAOptions(self):
from aacharts.aachartcreator.AAOptionsComposer import AAOptionsComposer
aaOptions = AAOptionsComposer.configureChartOptions(self)
return aaOptions
```
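Every `...Set` method returns `self`, so a model is built as one fluent chain, exactly as the demo code later in this repo does; a minimal sketch:

```python
from aacharts.aachartcreator.AAChartModel import AAChartModel, AAChartType
from aacharts.aachartcreator.AASeriesElement import AASeriesElement

# Fluent builder pattern: each setter returns self, so calls chain.
chart_model = (AAChartModel()
    .chartTypeSet(AAChartType.line)
    .titleSet("Monthly Average Temperature")
    .categoriesSet(["Jan", "Feb", "Mar"])
    .seriesSet([AASeriesElement()
        .nameSet("Tokyo")
        .dataSet([7.0, 6.9, 9.5])]))

aa_options = chart_model.aa_toAAOptions()  # AAOptions ready for the chart view
```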
#### File: aacharts/aaoptionsmodel/AASeries.py
```python
from typing import List
from aacharts.aaenum.AAEnum import AAChartStackingType
from aacharts.aaoptionsmodel.AADataLabels import AADataLabels
from aacharts.aaoptionsmodel.AAMarker import AAMarker
from aacharts.aaoptionsmodel.AAShadow import AAShadow
from aacharts.aaoptionsmodel.AAStates import AAStates
from aacharts.aaoptionsmodel.AAAnimation import AAAnimation
from aacharts.aatool.AAStringPurer import AAStringPurer
class AAPointEvents:
click: str
mouseOver: str
remove: str
select: str
unselect: str
update: str
def clickSet(self, prop: str):
if (prop != None):
self.click = AAStringPurer.pureJSString(prop)
return self
def mouseOverSet(self, prop: str):
if (prop != None):
self.mouseOver = AAStringPurer.pureJSString(prop)
return self
def removeSet(self, prop: str):
if (prop != None):
self.remove = AAStringPurer.pureJSString(prop)
return self
def selectSet(self, prop: str):
if (prop != None):
self.select = AAStringPurer.pureJSString(prop)
return self
def unselectSet(self, prop: str):
if (prop != None):
self.unselect = AAStringPurer.pureJSString(prop)
return self
def updateSet(self, prop: str):
if (prop != None):
self.update = AAStringPurer.pureJSString(prop)
return self
class AAPoint:
events: AAPointEvents
def eventsSet(self, prop: AAPointEvents):
self.events = prop
return self
class AAEvents:
legendItemClick: str
def legendItemClickSet(self, prop: str):
if (prop != None):
self.legendItemClick = AAStringPurer.pureJSString(prop)
return self
class AASeries:
borderRadius: float
marker: AAMarker
stacking: str
animation: AAAnimation
keys: List
colorByPoint: bool
connectNulls: bool # Whether to reconnect the line across null data points
events: AAEvents
shadow: AAShadow
dataLabels: AADataLabels
states: AAStates
allowPointSelect: bool
point: AAPoint
pointInterval: float
pointIntervalUnit: str
pointPlacement: str #String | Number
pointStart: float
pointPadding: float
groupPadding: float
def borderRadiusSet(self, prop: float):
self.borderRadius = prop
return self
def markerSet(self, prop: AAMarker):
self.marker = prop
return self
def stackingSet(self, prop: AAChartStackingType):
self.stacking = prop.value
return self
def animationSet(self, prop: AAAnimation):
self.animation = prop
return self
def keysSet(self, prop: List):
self.keys = prop
return self
def colorByPointSet(self, prop: bool):
self.colorByPoint = prop
return self
def connectNullsSet(self, prop: bool):
self.connectNulls = prop
return self
def eventsSet(self, prop: AAEvents):
self.events = prop
return self
def shadowSet(self, prop: AAShadow):
self.shadow = prop
return self
def dataLabelsSet(self, prop: AADataLabels):
self.dataLabels = prop
return self
def statesSet(self, prop: AAStates):
self.states = prop
return self
def pointSet(self, prop: AAPoint):
self.point = prop
return self
def pointIntervalSet(self, prop: float):
self.pointInterval = prop
return self
def pointIntervalUnitSet(self, prop: str):
self.pointIntervalUnit = prop
return self
def pointPlacementSet(self, prop: str):
self.pointPlacement = prop
return self
def pointStartSet(self, prop: float):
self.pointStart = prop
return self
def pointPaddingSet(self, prop: float):
self.pointPadding = prop
return self
def groupPaddingSet(self, prop: float):
self.groupPadding = prop
return self
```
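The event hooks above take raw JavaScript function source as strings, normalized through `AAStringPurer.pureJSString`; a minimal sketch mirroring how the demo code wires a point click handler:

```python
from aacharts.aaoptionsmodel.AASeries import AAPoint, AAPointEvents

# Attach a JS click handler to series points; the body is plain JS source.
point_events = AAPointEvents().clickSet("""
    function () {
        if (this.selected == true) {
            this.selected = false;
        }
    }
""")
point = AAPoint().eventsSet(point_events)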
#### File: aacharts/aaoptionsmodel/AAShadow.py
```python
class AAShadow:
color: str
offsetX: float
offsetY: float
opacity: float
width: float
def colorSet(self, prop: str):
self.color = prop
return self
def offsetXSet(self, prop: float):
self.offsetX = prop
return self
def offsetYSet(self, prop: float):
self.offsetY = prop
return self
def opacitySet(self, prop: float):
self.opacity = prop
return self
def widthSet(self, prop: float):
self.width = prop
return self
```
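The shadow setters chain like the rest of the model classes; a minimal sketch with illustrative values, attachable to a series via `AASeries.shadowSet`:

```python
from aacharts.aaoptionsmodel.AAShadow import AAShadow

# A soft drop shadow (values are illustrative, not defaults).
shadow = (AAShadow()
    .colorSet("rgba(0,0,0,0.3)")
    .offsetXSet(0)
    .offsetYSet(2)
    .opacitySet(0.5)
    .widthSet(5))
```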
#### File: aacharts/aaoptionsmodel/AATooltip.py
```python
from aacharts.aaoptionsmodel.AAStyle import AAStyle
from aacharts.aaoptionsmodel.AAXAxis import AADateTimeLabelFormats
from aacharts.aatool.AAStringPurer import AAStringPurer
class AATooltip:
backgroundColor: str
borderColor: str
borderRadius: float
borderWidth: float
style: AAStyle
enabled: bool = True
useHTML: bool
formatter: str
headerFormat: str
pointFormat: str
footerFormat: str
valueDecimals: int
shared: bool
valueSuffix: str
followTouchMove: bool # https://api.highcharts.com.cn/highcharts#chart.panning
shadow: bool
padding: float
pointFormatter: str
positioner: str
dateTimeLabelFormats: AADateTimeLabelFormats
def backgroundColorSet(self, prop: str):
self.backgroundColor = prop
return self
def borderColorSet(self, prop: str):
self.borderColor = prop
return self
def borderRadiusSet(self, prop: float):
self.borderRadius = prop
return self
def borderWidthSet(self, prop: float):
self.borderWidth = prop
return self
def styleSet(self, prop: AAStyle):
self.style = prop
return self
def enabledSet(self, prop: bool):
self.enabled = prop
return self
def useHTMLSet(self, prop: bool):
self.useHTML = prop
return self
def formatterSet(self, prop: str):
self.formatter = AAStringPurer.pureJSString(prop)
return self
def headerFormatSet(self, prop: str):
self.headerFormat = prop
return self
def pointFormatSet(self, prop: str):
self.pointFormat = prop
return self
def footerFormatSet(self, prop: str):
self.footerFormat = prop
return self
def valueDecimalsSet(self, prop: int):
self.valueDecimals = prop
return self
def sharedSet(self, prop: bool):
self.shared = prop
return self
def valueSuffixSet(self, prop: str):
self.valueSuffix = prop
return self
def followTouchMoveSet(self, prop: bool):
self.followTouchMove = prop
return self
def shadowSet(self, prop: bool):
self.shadow = prop
return self
def paddingSet(self, prop: float):
self.padding = prop
return self
def pointFormatterSet(self, prop: str):
self.pointFormatter = AAStringPurer.pureJSString(prop)
return self
def positionerSet(self, prop: str):
self.positioner = AAStringPurer.pureJSString(prop)
return self
def dateTimeLabelFormatsSet(self, prop: AADateTimeLabelFormats):
self.dateTimeLabelFormats = prop
return self
```
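A minimal sketch of a chained tooltip configuration; `formatterSet` expects JavaScript function source, which is passed through `AAStringPurer.pureJSString` (the JS body here is illustrative):

```python
from aacharts.aaoptionsmodel.AATooltip import AATooltip

tooltip = (AATooltip()
    .enabledSet(True)
    .sharedSet(False)
    .valueDecimalsSet(2)
    .formatterSet("""
        function () {
            return this.x + ': ' + this.y;
        }
    """))
```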
#### File: aacharts/aatool/AAGradientColor.py
```python
from typing import List
from enum import Enum
class AALinearGradientDirection(Enum):
toTop = 0           # ⇧⇧⇧⇧⇧⇧
toBottom = 1        # ⇩⇩⇩⇩⇩⇩
toLeft = 2          # ⇦⇦⇦⇦⇦⇦
toRight = 3         # ⇨⇨⇨⇨⇨⇨
toTopLeft = 4       # ⇖⇖⇖⇖⇖⇖
toTopRight = 5      # ⇗⇗⇗⇗⇗⇗
toBottomLeft = 6    # ⇙⇙⇙⇙⇙⇙
toBottomRight = 7   # ⇘⇘⇘⇘⇘⇘
class AAGradientColor:
# oceanBlue: map = AAGradientColor.oceanBlueColor(AALinearGradientDirection.toTop)
#
# sanguine: map = AAGradientColor.sanguineColor(AALinearGradientDirection.toTop)
#
# lusciousLime: map = AAGradientColor.lusciousLimeColor(AALinearGradientDirection.toTop)
#
# purpleLake: map = AAGradientColor.purpleLakeColor(AALinearGradientDirection.toTop)
#
# freshPapaya: map = AAGradientColor.freshPapayaColor(AALinearGradientDirection.toTop)
#
# ultramarine: map = AAGradientColor.ultramarineColor(AALinearGradientDirection.toTop)
#
# pinkSugar: map = AAGradientColor.pinkSugarColor(AALinearGradientDirection.toTop)
#
# lemonDrizzle: map = AAGradientColor.lemonDrizzleColor(AALinearGradientDirection.toTop)
#
# victoriaPurple: map = AAGradientColor.victoriaPurpleColor(AALinearGradientDirection.toTop)
#
# springGreens: map = AAGradientColor.springGreensColor(AALinearGradientDirection.toTop)
#
# mysticMauve: map = AAGradientColor.mysticMauveColor(AALinearGradientDirection.toTop)
#
# reflexSilver: map = AAGradientColor.reflexSilverColor(AALinearGradientDirection.toTop)
# neonGlow: map = AAGradientColor.neonGlowColor(AALinearGradientDirection.toTop)
#
# berrySmoothie: map = AAGradientColor.berrySmoothieColor(AALinearGradientDirection.toTop)
#
# newLeaf: map = AAGradientColor.newLeafColor(AALinearGradientDirection.toTop)
#
# cottonCandy: map = AAGradientColor.cottonCandyColor(AALinearGradientDirection.toTop)
#
# pixieDust: map = AAGradientColor.pixieDustColor(AALinearGradientDirection.toTop)
#
# fizzyPeach: map = AAGradientColor.fizzyPeachColor(AALinearGradientDirection.toTop)
#
# sweetDream: map = AAGradientColor.sweetDreamColor(AALinearGradientDirection.toTop)
#
# firebrick: map = AAGradientColor.firebrickColor(AALinearGradientDirection.toTop)
#
# wroughtIron: map = AAGradientColor.wroughtIronColor(AALinearGradientDirection.toTop)
#
# deepSea: map = AAGradientColor.deepSeaColor(AALinearGradientDirection.toTop)
#
# coastalBreeze: map = AAGradientColor.coastalBreezeColor(AALinearGradientDirection.toTop)
#
# eveningDelight: map = AAGradientColor.eveningDelightColor(AALinearGradientDirection.toTop)
# @staticmethod
# def oceanBlue():
# return AAGradientColor.linearGradient1(AALinearGradientDirection.toTop, "#2E3192", "#1BFFFF")
@staticmethod
def oceanBlueColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#2E3192","#1BFFFF")
@staticmethod
def sanguineColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#D4145A","#FBB03B")
@staticmethod
def lusciousLimeColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#009245","#FCEE21")
@staticmethod
def purpleLakeColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#662D8C","#ED1E79")
@staticmethod
def freshPapayaColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#ED1C24","#FCEE21")
@staticmethod
def ultramarineColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#00A8C5","#FFFF7E")
@staticmethod
def pinkSugarColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#D74177","#FFE98A")
@staticmethod
def lemonDrizzleColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#FB872B","#D9E021")
@staticmethod
def victoriaPurpleColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#312A6C","#852D91")
@staticmethod
def springGreensColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#009E00","#FFFF96")
@staticmethod
def mysticMauveColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#B066FE","#63E2FF")
@staticmethod
def reflexSilverColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#808080","#E6E6E6")
@staticmethod
def neonGlowColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#00FFA1","#00FFFF")
@staticmethod
def berrySmoothieColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#8E78FF","#FC7D7B")
@staticmethod
def newLeafColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#00537E","#3AA17E")
@staticmethod
def cottonCandyColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#FCA5F1","#B5FFFF")
@staticmethod
def pixieDustColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#D585FF","#00FFEE")
@staticmethod
def fizzyPeachColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#F24645","#EBC08D")
@staticmethod
def sweetDreamColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#3A3897","#A3A1FF")
@staticmethod
def firebrickColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#45145A","#FF5300")
@staticmethod
def wroughtIronColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#333333","#5A5454")
@staticmethod
def deepSeaColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#4F00BC","#29ABE2")
@staticmethod
def coastalBreezeColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#00B7FF","#FFFFC7")
@staticmethod
def eveningDelightColor(direction: AALinearGradientDirection):
return AAGradientColor.linearGradient1(direction, "#93278F", "#00A99D")
@staticmethod
def linearGradient0(
startColor: str,
endColor: str):
return AAGradientColor.linearGradient1(
AALinearGradientDirection.toTop,
startColor,
endColor
)
@staticmethod
def linearGradient1(
direction: AALinearGradientDirection,
startColor: str,
endColor: str):
return AAGradientColor.linearGradient2(
direction,[
[0, startColor],
[1, endColor]
])
@staticmethod
def linearGradient2(
direction: AALinearGradientDirection,
stops: List):
linearGradient: map = AAGradientColor.linearGradientDirectionDictionary(direction)
return {
"linearGradient": linearGradient,
"stops": stops #颜色字符串设置支持十六进制类型和 rgba 类型
}
@staticmethod
def linearGradientDirectionDictionary(direction: AALinearGradientDirection):
if direction == AALinearGradientDirection.toTop:
return {"x1":0, "y1":1, "x2":0, "y2":0}
elif direction == AALinearGradientDirection.toBottom:
return {"x1":0, "y1":0, "x2":0, "y2":1}
elif direction == AALinearGradientDirection.toLeft:
return {"x1":1, "y1":0, "x2":0, "y2":0}
elif direction == AALinearGradientDirection.toRight:
return {"x1":0, "y1":0, "x2":1, "y2":0}
elif direction == AALinearGradientDirection.toTopLeft:
return {"x1":1, "y1":1, "x2":0, "y2":0}
elif direction == AALinearGradientDirection.toTopRight:
return {"x1":0, "y1":1, "x2":1, "y2":0}
elif direction == AALinearGradientDirection.toBottomLeft:
return {"x1":1, "y1":0, "x2":0, "y2":1}
elif direction == AALinearGradientDirection.toBottomRight:
return {"x1":0, "y1":0, "x2":1, "y2":1}
AAGradientColor.oceanBlue = AAGradientColor.oceanBlueColor(AALinearGradientDirection.toTop)
AAGradientColor.sanguine = AAGradientColor.sanguineColor(AALinearGradientDirection.toTop)
AAGradientColor.lusciousLime = AAGradientColor.lusciousLimeColor(AALinearGradientDirection.toTop)
AAGradientColor.purpleLake = AAGradientColor.purpleLakeColor(AALinearGradientDirection.toTop)
AAGradientColor.freshPapaya = AAGradientColor.freshPapayaColor(AALinearGradientDirection.toTop)
AAGradientColor.ultramarine = AAGradientColor.ultramarineColor(AALinearGradientDirection.toTop)
AAGradientColor.pinkSugar = AAGradientColor.pinkSugarColor(AALinearGradientDirection.toTop)
AAGradientColor.lemonDrizzle = AAGradientColor.lemonDrizzleColor(AALinearGradientDirection.toTop)
AAGradientColor.victoriaPurple = AAGradientColor.victoriaPurpleColor(AALinearGradientDirection.toTop)
AAGradientColor.springGreens = AAGradientColor.springGreensColor(AALinearGradientDirection.toTop)
AAGradientColor.mysticMauve = AAGradientColor.mysticMauveColor(AALinearGradientDirection.toTop)
AAGradientColor.reflexSilver = AAGradientColor.reflexSilverColor(AALinearGradientDirection.toTop)
AAGradientColor.neonGlow = AAGradientColor.neonGlowColor(AALinearGradientDirection.toTop)
AAGradientColor.berrySmoothie = AAGradientColor.berrySmoothieColor(AALinearGradientDirection.toTop)
AAGradientColor.newLeaf = AAGradientColor.newLeafColor(AALinearGradientDirection.toTop)
AAGradientColor.cottonCandy = AAGradientColor.cottonCandyColor(AALinearGradientDirection.toTop)
AAGradientColor.pixieDust = AAGradientColor.pixieDustColor(AALinearGradientDirection.toTop)
AAGradientColor.fizzyPeach = AAGradientColor.fizzyPeachColor(AALinearGradientDirection.toTop)
AAGradientColor.sweetDream = AAGradientColor.sweetDreamColor(AALinearGradientDirection.toTop)
AAGradientColor.firebrick = AAGradientColor.firebrickColor(AALinearGradientDirection.toTop)
AAGradientColor.wroughtIron = AAGradientColor.wroughtIronColor(AALinearGradientDirection.toTop)
AAGradientColor.deepSea = AAGradientColor.deepSeaColor(AALinearGradientDirection.toTop)
AAGradientColor.coastalBreeze = AAGradientColor.coastalBreezeColor(AALinearGradientDirection.toTop)
AAGradientColor.eveningDelight = AAGradientColor.eveningDelightColor(AALinearGradientDirection.toTop)
```
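Each direction resolves to a Highcharts `linearGradient` dictionary plus a stop list; a sketch of what the factory actually returns:

```python
from aacharts.aatool.AAGradientColor import AAGradientColor, AALinearGradientDirection

gradient = AAGradientColor.linearGradient1(
    AALinearGradientDirection.toBottom, "#2E3192", "#1BFFFF")
# gradient == {
#     "linearGradient": {"x1": 0, "y1": 0, "x2": 0, "y2": 1},
#     "stops": [[0, "#2E3192"], [1, "#1BFFFF"]],
# }
```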
#### File: aacharts/aatool/AAJSArrayConverter.py
```python
class AAJSArrayConverter:
@staticmethod
def JSArrayWithHaxeArray(arr):
jsArrStr = ""
for element in arr:
jsArrStr = jsArrStr + f"'{element}',"
return f"[{jsArrStr}]"
```
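The converter emits a JavaScript array literal as a string (the trailing comma is legal JS), ready to be spliced into formatter function source, as the demo code below does:

```python
from aacharts.aatool.AAJSArrayConverter import AAJSArrayConverter

print(AAJSArrayConverter.JSArrayWithHaxeArray(["Jan", "Feb", "Mar"]))
# ['Jan','Feb','Mar',]
```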
#### File: AACharts-PyQt/demo/JSFuncOptionsComposer.py
```python
from aacharts.aatool.AAColor import AAColor
from aacharts.aatool.AAGradientColor import AAGradientColor, AALinearGradientDirection
from aacharts.aatool.AAJSArrayConverter import AAJSArrayConverter
from aacharts.aachartcreator.AASeriesElement import AASeriesElement
from aacharts.aachartcreator.AAChartModel import *
from aacharts.aaoptionsmodel.AAMarker import AAMarker
from aacharts.aaoptionsmodel.AADataElement import AADataElement
from aacharts.aaoptionsmodel.AADataLabels import AADataLabels
from aacharts.aaoptionsmodel.AACrosshair import AACrosshair
from aacharts.aaoptionsmodel.AAStates import AAStates, AAHover, AAHalo, AAInactive, AASelect
from aacharts.aaoptionsmodel.AALegend import AAItemStyle
from aacharts.aaoptionsmodel.AASeries import AAEvents, AAPoint, AAPointEvents
from aacharts.aaoptionsmodel.AALang import AALang
from aacharts.aaoptionsmodel.AAPlotOptions import AAColumn
import random
from string import Template
class JSFuncOptionsComposer:
@staticmethod
def customAreaChartTooltipStyleWithSimpleFormatString():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.area)  # chart type
.titleSet("近三个月金价起伏周期图")  # chart main title
.subtitleSet("金价(元/克)")  # chart subtitle
.markerSymbolStyleSet(AAChartSymbolStyleType.borderBlank)  # marker style: hollow outer edge
.dataLabelsEnabledSet(False)
.categoriesSet([
"10-01", "10-02", "10-03", "10-04", "10-05", "10-06", "10-07", "10-08", "10-09", "10-10", "10-11",
"10-12", "10-13", "10-14", "10-15", "10-16", "10-17", "10-18", "10-19", "10-20", "10-21", "10-22",
"10-23", "10-24", "10-25", "10-26", "10-27", "10-28", "10-29", "10-30", "10-31", "11-01", "11-02",
"11-03", "11-04", "11-05", "11-06", "11-07", "11-08", "11-09", "11-10", "11-11", "11-12", "11-13",
"11-14", "11-15", "11-16", "11-17", "11-18", "11-19", "11-20", "11-21", "11-22", "11-23", "11-24",
"11-25", "11-26", "11-27", "11-28", "11-29", "11-30", "12-01", "12-02", "12-03", "12-04", "12-05",
"12-06", "12-07", "12-08", "12-09", "12-10", "12-11", "12-12", "12-13", "12-14", "12-15", "12-16",
"12-17", "12-18", "12-19", "12-20", "12-21", "12-22", "12-23", "12-24", "12-25", "12-26", "12-27",
"12-28", "12-29", "12-30"
])
.seriesSet([
AASeriesElement()
.nameSet("2020")
.lineWidthSet(3)
.colorSet("#FFD700")#纯金色
.fillOpacitySet(0.5)
.dataSet([
1.51, 6.70, 0.94, 1.44, 1.60, 1.63, 1.56, 1.91, 2.45, 3.87, 3.24, 4.90, 4.61, 4.10,
4.17, 3.85, 4.17, 3.46, 3.46, 3.55, 3.50, 4.13, 2.58, 2.28, 1.51, 12.7, 0.94, 1.44,
18.6, 1.63, 1.56, 1.91, 2.45, 3.87, 3.24, 4.90, 4.61, 4.10, 4.17, 3.85, 4.17, 3.46,
3.46, 3.55, 3.50, 4.13, 2.58, 2.28, 1.33, 4.68, 1.31, 1.10, 13.9, 1.10, 1.16, 1.67,
2.64, 2.86, 3.00, 3.21, 4.14, 4.07, 3.68, 3.11, 3.41, 3.25, 3.32, 3.07, 3.92, 3.05,
2.18, 3.24, 3.23, 3.15, 2.90, 1.81, 2.11, 2.43, 5.59, 3.09, 4.09, 6.14, 5.33, 6.05,
5.71, 6.22, 6.56, 4.75, 5.27, 6.02, 5.48
])
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
.useHTMLSet(True)
.formatterSet("""
function () {
return ' 🌕 🌖 🌗 🌘 🌑 🌒 🌓 🌔 <br/> '
+ ' Support JavaScript Function Just Right Now !!! <br/> '
+ ' The Gold Price For <b>2020 '
+ this.x
+ ' </b> Is <b> '
+ this.y
+ ' </b> Dollars ';
}
""")
.valueDecimalsSet(2)  # round displayed values to two decimal places
.backgroundColorSet(AAColor.black)
.borderColorSet(AAColor.black)
.styleSet(AAStyle.colorSize("#FFD700", 12)))
return aaOptions
@staticmethod
def customAreaChartTooltipStyleWithDifferentUnitSuffix():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.areaspline)  # chart type
.titleSet("2014 ~ 2020 汪星人生存指数")  # chart main title
.subtitleSet("数据来源:www.无任何可靠依据.<EMAIL>")  # chart subtitle
.markerSymbolStyleSet(AAChartSymbolStyleType.innerBlank)
.colorsThemeSet([
AAGradientColor.oceanBlue,
AAGradientColor.sanguine,
])
.dataLabelsEnabledSet(False)
.stackingSet(AAChartStackingType.normal)
.seriesSet([
AASeriesElement()
.nameSet("🐶狗子")
.lineWidthSet(5.0)
.dataSet([0.45, 0.43, 0.50, 0.55, 0.58, 0.62, 0.83, 0.39, 0.56, 0.67, 0.50, 0.34, 0.50, 0.67, 0.58, 0.29, 0.46, 0.23, 0.47, 0.46, 0.38, 0.56, 0.48, 0.36])
,
AASeriesElement()
.nameSet("🌲树木")
.lineWidthSet(5.0)
.dataSet([0.38, 0.31, 0.32, 0.32, 0.64, 0.66, 0.86, 0.47, 0.52, 0.75, 0.52, 0.56, 0.54, 0.60, 0.46, 0.63, 0.54, 0.51, 0.58, 0.64, 0.60, 0.45, 0.36, 0.67])
,
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
.useHTMLSet(True)
.enabledSet(True)
.formatterSet("""
function () {
var s = '第' + '<b>' + this.x + '</b>' + '年' + '<br/>';
let colorDot1 = '<span style=' + 'color:#1e90ff; font-size:13px' + '>◉</span> ';
let colorDot2 = '<span style=' + 'color:#ef476f; font-size:13px' + '>◉</span> ';
let s1 = colorDot1 + this.points[0].series.name + ': ' + this.points[0].y + '只' + '<br/>';
let s2 = colorDot2 + this.points[1].series.name + ': ' + this.points[1].y + '棵';
s += s1 + s2;
return s;
}
"""))
# disable the legend item click event
aaOptions.plotOptions.series.events = (
AAEvents()
.legendItemClickSet("""
function() {
return false;
}
"""))
return aaOptions
@staticmethod
def customAreaChartTooltipStyleWithColorfulHtmlLabels():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.areaspline)  # chart type
.markerSymbolStyleSet(AAChartSymbolStyleType.borderBlank)  # marker style: hollow outer edge
.dataLabelsEnabledSet(False)
.colorsThemeSet(["#04d69f","#1e90ff","#ef476f","#ffd066",])
.stackingSet(AAChartStackingType.normal)
.markerRadiusSet(0)
.seriesSet([
AASeriesElement()
.nameSet("Tokyo Hot")
.lineWidthSet(5.0)
.fillOpacitySet(0.4)
.dataSet([0.45, 0.43, 0.50, 0.55, 0.58, 0.62, 0.83, 0.39, 0.56, 0.67, 0.50, 0.34, 0.50, 0.67, 0.58, 0.29, 0.46, 0.23, 0.47, 0.46, 0.38, 0.56, 0.48, 0.36])
,
AASeriesElement()
.nameSet("Berlin Hot")
.lineWidthSet(5.0)
.fillOpacitySet(0.4)
.dataSet([0.38, 0.31, 0.32, 0.32, 0.64, 0.66, 0.86, 0.47, 0.52, 0.75, 0.52, 0.56, 0.54, 0.60, 0.46, 0.63, 0.54, 0.51, 0.58, 0.64, 0.60, 0.45, 0.36, 0.67])
,
AASeriesElement()
.nameSet("New York Hot")
.lineWidthSet(5.0)
.fillOpacitySet(0.4)
.dataSet([0.46, 0.32, 0.53, 0.58, 0.86, 0.68, 0.85, 0.73, 0.69, 0.71, 0.91, 0.74, 0.60, 0.50, 0.39, 0.67, 0.55, 0.49, 0.65, 0.45, 0.64, 0.47, 0.63, 0.64])
,
AASeriesElement()
.nameSet("London Hot")
.lineWidthSet(5.0)
.fillOpacitySet(0.4)
.dataSet([0.60, 0.51, 0.52, 0.53, 0.64, 0.84, 0.65, 0.68, 0.63, 0.47, 0.72, 0.60, 0.65, 0.74, 0.66, 0.65, 0.71, 0.59, 0.65, 0.77, 0.52, 0.53, 0.58, 0.53])
,
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
.useHTMLSet(True)
.formatterSet("""
function () {
let wholeContentStr ='<span style=' + 'color:lightGray; font-size:13px' + '>◉ Time: ' + this.x + ' year</span><br/>';
let length = this.points.length;
for (let i = 0; i < length; i++) {
let thisPoint = this.points[i];
let yValue = thisPoint.y;
if (yValue != 0) {
let spanStyleStartStr = '<span style=' + 'color:'+ thisPoint.color + '; font-size:13px' + '>◉ ';
let spanStyleEndStr = '</span> <br/>';
wholeContentStr += spanStyleStartStr + thisPoint.series.name + ': ' + thisPoint.y + '℃' + spanStyleEndStr;
}
}
return wholeContentStr;
}
""")
.backgroundColorSet("#050505")
.borderColorSet("#050505"))
return aaOptions
@staticmethod
def customLineChartTooltipStyleWhenValueBeZeroDoNotShow():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.line)  # chart type
.markerSymbolStyleSet(AAChartSymbolStyleType.borderBlank)  # marker style: hollow outer edge
.dataLabelsEnabledSet(False)
.categoriesSet(["临床一期","临床二期","临床三期"])
.seriesSet([
AASeriesElement()
.nameSet("上市")
.dataSet([0,0,7])
,
AASeriesElement()
.nameSet("中止")
.dataSet([4,5,1])
,
AASeriesElement()
.nameSet("无进展")
.dataSet([2,0,1])
,
AASeriesElement()
.nameSet("进行中")
.dataSet([3,5,2])
,
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
.useHTMLSet(True)
.formatterSet("""
function () {
let wholeContentStr = this.points[0].x + '<br/>';
let length = this.points.length;
for (let i = 0; i < length; i++) {
let thisPoint = this.points[i];
let yValue = thisPoint.y;
if (yValue != 0) {
let prefixStr = '<span style=' + 'color:'+ thisPoint.color + '; font-size:13px' + '>◉ ';
wholeContentStr += prefixStr + thisPoint.series.name + ': ' + yValue + '<br/>';
}
}
return wholeContentStr;
}
"""))
return aaOptions
@staticmethod
def customBoxplotTooltipContent():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.boxplot)
.titleSet("BOXPLOT CHART")
.subtitleSet("virtual data")
.yAxisTitleSet("℃")
.yAxisVisibleSet(True)
.seriesSet([
AASeriesElement()
.nameSet("Observed Data")
.colorSet("#ef476f")
.fillColorSet(AAGradientColor.deepSea)
.dataSet([
[760, 801, 848, 895, 965],
[733, 853, 939, 980, 1080],
[714, 762, 817, 870, 918],
[724, 802, 806, 871, 950],
[834, 836, 864, 882, 910]
])
,
]))
pointFormatStr = (
"◉</span> <b> {series.name}</b><br/>"
+ "最大值: {point.high}<br/>"
+ "Q2: {point.q3}<br/>"
+ "中位数: {point.median}<br/>"
+ "Q1: {point.q1}<br/>"
+ "最小值: {point.low}<br/>"
)
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
.useHTMLSet(True)
.headerFormatSet("<em>实验号码: point.key</em><br/>")
.pointFormatSet(pointFormatStr)
.valueDecimalsSet(2)  # round displayed values to two decimal places
.backgroundColorSet(AAColor.black)
.borderColorSet(AAColor.black)
.styleSet(AAStyle.colorSize("#1e90ff", 12)))
return aaOptions
@staticmethod
def customYAxisLabels():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.line)  # chart type
.markerSymbolStyleSet(AAChartSymbolStyleType.borderBlank)  # marker style: hollow outer edge
.dataLabelsEnabledSet(False)
.colorsThemeSet(["#04d69f","#1e90ff","#ef476f","#ffd066",])
.stackingSet(AAChartStackingType.normal)
.markerRadiusSet(8)
.seriesSet([
AASeriesElement()
.nameSet("Scores")
.lineWidthSet(5.0)
.fillOpacitySet(0.4)
.dataSet([29.9, 71.5, 106.4, 129.2, 144.0, 176.0, 135.6, 148.5, 216.4, 194.1, 95.6, 54.4])
,
]))
aaYAxisLabels = (
AALabels()
.formatterSet("""
function () {
let yValue = this.value;
if (yValue >= 200) {
return 'Excellent';
} else if (yValue >= 150 && yValue < 200) {
return 'Very Good';
} else if (yValue >= 100 && yValue < 150) {
return 'Good';
} else if (yValue >= 50 && yValue < 100) {
return 'Not Bad';
} else {
return 'Just So So';
}
}
"""))
aaOptions = aaChartModel.aa_toAAOptions()
aaOptions.yAxis.labelsSet(aaYAxisLabels)
return aaOptions
@staticmethod
def customYAxisLabels2():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.line)  # chart type
.markerSymbolStyleSet(AAChartSymbolStyleType.borderBlank)  # marker style: hollow outer edge
.dataLabelsEnabledSet(False)
.colorsThemeSet(["#04d69f","#1e90ff","#ef476f","#ffd066",])
.stackingSet(AAChartStackingType.normal)
.markerRadiusSet(8)
.seriesSet([
AASeriesElement()
.nameSet("Tokyo Hot")
.lineWidthSet(5.0)
.fillOpacitySet(0.4)
.dataSet([229.9, 771.5, 1106.4, 1129.2, 6644.0, 1176.0, 8835.6, 148.5, 8816.4, 6694.1, 7795.6, 9954.4])
]))
aaYAxisLabels = (
AALabels()
.styleSet(AAStyle.colorSizeWeight(AAColor.gray, 10, AAChartFontWeightType.bold))
.formatterSet("""
function () {
let yValue = this.value;
if (yValue == 0) {
return '0';
} else if (yValue == 2500) {
return '25%';
} else if (yValue == 5000) {
return '50%';
} else if (yValue == 7500) {
return '75%';
} else if (yValue == 10000) {
return '100%';
}
}
"""))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.yAxis
.oppositeSet(True)
.tickWidthSet(2)
.lineWidthSet(1.5)  # Y axis line width
.lineColorSet(AAColor.lightGray)  # Y axis line color
.gridLineWidthSet(0)  # Y axis grid line width
.tickPositionsSet([0,2500,5000,7500,10000])
.labelsSet(aaYAxisLabels))
return aaOptions
@staticmethod
def customStackedAndGroupedColumnChartTooltip():
aaChartModel = (AAChartModel()
.titleSet("Total fruit consumtion, grouped by gender")
.subtitleSet("stacked and grouped")
.yAxisTitleSet("Number of fruits")
.chartTypeSet(AAChartType.column)
.legendEnabledSet(False)  # hide the legend (the tappable dots at the bottom)
.stackingSet(AAChartStackingType.normal)
.categoriesSet(["Apples", "Oranges", "Pears","Grapes","Bananas",])
.dataLabelsEnabledSet(True)
.seriesSet([
AASeriesElement()
.nameSet("John")
.dataSet([5,3,4,7,2,])
.stackSet("male")
,
AASeriesElement()
.nameSet("Joe")
.dataSet([3,4,4,2,5,])
.stackSet("male")
,
AASeriesElement()
.nameSet("Jane")
.dataSet([2,5,6,2,1,])
.stackSet("female")
,
AASeriesElement()
.nameSet("Janet")
.dataSet([3,0,4, 4,3,])
.stackSet("female")
,
]))
# Custom Tooltip Style --- customize the chart's floating tooltip style and content
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
.sharedSet(False)
.formatterSet("""
function () {
return '<b>'
+ this.x
+ '</b><br/>'
+ this.series.name
+ ': '
+ this.y
+ '<br/>'
+ 'Total: '
+ this.point.stackTotal;
}
"""))
return aaOptions
@staticmethod
def customDoubleXAxesChart():
gradientColorDic1 = (AAGradientColor.linearGradient1(
AALinearGradientDirection.toTop,
"#7052f4",
"#00b0ff"#颜色字符串设置支持十六进制类型和 rgba 类型
))
gradientColorDic2 = (AAGradientColor.linearGradient1(
AALinearGradientDirection.toTop,
"#EF71FF",
"#4740C8"#颜色字符串设置支持十六进制类型和 rgba 类型
))
aaChart = (AAChart()
.typeSet(AAChartType.bar))
aaTitle = (AATitle()
.textSet("2015 年德国人口金字塔")
.styleSet(AAStyle()
.colorSet(AAColor.black)
.fontSizeSet(12.0)))
aaCategories = [
"0-4", "5-9", "10-14", "15-19",
"20-24", "25-29", "30-34", "35-39", "40-44",
"45-49", "50-54", "55-59", "60-64", "65-69",
"70-74", "75-79", "80-84", "85-89", "90-94",
"95-99", "100 + "
]
aaXAxis1 = (AAXAxis()
.reversedSet(True)
.categoriesSet(aaCategories)
.labelsSet(AALabels()
.stepSet(1)))
aaXAxis2 = (AAXAxis()
.reversedSet(True)
.oppositeSet(True)
.categoriesSet(aaCategories)
.linkedToSet(0)
.labelsSet(AALabels()
.stepSet(1)))
aaYAxis = (AAYAxis()
.gridLineWidthSet(0)  # Y axis grid line width
.titleSet(AATitle()
.textSet(""))#Y 轴标题
.labelsSet(AALabels()
.formatterSet("""
function () {
return (Math.abs(this.value) / 1000000) + 'M';
}
"""))
.minSet( -4000000)
.maxSet( 4000000))
aaPlotOptions = (AAPlotOptions()
.seriesSet(AASeries()
.animationSet(AAAnimation()
.durationSet(800)
.easingSet(AAChartAnimationType.bounce))
.stackingSet(AAChartStackingType.normal)))
aaTooltip = (AATooltip()
.enabledSet(True)
.sharedSet(False)
.formatterSet("""
function () {
return '<b>' + this.series.name + ', age ' + this.point.category + '</b><br/>' +
'人口: ' + Highcharts.numberFormat(Math.abs(this.point.y), 0);
}
"""))
aaSeriesElement1 = (AASeriesElement()
.nameSet("Men")
.colorSet(gradientColorDic1)
.dataSet([
-1746181, -1884428, -2089758, -2222362, -2537431, -2507081, -2443179,
-2664537, -3556505, -3680231, -3143062, -2721122, -2229181, -2227768,
-2176300, -1329968, -836804, -354784, -90569, -28367, -3878
]))
aaSeriesElement2 = (AASeriesElement()
.nameSet("Women")
.colorSet(gradientColorDic2)
.dataSet([
1656154, 1787564, 1981671, 2108575, 2403438, 2366003, 2301402, 2519874,
3360596, 3493473, 3050775, 2759560, 2304444, 2426504, 2568938, 1785638,
1447162, 1005011, 330870, 130632, 21208
]))
aaOptions = (AAOptions()
.chartSet(aaChart)
.titleSet(aaTitle)
.xAxisArraySet([aaXAxis1,aaXAxis2])
.yAxisSet(aaYAxis)
.plotOptionsSet(aaPlotOptions)
.tooltipSet(aaTooltip)
.seriesSet([aaSeriesElement1,aaSeriesElement2]))
return aaOptions
@staticmethod
def customArearangeChartTooltip():
aaChartModel = (AAChartModel()
.titleSet("LANGUAGE MARKET SHARES JANUARY,2020 TO MAY")
.subtitleSet("virtual data")
.chartTypeSet(AAChartType.arearange)
.markerSymbolStyleSet(AAChartSymbolStyleType.innerBlank)
.seriesSet([
AASeriesElement()
.nameSet("Range")
.colorSet("#1E90FF")
.typeSet(AAChartType.arearange)
.lineWidthSet(0)
.fillOpacitySet(0.3)
.dataSet([
[12464064, 14.3, 27.7],
[12464928, 14.5, 27.8],
[12465792, 15.5, 29.6],
[12466656, 16.7, 30.7],
[12467520, 16.5, 25.0],
[12468384, 17.8, 25.7],
[12469248, 13.5, 24.8],
[12470112, 10.5, 21.4],
[12470976, 9.2, 23.8],
[12471840, 11.6, 21.8],
[12472704, 10.7, 23.7],
[12473568, 11.0, 23.3],
[12474432, 11.6, 23.7],
[12475296, 11.8, 20.7],
[12476160, 12.6, 22.4],
[12477024, 13.6, 19.6],
[12477888, 11.4, 22.6],
[12478752, 13.2, 25.0],
[12479616, 14.2, 21.6],
[12480480, 13.1, 17.1],
[12481344, 12.2, 15.5],
[12482208, 12.0, 20.8],
[12483072, 12.0, 17.1],
[12483936, 12.7, 18.3],
[12484800, 12.4, 19.4],
[12485664, 12.6, 19.9],
[12486528, 11.9, 20.2],
[12487392, 11.0, 19.3],
[12488256, 10.8, 17.8],
[12489120, 11.8, 18.5],
[12489984, 10.8, 16.1]
])
.zIndexSet(0)
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
.useHTMLSet(True)
.formatterSet("""
function () {
let myPointOptions = this.points[0].point.options;
let xValue = myPointOptions.x;
let lowValue = myPointOptions.low;
let highValue = myPointOptions.high;
let titleStr = '🌕 this is my custom tooltip description text content <br>';
let xValueStr = '🌖 this is x value : ' + xValue + '<br>';
let lowValueStr = ' 🌗 this is low value : ' + lowValue + '<br>';
let highValueStr = '🌘 this is high value : ' + highValue + '<br>';
let tooltipDescStr = titleStr + xValueStr + lowValueStr + highValueStr;
return tooltipDescStr;
}
""")
.backgroundColorSet(AAColor.black)
.borderColorSet(AAColor.black)
.styleSet(AAStyle.colorSize("#FFD700", 12)))
return aaOptions
@staticmethod
def customLineChartOriginalPointPositionByConfiguringXAxisFormatterAndTooltipFormatter():
categories = ["Jan", "Feb", "Mar", "Apr", "May", "Jun","Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
categoryJSArrStr = (AAJSArrayConverter.JSArrayWithHaxeArray(categories))
tooltipFormatter = ("""
function () {
return 'The value for <b>' + categoryJSArr[this.x] +
'</b> is <b>' + this.y + '</b> ' + "℃";
}
""")
tooltipFormatter = tooltipFormatter.replace("categoryJSArr", categoryJSArrStr)
xAxisLabelsFormatter = """
function () {
return categoryJSArr[this.value];
}
"""
xAxisLabelsFormatter = xAxisLabelsFormatter.replace("categoryJSArr", categoryJSArrStr)
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.line)
.colorsThemeSet(["#1e90ff","#ef476f","#ffd066","#04d69f","#25547c",])#Colors theme
.xAxisLabelsStyleSet(AAStyle.colorStr(AAColor.white))
.dataLabelsEnabledSet(False)
.tooltipValueSuffixSet("℃")
.animationTypeSet(AAChartAnimationType.bounce)
.backgroundColorSet("#22324c")#To make the chart background color transparent, set backgroundColor to "rgba Set(0,0,0,0)" or "# 00000000". Also make sure `aaChartView!.IsClearBackgroundColor = True`
.touchEventEnabledSet(True)
.seriesSet([
AASeriesElement()
.nameSet("Tokyo")
.dataSet([7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2, 26.5, 23.3, 18.3, 13.9, 9.6])
,
AASeriesElement()
.nameSet("New York")
.dataSet([0.2, 0.8, 5.7, 11.3, 17.0, 22.0, 24.8, 24.1, 20.1, 14.1, 8.6, 2.5])
,
AASeriesElement()
.nameSet("Berlin")
.dataSet([0.9, 0.6, 3.5, 8.4, 13.5, 17.0, 18.6, 17.9, 14.3, 9.0, 3.9, 1.0])
,
AASeriesElement()
.nameSet("London")
.dataSet([3.9, 4.2, 5.7, 8.5, 11.9, 15.2, 17.0, 16.6, 14.2, 10.3, 6.6, 4.8])
,
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
.useHTMLSet(True)
.formatterSet(tooltipFormatter))
(aaOptions.xAxis.labels
.formatterSet(xAxisLabelsFormatter))
return aaOptions
@staticmethod
def customTooltipWhichDataSourceComeFromOutSideRatherThanSeries():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.column)  # chart type
.yAxisTitleSet("")  # Y axis title
.yAxisLineWidthSet(1)  # Y axis line width (0 would hide the axis line)
.yAxisGridLineWidthSet(1)  # Y axis grid line width (0 would hide the grid lines)
.colorsThemeSet(["#FFD700"])  # pure gold
.categoriesSet(["一月", "二月", "三月", "四月", "五月", "六月",
"七月", "八月", "九月", "十月", "十一月", "十二月"])
.yAxisMaxSet(110)
.seriesSet([
AASeriesElement()
.nameSet("2017")
.dataSet([55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, ])
]))
看近时长数组 = [70, 69, 95, 14, 18, 21, 25, 26, 23, 18, 13, 96]
看中时长数组 = [20, 80, 57, 11, 17, 22, 24, 24, 20, 14, 86, 25]
看远时长数组 = [90, 60, 35, 84, 13, 17, 18, 17, 14, 90, 39, 10]
总时长数组 = list()
for i in range(12):
    单个总时长 = 看近时长数组[i] + 看中时长数组[i] + 看远时长数组[i]
    总时长数组.append(float(单个总时长))
有效时长数组 = [39, 42, 57, 85, 19, 15, 17, 16, 14, 13, 66, 48]
切换次数数组 = [
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
]
停止次数数组 = [
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
]
干预次数数组 = [
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
random.randint(0, 10), random.randint(0, 10), random.randint(0, 10),
]
总时长JS数组字符串 = AAJSArrayConverter.JSArrayWithHaxeArray(总时长数组)
有效时长JS数组字符串 = AAJSArrayConverter.JSArrayWithHaxeArray(有效时长数组)
看近时长JS数组字符串 = AAJSArrayConverter.JSArrayWithHaxeArray(看近时长数组)
看中时长JS数组字符串 = AAJSArrayConverter.JSArrayWithHaxeArray(看中时长数组)
看远时长JS数组字符串 = AAJSArrayConverter.JSArrayWithHaxeArray(看远时长数组)
切换次数JS数组字符串 = AAJSArrayConverter.JSArrayWithHaxeArray(切换次数数组)
停止次数JS数组字符串 = AAJSArrayConverter.JSArrayWithHaxeArray(停止次数数组)
干预次数JS数组字符串 = AAJSArrayConverter.JSArrayWithHaxeArray(干预次数数组)
jsFormatterStr = ("""
function () {
let 总时长数组 = ${总时长JS数组};
let 有效时长数组 = ${有效时长JS数组};
let 看近时长数组 = ${看近时长JS数组};
let 看中时长数组 = ${看中时长JS数组};
let 看远时长数组 = ${看远时长JS数组};
let 切换次数数组 = ${切换次数JS数组};
let 停止次数数组 = ${停止次数JS数组};
let 干预次数数组 = ${干预次数JS数组};
let 时间单位后缀 = "min<br/>";
let 频率单位后缀 = "次<br/>";
let pointIndex = this.point.index;
let 单个总时长字符串 = "总时长:    " + 总时长数组[pointIndex] + 时间单位后缀;
let 单个有效时长字符串 = "有效时长:  " + 有效时长数组[pointIndex] + 时间单位后缀;
let 单个看近时长字符串 = "看近时长:  " + 看近时长数组[pointIndex] + 时间单位后缀;
let 单个看中时长字符串 = "看中时长:  " + 看中时长数组[pointIndex] + 时间单位后缀;
let 单个看远时长字符串 = "看远时长:  " + 看远时长数组[pointIndex] + 时间单位后缀;
let 单个切换次数字符串 = "切换次数:  " + 切换次数数组[pointIndex] + 频率单位后缀;
let 单个停止次数字符串 = "停止次数:  " + 停止次数数组[pointIndex] + 频率单位后缀;
let 单个干预次数字符串 = "干预次数:  " + 干预次数数组[pointIndex] + 频率单位后缀;
let wholeContentString = 单个总时长字符串 + 单个有效时长字符串 + 单个看近时长字符串 + 单个看中时长字符串 + 单个看远时长字符串 + 单个切换次数字符串 + 单个停止次数字符串 + 单个干预次数字符串;
return wholeContentString;
}
""")
jsFormatterStr = jsFormatterStr.replace("${总时长JS数组}", 总时长JS数组字符串)
jsFormatterStr = jsFormatterStr.replace("${有效时长JS数组}", 有效时长JS数组字符串)
jsFormatterStr = jsFormatterStr.replace("${看近时长JS数组}", 看近时长JS数组字符串)
jsFormatterStr = jsFormatterStr.replace("${看中时长JS数组}", 看中时长JS数组字符串)
jsFormatterStr = jsFormatterStr.replace("${看远时长JS数组}", 看远时长JS数组字符串)
jsFormatterStr = jsFormatterStr.replace("${切换次数JS数组}", 切换次数JS数组字符串)
jsFormatterStr = jsFormatterStr.replace("${停止次数JS数组}", 停止次数JS数组字符串)
jsFormatterStr = jsFormatterStr.replace("${干预次数JS数组}", 干预次数JS数组字符串)
print(jsFormatterStr)
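# Note (added sketch): the eight `.replace` calls above could be collapsed
# into a single loop over a placeholder->array mapping, e.g.:
#   for placeholder, js_array in {
#       "${总时长JS数组}": 总时长JS数组字符串,
#       "${有效时长JS数组}": 有效时长JS数组字符串,
#       # ... the remaining six placeholders ...
#   }.items():
#       jsFormatterStr = jsFormatterStr.replace(placeholder, js_array)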
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
# ‼️ Reading the selected point's index as this.point.index requires the tooltip's shared to be False
# ‼️ When shared, this.points is an array made up of multiple point objects
# ‼️ When not shared, this.point is a single point object
.sharedSet(False)
.useHTMLSet(True)
.formatterSet(jsFormatterStr)
.backgroundColorSet(AAColor.black)  # black background
.borderColorSet("#FFD700")  # pure gold border
.styleSet(AAStyle.colorSize("#FFD700", 12)))
return aaOptions
# https://github.com/AAChartModel/AAChartKit/issues/852  Custom spider 🕷 chart style
@staticmethod
def customSpiderChartStyle():
categoryArr = [
"周转天数Set(天)",
"订单满足率",
"订单履约时效",
"动销率",
"畅销商品缺货率",
"高库存金额占比",
"不动销金额占比",
"停采金额占比",
]
categoryJSArrStr = (AAJSArrayConverter.JSArrayWithHaxeArray(categoryArr))
xAxisLabelsFormatter = """
function () {
return categoryJSArr[this.value];
}
"""
xAxisLabelsFormatter = xAxisLabelsFormatter.replace("categoryJSArr", categoryJSArrStr)
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.line)  # chart type
.titleSet("健康体检表")  # chart main title
.colorsThemeSet(["#fe117c", "#ffc069",])  # theme color array
.yAxisLineWidthSet(0)
.yAxisGridLineWidthSet(1)  # Y axis grid line width (0 would hide the grid lines)
# .yAxisTickPositionsSet([0, 5, 10, 15, 20, 25, 30, 35])
.markerRadiusSet(5)
.markerSymbolSet(AAChartSymbolType.circle)
.polarSet(True)
.seriesSet([
AASeriesElement()
.nameSet("本月得分")
.dataSet([7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2, 26.5,]),
AASeriesElement()
.nameSet("上月得分")
.dataSet([0.2, 0.8, 5.7, 11.3, 17.0, 22.0, 24.8, 24.1, ]),
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.chart
.marginLeftSet(80)
.marginRightSet(80))
(aaOptions.xAxis
.lineWidthSet(0)  # avoid an extra useless outer ring beyond the polygon
.labels
.styleSet(AAStyle.colorStr(AAColor.black))
.formatterSet(xAxisLabelsFormatter))
(aaOptions.yAxis
.gridLineInterpolationSet("polygon")#设置蜘蛛网🕸图表的网线为多边形
.labelsSet(AALabels()
.styleSet(AAStyle()
.colorSet(AAColor.black))))
# Set the CSS style of legend items; only text-related CSS properties are supported.
# /* Defaults:
#    "color": "#333333",
#    "cursor": "pointer",
#    "fontSize": "12px",
#    "fontWeight": "bold"
# */
aaItemStyle = (AAItemStyle()
.colorSet(AAColor.gray)  # font color
.cursorSet("pointer")  # cursor shape over the series; "pointer" hints the series is clickable (has no effect on mobile, so optional there)
.fontSizeSet(14)  # font size
.fontWeightSet(AAChartFontWeightType.thin))  # thin font weight
(aaOptions.legend
.enabledSet(True)
.alignSet(AAChartAlignType.center)  # horizontal alignment of the legend
.layoutSet(AAChartLayoutType.horizontal)  # lay the legend items out horizontally
.verticalAlignSet(AAChartVerticalAlignType.top)  # pin the legend to the top edge
.itemStyleSet(aaItemStyle))
return aaOptions
# Refer to the issue https://github.com/AAChartModel/AAChartKit/issues/589
@staticmethod
def customizeEveryDataLabelSinglelyByDataLabelsFormatter():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.areaspline)  # chart type
.dataLabelsEnabledSet(True)
.tooltipEnabledSet(False)
.colorsThemeSet([AAGradientColor.fizzyPeach])
.markerRadiusSet(0)
.legendEnabledSet(False)
.categoriesSet(["美国🇺🇸","欧洲🇪🇺","中国🇨🇳","日本🇯🇵","韩国🇰🇷","越南🇻🇳","中国香港🇭🇰",])
.seriesSet([
AASeriesElement()
.dataSet([7.0, 6.9, 2.5, 14.5, 18.2, 21.5, 5.2])
]))
aaOptions = aaChartModel.aa_toAAOptions()
aaOptions.yAxis.gridLineDashStyleSet(AAChartLineDashStyleType.longDash)  # set the Y axis grid line dash style to longDash
aaOptions.tooltip.shared = True
unitArr = ["美元", "欧元", "人民币", "日元", "韩元", "越南盾", "港币", ]
unitJSArrStr = (AAJSArrayConverter.JSArrayWithHaxeArray(unitArr))
# With a single-series chart, the selected point's index is this.point.index; with multiple series and a shared tooltip, it is this.points[0].index
dataLabelsFormatter = """
function () {
return this.y + unitJSArr[this.point.index];
}
"""
dataLabelsFormatter = dataLabelsFormatter.replace("unitJSArr", unitJSArrStr)
aaDataLabels = (AADataLabels()
.enabledSet(True)
.styleSet(AAStyle.colorSizeWeight(AAColor.red, 10, AAChartFontWeightType.bold))
.formatterSet(dataLabelsFormatter)
.backgroundColorSet(AAColor.white)# white color
.borderColorSet(AAColor.red)# red color
.borderRadiusSet(1.5)
.borderWidthSet(1.3)
.xSet(3).ySet(-20)
.verticalAlignSet(AAChartVerticalAlignType.middle))
aaOptions.plotOptions.series.dataLabels = aaDataLabels
return aaOptions
# Refer to GitHub issue: https://github.com/AAChartModel/AAChartKit/issues/938
# Refer to online chart sample: https://www.highcharts.com/demo/column-comparison
@staticmethod
def customXAxisLabelsBeImages():
nameArr = [
"South Korea",
"Japan",
"Australia",
"Germany",
"Russia",
"China",
"Great Britain",
"United States"
]
colorArr = [
"rgb(201, 36, 39)",
"rgb(201, 36, 39)",
"rgb(0, 82, 180)",
"rgb(0, 0, 0)",
"rgb(240, 240, 240)",
"rgb(255, 217, 68)",
"rgb(0, 82, 180)",
"rgb(215, 0, 38)"
]
imageLinkFlagArr = [
"197582",
"197604",
"197507",
"197571",
"197408",
"197375",
"197374",
"197484"
]
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.column)
.titleSet("Custom X Axis Labels Be Images")
.subtitleSet("use HTML")
.categoriesSet(nameArr)
.colorsThemeSet(colorArr)
.borderRadiusSet(5)
.seriesSet([
AASeriesElement()
.nameSet("AD 2020")
.dataSet([7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2, 26.5])
.colorByPointSet(True)
]))
imageLinkFlagJSArrStr = (AAJSArrayConverter.JSArrayWithHaxeArray(imageLinkFlagArr))
xLabelsFormatter = ("function () {\n" +
" let imageFlag = imageLinkFlagJSArr[this.pos];\n" +
" let imageLink = \"<span><img src=\\\"https://image.flaticon.com/icons/svg/197/\" + imageFlag + \".svg\\\" style=\\\"width: 30px; height: 30px;\\\"/><br></span>\";\n" +
" return imageLink;\n" +
" }")
xLabelsFormatter = xLabelsFormatter.replace("imageLinkFlagJSArr", imageLinkFlagJSArrStr)
# https://api.highcharts.com.cn/highcharts#xAxis.labels.formatter
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.xAxis.labels
.useHTMLSet(True)
.formatterSet(xLabelsFormatter))
aaOptions.plotOptions.column.groupPaddingSet(0.005)
#Custom tooltip style
tooltipFormatter = ("function () {\n" +
" let imageFlag = imageLinkFlagJSArr[this.point.index];\n" +
" let imageLink = \"<span><img src=\\\"https://image.flaticon.com/icons/svg/197/\" + imageFlag + \".svg\\\" style=\\\"width: 30px; height: 30px;\\\"/><br></span>\";\n" +
" return imageLink\n" +
" + \" 🌕 🌖 🌗 🌘 🌑 🌒 🌓 🌔 <br/> \"\n" +
" + \" Support JavaScript Function Just Right Now !!! <br/> \"\n" +
" + \" The Gold Price For <b>2020 \"\n" +
" + this.x\n" +
" + \" </b> Is <b> \"\n" +
" + this.y\n" +
" + \" </b> Dollars \";\n" +
" }")
tooltipFormatter = tooltipFormatter.replace("imageLinkFlagJSArr", imageLinkFlagJSArrStr)
(aaOptions.tooltip
.sharedSet(False)
.useHTMLSet(True)
.formatterSet(tooltipFormatter))
return aaOptions
# https://bbs.hcharts.cn/article-109-1.html
# The chart's built-in legend click behavior is:
# clicking a shown/hidden legend item hides/shows the corresponding series.
# That feels wrong: with several lines (or other series types), clicking a legend
# item usually means "show only this series", so the click event is customized here.
#
# The resulting behavior (taking a line chart as an example):
# 1. If a line is hidden, clicking its legend item shows it;
# 2. If all lines are visible, clicking a legend item shows only that line and hides the rest;
# 3. If only one line is visible, clicking its legend item shows all lines;
# 4. Otherwise, fall back to the default behavior:
#    visible --> hidden;
#    hidden --> visible.
# Customized legendItemClick event online: http://code.hcharts.cn/rencht/hhhhLv/share
@staticmethod
def customLegendItemClickEvent():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.column)
.stackingSet(AAChartStackingType.normal)
.colorsThemeSet(["#fe117c","#ffc069","#06caf4","#7dffc0"])#设置主题颜色数组
.markerRadiusSet(0)
.seriesSet([
AASeriesElement()
.nameSet("2017")
.dataSet([7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2, 26.5, 23.3, 18.3, 13.9, 9.6]),
AASeriesElement()
.nameSet("2018")
.dataSet([0.2, 0.8, 5.7, 11.3, 17.0, 22.0, 24.8, 24.1, 20.1, 14.1, 8.6, 2.5]),
AASeriesElement()
.nameSet("2019")
.dataSet([0.9, 0.6, 3.5, 8.4, 13.5, 17.0, 18.6, 17.9, 14.3, 9.0, 3.9, 1.0]),
AASeriesElement()
.nameSet("2020")
.dataSet([3.9, 4.2, 5.7, 8.5, 11.9, 15.2, 17.0, 16.6, 14.2, 10.3, 6.6, 4.8]),
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.legend
.enabledSet(True)
.alignSet(AAChartAlignType.center)  # horizontal alignment of the legend
.layoutSet(AAChartLayoutType.horizontal)  # lay the legend items out horizontally
.verticalAlignSet(AAChartVerticalAlignType.top))  # pin the legend to the top edge
# custom legend item click event
aaOptions.plotOptions.series.events = (
AAEvents()
.legendItemClickSet("""
function(event) {
function getVisibleMode(series, serieName) {
var allVisible = true;
var allHidden = true;
for (var i = 0; i < series.length; i++) {
if (series[i].name == serieName)
continue;
allVisible &= series[i].visible;
allHidden &= (!series[i].visible);
}
if (allVisible && !allHidden)
return 'all-visible';
if (allHidden && !allVisible)
return 'all-hidden';
return 'other-cases';
}
var series = this.chart.series;
var mode = getVisibleMode(series, this.name);
var enableDefault = false;
if (!this.visible) {
enableDefault = true;
}
else if (mode == 'all-visible') {
var seriesLength = series.length;
for (var i = 0; i < seriesLength; i++) {
var serie = series[i];
serie.hide();
}
this.show();
}
else if (mode == 'all-hidden') {
var seriesLength = series.length;
for (var i = 0; i < seriesLength; i++) {
var serie = series[i];
serie.show();
}
}
else {
enableDefault = true;
}
return enableDefault;
}
"""))
return aaOptions
# https://github.com/AAChartModel/AAChartKit-Swift/issues/233
@staticmethod
def customTooltipPositionerFunction():
categories = [
"孤岛危机",
"使命召唤",
"荣誉勋章",
"狙击精英",
"神秘海域",
"最后生还者",
"巫师3狂猎",
"对马之魂",
"死亡搁浅",
"地狱边境",
"闪客",
"忍者之印"
]
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.column)
.yAxisTitleSet("")
.yAxisGridLineWidthSet(0)
.categoriesSet(categories)
.seriesSet([
AASeriesElement()
.nameSet("单机大作")
.colorSet(AAColor.red)
.dataSet([0.2, 0.8, 5.7, 11.3, 17.0, 22.0, 24.8, 24.1, 20.1, 14.1, 8.6, 2.5])
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
.shadowSet(False)
.positionerSet("""
function (labelWidth, labelHeight, point) {
return {
x : point.plotX,
y : 20
};
}
"""))
return aaOptions
@staticmethod
def fixedTooltipPositionByCustomPositionerFunction():
aaOptions = JSFuncOptionsComposer.customTooltipPositionerFunction()
(aaOptions.tooltip
.positionerSet("""
function (labelWidth, labelHeight, point) {
return {
x : 50,
y : 50
};
}
"""))
return aaOptions
#https://github.com/AAChartModel/AAChartKit/issues/967
@staticmethod
def disableColumnChartUnselectEventEffectBySeriesPointEventClickFunction():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.bar)
.titleSet("Custom Bar Chart select color")
.yAxisTitleSet("")
.yAxisReversedSet(True)
.xAxisReversedSet(True)
.seriesSet([
AASeriesElement()
.nameSet("ElementOne")
.dataSet([211,183,157,133,111,91,73,57,43,31,21,13,7,3])
.allowPointSelectSet(True)
.statesSet(AAStates()
.hoverSet(AAHover()
.colorSet(AAColor.yellow))
.selectSet(AASelect()
.colorSet(AAColor.red)))
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.plotOptions.series
.pointSet(AAPoint()
.eventsSet(AAPointEvents()
.clickSet(("""
function () {
if (this.selected == true) {
this.selected = false;
}
return;
}
""")))))
return aaOptions
#https://github.com/AAChartModel/AAChartKit/issues/970
#https://github.com/AAChartModel/AAChartKit-Swift/issues/239
#Customize a complex tooltip by styling a custom div with CSS
@staticmethod
def customAreasplineChartTooltipStyleByDivWithCSS():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.areaspline)# chart type
.stackingSet(AAChartStackingType.normal)
.categoriesSet([
"10-01", "10-02", "10-03", "10-04", "10-05", "10-06", "10-07", "10-08", "10-09", "10-10", "10-11",
"10-12", "10-13", "10-14", "10-15", "10-16", "10-17", "10-18", "10-19", "10-20", "10-21", "10-22",
"10-23", "10-24", "10-25", "10-26", "10-27", "10-28", "10-29", "10-30", "10-31", "11-01", "11-02",
"11-03", "11-04", "11-05", "11-06", "11-07", "11-08", "11-09", "11-10", "11-11", "11-12", "11-13",
"11-14", "11-15", "11-16", "11-17", "11-18", "11-19", "11-20", "11-21", "11-22", "11-23", "11-24",
"11-25", "11-26", "11-27", "11-28", "11-29", "11-30", "12-01", "12-02", "12-03", "12-04", "12-05",
"12-06", "12-07", "12-08", "12-09", "12-10", "12-11", "12-12", "12-13", "12-14", "12-15", "12-16",
"12-17", "12-18", "12-19", "12-20", "12-21", "12-22", "12-23", "12-24", "12-25", "12-26", "12-27",
"12-28", "12-29", "12-30"
])
.seriesSet([
AASeriesElement()
.nameSet("黄金上涨")
.lineWidthSet(3)
.colorSet("#FFD700")#/*纯金色*/)
.fillOpacitySet(0.5)
.dataSet([
1.51, 6.70, 0.94, 1.44, 1.60, 1.63, 1.56, 1.91, 2.45, 3.87, 3.24, 4.90, 4.61, 4.10,
4.17, 3.85, 4.17, 3.46, 3.46, 3.55, 3.50, 4.13, 2.58, 2.28, 1.51, 12.7, 0.94, 1.44,
18.6, 1.63, 1.56, 1.91, 2.45, 3.87, 3.24, 4.90, 4.61, 4.10, 4.17, 3.85, 4.17, 3.46,
3.46, 3.55, 3.50, 4.13, 2.58, 2.28, 1.33, 4.68, 1.31, 1.10, 13.9, 1.10, 1.16, 1.67,
2.64, 2.86, 3.00, 3.21, 4.14, 4.07, 3.68, 3.11, 3.41, 3.25, 3.32, 3.07, 3.92, 3.05,
2.18, 3.24, 3.23, 3.15, 2.90, 1.81, 2.11, 2.43, 5.59, 3.09, 4.09, 6.14, 5.33, 6.05,
5.71, 6.22, 6.56, 4.75, 5.27, 6.02, 5.48
])
,
AASeriesElement()
.nameSet("房价下跌")
.lineWidthSet(3)
.colorSet("#ffc069")
.fillOpacitySet(0.5)
.dataSet([
1.51, 6.70, 0.94, 1.44, 1.60, 1.63, 1.56, 1.91, 2.45, 3.87, 3.24, 4.90, 4.61, 4.10,
4.17, 3.85, 4.17, 3.46, 3.46, 3.55, 3.50, 4.13, 2.58, 2.28, 1.51, 12.7, 0.94, 1.44,
18.6, 1.63, 1.56, 1.91, 2.45, 3.87, 3.24, 4.90, 4.61, 4.10, 4.17, 3.85, 4.17, 3.46,
3.46, 3.55, 3.50, 4.13, 2.58, 2.28, 1.33, 4.68, 1.31, 1.10, 13.9, 1.10, 1.16, 1.67,
2.64, 2.86, 3.00, 3.21, 4.14, 4.07, 3.68, 3.11, 3.41, 3.25, 3.32, 3.07, 3.92, 3.05,
2.18, 3.24, 3.23, 3.15, 2.90, 1.81, 2.11, 2.43, 5.59, 3.09, 4.09, 6.14, 5.33, 6.05,
5.71, 6.22, 6.56, 4.75, 5.27, 6.02, 5.48
])
,
]))
#https://zhidao.baidu.com/question/301691908.html
#https://jshare.com.cn/highcharts/hhhhGc
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.tooltip
.useHTMLSet(True)
.paddingSet(0)
.borderWidthSet(0)
.formatterSet("function () {\n" +
" var box1Text = \"  2021-\" + this.x + this.points[0].series.name + this.y;\n" +
" var box2Text = \"  2021-\" + this.x + this.points[1].series.name + this.y;\n" +
" \n" +
" return '<style>\\\n" +
" div{margin:0;padding:0}\\\n" +
" #container{width:300px;height:40px;border:80px;}\\\n" +
" #container .box1{width:150px;height:40px;float:left;background:red;line-height:40px;color:#fff}\\\n" +
" #container .box2{width:150px;height:40px;float:right;background:green;line-height:40px;color:#fff}\\\n" +
" </style>\\\n" +
" <div id=\\\"container\\\">'\n" +
" +\n" +
" '<div class=\\\"box1\\\">' + box1Text + '</div>'\n" +
" +\n" +
" '<div class=\\\"box2\\\">' + box2Text + '</div>'\n" +
" +\n" +
" '</div>';\n" +
" }"))
#Disable the legend item click event
aaOptions.plotOptions.series.events = (
AAEvents()
.legendItemClickSet("""
function() {
return false;
}
"""))
return aaOptions
#https://github.com/AAChartModel/AAChartKit/issues/901
#https://github.com/AAChartModel/AAChartKit/issues/952
@staticmethod
def configureTheAxesLabelsFormattersOfDoubleYAxesChart():
aaChart = (AAChart()
.backgroundColorSet(AAColor.white))
aaTitle = (AATitle()
.textSet(""))
aaXAxis = (AAXAxis()
.visibleSet(True)
.minSet(0)
.categoriesSet([
"Java", "Swift", "Python", "Ruby", "PHP", "Go","C",
"C#", "C++", "Perl", "R", "MATLAB", "SQL"
]))
aaPlotOptions = (AAPlotOptions()
.seriesSet(AASeries()
.markerSet(AAMarker()
.radiusSet(7)# marker radius; the default is 4
.symbolSet(AAChartSymbolType.circle.value)# marker symbol: "circle", "square", "diamond", "triangle", "triangle-down"; the default is "circle"
.fillColorSet(AAColor.white)# marker fill color (fill of the data-point markers on the line)
.lineWidthSet(3)# marker outline width (stroke width of the data-point markers)
.lineColorSet("")# marker outline color (an empty string falls back to the point's or series' color)
)))
yAxis1 = (AAYAxis()
.visibleSet(True)
.lineWidthSet(1)
.startOnTickSet(False)
.endOnTickSet(False)
.tickPositionsSet([0, 50, 100, 150, 200])
.labelsSet(AALabels()
.enabledSet(True)
.styleSet(AAStyle()
.colorSet("DodgerBlue"))
.formatterSet(("""
function () {
let yValue = this.value;
if (yValue >= 200) {
return "极佳";
} else if (yValue >= 150 && yValue < 200) {
return "非常棒";
} else if (yValue >= 100 && yValue < 150) {
return "相当棒";
} else if (yValue >= 50 && yValue < 100) {
return "还不错";
} else {
return "一般";
}
}
""")))
.gridLineWidthSet(0)
.titleSet(AATitle()
.textSet("中文")
.styleSet(AAStyle.colorSizeWeight("DodgerBlue", 14, AAChartFontWeightType.bold))))
yAxis2 = (AAYAxis()
.visibleSet(True)
.lineWidthSet(1)
.startOnTickSet(False)
.endOnTickSet(False)
.tickPositionsSet([0, 50, 100, 150, 200])
.labelsSet(AALabels()
.enabledSet(True)
.styleSet(AAStyle()
.colorSet(AAColor.red))
.formatterSet("""
function () {
let yValue = this.value;
if (yValue >= 200) {
return "Awesome";
} else if (yValue >= 150 && yValue < 200) {
return "Great";
} else if (yValue >= 100 && yValue < 150) {
return "Very Good";
} else if (yValue >= 50 && yValue < 100) {
return "Not Bad";
} else {
return "Just So So";
}
}
"""))
.gridLineWidthSet(0)
.titleSet(AATitle()
.textSet("ENGLISH")
.styleSet(AAStyle.colorSizeWeight(AAColor.red, 14, AAChartFontWeightType.bold)))
.oppositeSet(True))
aaTooltip = (AATooltip()
.enabledSet(True)
.sharedSet(True))
seriesArr = [
AASeriesElement()
.nameSet("2020")
.typeSet(AAChartType.spline)
.lineWidthSet(7)
.colorSet(AAGradientColor.deepSea)
.yAxisSet(1)
.dataSet([
0, 71.5, 106.4, 129.2, 144.0, 176.0,
135.6, 148.5, 216.4, 194.1, 95.6, 54.4
]),
AASeriesElement()
.nameSet("2021")
.typeSet(AAChartType.spline)
.lineWidthSet(7)
.colorSet(AAGradientColor.sanguine)
.yAxisSet(0)
.dataSet([
135.6, 148.5, 216.4, 194.1, 95.6, 54.4,
0, 71.5, 106.4, 129.2, 144.0, 176.0
])
]
aaOptions = (AAOptions()
.chartSet(aaChart)
.titleSet(aaTitle)
.plotOptionsSet(aaPlotOptions)
.xAxisSet(aaXAxis)
.yAxisArraySet([yAxis1,yAxis2])
.tooltipSet(aaTooltip)
.seriesSet(seriesArr))
return aaOptions
#https://github.com/AAChartModel/AAChartKit/issues/1042
@staticmethod
def makePieChartShow0Data():
aaOptions = (AAOptions()
.titleSet(AATitle()
.textSet(""))
.chartSet(AAChart()
.typeSet(AAChartType.pie))
.seriesSet([
AASeriesElement()
.nameSet("ZeroDataPie")
.dataSet([
{"y": 1, "isZero": True, "name": "One"},
{"y": 1, "isZero": True, "name": "Two"},
{"y": 1, "isZero": True, "name": "Three"}
])
.tooltipSet(AATooltip()
.sharedSet(False)
.pointFormatterSet("""
function() {
return "<span style=color:" + this.color + "> ◉ </span>"
+ this.series.name
+ ": <b>"
+ (this.options.isZero ? 0 : this.y)
+ "</b><br/>";
}
"""))
]))
return aaOptions
#https://github.com/AAChartModel/AAChartKit/issues/1217
@staticmethod
def customColumnChartXAxisLabelsTextByInterceptTheFirstFourCharacters():
aaChartModel = (AAChartModel()
.chartTypeSet(AAChartType.bar)# chart type
.titleSet("春江花月夜")# chart title
.subtitleSet("张若虚")# chart subtitle
.xAxisReversedSet(True)
.xAxisLabelsStyleSet(AAStyle.colorStr(AAColor.black))
.legendEnabledSet(False)
.categoriesSet([
"春江潮水连海平", "海上明月共潮生",
"滟滟随波千万里", "何处春江无月明",
"江流宛转绕芳甸", "月照花林皆似霰",
"空里流霜不觉飞", "汀上白沙看不见",
"江天一色无纤尘", "皎皎空中孤月轮",
"江畔何人初见月", "江月何年初照人",
"人生代代无穷已", "江月年年望相似",
"不知江月待何人", "但见长江送流水",
"白云一片去悠悠", "青枫浦上不胜愁",
"谁家今夜扁舟子", "何处相思明月楼",
"可怜楼上月裴回", "应照离人妆镜台",
"玉户帘中卷不去", "捣衣砧上拂还来",
"此时相望不相闻", "愿逐月华流照君",
"鸿雁长飞光不度", "鱼龙潜跃水成文",
"昨夜闲潭梦落花", "可怜春半不还家",
"江水流春去欲尽", "江潭落月复西斜",
"斜月沉沉藏海雾", "碣石潇湘无限路",
"不知乘月几人归", "落月摇情满江树",
])
.seriesSet([
AASeriesElement()
.lineWidthSet(1.5)
.colorSet(AAGradientColor.linearGradient1(
AALinearGradientDirection.toTop,
"#7052f4",
"#00b0ff"
))
.nameSet("2018")
.dataSet([
1.51, 3.7, 0.94, 1.44, 1.6, 1.63, 1.56, 1.91, 2.45, 3.87, 3.24, 4.90, 4.61, 4.10,
4.17, 3.85, 4.17, 3.46, 3.46, 3.55, 3.50, 4.13, 2.58, 2.28,1.51, 2.7, 0.94, 1.44,
3.6, 1.63, 1.56, 1.91, 2.45, 3.87, 3.24, 4.90,
])
]))
aaOptions = aaChartModel.aa_toAAOptions()
(aaOptions.xAxis.labels
.formatterSet("""
function () {
let xAxisCategory = this.value;
if (xAxisCategory.length > 4) {
return xAxisCategory.substr(0, 4);
} else {
return xAxisCategory;
}
}
"""))
return aaOptions
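# Hedged usage sketch (not part of the original file): every static method above
# returns an AAOptions whose formatter/positioner strings are raw JavaScript.
# Rendering happens elsewhere in this repo via PYChartView, roughly:
# chart_view = PYChartView()
# chart_view.aa_drawChartWithChartOptions(JSFuncOptionsComposer.makePieChartShow0Data())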
```
#### File: AACharts-PyQt/demo/MainTreeWidget.py
```python
import random
from PySide6 import QtWidgets, QtCore
from aacharts.aachartcreator.PYChartView import PYChartView
from aacharts.aaenum.AAEnum import AAChartType
from demo.BasicChartComposer import BasicChartComposer
from demo.MixedChartComposer import MixedChartComposer
from demo.SpecialChartComposer import SpecialChartComposer
from demo.CustomStyleChartComposer import CustomStyleChartComposer
from demo.ChartOptionsComposer import ChartOptionsComposer
from demo.JSFuncOptionsComposer import JSFuncOptionsComposer
class MainTreeWidget(QtWidgets.QWidget):
def __init__(self):
super().__init__()
self.chartView = PYChartView()
testChartModel = CustomStyleChartComposer.configureColorfulBarChart()
self.chartView.aa_drawChartWithChartModel(testChartModel)
# https://gist.github.com/fredrikaverpil/1fa4f3360ffdb1e69507
folderTree = QtWidgets.QTreeWidget()
sectionTitleArr = [
"Basic Type Chart --- 基础类型图表",
"Special Type Chart --- 特殊类型图表",
"Custom Style Chart---一些自定义风格样式图表",
"Mixed Chart --- 混合图形",
"Draw Chart With AAOptions---通过Options绘图",
"JS Function For AAOptionns ---通过带有 JS 函数的 Options 绘图",
]
chartTypeTitleArr = [
# Basic types chart
[
"Column Chart---柱形图",
"Bar Chart---条形图",
"Area Chart---折线填充图",
"Areaspline Chart---曲线填充图",
"Step Area Chart--- 直方折线填充图",
"Step Line Chart--- 直方折线图",
"Line Chart---折线图",
"Spline Chart---曲线图",
],
# Special types chart
[
"Polar Chart---极地图",
"Pie Chart---扇形图",
"Bubble Chart---气泡图",
"Scatter Chart---散点图",
"Arearange Chart---折线区域范围图",
"Area Spline range Chart--曲线区域范围图",
"Columnrange Chart--- 柱形范围图",
"Step Line Chart--- 直方折线图",
"Step Area Chart--- 直方折线填充图",
"Boxplot Chart--- 箱线图",
"Waterfall Chart--- 瀑布图",
"Pyramid Chart---金字塔图",
"Funnel Chart---漏斗图",
"Error Bar Chart---误差图",
],
# Custom chart style by AAChartModel
[
"Colorful Column Chart---多彩条形图",
"Colorful Gradient Color Chart---多彩颜色渐变条形图",
"Discontinuous Data Chart---数值不连续の图表",
"Mixed Line Chart---虚实线混合折线图",
"Random Colors Colorful Column Chart---随机颜色の多彩柱形图",
"Gradient Color Bar Chart---颜色渐变条形图",
"Stacking polar chart---百分比堆积效果の极地图",
"Area Chart with minus--带有负数の区域填充图",
"Step Line Chart--直方折线图",
"Step Area Chart--直方折线填充图",
"Nightingale Rose Chart---南丁格尔玫瑰图",
"Specific Data Customize Datalabel",
"Chart With Shadow Style---带有阴影效果の图表",
"Colorful gradient Areaspline Chart---多层次渐变区域填充图",
"Colorful gradient Spline Chart---多层次渐变曲线图",
"Gradient Color Areaspline Chart---半透明渐变效果区域填充图",
"Special Style Marker Of Single Data Element Chart",
"Special Style Column Of Single Data Element Chart",
"configure Area Chart Threshold---自定义阈值",
"custom Scatter Chart Marker Symbol Content---自定义散点图の标志点内容",
"custom Line Chart Marker Symbol Content---自定义折线图の标志点内容",
"Triangle Radar Chart---三角形雷达图",
"Quadrangle Radar Chart---四角形雷达图",
"Pentagon Radar Chart---五角形雷达图",
"Hexagon Radar Chart----六角形雷达图",
"Draw Line Chart With Points Coordinates----通过点坐标来绘制折线图",
"custom Special Style DataLabel Of Single Data Element Chart",
"custom Bar Chart Hover Color and Select Color---自定义条形图手指滑动颜色和单个长条被选中颜色",
"custom Line Chart Chart Hover And Select Halo Style",
"custom Spline Chart Marker States Hover Style",
"customNormalStackingChartDataLabelsContentAndStyle---自定义堆积柱状图 DataLabels の内容及样式",
"upsideDownPyramidChart---倒立の金字塔图",
"doubleLayerPieChart---双层嵌套扇形图",
"doubleLayerDoubleColorsPieChart---双层嵌套双颜色主题扇形图",
"disableSomeOfLinesMouseTrackingEffect---针对部分数据列关闭鼠标或手指跟踪行为",
"configureColorfulShadowChart---彩色阴影效果の曲线图",
"configureColorfulDataLabelsStepLineChart---彩色 DataLabels の直方折线图",
"configureColorfulGradientColorAndColorfulDataLabelsStepAreaChart---彩色渐变效果且彩色 DataLabels の直方折线填充图",
"disableSplineChartMarkerHoverEffect---禁用曲线图の手指滑动 marker 点の光圈变化放大の效果",
"configureMaxAndMinDataLabelsForChart---为图表最大值最小值添加 DataLabels 标记",
"customVerticalXAxisCategoriesLabelsByHTMLBreakLineTag---通过 HTML 的换行标签来实现图表的 X 轴的 分类文字标签的换行效果",
"noMoreGroupingAndOverlapEachOtherColumnChart---不分组的相互重叠柱状图📊",
"noMoreGroupingAndNestedColumnChart---不分组的嵌套柱状图📊",
],
# Mixed Chart
[
"Arearange Mixed Line---面积范围均线图",
"Columnrange Mixed Line---柱形范围图混合折线图",
"Stacking Column Mixed Line---堆积柱形图混合折线图",
"Dash Style Types Mixed---多种类型曲线混合图",
"Negative Color Mixed Column Chart---基准线以下异色混合图",
"scatterMixedLine---散点图混合折线图",
"Negative Color Mixed Bubble Chart---基准线以下异色气泡图",
"Polygon Mixed Scatter---多边形混合散点图",
"Polar Chart Mixed---极地混合图",
"Column Mixed Scatter---柱形图混合散点图",
"Pie Mixed Line Mixed Column---扇形折线柱形混合图",
"Line Chart With Shadow---带有阴影效果の折线图",
"Negative Color Mixed Areaspline chart---基准线以下异色混合曲线填充图",
"Aerasplinerange Mixed Columnrange Mixed Line Chart---曲线面积范围混合柱形范围混合折线图"
],
# Draw Chart with AAOptions
[
"configureLegendStyle",
"Custom Chart Sample Two",
"Custom Chart Sample three",
"Custom Chart Sample 4",
"customAreaChartYAxisLabelsAndGridLineStyle---自定义曲线填充图图的 Y 轴 的 Labels 和 网格线样式",
"Adjust Y Axis Min value",
"Mirror Chart",
"Adjust The XAxis Labels",
"Adjust GroupPadding Between Columns",
"configureAAPlotBandsForChart || 值域颜色分割带🎀",
"configureAAPlotLinesForChart || 值域颜色分割线🧶",
"customAATooltipWithJSFuntion",
"customXAxisCrosshairStyle",
"configureXAxisLabelsFontColorWithHTMLString",
"configureXAxisLabelsFontColorAndFontSizeWithHTMLString",
"configure_DataLabels_XAXis_YAxis_Legend_Style",
"configureXAxisPlotBand",
"configureDoubleYAxisChartOptions",
"configureTripleYAxesMixedChart || 三重 Y 轴混合图",
"Double Y Axes And Column Line Mixed Chart || 双 Y 轴柱形曲线混合图",
"Double Y Axes Market Depth Chart || 双 Y 轴市场深度图",
"custom Area Chart Tooltip Style Like HTML Table || 自定义区域填充图浮动提示框为 HTML 表格样式",
"custom Axes Grid Line Style || 自定义 X 轴和 Y 轴网格线の样式",
"custom Radar Chart Style || 自定义雷达图样式",
"customColumnrangeChartStyle---自定义柱形范围图样式",
"self customXAxisLabelsBeImages---自定义曲线面积图 X 轴 labels 为一组图片🖼",
"Triangle Radar Chart With PlotBands---带有颜色标志带の三角形雷达图",
"Quadrangle Radar Chart With PlotBands---带有颜色标志带の四角形雷达图",
"Pentagon Radar Chart With PlotBands---带有颜色标志带の五角形雷达图",
"Hexagon Radar Char With PlotBands----带有颜色标志带の六角形雷达图",
"Spider Web Radar Chart With PlotBands----带有颜色标志带の🕸蜘蛛网状雷达图",
"configureComplicatedCustomAreasplineChart---复杂自定义曲线填充图 1",
"configureComplicatedCustomAreasplineChart2---复杂自定义曲线填充图 2",
"configureComplicatedCustomAreasplineChart3---复杂自定义曲线填充图 3",
"yAxisOnTheRightSideChart---y轴在右侧的图表",
"doubleLayerHalfPieChart---双层嵌套的玉阕图",
"customAreasplineChartTooltipContentWithHeaderFormat---通过 tooltip 的 headerFormat 属性来自定义 曲线填充图的 tooltip",
"customAreaChartTooltipStyleWithTotalValueHeader---浮动提示框 header 显示总值信息",
"configureYAxisLabelsNumericSymbolsMagnitudeOfAerasplineChart---自定义 Y 轴的 Labels 国际单位符基数及国际单位符",
"timeDataWithIrregularIntervalsChart---X 轴时间不连续的折线图",
"logarithmicAxisLineChart---对数轴折线图📈",
"logarithmicAxisScatterChart---对数轴散点图",
"Disable Mixed Chart Inactive Animation Effect----禁用混合图表的 inactive 动画效果",
"Adjust Bubble Chart Min And Max----调整气泡图的 min 和 max 相关属性",
"customLineChartDataLabelsFormat---自定义曲线图的 DataLabels 的 format 属性",
"customLineChartDataLabelsFormat2---自定义曲线图的 DataLabels 的 format 属性2(更简易方法)",
"complicatedScatterChart---复杂的自定义散点图"
],
# Custom Tooltip With JavaScript Formatter Function
[
"customAreaChartTooltipStyleWithSimpleFormatString---简单字符串拼接",
"customAreaChartTooltipStyleWithDifferentUnitSuffix---自定义不同单位后缀",
"customAreaChartTooltipStyleWithColorfulHtmlLabels---自定义多彩颜色文字",
"customLineChartTooltipStyleWhenValueBeZeroDoNotShow---值为0时,在tooltip中不显示",
"customBoxplotTooltipContent---自定义箱线图の浮动提示框头部内容",
"customYAxisLabels---自定义Y轴文字1",
"customYAxisLabels2---自定义Y轴文字2",
"customStackedAndGroupedColumnChartTooltip---自定义分组堆积柱状图tooltip内容",
"Double X Axes Mirror Chart---双 X 轴镜像图表",
"custom Arearange Chart Tooltip---自定义面积范围图浮动提示框",
"customLineChartOriginalPointPositionByConfiguringXAxisFormatterAndTooltipFormatter---调整折线图の X 轴左边距",
"customTooltipWhichDataSourceComeFromOutSideRatherThanSeries---通过来自外部の数据源来自定义 tooltip (而非常规の来自图表の series)",
"custom Spider Chart Style---自定义蜘蛛图🕷🕸样式",
"customize Every DataLabel Singlely By DataLabels Formatter---通过 DataLabels 的 formatter 函数来实现单个数据标签🏷自定义",
"custom XAxis Labels Be Images---自定义柱形图 X 轴 labels 为一组图片🖼",
"custom Legend Item Click Event---自定义图例点击事件🖱",
"customTooltipPostionerFunction---自定义浮动提示框 positioner 函数",
"fixedTooltipPositionByCustomPositionerFunction---通过 Positioner 函数来实现一个位置固定的提示框",
"disableColumnChartUnselectEventEffectBySeriesPointEventClickFunction---通过 Series 的 Point 的选中事件函数来禁用条形图反选效果",
"customAreasplineChartTooltipStyleByDivWithCSS---通过自定义 div 的 css 样式来自定义复杂效果的 tooltip 浮动提示框",
"configureTheAxesLabelsFormattersOfDoubleYAxesChart---配置双 Y 轴图表的 Y 轴文字标签的 Formatter 函数",
"makePieChartShow0Data---使饼图显示为 0 的数据",
"customColumnChartXAxisLabelsTextByInterceptTheFirstFourCharacters---通过截取前四个字符来自定义 X 轴 labels",
],
]
for sectionIndex in range(len(sectionTitleArr)):
sectionTitleStr = sectionTitleArr[sectionIndex]
sectionIndexStr = f"{sectionIndex + 1}"
sectionRoot = QtWidgets.QTreeWidgetItem(folderTree, [sectionIndexStr + " " + sectionTitleStr])
sectionRoot.setData(1, QtCore.Qt.EditRole,
sectionIndexStr)
singleSectionChartTypeTitleArr = chartTypeTitleArr[sectionIndex]
for rowIndex in range(len(singleSectionChartTypeTitleArr)):
rowIndexStr = f"{rowIndex + 1}"
chartTypeStr = singleSectionChartTypeTitleArr[rowIndex]
rowRoot = QtWidgets.QTreeWidgetItem(sectionRoot, [rowIndexStr + " " + chartTypeStr])
rowRoot.setData(1, QtCore.Qt.EditRole,
sectionIndexStr) # Data set to column 2, which is not visible
rowRoot.setData(2, QtCore.Qt.EditRole,
rowIndexStr) # Data set to column 2, which is not visible
def printer(treeItem):
foldername = treeItem.text(0)
sectionIndex = treeItem.text(1)
rowIndexStr = treeItem.text(2)
# treeItem.indexOfChild()
print(foldername + ': ' + f"(Section Index: {sectionIndex})" + f"(Row Index: {rowIndexStr})")
if len(rowIndexStr) > 0:
sectionIndexValue = int(sectionIndex)
rowIndexValue = int(rowIndexStr) - 1
if sectionIndexValue == 1:
aaChartModel = self.basicChartConfigurationWithSelectedIndex(rowIndexValue)
self.chartView.aa_drawChartWithChartModel(aaChartModel)
elif sectionIndexValue == 2:
aaChartModel = self.specialChartConfigurationWithSelectedIndex(rowIndexValue)
self.chartView.aa_drawChartWithChartModel(aaChartModel)
elif sectionIndexValue == 3:
aaChartModel = self.customStyleChartModelWithSelectedIndex(rowIndexValue)
self.chartView.aa_drawChartWithChartModel(aaChartModel)
elif sectionIndexValue == 4:
aaChartModel = self.mixedTypeChartModelWithSelectedIndex(rowIndexValue)
self.chartView.aa_drawChartWithChartModel(aaChartModel)
elif sectionIndexValue == 5:
aaOptions = self.chartOptionsConfigurationWithSelectedIndex(rowIndexValue)
self.chartView.aa_drawChartWithChartOptions(aaOptions)
elif sectionIndexValue == 6:
aaOptions = self.chartJSFuncOptionsConfigurationWithSelectedIndex(rowIndexValue)
self.chartView.aa_drawChartWithChartOptions(aaOptions)
folderTree.itemClicked.connect(lambda: printer(folderTree.currentItem()))
folderTree.currentColumn()
self.layout = QtWidgets.QVBoxLayout(self)
self.layout.addWidget(self.chartView)
self.layout.addWidget(folderTree)
self.setWindowTitle("你好世界")
def printer(treeItem):
foldername = treeItem.text(0)
comment = treeItem.text(1)
data = treeItem.text(2)
print(foldername + ': ' + comment + ' (' + data + ')')
@QtCore.Slot()
def magic(self):
# Leftover from the PySide6 template: self.text and self.hello are never
# defined on this widget, so this slot is effectively dead code.
self.text.setText(random.choice(self.hello))
def basicChartConfigurationWithSelectedIndex(self, selectedIndex):
chartType = AAChartType.area
if selectedIndex == 0:
chartType = AAChartType.column
elif selectedIndex == 1:
chartType = AAChartType.bar
elif selectedIndex == 2:
chartType = AAChartType.area
elif selectedIndex == 3:
chartType = AAChartType.areaspline
elif selectedIndex == 4:
chartType = AAChartType.area
elif selectedIndex == 5:
chartType = AAChartType.line
elif selectedIndex == 6:
chartType = AAChartType.line
elif selectedIndex == 7:
chartType = AAChartType.spline
return self.configureTheStyleForDifferentTypeChart(chartType, selectedIndex)
def configureTheStyleForDifferentTypeChart(self, chartType: AAChartType,position: int):
aaChartModel = BasicChartComposer.configureAreaChart()
if (chartType == AAChartType.area or chartType == AAChartType.line) and (position == 4 or position == 5):
aaChartModel = BasicChartComposer.configureStepAreaChartAndStepLineChart()
elif chartType == AAChartType.column or chartType == AAChartType.bar:
aaChartModel = BasicChartComposer.configureColumnChartAndBarChart()
elif chartType == AAChartType.area or chartType == AAChartType.areaspline:
aaChartModel = BasicChartComposer.configureAreaChartAndAreasplineChartStyle(chartType)
elif chartType == AAChartType.line or chartType == AAChartType.spline:
aaChartModel = BasicChartComposer.configureLineChartAndSplineChartStyle(chartType)
aaChartModel.chartType = chartType
return aaChartModel
def specialChartConfigurationWithSelectedIndex(self, selectedIndex):
if selectedIndex == 0:
return SpecialChartComposer.configureColumnChart()
elif selectedIndex == 1:
return SpecialChartComposer.configurePieChart()
elif selectedIndex == 2:
return SpecialChartComposer.configureBubbleChart()
elif selectedIndex == 3:
return SpecialChartComposer.configureScatterChart()
elif selectedIndex == 4:
return SpecialChartComposer.configureArearangeChart()
elif selectedIndex == 5:
return SpecialChartComposer.configureAreasplinerangeChart()
elif selectedIndex == 6:
return SpecialChartComposer.configureColumnrangeChart()
elif selectedIndex == 7:
return SpecialChartComposer.configureStepLineChart()
elif selectedIndex == 8:
return SpecialChartComposer.configureStepAreaChart()
elif selectedIndex == 9:
return SpecialChartComposer.configureBoxplotChart()
elif selectedIndex == 10:
return SpecialChartComposer.configureWaterfallChart()
elif selectedIndex == 11:
return SpecialChartComposer.configurePyramidChart()
elif selectedIndex == 12:
return SpecialChartComposer.configureFunnelChart()
elif selectedIndex == 13:
return SpecialChartComposer.configureErrorbarChart()
def customStyleChartModelWithSelectedIndex(self, selectedIndex):
if selectedIndex == 0:
return CustomStyleChartComposer.configureColorfulBarChart()
elif selectedIndex == 1:
return CustomStyleChartComposer.configureColorfulGradientColorBarChart()
elif selectedIndex == 2:
return CustomStyleChartComposer.configureDiscontinuousDataChart()
elif selectedIndex == 3:
return CustomStyleChartComposer.configureMixedLineChart()
elif selectedIndex == 4:
return CustomStyleChartComposer.configureColorfulColumnChart()
elif selectedIndex == 5:
return CustomStyleChartComposer.configureGradientColorBarChart()
elif selectedIndex == 6:
return CustomStyleChartComposer.configureColorfulBarChart() # placeholder, to be added
elif selectedIndex == 7:
return CustomStyleChartComposer.configureWithMinusNumberChart()
elif selectedIndex == 8:
return CustomStyleChartComposer.configureStepLineChart()
elif selectedIndex == 9:
return CustomStyleChartComposer.configureStepAreaChart()
elif selectedIndex == 10:
return CustomStyleChartComposer.configureNightingaleRoseChart()
elif selectedIndex == 11:
return CustomStyleChartComposer.configureCustomSingleDataLabelChart()
elif selectedIndex == 12:
return CustomStyleChartComposer.configureChartWithShadowStyle()
elif selectedIndex == 13:
return CustomStyleChartComposer.configureColorfulGradientAreaChart()
elif selectedIndex == 14:
return CustomStyleChartComposer.configureColorfulGradientSplineChart()
elif selectedIndex == 15:
return CustomStyleChartComposer.configureGradientColorAreasplineChart()
elif selectedIndex == 16:
return CustomStyleChartComposer.configureSpecialStyleMarkerOfSingleDataElementChart()
elif selectedIndex == 17:
return CustomStyleChartComposer.configureSpecialStyleColumnOfSingleDataElementChart()
elif selectedIndex == 18:
return CustomStyleChartComposer.configureAreaChartThreshold()
elif selectedIndex == 19:
return CustomStyleChartComposer.customScatterChartMarkerSymbolContent()
elif selectedIndex == 20:
return CustomStyleChartComposer.customLineChartMarkerSymbolContent()
elif selectedIndex == 21:
return CustomStyleChartComposer.configureTriangleRadarChart()
elif selectedIndex == 22:
return CustomStyleChartComposer.configureQuadrangleRadarChart()
elif selectedIndex == 23:
return CustomStyleChartComposer.configurePentagonRadarChart()
elif selectedIndex == 24:
return CustomStyleChartComposer.configureHexagonRadarChart()
elif selectedIndex == 25:
return CustomStyleChartComposer.drawLineChartWithPointsCoordinates()
elif selectedIndex == 26:
return CustomStyleChartComposer.customSpecialStyleDataLabelOfSingleDataElementChart()
elif selectedIndex == 27:
return CustomStyleChartComposer.customBarChartHoverColorAndSelectColor()
elif selectedIndex == 28:
return CustomStyleChartComposer.customChartHoverAndSelectHaloStyle()
elif selectedIndex == 29:
return CustomStyleChartComposer.customSplineChartMarkerStatesHoverStyle()
elif selectedIndex == 30:
return CustomStyleChartComposer.customNormalStackingChartDataLabelsContentAndStyle()
elif selectedIndex == 31:
return CustomStyleChartComposer.upsideDownPyramidChart()
elif selectedIndex == 32:
return CustomStyleChartComposer.doubleLayerPieChart()
elif selectedIndex == 33:
return CustomStyleChartComposer.doubleLayerDoubleColorsPieChart()
elif selectedIndex == 34:
return CustomStyleChartComposer.disableSomeOfLinesMouseTrackingEffect()
elif selectedIndex == 35:
return CustomStyleChartComposer.configureColorfulShadowSplineChart()
elif selectedIndex == 36:
return CustomStyleChartComposer.configureColorfulDataLabelsStepLineChart()
elif selectedIndex == 37:
return CustomStyleChartComposer.configureColorfulGradientColorAndColorfulDataLabelsStepAreaChart()
elif selectedIndex == 38:
return CustomStyleChartComposer.disableSplineChartMarkerHoverEffect()
elif selectedIndex == 39:
return CustomStyleChartComposer.configureMaxAndMinDataLabelsForChart()
elif selectedIndex == 40:
return CustomStyleChartComposer.customVerticalXAxisCategoriesLabelsByHTMLBreakLineTag()
elif selectedIndex == 41:
return CustomStyleChartComposer.noMoreGroupingAndOverlapEachOtherColumnChart()
elif selectedIndex == 42:
return CustomStyleChartComposer.noMoreGroupingAndNestedColumnChart()
def mixedTypeChartModelWithSelectedIndex(self, selectedIndex):
if selectedIndex == 0:
return MixedChartComposer.configureArearangeMixedLineChart()
elif selectedIndex == 1:
return MixedChartComposer.configureColumnrangeMixedLineChart()
elif selectedIndex == 2:
return MixedChartComposer.configureStackingColumnMixedLineChart()
elif selectedIndex == 3:
return MixedChartComposer.configureDashStyleTypesMixedChart()
elif selectedIndex == 4:
return MixedChartComposer.configureNegativeColorMixedChart()
elif selectedIndex == 5:
return MixedChartComposer.configureScatterMixedLineChart()
elif selectedIndex == 6:
return MixedChartComposer.configureNegativeColorMixedBubbleChart()
elif selectedIndex == 7:
return MixedChartComposer.configurePolygonMixedScatterChart()
elif selectedIndex == 8:
return MixedChartComposer.configurePolarChartMixedChart()
elif selectedIndex == 9:
return MixedChartComposer.configureColumnMixedScatterChart()
elif selectedIndex == 10:
return MixedChartComposer.configurePieMixedLineMixedColumnChart()
elif selectedIndex == 11:
return MixedChartComposer.configureLineChartWithShadow()
elif selectedIndex == 12:
return MixedChartComposer.configureNegativeColorMixedAreasplineChart()
elif selectedIndex == 13:
return MixedChartComposer.configureAerasplinerangeMixedColumnrangeMixedLineChart()
def chartOptionsConfigurationWithSelectedIndex(self, selectedIndex):
if selectedIndex == 0:
return ChartOptionsComposer.configureLegendStyle()
elif selectedIndex == 1:
return ChartOptionsComposer.simpleGaugeChart()
elif selectedIndex == 2:
return ChartOptionsComposer.gaugeChartWithPlotBand()
elif selectedIndex == 3:
return ChartOptionsComposer.configureChartWithBackgroundImage()
elif selectedIndex == 4:
return ChartOptionsComposer.customAreaChartYAxisLabelsAndGridLineStyle() # customize the area chart's Y-axis labels and grid line style
elif selectedIndex == 5:
return ChartOptionsComposer.adjustYAxisMinValueForChart()
elif selectedIndex == 6:
return ChartOptionsComposer.configureTheMirrorColumnChart()
elif selectedIndex == 7:
return ChartOptionsComposer.adjustTheXAxisLabels()
elif selectedIndex == 8:
return ChartOptionsComposer.adjustGroupPaddingBetweenColumns()
elif selectedIndex == 9:
return ChartOptionsComposer.configureAAPlotBandsForChart()
elif selectedIndex == 10:
return ChartOptionsComposer.configureAAPlotLinesForChart()
elif selectedIndex == 11:
return ChartOptionsComposer.customAATooltipWithJSFuntion()
elif selectedIndex == 12:
return ChartOptionsComposer.customXAxisCrosshairStyle()
elif selectedIndex == 13:
return ChartOptionsComposer.configureXAxisLabelsFontColorWithHTMLString()
elif selectedIndex == 14:
return ChartOptionsComposer.configureXAxisLabelsFontColorAndFontSizeWithHTMLString()
elif selectedIndex == 15:
return ChartOptionsComposer.configure_DataLabels_XAXis_YAxis_Legend_Style()
elif selectedIndex == 16:
return ChartOptionsComposer.configureXAxisPlotBand()
elif selectedIndex == 17:
return ChartOptionsComposer.configureDoubleYAxisChartOptions()
elif selectedIndex == 18:
return ChartOptionsComposer.configureTripleYAxesMixedChart()
elif selectedIndex == 19:
return ChartOptionsComposer.configureDoubleYAxesAndColumnLineMixedChart()
elif selectedIndex == 20:
return ChartOptionsComposer.configureDoubleYAxesMarketDepthChart()
elif selectedIndex == 21:
return ChartOptionsComposer.customAreaChartTooltipStyleLikeHTMLTable()
elif selectedIndex == 22:
return ChartOptionsComposer.customAxesGridLineStyle()
elif selectedIndex == 23:
return ChartOptionsComposer.customRadarChartStyle()
elif selectedIndex == 24:
return ChartOptionsComposer.customColumnrangeChartStyle()
elif selectedIndex == 25:
return ChartOptionsComposer.customXAxisLabelsBeImages() # customize the area chart's X-axis labels as a set of images 🖼
elif selectedIndex == 26:
return ChartOptionsComposer.configureTriangleRadarChart() # triangle radar chart with colored plot bands
elif selectedIndex == 27:
return ChartOptionsComposer.configureQuadrangleRadarChart() # quadrangle radar chart with colored plot bands
elif selectedIndex == 28:
return ChartOptionsComposer.configurePentagonRadarChart() # pentagon radar chart with colored plot bands
elif selectedIndex == 29:
return ChartOptionsComposer.configureHexagonRadarChart() # hexagon radar chart with colored plot bands
elif selectedIndex == 30:
return ChartOptionsComposer.configureSpiderWebRadarChart() # spider-web radar chart with colored plot bands 🕸
elif selectedIndex == 31:
return ChartOptionsComposer.configureComplicatedCustomAreasplineChart() # complex custom areaspline chart 1
elif selectedIndex == 32:
return ChartOptionsComposer.configureComplicatedCustomAreasplineChart2() # complex custom areaspline chart 2
elif selectedIndex == 33:
return ChartOptionsComposer.configureComplicatedCustomAreasplineChart3() # complex custom areaspline chart 3
elif selectedIndex == 34:
return ChartOptionsComposer.yAxisOnTheRightSideChart() # chart with the y-axis on the right side
elif selectedIndex == 35:
return ChartOptionsComposer.doubleLayerHalfPieChart() # double-layer nested half-pie (donut) chart
elif selectedIndex == 36:
return ChartOptionsComposer.customAreasplineChartTooltipContentWithHeaderFormat() # customize the areaspline chart tooltip via the tooltip headerFormat property
elif selectedIndex == 37:
return ChartOptionsComposer.customAreaChartTooltipStyleWithTotalValueHeader() # show the total value in the tooltip header
elif selectedIndex == 38:
return ChartOptionsComposer.configureYAxisLabelsNumericSymbolsMagnitudeOfAerasplineChart() # customize the Y-axis labels' numeric-symbol base and symbols
elif selectedIndex == 39:
return ChartOptionsComposer.timeDataWithIrregularIntervalsChart() # line chart with irregular time intervals on the X axis
elif selectedIndex == 40:
return ChartOptionsComposer.logarithmicAxisLineChart() # logarithmic-axis line chart 📈
elif selectedIndex == 41:
return ChartOptionsComposer.logarithmicAxisScatterChart() # logarithmic-axis scatter chart
elif selectedIndex == 42:
return ChartOptionsComposer.disableMixedChartInactiveAnimationEffect() # disable the mixed chart's inactive animation effect
elif selectedIndex == 43:
return ChartOptionsComposer.adjustBubbleChartMinAndMax() # adjust the bubble chart's min and max related properties
elif selectedIndex == 44:
return ChartOptionsComposer.customLineChartDataLabelsFormat() # customize the line chart DataLabels format property
elif selectedIndex == 45:
return ChartOptionsComposer.customLineChartDataLabelsFormat2() # customize the line chart DataLabels format property (simpler method)
elif selectedIndex == 46:
return ChartOptionsComposer.complicatedScatterChart() # complex custom scatter chart
def chartJSFuncOptionsConfigurationWithSelectedIndex(self, selectedIndex):
if selectedIndex == 0:
return JSFuncOptionsComposer.customAreaChartTooltipStyleWithSimpleFormatString()
elif selectedIndex == 1:
return JSFuncOptionsComposer.customAreaChartTooltipStyleWithDifferentUnitSuffix()
elif selectedIndex == 2:
return JSFuncOptionsComposer.customAreaChartTooltipStyleWithColorfulHtmlLabels()
elif selectedIndex == 3:
return JSFuncOptionsComposer.customLineChartTooltipStyleWhenValueBeZeroDoNotShow()
elif selectedIndex == 4:
return JSFuncOptionsComposer.customBoxplotTooltipContent()
elif selectedIndex == 5:
return JSFuncOptionsComposer.customYAxisLabels()
elif selectedIndex == 6:
return JSFuncOptionsComposer.customYAxisLabels2()
elif selectedIndex == 7:
return JSFuncOptionsComposer.customStackedAndGroupedColumnChartTooltip()
elif selectedIndex == 8:
return JSFuncOptionsComposer.customDoubleXAxesChart()
elif selectedIndex == 9:
return JSFuncOptionsComposer.customArearangeChartTooltip()
elif selectedIndex == 10:
return JSFuncOptionsComposer.customLineChartOriginalPointPositionByConfiguringXAxisFormatterAndTooltipFormatter()
elif selectedIndex == 11:
return JSFuncOptionsComposer.customTooltipWhichDataSourceComeFromOutSideRatherThanSeries()
elif selectedIndex == 12:
return JSFuncOptionsComposer.customSpiderChartStyle()
elif selectedIndex == 13:
return JSFuncOptionsComposer.customizeEveryDataLabelSinglelyByDataLabelsFormatter()
elif selectedIndex == 14:
return JSFuncOptionsComposer.customXAxisLabelsBeImages()
elif selectedIndex == 15:
return JSFuncOptionsComposer.customLegendItemClickEvent()
elif selectedIndex == 16:
return JSFuncOptionsComposer.customTooltipPositionerFunction()
elif selectedIndex == 17:
return JSFuncOptionsComposer.fixedTooltipPositionByCustomPositionerFunction()
elif selectedIndex == 18:
return JSFuncOptionsComposer.disableColumnChartUnselectEventEffectBySeriesPointEventClickFunction()
elif selectedIndex == 19:
return JSFuncOptionsComposer.customAreasplineChartTooltipStyleByDivWithCSS()
elif selectedIndex == 20:
return JSFuncOptionsComposer.configureTheAxesLabelsFormattersOfDoubleYAxesChart()
elif selectedIndex == 21:
return JSFuncOptionsComposer.makePieChartShow0Data()
elif selectedIndex == 22:
return JSFuncOptionsComposer.customColumnChartXAxisLabelsTextByInterceptTheFirstFourCharacters()
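# Hedged usage sketch (a standard PySide6 entry point; assumed, not part of this file):
# import sys
# app = QtWidgets.QApplication(sys.argv)
# widget = MainTreeWidget()
# widget.show()
# sys.exit(app.exec())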
``` |
{
"source": "Aachen-Armchair-Engineers/armchair",
"score": 2
} |
#### File: armchair/scripts/camera_interface.py
```python
import rospy
from std_msgs.msg import Header, ColorRGBA
from geometry_msgs.msg import Point, PointStamped, Pose, Quaternion, Vector3
from depthai_ros_msgs.msg import SpatialDetectionArray
from tf.transformations import quaternion_from_euler
from math import pi  # needed by the hinge/handle quaternions in analyse_handle_and_door
from armchair.msg import DoorInfo
#For visualizing in rviz and foxglove
#See: https://foxglove.dev/blog/annotate-your-robots-camera-images-with-image-markers
from visualization_msgs.msg import Marker, MarkerArray, ImageMarker
from foxglove_msgs.msg import ImageMarkerArray
#TODO: Crop and visualize the depth image in rviz:
#See: https://gist.github.com/bhaskara/2400165
#from sensor_msgs.msg import PointCloud2
labels = []
target_label = ""
def init_labels(model):
''' Set the labels according to the model used '''
global labels, target_label
if model == 'armchair':
labels = ["", "door", "handle", "cabinet door", "refridgerator door"]
target_label = "handle"
elif model == 'mobilenet':
labels = [
"background", "aeroplane", "bicycle", "bird", "boat",
"bottle", "bus", "car", "cat", "chair",
"cow", "diningtable", "dog", "horse", "motorbike",
"person", "pottedplant", "sheep", "sofa", "train",
"tvmonitor"
]
target_label = "bottle"
elif model == 'yolov4':
labels = ["person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train",
"truck", "boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench",
"bird", "cat", "dog", "horse", "sheep", "cow", "elephant",
"bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie",
"suitcase", "frisbee", "skis", "snowboard", "sports ball", "kite", "baseball bat",
"baseball glove", "skateboard", "surfboard", "tennis racket", "bottle", "wine glass", "cup",
"fork", "knife", "spoon", "bowl", "banana", "apple", "sandwich",
"orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake",
"chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor",
"laptop", "mouse", "remote", "keyboard", "cell phone", "microwave", "oven",
"toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors",
"teddy bear", "hair drier", "toothbrush"]
target_label = "cup"
else:
rospy.logerr("Invalid neuronal network selected, aborting")
pub_handle = rospy.Publisher(
'/armchair/handle_position',
PointStamped,
queue_size=10
)
pub_door_info = rospy.Publisher(
'/armchair/door_info',
DoorInfo, #TODO: Custom datatype
queue_size=10
)
pub_rviz_markers = rospy.Publisher(
'/mobilenet_node_custom/color/rviz_markers',
MarkerArray,
queue_size=1
)
pub_foxglove_markers = rospy.Publisher(
'/mobilenet_node_custom/color/foxglove_markers',
ImageMarkerArray,
queue_size=1
)
def callback(data):
'''
If one or more targeted objects are found, publish the best match to
/armchair/handle_position
This will be visualized in rviz by a pink sphere
and is the position the robot will assume the handle to be.
Furthermore publish all detected bounding boxes and colour them:
Red: Target object(s)
Cyan: Normal object(s)
We use Foxglove Studio to visualize this
'''
#Find best detection
#Alternatively use the closest or manually select one
handles = list(filter(lambda l : labels[l.results[0].id] == target_label, data.detections))
if handles:
best_match = max(handles, key=lambda h: h.results[0].score)
pub_handle.publish(
PointStamped(
Header(
stamp = rospy.Time.now(),
frame_id = 'oak-d_frame'
),
#Swap the axis to make the transform in ros easier
Point(
best_match.position.x,
best_match.position.z,
best_match.position.y
)
)
)
#If we run the door detection publish additional information
if target_label == "handle":
doors = list( filter(lambda l : labels[l.results[0].id] not in ["", "handle"] , data.detections) )
if doors:
analyse_handle_and_door(best_match, doors)
#Create markers for all relevant object
normal_markers = MarkerArray()
image_markers = ImageMarkerArray()
for detection in data.detections:
#Calculate the positions of the bounding box edges
vertex_left = detection.bbox.center.x - detection.bbox.size_x/2
vertex_right = detection.bbox.center.x + detection.bbox.size_x/2
vertex_top = detection.bbox.center.y - detection.bbox.size_y/2
vertex_bot = detection.bbox.center.y + detection.bbox.size_y/2
#Scale from 320x320 square coordinates to camera image (1280*720) coordinates
vertex_left *= 1280/320
vertex_right *= 1280/320
vertex_top *= 720/320
vertex_bot *= 720/320
#Highlight the BBoxes of the targets
label = labels[detection.results[0].id]
if label == target_label:
marker_color=ColorRGBA(255, 0, 0, 1)
#TODO: Add more labels of special interest and (multiple) different colours for these
# elif label in highlight_lables.labels:
# color = highlight_labels.colour[label]
#Colour for normal bboxes
else:
marker_color=ColorRGBA(0, 255, 255, 1)
normal_markers.markers.append(
Marker(
header = Header(
stamp = rospy.Time.now(),
frame_id = 'oak-d_frame'
),
id=len(normal_markers.markers),
ns='/armchair/rviz_markers',
type=2,
action=0,
pose=Pose(
Point(
detection.position.x,
detection.position.z,
detection.position.y
),
Quaternion(0,0,0,1)
),
scale=Vector3(0.05, 0.05, 0.05),
color=marker_color,
#If we dont let them decay we need to clean up after them
lifetime=rospy.Duration(0.3)
)
)
image_markers.markers.append(
ImageMarker(
header = Header(
stamp = rospy.Time.now(),
frame_id = 'oak-d_frame'
),
type=ImageMarker.POLYGON,
outline_color=marker_color,
points=[
Point(vertex_left, vertex_top, 0),
Point(vertex_right, vertex_top, 0),
Point(vertex_right, vertex_bot, 0),
Point(vertex_left, vertex_bot, 0),
],
)
)
pub_rviz_markers.publish(normal_markers)
pub_foxglove_markers.publish(image_markers)
rospy.logdebug('-------')
def analyse_handle_and_door(handle, doors):
'''
TODO:
- This assumes that the bounding boxes match perfectly
- visualize information somewhere for easier debugging
- no "press the handle down or not" decision yet
- Plane detection for non-perpendicular door normals
'''
rospy.logdebug('Not working stably yet')
return
#Check if handle is inside a door bounding_box
doors = list( filter(
lambda l :
l.bbox.center.x - l.bbox.size_x/2 < handle.bbox.center.x < l.bbox.center.x + l.bbox.size_x/2 and
l.bbox.center.y - l.bbox.size_y/2 < handle.bbox.center.y < l.bbox.center.y + l.bbox.size_y/2 ,
doors) )
#Assume theres only one valid door at max:
if not doors:
return
door = doors[0]
#Handle orientation (horizontal, vertical)
if 0.6 * handle.bbox.size_x > handle.bbox.size_y:
rospy.loginfo('horizontal')
_handle_orientation = DoorInfo.HORIZONTAL
elif 0.6 * handle.bbox.size_y > handle.bbox.size_x:
rospy.loginfo('vertical')
_handle_orientation = DoorInfo.VERTICAL
else:
rospy.loginfo('orientation undecidable')
_handle_orientation = DoorInfo.UNDEFINED
#Check which side the handle is closest too
#Hinge is always on the edge further away
if (door.bbox.center.x - handle.bbox.center.x) / door.bbox.size_x > 0.60:
rospy.logerr('left')
_handle_side = DoorInfo.LEFT
hinge_position = Point(handle.position.x - 2 * door.position.x, handle.position.y - 2 * door.position.y, door.position.z)
elif (door.bbox.center.x - handle.bbox.center.x) / door.bbox.size_x < 1.00 - 0.60:
rospy.logerr('right')
_handle_side = DoorInfo.RIGHT
hinge_position = Point(handle.position.x + 2 * door.position.x, handle.position.y + 2 * door.position.y, door.position.z)
elif (door.bbox.center.y - handle.bbox.center.y) / door.bbox.size_y < 1.00 - 0.60:
rospy.logerr('down')
_handle_side = DoorInfo.DOWN
rospy.logwarn('Vertical hinges not implemented yet')
hinge_position = door.position  # fallback so the hinge pose below stays defined
else:
rospy.loginfo('relative handle position unclear')
_handle_side = DoorInfo.UNDEFINED
hinge_position = door.position  # fallback so the hinge pose below stays defined
#distance between handle and the far door edge -> radius (not needed explicitly)
#Hinge pose
_hinge_pose = Pose(
hinge_position,
quaternion_from_euler(0, pi/2, 0)
)
#Handle pose
_handle_pose = Pose(
handle.position,
quaternion_from_euler(-pi/2, pi, pi)
)
pub_door_info.publish(
DoorInfo(
header = Header(
stamp = rospy.Time.now(),
frame_id = 'root'
),
hinge_pose = _hinge_pose,
handle_pose = _handle_pose,
handle_orientation = _handle_orientation,
handle_side = _handle_side,
)
)
def listener():
'''
Listen for spatial detections, filter them
and if an object of interest is found
pass the position along to the robot controller
'''
rospy.init_node('camera_interface', anonymous=True)
#Set in mobile_publisher.launch
model = rospy.get_param('/camera_interface/model')
#Use the matching labels
init_labels(model)
#And the matching topic
if model == 'yolov4':
topic = '/yolov4_publisher/color/yolov4_Spatial_detections'
else:
topic = '/mobilenet_node_custom/color/spatial_detections'
rospy.Subscriber(
topic,
SpatialDetectionArray,
callback
)
rospy.spin()
if __name__ == '__main__':
try:
listener()
except rospy.ROSInterruptException:
pass
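# Hedged usage sketch (topic names are taken from the publishers above; the
# '/camera_interface/model' param is read from the parameter server, so this node
# is presumably started from mobile_publisher.launch rather than plain rosrun):
# $ rostopic echo /armchair/handle_position
# $ rostopic echo /armchair/door_info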
``` |
{
"source": "Aachen-Armchair-Engineers/kinova-ros",
"score": 3
} |
#### File: nodes/kinova_demo/run_COMParameters_estimation.py
```python
import roslib; roslib.load_manifest('kinova_demo')
import rospy
import actionlib
import kinova_msgs.msg
import geometry_msgs.msg
import tf
import std_msgs.msg
import math
from kinova_msgs.srv import *
import argparse
prefix = 'j2s7s300_'
nbJoints = 7
interactive = True
def joint_position_client(angle_set):
action_address = '/' + prefix + 'driver/joints_action/joint_angles'
client = actionlib.SimpleActionClient(action_address,
kinova_msgs.msg.ArmJointAnglesAction)
client.wait_for_server()
goal = kinova_msgs.msg.ArmJointAnglesGoal()
goal.angles.joint1 = angle_set[0]
goal.angles.joint2 = angle_set[1]
goal.angles.joint3 = angle_set[2]
goal.angles.joint4 = angle_set[3]
goal.angles.joint5 = angle_set[4]
goal.angles.joint6 = angle_set[5]
goal.angles.joint7 = angle_set[6]
client.send_goal(goal)
client.wait_for_result(rospy.Duration(100.0))
# Prints out the result of executing the action
return client.get_result()
def argumentParser(argument):
""" Argument parser """
global prefix, nbJoints  # update the module-level settings, not function locals
parser = argparse.ArgumentParser(description='Drive robot joint to command position')
parser.add_argument('kinova_robotType', metavar='kinova_robotType', type=str, default='j2n6a300',
help='kinova_RobotType is in format of: [{j|m|r|c}{1|2}{s|n}{4|6|7}{s|a}{2|3}{0}{0}]. eg: j2n6a300 refers to jaco v2 6DOF assistive 3fingers. Please be noted that not all options are valided for different robot types.')
args_ = parser.parse_args(argument)
prefix = args_.kinova_robotType + "_"
nbJoints = int(args_.kinova_robotType[3])
def ZeroTorque():
#move robot to candle like pose
#result = joint_position_client([180]*7)
print ("torque before setting zero")
topic_name = '/' + prefix + 'driver/out/joint_torques'
sub_once = rospy.Subscriber(topic_name, kinova_msgs.msg.JointAngles, printTorqueValues)
rospy.wait_for_message(topic_name, kinova_msgs.msg.JointAngles, timeout=2)
sub_once.unregister()
#call zero torque
service_address = '/' + prefix + 'driver/in/set_zero_torques'
rospy.wait_for_service(service_address)
try:
zeroTorques = rospy.ServiceProxy(service_address, ZeroTorques)
zeroTorques()
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
return None
rospy.sleep(0.5)
print "torque after setting zero"
sub_once = rospy.Subscriber(topic_name, kinova_msgs.msg.JointAngles, printTorqueVaules)
rospy.wait_for_message(topic_name, kinova_msgs.msg.JointAngles, timeout=2)
sub_once.unregister()
def runCOMParameterEstimation():
service_address = '/' + prefix + 'driver/in/run_COM_parameters_estimation'
rospy.wait_for_service(service_address)
try:
runEstimation = rospy.ServiceProxy(service_address, RunCOMParametersEstimation)
runEstimation()
except rospy.ServiceException as e:
print("Service call failed: %s" % e)
return None
def printTorqueValues(torques):
print("Torque - {}, {}, {}, {}, {}, {}, {}".format(torques.joint1,
torques.joint2, torques.joint3, torques.joint4,
torques.joint5, torques.joint6, torques.joint7))
if __name__ == '__main__':
try:
args = argumentParser(None)
rospy.init_node('torque_compensated_mode')
if (interactive == True):
nb = input('Moving robot to candle like position, press return to start')
result = joint_position_client([180]*7)
if (interactive == True):
nb = input('Setting torques to zero, press return')
#test zero torque
ZeroTorque()
if (interactive == True):
nb = input('Starting COM parameters estimation, press return')
runCOMParameterEstimation()
except rospy.ROSInterruptException:
print("program interrupted before completion")
``` |
{
"source": "aachenhang/crowdcount-mcnn",
"score": 2
} |
#### File: aachenhang/crowdcount-mcnn/amendnet_train_a.py
```python
import os
import torch
import numpy as np
import sys
sys.path.append('./src/')
from src.AmendNet import MCNN_BackBone, MCNNNet, AmendNet
from src import network
from src.data_loader import ImageDataLoader
from src.timer import Timer
from src import utils
from src.evaluate_model import evaluate_model
try:
from termcolor import cprint
except ImportError:
cprint = None
try:
from pycrayon import CrayonClient
except ImportError:
CrayonClient = None
def log_print(text, color=None, on_color=None, attrs=None):
if cprint is not None:
cprint(text, color=color, on_color=on_color, attrs=attrs)
else:
print(text)
method = 'amendnet_saved_models'
dataset_name = 'shtechA'
output_dir = './amendnet_saved_models/'
train_path = './data/formatted_trainval/AmendNet_shanghaitech_part_A_patches_9/train'
train_gt_path = './data/formatted_trainval/AmendNet_shanghaitech_part_A_patches_9/train_den'
val_path = './data/formatted_trainval/AmendNet_shanghaitech_part_A_patches_9/val'
val_gt_path = './data/formatted_trainval/AmendNet_shanghaitech_part_A_patches_9/val_den'
model_path = './final_models/mcnn_shtechA_490.h5'
#training configuration
start_step = 0
end_step = 2000
lr = 0.00001
momentum = 0.9
disp_interval = 500
log_interval = 250
#Tensorboard config
use_tensorboard = False
save_exp_name = method + '_' + dataset_name + '_' + 'v1'
remove_all_log = False # remove all historical experiments in TensorBoard
exp_name = None # the previous experiment name in TensorBoard
# ------------
rand_seed = 64678
if rand_seed is not None:
np.random.seed(rand_seed)
torch.manual_seed(rand_seed)
torch.cuda.manual_seed(rand_seed)
# load mcnn_net and amend_net
mcnn_backbone = MCNN_BackBone()
mcnn_net = MCNNNet(mcnn_backbone=mcnn_backbone)
network.weights_normal_init(mcnn_net, dev=0.01)
mcnn_net.cuda()
mcnn_net.train()
mcnn_net_optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, mcnn_net.parameters()), lr=lr)
amend_net = AmendNet(mcnn_backbone=mcnn_backbone)
network.weights_normal_init(amend_net, dev=0.01)
amend_net.cuda()
amend_net.train()
amend_net_optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, amend_net.parameters()), lr=lr)
print('Loading the mcnn_backbone...')
network.load_net(model_path, mcnn_backbone, prefix='DME.')
print('Done')
if not os.path.exists(output_dir):
os.mkdir(output_dir)
# tensorboad
use_tensorboard = use_tensorboard and CrayonClient is not None
if use_tensorboard:
cc = CrayonClient(hostname='127.0.0.1')
if remove_all_log:
cc.remove_all_experiments()
if exp_name is None:
exp_name = save_exp_name
exp = cc.create_experiment(exp_name)
else:
exp = cc.open_experiment(exp_name)
# training
mcnn_net_train_loss = 0
amend_net_train_loss = 0
step_cnt = 0
re_cnt = False
t = Timer()
t.tic()
data_loader = ImageDataLoader(train_path, train_gt_path, shuffle=True, gt_downsample=True, pre_load=True)
data_loader_val = ImageDataLoader(val_path, val_gt_path, shuffle=False, gt_downsample=True, pre_load=True)
best_mae = sys.maxsize
for epoch in range(start_step, end_step+1):
step = -1
train_loss = 0
for blob in list(data_loader):
step = step + 1
im_data = blob['data']
gt_data = blob['gt_density']
step_cnt += 1
for net in [mcnn_net, amend_net]:
density_map = net(im_data, gt_data)
loss = net.loss
if net is mcnn_net:
mcnn_net_train_loss += loss.data[0]
mcnn_net_optimizer.zero_grad()
loss.backward()
mcnn_net_optimizer.step()
elif net is amend_net:
amend_net_train_loss += loss.data[0]
amend_net_optimizer.zero_grad()
loss.backward()
amend_net_optimizer.step()
else:
raise ValueError("net is neither mcnn_net nor amend_net!")
if step % disp_interval == 0:
duration = t.toc(average=False)
fps = step_cnt / duration
gt_count = np.sum(gt_data)
density_map = density_map.data.cpu().numpy()
et_count = np.sum(density_map)
if net is mcnn_net:
utils.save_results(im_data,gt_data,density_map, output_dir, fname='mcnnresults.png')
elif net is amend_net:
utils.save_results(im_data,gt_data,density_map, output_dir, fname='amendresults.png')
net_text = 'mcnn ' if net is mcnn_net else 'amend '
log_text = (net_text+'epoch: %4d, step %4d, Time: %.4fs, gt_cnt: %4.1f, et_cnt: %4.1f') \
% (epoch, step, duration, gt_count,et_count)
log_print(log_text, color='green', attrs=['bold'])
re_cnt = True
if re_cnt:
t.tic()
re_cnt = False
if (epoch % 2 == 0):
save_name = os.path.join(output_dir, '{}_{}_{}.h5'.format(method,dataset_name,epoch))
network.save_net(save_name, net)
#calculate error on the validation dataset
mae,mse = evaluate_model(save_name, data_loader_val, netname='AmendNet')
if mae < best_mae:
best_mae = mae
best_mse = mse
best_model = '{}_{}_{}.h5'.format(method,dataset_name,epoch)
log_text = 'EPOCH: %d, MAE: %.1f, MSE: %0.1f' % (epoch,mae,mse)
log_print(log_text, color='green', attrs=['bold'])
log_text = 'BEST MAE: %0.1f, BEST MSE: %0.1f, BEST MODEL: %s' % (best_mae,best_mse, best_model)
log_print(log_text, color='green', attrs=['bold'])
if use_tensorboard:
exp.add_scalar_value('MAE', mae, step=epoch)
exp.add_scalar_value('MSE', mse, step=epoch)
exp.add_scalar_value('mcnn_net_train_loss', mcnn_net_train_loss/data_loader.get_num_samples(), step=epoch)
exp.add_scalar_value('amend_net_train_loss', amend_net_train_loss/data_loader.get_num_samples(), step=epoch)
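# Hedged note (not part of the original script): checkpoints are saved as
# '{method}_{dataset_name}_{epoch}.h5', so a stored epoch (the epoch number below is
# illustrative) can be re-scored offline with the same helper used in the loop:
# mae, mse = evaluate_model('./amendnet_saved_models/amendnet_saved_models_shtechA_100.h5',
#                           data_loader_val, netname='AmendNet')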
``` |
{
"source": "aachenmax/bitmovin-api-sdk-python",
"score": 2
} |
#### File: bitmovin_api_sdk/common/rest_client.py
```python
import json
import requests
from datetime import datetime, date
from bitmovin_api_sdk.common.bitmovin_api_logger_base import BitmovinApiLoggerBase
class RestClient(object):
HTTP_HEADERS = {
'Content-Type': 'application/json',
'X-Api-Client': 'bitmovin-api-sdk-python',
'X-Api-Client-Version': '1.43.0'
}
DELETE = 'DELETE'
GET = 'GET'
PATCH = 'PATCH'
POST = 'POST'
PUT = 'PUT'
API_KEY_HTTP_HEADER_NAME = 'X-Api-Key'
TENANT_ORG_ID_HTTP_HEADER_NAME = 'X-Tenant-Org-Id'
def __init__(self, api_key, tenant_org_id=None, base_url=None, logger=BitmovinApiLoggerBase()):
# type: (str, str, str, BitmovinApiLoggerBase) -> None
super(RestClient, self).__init__()
self.api_key = api_key
self.tenant_org_id = tenant_org_id
self.logger = BitmovinApiLoggerBase()
if logger is not None and isinstance(logger, BitmovinApiLoggerBase) is False:
raise TypeError("Logger must be subclass of BitmovinApiLoggerBase")
elif logger is not None and issubclass(type(logger), BitmovinApiLoggerBase) is True:
self.logger = logger
if base_url is None or base_url == '':
self.base_url = 'https://api.bitmovin.com/v1'
else:
self.base_url = base_url
if not self.api_key:
raise TypeError("api_key has to be set")
self.http_headers = self.HTTP_HEADERS.copy()
self.http_headers.update({self.API_KEY_HTTP_HEADER_NAME: self.api_key})
if self.tenant_org_id is not None and self.tenant_org_id != '':
self.http_headers.update({self.TENANT_ORG_ID_HTTP_HEADER_NAME: self.tenant_org_id})
def request(self, method, relative_url, payload=None):
# type: (str, str, object) -> object
url = self.urljoin(self.base_url, relative_url)
if payload is not None and not isinstance(payload, list):
# Remove none set values
payload = {k: v for k, v in payload.items() if v is not None}
self._log_request(method, url, payload)
if payload is None:
response = requests.request(method, url, headers=self.http_headers)
else:
response = requests.request(method, url, headers=self.http_headers, data=self._serialize(payload))
response.raise_for_status()
self.logger.log('RESPONSE: {}'.format(response.text))
if not response.text:
return dict()
return response.json()
def _serialize(self, object_):
# type: (object) -> object
if object_ is None:
return None
serialized = json.dumps(object_, sort_keys=True, default=self._default_to_dict)
self.logger.log('Serialized request object: {}'.format(serialized))
return serialized
def _log_request(self, method, url, payload=None):
# type: (str, str, object) -> None
log_line = 'REQUEST: {} {}'.format(method, url)
if payload:
log_line += ' --> {}'.format(json.dumps(payload, default=self._default_to_dict))
self.logger.log(log_line)
@staticmethod
def urljoin(*args):
# type: (*object) -> str
return '/'.join([str(x).strip('/') for x in args])
@staticmethod
def _default_to_dict(obj):
# type: (object) -> object
if hasattr(obj, 'to_dict'):
return obj.to_dict()
if hasattr(obj, '__dict__'):
return obj.__dict__
if isinstance(obj, datetime):
return obj.strftime('%Y-%m-%dT%H:%M:%SZ')
if isinstance(obj, date):
return obj.isoformat()
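# Usage sketch (illustrative; the API key and endpoint below are
# placeholders, not verified against the Bitmovin API):
#
#     client = RestClient(api_key="YOUR_API_KEY")
#     encodings = client.request(RestClient.GET, "/encoding/encodings")
#
# request() serializes any payload via _serialize() and returns the parsed
# JSON body, or an empty dict when the response body is empty.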
``` |
{
"source": "aachenmax/couler",
"score": 2
} |
#### File: core/templates/volume.py
```python
from collections import OrderedDict
class Volume(object):
def __init__(self, name, claim_name=None, config_map=None):
self.name = name
self.claim_name = claim_name
self.config_map = config_map
def to_dict(self):
if self.claim_name is not None:
return OrderedDict(
{
"name": self.name,
"persistentVolumeClaim": {"claimName": self.claim_name},
}
)
elif self.config_map is not None:
return OrderedDict(
{
"name": self.name,
"configMap": {"name": self.config_map,
"defaultMode": "0777"},
}
)
class VolumeMount(object):
def __init__(self, name, mount_path, sub_path=None):
self.name = name
self.mount_path = mount_path
        self.sub_path = sub_path
def to_dict(self):
if self.sub_path is not None:
return OrderedDict({"name": self.name,
"mountPath": self.mount_path,
"subPath": self.sub_path})
else:
return OrderedDict({"name": self.name,
"mountPath": self.mount_path})
``` |
{
"source": "aachenmax/vmaf",
"score": 2
} |
#### File: src/vmaf/config.py
```python
from __future__ import absolute_import
import os
__copyright__ = "Copyright 2016-2020, Netflix, Inc."
__license__ = "BSD+Patent"
PYTHON_ROOT = os.path.dirname(os.path.realpath(__file__))
ROOT = os.path.abspath(os.path.join(PYTHON_ROOT, '..', '..', '..',))
class VmafExternalConfig(object):
_MISSING_EXTERNAL_MESSAGE = """
Must install {name} and set {key} in %s/externals.py, e.g. add a line like
{key} = "[path to exec]/{name}"
""" % PYTHON_ROOT
@staticmethod
def _path_from_external(name):
"""
:param str name: Name of external configuration to look up
:return str: Configured path, if any
"""
try:
from . import externals
path = getattr(externals, name, None)
if path and os.path.exists(path):
return path
except ImportError:
            print('ImportError')
return None
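    # Example externals.py entry (illustrative path) matching the message
    # template above:
    #
    #     FFMPEG_PATH = "/usr/local/bin/ffmpeg"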
@classmethod
def ffmpeg_path(cls):
"""
:return str: Path to ffmpeg, if installed and configured via `externals` module
"""
return cls._path_from_external('FFMPEG_PATH')
@classmethod
def matlab_path(cls):
"""
:return str: Path to matlab, if installed and configured via `externals` module
"""
return cls._path_from_external('MATLAB_PATH')
@classmethod
def matlab_runtime_path(cls):
"""
:return str: Path to matlab runtime, if installed and configured via `externals` module
"""
return cls._path_from_external('MATLAB_RUNTIME_PATH')
@classmethod
def cvx_path(cls):
"""
:return str: Path to cvx, if installed and configured via `externals` module
"""
return cls._path_from_external('CVX_PATH')
@classmethod
def psnr_path(cls):
"""
:return str: Path to external psnr executable, if installed and configured via `externals` module
"""
return cls._path_from_external('PSNR_PATH')
@classmethod
def moment_path(cls):
"""
:return str: Path to external moment executable, if installed and configured via `externals` module
"""
return cls._path_from_external('MOMENT_PATH')
@classmethod
def ssim_path(cls):
"""
:return str: Path to external ssim executable, if installed and configured via `externals` module
"""
return cls._path_from_external('SSIM_PATH')
@classmethod
def ms_ssim_path(cls):
"""
:return str: Path to external ms_ssim executable, if installed and configured via `externals` module
"""
return cls._path_from_external('MS_SSIM_PATH')
@classmethod
def vmaf_path(cls):
"""
:return str: Path to external vmaf executable, if installed and configured via `externals` module
"""
return cls._path_from_external('VMAF_PATH')
@classmethod
def vmafossexec_path(cls):
"""
:return str: Path to external vmafossexec executable, if installed and configured via `externals` module
"""
return cls._path_from_external('VMAFOSSEXEC_PATH')
@classmethod
def vmafrc_path(cls):
"""
        :return str: Path to external vmafrc executable, if installed and configured via `externals` module
"""
return cls._path_from_external('VMAFRC_PATH')
@classmethod
def get_and_assert_ffmpeg(cls):
path = cls.ffmpeg_path()
assert path is not None, cls._MISSING_EXTERNAL_MESSAGE.format(name='ffmpeg', key='FFMPEG_PATH')
return path
@classmethod
def get_and_assert_matlab(cls):
path = cls.matlab_path()
assert path is not None, cls._MISSING_EXTERNAL_MESSAGE.format(name='matlab', key='MATLAB_PATH')
return path
@classmethod
def get_and_assert_matlab_runtime(cls):
path = cls.matlab_runtime_path()
assert path is not None, \
"""Must install matlab runtime (v9.1) and set {key} in {root}/externals.py, e.g. add a line like {key} = "[path to matlab runtime]/v91"
""".format(root=PYTHON_ROOT, key='MATLAB_RUNTIME_PATH')
return path
@classmethod
def get_and_assert_cvx(cls):
path = cls.cvx_path()
assert path is not None, cls._MISSING_EXTERNAL_MESSAGE.format(name='cvx', key='CVX_PATH')
return path
class VmafConfig(object):
@classmethod
def root_path(cls, *components):
return os.path.join(ROOT, *components)
@classmethod
def file_result_store_path(cls, *components):
return cls.root_path('workspace', 'result_store_dir', 'file_result_store', *components)
@classmethod
def encode_store_path(cls, *components):
return cls.root_path('workspace', 'result_store_dir', 'encode_store', *components)
@classmethod
def workspace_path(cls, *components):
return cls.root_path('workspace', *components)
@classmethod
def workdir_path(cls, *components):
return cls.root_path('workspace', 'workdir', *components)
@classmethod
def model_path(cls, *components):
return cls.root_path('model', *components)
@classmethod
def resource_path(cls, *components):
return cls.root_path('resource', *components)
@classmethod
def test_resource_path(cls, *components):
return cls.root_path('python', 'test', 'resource', *components)
@classmethod
def tools_resource_path(cls, *components):
return cls.root_path('python', 'src', 'vmaf', 'tools', 'resource', *components)
@classmethod
def encode_path(cls, *components):
return cls.root_path('workspace', 'encode', *components)
class DisplayConfig(object):
@staticmethod
def show(**kwargs):
from vmaf import plt
if 'write_to_dir' in kwargs:
format = kwargs['format'] if 'format' in kwargs else 'png'
filedir = kwargs['write_to_dir'] if kwargs['write_to_dir'] is not None else VmafConfig.workspace_path('output')
if not os.path.exists(filedir):
os.makedirs(filedir)
for fignum in plt.get_fignums():
fig = plt.figure(fignum)
fig.savefig(os.path.join(filedir, str(fignum) + '.' + format), format=format)
else:
plt.show()
```
#### File: vmaf/core/h5py_mixin.py
```python
import h5py
__copyright__ = "Copyright 2016-2020, Netflix, Inc."
__license__ = "BSD+Patent"
class H5pyMixin(object):
"""
Use a h5py file to store raw video channel or similar as features.
Implementation class must have attribute optional_dict2.
"""
@staticmethod
def open_h5py_file(h5py_filepath, mode='w'):
f = h5py.File(h5py_filepath, mode)
return f
@staticmethod
def close_h5py_file(f, mode='w'):
if mode == 'w':
f.flush()
f.close()
elif mode == 'r':
f.close()
else:
assert False
def assert_h5py_file(self):
assert self.optional_dict2 is not None and 'h5py_file' in self.optional_dict2
@property
def h5py_file(self):
return self.optional_dict2['h5py_file']
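# Usage sketch (illustrative): a subclass is expected to supply
# optional_dict2 with an 'h5py_file' entry, e.g.
#
#     f = H5pyMixin.open_h5py_file('/tmp/features.hdf5', mode='w')
#     # ... write feature datasets into f ...
#     H5pyMixin.close_h5py_file(f, mode='w')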
```
#### File: src/vmaf/routine.py
```python
import os
import numpy as np
from vmaf import plt
from vmaf.core.cross_validation import ModelCrossValidation
from vmaf.core.feature_assembler import FeatureAssembler
from vmaf.core.quality_runner import VmafQualityRunner
from vmaf.core.result_store import FileSystemResultStore
from vmaf.tools.misc import indices, get_stdout_logger, import_python_file, close_logger, get_file_name_without_extension
from vmaf.config import VmafConfig, DisplayConfig
from vmaf.core.asset import Asset
from vmaf.core.train_test_model import TrainTestModel, RegressorMixin, ClassifierMixin
from vmaf.core.local_explainer import LocalExplainer
__copyright__ = "Copyright 2016-2020, Netflix, Inc."
__license__ = "BSD+Patent"
def read_dataset(dataset, **kwargs):
groundtruth_key = kwargs['groundtruth_key'] if 'groundtruth_key' in kwargs else None
skip_asset_with_none_groundtruth = kwargs['skip_asset_with_none_groundtruth'] \
if 'skip_asset_with_none_groundtruth' in kwargs else False
content_ids = kwargs['content_ids'] if 'content_ids' in kwargs else None
asset_ids = kwargs['asset_ids'] if 'asset_ids' in kwargs else None
workdir_root = kwargs['workdir_root'] if 'workdir_root' in kwargs else VmafConfig.workdir_path()
# asserts, can add more to the list...
assert hasattr(dataset, 'dataset_name')
assert hasattr(dataset, 'ref_videos')
assert hasattr(dataset, 'dis_videos')
assert hasattr(dataset, 'yuv_fmt') or all(['yuv_fmt' in ref_video for ref_video in dataset.ref_videos])
data_set_name = dataset.dataset_name
ref_videos = dataset.ref_videos
dis_videos = dataset.dis_videos
width = dataset.width if hasattr(dataset, 'width') else None
height = dataset.height if hasattr(dataset, 'height') else None
yuv_fmt = dataset.yuv_fmt if hasattr(dataset, 'yuv_fmt') else None
quality_width = dataset.quality_width if hasattr(dataset, 'quality_width') else None
quality_height = dataset.quality_height if hasattr(dataset, 'quality_height') else None
resampling_type = dataset.resampling_type if hasattr(dataset, 'resampling_type') else None
crop_cmd = dataset.crop_cmd if hasattr(dataset, 'crop_cmd') else None
pad_cmd = dataset.pad_cmd if hasattr(dataset, 'pad_cmd') else None
workfile_yuv_type = dataset.workfile_yuv_type if hasattr(dataset, 'workfile_yuv_type') else None
duration_sec = dataset.duration_sec if hasattr(dataset, 'duration_sec') else None
fps = dataset.fps if hasattr(dataset, 'fps') else None
start_frame = dataset.start_frame if hasattr(dataset, 'start_frame') else None
end_frame = dataset.end_frame if hasattr(dataset, 'end_frame') else None
ref_dict = {} # dictionary of content_id -> path for ref videos
for ref_video in ref_videos:
ref_dict[ref_video['content_id']] = ref_video
assets = []
for dis_video in dis_videos:
if content_ids is not None and dis_video['content_id'] not in content_ids:
continue
if asset_ids is not None and dis_video['asset_id'] not in asset_ids:
continue
if groundtruth_key is not None:
groundtruth = dis_video[groundtruth_key]
else:
if 'dmos' in dis_video:
groundtruth = dis_video['dmos']
elif 'mos' in dis_video:
groundtruth = dis_video['mos']
elif 'groundtruth' in dis_video:
groundtruth = dis_video['groundtruth']
else:
groundtruth = None
if 'os' in dis_video:
raw_groundtruth = dis_video['os']
else:
raw_groundtruth = None
if 'groundtruth_std' in dis_video:
groundtruth_std = dis_video['groundtruth_std']
else:
groundtruth_std = None
if 'rebuf_indices' in dis_video:
rebuf_indices = dis_video['rebuf_indices']
else:
rebuf_indices = None
ref_video = ref_dict[dis_video['content_id']]
ref_path = ref_video['path']
ref_yuv_fmt_ = yuv_fmt if yuv_fmt is not None else ref_dict[dis_video['content_id']]['yuv_fmt']
dis_yuv_fmt_ = dis_video['yuv_fmt'] if 'yuv_fmt' in dis_video else ref_yuv_fmt_
if width is not None:
width_ = width
elif 'width' in ref_video and 'width' not in dis_video:
width_ = ref_video['width']
elif 'width' in dis_video and 'width' not in ref_video:
width_ = dis_video['width']
elif 'width' in ref_video and 'width' in dis_video:
assert ref_video['width'] == dis_video['width']
width_ = ref_video['width']
else:
width_ = None
if height is not None:
height_ = height
elif 'height' in ref_video and 'height' not in dis_video:
height_ = ref_video['height']
elif 'height' in dis_video and 'height' not in ref_video:
height_ = dis_video['height']
elif 'height' in ref_video and 'height' in dis_video:
assert ref_video['height'] == dis_video['height']
height_ = ref_video['height']
else:
height_ = None
if quality_width is not None:
quality_width_ = quality_width
elif 'quality_width' in dis_video:
quality_width_ = dis_video['quality_width']
else:
quality_width_ = None
if quality_height is not None:
quality_height_ = quality_height
elif 'quality_height' in dis_video:
quality_height_ = dis_video['quality_height']
else:
quality_height_ = None
if resampling_type is not None:
resampling_type_ = resampling_type
elif 'resampling_type' in dis_video:
resampling_type_ = dis_video['resampling_type']
else:
resampling_type_ = None
if crop_cmd is not None:
ref_crop_cmd_ = crop_cmd
dis_crop_cmd_ = crop_cmd
else:
if 'crop_cmd' in ref_video:
ref_crop_cmd_ = ref_video['crop_cmd']
else:
ref_crop_cmd_ = None
if 'crop_cmd' in dis_video:
dis_crop_cmd_ = dis_video['crop_cmd']
else:
dis_crop_cmd_ = None
if pad_cmd is not None:
ref_pad_cmd_ = pad_cmd
dis_pad_cmd_ = pad_cmd
else:
if 'pad_cmd' in ref_video:
ref_pad_cmd_ = ref_video['pad_cmd']
else:
ref_pad_cmd_ = None
if 'pad_cmd' in dis_video:
dis_pad_cmd_ = dis_video['pad_cmd']
else:
dis_pad_cmd_ = None
if duration_sec is not None:
duration_sec_ = duration_sec
elif 'duration_sec' in dis_video:
duration_sec_ = dis_video['duration_sec']
else:
duration_sec_ = None
if fps is not None:
fps_ = fps
elif 'fps' in dis_video:
fps_ = dis_video['fps']
else:
fps_ = None
if start_frame is not None:
start_frame_ = start_frame
elif 'start_frame' in dis_video:
start_frame_ = dis_video['start_frame']
else:
start_frame_ = None
if end_frame is not None:
end_frame_ = end_frame
elif 'end_frame' in dis_video:
end_frame_ = dis_video['end_frame']
else:
end_frame_ = None
asset_dict = {'ref_yuv_type': ref_yuv_fmt_, 'dis_yuv_type': dis_yuv_fmt_}
if width_ is not None:
if asset_dict['ref_yuv_type'] != 'notyuv':
asset_dict['ref_width'] = width_
if asset_dict['dis_yuv_type'] != 'notyuv':
asset_dict['dis_width'] = width_
if height_ is not None:
if asset_dict['ref_yuv_type'] != 'notyuv':
asset_dict['ref_height'] = height_
if asset_dict['dis_yuv_type'] != 'notyuv':
asset_dict['dis_height'] = height_
if groundtruth is not None:
asset_dict['groundtruth'] = groundtruth
if raw_groundtruth is not None:
asset_dict['raw_groundtruth'] = raw_groundtruth
if groundtruth_std is not None:
asset_dict['groundtruth_std'] = groundtruth_std
if quality_width_ is not None:
asset_dict['quality_width'] = quality_width_
if quality_height_ is not None:
asset_dict['quality_height'] = quality_height_
if resampling_type_ is not None:
asset_dict['resampling_type'] = resampling_type_
if ref_crop_cmd_ is not None:
asset_dict['ref_crop_cmd'] = ref_crop_cmd_
if dis_crop_cmd_ is not None:
asset_dict['dis_crop_cmd'] = dis_crop_cmd_
if ref_pad_cmd_ is not None:
asset_dict['ref_pad_cmd'] = ref_pad_cmd_
if dis_pad_cmd_ is not None:
asset_dict['dis_pad_cmd'] = dis_pad_cmd_
if duration_sec_ is not None:
asset_dict['duration_sec'] = duration_sec_
if workfile_yuv_type is not None:
asset_dict['workfile_yuv_type'] = workfile_yuv_type
if rebuf_indices is not None:
asset_dict['rebuf_indices'] = rebuf_indices
if fps_ is not None:
asset_dict['fps'] = fps_
if start_frame_ is not None:
asset_dict['start_frame'] = start_frame_
if end_frame_ is not None:
asset_dict['end_frame'] = end_frame_
if groundtruth is None and skip_asset_with_none_groundtruth:
pass
else:
asset = Asset(dataset=data_set_name,
content_id=dis_video['content_id'],
asset_id=dis_video['asset_id'],
workdir_root=workdir_root,
ref_path=ref_path,
dis_path=dis_video['path'],
asset_dict=asset_dict,
)
assets.append(asset)
return assets
def run_test_on_dataset(test_dataset, runner_class, ax,
result_store, model_filepath,
parallelize=True, fifo_mode=True,
aggregate_method=np.mean,
type='regressor',
**kwargs):
test_assets = read_dataset(test_dataset, **kwargs)
test_raw_assets = None
try:
for test_asset in test_assets:
assert test_asset.groundtruth is not None
except AssertionError:
        # no groundtruth, try to do subjective modeling
from sureal.dataset_reader import RawDatasetReader
from sureal.subjective_model import DmosModel
subj_model_class = kwargs['subj_model_class'] if 'subj_model_class' in kwargs and kwargs['subj_model_class'] is not None else DmosModel
dataset_reader_class = kwargs['dataset_reader_class'] if 'dataset_reader_class' in kwargs else RawDatasetReader
subjective_model = subj_model_class(dataset_reader_class(test_dataset))
subjective_model.run_modeling(**kwargs)
test_dataset_aggregate = subjective_model.to_aggregated_dataset(**kwargs)
test_raw_assets = test_assets
test_assets = read_dataset(test_dataset_aggregate, **kwargs)
if model_filepath is not None:
optional_dict = {'model_filepath': model_filepath}
if 'model_720_filepath' in kwargs and kwargs['model_720_filepath'] is not None:
optional_dict['720model_filepath'] = kwargs['model_720_filepath']
if 'model_480_filepath' in kwargs and kwargs['model_480_filepath'] is not None:
optional_dict['480model_filepath'] = kwargs['model_480_filepath']
else:
optional_dict = None
if 'enable_transform_score' in kwargs and kwargs['enable_transform_score'] is not None:
if not optional_dict:
optional_dict = {}
optional_dict['enable_transform_score'] = kwargs['enable_transform_score']
if 'disable_clip_score' in kwargs and kwargs['disable_clip_score'] is not None:
if not optional_dict:
optional_dict = {}
optional_dict['disable_clip_score'] = kwargs['disable_clip_score']
if 'subsample' in kwargs and kwargs['subsample'] is not None:
if not optional_dict:
optional_dict = {}
optional_dict['subsample'] = kwargs['subsample']
# run
runner = runner_class(
test_assets,
None, fifo_mode=fifo_mode,
delete_workdir=True,
result_store=result_store,
optional_dict=optional_dict,
optional_dict2=None,
)
runner.run(parallelize=parallelize)
results = runner.results
for result in results:
result.set_score_aggregate_method(aggregate_method)
try:
model_type = runner.get_train_test_model_class()
except:
if type == 'regressor':
model_type = RegressorMixin
elif type == 'classifier':
model_type = ClassifierMixin
else:
assert False
split_test_indices_for_perf_ci = kwargs['split_test_indices_for_perf_ci'] \
if 'split_test_indices_for_perf_ci' in kwargs else False
# plot
groundtruths = list(map(lambda asset: asset.groundtruth, test_assets))
predictions = list(map(lambda result: result[runner_class.get_score_key()], results))
    raw_groundtruths = None if test_raw_assets is None else \
        list(map(lambda asset: asset.raw_groundtruth, test_raw_assets))
groundtruths_std = None if test_assets is None else \
list(map(lambda asset: asset.groundtruth_std, test_assets))
try:
predictions_bagging = list(map(lambda result: result[runner_class.get_bagging_score_key()], results))
predictions_stddev = list(map(lambda result: result[runner_class.get_stddev_score_key()], results))
predictions_ci95_low = list(map(lambda result: result[runner_class.get_ci95_low_score_key()], results))
predictions_ci95_high = list(map(lambda result: result[runner_class.get_ci95_high_score_key()], results))
predictions_all_models = list(map(lambda result: result[runner_class.get_all_models_score_key()], results))
        # transpose the list of lists, so that the outer list has the predictions for each model separately
predictions_all_models = np.array(predictions_all_models).T.tolist()
num_models = np.shape(predictions_all_models)[0]
stats = model_type.get_stats(groundtruths, predictions,
                                     ys_label_raw=raw_groundtruths,
ys_label_pred_bagging=predictions_bagging,
ys_label_pred_stddev=predictions_stddev,
ys_label_pred_ci95_low=predictions_ci95_low,
ys_label_pred_ci95_high=predictions_ci95_high,
ys_label_pred_all_models=predictions_all_models,
ys_label_stddev=groundtruths_std,
split_test_indices_for_perf_ci=split_test_indices_for_perf_ci)
except Exception as e:
print('Stats calculation failed, using default stats calculation. Error cause: ')
print(e)
stats = model_type.get_stats(groundtruths, predictions,
                                     ys_label_raw=raw_groundtruths,
ys_label_stddev=groundtruths_std,
split_test_indices_for_perf_ci=split_test_indices_for_perf_ci)
num_models = 1
print('Stats on testing data: {}'.format(model_type.format_stats_for_print(stats)))
# printing stats if multiple models are present
if 'SRCC_across_model_distribution' in stats \
and 'PCC_across_model_distribution' in stats \
and 'RMSE_across_model_distribution' in stats:
print('Stats on testing data (across multiple models, using all test indices): {}'.format(
model_type.format_across_model_stats_for_print(model_type.extract_across_model_stats(stats))))
if split_test_indices_for_perf_ci:
print('Stats on testing data (single model, multiple test sets): {}'
.format(model_type.format_stats_across_test_splits_for_print(model_type.extract_across_test_splits_stats(stats))))
if ax is not None:
content_ids = list(map(lambda asset: asset.content_id, test_assets))
if 'point_label' in kwargs:
if kwargs['point_label'] == 'asset_id':
point_labels = list(map(lambda asset: asset.asset_id, test_assets))
elif kwargs['point_label'] == 'dis_path':
point_labels = list(map(lambda asset: get_file_name_without_extension(asset.dis_path), test_assets))
else:
raise AssertionError("Unknown point_label {}".format(kwargs['point_label']))
else:
point_labels = None
model_type.plot_scatter(ax, stats, content_ids=content_ids, point_labels=point_labels, **kwargs)
ax.set_xlabel('True Score')
ax.set_ylabel("Predicted Score")
ax.grid()
ax.set_title("{runner}{num_models}\n{stats}".format(
dataset=test_assets[0].dataset,
runner=runner_class.TYPE,
stats=model_type.format_stats_for_plot(stats),
num_models=", {} models".format(num_models) if num_models > 1 else "",
))
return test_assets, results
def print_matplotlib_warning():
print("Warning: cannot import matplotlib, no picture displayed. " \
"If you are on Mac OS and have installed matplotlib, you " \
"possibly need to run: \nsudo pip uninstall python-dateutil \n" \
"sudo pip install python-dateutil==2.2 \n" \
"Refer to: http://stackoverflow.com/questions/27630114/matplotlib-issue-on-os-x-importerror-cannot-import-name-thread")
def train_test_vmaf_on_dataset(train_dataset, test_dataset,
feature_param, model_param,
train_ax, test_ax, result_store,
parallelize=True, logger=None, fifo_mode=True,
output_model_filepath=None,
aggregate_method=np.mean,
**kwargs):
train_assets = read_dataset(train_dataset, **kwargs)
train_raw_assets = None
try:
for train_asset in train_assets:
assert train_asset.groundtruth is not None
except AssertionError:
        # no groundtruth, try to do subjective modeling
from sureal.dataset_reader import RawDatasetReader
from sureal.subjective_model import DmosModel
subj_model_class = kwargs['subj_model_class'] if 'subj_model_class' in kwargs and kwargs['subj_model_class'] is not None else DmosModel
dataset_reader_class = kwargs['dataset_reader_class'] if 'dataset_reader_class' in kwargs else RawDatasetReader
subjective_model = subj_model_class(dataset_reader_class(train_dataset))
subjective_model.run_modeling(**kwargs)
train_dataset_aggregate = subjective_model.to_aggregated_dataset(**kwargs)
train_raw_assets = train_assets
train_assets = read_dataset(train_dataset_aggregate, **kwargs)
train_fassembler = FeatureAssembler(
feature_dict=feature_param.feature_dict,
feature_option_dict=None,
assets=train_assets,
logger=logger,
fifo_mode=fifo_mode,
delete_workdir=True,
result_store=result_store,
optional_dict=None,
optional_dict2=None,
parallelize=parallelize,
)
train_fassembler.run()
train_features = train_fassembler.results
for result in train_features:
result.set_score_aggregate_method(aggregate_method)
model_type = model_param.model_type
model_param_dict = model_param.model_param_dict
model_class = TrainTestModel.find_subclass(model_type)
train_xys = model_class.get_xys_from_results(train_features)
train_xs = model_class.get_xs_from_results(train_features)
train_ys = model_class.get_ys_from_results(train_features)
model = model_class(model_param_dict, logger)
model.train(train_xys, **kwargs)
# append additional information to model before saving, so that
# VmafQualityRunner can read and process
model.append_info('feature_dict', feature_param.feature_dict)
if 'score_clip' in model_param_dict:
VmafQualityRunner.set_clip_score(model, model_param_dict['score_clip'])
if 'score_transform' in model_param_dict:
VmafQualityRunner.set_transform_score(model, model_param_dict['score_transform'])
train_ys_pred = VmafQualityRunner.predict_with_model(model, train_xs, **kwargs)['ys_pred']
raw_groundtruths = None if train_raw_assets is None else \
list(map(lambda asset: asset.raw_groundtruth, train_raw_assets))
train_stats = model.get_stats(train_ys['label'], train_ys_pred, ys_label_raw=raw_groundtruths)
log = 'Stats on training data: {}'.format(model.format_stats_for_print(train_stats))
if logger:
logger.info(log)
else:
print(log)
# save model
if output_model_filepath is not None:
model.to_file(output_model_filepath)
if train_ax is not None:
train_content_ids = list(map(lambda asset: asset.content_id, train_assets))
model_class.plot_scatter(train_ax, train_stats, content_ids=train_content_ids)
train_ax.set_xlabel('True Score')
train_ax.set_ylabel("Predicted Score")
train_ax.grid()
train_ax.set_title("Dataset: {dataset}, Model: {model}\n{stats}".format(
dataset=train_dataset.dataset_name,
model=model.model_id,
stats=model_class.format_stats_for_plot(train_stats)
))
# === test model on test dataset ===
if test_dataset is None:
test_assets = None
test_stats = None
test_fassembler = None
else:
test_assets = read_dataset(test_dataset, **kwargs)
test_raw_assets = None
try:
for test_asset in test_assets:
assert test_asset.groundtruth is not None
except AssertionError:
            # no groundtruth, try to do subjective modeling
from sureal.dataset_reader import RawDatasetReader
from sureal.subjective_model import DmosModel
subj_model_class = kwargs['subj_model_class'] if 'subj_model_class' in kwargs and kwargs['subj_model_class'] is not None else DmosModel
dataset_reader_class = kwargs['dataset_reader_class'] if 'dataset_reader_class' in kwargs else RawDatasetReader
subjective_model = subj_model_class(dataset_reader_class(test_dataset))
subjective_model.run_modeling(**kwargs)
test_dataset_aggregate = subjective_model.to_aggregated_dataset(**kwargs)
test_raw_assets = test_assets
test_assets = read_dataset(test_dataset_aggregate, **kwargs)
test_fassembler = FeatureAssembler(
feature_dict=feature_param.feature_dict,
feature_option_dict=None,
assets=test_assets,
logger=logger,
fifo_mode=fifo_mode,
delete_workdir=True,
result_store=result_store,
optional_dict=None,
optional_dict2=None,
parallelize=True,
)
test_fassembler.run()
test_features = test_fassembler.results
for result in test_features:
result.set_score_aggregate_method(aggregate_method)
test_xs = model_class.get_xs_from_results(test_features)
test_ys = model_class.get_ys_from_results(test_features)
test_ys_pred = VmafQualityRunner.predict_with_model(model, test_xs, **kwargs)['ys_pred']
raw_groundtruths = None if test_raw_assets is None else \
list(map(lambda asset: asset.raw_groundtruth, test_raw_assets))
test_stats = model.get_stats(test_ys['label'], test_ys_pred, ys_label_raw=raw_groundtruths)
log = 'Stats on testing data: {}'.format(model_class.format_stats_for_print(test_stats))
if logger:
logger.info(log)
else:
print(log)
if test_ax is not None:
test_content_ids = list(map(lambda asset: asset.content_id, test_assets))
model_class.plot_scatter(test_ax, test_stats, content_ids=test_content_ids)
test_ax.set_xlabel('True Score')
test_ax.set_ylabel("Predicted Score")
test_ax.grid()
test_ax.set_title("Dataset: {dataset}, Model: {model}\n{stats}".format(
dataset=test_dataset.dataset_name,
model=model.model_id,
stats=model_class.format_stats_for_plot(test_stats)
))
return train_fassembler, train_assets, train_stats, test_fassembler, test_assets, test_stats, model
def construct_kfold_list(assets, contentid_groups):
# construct cross validation kfold input list
content_ids = list(map(lambda asset: asset.content_id, assets))
kfold = []
for curr_content_group in contentid_groups:
curr_indices = indices(content_ids, lambda x: x in curr_content_group)
kfold.append(curr_indices)
return kfold
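# Worked example (illustrative): if the assets have content_ids [0, 0, 1, 2]
# and contentid_groups is [[0], [1, 2]], the resulting kfold list is
# [[0, 1], [2, 3]] -- one list of asset indices per content group.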
def cv_on_dataset(dataset, feature_param, model_param, ax, result_store,
contentid_groups, logger=None, aggregate_method=np.mean):
assets = read_dataset(dataset)
kfold = construct_kfold_list(assets, contentid_groups)
fassembler = FeatureAssembler(
feature_dict=feature_param.feature_dict,
feature_option_dict=None,
assets=assets,
logger=logger,
delete_workdir=True,
result_store=result_store,
optional_dict=None,
optional_dict2=None,
parallelize=True, fifo_mode=True,
# parallelize=False, fifo_mode=False, # VQM
)
fassembler.run()
results = fassembler.results
for result in results:
result.set_score_aggregate_method(aggregate_method)
model_class = TrainTestModel.find_subclass(model_param.model_type)
    # run nested kfold cv for each combination
cv_output = ModelCrossValidation.run_kfold_cross_validation(
model_class,
model_param.model_param_dict,
results,
kfold,
logger=logger,
)
print('Feature parameters: {}'.format(feature_param.feature_dict))
print('Model type: {}'.format(model_param.model_type))
print('Model parameters: {}'.format(model_param.model_param_dict))
print('Stats: {}'.format(model_class.format_stats_for_print(cv_output['aggr_stats'])))
if ax is not None:
model_class.plot_scatter(ax, cv_output['aggr_stats'], cv_output['contentids'])
ax.set_xlabel('True Score')
ax.set_ylabel("Predicted Score")
ax.grid()
ax.set_title("Dataset: {dataset}, Model: {model},\n{stats}".format(
dataset=dataset.dataset_name,
model=model_param.model_type,
stats=model_class.format_stats_for_plot(cv_output['aggr_stats'])
))
return assets, cv_output
def run_remove_results_for_dataset(result_store, dataset, executor_class):
assets = read_dataset(dataset)
executor = executor_class(assets=assets, logger=None, result_store=result_store)
executor.remove_results()
def run_vmaf_cv(train_dataset_filepath,
test_dataset_filepath,
param_filepath,
output_model_filepath=None,
**kwargs):
result_store_dir = kwargs['result_store_dir'] if 'result_store_dir' in kwargs else VmafConfig.file_result_store_path()
logger = get_stdout_logger()
result_store = FileSystemResultStore(result_store_dir)
train_dataset = import_python_file(train_dataset_filepath)
test_dataset = import_python_file(test_dataset_filepath) if test_dataset_filepath is not None else None
param = import_python_file(param_filepath)
# === plot scatter ===
nrows = 1
ncols = 2
fig, axs = plt.subplots(figsize=(5*ncols, 5*nrows), nrows=nrows, ncols=ncols)
train_test_vmaf_on_dataset(train_dataset, test_dataset, param, param, axs[0], axs[1],
result_store, parallelize=True, logger=None,
output_model_filepath=output_model_filepath,
**kwargs)
if 'xlim' in kwargs:
axs[0].set_xlim(kwargs['xlim'])
axs[1].set_xlim(kwargs['xlim'])
if 'ylim' in kwargs:
axs[0].set_ylim(kwargs['ylim'])
axs[1].set_ylim(kwargs['ylim'])
bbox = {'facecolor':'white', 'alpha':1, 'pad':20}
axs[0].annotate('Training Set', xy=(0.1, 0.85), xycoords='axes fraction', bbox=bbox)
axs[1].annotate('Testing Set', xy=(0.1, 0.85), xycoords='axes fraction', bbox=bbox)
plt.tight_layout()
# === clean up ===
close_logger(logger)
def run_vmaf_kfold_cv(dataset_filepath,
contentid_groups,
param_filepath,
aggregate_method,
result_store_dir=VmafConfig.file_result_store_path(),
):
logger = get_stdout_logger()
result_store = FileSystemResultStore(result_store_dir)
dataset = import_python_file(dataset_filepath)
param = import_python_file(param_filepath)
fig, ax = plt.subplots(figsize=(5, 5), nrows=1, ncols=1)
cv_on_dataset(dataset, param, param, ax, result_store, contentid_groups,
logger, aggregate_method)
ax.set_xlim([0, 120])
ax.set_ylim([0, 120])
plt.tight_layout()
# === clean up ===
close_logger(logger)
def explain_model_on_dataset(model, test_assets_selected_indexs,
test_dataset_filepath,
result_store_dir=VmafConfig.file_result_store_path()):
def print_assets(test_assets):
print('\n'.join(map(
lambda tasset: "Asset {i}: {name}".format(
i=tasset[0], name=get_file_name_without_extension(tasset[1].dis_path)),
enumerate(test_assets)
)))
test_dataset = import_python_file(test_dataset_filepath)
test_assets = read_dataset(test_dataset)
print_assets(test_assets)
print("Assets selected for local explanation: {}".format(
test_assets_selected_indexs))
result_store = FileSystemResultStore(result_store_dir)
test_assets = [test_assets[i] for i in test_assets_selected_indexs]
test_fassembler = FeatureAssembler(
feature_dict=model.model_dict['feature_dict'],
feature_option_dict=None,
assets=test_assets,
logger=None,
fifo_mode=True,
delete_workdir=True,
result_store=result_store,
optional_dict=None,
optional_dict2=None,
parallelize=True,
)
test_fassembler.run()
test_feature_results = test_fassembler.results
test_xs = model.get_xs_from_results(test_feature_results)
test_ys = model.get_ys_from_results(test_feature_results)
test_ys_pred = model.predict(test_xs)['ys_label_pred']
explainer = LocalExplainer(neighbor_samples=1000)
test_exps = explainer.explain(model, test_xs)
explainer.print_explanations(test_exps, assets=test_assets, ys=test_ys, ys_pred=test_ys_pred)
explainer.plot_explanations(test_exps, assets=test_assets, ys=test_ys, ys_pred=test_ys_pred)
DisplayConfig.show()
def generate_dataset_from_raw(raw_dataset_filepath, output_dataset_filepath, **kwargs):
if raw_dataset_filepath:
from sureal.subjective_model import DmosModel
subj_model_class = kwargs['subj_model_class'] if 'subj_model_class' in kwargs else DmosModel
content_ids = kwargs['content_ids'] if 'content_ids' in kwargs else None
asset_ids = kwargs['asset_ids'] if 'asset_ids' in kwargs else None
subjective_model = subj_model_class.from_dataset_file(raw_dataset_filepath,
content_ids=content_ids,
asset_ids=asset_ids)
subjective_model.run_modeling(**kwargs)
subjective_model.to_aggregated_dataset_file(output_dataset_filepath, **kwargs)
def run_vmaf_cv_from_raw(train_dataset_raw_filepath, test_dataset_raw_filepath,
param_filepath, output_model_filepath, **kwargs):
if 'train_quality_wh' in kwargs and kwargs['train_quality_wh'] is not None:
train_quality_width, train_quality_height = kwargs['train_quality_wh']
else:
train_quality_width = None
train_quality_height = None
if 'test_quality_wh' in kwargs and kwargs['test_quality_wh'] is not None:
test_quality_width, test_quality_height = kwargs['test_quality_wh']
else:
test_quality_width = None
test_quality_height = None
if 'train_transform_final' in kwargs and kwargs['train_transform_final'] is not None:
train_transform_final = kwargs['train_transform_final']
else:
train_transform_final = None
if 'test_transform_final' in kwargs and kwargs['test_transform_final'] is not None:
test_transform_final = kwargs['test_transform_final']
else:
test_transform_final = None
workspace_path = kwargs['workspace_path'] if 'workspace_path' in kwargs else VmafConfig.workspace_path()
train_output_dataset_filepath = os.path.join(workspace_path, 'dataset', 'train_dataset.py')
generate_dataset_from_raw(raw_dataset_filepath=train_dataset_raw_filepath,
output_dataset_filepath=train_output_dataset_filepath,
quality_width=train_quality_width,
quality_height=train_quality_height,
transform_final=train_transform_final,
**kwargs)
test_output_dataset_filepath = os.path.join(workspace_path, 'dataset', 'test_dataset.py') \
if test_dataset_raw_filepath is not None else None
generate_dataset_from_raw(raw_dataset_filepath=test_dataset_raw_filepath,
output_dataset_filepath=test_output_dataset_filepath,
quality_width=test_quality_width,
quality_height=test_quality_height,
transform_final=test_transform_final,
**kwargs)
run_vmaf_cv(
train_dataset_filepath=train_output_dataset_filepath,
test_dataset_filepath=test_output_dataset_filepath,
param_filepath=param_filepath,
output_model_filepath=output_model_filepath,
**kwargs
)
``` |
{
"source": "aachick/gol",
"score": 3
} |
#### File: aachick/gol/gol_test.py
```python
import pytest
from gol import Universe
@pytest.fixture
def original_universe():
"""Return a small universe with a set random seed."""
universe = Universe(5, 5, seed_of_life=1, threshold=0.5)
return universe
def test_universe_representation(original_universe: Universe):
"""Test that the universe is represented correctly and consistently."""
universe_str = str(original_universe)
universe_repr = repr(original_universe)
universe_display = original_universe.display()
assert universe_str == " ██ \n ██ \n█ █ \n█ ██ \n ██ \n"
assert universe_repr == universe_str
assert "\n".join(universe_display) + "\n" == universe_str
def test_universe_tick(original_universe: Universe):
"""Test that the ticking function works correctly."""
original_universe.tick()
# Original cells should be:
# [
# False, True, True, False, False,
# False, True, True, False, False,
# True, False, True, False, False,
# True, False, True, True, False,
# False, True, True, False, False,
# ]
# fmt: off
expected_cells = [
True, False, False, True, False,
True, False, False, True, False,
True, False, False, False, True,
True, False, False, True, True,
True, False, False, False, False,
]
# fmt: on
assert original_universe.cells == expected_cells
``` |
{
"source": "aachick/pacsanini",
"score": 3
} |
#### File: pacsanini/cli/db.py
```python
from typing import List
import click
from sqlalchemy import create_engine
from pacsanini.cli.base import config_option
from pacsanini.config import PacsaniniConfig
from pacsanini.db.utils import (
TABLES,
dump_database,
get_db_session,
initialize_database,
)
@click.command(name="init")
@config_option
@click.option(
"--force-init",
is_flag=True,
help="If set, force the creation of the database regardless of whether it exists.",
)
def init_cli(config: str, force_init: bool):
"""Initialize the database and its tables using the resources
value in the supplied configuration file.
"""
ext = config.rsplit(".", 1)[-1].lower()
load_func = (
PacsaniniConfig.from_json if ext == "json" else PacsaniniConfig.from_yaml
)
pacsanini_config = load_func(config)
engine = create_engine(pacsanini_config.storage.resources)
initialize_database(engine, force_init=force_init)
@click.command(name="dump")
@config_option
@click.option(
"-o",
"--output",
default=None,
help="If set, specify the output directory to write results to. They will be written to the current directory otherwise.",
)
@click.option(
"-t",
"--table",
type=click.Choice(TABLES.keys()),
multiple=True,
show_choices=True,
default=list(TABLES.keys()),
help="If specified, select one or more tables to dump in CSV format. The default is all tables.",
)
def dump_cli(config: str, output: str, table: List[str]):
"""Dump pacsanini database tables in CSV format."""
ext = config.rsplit(".", 1)[-1].lower()
load_func = (
PacsaniniConfig.from_json if ext == "json" else PacsaniniConfig.from_yaml
)
pacsanini_config = load_func(config)
with get_db_session(pacsanini_config.storage.resources) as session:
dump_database(session, output=output, tables=table)
@click.group(name="db")
def db_cli_group():
"""Perform database related commands."""
db_cli_group.add_command(init_cli)
db_cli_group.add_command(dump_cli)
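# Example invocations (illustrative): the entry-point name and configuration
# flag come from the package's setup and `config_option`, and are assumed
# here; valid table names depend on the keys of TABLES.
#
#     pacsanini db init --config config.yaml --force-init
#     pacsanini db dump --config config.yaml -o ./exports -t patients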
```
#### File: pacsanini/cli/pipeline.py
```python
import click
from pacsanini.cli.base import config_option
from pacsanini.pipeline import run_pacsanini_pipeline
@click.command(name="orchestrate")
@config_option
@click.option(
"-t",
"--threads",
type=int,
default=1,
show_default=True,
help="The number of threads to use (applicable if the backend is not a database).",
)
@click.option(
"--init-db/--no-init-db",
is_flag=True,
default=False,
show_default=True,
help="If --init-db is set and the results backend is a database: create the database.",
)
def orchestrate_cli(config: str, threads: int, init_db: bool):
"""Run the find-move-parse pipeline orchestrated by pacsanini."""
run_pacsanini_pipeline(config, nb_threads=threads, init_db=init_db)
```
#### File: src/pacsanini/config.py
```python
import json
import os
from datetime import datetime, time, timedelta
from typing import List, Optional, Union
import yaml
from pydantic import BaseModel, root_validator, validator
from pacsanini.convert import datetime2str, str2datetime
from pacsanini.models import DicomNode, QueryLevel, StorageSortKey
from pacsanini.parse import DicomTag, DicomTagGroup
DEFAULT_CONFIG_NAME = "pacsaninirc.yaml"
DEFAULT_SETTINGS_PATH = os.path.join(os.path.expanduser("~"), DEFAULT_CONFIG_NAME)
PACSANINI_CONF_ENVVAR = "PACSANINI_CONFIG"
class EmailConfig(BaseModel):
"""A class to store email configuration settings.
Attributes
----------
username : Optional[str]
The email account's name.
password : Optional[str]
The email account's password.
host : Optional[str]
The email server host.
port : Optional[int]
The port to send the email to.
"""
username: Optional[str] = ""
password: Optional[str] = ""
host: Optional[str] = "smtp.gmail.com"
port: Optional[int] = 465
class MoveConfig(BaseModel):
"""The MoveConfig class represents the possible settings that
are used when querying for data.
The start_time and end_time attributes correspond to the time
    in the day at which studies should be queried. This is to avoid
    saturating a PACS node during business hours. A time
instance or a string in the HH, HH:MM, or HH:MM:SS format are
accepted. Hours should be in the 24 hour format.
If the start_time is greater than the end_time, it is assumed
that the queries should be performed over two days. For example,
with start_time = "20:00" and end_time = "07:00", queries will
run from 20:00 on day 1 until 07:00 on day 2.
Attributes
----------
start_time : Optional[time]
The time of the day at which C-MOVE queries should start at.
If set, the end_time parameter should be set as well.
end_time : Optional[time]
The time of the day at which C-MOVE queries should end at.
If set, the start_time parameter should be set as well.
query_level : QueryLevel
The root model to use when querying for DICOM resources. The
default is STUDY.
"""
start_time: Optional[time] = None
end_time: Optional[time] = None
query_level: QueryLevel = QueryLevel.STUDY
@root_validator(pre=True)
def validate_start_and_end_time(
cls, values
): # pylint: disable=no-self-argument,no-self-use
"""Validate that both start_time and end_time
are set or that both are unset.
"""
start_time = values.get("start_time", None)
end_time = values.get("end_time", None)
if (start_time and not end_time) or (end_time and not start_time):
msg = "Both start_time and end_time parameters must be set or both must be unset."
raise ValueError(msg)
def validate_format(val):
if not val:
return None
if isinstance(val, time):
return val
return time.fromisoformat(val)
start_time = validate_format(start_time)
end_time = validate_format(end_time)
if start_time == end_time:
start_time, end_time = None, None
values["start_time"] = start_time
values["end_time"] = end_time
return values
def can_query(self) -> bool:
"""Return True if the current time of the day is between the
specified start_time and end_time.
"""
if self.start_time is None or self.end_time is None:
return True
now = self.now()
today = now - timedelta(
hours=now.hour,
minutes=now.minute,
seconds=now.second,
microseconds=now.microsecond,
)
start_td = timedelta(
hours=self.start_time.hour,
minutes=self.start_time.minute,
seconds=self.start_time.second,
microseconds=self.start_time.microsecond,
)
end_td = timedelta(
hours=self.end_time.hour,
minutes=self.end_time.minute,
seconds=self.end_time.second,
microseconds=self.end_time.microsecond,
)
if self.end_time > self.start_time:
upper_limit = today + end_td
lower_limit = today + start_td
return lower_limit < now < upper_limit
return now > (today + start_td) or now < (today + end_td)
def now(self) -> datetime:
"""Return the current datetime."""
return datetime.now()
class Config:
use_enum_values = True
        json_encoders = {time: lambda time_: time_.isoformat(timespec="seconds")}
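# Worked example (illustrative): with start_time="20:00" and end_time="07:00",
# MoveConfig.can_query() returns True at 22:30 or 03:00 and False at 12:00,
# because the query window wraps past midnight into the next day.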
class NetConfig(BaseModel):
"""The NetConfig class represents the possible DICOM network
configuration that can be made.
Attributes
----------
local_node : DicomNode
The local DICOM node's information configuration.
called_node : DicomNode
The configuration settings for the DICOM node to call
(eg: emit C-FIND, C-MOVE, or C-ECHO messages to).
dest_node : DicomNode
The destination node to send DICOM results from C-MOVE
results to. If unset, this will be equal to the local_node.
"""
local_node: DicomNode
called_node: DicomNode
dest_node: Optional[DicomNode] = None
@validator("called_node")
def validate_called_node(cls, v): # pylint: disable=no-self-argument,no-self-use
"""Validate that the called has correct network information."""
if bool(v.ip) is False or bool(v.port) is False:
err_msg = (
"The called DICOM node configuration must have a valid IP and port."
)
raise ValueError(err_msg)
return v
@root_validator(pre=True)
def validate_dest_node(cls, values): # pylint: disable=no-self-argument,no-self-use
"""Check if the provided dest_node value is None.
If true, set the dest_node to be equal to the local_node.
"""
if not values.get("dest_node"):
values["dest_node"] = values["local_node"]
return values
class FindConfig(BaseModel):
"""The FindConfig class is used to store settings that are
to be used for sending C-FIND requests.
Attributes
----------
query_level : QueryLevel
The query level to use when sending C-FIND messages. The
default is STUDY.
search_fields : List[str]
A list of DICOM tag fields to obtain values from each
returned result.
start_date : datetime
The date component of the query. Can be passed as a string
in which case it should be in a valid DICOM date or datetime
format (eg: YYYYMMDD).
end_date : Optional[datetime]
The upper date limit of the query. Can be passed as a string
in which case it should be in a valid DICOM date or datetime
format (eg: YYYYMMDD).
modality : Optional[str]
The modality to match for each query.
"""
query_level: QueryLevel = QueryLevel.STUDY
search_fields: List[str] = []
start_date: datetime
end_date: Optional[datetime] = None
modality: Optional[str] = ""
@validator("start_date", "end_date", pre=True)
def validate_date(cls, v): # pylint: disable=no-self-argument,no-self-use
"""Validate a date and accept string formats."""
if isinstance(v, str):
return str2datetime(v)
return v
class Config:
use_enum_values = True
json_encoders = {datetime: datetime2str}
class StorageConfig(BaseModel):
"""The StorageConfig indicates where DICOM files should be
persisted as a result of launching C-MOVE requests or a
C-STORESCP server.
Attributes
----------
resources : str
The file path to store results from C-FIND results.
resources_meta : str
The file path to store results from parsing DICOM files.
directory : str
The directory path to persist DICOM files under.
sort_by : str
An indication of how to store received DICOM files.
Accepted values are "PATIENT", "STUDY", and "IMAGE".
"""
resources: str
resources_meta: Optional[str] = "resources_meta.csv"
directory: str
sort_by: StorageSortKey = StorageSortKey.PATIENT
class Config:
use_enum_values = True
class PacsaniniConfig(BaseModel):
"""PacsaniniConfig represents the overall configuration
file that can be used to conveniently run pacsanini
functionalities.
Attributes
----------
find : Optional[FindConfig]
The application's find configuration.
move : Optional[MoveConfig]
The application's move configuration.
net : Optional[NetConfig]
The application's network configuration.
storage : Optional[StorageConfig]
The application's storage configuration.
tags : Optional[List[DicomTag]]
The application's DICOM tags parsing configuration.
email : Optional[EmailConfig]
The application's email settings.
"""
find: Optional[FindConfig] = None
move: Optional[MoveConfig] = None
net: Optional[NetConfig] = None
storage: Optional[StorageConfig] = None
tags: Optional[List[DicomTag]] = None
email: Optional[EmailConfig] = EmailConfig()
@classmethod
def from_json(cls, path: str):
"""Obtain a PacsaniniConfig instance from a json file."""
with open(path) as in_:
content = json.load(in_)
return cls(**content)
@classmethod
def from_yaml(cls, path: str):
"""Obtain a PacsaniniConfig instance from a yaml file."""
with open(path) as in_:
content = yaml.safe_load(in_.read())
return cls(**content)
def can_find(self) -> bool:
"""Return True if the current configuration is adequately
        set for emitting C-FIND requests; return False otherwise.
"""
return self.net is not None and self.find is not None
def can_move(self) -> bool:
"""Returns True if the move config, net config, and storage config
is not None.
"""
return (
self.move is not None and self.net is not None and self.storage is not None
)
def can_parse(self) -> bool:
"""Returns True if the tags config is not None -False otherwise."""
return self.tags is not None
def get_tags(self) -> Union[DicomTagGroup, None]:
"""Return the DICOMTagGroup instance associated
with the current configuration.
"""
if self.tags is None:
return None
return DicomTagGroup(tags=[tag_grp.dict() for tag_grp in self.tags])
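# Usage sketch (assuming a config file exists at the default location):
#
#     config = PacsaniniConfig.from_yaml(DEFAULT_SETTINGS_PATH)
#     if config.can_find():
#         ...  # emit C-FIND requests using config.net and config.find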
```
#### File: pacsanini/db/dcm2model.py
```python
from typing import Tuple, Union
from pydicom import Dataset, dcmread
from pacsanini.convert import agestr2years, dcm2dict, str2datetime
from pacsanini.db.models import Image, Patient, Series, Study, StudyFind
from pacsanini.parse import DicomTagGroup
def dcm2patient(dcm: Dataset, institution: str = None) -> Patient:
"""Convert a DICOM file to a Patient instance that can be inserted
in the database.
Parameters
----------
dcm : Dataset
The DICOM data to convert to a Patient instance.
institution : str
If set, add a specified institution name to the Patient
model. The default is None.
Returns
-------
Patient
The Patient model.
"""
tag_grp = DicomTagGroup(
tags=[
{"tag_name": "PatientID", "tag_alias": "patient_id"},
{"tag_name": "PatientName", "tag_alias": "patient_name", "callback": str},
{
"tag_name": "PatientBirthDate",
"tag_alias": "patient_birth_date",
"callback": str2datetime,
},
]
)
data = tag_grp.parse_dicom(dcm)
data["institution"] = institution
return Patient(**data)
def dcm2study(dcm: Dataset) -> Study:
"""Convert a DICOM file to a Study instance that can be inserted
in the database.
Parameters
----------
dcm : Dataset
The DICOM data to convert to a Study instance.
Returns
-------
Study
The Study model.
"""
tag_grp = DicomTagGroup(
tags=[
{"tag_name": "StudyInstanceUID", "tag_alias": "study_uid"},
{
"tag_name": "StudyDate",
"tag_alias": "study_date",
"callback": str2datetime,
},
{
"tag_name": "PatientAge",
"tag_alias": "patient_age",
"callback": agestr2years,
"default": -1,
},
{"tag_name": "AccessionNumber", "tag_alias": "accession_number"},
]
)
data = tag_grp.parse_dicom(dcm)
return Study(**data)
def dcm2study_finding(dcm: Dataset) -> StudyFind:
"""Convert a DICOM file to a StudyFind instance that can be inserted
in the database.
Parameters
----------
dcm : Dataset
The DICOM data to convert to a StudyFind instance.
Returns
-------
StudyFind
The StudyFind model.
"""
tag_grp = DicomTagGroup(
tags=[
{"tag_name": "PatientName", "tag_alias": "patient_name", "callback": str},
{"tag_name": "PatientID", "tag_alias": "patient_id"},
{"tag_name": "StudyInstanceUID", "tag_alias": "study_uid"},
{
"tag_name": "StudyDate",
"tag_alias": "study_date",
"callback": str2datetime,
},
{"tag_name": "AccessionNumber", "tag_alias": "accession_number"},
]
)
data = tag_grp.parse_dicom(dcm)
return StudyFind(**data)
def dcm2series(dcm: Dataset) -> Series:
"""Convert a DICOM file to a Series instance that can be inserted
in the database.
Parameters
----------
dcm : Dataset
The DICOM data to convert to a Series instance.
Returns
-------
Series
The Series model.
"""
tag_grp = DicomTagGroup(
tags=[
{"tag_name": "SeriesInstanceUID", "tag_alias": "series_uid"},
{"tag_name": "Modality", "tag_alias": "modality"},
]
)
data = tag_grp.parse_dicom(dcm)
return Series(**data)
def dcm2image(dcm: Dataset, institution: str = None, filepath: str = None) -> Image:
"""Convert a DICOM file to a Image instance that can be inserted
in the database.
Parameters
----------
dcm : Dataset
The DICOM data to convert to a Image instance.
institution : str
If set, add a specified institution name to the Image
model. The default is None.
filepath : str
If set, add the DICOM's filepath to the database. The default
is None.
Returns
-------
Image
The Image model.
"""
tag_grp = DicomTagGroup(
tags=[
{"tag_name": "PatientID", "tag_alias": "patient_id"},
{"tag_name": "StudyInstanceUID", "tag_alias": "study_uid"},
{
"tag_name": "StudyDate",
"tag_alias": "study_date",
"callback": str2datetime,
},
{"tag_name": "SeriesInstanceUID", "tag_alias": "series_uid"},
{"tag_name": "Modality", "tag_alias": "modality"},
{"tag_name": "SOPClassUID", "tag_alias": "sop_class_uid"},
{"tag_name": "SOPInstanceUID", "tag_alias": "image_uid"},
{"tag_name": "AcquisitionTime", "tag_alias": "acquisition_time"},
{"tag_name": "Manufacturer", "tag_alias": "manufacturer"},
{
"tag_name": "ManufacturerModelName",
"tag_alias": "manufacturer_model_name",
},
]
)
data = tag_grp.parse_dicom(dcm)
data["meta"] = dcm2dict(dcm, include_pixels=False)
data["institution"] = institution
data["filepath"] = filepath
return Image(**data)
def dcm2dbmodels(
dcm: Union[str, Dataset], institution: str = None, filepath: str = None
) -> Tuple[Patient, Study, Series, Image]:
"""Convert a DICOM file into the different database models that will be used
to insert the DICOM data into the database.
Parameters
----------
dcm : Union[str, Dataset]
The DICOM data to convert to a Patient, Study, Series, and Image instance.
institution : str
If set, add a specified institution name to the Patient
model. The default is None.
filepath : str
If set, add the DICOM's filepath to the database. The default
is None. If the input dcm parameter value is a string, filepath
will be set to this.
Returns
-------
Tuple[Patient, Study, Series, Image]
A 4-tuple corresponding to the image's
"""
if isinstance(dcm, str):
filepath = dcm
dcm = dcmread(dcm, stop_before_pixels=True)
pat = dcm2patient(dcm, institution=institution)
study = dcm2study(dcm)
series = dcm2series(dcm)
image = dcm2image(dcm, institution=institution, filepath=filepath)
return pat, study, series, image
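# Usage sketch (hypothetical file path): read a DICOM file once and obtain
# the four ORM models ready for a database session.
#
#     patient, study, series, image = dcm2dbmodels(
#         "/data/scan.dcm", institution="demo"
#     )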
```
#### File: pacsanini/db/models.py
```python
from datetime import datetime
from typing import List
from sqlalchemy import JSON, Column, DateTime, Float, ForeignKey, Integer, String
from sqlalchemy.orm import declarative_base, relationship
Base = declarative_base()
class StudyFind(Base):
"""Table corresponding to the studies that were found using
C-FIND operations.
"""
__tablename__ = "studies_find"
id = Column(Integer, primary_key=True)
patient_name = Column(String)
patient_id = Column(String)
study_uid = Column(String, index=True, unique=True)
study_date = Column(DateTime)
accession_number = Column(String)
retrieved_on = Column(DateTime, default=None)
found_on = Column(DateTime, default=datetime.utcnow)
study: "Study" = relationship("Study", back_populates="study_find")
def __repr__(self):
study_date = self.study_date.strftime("%Y%m%d")
return f"<StudyFind: pid={self.patient_id}, pn={self.patient_name}, sd={study_date}>"
@classmethod
def cfind_fields(cls) -> List[str]:
"""Returns the fields names that can be used for C-FIND queries."""
return [
"PatientName",
"PatientID",
"StudyInstanceUID",
"StudyDate",
"AccessionNumber",
]
class Patient(Base):
"""Table corresponding to patient-level data found in
DICOM files.
"""
__tablename__ = "patients"
id = Column(Integer, primary_key=True)
patient_id = Column(String, unique=True)
patient_name = Column(String)
patient_birth_date = Column(DateTime)
institution = Column(String)
def __repr__(self) -> str:
return f"<Patient: {self.patient_id}>"
class Study(Base):
"""Table corresponding to study-level data found in
DICOM files.
"""
__tablename__ = "studies"
id = Column(Integer, primary_key=True)
study_find_id = Column(Integer, ForeignKey("studies_find.id"), nullable=True)
patient_id = Column(Integer, ForeignKey("patients.id"))
study_uid = Column(String, unique=True)
study_date = Column(DateTime)
patient_age = Column(Integer, default=-1)
accession_number = Column(String)
study_find: "StudyFind" = relationship("StudyFind", back_populates="study")
def __repr__(self) -> str:
return f"<Study: {self.study_uid}>"
class Series(Base):
"""Table corresponding to series-level data found in
DICOM files.
"""
__tablename__ = "series"
id = Column(Integer, primary_key=True)
study_id = Column(Integer, ForeignKey("studies.id"))
series_uid = Column(String, unique=True)
modality = Column(String)
def __repr__(self) -> str:
return f"<Series: {self.series_uid}"
class Image(Base):
"""Table corresponding to the studies that were queried and
retrieved from the PACS.
"""
__tablename__ = "images"
id = Column(Integer, primary_key=True)
series_id = Column(Integer, ForeignKey("series.id"))
institution = Column(String)
patient_id = Column(String, index=True)
patient_name = Column(String)
study_uid = Column(String, index=True)
study_date = Column(DateTime)
series_uid = Column(String)
modality = Column(String)
sop_class_uid = Column(String)
image_uid = Column(String, unique=True)
acquisition_time = Column(Float, default=-1)
manufacturer = Column(String)
manufacturer_model_name = Column(String)
meta = Column(JSON, nullable=True)
filepath = Column(String, nullable=True)
def __repr__(self):
return f"<Image: {self.image_uid}>"
```
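A minimal usage sketch for the models above (the pacsanini.db.models import path and the in-memory SQLite URI are assumptions, not taken from the file itself):
```python
# Hypothetical sketch: create the tables defined above and insert one Patient row.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from pacsanini.db.models import Base, Patient  # assumed module path
engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)  # creates patients, studies, series, images, studies_find
session = sessionmaker(bind=engine)()
session.add(Patient(patient_id="PAT001", patient_name="DOE^JOHN"))
session.commit()
print(session.query(Patient).first())  # -> <Patient: PAT001>
session.close()
```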
#### File: pacsanini/io/base_parser.py
```python
import os
import queue
import threading
from collections.abc import Callable
from typing import Optional, Union
from pydicom import dcmread
from pacsanini.parse import DicomTagGroup
def _thread_worker(
parser: DicomTagGroup,
worker_queue: queue.Queue,
consumer_queue: queue.Queue,
stop_working: threading.Event,
include_path: bool = True,
):
while True:
try:
file_path = worker_queue.get(True, timeout=1)
if parser is not None:
result = parser.parse_dicom(file_path)
else:
result = {"dicom": dcmread(file_path, stop_before_pixels=True)}
if include_path:
result["dicom_path"] = file_path
consumer_queue.put(result)
except queue.Empty:
if stop_working.is_set():
break
except Exception: # pylint: disable=broad-except
# Catch all exceptions so that a particular thread worker
# doesn't fail and leave others with extra work.
pass
def _thread_consumer(
consumer_queue: queue.Queue,
stop_consuming: threading.Event,
callback: Callable,
callback_args: tuple = None,
callback_kwargs: dict = None,
):
if callback_args is None:
callback_args = ()
if callback_kwargs is None:
callback_kwargs = {}
while True:
try:
result = consumer_queue.get(True, timeout=1)
callback(result, *callback_args, **callback_kwargs)
except queue.Empty:
if stop_consuming.is_set():
break
except Exception: # pylint: disable=broad-except
pass
def _enqueue_files(src: Union[str, os.PathLike], worker_queue: queue.Queue):
"""Enqueue DICOM files into the worker queue."""
if os.path.isfile(src):
worker_queue.put(src)
return
for root, _, files in os.walk(src):
for fname in files:
path = os.path.join(root, fname)
worker_queue.put(path)
def parse_dir(
src: Union[str, os.PathLike],
parser: Optional[DicomTagGroup],
callback: Callable,
callback_args: tuple = None,
callback_kwargs: dict = None,
nb_threads: int = 1,
include_path: bool = True,
):
"""Parse a DICOM directory and return the passed results into the
provided callback function.
The callback function is responsible for consuming the results of
the parsed DICOM files.
Parameters
----------
src : Union[str, os.PathLike]
The source DICOM path or directory to parse recursively.
parser : Optional[DicomTagGroup]
The tags to get the DICOM tag values from. If this is None,
        the result passed to the callback function will be a dict
        containing a "dicom" key whose value is the corresponding
        pydicom.Dataset object.
callback : Callable
The callback functions to send results to for consumption.
The first argument of the function should be reserved for
the parsing result.
callback_args : tuple
Extra positional arguments to pass to the callback function.
callback_kwargs : dict
Extra keyword arguments to pass to the callback function.
nb_threads : int
The number of threads to use for the parsing of DICOM files.
include_path : bool
If True, add a "dicom_path" key to the results dict.
"""
if not os.path.exists(src):
raise FileNotFoundError(f"'{src}' does not exist.")
if nb_threads < 1:
raise ValueError("nb_threads must be greater than 0")
if not callable(callback):
raise ValueError("callback must be a callable.")
try:
stop_working = threading.Event()
stop_consuming = threading.Event()
worker_queue: queue.Queue = queue.Queue()
consumer_queue: queue.Queue = queue.Queue()
consumer_thread = threading.Thread(
target=_thread_consumer,
args=(consumer_queue, stop_consuming, callback),
kwargs={"callback_args": callback_args, "callback_kwargs": callback_kwargs},
daemon=True,
)
consumer_thread.start()
threads = []
for _ in range(nb_threads):
thread = threading.Thread(
target=_thread_worker,
args=(parser, worker_queue, consumer_queue, stop_working),
kwargs={"include_path": include_path},
daemon=True,
)
threads.append(thread)
thread.start()
_enqueue_files(src, worker_queue)
finally:
stop_working.set()
for worker in threads:
worker.join()
stop_consuming.set()
consumer_thread.join()
```
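A minimal sketch of driving parse_dir with a collecting callback (the /data/dicom path is a placeholder; with parser=None each result is a dict holding the raw pydicom Dataset):
```python
# Hypothetical sketch: gather the file paths of all parsed DICOM files.
from pacsanini.io.base_parser import parse_dir
results = []
def collect(result: dict):
    # result has a "dicom" Dataset and, with include_path=True, a "dicom_path" key
    results.append(result["dicom_path"])
parse_dir("/data/dicom", None, collect, nb_threads=4, include_path=True)
print(f"parsed {len(results)} files")
```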
#### File: pacsanini/io/io_parsers.py
```python
import csv
import json
from datetime import datetime
from os import PathLike
from typing import TextIO, Union
from pacsanini.io.base_parser import parse_dir
from pacsanini.parse import DicomTagGroup
def _write_results(result: dict, writer: csv.DictWriter):
    writer.writerow(result)
def parse_dir2csv(
src: Union[str, PathLike],
parser: DicomTagGroup,
dest: Union[str, PathLike, TextIO],
nb_threads: int = 1,
include_path: bool = True,
mode: str = "w",
):
"""Parse a DICOM directory and write results to a CSV
file.
Parameters
----------
src : Union[str, PathLike]
The DICOM file or directory to parse.
parser : DicomTagGroup
The DicomTagGroup instance specifying which DICOM
tags to parse and how.
dest : Union[str, PathLike, TextIO]
The destination path to write the results to.
nb_threads : int
The number of threads to use when parsing DICOM
files. The default is 1.
include_path : bool
If True, add a "dicom_path" key to the parsed results.
The default is True.
mode : str
Whether to write ("w") or append ("a") to the
destination file.
"""
fieldnames = [tag.tag_alias for tag in parser.tags]
if include_path:
fieldnames.append("dicom_path")
    if isinstance(dest, (str, PathLike)):
        with open(dest, mode, newline="") as output:
            writer = csv.DictWriter(output, fieldnames=fieldnames)
            if mode == "w":
                writer.writeheader()
            parse_dir(
                src,
                parser,
                _write_results,
                callback_args=(writer,),
                nb_threads=nb_threads,
                include_path=include_path,
            )
    else:
        writer = csv.DictWriter(dest, fieldnames=fieldnames)
        if mode == "w":
            writer.writeheader()
        parse_dir(
            src,
            parser,
            _write_results,
            callback_args=(writer,),
            nb_threads=nb_threads,
            include_path=include_path,
        )
def _append_results(result: dict, *, results_list: list):
results_list.append(result)
def _json_serializer(value):
if isinstance(value, datetime):
# Format datetime the same way that csv writers do.
return value.isoformat(sep=" ")
return value
def parse_dir2json(
src: Union[str, PathLike],
parser: DicomTagGroup,
dest: Union[str, PathLike, TextIO],
nb_threads: int = 1,
include_path: bool = True,
mode: str = "w",
):
"""Parse a DICOM directory and write results to a JSON
file.
Parameters
----------
src : Union[str, PathLike]
The DICOM file or directory to parse.
parser : DicomTagGroup
The DicomTagGroup instance specifying which DICOM
tags to parse and how.
dest : Union[str, PathLike, TextIO]
The destination path to write the results to.
nb_threads : int
The number of threads to use when parsing DICOM
files. The default is 1.
include_path : bool
If True, add a "dicom_path" key to the parsed results.
The default is True.
mode : str
Whether to write ("w") or append ("a") to the
destination file.
"""
fieldnames = [tag.tag_alias for tag in parser.tags]
if include_path:
fieldnames.append("dicom_path")
results: list = []
parse_dir(
src,
parser,
_append_results,
callback_kwargs={"results_list": results},
nb_threads=nb_threads,
include_path=include_path,
)
if isinstance(dest, (str, PathLike)):
if mode == "a":
mode = "r+"
with open(dest, mode) as output:
if mode == "r+":
                old_results = json.load(output)
                results += old_results["dicom_tags"]
                output.seek(0)
                output.truncate()
json.dump(
{"dicom_tags": results}, output, indent=2, default=_json_serializer
)
else:
json.dump({"dicom_tags": results}, dest, indent=2, default=_json_serializer)
```
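A hedged usage sketch for parse_dir2csv; the DicomTagGroup constructor arguments are an assumption inferred from the tag_alias attribute used above, not confirmed API:
```python
# Hypothetical sketch: dump two tags from a DICOM tree into a CSV file.
from pacsanini.io.io_parsers import parse_dir2csv
from pacsanini.parse import DicomTagGroup
parser = DicomTagGroup(tags=[  # assumed constructor schema
    {"tag_name": "PatientID", "tag_alias": "patient_id"},
    {"tag_name": "StudyInstanceUID", "tag_alias": "study_uid"},
])
parse_dir2csv("/data/dicom", parser, "tags.csv", nb_threads=4)
```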
#### File: pacsanini/net/c_find.py
```python
from csv import DictWriter
from datetime import datetime, timedelta
from string import ascii_lowercase
from typing import Generator, List, Union
from pydicom import Dataset
from pynetdicom import AE
from pynetdicom.sop_class import ( # pylint: disable=no-name-in-module
PatientRootQueryRetrieveInformationModelFind,
StudyRootQueryRetrieveInformationModelFind,
)
from pacsanini.db import DBWrapper, StudyFind, add_found_study
from pacsanini.models import DicomNode, QueryLevel
_SEARCH_FIELDS = ["Modality", "PatientName", "StudyDate"]
def find(
local_node: Union[DicomNode, dict],
called_node: Union[DicomNode, dict],
*,
query_level: Union[QueryLevel, str],
dicom_fields: List[str],
start_date: datetime,
end_date: datetime = None,
modality: str = "",
) -> Generator[Dataset, None, None]:
"""Find DICOM resources from the destination DICOM node using
the specified DICOM criteria.
    The dicom_fields parameter is used to ask the destination
DICOM node for additional information regarding results. If
the destination node does not return those values, default
values of None will be returned.
Parameters
----------
local_node : Union[DicomNode, dict]
The source/calling DICOM node that seeks to retrieve information
from the destination node.
called_node : Union[DicomNode, dict]
The destination/target node from which information is queried
from.
query_level : Union[QueryLevel, str]
The query level to use when asking for data to retrieve. This
can be "PATIENT" or "STUDY". According to this level, the values
returned for the dicom_fields you request may change.
dicom_fields : List[str]
A list of DICOM tags to get information from when the destination
node returns results.
start_date : datetime
The date for which the query should be made.
end_date : datetime
If set, queries will range from the start_date to the end_date.
The end_date parameter must therefore be greater or equal to the
start_date parameter.
modality : str
If set, specify the DICOM modality to get results for.
Yields
------
Dataset
Each result returned by the query made to the called_node
is yielded as a Dataset instance.
Raises
------
ValueError
A ValueError is raised if the called_node parameter does not
have set IP and port values or if the end_date parameter is
set and is smaller than the start_date parameter.
"""
if isinstance(local_node, dict):
local_node = DicomNode(**local_node)
if isinstance(called_node, dict):
called_node = DicomNode(**called_node)
if isinstance(query_level, str):
query_level = QueryLevel(query_level)
if query_level is QueryLevel.PATIENT:
query_root = PatientRootQueryRetrieveInformationModelFind
else:
query_root = StudyRootQueryRetrieveInformationModelFind
if not called_node.has_net_info:
raise ValueError(f"{called_node} does not have a network address.")
if end_date is None:
end_date = start_date
if end_date < start_date:
err_msg = (
f"The start date {start_date} cannot be greater"
f" than the end date {end_date}"
)
raise ValueError(err_msg)
ae = AE(ae_title=local_node.aetitle)
ae.add_requested_context(query_root)
current_date = start_date
date_increment = timedelta(days=15)
while current_date <= end_date:
if (current_date + date_increment) >= end_date:
upper_date = end_date
else:
upper_date = current_date + date_increment
if current_date == end_date:
requested_date = current_date.strftime("%Y%m%d")
else:
requested_date = (
f"{current_date.strftime('%Y%m%d')}-{upper_date.strftime('%Y%m%d')}"
)
for char in ascii_lowercase:
ds = Dataset()
ds.Modality = modality if modality else ""
ds.PatientName = f"{char}*"
ds.QueryRetrieveLevel = query_level.value
ds.StudyDate = requested_date
for field in dicom_fields:
if field not in _SEARCH_FIELDS:
setattr(ds, field, "")
assoc = ae.associate(called_node.ip, called_node.port)
try:
if assoc.is_established:
responses = assoc.send_c_find(ds, query_root)
for (status, identifier) in responses:
if status and identifier:
for field in list(dicom_fields) + _SEARCH_FIELDS:
if not hasattr(identifier, field):
setattr(identifier, field, None)
yield identifier
finally:
if assoc.is_alive():
assoc.release()
current_date += date_increment + timedelta(days=1)
def patient_find(
local_node: Union[DicomNode, dict],
called_node: Union[DicomNode, dict],
*,
dicom_fields: List[str],
start_date: datetime,
end_date: datetime = None,
modality: str = "",
) -> Generator[Dataset, None, None]:
"""Find DICOM resources from the destination DICOM node using the
specified DICOM criteria. Queries are made using the PATIENT
query retrieve level.
Parameters
----------
local_node : Union[DicomNode, dict]
The source/calling DICOM node that seeks to retrieve information
from the destination node.
called_node : Union[DicomNode, dict]
The destination/target node from which information is queried
from.
dicom_fields : List[str]
A list of DICOM tags to get information from when the destination
node returns results.
start_date : datetime
The date for which the query should be made.
end_date : datetime
If set, queries will range from the start_date to the end_date.
The end_date parameter must therefore be greater or equal to the
start_date parameter.
modality : str
If set, specify the DICOM modality to get results for.
Yields
------
Dataset
Each result returned by the query made to the called_node
is yielded as a Dataset instance.
Raises
------
ValueError
A ValueError is raised if the called_node parameter does not
have set IP and port values or if the end_date parameter is
set and is smaller than the start_date parameter.
"""
results = find(
local_node,
called_node,
query_level=QueryLevel.PATIENT,
dicom_fields=dicom_fields,
start_date=start_date,
end_date=end_date,
modality=modality,
)
    yield from results
def study_find(
local_node: Union[DicomNode, dict],
called_node: Union[DicomNode, dict],
*,
dicom_fields: List[str],
start_date: datetime,
end_date: datetime = None,
modality: str = "",
) -> Generator[Dataset, None, None]:
"""Find DICOM resources from the destination DICOM node using the
specified DICOM criteria. Queries are made using the STUDY
query retrieve level.
Parameters
----------
local_node : Union[DicomNode, dict]
The source/calling DICOM node that seeks to retrieve information
from the destination node.
called_node : Union[DicomNode, dict]
The destination/target node from which information is queried
from.
dicom_fields : List[str]
A list of DICOM tags to get information from when the destination
node returns results.
start_date : datetime
The date for which the query should be made.
end_date : datetime
If set, queries will range from the start_date to the end_date.
The end_date parameter must therefore be greater or equal to the
start_date parameter.
modality : str
If set, specify the DICOM modality to get results for.
Yields
------
Dataset
Each result returned by the query made to the called_node
is yielded as a Dataset instance.
Raises
------
ValueError
A ValueError is raised if the called_node parameter does not
have set IP and port values or if the end_date parameter is
set and is smaller than the start_date parameter.
"""
results = find(
local_node,
called_node,
query_level=QueryLevel.STUDY,
dicom_fields=dicom_fields,
start_date=start_date,
end_date=end_date,
modality=modality,
)
    yield from results
def patient_find2csv(
local_node: Union[DicomNode, dict],
called_node: Union[DicomNode, dict],
dest: str,
*,
dicom_fields: List[str],
start_date: datetime,
end_date: datetime = None,
modality: str = "",
):
"""Find DICOM resources from the destination DICOM node using the
specified DICOM criteria. Queries are made using the PATIENT
    query retrieve level. Returned results will be persisted to the
    dest file.
Parameters
----------
local_node : Union[DicomNode, dict]
The source/calling DICOM node that seeks to retrieve information
from the destination node.
called_node : Union[DicomNode, dict]
The destination/target node from which information is queried
from.
dest : str
The output path to write results to.
dicom_fields : List[str]
A list of DICOM tags to get information from when the destination
node returns results.
start_date : datetime
The date for which the query should be made.
end_date : datetime
If set, queries will range from the start_date to the end_date.
The end_date parameter must therefore be greater or equal to the
start_date parameter.
modality : str
If set, specify the DICOM modality to get results for.
Raises
------
ValueError
A ValueError is raised if the called_node parameter does not
have set IP and port values or if the end_date parameter is
set and is smaller than the start_date parameter.
"""
fields = _SEARCH_FIELDS + dicom_fields
with open(dest, "w", newline="") as out:
writer = DictWriter(out, fieldnames=fields)
writer.writeheader()
results_generator = patient_find(
local_node,
called_node,
dicom_fields=dicom_fields,
start_date=start_date,
end_date=end_date,
modality=modality,
)
for result in results_generator:
res_dict = {field: getattr(result, field) for field in fields}
writer.writerow(res_dict)
def study_find2csv(
local_node: Union[DicomNode, dict],
called_node: Union[DicomNode, dict],
dest: str,
*,
dicom_fields: List[str],
start_date: datetime,
end_date: datetime = None,
modality: str = "",
):
"""Find DICOM resources from the destination DICOM node using the
specified DICOM criteria. Queries are made using the STUDY
    query retrieve level. Returned results will be persisted to the
    dest file.
Parameters
----------
local_node : Union[DicomNode, dict]
The source/calling DICOM node that seeks to retrieve information
from the destination node.
called_node : Union[DicomNode, dict]
The destination/target node from which information is queried
from.
dest : str
The output path to write results to.
dicom_fields : List[str]
A list of DICOM tags to get information from when the destination
node returns results.
start_date : datetime
The date for which the query should be made.
end_date : datetime
If set, queries will range from the start_date to the end_date.
The end_date parameter must therefore be greater or equal to the
start_date parameter.
modality : str
If set, specify the DICOM modality to get results for.
Raises
------
ValueError
A ValueError is raised if the called_node parameter does not
have set IP and port values or if the end_date parameter is
set and is smaller than the start_date parameter.
"""
fields = _SEARCH_FIELDS + dicom_fields
with open(dest, "w", newline="") as out:
writer = DictWriter(out, fieldnames=fields)
writer.writeheader()
results_generator = study_find(
local_node,
called_node,
dicom_fields=dicom_fields,
start_date=start_date,
end_date=end_date,
modality=modality,
)
for result in results_generator:
res_dict = {field: getattr(result, field) for field in fields}
writer.writerow(res_dict)
def patient_find2sql(
local_node: Union[DicomNode, dict],
called_node: Union[DicomNode, dict],
conn_uri: str,
*,
start_date: datetime,
end_date: datetime = None,
modality: str = "",
create_tables: bool = False,
):
"""Find DICOM resources from the destination DICOM node using the
specified DICOM criteria. Queries are made using the PATIENT
query retrieve level. Returned results will be persisted in the
specified database.
Parameters
----------
local_node : Union[DicomNode, dict]
The source/calling DICOM node that seeks to retrieve information
from the destination node.
called_node : Union[DicomNode, dict]
The destination/target node from which information is queried
from.
conn_uri : str
The database's connection URI.
start_date : datetime
The date for which the query should be made.
end_date : datetime
If set, queries will range from the start_date to the end_date.
The end_date parameter must therefore be greater or equal to the
start_date parameter.
modality : str
If set, specify the DICOM modality to get results for.
create_tables : bool
If True, create the database tables before inserting the first
find result. The default is False.
Raises
------
ValueError
A ValueError is raised if the called_node parameter does not
have set IP and port values or if the end_date parameter is
set and is smaller than the start_date parameter.
"""
with DBWrapper(conn_uri, create_tables=create_tables) as db:
results_generator = patient_find(
local_node,
called_node,
dicom_fields=StudyFind.cfind_fields(),
start_date=start_date,
end_date=end_date,
modality=modality,
)
for result in results_generator:
add_found_study(db.conn(), result)
def study_find2sql(
local_node: Union[DicomNode, dict],
called_node: Union[DicomNode, dict],
conn_uri: str,
*,
start_date: datetime,
end_date: datetime = None,
modality: str = "",
create_tables: bool = False,
):
"""Find DICOM resources from the destination DICOM node using the
specified DICOM criteria. Queries are made using the STUDY
query retrieve level. Returned results will be persisted in the
specified database.
Parameters
----------
local_node : Union[DicomNode, dict]
The source/calling DICOM node that seeks to retrieve information
from the destination node.
called_node : Union[DicomNode, dict]
The destination/target node from which information is queried
from.
conn_uri : str
The database's connection URI.
start_date : datetime
The date for which the query should be made.
end_date : datetime
If set, queries will range from the start_date to the end_date.
The end_date parameter must therefore be greater or equal to the
start_date parameter.
modality : str
If set, specify the DICOM modality to get results for.
create_tables : bool
If True, create the database tables before inserting the first
find result. The default is False.
Raises
------
ValueError
A ValueError is raised if the called_node parameter does not
have set IP and port values or if the end_date parameter is
set and is smaller than the start_date parameter.
"""
with DBWrapper(conn_uri, create_tables=create_tables) as db:
results_generator = study_find(
local_node,
called_node,
dicom_fields=StudyFind.cfind_fields(),
start_date=start_date,
end_date=end_date,
modality=modality,
)
for result in results_generator:
add_found_study(db.conn(), result)
```
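A usage sketch for study_find (the node dictionaries are placeholders for real AE titles and addresses):
```python
# Hypothetical sketch: query a PACS for one day of CT studies.
from datetime import datetime
from pacsanini.net.c_find import study_find
local = {"aetitle": "MY_SCU", "ip": "127.0.0.1", "port": 11112}
pacs = {"aetitle": "PACS", "ip": "pacs.example.com", "port": 104}
for ds in study_find(
    local,
    pacs,
    dicom_fields=["StudyInstanceUID", "AccessionNumber"],
    start_date=datetime(2021, 1, 1),
    modality="CT",
):
    print(ds.StudyInstanceUID, ds.AccessionNumber)
```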
#### File: pacsanini/net/c_store.py
```python
import os
from typing import Generator, Tuple, Union
from pydicom import Dataset, dcmread
from pydicom.errors import InvalidDicomError
from pydicom.uid import (
DeflatedExplicitVRLittleEndian,
ExplicitVRBigEndian,
ExplicitVRLittleEndian,
ImplicitVRLittleEndian,
)
from pynetdicom import AE
from pynetdicom.association import Association
from pynetdicom.presentation import StoragePresentationContexts
from pacsanini.models import DicomNode
def send_dicom(
dcm_path: str,
*,
src_node: Union[DicomNode, dict],
dest_node: Union[DicomNode, dict],
) -> Generator[Tuple[str, Dataset], None, None]:
"""Send one or multiple DICOM files from the source node
to the dest node. If the dcm_path is a directory, non-DICOM
files will be ignored.
Parameters
----------
dcm_path : str
The path to the DICOM file to send or the DICOM directory
(in which case DICOM files will be collected recursively).
src_node : Union[DicomNode, dict]
The source DICOM node to use for sending the DICOM data.
dest_node : Union[DicomNode, dict]
The destination DICOM node to send the DICOM data to.
Yields
------
Generator[Tuple[str, Dataset], None, None]
A 2-tuple corresponding to the DICOM file's path and the
associated status of the C-STORE operation as a Dataset.
"""
if isinstance(src_node, dict):
src_node = DicomNode(**src_node)
if isinstance(dest_node, dict):
dest_node = DicomNode(**dest_node)
if os.path.isfile(dcm_path):
dcm_files = [dcm_path]
else:
dcm_files = []
append = dcm_files.append
for root, _, files in os.walk(dcm_path):
for fname in files:
append(os.path.join(root, fname))
ae = AE(ae_title=src_node.aetitle)
transfer_syntax = [
ExplicitVRLittleEndian,
ImplicitVRLittleEndian,
DeflatedExplicitVRLittleEndian,
ExplicitVRBigEndian,
]
for ctx in StoragePresentationContexts:
ae.add_requested_context(ctx.abstract_syntax, transfer_syntax)
    assoc: Union[Association, None] = None
try:
assoc = ae.associate(dest_node.ip, dest_node.port, ae_title=dest_node.aetitle)
if assoc.is_established:
for path in dcm_files:
try:
dcm = dcmread(path)
yield path, assoc.send_c_store(dcm)
except InvalidDicomError:
pass
finally:
if assoc is not None:
assoc.release()
```
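A usage sketch for send_dicom (node values are placeholders); each yielded status is the C-STORE response Dataset:
```python
# Hypothetical sketch: push a directory of DICOM files and print per-file status.
from pacsanini.net.c_store import send_dicom
src = {"aetitle": "MY_SCU", "ip": "127.0.0.1", "port": 11112}
dest = {"aetitle": "STORE_SCP", "ip": "192.168.1.10", "port": 104}
for path, status in send_dicom("/data/dicom", src_node=src, dest_node=dest):
    print(path, hex(status.Status) if status else "no response")
```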
#### File: pacsanini/net/storescp.py
```python
import os
from functools import partial
from typing import Any, Callable, Dict, List, Union
from warnings import simplefilter
from loguru import logger
from pydicom import Dataset
from pynetdicom import AE, AllStoragePresentationContexts, evt
from pynetdicom.events import Event
from pynetdicom.status import Status
from sqlalchemy import exc
from sqlalchemy.orm import Session
from pacsanini.db.crud import add_image, update_retrieved_study
from pacsanini.models import DicomNode, StorageSortKey
# Ignore 'Starting in pydicom 3.0, Dataset.file_meta must be a FileMetaDataset class instance'
# as long as we stay on pydicom 2.X
simplefilter("ignore", category=DeprecationWarning)
Status.add("UNABLE_TO_DECODE", 0xC215)
Status.add("UNABLE_TO_PROCESS", 0xC216)
Status.add("UNABLE_TO_RECORD", 0xC217)
def default_store_handle(
event: Event,
data_dir: str = "",
sort_by: StorageSortKey = StorageSortKey.PATIENT,
db_session: Session = None,
callbacks: List[Callable[[Any], Any]] = None,
) -> int:
"""Handle a C-STORE request event by writing the received DICOM file
to the data_dir in the way specified by sort_by.
Parameters
----------
event : Event
The C-STORE event to handle.
data_dir : str
The directory to write results under.
sort_by : StorageSortKey
The organization to follow when writing DICOM files to disk.
callbacks : List[Callable[[Any], Any]]
If supplied pass the received DICOM file to the callable as
a positional argument (the first one) to each one of the
callables for processing.
Returns
-------
int
The reception status.
"""
try:
ds: Dataset = event.dataset
ds.file_meta = event.file_meta
except: # pylint: disable=bare-except
logger.warning("Unable to decode received DICOM")
return Status.UNABLE_TO_DECODE # pylint: disable=no-member
if StorageSortKey.PATIENT == sort_by:
dest = os.path.join(
data_dir,
ds.PatientID,
ds.StudyInstanceUID,
ds.SeriesInstanceUID,
ds.SOPInstanceUID,
)
elif StorageSortKey.STUDY == sort_by:
dest = os.path.join(
data_dir, ds.StudyInstanceUID, ds.SeriesInstanceUID, ds.SOPInstanceUID
)
else:
dest = os.path.join(data_dir, ds.SOPInstanceUID)
dest += ".dcm"
try:
dcm_dir = os.path.dirname(dest)
os.makedirs(dcm_dir, exist_ok=True)
ds.save_as(dest, write_like_original=False)
logger.info(f"{ds.SOPInstanceUID} is persisted.")
except OSError:
logger.warning(f"Failed to write {ds.StudyInstanceUID} to disk")
return Status.UNABLE_TO_PROCESS # pylint: disable=no-member
if db_session is not None:
try:
add_image(db_session, ds, filepath=dest)
update_retrieved_study(db_session, ds.StudyInstanceUID)
except exc.SQLAlchemyError as err:
logger.warning(f"Failed to update database due to {err}")
return Status.UNABLE_TO_RECORD # pylint: disable=no-member
if callbacks is not None:
for func in callbacks:
func(ds)
return 0x0000
class StoreSCPServer:
"""The StoreSCPServer class provides a way to run a storescp server
that can be used to receive DICOM files and write them locally.
Attributes
----------
node : Union[DicomNode, dict]
The DICOM node information to use when running the server.
data_dir : str
The path to the top-level directory where DICOM files should
be written to. The default is the current directory.
sort_by : StorageSortKey
The method by which DICOM files should be written to disk.
db_session : Session
Optional. If specified, received studies will be parsed and
persisted to the provided database. The default is None.
callbacks : List[Callable[[Any], Any]]
If set, pass a list of callables that will be called on the
DICOM file after it is received and persisted to disk.
"""
def __init__(
self,
node: Union[DicomNode, dict],
data_dir: str = "",
sort_by: StorageSortKey = StorageSortKey.PATIENT,
db_session: Session = None,
callbacks: List[Callable[[Any], Any]] = None,
):
if isinstance(node, dict):
node = DicomNode(**node)
if not node.has_port():
raise ValueError(f"{node} must have a set port to listen to.")
self.node = node
self.data_dir = data_dir
self.sort_by = sort_by
kwargs: Dict[str, Any] = {"data_dir": self.data_dir, "sort_by": self.sort_by}
if db_session is not None:
kwargs["db_session"] = db_session
if callbacks is not None:
kwargs["callbacks"] = callbacks
handler = partial(default_store_handle, **kwargs)
handlers = [(evt.EVT_C_STORE, handler)]
self.handlers = handlers
self.scp = None
def __enter__(self):
self.run(block=False)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type:
logger.warning(f"Shutting down server due to {exc_type}")
logger.warning(f"Exception value: {exc_val}")
logger.warning(f"Exception traceback: {exc_tb}")
self.shutdown()
def __repr__(self) -> str:
return self.__str__()
def __str__(self) -> str:
return f"<{self.__class__.__qualname__}: {self.node} - {self.data_dir}>"
def run(self, block: bool = False):
"""Run the storescp server in a non-blocking way."""
if self.scp is not None:
raise RuntimeError(f"A current SCP instance is already running for {self}.")
ae = AE(ae_title=self.node.aetitle)
ae.supported_contexts = AllStoragePresentationContexts
if self.data_dir:
os.makedirs(self.data_dir, exist_ok=True)
logger.debug(f"Starting SCP server: {self}")
if not block:
self.scp = ae.start_server(
("", self.node.port),
block=False,
evt_handlers=self.handlers,
ae_title=self.node.aetitle,
)
else:
ae.start_server(
("", self.node.port),
block=True,
evt_handlers=self.handlers,
ae_title=self.node.aetitle,
)
def shutdown(self):
"""Shutdown the running scp server."""
if self.scp is not None:
logger.debug(f"Stopping SCP server: {self}")
self.scp.shutdown()
self.scp = None
def run_server(
node: Union[DicomNode, dict],
data_dir: str = "",
sort_by: StorageSortKey = StorageSortKey.PATIENT,
callbacks: List[Callable[[Any], Any]] = None,
block: bool = False,
) -> Union[StoreSCPServer, None]:
"""Instantiate and run a storescp server using the provided
    configuration. The server will run in a detached thread unless
    block is True.
    Parameters
    ----------
    node : Union[DicomNode, dict]
The DICOM node information to use when running the server.
data_dir : str
The path to the top-level directory where DICOM files should
be written to. The default is the current directory.
sort_by : StorageSortKey
The method by which DICOM files should be written to disk.
callbacks : List[Callable[[Any], Any]]
If set, pass a list of callables that will be called on the
DICOM file after it is received and persisted to disk.
block : bool
If False, the default, run the storescp server in a different
thread. If True, the running server will block the current
thread. In this case, a KeyboardInterrupt is needed to stop
the server.
Returns
-------
Union[StoreSCPServer, None]
The running StoreSCPServer instance if block is set to False
    (in which case you must subsequently call the shutdown method)
or None if the server is in blocking mode.
"""
server = StoreSCPServer(
node, data_dir=data_dir, sort_by=sort_by, callbacks=callbacks
)
if block:
server.run(block=True)
return None
server.run(block=False)
return server
```
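A usage sketch for run_server (node values are placeholders; sort_by controls the on-disk layout of received files):
```python
# Hypothetical sketch: receive studies for one minute, then shut the server down.
import time
from pacsanini.models import StorageSortKey
from pacsanini.net.storescp import run_server
node = {"aetitle": "STORE_SCP", "ip": "", "port": 11112}
server = run_server(node, data_dir="incoming", sort_by=StorageSortKey.STUDY)
try:
    time.sleep(60)  # keep receiving for one minute
finally:
    server.shutdown()
```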
#### File: tests/db/crud_test.py
```python
from datetime import datetime, timedelta
from typing import Tuple
import pytest
from pydicom import FileDataset
from sqlalchemy.orm import Session
from pacsanini.db import crud
from pacsanini.db.models import Image, StudyFind
@pytest.fixture
def add_studies_to_find(sqlite_session: Session):
"""Add studies in the database that need to be found."""
study1 = StudyFind(
patient_name="patient1",
study_uid="study1",
study_date=datetime.now(),
accession_number="accession1",
)
study2 = StudyFind(
patient_name="patient1",
study_uid="study2",
study_date=datetime.now(),
accession_number="accession2",
found_on=datetime.utcnow(),
retrieved_on=datetime.utcnow(),
)
sqlite_session.add(study1)
sqlite_session.add(study2)
sqlite_session.commit()
yield study1, study2
sqlite_session.delete(study1)
sqlite_session.delete(study2)
sqlite_session.commit()
@pytest.mark.db
def test_add_image(dicom: FileDataset, sqlite_session: Session):
"""Test that adding an image to the database works well."""
image1 = crud.add_image(sqlite_session, dicom)
assert isinstance(image1, Image)
assert image1.image_uid == dicom.SOPInstanceUID
assert len(sqlite_session.query(Image).all()) == 1
image2 = crud.add_image(sqlite_session, dicom)
assert image2 is None
assert len(sqlite_session.query(Image).all()) == 1
@pytest.mark.db
def test_add_found_study(dicom: FileDataset, sqlite_session: Session):
"""Test that adding a study finding (from a C-FIND request works
well.
"""
dcm_finding = dicom
study_finding1 = crud.add_found_study(sqlite_session, dcm_finding)
assert isinstance(study_finding1, StudyFind)
assert study_finding1.study_uid == dcm_finding.StudyInstanceUID
assert len(sqlite_session.query(StudyFind).all()) == 1
study_finding2 = crud.add_found_study(sqlite_session, dcm_finding)
assert study_finding2 is None
assert len(sqlite_session.query(StudyFind).all()) == 1
@pytest.mark.db
def test_update_study(
add_studies_to_find: Tuple[StudyFind, StudyFind], sqlite_session: Session
):
"""Test that studies in the studies_find table are correctly updated."""
_, study_to_update = add_studies_to_find
before = datetime.utcnow()
updated_study = crud.update_retrieved_study(
sqlite_session, study_to_update.study_uid
)
assert updated_study is not None and isinstance(updated_study, StudyFind)
assert updated_study.study_uid == study_to_update.study_uid
assert (
before - timedelta(seconds=1)
< updated_study.retrieved_on
< before + timedelta(seconds=1)
)
@pytest.mark.db
def test_get_studies_to_retrieve(
add_studies_to_find: Tuple[StudyFind, StudyFind], sqlite_session: Session
):
"""Test that only studies that have not yet been retrieved are
returned.
"""
studies_to_find = crud.get_studies_to_move(sqlite_session)
assert len(studies_to_find) == 1
expected_study, _ = add_studies_to_find
found_study = studies_to_find[0]
assert found_study.study_uid == expected_study.study_uid
study_uids_to_find = crud.get_study_uids_to_move(sqlite_session)
    assert len(study_uids_to_find) == 1
assert study_uids_to_find[0] == expected_study.study_uid
```
#### File: tests/db/parser_test.py
```python
import os
from typing import List
import pytest
from pydicom import FileDataset
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from pacsanini.db.models import Image
from pacsanini.db.parser import parse_dir2sql
@pytest.mark.db
def test_parse_sql2db(data_dir: str, sqlite_db_path: str, dicom: FileDataset):
"""Test that parsing DICOM data and persisting it into the
database works well.
"""
parse_dir2sql(data_dir, sqlite_db_path, institution_name="foobar")
engine = create_engine(sqlite_db_path)
Session = sessionmaker(bind=engine)
session = Session()
results: List[Image] = session.query(Image).all()
assert len(results) > 1
for result in results:
assert os.path.exists(result.filepath)
assert result.institution == "foobar"
file_count = 0
for _, _, files in os.walk(data_dir):
file_count += len([f for f in files if f.endswith("dcm")])
assert len(results) == file_count
res: Image = (
session.query(Image).filter(Image.image_uid == dicom.SOPInstanceUID).first()
)
assert res
session.close()
engine.dispose()
```
#### File: tests/net/c_echo_test.py
```python
import pytest
from pynetdicom import AE, ALL_TRANSFER_SYNTAXES, evt
from pynetdicom.sop_class import ( # pylint: disable=no-name-in-module
VerificationSOPClass,
)
from pacsanini.net.c_echo import echo
@pytest.fixture
def echoscp(test_dest_node: dict):
"""Simple echoscp server for testing purposes."""
def handle_cecho(event):
return 0x0000
ae = AE(ae_title=test_dest_node["aetitle"].encode())
ae.add_supported_context(VerificationSOPClass, ALL_TRANSFER_SYNTAXES)
server = None
try:
server = ae.start_server(
("", test_dest_node["port"]),
evt_handlers=[(evt.EVT_C_ECHO, handle_cecho)],
block=False,
)
yield ae
finally:
if server is not None:
server.shutdown()
@pytest.mark.net
def test_c_echo_invalid_node(test_src_node: dict):
"""Test that if the destination node does not have correct
information, the c_echo methods returns an error.
"""
with pytest.raises(ValueError):
echo(test_src_node, test_src_node)
@pytest.mark.net
def test_c_echo_unreachable_node(test_src_node: dict):
"""Test that if the destination cannot be reached,
a value of -1 is returned.
"""
unreachable_node = test_src_node.copy()
unreachable_node["ip"] = "www.localhost.com"
unreachable_node["port"] = 11118
result = echo(test_src_node, unreachable_node)
assert result == -1
@pytest.mark.net
def test_c_echo(echoscp: AE, test_src_node: dict, test_dest_node: dict):
"""Test that sending a C-ECHO message to a functional
DICOM node works correctly.
"""
result = echo(test_src_node, test_dest_node)
assert result == 0
```
#### File: tests/net/c_move_test.py
```python
import os
import pytest
from pydicom import dcmread
from pydicom.dataset import Dataset
from pynetdicom import AE, StoragePresentationContexts, evt
from pynetdicom.events import Event
from pynetdicom.sop_class import ( # pylint: disable=no-name-in-module
PatientRootQueryRetrieveInformationModelMove,
StudyRootQueryRetrieveInformationModelMove,
)
from pynetdicom.transport import ThreadedAssociationServer
from pacsanini.net import c_move
@pytest.fixture(scope="module")
def dcm(dicom_path: str):
"""Return a test DICOM file."""
return dcmread(dicom_path, stop_before_pixels=True)
@pytest.mark.net
@pytest.mark.skip(reason="Not ready")
class TestCMove:
"""Test that emitting C-MOVE requests functions correctly."""
def setup(self): # pylint: disable=attribute-defined-outside-init
"""Setup the server."""
self.scp: ThreadedAssociationServer = None
self.testing_node = {"aetitle": "pacsanini_testing", "ip": "", "port": 11114}
def teardown(self):
"""Ensure that the server is shutdown."""
if self.scp is not None:
self.scp.shutdown()
self.scp = None
def test_patient_move(self, dcm: Dataset, tmpdir: os.PathLike):
"""Test that moving patients functions correctly."""
def handle_cmove_request(event: Event):
if event.dataset is None:
status = Dataset()
status.Status = 0xFF01
yield status, dcm
return
ds = event.dataset
status = Dataset()
assert "QueryRetrieveLevel" in ds
yield ("localhost", self.testing_node["port"])
if ds.QueryRetrieveLevel == "PATIENT":
assert ds.PatientID == dcm.PatientID
if ds.QueryRetrieveLevel == "STUDY":
assert ds.StudyInstanceUID == dcm.StudyInstanceUID
yield 1
yield 0xFF00, dcm
handlers = [(evt.EVT_C_MOVE, handle_cmove_request)]
ae = AE()
ae.requested_contexts = StoragePresentationContexts
ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
ae.add_supported_context(StudyRootQueryRetrieveInformationModelMove)
self.scp = ae.start_server(
("", 11114),
ae_title=b"pacsanini_testing",
evt_handlers=handlers,
block=False,
)
results = c_move.move_studies(
{"aetitle": "pacsanini_testing", "port": 11112},
{"aetitle": "pacsanini_testing", "ip": "localhost", "port": 11114},
study_uids=[dcm.StudyInstanceUID],
directory=str(tmpdir),
)
next(results)
expected_path = os.path.join(
str(tmpdir),
dcm.PatientID,
dcm.StudyInstanceUID,
dcm.SeriesInstanceUID,
f"{dcm.SOPInstanceUID}.dcm",
)
assert os.path.exists(expected_path)
result_dcm = dcmread(expected_path)
        assert isinstance(result_dcm, Dataset)
```
#### File: tests/net/conftest.py
```python
import os
import pytest
from pydicom import Dataset, dcmread
from pynetdicom import AE, ALL_TRANSFER_SYNTAXES, AllStoragePresentationContexts, evt
from pynetdicom.events import Event
from pynetdicom.sop_class import ( # pylint: disable=no-name-in-module
PatientRootQueryRetrieveInformationModelFind,
PatientRootQueryRetrieveInformationModelMove,
StudyRootQueryRetrieveInformationModelFind,
StudyRootQueryRetrieveInformationModelMove,
VerificationSOPClass,
)
@pytest.fixture
def test_dest_node() -> dict:
"""Return a DICOM node as a dict that can
be used to test the net functionalities.
"""
return {"aetitle": "pacsanini_testing_server", "ip": "localhost", "port": 11112}
@pytest.fixture
def test_src_node() -> dict:
"""Return a DICOM node as a dict that can
be used to initiate network queries.
"""
return {"aetitle": "pacsanini_testing", "ip": 11114}
@pytest.fixture
def test_dicom_server(test_dest_node: dict, data_dir: str):
"""Yield a mock DICOM server that can be used for testing."""
dicom_dir = os.path.join(data_dir, "dicom-files")
ae = AE(ae_title=test_dest_node["aetitle"])
ae.add_supported_context(VerificationSOPClass, ALL_TRANSFER_SYNTAXES)
for context in AllStoragePresentationContexts:
ae.add_supported_context(
context.abstract_syntax,
ALL_TRANSFER_SYNTAXES,
scp_role=True,
scu_role=False,
)
ae.add_supported_context(PatientRootQueryRetrieveInformationModelFind)
ae.add_supported_context(PatientRootQueryRetrieveInformationModelMove)
ae.add_supported_context(StudyRootQueryRetrieveInformationModelFind)
ae.add_supported_context(StudyRootQueryRetrieveInformationModelMove)
def handle_cfind(event: Event, data_dir: str):
model = event.request.AffectedSOPClassUID
if model not in ["PATIENT", "STUDY"]:
yield 0xC320, None
return
results = []
for root, _, files in os.walk(data_dir):
for name in files:
path = os.path.join(root, name)
dcm = dcmread(path, stop_before_pixels=True)
ds = Dataset()
is_ok = False
for key, value in event.identifier.items():
tag_name = value.name
if value.value:
search_val = value.value
if tag_name == "StudyDate" and "-" in search_val:
lower_date, upper_date = (
search_val.split("-")[0],
search_val.split("-")[1],
)
                            is_ok = lower_date <= getattr(dcm, "StudyDate", "") <= upper_date
else:
is_ok = getattr(dcm, tag_name, None) == search_val
setattr(ds, tag_name, getattr(dcm, tag_name, None))
if is_ok:
results.append(ds)
for res in results:
yield 0xFF00, res
def handle_cmove(event: Event, data_dir: str):
yield "localhost", "11114", {"contexts": []}
yield 0
yield 0xFE00, None
return
handlers = [
(evt.EVT_C_FIND, handle_cfind, [data_dir]),
(evt.EVT_C_MOVE, handle_cmove, [data_dir]),
]
server = None
try:
server = ae.start_server(
("", test_dest_node["port"]), evt_handlers=handlers, block=False
)
yield ae
finally:
if server is not None:
server.shutdown()
``` |
{
"source": "aachick/pycli",
"score": 2
} |
#### File: pycli/docs/build.py
```python
from pathlib import Path
CDIR = Path(__file__).parent
PROJECT_DIR = CDIR / ".."
def main():
readme = (PROJECT_DIR / "README.md").read_text()
(PROJECT_DIR / "docs" / "index.md").write_text(readme, encoding="utf-8")
if __name__ == "__main__":
main()
```
#### File: pycli/examples/decorated.py
```python
from typing import List
from pycli import cli
class MyClass:
def __init__(self, a: str, b: int, c: List[int] = None):
"""MyClass does a bunch of things and I would like to get
instances of it from the CLI directly.
Parameters
----------
a
The a variable does this.
b
The b variable does that.
c
The c variable is a list of integers.
"""
self.a = a
self.b = b
self.c = c
def __str__(self):
return f"MyClass(a={self.a}, b={self.b}, c={self.c})"
@classmethod
def from_cli(cls, a: str):
"""Obtain a MyClass instance through an alternative method.
Parameters
----------
a
The a variable that does stuff.
"""
return cls(a, 42, c=["20", "30"])
@cli(MyClass, constructor="from_cli")
def foo(obj: MyClass):
print(f"The object: {obj}")
foo()
``` |
{
"source": "AAchintha97/artificial-intelligence",
"score": 3
} |
#### File: AAchintha97/artificial-intelligence/pass.py
```python
import pyttsx3 #pip install pyttsx3
import speech_recognition as sr #pip install speechRecognition
import os
# import datetime
# import wikipedia #pip install wikipedia
# import webbrowser
# import pywhatkit as kit
# import os
# import smtplib
# import random
# import pyautogui
# import psutil
# import pyjokes
code = ['Hey, you have to tell a passcode to access Jarvis! Please tell the passcode when you see "Listening..." in the console!']
mauka = ['The password is wrong, please repeat!']
engine = pyttsx3.init('sapi5')
voices = engine.getProperty('voices')
# print(voices[1].id)
engine.setProperty('voice', voices[1].id)
def speak(audio):
engine.say(audio)
engine.runAndWait()
def ask():
speak(code)
def chance():
speak(mauka)
def takeCommand():
#It takes microphone input from the user and returns string output
r = sr.Recognizer()
with sr.Microphone() as source:
print("Listening...")
r.pause_threshold = 1
audio = r.listen(source)
try:
print("Recognizing...")
query = r.recognize_google(audio, language='en-in')
print(f"User said: {query}\n")
except Exception as e:
# print(e)
print("Say that again please...")
return "None"
return query
if __name__ == "__main__":
ask()
while True:
# if 1:
query = takeCommand().lower()
if 'ben' in query:
os.system('python main.py')
exit()
else:
            speak('Your passcode is not correct!')
chance()
``` |
{
"source": "aachong/fairseq",
"score": 2
} |
#### File: fairseq/criterions/sequence_risk.py
```python
import math
import torch.nn.functional as F
from fairseq import utils,metrics
from fairseq.criterions import register_criterion
from fairseq.criterions.fairseq_criterion import FairseqSequenceCriterion
# from ..tasks.translation_struct import TranslationStructuredPredictionTask
from fairseq.drc_utils import dprint
from fairseq import drc_utils
@register_criterion('sequence_risk')
class SequenceRiskCriterion(FairseqSequenceCriterion):
def __init__(self,task):
super().__init__(task)
from fairseq.tasks.translation_struct import TranslationStructuredPredictionTask
if not isinstance(task, TranslationStructuredPredictionTask):
raise Exception(
'sequence_risk criterion requires `--task=translation_struct`'
)
@staticmethod
def add_args(parser):
"""Add criterion-specific arguments to the parser."""
# fmt: off
parser.add_argument('--normalize-costs', action='store_true',
help='normalize costs within each hypothesis')
# fmt: on
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
"""
Do forward and backward, and return the loss as computed by *criterion*
for the given *model* and *sample*.
Args:
sample (dict): the mini-batch. The format is defined by the
:class:`~fairseq.data.FairseqDataset`.
model (~fairseq.models.BaseFairseqModel): the model
criterion (~fairseq.criterions.FairseqCriterion): the criterion
optimizer (~fairseq.optim.FairseqOptimizer): the optimizer
ignore_grad (bool): multiply loss by 0 if this is set to True
Returns:
tuple:
- the loss
- the sample size, which is used as the denominator for the
gradient
- logging outputs to display while training
"""
"""
sample (dict): a mini-batch with the following keys:
- `id` (LongTensor): example IDs in the original input order
- `ntokens` (int): total number of tokens in the batch
- `net_input` (dict): the input to the Model, containing keys:
- `src_tokens` (LongTensor): a padded 2D Tensor of tokens in
the source sentence of shape `(bsz, src_len)`. Padding will
appear on the left if *left_pad_source* is ``True``.
- `src_lengths` (LongTensor): 1D Tensor of the unpadded
lengths of each source sentence of shape `(bsz)`
- `prev_output_tokens` (LongTensor): a padded 2D Tensor of
tokens in the target sentence, shifted right by one position
for input feeding/teacher forcing, of shape `(bsz,
tgt_len)`. This key will not be present if *input_feeding*
is ``False``. Padding will appear on the left if
*left_pad_target* is ``True``.
- `target` (LongTensor): a padded 2D Tensor of tokens in the
target sentence of shape `(bsz, tgt_len)`. Padding will appear
on the left if *left_pad_target* is ``True``.
"""
bsz = len(sample['hypos'])
nhypos = len(sample['hypos'][0])
sample: dict
# get costs for hypotheses using --seq-scorer (defaults to 1. - BLEU)
        # compute the cost of each hypothesis for every sentence: (batch size, beam size)
costs = self.task.get_costs(sample)
# costs = costs*0.1
        # this argument can't be read from the parser here, so it is hard-coded; the argument-reading mechanism is unclear
self.normalize_costs = False
if self.normalize_costs:
unnormalized_costs = costs.clone()
max_costs = costs.max(dim=1, keepdim=True)[0]
min_costs = costs.min(dim=1, keepdim=True)[0]
costs = (costs - min_costs) / \
(max_costs - min_costs).clamp_(min=1e-6)
else:
unnormalized_costs = None
# generate a new sample from the given hypotheses
        # flatten the multiple hypotheses (b, n) per source sentence into one dimension (b*n)
new_sample = self.task.get_new_sample_for_hypotheses(sample)
hypotheses = new_sample['target'].view(bsz, nhypos, -1, 1)#bsz,hpsz,seq_len,1
hypolen = hypotheses.size(2)
pad_mask = hypotheses.ne(self.task.target_dictionary.pad()) #bsz,hpsz,seq_len,1
lengths = pad_mask.sum(dim=2).float() #bsz,hpsz,1
        # max-tokens gets multiplied by 12; set to 1000, there are now 12000 tokens, excluding padding
#dprint(lengths=lengths,end_is_stop=True,shape=lengths.shape,sum=lengths.sum())
net_output = model(**new_sample['net_input'])
lprobs = model.get_normalized_probs(net_output, log_probs=True)
lprobs = lprobs.view(bsz, nhypos, hypolen, -1)
scores = lprobs.gather(3, hypotheses) #bsz,hpsz,seq_len,1
scores *= pad_mask.float()
avg_scores = scores.sum(dim=2) / lengths
# avg_scores = avg_scores*0.005
probs = F.softmax(avg_scores, dim=1).squeeze(-1)
        # probs.shape = (batch size, beam size)
loss = (probs * costs).sum()
sample_size = bsz
assert bsz == utils.item(costs.size(dim=0))
logging_output = {
'loss': utils.item(loss.data),
'num_cost': costs.numel(),
'ntokens': sample['ntokens'],
'nsentences': bsz,
'sample_size': sample_size,
'htokens':lengths.sum()
}
def add_cost_stats(costs, prefix=''):
logging_output.update({
prefix + 'sum_cost': utils.item(costs.sum()),
prefix + 'min_cost': utils.item(costs.min(dim=1)[0].sum()),
prefix + 'cost_at_1': utils.item(costs[:, 0].sum()),
})
add_cost_stats(costs)
if unnormalized_costs is not None:
add_cost_stats(unnormalized_costs, 'unnormalized_')
return loss, sample_size, logging_output
@staticmethod
def reduce_metrics(logging_outputs):
"""Aggregate logging outputs from data parallel training."""
ntokens = sum(log.get('ntokens', 0) for log in logging_outputs)
nsentences = sum(log.get('nsentences', 0) for log in logging_outputs)
sample_size = sum(log.get('sample_size', 0) for log in logging_outputs)
num_costs = sum(log.get('num_cost', 0) for log in logging_outputs)
agg_outputs = {
'loss': sum(log.get('loss', 0) for log in logging_outputs) / sample_size,
'ntokens': ntokens,
'nsentences': nsentences,
}
all_loss = sum(log.get('loss', 0) for log in logging_outputs)
metrics.log_scalar('loss', all_loss/sample_size , sample_size, round=3)
def add_cost_stats(prefix=''):
agg_outputs.update({
prefix + 'avg_cost': sum(log.get(prefix + 'sum_cost', 0) for log in logging_outputs) / num_costs,
prefix + 'min_cost': sum(log.get(prefix + 'min_cost', 0) for log in logging_outputs) / nsentences,
prefix + 'cost_at_1': sum(log.get(prefix + 'cost_at_1', 0) for log in logging_outputs) / nsentences,
})
add_cost_stats()
if any('unnormalized_sum_cost' in log for log in logging_outputs):
add_cost_stats('unnormalized_')
return agg_outputs
```
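The core of the criterion is an expected cost: a softmax over length-normalized hypothesis scores, weighted by per-hypothesis costs. A standalone sketch with dummy tensors (shapes follow the comments above; this recreates the objective, not the fairseq code path):
```python
# Standalone sketch of the sequence-risk objective with dummy values.
import torch
import torch.nn.functional as F
bsz, nhypos = 2, 3
avg_scores = torch.randn(bsz, nhypos)  # length-normalized log-probs per hypothesis
costs = torch.rand(bsz, nhypos)        # e.g. 1 - sentence-BLEU per hypothesis
probs = F.softmax(avg_scores, dim=1)   # distribution over hypotheses per sentence
loss = (probs * costs).sum()           # expected cost, summed over the batch
print(loss.item())
```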
#### File: fairseq/tmp/drc_utils.py
```python
import logging
import os
import sys
logging.basicConfig(
format='%(asctime)s | %(levelname)s | %(name)s | %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=os.environ.get('LOGLEVEL', 'INFO').upper(),
stream=sys.stdout,
)
# step 1: create a logger
logger = logging.getLogger('drc_utils')
def dprint(*args, **kwargs):
log = ''
for i in args:
log += f' {i} |'
for i in kwargs.items():
log += f' {i[0]} = {i[1]} |'
logger.info(log)
```
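Usage sketch for dprint; positional values and keyword pairs are joined into a single log line (the timestamp shown is illustrative):
```python
from fairseq.drc_utils import dprint
dprint("step", 3, loss=0.42, lr=1e-4)
# 2024-01-01 00:00:00 | INFO | drc_utils |  step | 3 | loss = 0.42 | lr = 0.0001 |
```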
#### File: tmp/hidden_normalization/tmp.py
```python
import torch
import torch.nn as nn
import plotly.graph_objs as go
import plotly
import plotly.io as pio
x = torch.randn(5,1000,100)
class LayerNorm(nn.Module):
"Construct a layernorm module (See citation for details)."
def __init__(self, features, eps=1e-6):
super(LayerNorm, self).__init__()
self.a_2 = nn.Parameter(torch.ones(features))
self.b_2 = nn.Parameter(torch.zeros(features))
self.eps = eps
def forward(self, x):
print(self.a_2.shape)
mean = x.mean(-1, keepdim=True)
std = x.std(-1, keepdim=True)
return self.a_2 * (x - mean) / (std + self.eps) + self.b_2
m = nn.LayerNorm(x.shape[1:])
x = m(x)
x.detach_()
trace0 = go.Histogram(x=x[0].reshape(-1))
data = [trace0]
fig_spx = go.Figure(data=data)
# fig_spx.show()
``` |
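Continuing from the definitions above, a quick comparison of the hand-rolled LayerNorm against nn.LayerNorm over the last dimension; note that torch.std defaults to the unbiased estimator while nn.LayerNorm normalizes with the biased variance, so a small difference is expected:
```python
# Sketch: compare the custom LayerNorm above with the built-in one.
y = torch.randn(5, 100)
custom = LayerNorm(100)      # the class defined above
builtin = nn.LayerNorm(100)
print((custom(y) - builtin(y)).abs().max())  # small but nonzero
```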
{
"source": "aachong/SUDA-newStudents",
"score": 3
} |
#### File: text-classification/example/main.py
```python
import dataLoader
import avgModule
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from utils import LabelSmoothing
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
train_data = dataLoader.train_data
test_data = dataLoader.test_data
vocab_size = dataLoader.MAX_WORD_SIZE
index2label = dataLoader.index2label
index2word = dataLoader.index2word
embedding_size = 128
pad_idx = 0
output_size = 5
model = avgModule.avgModule(
vocab_size, embedding_size, pad_idx, output_size).to(device)
# for p in model.parameters():
# if p.dim() > 1:
# nn.init.xavier_uniform(p)
criterion = nn.CrossEntropyLoss().to(device)
# criterion = LabelSmoothing().to(device)
optimizer = optim.Adam(model.parameters())
train_data[0][2]
def accuracy(preds, y):
f_preds = preds.max(1)[1]
correct = (f_preds == y).float()
acc = sum(correct)/len(correct)
return acc
def evaluate(model, criterion, data):
epoch_acc = 0.0
epoch_loss = 0.0
for (x, length, y) in data:
x = torch.from_numpy(x).long().to(device)
y = torch.from_numpy(y).long().to(device)
preds = model(x)
loss = criterion(preds, y)
acc = accuracy(preds, y)
epoch_acc += acc
epoch_loss += loss
    print(f'test set: accuracy: {epoch_acc/len(data)}, loss: {epoch_loss/len(data)}')
return epoch_acc/len(data)
def train(model, criterion, optimizer, data):
epochs = 100
maxv = 0.4
for epoch in range(epochs):
epoch_loss = 0
epoch_acc = 0
for (x, length, y) in data:
x = torch.from_numpy(x).long().to(device)
y = torch.from_numpy(y).long().to(device)
preds = model(x)
loss = criterion(preds, y)
acc = accuracy(preds, y)
optimizer.zero_grad()
loss.backward()
optimizer.step()
epoch_loss += loss
epoch_acc += acc
with torch.no_grad():
model.eval()
tmp = evaluate(model, criterion, test_data)
model.train()
if(tmp > maxv):
maxv = tmp
torch.save(model.state_dict(), 'avgModel.pt')
print(
            f'Epoch: {epoch}, accuracy: {epoch_acc/len(data)}, loss: {epoch_loss/len(data)}')
train(model, criterion, optimizer, train_data)
model.load_state_dict(torch.load('avgModel.pt'))
evaluate(model, criterion, test_data)
it = iter(model.parameters())
para = next(it)
``` |
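A hypothetical inference sketch reusing the trained model and the vocab mappings loaded above; sentence_ids is a placeholder batch of word indices:
```python
with torch.no_grad():
    model.eval()
    sentence_ids = torch.zeros(1, 20).long().to(device)  # stand-in input indices
    pred = model(sentence_ids).max(1)[1].item()
    print(index2label[pred])  # map predicted class index back to its label
```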
{
"source": "Aachuma/Drawchitecture",
"score": 2
} |
#### File: Aachuma/Drawchitecture/drawchitecture.py
```python
bl_info = {
"name": "Drawchitecture",
"description": "creates temporary workplanes by strokes or points for drawing in 3D with the grease pencil",
"author": "<NAME>",
"version": (1, 1),
"blender": (2, 80, 0),
"location": "View3D",
# "warning": "", # used for warning icon and text in addons panel
# "wiki_url": ""
# "tracker_url": "",
"support": "TESTING",
"category": "Paint"
}
import bpy
import math
import mathutils
from bpy.types import Panel
from math import *
from mathutils import Vector, Matrix
import numpy as np
bpy.types.Scene.gp_active = bpy.props.StringProperty(name='gp_active', description='saves last used GP',
default='empty', options={'HIDDEN'})
bpy.types.Scene.del_stroke = bpy.props.BoolProperty(name='del_stroke', description='V/H/3D: deletes last stroke',
default=False, options={'HIDDEN'})
bpy.types.Scene.expand_system = bpy.props.BoolProperty(name='expand_system', description='expands system tools',
default=True, options={'HIDDEN'})
bpy.types.Scene.expand_grid = bpy.props.BoolProperty(name='expand_grid', description='expands grid settings',
default=True, options={'HIDDEN'})
bpy.types.Scene.grid_scale = bpy.props.FloatVectorProperty(name='grid_scale',
description='saves the grid size of the workplane',
default=(1.0, 1.0, 0))
bpy.types.Scene.grid_count = bpy.props.IntVectorProperty(name='grid_count',
description='saves the grid size of the workplane',
default=(100, 100, 0))
bpy.types.Scene.plane_location = bpy.props.FloatVectorProperty(name='plane_location',
description='global memory for wp location',
default=(0.0, 0.0, 0.0))
def update_offset(self, context):
"""updates the position of the plane when the Factor in UI is change
"""
if 'workplane_TEMPORARY' in (obj.name for obj in bpy.data.objects):
wp = bpy.data.objects['workplane_TEMPORARY']
# rotation in euler
eu = wp.rotation_euler
# offset factor in UI
factor_offset = bpy.context.scene.plane_offset
# Defining Vector for Translation in Z Axis and rotating it to be the normal of the plane
vec_offset = Vector((0, 0, factor_offset))
vec_offset.rotate(eu)
loc = bpy.context.scene.plane_location
vec_loc = Vector((loc[0], loc[1], loc[2]))
wp.location = vec_loc + vec_offset
bpy.types.Scene.plane_offset = bpy.props.FloatProperty(name='plane_offset',
description='plane offset in normal-direction of plane',
default=0.0,
update=update_offset)
def cross(a, b):
""" simple cross product formula for calculating normal vector
"""
c = Vector((a[1] * b[2] - a[2] * b[1],
a[2] * b[0] - a[0] * b[2],
a[0] * b[1] - a[1] * b[0]))
return c
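# Note: mathutils Vectors already provide this as a.cross(b); the explicit
# formula above is kept for clarity. For example,
# cross(Vector((1, 0, 0)), Vector((0, 1, 0))) == Vector((0, 0, 1)).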
def activate_gp():
"""activate last GP or create GP
"""
if bpy.context.scene.gp_active == 'empty':
# if gp objects exist choose random gp object if not yet initialized as active gp object
for obj in bpy.data.objects:
if obj.type == 'GPENCIL':
bpy.context.scene.gp_active = obj.name
bpy.context.view_layer.objects.active = bpy.data.objects[obj.name]
break
# if no gp objects exist add new gp object
if bpy.context.scene.gp_active == 'empty':
print('activate_gp: no gp object detected, creating new GP')
add_GP()
return {'GP added'}
else:
name_active = bpy.context.scene.gp_active
# if there is an object with the same name as the saved GP, activate it
if (name_active in (gp_obj.name for gp_obj in bpy.data.objects)):
bpy.context.view_layer.objects.active = bpy.data.objects[name_active]
return {'FINISHED'}
else:
print('activate_gp: gp object not found, creating new GP')
add_GP()
return {'GP added'}
def add4arrays():
""" add 4x Array-modifier to active Object (Plane) to achieve grid-like-Workplane
"""
obj = bpy.context.active_object
step = radians(90)
for i in range(0, 2):
modifier = obj.modifiers.new(name='AR' + str(i), type='ARRAY')
modifier.count = 100
modifier.relative_offset_displace[0] = cos(step * i)
modifier.relative_offset_displace[1] = sin(step * i)
for i in range(2, 4):
modifier = obj.modifiers.new(name='AR' + str(i), type='ARRAY')
modifier.count = 2
modifier.relative_offset_displace[0] = cos(step * i)
modifier.relative_offset_displace[1] = sin(step * i)
obj.modifiers['AR0'].count = bpy.context.scene.grid_count[0]
obj.modifiers['AR1'].count = bpy.context.scene.grid_count[1]
def add_GP():
"""Create standard GP object
"""
deselect_all()
a = []
b = []
name = gpencil_obj_name()
# list of grease_pencil object names before adding new one
for o in bpy.data.grease_pencil:
a.append(o.name)
# adding new GP Object
bpy.ops.object.gpencil_add(location=(0, 0, 0), rotation=(0, 0, 0), type='EMPTY')
# empty Grease Pencil Object at 0,0,0 otherwise gp stroke point coordinates are offset
# name + Number by counting all other GP Objects
bpy.context.view_layer.objects.active.name = name
# lock in place at 0,0,0 because point.coordinates refer to GP Origin
bpy.context.view_layer.objects.active.lock_location = [True for x in range(3)]
# find out the name of newly created grease_pencil object to rename it properly
for o in bpy.data.grease_pencil:
b.append(o.name)
newgpname = list(set(b) - set(a))
# name + Number same as GP Object
bpy.data.grease_pencil[newgpname[0]].name = name
save_active_gp()
# name + Number by counting all other grease_pencil objects
# bpy.data.grease_pencil[newgpname[0]].name = gpname()
def add_workplane_3p():
"""Creates Plane through 3 selected points of active GP Object (selected in Editmode)
"""
selected_points = []
if bpy.context.view_layer.objects.active.type == 'GPENCIL':
# name of the active object (Type Gpencil Object)
name_active = bpy.context.view_layer.objects.active.name
else:
name_active = bpy.context.scene.gp_active
if (name_active in (gp_pen.name for gp_pen in bpy.data.grease_pencil)):
gp_pen = bpy.data.grease_pencil[name_active]
if gp_pen.layers.active:
if gp_pen.layers.active.active_frame.strokes:
for stroke in gp_pen.layers.active.active_frame.strokes:
for point in stroke.points:
if point.select:
selected_points.append(point.co)
print('add_workplane_3p: selected_points:')
print(selected_points)
if len(selected_points) == 0:
print('no point selected')
elif len(selected_points) == 1:
print('1 point selected - creating horizontal plane')
select_p1 = selected_points[-1]
plane_array(select_p1, (0, 0, 0), '1p')
gpencil_paint_mode()
elif len(selected_points) == 2:
print('2 points selected - creating plane through 2 points')
select_p1 = selected_points[-1]
select_p2 = selected_points[-2]
plane_array(select_p1, select_p2, '3d')
gpencil_paint_mode()
elif len(selected_points) >= 3:
print('3 or more points selected - creating plane through 3 points')
select_p1 = selected_points[-1]
select_p2 = selected_points[-2]
select_p3 = selected_points[-3]
v1 = select_p2 - select_p1
# print(v1)
v2 = select_p3 - select_p1
# print(v2)
v_normal = Vector((cross(v1, v2)))
# print(v_normal)
p_normal = select_p1 + v_normal
plane_array(select_p1, p_normal, '3p')
gpencil_paint_mode()
return {'FINISHED'}
def angle_between(v1, v2):
""" Returns the angle in radians between vectors 'v1' and 'v2'
"""
v1_u = unit_vector(v1)
v2_u = unit_vector(v2)
angle = np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))
return angle
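# Illustrative (not called by the add-on): perpendicular unit vectors give a
# right angle, e.g. angle_between((1, 0, 0), (0, 1, 0)) ~= pi / 2.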
def angle_between_3d(z_dif, v_xparallel):
""" Returns the angle in radians between vectors 'v1' and 'v2'
v2 mostly used as vector parallel to x-y-plane for calculating x rotation
"""
mag = np.sqrt(v_xparallel.dot(v_xparallel))
angle_rad = atan(z_dif / mag)
return angle_rad
def angle_between_z(v1, v2):
""" returns the angle in radians between vectors 'v1' and 'v2' for z
"""
v1_2d_z = Vector((v1[0], v1[1]))
v2_2d_z = Vector((v2[0], v2[1]))
angle_z = angle_between(v1_2d_z, v2_2d_z)
return angle_z
def calc_location_2p(point_a, point_b):
""" calculates midpoint of line between two input points
"""
loc = []
loc.append((point_a[0] + point_b[0]) / 2)
loc.append((point_a[1] + point_b[1]) / 2)
loc.append((point_a[2] + point_b[2]) / 2)
return loc
def calc_rotation_2p_zh(point_a, point_b):
""" returns rotation vector for a horizontal plane by 2 points
"""
if point_a[0] > point_b[0]:
v_2p = point_b - point_a
else:
v_2p = point_a - point_b
v_y = Vector((0, 1, 0))
z = angle_between_z(v_y, v_2p)
return ((0, 0, z))
def calc_rotation_2p_zv(point_a, point_b):
""" returns rotation vector of a Plane by 2 points
first: set y rotation to 90°
then: rotation in z
"""
if point_a[0] > point_b[0]:
v_2p = point_b - point_a
else:
v_2p = point_a - point_b
v_y = Vector((0, 1, 0))
y = 90 * math.pi / 180
z = angle_between_z(v_y, v_2p)
return ((0, y, z))
def calc_rotation_2p_3d(point_a, point_b):
"""returns rotation vector for plane by 2 Points
first: rotation in z
    then: rotation in x by z difference and projected distance of points"""
if point_a[0] > point_b[0]:
v_2p = point_b - point_a
z_dif = point_b[2] - point_a[2]
else:
v_2p = point_a - point_b
z_dif = point_a[2] - point_b[2]
v_y = Vector((0, 1, 0))
v_xparallel = v_2p
v_xparallel[2] = 0
x = angle_between_3d(z_dif, v_xparallel)
z = angle_between_z(v_y, v_2p)
return ((x, 0, z))
def calc_rotation_3p(point_a, point_b):
"""returns rotation vector for plane by 3 Points
first: rotation in z
then: rotation in x with the normal-vector of plane
adding 90° to rotation for final plane
"""
if point_a[0] > point_b[0]:
v_2p = point_b - point_a
z_dif = point_b[2] - point_a[2]
else:
v_2p = point_a - point_b
z_dif = point_a[2] - point_b[2]
v_y = Vector((0, 1, 0))
v_xparallel = v_2p
v_xparallel[2] = 0
# adding 90 degrees to the x rotation because it is the normal vector
ortho = 90 * math.pi / 180
x = ortho + angle_between_3d(z_dif, v_xparallel)
# print(x)
z = angle_between_z(v_y, v_2p)
# print(z)
return ((x, 0, z))
def deselect_all():
"""deselects every object
"""
if not bpy.context.mode == 'OBJECT':
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.select_all(action='DESELECT')
def deselect_all_gp():
"""deselects all gp vertices / strokes
"""
if not bpy.context.mode == 'EDIT_GPENCIL':
bpy.ops.object.mode_set(mode='EDIT_GPENCIL')
bpy.ops.gpencil.select_all(action='DESELECT')
def find_3dview_space():
"""returns 3D_View and its screen space
"""
area = None
for a in bpy.data.window_managers[0].windows[0].screen.areas:
if a.type == 'VIEW_3D':
area = a
break
if area:
space = area.spaces[0]
else:
space = bpy.context.space_data
return space
def gpencil_obj_name():
"""Generates Name for new GP object based on existing GP objects
"""
# for o in bpy.data.objects:
# if o.type == 'GPENCIL':
# num = num + 1
# list all existing GP objects
namelist = [gp_obj.name for gp_obj in bpy.data.objects if gp_obj.type == 'GPENCIL']
num = 1
name = 'Drawing ' + str(num)
    # as long as name+num is already taken, count up num
while name in namelist:
num = num + 1
name = 'Drawing ' + str(num)
return name
def gpencil_paint_mode():
"""Gpencil has to be selected! activates DRAW mode / GPENCIL_PAINT mode, unless it's already active
"""
if not bpy.context.mode == 'PAINT_GPENCIL':
bpy.ops.object.mode_set(mode='PAINT_GPENCIL')
return {'FINISHED'}
def laststroke():
"""returns last stroke of active Greasepencil object
returns 'No GP object active' when no GP Obj is active
    returns 'Names not equal' if the data.objects name and the data.grease_pencil name of the active GP object differ
"""
if bpy.context.view_layer.objects.active.type == 'GPENCIL':
# name of the active object (Type Gpencil Object)
name_active = bpy.context.view_layer.objects.active.name
if (name_active in (gp_pen.name for gp_pen in bpy.data.grease_pencil)):
gp_pen = bpy.data.grease_pencil[name_active]
if gp_pen.layers.active:
if gp_pen.layers.active.active_frame.strokes:
ls = gp_pen.layers.active.active_frame.strokes[-1]
return ls
else:
print('laststroke: active GP Obj has no strokes')
return {'No Strokes'}
else:
                print('laststroke: active GP Obj has no active layer')
return {'No Strokes'}
else:
print('laststroke: Names of active GP object and its bpy.data.grease_pencil equivalent must be equal')
return {'Names not equal'}
else:
print('No GP object active')
return {'GP obj inactive'}
# def offset_plane():
#
# cube = bpy.data.objects["Cube"]
# # one blender unit in x-direction
# vec = mathutils.Vector((1.0, 0.0, 0.0))
# inv = cube.rotation_euler.to_matrix()
# # vec aligned to local axis
# vec_rot = vec * inv
# cube.location = cube.location + vec_rot
def plane_array(p1, p2, rotation):
"""adds an array of 1m by 1m planes at given location, parameter rotation defines way to calculate angle
"""
# define standard scale / count
save_active_gp()
# delete last workplane
if bpy.data.objects:
deselect_all()
# select last temporary workplane
for o in bpy.data.objects:
if o.name == 'workplane_TEMPORARY':
o.select_set(state=True)
# save settings of last workplane
save_grid_settings()
break
# delete last workplane
bpy.ops.object.delete()
bpy.context.scene.plane_offset = 0.0
if rotation == '1p':
p_loc = calc_location_2p(p1, p1)
p_rot = ((0, 0, 0))
elif rotation == '3p':
p_loc = calc_location_2p(p1, p1)
else:
p_loc = calc_location_2p(p1, p2)
if rotation == 'v':
p_rot = calc_rotation_2p_zv(p1, p2)
elif rotation in ('h', 'bp'):
p_rot = calc_rotation_2p_zh(p1, p2)
elif rotation == '3d':
p_rot = calc_rotation_2p_3d(p1, p2)
elif rotation == '3p':
p_rot = calc_rotation_3p(p1, p2)
bpy.context.scene.plane_location = p_loc
bpy.ops.mesh.primitive_plane_add(size=1, location=p_loc, rotation=p_rot)
baseplane = bpy.context.active_object
baseplane.name = 'workplane_TEMPORARY'
add4arrays()
baseplane.scale = bpy.context.scene.grid_scale
# set material of plane
# mat = bpy.data.materials['Mat_Transparent_White']
# baseplane.active_material = mat
baseplane.show_wire = True
deselect_all()
activate_gp()
if rotation not in ('3p', 'bp'):
if bpy.context.scene.del_stroke:
bpy.ops.dt.delete_last_stroke()
return {'FINISHED'}
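# Summary of the 'rotation' modes accepted by plane_array():
#   '1p' - horizontal plane through a single point
#   'h'  - horizontal plane through two points (rotation around z only)
#   'v'  - vertical plane through two points (y rotation fixed at 90 degrees)
#   '3d' - tilted plane following the z difference of the two points
#   '3p' - plane through three points (p2 is a point along the normal)
#   'bp' - base plane, like 'h' but never deletes the last stroke ('3p' also keeps it)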
def save_active_gp():
"""save active gp obj in global variable
"""
if bpy.context.view_layer.objects.active:
if (bpy.context.view_layer.objects.active.type == 'GPENCIL'):
# name of the active object (Type Gpencil Object)
name_active = bpy.context.view_layer.objects.active.name
if (name_active in (gp_pen.name for gp_pen in bpy.data.grease_pencil)):
# select data.grease_pencil object to select its strokes
bpy.context.scene.gp_active = name_active
else:
bpy.context.scene.gp_active = 'empty'
else:
bpy.context.scene.gp_active = 'empty'
else:
bpy.context.scene.gp_active = 'empty'
def save_grid_settings():
"""Stores Grid settings of workplane to global Property of scene
"""
bpy.context.scene.grid_scale = bpy.data.objects['workplane_TEMPORARY'].scale
bpy.context.scene.grid_count = (
bpy.data.objects['workplane_TEMPORARY'].modifiers[0].count,
bpy.data.objects['workplane_TEMPORARY'].modifiers[1].count, 0)
def unit_vector(vector):
""" Returns the unit vector of the input vector.
"""
return vector / np.linalg.norm(vector)
class SetupDrawchitecture(bpy.types.Operator): # standard plane
"""initializes the setup: colors & viewsettings
"""
bl_idname = 'dt.setup'
bl_label = 'SetupDrawchitecture View'
def execute(self, context):
# Viewport shader mode set to 'WIREFRAME' for transparent objects
find_3dview_space().shading.type = 'WIREFRAME'
find_3dview_space().shading.show_xray_wireframe = True
# Disable Floor Grid + Cursor in active View3D, make Vertices in editmode visible
find_3dview_space().overlay.show_floor = False
find_3dview_space().overlay.show_cursor = False
find_3dview_space().overlay.show_object_origins = False
find_3dview_space().overlay.vertex_opacity = 1
# Set 3d View Background color to white and Wire color to grey
bpy.context.preferences.themes[0].view_3d.space.gradients.high_gradient = (0.8, 0.8, 0.8)
bpy.context.preferences.themes[0].view_3d.wire = (0.5, 0.5, 0.5)
# Set Stroke Placement in active Scene to 'Surface'
bpy.context.window.scene.tool_settings.gpencil_stroke_placement_view3d = 'SURFACE'
# plane_array(Vector((0, 0.5, 0)), Vector((1, 0.5, 0)), 'h') # default workplane at 0,0,0
# create GP object or activate last GP object
activate_gp()
# switch to DRAW mode
gpencil_paint_mode()
return {'FINISHED'}
class InitializeDrawchitecture(bpy.types.Operator): # standard plane
"""initializes the setup: default workplane at start, activates GP mode
"""
bl_idname = 'dt.initialize'
bl_label = 'Create Baseplane (+ GP Object if there is none)'
def execute(self, context):
# default workplane at 0,0,0
plane_array(Vector((0, 0.5, 0)), Vector((1, 0.5, 0)), 'bp')
# create GP object if there is none
# if not [obj for obj in bpy.data.objects if obj.type == 'GPENCIL']:
# add_GP()
activate_gp()
# switch to DRAW mode
gpencil_paint_mode()
return {'FINISHED'}
class AddGPObject(bpy.types.Operator):
"""Adds new GP Object to Scene, locked at 0.0.0
"""
bl_idname = 'dt.add_gp_object'
bl_label = 'adds gp object, locked at 0.0.0'
def execute(self, context):
add_GP()
gpencil_paint_mode()
return {'FINISHED'}
class AddRotation(bpy.types.Operator):
"""Adds given rotation to the rotation vector of workplane_TEMPORARY, property sets +/- and Achsis
only shown when workplane_Temporary exists
"""
bl_idname = 'dt.add_rotation'
bl_label = 'add rotation'
axis: bpy.props.StringProperty()
rotation: bpy.props.FloatProperty()
# axis_index = bpy.props.IntProperty()
def execute(self, context):
wp = bpy.data.objects['workplane_TEMPORARY']
rotation_old = wp.rotation_euler
rotation_add = self.rotation * math.pi / 180
if self.axis == 'x':
axis_index = 0
elif self.axis == 'y':
axis_index = 1
elif self.axis == 'z':
axis_index = 2
else:
print('error: axis must be x / y / z')
return {'CANCELLED'}
bpy.data.objects['workplane_TEMPORARY'].rotation_euler[axis_index] = rotation_old[axis_index] + rotation_add
return {'FINISHED'}
class ClearPlaneAndGP(bpy.types.Operator):
""" Deletes the Temporary Workplane and all GP Objects
"""
bl_idname = 'dt.clear_all_objects'
bl_label = 'clears all Temporary Workplane + gp objects in project'
def execute(self, context):
if not bpy.context.mode == 'OBJECT':
bpy.ops.object.mode_set(mode='OBJECT')
# delete all objects
if bpy.data.objects:
for o in bpy.data.objects:
if o.name == 'workplane_TEMPORARY':
o.select_set(state=True)
if o.type == 'GPENCIL':
o.select_set(state=True)
bpy.ops.object.delete()
if bpy.data.grease_pencil:
for gp in bpy.data.grease_pencil:
bpy.data.grease_pencil.remove(gp)
bpy.context.scene.gp_active = 'empty'
bpy.context.scene.plane_offset = 0.0
bpy.ops.dt.initialize()
return {'FINISHED'}
else:
bpy.context.scene.gp_active = 'empty'
bpy.ops.dt.initialize()
return {'FINISHED'}
class DeleteLastStroke(bpy.types.Operator):
"""For V/H/3D: deletes last drawn stroke of active GP Object
"""
bl_idname = 'dt.delete_last_stroke'
bl_label = 'deletes last stroke of active GP object'
def execute(self, context):
save_active_gp()
activate_gp()
gpencil_paint_mode()
active_name = bpy.context.scene.gp_active
if bpy.data.grease_pencil[active_name].layers.active:
if bpy.data.grease_pencil[active_name].layers.active.active_frame.strokes:
# deselect gp to only delete latest stroke
deselect_all_gp()
bpy.data.grease_pencil[active_name].layers.active.active_frame.strokes[-1].select = True
bpy.ops.gpencil.delete(type='STROKES')
else:
print('DeleteLastStroke: Active Grease Pencil has no strokes to be deleted')
else:
print('DeleteLastStroke: Active Grease Pencil has no strokes to be deleted')
gpencil_paint_mode()
return {'FINISHED'}
class RemoveGPObject(bpy.types.Operator):
"""Removes the active GP Object
"""
bl_idname = 'dt.remove_gp_object'
bl_label = 'removes active GP Object'
def execute(self, context):
# make sure no other object is selected
deselect_all()
# activate last gp or write gp name in global variable
activate_gp()
# object mode must be activated to delete an object
if not bpy.context.mode == 'OBJECT':
bpy.ops.object.mode_set(mode='OBJECT')
name_active = bpy.context.scene.gp_active
# if there is an object with the same name as the saved GP, delete it
if (name_active in (gp_obj.name for gp_obj in bpy.data.objects)):
bpy.data.objects[name_active].select_set(state=True)
bpy.ops.object.delete()
# clear saved GP name to activate any other GP or create new if no GP left
bpy.context.scene.gp_active = 'empty'
activate_gp()
gpencil_paint_mode()
return {'FINISHED'}
class ResetScale(bpy.types.Operator):
"""Reset X and Y scale + count of workplane
"""
bl_idname = 'dt.reset_scale'
bl_label = 'reset scale + count'
def execute(self, context):
scale_default = (1.0, 1.0, 0)
wp = bpy.data.objects['workplane_TEMPORARY']
wp.scale = scale_default
wp.modifiers[0].count = 100
wp.modifiers[1].count = 100
save_grid_settings()
return {'FINISHED'}
class SelectGPobject(bpy.types.Operator):
"""Shows buttons with all GP Objects and selects them
(Problems with hidden GP Objects)
"""
bl_idname = 'dt.select_gp_object'
bl_label = 'Activates Greasepencil Object by Name on Button'
gp: bpy.props.StringProperty(default='', options={'SKIP_SAVE'})
@classmethod
def poll(cls, context):
return context.active_object is not None
def execute(self, context):
deselect_all()
gp = context.scene.objects.get(self.gp)
bpy.context.view_layer.objects.active = gp
context.scene.grease_pencil = gp.grease_pencil
save_active_gp()
gpencil_paint_mode()
return {'FINISHED'}
class SwitchScaleAndCount(bpy.types.Operator):
"""Switches X and Y scale + count of workplane
"""
bl_idname = 'dt.switch_scale_and_count'
bl_label = 'switch x/y'
def execute(self, context):
scale = bpy.data.objects['workplane_TEMPORARY'].scale
scale_switched = (scale[1], scale[0], scale[2])
wp = bpy.data.objects['workplane_TEMPORARY']
wp.scale = scale_switched
count_x = wp.modifiers[0].count
wp.modifiers[0].count = wp.modifiers[1].count
wp.modifiers[1].count = count_x
save_grid_settings()
return {'FINISHED'}
class WPstrokeV(bpy.types.Operator): # First+ Last Point of last Stroke create vertical plane
"""adds VERTICAL workplane at last grease pencil stroke by start + endpoint of stroke
! GP Object must be selected first
! GP Object and Grease_Pencil object need equal Names
"""
bl_idname = 'dt.work_plane_on_stroke_2p'
bl_label = 'add vertical workplane by stroke start end'
def execute(self, context):
# last greasepencil stroke
# gp_laststroke = bpy.data.grease_pencil[-1].layers.active.active_frame.strokes[-1]
ls = laststroke()
if ls == {'GP obj inactive'}:
return {'CANCELLED'}
elif ls == {'Names not equal'}:
return {'CANCELLED'}
elif ls == {'No Strokes'}:
return {'CANCELLED'}
else:
p1 = ls.points[0].co
p2 = ls.points[-1].co
plane_array(p1, p2, "v")
# DELETE LAST GP STROKE
# if not bpy.context.mode == 'EDIT':
# bpy.ops.object.mode_set(mode='EDIT')
# gp_laststroke
# bpy.ops.gpencil.delete(type='STROKES')
gpencil_paint_mode()
return {'FINISHED'}
class WPStrokeH(bpy.types.Operator): # First+ Last Point of last Stroke create horizontal plane
"""adds HORIZONTAL workplane at last grease pencil stroke by start + endpoint of stroke
! GP Object must be selected first
! GP Object and Grease_Pencil object need equal Names
"""
bl_idname = 'dt.work_plane_on_stroke_2p_horizontal'
bl_label = 'add horizontal workplane by stroke start end'
def execute(self, context):
# last greasepencil stroke
# gp_laststroke = bpy.data.grease_pencil[-1].layers.active.active_frame.strokes[-1]
ls = laststroke()
if ls == {'GP obj inactive'}:
return {'CANCELLED'}
elif ls == {'Names not equal'}:
return {'CANCELLED'}
elif ls == {'No Strokes'}:
return {'CANCELLED'}
else:
p1 = ls.points[0].co
p2 = ls.points[-1].co
plane_array(p1, p2, "h")
gpencil_paint_mode()
return {'FINISHED'}
class WPstroke3D(bpy.types.Operator): # First+ Last Point of last Stroke create horizontal plane
"""adds tilted Plane to any Stroke by Start + Endpoint of Stroke
! GP Object must be selected first -
! GP Object and Grease_Pencil object need equal Names
"""
bl_idname = 'dt.work_plane_on_stroke_2p_3d'
bl_label = 'align workplane to tilted 3d-strokes by start end'
def execute(self, context):
ls = laststroke()
if ls == {'GP obj inactive'}:
return {'CANCELLED'}
elif ls == {'Names not equal'}:
return {'CANCELLED'}
elif ls == {'No Strokes'}:
return {'CANCELLED'}
else:
p1 = ls.points[0].co
p2 = ls.points[-1].co
plane_array(p1, p2, '3d')
gpencil_paint_mode()
return {'FINISHED'}
class WPselect3P(bpy.types.Operator): # Plane from 1/2/3 Points in Selection
"""First click: enter Stroke-Edit mode, select up to 3 points (hold SHIFT)
Second Click: create Plane through Points
1point: horizontal plane
2points: 3D Plane through 2 Points
3points: 3D Plane through 3 Points
"""
bl_idname = 'dt.work_plane_points_3d'
bl_label = 'Enters Editmode, or converts up to 3 Selected GP_Points to a Plane'
def execute(self, context):
# save gp here?
save_active_gp()
activate_gp()
if not bpy.context.mode == 'EDIT_GPENCIL':
bpy.ops.object.mode_set(mode='EDIT_GPENCIL')
bpy.context.scene.tool_settings.gpencil_selectmode = 'POINT'
else:
# prevent this mode from deleting last stroke
if bpy.context.scene.del_stroke:
bpy.context.scene.del_stroke = False
add_workplane_3p()
bpy.context.scene.del_stroke = True
else:
add_workplane_3p()
# reactivate gp here
gpencil_paint_mode()
return {'FINISHED'}
class View3DPanel:
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = 'Drawchitecture'
class AddPanel(View3DPanel, Panel):
"""Interface
"""
bl_label = 'Drawchitecture'
# bl_context = 'objectmode' # without context works for all contexts
# bl_category = 'Drawchitecture'
def draw(self, context):
layout = self.layout
layout.use_property_split = True
system_box = layout.box()
system_box_title = system_box.row(align=True)
system_box_title.label(text='System Tools', icon='SETTINGS')
system_box_title_sub = system_box_title.row()
system_box_title_sub.prop(bpy.context.scene, 'expand_system', text='', icon='THREE_DOTS', emboss=False)
if bpy.context.scene.expand_system:
system_box_col1 = system_box.column(align=True)
system_box_col1.operator('dt.setup', text='Setup View', icon='PLAY')
system_box_col1.operator('dt.clear_all_objects', text='Clear All Objects', icon='LIBRARY_DATA_BROKEN')
bg_color = bpy.context.preferences.themes[0].view_3d.space.gradients
color_col1 = system_box.column(align=True)
color_col1.label(icon='COLOR', text='Colors')
color_col1.prop(bg_color, 'high_gradient', text='background')
wire_color = bpy.context.preferences.themes[0].view_3d
color_col1.prop(wire_color, 'wire', text='wire lines')
# box with Create WP options
workplane_box = layout.box()
workplane_box_title = workplane_box.row(align=True)
workplane_box_title.label(text='Workplanes', icon='MESH_GRID')
workplane_box_title.prop(bpy.context.scene, "del_stroke", text="delete Stroke")
# Buttons
workplane_box_row1 = workplane_box.row()
workplane_box_row1.operator('dt.delete_last_stroke', text='Delete Last Stroke', icon='STROKE')
workplane_box_col1 = workplane_box.column(align=True)
workplane_box_col1.operator('dt.initialize', text='horizontal base plane', icon='AXIS_TOP')
workplane_box_row2 = workplane_box_col1.row(align=True)
if bpy.context.scene.del_stroke:
workplane_box_row2.alert = True
workplane_box_row2.operator('dt.work_plane_on_stroke_2p', text='V', icon='AXIS_FRONT')
workplane_box_row2.operator('dt.work_plane_on_stroke_2p_horizontal', text='H', icon='AXIS_TOP')
workplane_box_row2.operator('dt.work_plane_on_stroke_2p_3d', text='3D', icon='MOD_LATTICE')
workplane_box_row3 = workplane_box_col1.row(align=True)
if bpy.context.mode == 'EDIT_GPENCIL':
workplane_box_row3.alert = True
workplane_box_row3.operator('dt.work_plane_points_3d', text='select 1 / 2 / 3 points', icon='MOD_DATA_TRANSFER')
if [obj for obj in bpy.data.objects if obj.name == 'workplane_TEMPORARY']:
workplane_rotation_box = layout.box()
workplane_rotation_box_title = workplane_rotation_box.row(align=True)
workplane_rotation_box_title.label(text='Workplane Rotation', icon='FILE_REFRESH')
wp_rot_box_row1 = workplane_rotation_box.row(align=True)
wp_rot_box_row1_sub1 = wp_rot_box_row1.row()
wp_rot_box_row1_sub1.prop(bpy.data.objects['workplane_TEMPORARY'], 'rotation_euler', text=' ')
wp_rot_box_row1_sub2 = wp_rot_box_row1.column(align=True)
minus_x = wp_rot_box_row1_sub2.operator('dt.add_rotation', text='- 45°')
minus_x.axis = 'x'
minus_x.rotation = -45
minus_y = wp_rot_box_row1_sub2.operator('dt.add_rotation', text='- 45°')
minus_y.axis = 'y'
minus_y.rotation = -45
minus_z = wp_rot_box_row1_sub2.operator('dt.add_rotation', text='- 45°')
minus_z.axis = 'z'
minus_z.rotation = -45
wp_rot_box_row1_sub3 = wp_rot_box_row1.column(align=True)
plus_x = wp_rot_box_row1_sub3.operator('dt.add_rotation', text='+ 45°')
plus_x.axis = 'x'
plus_x.rotation = 45
plus_y = wp_rot_box_row1_sub3.operator('dt.add_rotation', text='+ 45°')
plus_y.axis = 'y'
plus_y.rotation = 45
plus_z = wp_rot_box_row1_sub3.operator('dt.add_rotation', text='+ 45°')
plus_z.axis = 'z'
plus_z.rotation = 45
wp_rot_box_row2 = workplane_rotation_box.row(align=True)
wp_rot_box_row2.prop(bpy.context.scene, 'plane_offset')
workplane_grid_box = layout.box()
workplane_grid_box_title = workplane_grid_box.row(align=True)
workplane_grid_box_title.label(text='Grid Size', icon='GRID')
workplane_grid_box_title.prop(bpy.context.scene, 'expand_grid', text='', icon='THREE_DOTS', icon_only=True,
emboss=False)
if bpy.context.scene.expand_grid:
if [obj for obj in bpy.data.objects if obj.name == 'workplane_TEMPORARY']:
workplane_grid_box_row1 = workplane_grid_box.row(align=True)
workplane_grid_box_row1_col1 = workplane_grid_box_row1.column(align=True)
workplane_grid_box_row1_col1.label(text='scale')
workplane_grid_box_row1_col1.prop(bpy.data.objects['workplane_TEMPORARY'], 'scale', index=0,
icon_only=True)
workplane_grid_box_row1_col1.prop(bpy.data.objects['workplane_TEMPORARY'], 'scale', index=1,
icon_only=True)
workplane_grid_box_row1_col2 = workplane_grid_box_row1.column(align=True)
workplane_grid_box_row1_col2.label(text='count')
workplane_grid_box_row1_col2.prop(bpy.data.objects['workplane_TEMPORARY'].modifiers[0], 'count',
icon_only=True)
workplane_grid_box_row1_col2.prop(bpy.data.objects['workplane_TEMPORARY'].modifiers[1], 'count',
icon_only=True)
workplane_grid_box_row2 = workplane_grid_box.row(align=True)
workplane_grid_box_row2.operator('dt.switch_scale_and_count', icon='ARROW_LEFTRIGHT', text='switch')
workplane_grid_box_row2.operator('dt.reset_scale', icon='LOOP_BACK', text='reset')
box_gp = layout.box()
# Show which GP Obj is active
box_gp.label(text='Grease Pencil Objects: ' + bpy.context.scene.gp_active, icon='GREASEPENCIL')
box_gp_row1 = box_gp.row(align=True)
box_gp_row1.operator('dt.add_gp_object', icon='ADD', text='add new')
box_gp_row1.operator('dt.remove_gp_object', icon='REMOVE', text='del active')
greasepencils = [gp for gp in context.scene.objects if gp.type == 'GPENCIL']
box_gp_col1 = box_gp.column(align=True)
for gp in greasepencils:
op = box_gp_col1.row()
if gp.name == bpy.context.scene.gp_active:
op.alert = True
opo = op.operator('dt.select_gp_object', text=gp.name)
opo.gp = gp.name
# op.alert = True
# tuple of all used classes
classes = (
SetupDrawchitecture, InitializeDrawchitecture, AddGPObject, AddRotation, ClearPlaneAndGP, DeleteLastStroke,
RemoveGPObject, ResetScale, SelectGPobject, SwitchScaleAndCount, WPstrokeV, WPStrokeH, WPstroke3D, WPselect3P,
AddPanel)
# registering/unregistering classes
def register():
from bpy.utils import register_class
for cls in classes:
register_class(cls)
def unregister():
from bpy.utils import unregister_class
for cls in reversed(classes):
unregister_class(cls)
``` |
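Once registered, each operator is reachable through Blender's `bpy.ops` namespace under its `bl_idname`. A hypothetical console session after enabling the add-on (names taken from the operators above):

```python
import bpy

bpy.ops.dt.setup()       # configure viewport shading, overlays and colors
bpy.ops.dt.initialize()  # create the default horizontal workplane and enter draw mode
```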
{
"source": "aachurin/broccoli",
"score": 2
} |
#### File: broccoli/broccoli/injector.py
```python
import inspect
from broccoli.components import ReturnValue
class Injector:
allow_async = False
def __init__(self, components, initial):
self.components = [self.ensure_component(comp) for comp in components]
self.initial = dict(initial)
self.reverse_initial = {val: key for key, val in initial.items()}
self.singletons = {}
self.resolver_cache = {}
def clear_cache(self):
self.resolver_cache.clear()
def get_resolved_to(self, parameter):
if (parameter.annotation in (ReturnValue, inspect.Parameter) or
parameter.annotation in self.reverse_initial):
return parameter.annotation
for component in self.components:
if component.can_handle_parameter(parameter):
return inspect.signature(component.resolve).return_annotation
@staticmethod
def ensure_component(comp):
msg = 'Component "%s" must implement `identity` method.'
assert hasattr(comp, 'identity') and callable(comp.identity), \
msg % comp.__class__.__name__
msg = 'Component "%s" must implement `can_handle_parameter` method.'
assert hasattr(comp, 'can_handle_parameter') and callable(comp.can_handle_parameter), \
msg % comp.__class__.__name__
msg = 'Component "%s" must implement `resolve` method.'
assert hasattr(comp, 'resolve') and callable(comp.resolve), \
msg % comp.__class__.__name__
return comp
def resolve_function(self,
func,
seen_state,
output_name=None,
parent_parameter=None,
set_return=False):
steps = []
kwargs = {}
consts = {}
signature = inspect.signature(func)
if output_name is None:
if signature.return_annotation in self.reverse_initial:
# some functions can override initial state
output_name = self.reverse_initial[signature.return_annotation]
else:
output_name = 'return_value'
for parameter in signature.parameters.values():
if parameter.annotation is ReturnValue:
kwargs[parameter.name] = 'return_value'
continue
# Check if the parameter class exists in 'initial'.
if parameter.annotation in self.reverse_initial:
initial_kwarg = self.reverse_initial[parameter.annotation]
kwargs[parameter.name] = initial_kwarg
continue
# The 'Parameter' annotation can be used to get the parameter
# itself. Used for example in 'Header' components that need the
# parameter name in order to lookup a particular value.
if parameter.annotation is inspect.Parameter:
consts[parameter.name] = parent_parameter
continue
# Otherwise, find a component to resolve the parameter.
for component in self.components:
if component.can_handle_parameter(parameter):
if component in self.singletons:
consts[parameter.name] = self.singletons[component]
else:
identity = component.identity(parameter)
kwargs[parameter.name] = identity
if identity not in seen_state:
seen_state.add(identity)
resolved_steps = self.resolve_function(
component.resolve,
seen_state,
output_name=identity,
parent_parameter=parameter
)
steps += resolved_steps
if getattr(component, 'singleton', False):
steps.append(self.resolve_singleton(component, identity))
break
else:
msg = 'No component able to handle parameter %r on function %r.'
raise TypeError(msg % (parameter.name, func.__qualname__))
is_async = inspect.iscoroutinefunction(func)
if is_async and not self.allow_async:
msg = 'Function %r may not be async.'
raise TypeError(msg % (func.__qualname__,))
step = (func, is_async, kwargs, consts, output_name, set_return)
steps.append(step)
return steps
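    # Note: the singleton step built below reports its output under the reserved
    # name '$nocache'; run() sees that key in the state and evicts this resolution
    # from resolver_cache, so later runs re-resolve without the singleton step.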
def resolve_singleton(self, component, identity):
kwargs = {'value': identity}
def func(value):
self.singletons[component] = value
return func, False, kwargs, (), '$nocache', False
def resolve_functions(self, funcs, state):
steps = []
seen_state = set(self.initial) | set(state)
for func in funcs:
func_steps = self.resolve_function(func, seen_state, set_return=True)
steps.extend(func_steps)
return steps
def run(self, funcs, state, cache=True):
if not funcs:
return
funcs = tuple(funcs)
try:
steps = self.resolver_cache[funcs]
except KeyError:
steps = self.resolve_functions(funcs, state)
if cache:
self.resolver_cache[funcs] = steps
step = 0
try:
for func, is_async, kwargs, consts, output_name, set_return in steps:
kwargs = {key: state[val] for key, val in kwargs.items()}
kwargs.update(consts)
state[output_name] = func(**kwargs)
if set_return:
state['return_value'] = state[output_name]
step += 1
finally:
state['$step'] = step
if cache and '$nocache' in state:
self.resolver_cache.pop(funcs)
# noinspection PyUnboundLocalVariable
return state[output_name]
class ASyncInjector(Injector):
allow_async = True
async def run_async(self, funcs, state, cache=True):
if not funcs:
return
funcs = tuple(funcs)
try:
steps = self.resolver_cache[funcs]
except KeyError:
steps = self.resolve_functions(funcs, state)
if cache:
self.resolver_cache[funcs] = steps
step = 0
try:
for func, is_async, kwargs, consts, output_name, set_return in steps:
kwargs = {key: state[val] for key, val in kwargs.items()}
kwargs.update(consts)
output = func(**kwargs)
if is_async:
state[output_name] = await output
else:
state[output_name] = output
if set_return:
state['return_value'] = state[output_name]
step += 1
finally:
state['$step'] = step
if cache and '$nocache' in state:
self.resolver_cache.pop(funcs)
# noinspection PyUnboundLocalVariable
return state[output_name]
```
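To make the resolution flow concrete, here is a minimal, hypothetical component (duck-typed with the `identity` / `can_handle_parameter` / `resolve` methods that `ensure_component` asserts; real broccoli components come from `broccoli.components`):

```python
import time
from broccoli.injector import Injector

class Now(float):
    """Marker type: the current timestamp."""

class NowComponent:
    def identity(self, parameter):
        return 'now'
    def can_handle_parameter(self, parameter):
        # match any handler parameter annotated with Now
        return parameter.annotation is Now
    def resolve(self) -> Now:
        return Now(time.time())

def handler(now: Now):
    return now

injector = Injector(components=[NowComponent()], initial={})
print(injector.run([handler], state={}))  # prints the resolved timestamp
```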
#### File: broccoli/broccoli/types.py
```python
import abc
import logging
from typing import TypeVar, Type, List, Tuple, Any, Callable
__all__ = ('Config', 'Argument', 'Arguments', 'Task', 'Message', 'Fence', 'TaskLogger',
'State', 'Router', 'LoggerService', 'Broker', 'App')
Config = TypeVar('Config')
Argument = TypeVar('Argument')
Arguments = TypeVar('Arguments')
Message = TypeVar('Message')
Fence = TypeVar('Fence')
TaskLogger = TypeVar('TaskLogger')
CLOSERS = {'"': '"', "'": "'", '[': ']', '{': '}', '(': ')'}
class MsgRepr:
__slots__ = ('m',)
def __init__(self, m):
self.m = m
def __str__(self):
"""Short representation"""
return "{'id': %r, 'task': %r}" % (self.m.get('id'), self.m.get('task'))
# noinspection PyDefaultArgument
def __repr__(self, _closers=CLOSERS):
"""Full representation"""
ret = []
for k, v in self.m.items():
v = repr(v)
if len(v) > 100:
v = v[:100] + ' ...'
if v[0] in _closers:
v += _closers[v[0]]
ret.append('%r: %s' % (k, v))
return '{' + ', '.join(ret) + '}'
class State:
PENDING = 'pending'
RUNNING = 'running'
class LoggerService(abc.ABC):
@abc.abstractmethod
def get_logger(self, name) -> logging.Logger:
raise NotImplementedError()
class Router(abc.ABC):
@abc.abstractmethod
def get_queue(self, task_name: str) -> str:
raise NotImplementedError()
class Broker(abc.ABC):
@property
@abc.abstractmethod
def BrokerError(self):
raise NotImplementedError()
@abc.abstractmethod
def set_node_id(self, node_id: str):
raise NotImplementedError()
@abc.abstractmethod
def get_nodes(self) -> List[Tuple[int, str]]:
raise NotImplementedError()
@abc.abstractmethod
def setup(self, consumer_id: str, queues: List[str]):
raise NotImplementedError()
@abc.abstractmethod
def close(self):
raise NotImplementedError()
@abc.abstractmethod
def get_messages(self,
timeout: int = 0):
raise NotImplementedError()
@abc.abstractmethod
def ack(self, key):
raise NotImplementedError()
@abc.abstractmethod
def send_message(self, message: dict, reply_back: bool = False):
raise NotImplementedError()
@abc.abstractmethod
def send_reply(self, consumer: str, message: dict):
raise NotImplementedError()
@abc.abstractmethod
def set_result(self, result_key: str, result: dict, expires_in: int):
raise NotImplementedError()
@abc.abstractmethod
def get_result(self, result_key: str, timeout: int = 0):
raise NotImplementedError()
@abc.abstractmethod
def set_state(self, task_id: str, state: Any):
raise NotImplementedError()
@abc.abstractmethod
def run_gc(self):
raise NotImplementedError()
class App(abc.ABC):
settings: dict = None
@abc.abstractmethod
def set_hooks(self,
on_request: Callable = None,
on_response: Callable = None):
raise NotImplementedError()
@abc.abstractmethod
def get_context(self) -> dict:
raise NotImplementedError()
@abc.abstractmethod
def set_context(self, **kwargs):
raise NotImplementedError()
@abc.abstractmethod
def inject(self, funcs, args=None, cache=True):
raise NotImplementedError()
@abc.abstractmethod
def serve_message(self, message: dict, fence: Fence = None):
raise NotImplementedError()
@abc.abstractmethod
def send_message(self, message: dict):
raise NotImplementedError()
@abc.abstractmethod
def result(self, result_key: str):
raise NotImplementedError()
class Task(abc.ABC):
throws: Tuple[Type[Exception], ...] = ()
ignore_result: bool = False
@property
@abc.abstractmethod
def handler(self) -> Callable:
raise NotImplementedError()
@property
@abc.abstractmethod
def name(self) -> str:
raise NotImplementedError()
@staticmethod
@abc.abstractmethod
def get_arguments(*args, **kwargs) -> dict:
raise NotImplementedError()
```
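`MsgRepr` is the lazy log formatter used by the worker: `str()` keeps only the id and task, while `repr()` renders every field but truncates long values. For example:

```python
from broccoli.types import MsgRepr

m = MsgRepr({'id': '42', 'task': 'ping', 'payload': 'x' * 300})
print(str(m))   # {'id': '42', 'task': 'ping'}
print(repr(m))  # full dict; 'payload' is cut to 100 chars, suffixed with ' ...' and re-closed
```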
#### File: broccoli/broccoli/utils.py
```python
import os
import re
import sys
import importlib
class default:
"""
default value wrapper
"""
__slots__ = ('value',)
def __init__(self, value):
self.value = value
class cached_property:
"""
Decorator that converts a method with a single self argument into a
property cached on the instance.
Optional ``name`` argument allows you to make cached properties of other
methods. (e.g. url = cached_property(get_absolute_url, name='url') )
"""
def __init__(self, func, name=None):
self.func = func
self.__doc__ = getattr(func, '__doc__')
self.name = name or func.__name__
def __get__(self, instance, cls=None):
"""
Call the function and put the return value in instance.__dict__ so that
subsequent attribute access on the instance returns the cached value
instead of calling cached_property.__get__().
"""
if instance is None:
return self
res = instance.__dict__[self.name] = self.func(instance)
return res
def import_path(path):
module, attr = path.rsplit('.', 1)
return getattr(importlib.import_module(module), attr)
def validate(value, msg, type=None, coerce=None,
min_value=None, max_value=None, regex=None,
min_length=None, max_length=None):
if coerce is not None:
try:
value = coerce(value)
except (ValueError, TypeError):
raise ValueError(msg)
if type is not None and not isinstance(value, type):
raise ValueError(msg)
try:
if regex is not None and not re.match(regex, str(value)):
raise ValueError('not match pattern %r' % regex)
if min_value is not None and value < min_value:
raise ValueError('minimum value is %s' % min_value)
if max_value is not None and value > max_value:
raise ValueError('maximum value is %s' % max_value)
if min_length is not None and len(value) < min_length:
raise ValueError('minimum length is %s' % min_length)
if max_length is not None and len(value) > max_length:
raise ValueError('maximum length is %s' % max_length)
except ValueError as exc:
raise ValueError(msg + ': ' + str(exc))
except TypeError:
raise ValueError(msg)
return value
class color:
black = '0;30'
red = '0;31'
green = '0;32'
yellow = '0;33'
blue = '0;34'
purple = '0;35'
cyan = '0;36'
white = '0;37'
gray = '1;30'
light_red = '1;31'
light_green = '1;32'
light_yellow = '1;33'
light_blue = '1;34'
light_purple = '1;35'
light_cyan = '1;36'
light_white = '1;37'
def get_colorizer():
if not sys.stdout.isatty() or os.environ.get('NOCOLORS'):
return _fake_colorizer
return _simple_colorizer
def _fake_colorizer(text, _):
return text
_fake_colorizer.support_colors = False # type: ignore
def _simple_colorizer(text, fg):
return '\x1b[%sm%s\x1b[0m' % (fg, text)
_simple_colorizer.support_colors = True # type: ignore
```
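A quick illustration of `validate()`: it coerces, checks, and raises `ValueError` carrying the supplied message plus the failing constraint:

```python
from broccoli.utils import validate

port = validate('8080', 'Invalid port', coerce=int, min_value=1, max_value=65535)
assert port == 8080

try:
    validate(70000, 'Invalid port', type=int, max_value=65535)
except ValueError as exc:
    print(exc)  # Invalid port: maximum value is 65535
```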
#### File: broccoli/broccoli/worker.py
```python
import os
import sys
import uuid
import time as _time
import random as _random
import heapq
import signal
import traceback
import multiprocessing as mp
from multiprocessing import connection
from broccoli import __version__
from broccoli.utils import get_colorizer, color, default, validate
from broccoli.components import ReturnValue
from broccoli.types import App, Broker, LoggerService, Config, Message, MsgRepr as _MsgRepr
from broccoli.exceptions import WorkerInterrupt, Shutdown, Reject as _Reject
def add_console_arguments(parser):
parser.add_argument('-n', '--node',
dest='worker_node_id',
help='Set custom node id.',
default=default(str(uuid.uuid4())))
parser.add_argument('-c', '--concurrency',
dest='worker_concurrency',
help=('Number of child processes processing the queue. '
'The default is the number of CPUs available on your system.'),
type=int,
default=default(mp.cpu_count()))
parser.add_argument('-q', '--queues',
dest='worker_queues',
help=('List of queues to enable for this worker, separated by comma. '
                              'By default only the "default" queue is enabled.'),
type=lambda x: x.split(','),
default=default(['default']))
parser.add_argument('-l', '--loglevel',
dest='loglevel',
help=('Logging level for default logger.'),
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                        default=default('INFO'))  # a single level name, matching 'choices', not a list
parser.add_argument('--result-expires-in',
dest='result_expires_in',
help=('Time (in seconds) to hold task results. Default is 3600.'),
type=int,
default=default(3600))
parser.add_argument('--watchdog-interval',
dest='worker_fall_down_watchdog_interval',
help=('Fall down watchdog interval (in seconds). Default is 10.'),
type=int,
default=default(10))
parser.add_argument('--fetch-timeout',
dest='broker_fetch_timeout',
type=int,
default=default(10))
parser.add_argument('--restart-died-workers',
dest='restart_died_workers',
action='store_true',
default=default(False))
def bootstrap():
return [
validate_config,
initialize,
start
]
def validate_config(config: Config):
try:
validate(config.get('worker_node_id'),
type=str,
regex=r'[a-zA-Z0-9_\-.]+',
msg='Invalid node_id value')
validate(config.get('worker_concurrency'),
type=int,
min_value=1,
msg='Invalid concurrency value')
validate(config.get('broker_fetch_timeout'),
type=int,
min_value=1,
msg='Invalid fetch_timeout value')
validate(config.get('result_expires_in'),
type=int,
min_value=5,
                 msg='Invalid result_expires_in value')
validate(config.get('worker_queues'),
type=list,
min_length=1,
msg='Invalid queues value')
validate(config.get('worker_fall_down_watchdog_interval'),
coerce=int,
min_value=1,
msg='Invalid watchdog interval')
for item in config['worker_queues']:
validate(item,
type=str,
regex=r'[a-zA-Z0-9_\-.]+',
msg='Invalid queue name')
except ValueError as exc:
print('Error: %s' % exc)
sys.exit(-1)
c = get_colorizer()
print('\n\U0001F966', c('broccoli v%s.' % __version__, color.green))
print(c('[node_id] ', color.cyan), c(config['worker_node_id'], color.yellow))
print(c('[concurrency]', color.cyan), c(config['worker_concurrency'], color.yellow))
print(c('[queues] ', color.cyan), c(', '.join(config['worker_queues']), color.yellow))
print(c('[result_expires_in]', color.cyan), c(config['result_expires_in'], color.yellow))
print()
def initialize(app: App, config: Config):
app.set_context(
node_id=config['worker_node_id']
)
def start(app: App, broker: Broker, config: Config, logger_service: LoggerService):
logger = logger_service.get_logger('bro.master')
def start_worker():
c1, c2 = mp.Pipe(True)
p = mp.Process(target=run_worker, args=[app, c2])
signal.signal(signal.SIGINT, signal.SIG_IGN)
signal.signal(signal.SIGTERM, signal.SIG_IGN)
p.start()
signal.signal(signal.SIGINT, shutdown_handler)
signal.signal(signal.SIGTERM, shutdown_handler)
c1.p = p
connections.append(c1)
def restart_worker(worker_conn):
if worker_conn.p.is_alive():
worker_conn.p.terminate()
worker_conn.close()
connections.remove(worker_conn)
restart_timeout = 3 + int(random() * 5)
            logger.critical('%s died unexpectedly \U0001f480, starting a new worker in %d seconds...',
worker_conn.p.name, restart_timeout)
call_in(restart_timeout, start_worker)
def shutdown_handler(_, __):
nonlocal shutdown_started
if not shutdown_started:
shutdown_started = True
raise Shutdown()
def on_task_start(_conn, _data):
pass
def on_task_done(_conn, _data):
pass
def hold_elections():
nodes = broker.get_nodes()
if any(n[1].endswith('@') for n in nodes):
return False
nodes.sort(reverse=True)
if nodes[0][1] == node_id:
broker.set_node_id(node_id + '@')
logger.info('Worker fall down watchdog activated.')
return True
return False
def watch_dog():
nonlocal watch_dog_enabled
if not watch_dog_enabled:
watch_dog_enabled = hold_elections()
if watch_dog_enabled:
broker.run_gc()
call_in(fall_down_watchdog_interval, watch_dog)
def call_in(in_seconds, handler, args=()):
heappush(tasks, (time() + in_seconds, (handler, args)))
random = _random.random
sleep = _time.sleep
time = _time.time
wait = connection.wait
heappush = heapq.heappush
heappop = heapq.heappop
tasks = []
connections = []
shutdown_started = False
watch_dog_enabled = False
node_id = config['worker_node_id']
fall_down_watchdog_interval = config['worker_fall_down_watchdog_interval']
event_handlers = {
'task_start': on_task_start,
'task_done': on_task_done
}
call_in(0, watch_dog)
try:
signal.signal(signal.SIGINT, shutdown_handler)
signal.signal(signal.SIGTERM, shutdown_handler)
sleep(random() * 3)
broker.set_node_id(node_id)
sleep(0.5)
for ident in range(config['worker_concurrency']):
start_worker()
while 1:
try:
while tasks:
next_event_at = tasks[0][0]
timeout = next_event_at - time()
if timeout <= 0:
event_handler, event_args = heappop(tasks)[1]
event_handler(*event_args)
else:
break
if not tasks:
timeout = None
ready: list = wait(connections, timeout)
for conn in ready:
try:
event, data = conn.recv()
except EOFError:
logger.debug('Broken pipe to %s', conn.p.name)
restart_worker(conn)
continue
event_handlers[event](conn, data)
except Exception:
logger.critical(traceback.format_exc())
break
except Shutdown:
pass
logger.warning('Shutdown started.')
while connections:
alive = []
for conn in connections:
if conn.p.is_alive():
conn.p.terminate()
conn.close()
alive.append(conn)
connections = alive
if connections:
sleep(0.5)
def run_worker(app, conn):
worker_id = str(uuid.uuid4())
app.set_context(
worker_id=worker_id
)
app.inject([worker], args={'conn': conn, 'worker_id': worker_id}, cache=False)
def worker(conn, worker_id, app: App, broker: Broker, logger_service: LoggerService, config: Config):
logger = logger_service.get_logger('bro.worker')
class fence:
__slots__ = ()
def __enter__(self):
nonlocal fence_counter
fence_counter += 1
def __exit__(self, *exc_info):
nonlocal fence_counter
fence_counter -= 1
if fence_counter == 0 and shutdown_started:
raise WorkerInterrupt()
def worker_interrupt_handler(_, __):
nonlocal shutdown_started
if not shutdown_started:
shutdown_started = True
if fence_counter == 0:
raise WorkerInterrupt()
def worker_state_handler(_, __):
frame = sys._getframe(1)
logger.info('%s: line %s', frame.f_code.co_filename, frame.f_lineno)
def emit(event, data=None):
conn.send((event, data))
def on_request(msg: Message):
logger.info('Received task %s.', MsgRepr(msg))
start_time = time()
msg['_start'] = start_time
emit('task_start', {
'id': msg['id'],
'task': msg['task'],
'start_time': start_time
})
def on_response(msg: Message, ret: ReturnValue):
running_time = time() - msg['_start']
logger.info('Task %s done in %s.', MsgRepr(msg), running_time)
emit('task_done', {
'id': msg['id'],
'task': msg['task'],
'start_time': msg['_start'],
'running_time': running_time
})
return ret
def send_reply(reply):
key = reply.pop('_context', None)
while 1:
try:
if 'result_key' in reply:
if reply['result_key']:
broker.set_result(reply['result_key'], reply, expires_in=result_expires_in)
logger.debug('Set result: %r', MsgRepr(reply))
broker.ack(key)
return
elif 'reply_to' in reply:
broker.send_reply(reply['reply_to'], reply)
logger.debug('Send reply: %r', MsgRepr(reply))
broker.ack(key)
return
else:
broker.send_message(reply, reply_back=True)
logger.debug('Send message: %r', MsgRepr(reply))
return
except broker.BrokerError as err:
logger.critical('Broker error: %s', str(err))
sleep(3 + random() * 3)
def main_loop():
num_errors = 0
while 1:
try:
messages = None
if deferred_messages:
next_message_at = deferred_messages[0][0]
timeout = next_message_at - time()
if timeout <= 0:
messages = [heappop(deferred_messages)[1]]
timeout = getmin(timeout, fetch_timeout)
else:
timeout = fetch_timeout
if not messages:
try:
messages = broker.get_messages(timeout=timeout)
num_errors = 0
except broker.BrokerError as exc:
logger.critical('Broker error: %s', str(exc))
num_errors += 1
sleep_timeout = getmin(num_errors, 10) + random() * 3
if deferred_messages:
sleep_timeout = getmin(sleep_timeout, next_message_at - time())
if sleep_timeout > 0:
sleep(sleep_timeout)
continue
for message_key, message in messages:
message_repr = MsgRepr(message)
run_after = message.get('run_after')
if run_after is not None and isinstance(run_after, (int, float)):
timeout = run_after - time()
if timeout >= 0:
heappush(deferred_messages, (run_after, (message_key, message)))
logger.info('Deferred message %s received. Should be started in %.2f seconds.',
message_repr, timeout)
continue
logger.debug('Got message: %r.', message_repr)
is_reply = 'reply_id' in message
if not is_reply:
message['_context'] = message_key
try:
for message_reply in app.serve_message(message, fence=fence):
send_reply(message_reply)
except Reject as exc:
logger.info('Message %s was rejected: %s', message_repr, str(exc))
continue
finally:
if is_reply:
broker.ack(message_key)
except Exception:
# Something went wrong
logger.critical('Critical error:\n%s', traceback.format_exc())
heappush = heapq.heappush
heappop = heapq.heappop
getmin = min
time = _time.time
sleep = _time.sleep
random = _random.random
Reject = _Reject
MsgRepr = _MsgRepr
fence = fence()
fence_counter = 0
deferred_messages = []
shutdown_started = False
fetch_timeout = config['broker_fetch_timeout']
result_expires_in = config['result_expires_in']
signal.signal(signal.SIGINT, signal.SIG_IGN)
signal.signal(signal.SIGTERM, worker_interrupt_handler)
signal.signal(signal.SIGUSR2, worker_state_handler)
app.set_hooks(on_request=on_request,
on_response=on_response)
logger.info('Started, pid=%d, id=%s', os.getpid(), worker_id)
try:
broker.setup(worker_id, config['worker_queues'])
sleep(random() * 1.5)
main_loop()
except WorkerInterrupt:
pass
broker.close()
``` |
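The worker's options plug into a plain `argparse` parser; a hypothetical wiring (the real CLI entry point lives elsewhere in broccoli):

```python
import argparse
from broccoli.worker import add_console_arguments

parser = argparse.ArgumentParser('bro worker')
add_console_arguments(parser)
args = parser.parse_args(['-c', '4', '-q', 'default,emails'])
print(args.worker_concurrency, args.worker_queues)  # 4 ['default', 'emails']
```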
{
"source": "aachurin/meow.di",
"score": 2
} |
#### File: meow/di/injector.py
```python
import inspect
import types
import typing
from .component import Component, ReturnValue
from .exception import InjectorError
_Callable = typing.Callable[..., typing.Any]
_Step = typing.Tuple[
_Callable, bool, typing.Dict[str, str], typing.Dict[str, object], str, bool
]
class Injector:
allow_async = False
def __init__(
self,
components: typing.Sequence[Component],
initial: typing.Optional[typing.Mapping[str, object]] = None,
resolved: typing.Optional[typing.Mapping[object, object]] = None,
):
self.components: typing.Sequence[Component] = list(components)
self.initial: typing.Mapping[str, object] = dict(initial or {})
self.reverse_initial: typing.Mapping[object, str] = {
val: key for key, val in self.initial.items()
}
self.resolved: typing.Dict[object, object] = dict(resolved or {})
self.resolver_cache: typing.Dict[
typing.Tuple[_Callable, ...], typing.List[_Step]
] = {}
def resolve_function(
self,
func: _Callable,
seen_state: typing.Set[str],
output_name: typing.Optional[str] = None,
parent_parameter: typing.Optional[inspect.Parameter] = None,
set_return: bool = False,
) -> typing.List[_Step]:
steps = []
kwargs: typing.Dict[str, str] = {}
consts: typing.Dict[str, object] = {}
signature = inspect.signature(func)
if output_name is None:
if signature.return_annotation in self.reverse_initial:
# some functions can override initial state
output_name = self.reverse_initial[signature.return_annotation]
else:
output_name = "return_value"
for parameter in signature.parameters.values():
if parameter.annotation is ReturnValue:
kwargs[parameter.name] = "return_value"
continue
# Check if the parameter class exists in 'initial'.
if parameter.annotation in self.reverse_initial:
initial_kwarg = self.reverse_initial[parameter.annotation]
kwargs[parameter.name] = initial_kwarg
continue
# Check if the parameter class is already resolved.
if parameter.annotation in self.resolved:
consts[parameter.name] = self.resolved[parameter.annotation]
continue
# The 'Parameter' annotation can be used to get the parameter
# itself. Used for example in 'Header' components that need the
# parameter name in order to lookup a particular value.
if parameter.annotation is inspect.Parameter:
consts[parameter.name] = parent_parameter
continue
# Otherwise, find a component to resolve the parameter.
for component in self.components:
if component.can_handle_parameter(parameter):
if component.is_singleton:
try:
consts[parameter.name] = self.resolved[component]
except KeyError:
consts[parameter.name] = self.resolved[
component
] = self.resolve_singleton(component.resolve)
else:
identity = component.identity(parameter)
kwargs[parameter.name] = identity
if identity not in seen_state:
seen_state.add(identity)
resolved_steps = self.resolve_function(
component.resolve,
seen_state,
output_name=identity,
parent_parameter=parameter,
)
steps += resolved_steps
break
else:
hint = self._get_hint(func, parameter)
msg = f"In {hint}: no component able to handle parameter `{parameter.name}`."
raise InjectorError(msg)
is_async = inspect.iscoroutinefunction(func)
if is_async and not self.allow_async:
hint = self._get_hint(func)
msg = f"Function {hint} may not be async."
raise InjectorError(msg)
steps.append((func, is_async, kwargs, consts, output_name, set_return))
return steps
def resolve_functions(
self, funcs: typing.Tuple[_Callable, ...], state: typing.Mapping[str, object]
) -> typing.List[_Step]:
steps = []
seen_state = set(self.initial) | set(state)
for func in funcs:
func_steps = self.resolve_function(func, seen_state, set_return=True)
steps.extend(func_steps)
return steps
def resolve_singleton(self, func: _Callable) -> object:
consts = {}
signature = inspect.signature(func)
for parameter in signature.parameters.values():
# Check if the parameter class is already resolved.
if parameter.annotation in self.resolved:
consts[parameter.name] = self.resolved[parameter.annotation]
continue
# Otherwise, find a component to resolve the parameter.
for component in self.components:
if component.is_singleton and component.can_handle_parameter(parameter):
try:
consts[parameter.name] = self.resolved[component]
except KeyError:
consts[parameter.name] = self.resolved[
component
] = self.resolve_singleton(component.resolve)
break
else:
hint = self._get_hint(func, parameter)
msg = f"In {hint}: no component able to handle parameter `{parameter.name}`."
raise InjectorError(msg)
is_async = inspect.iscoroutinefunction(func)
if is_async and not self.allow_async: # pragma: nocover
hint = self._get_hint(func)
msg = f"Function {hint} may not be async."
raise InjectorError(msg)
return func(**consts)
@staticmethod
def _get_hint(
func: _Callable, parameter: typing.Optional[inspect.Parameter] = None
) -> str: # pragma: nocover
if isinstance(func, types.FunctionType):
name = func.__name__
elif isinstance(func, types.MethodType):
name = f"{func.__self__.__class__.__name__}.{func.__func__.__name__}"
else:
name = str(func)
if parameter:
if parameter.annotation is not parameter.empty:
if isinstance(parameter.annotation, type):
annotation = f": {parameter.annotation.__name__}"
else:
annotation = ": {parameter.annotation!r}"
else:
annotation = ""
args = f"... {parameter.name}{annotation} ..."
else:
args = ""
return f"{name}({args})"
def run(
self, funcs: typing.Tuple[_Callable, ...], state: typing.Dict[str, object]
) -> object:
if not funcs: # pragma: nocover
return None
try:
steps = self.resolver_cache[funcs]
except KeyError:
steps = self.resolve_functions(funcs, state)
self.resolver_cache[funcs] = steps
for func, is_async, kwargs, consts, output_name, set_return in steps:
func_kwargs = {key: state[val] for key, val in kwargs.items()}
if consts:
func_kwargs.update(consts)
state[output_name] = func(**func_kwargs)
if set_return:
state["return_value"] = state[output_name]
# noinspection PyUnboundLocalVariable
return state[output_name]
class AsyncInjector(Injector): # pragma: nocover
allow_async = True
async def run_async(self, funcs, state): # type: ignore
if not funcs:
return
funcs = tuple(funcs)
try:
steps = self.resolver_cache[funcs]
except KeyError:
steps = self.resolve_functions(funcs, state)
self.resolver_cache[funcs] = steps
for func, is_async, kwargs, consts, output_name, set_return in steps:
func_kwargs = {key: state[val] for key, val in kwargs.items()}
func_kwargs.update(consts)
if is_async:
# noinspection PyUnresolvedReferences
state[output_name] = await func(**func_kwargs)
else:
state[output_name] = func(**func_kwargs)
if set_return:
state["return_value"] = state[output_name]
# noinspection PyUnboundLocalVariable
return state[output_name]
``` |
{
"source": "aachurin/meow.validators",
"score": 2
} |
#### File: meow/validators/container.py
```python
import datetime
import uuid
import typing
import enum
import collections.abc
from dataclasses import field as _field, fields, is_dataclass, MISSING
from .compat import get_origin, get_args
from .elements import (
Validator,
Optional,
Adapter,
Any,
String,
Float,
Integer,
Boolean,
DateTime,
Date,
Time,
UUID,
Enum,
Union,
Mapping,
Object,
List,
TypedList,
)
_T = typing.TypeVar("_T")
_T_co = typing.TypeVar("_T_co", covariant=True)
def field( # type: ignore
*,
default=MISSING,
default_factory=MISSING,
repr=True,
hash=None,
init=True,
compare=True,
**kwargs,
):
return _field( # type: ignore
default=default,
default_factory=default_factory,
repr=repr,
hash=hash,
init=init,
compare=compare,
metadata=kwargs,
)
_Factory = typing.Callable[..., Validator[typing.Any]]
_Callback = typing.Callable[
[typing.Type[object], typing.Tuple[Validator[typing.Any], ...]], _Factory,
]
class Container:
_builtins: typing.Dict[typing.Type[typing.Any], _Factory] = {
str: String,
float: Float,
int: Integer,
bool: Boolean,
datetime.datetime: DateTime,
datetime.time: Time,
datetime.date: Date,
uuid.UUID: UUID,
set: (
lambda items=Any, **spec: Adapter(
List(items, uniqueitems=True, **spec), set
)
),
frozenset: (
lambda items=Any, **spec: Adapter(
List(items, uniqueitems=True, **spec), frozenset
)
),
list: (lambda items=Any, **spec: List(items, **spec)),
dict: (lambda keys=Any, values=Any, **spec: Mapping(keys, values, **spec)),
tuple: (lambda items=Any, **spec: Adapter(List(items, **spec), tuple)),
collections.abc.Sequence: (
lambda items=Any, **spec: Adapter(List(items, **spec), tuple)
),
collections.abc.MutableSequence: (
lambda items=Any, **spec: Adapter(List(items, **spec), tuple)
),
collections.abc.Set: (
lambda items=Any, **spec: Adapter(
List(items, uniqueitems=True, **spec), frozenset
)
),
collections.abc.MutableSet: (
lambda items=Any, **spec: Adapter(
List(items, uniqueitems=True, **spec), set
)
),
collections.abc.Mapping: (
lambda keys=Any, values=Any, **spec: Mapping(keys, values, **spec)
),
}
_lookup_cache: dict # type: ignore
def __init__(
self, lookup_cache_size: int = 5000, default: typing.Optional[_Callback] = None,
):
self._lookup_cache = {}
self._lookup_cache_size = lookup_cache_size
self._default = default
# @classmethod
# def is_primitive_type(cls, tp: typing.Type[object]) -> bool:
# return tp in cls._builtin_primitives
#
# @staticmethod
# def is_enum_type(tp: typing.Type[object]) -> bool:
# return isinstance(tp, type) and issubclass(tp, enum.Enum)
#
# @staticmethod
# def is_dataclass_type(tp: typing.Type[object]) -> bool:
# return isinstance(tp, type) and is_dataclass(tp)
@typing.overload
def get_validator(self, tp: typing.Type[_T]) -> Validator[_T]:
... # pragma: nocover
@typing.overload
def get_validator(self, tp: _T) -> Validator[_T]:
... # pragma: nocover
def get_validator(self, tp): # type: ignore
try:
return self._lookup_cache[tp]
except KeyError:
pass
validator = self._lookup_cache[tp] = self._get_validator(tp, {})
if len(self._lookup_cache) > self._lookup_cache_size: # pragma: nocover
self._lookup_cache.pop(next(iter(self._lookup_cache)))
return validator
__getitem__ = get_validator
@typing.overload
def get_validator_spec(self, tp: typing.Type[_T], **spec: object) -> Validator[_T]:
... # pragma: nocover
@typing.overload
def get_validator_spec(self, tp: _T, **spec: object) -> Validator[_T]:
... # pragma: nocover
def get_validator_spec(self, tp, **spec): # type: ignore
return self._get_validator(tp, spec)
__call__ = get_validator_spec
@typing.overload
def _get_validator(
self, tp: typing.Type[_T], spec: typing.Dict[str, object]
) -> Validator[_T]:
... # pragma: nocover
@typing.overload
def _get_validator(self, tp: _T, spec: typing.Dict[str, object]) -> Validator[_T]:
... # pragma: nocover
def _get_validator(self, tp, spec): # type: ignore
if tp is typing.Any:
return Any
if isinstance(tp, type):
if factory := self._builtins.get(tp):
return factory(**spec)
if issubclass(tp, enum.Enum):
assert not spec, "Spec for enums is not allowed"
# noinspection PyTypeChecker
return Enum(items=tp)
if is_dataclass(tp):
assert not spec, "Spec for dataclasses is not allowed"
properties: typing.Dict[str, Validator[typing.Any]] = {}
required = []
for fld in fields(tp):
if not fld.init:
continue
if fld.default is MISSING and fld.default_factory is MISSING: # type: ignore
required.append(fld.name)
if fld.metadata:
properties[fld.name] = self._get_validator(
fld.type, dict(fld.metadata)
)
else:
properties[fld.name] = self.get_validator(fld.type)
return Adapter(
Object(properties, required=tuple(required)), lambda x: tp(**x)
)
# return TypedObject(properties, tp, required=tuple(required))
# TODO: typing.NamedTuple
if self._default is not None and (resolved := self._default(tp, ())):
# noinspection PyUnboundLocalVariable
return resolved(**spec)
elif origin := get_origin(tp):
type_args = get_args(tp)
items: typing.Any = spec.pop("items", None)
if origin is typing.Union:
none_type = type(None)
if none_type in type_args:
args = tuple(item for item in type_args if item is not none_type)
inner_tp = (
args[0] if len(args) == 1 else typing.Union.__getitem__(args)
)
if spec:
validator = self._get_validator(inner_tp, spec)
else:
validator = self.get_validator(inner_tp)
return Optional(validator)
if items is None:
items = [self.get_validator(arg) for arg in type_args]
else:
# simple check; if the developer supplies inconsistent validators, that's their problem
assert isinstance(items, (list, tuple)) and len(items) == len(
type_args
)
assert not spec, "Invalid spec for Union"
return Union(*items)
if origin is tuple:
if items is not None:
if not type_args or type_args[-1] is ...:
return Adapter(List(items, **spec), tuple)
else:
assert (
isinstance(items, (list, tuple))
and len(items) == len(type_args)
and not spec
), "Invalid spec for Tuple"
return Adapter(TypedList(*items), tuple)
elif not type_args:
return Adapter(List(Any, **spec), tuple)
elif type_args[-1] is ...:
return Adapter(
List(self.get_validator(type_args[0]), **spec), tuple
)
else:
assert not spec, "Invalid spec for Tuple"
return Adapter(
TypedList(*(self.get_validator(arg) for arg in type_args)),
tuple,
)
# handle other generics
if items is None:
items = [self.get_validator(type_arg) for type_arg in type_args]
if factory := self._builtins.get(origin):
return factory(*items, **spec)
if self._default is not None:
if resolved := self._default(tp, items):
return resolved(**spec)
raise TypeError("Don't know how to create validator for %r" % tp)
V = Container()
get_validator = V.get_validator
``` |
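The generic-type dispatch in `_get_validator` above leans on `get_origin`/`get_args` (re-exported here via `.compat`). A minimal standalone sketch of that pattern, using only the stdlib, shows what the container sees for a few annotations; `describe` is a made-up helper for the demo, not part of the library:
```python
import typing

def describe(tp: object) -> str:
    """Report how a typing annotation decomposes, mirroring _get_validator's branches."""
    origin = typing.get_origin(tp)
    args = typing.get_args(tp)
    if origin is typing.Union and type(None) in args:
        inner = [a for a in args if a is not type(None)]
        return f"Optional[{describe(inner[0]) if len(inner) == 1 else 'Union[...]'}]"
    if origin is not None:
        return f"{origin.__name__}[{', '.join(describe(a) for a in args)}]"
    return getattr(tp, "__name__", str(tp))

print(describe(typing.Optional[int]))                # Optional[int]
print(describe(typing.Dict[str, typing.List[int]]))  # dict[str, list[int]]
```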
{
"source": "aachurin/meow.webs",
"score": 2
} |
#### File: meow/webs/utils.py
```python
import os
import typing
import importlib
from importlib.util import find_spec
def get_package_path(package_path: str) -> str:
if ":" in package_path:
package, path = package_path.split(":", 1)
spec = find_spec(package)
if spec and spec.origin:
package_dir = os.path.dirname(spec.origin)
return os.path.join(package_dir, path)
raise ValueError(f"Package {package} not found") # pragma: nocover
else:
return package_path
T = typing.TypeVar("T")
@typing.overload
def import_string(dot_path: str) -> object:
... # pragma: nocover
@typing.overload
def import_string(dot_path: str, tp: typing.Type[T]) -> T:
... # pragma: nocover
def import_string(dot_path, tp=None): # type: ignore
path, attr = dot_path.rsplit(".", 1)
module = importlib.import_module(path)
try:
ret = getattr(module, attr)
except AttributeError:
raise ImportError(f"Could not load name {dot_path}") from None
if tp is None or isinstance(ret, tp):
return ret
raise TypeError(
f"{dot_path} must be an instance of {tp}, got {ret!r}"
) # pragma: nocover
```
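A short usage sketch for the two helpers above; the `meow.webs:templates` directory name is illustrative and assumes the package is importable:
```python
from meow.webs.utils import get_package_path, import_string

# "package:relative/dir" resolves against the package location (illustrative dir name).
templates_dir = get_package_path("meow.webs:templates")

dumps = import_string("json.dumps")                        # plain dotted-path import
ordered = import_string("collections.OrderedDict", type)   # type-checked variant
```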
#### File: aachurin/meow.webs/setup.py
```python
import os
import re
from setuptools import setup, find_namespace_packages
def get_version(package):
"""
Return package version as listed in `__version__` in `init.py`.
"""
init_py = open(os.path.join(package, "__init__.py")).read()
return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
def get_long_description(long_description_file):
"""
Read long description from file.
"""
with open(long_description_file, encoding="utf-8") as f:
long_description = f.read()
return long_description
setup(
name="meow.webs",
version=get_version("meow/webs"),
url="https://github.com/aachurin/meow.webs",
license="MIT",
description="Expressive WEB apiS",
long_description=get_long_description("README.md"),
long_description_content_type="text/markdown",
author="<NAME>",
maintainer="<NAME>",
maintainer_email="<EMAIL>",
packages=find_namespace_packages(include=["meow.*"]),
package_data={"meow.webs": ["py.typed"]},
install_requires=["jinja2", "werkzeug", "whitenoise", "meow.di", "meow.validators"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
# entry_points={
# 'console_scripts': [
# 'stark=stark:main'
# ],
# },
)
```
#### File: meow.webs/tests/hooks.py
```python
import typing
from meow.webs import http, Component
Context = typing.NewType("Context", dict)
class ContextComponent(Component):
def resolve(self) -> Context:
return Context({})
ERROR_HOOK = 0
class CustomResponseHeader:
def __init__(self, header="Custom"):
self.header = header
def on_request(self, context: Context):
context["hook"] = "Ran hooks"
def on_response(self, response: http.Response, context: Context):
if "hook" in context:
response.headers[self.header] = context["hook"]
def on_error(self):
global ERROR_HOOK
ERROR_HOOK += 1
class NonHook:
pass
class NonComponent:
pass
``` |
{
"source": "aachurin/promisedio",
"score": 3
} |
#### File: promisedio/examples/tcp_server.py
```python
import ssl
from promisedio import loop, ns, promise, timer
n = 0
def new_connection(stream):
global n
print("We are here!!!!")
print(stream)
if n == 3:
server.close()
n += 1
server = ns.start_server(new_connection, ("127.0.0.1", 8090))
loop.run_forever()
```
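To exercise this example, any TCP client will do; a plain stdlib client (no promisedio required) that should trigger `new_connection`:
```python
import socket

# Plain TCP client for the example server above.
with socket.create_connection(("127.0.0.1", 8090)) as sock:
    sock.sendall(b"hello")
```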
#### File: promisedio/tools/capluse.py
```python
import os
import re
import sys
import hashlib
import argparse
code_copyright = """
// Copyright (c) 2021 <NAME> (<EMAIL>).
// This file is part of promisedio
"""
def main(params):
parser = argparse.ArgumentParser()
parser.add_argument("root")
args = parser.parse_args(params)
modules = [name for name in os.listdir(args.root) if os.path.isdir(os.path.join(args.root, name))]
if "capsule" in modules:
modules.remove("capsule")
else:
os.mkdir(os.path.join(args.root, "capsule"))
for module in modules:
module_path = os.path.join(args.root, module)
files = [
name
for name in os.listdir(module_path)
if name.endswith(".c") and os.path.isfile(os.path.join(module_path, name))
]
generate_auto_files(args.root, module, files)
def error(key, decl, msg):
print(key)
print(decl)
raise ValueError(msg)
def parse_c_file(data):
result = {}
functions = re.findall(r"CAPSULE_API\((.*),\s*(.*)\)([^{;]*)", data)
for key, ret, decl in functions:
key = key.strip()
if not re.match(r"^[a-zA-Z_][a-zA-Z0-9_]+$", key):
error(key, decl, "Invalid key")
ret = ret.strip()
decl = decl.strip()
match = re.match(r"(.*)\(([\s\S]*)\)", decl)
if not match:
error(key, decl, "Invalid declaration")
funcname, funcargs = match.groups()
funcname = funcname.strip()
funcargs = [x.strip() for x in funcargs.strip().split(",")]
result.setdefault(key.lower(), []).append({
"name": funcname,
"ret": ret,
"args": funcargs
})
return result
def generate_auto_files(root, module, files):
functions = {}
for file in files:
with open(os.path.join(root, module, file), "rt") as f:
data = f.read()
result = parse_c_file(data)
if result:
for key, value in result.items():
functions.setdefault(key, []).extend(value)
if not functions:
return
hash_keys = {}
for api_key, funcs in functions.items():
hash_keys[api_key] = hashlib.md5(repr(funcs).encode("utf-8")).hexdigest()
with open(os.path.join(root, "capsule", module + ".h"), "wt") as f1:
with open(os.path.join(root, module, "capsule.h"), "wt") as f2:
f1.write(code_copyright)
f1.write(f"#ifndef PROMISEDIO_{module.upper()}_API\n")
f1.write(f"#define PROMISEDIO_{module.upper()}_API\n\n")
if os.path.exists(os.path.join(root, module, f"{module}.h")):
f1.write(f'#include "{module}/{module}.h"\n\n')
f2.write(code_copyright)
for api_key, funcs in functions.items():
hash_key = api_key + "_" + hash_keys[api_key]
f1.write(f"#define {api_key.upper()} {hash_key}\n\n")
f2.write(f"#define {api_key.upper()} {hash_key}\n\n")
f2.write(f"#define {api_key.upper()}_CAPSULE {{\\\n")
for index, func in enumerate(funcs):
ret = func["ret"]
name = func["name"]
args = list(func["args"])
func_id = name.upper() + "_ID"
f1.write(f"#define {func_id} {index}\n")
f2.write(f" [{index}] = {name},\\\n")
has_state = "_ctx_var" in args
if has_state:
args.remove("_ctx_var")
has_args = bool(args)
if has_state:
args.insert(0, "void*")
if has_args:
f1.write(f"#define {name}(...) \\\n")
else:
f1.write(f"#define {name}() \\\n")
varargs = []
if has_state:
varargs.append(f"_ctx->{hash_key}__ctx")
if has_args:
varargs.append("__VA_ARGS__")
args = ", ".join(args)
varargs = ", ".join(varargs)
f1.write(f" (*({ret} (*) ({args}))(_ctx->{hash_key}__api[{func_id}]))( \\\n")
f1.write(f" {varargs})\n\n")
f2.write("}\n\n")
f1.write("#endif\n")
if __name__ == "__main__":
main(sys.argv[1:])
```
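For reference, here is what `parse_c_file` extracts from a synthetic `CAPSULE_API` declaration (the names are made up for the demo; run this alongside the module above):
```python
sample = """
CAPSULE_API(Timer, int)
timer_start(void *_ctx_var, int delay);
"""
print(parse_c_file(sample))
# {'timer': [{'name': 'timer_start', 'ret': 'int', 'args': ['void *_ctx_var', 'int delay']}]}
```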
#### File: promisedio/tools/myclinic.py
```python
import re
import sys
from clinic import *
from clinic import main as clinic_main
from capluse import main as capsule_main
_readme_content = {}
def get_readme():
if _readme_content:
return _readme_content
with open("README.md", "rt") as f:
data = f.read()
content = _readme_content
module = func = None
lines = data.splitlines()
n = 0
while lines:
line = lines.pop(0)
n += 1
if line.startswith("### "):
module = line.split()[1]
if module.startswith("promisedio."):
module = module[11:]
func = None
continue
if module:
if line.startswith("```python"):
line = lines.pop(0)
n += 1
func = line.split("(")[0].strip()
if func and re.match(r'^[a-zA-Z_]+(?:\.[a-zA-Z_]+)?$', func):
content.setdefault(module, {})[func] = ""
else:
func = None
while lines.pop(0).strip() != "```":
n += 1
continue
if func:
content[module][func] += line + "\n"
return content
def get_readme_description(func):
result = func.docstring.rstrip("\n") + "\n\n"
module = func.module.name
name = f"{func.cls.name}.{func.name}" if func.cls else func.name
lines = get_readme().get(module, {}).get(name)
if not lines:
print("No docstring for %s.%s" % (module, name))
else:
result += lines
return result
docstring_for_c_string = CLanguage.docstring_for_c_string
class DocstringHolder:
pass
def docstring_for_c_string_from_readme(self, f):
df = DocstringHolder()
df.docstring = get_readme_description(f)
return docstring_for_c_string(self, df)
CLanguage.docstring_for_c_string = docstring_for_c_string_from_readme
def rebuild_func(fn, consts):
code = type(fn.__code__)(fn.__code__.co_argcount,
fn.__code__.co_posonlyargcount,
fn.__code__.co_kwonlyargcount,
fn.__code__.co_nlocals,
fn.__code__.co_stacksize,
fn.__code__.co_flags,
fn.__code__.co_code,
consts,
fn.__code__.co_names,
fn.__code__.co_varnames,
fn.__code__.co_filename,
fn.__code__.co_name,
fn.__code__.co_firstlineno,
fn.__code__.co_lnotab,
fn.__code__.co_freevars,
fn.__code__.co_cellvars
)
new_fn = type(fn)(code, fn.__globals__, fn.__name__, fn.__defaults__, fn.__closure__)
new_fn.__kwdefaults__ = fn.__kwdefaults__
return new_fn
def hack_clanguage_output_templates():
consts = []
for v in CLanguage.output_templates.__code__.co_consts:
if isinstance(v, str) and "static {impl_return_type}" in v:
v = "Py_LOCAL_INLINE({impl_return_type})\n{c_basename}_impl({impl_parameters})\n"
consts.append(v)
CLanguage.output_templates = rebuild_func(CLanguage.output_templates, tuple(consts))
hack_clanguage_output_templates()
class Path_converter(CConverter):
type = "PyObject *"
converter = "PyUnicode_FSConverter"
c_default = "NULL"
def cleanup(self):
return f"Py_XDECREF({self.name});"
class cstring_converter(CConverter):
type = "const char *"
converter = "cstring_converter"
c_default = "NULL"
def converter_init(self, *, accept=None):
if accept == {NoneType}:
self.converter = "cstring_optional_converter"
elif accept is not None:
fail("cstring_converter: illegal 'accept' argument " + repr(accept))
class ssize_t_converter(CConverter):
type = "Py_ssize_t"
converter = "ssize_t_converter"
class fd_converter(CConverter):
type = "int"
converter = "fd_converter"
class off_t_converter(CConverter):
type = "Py_off_t"
converter = "off_t_converter"
class inet_addr_converter(CConverter):
type = "sockaddr_any"
converter = "inet_addr_converter"
impl_by_reference = True
class uid_t_converter(CConverter):
type = "uid_t"
converter = "uid_converter"
class gid_t_converter(CConverter):
type = "gid_t"
converter = "gid_converter"
if __name__ == "__main__":
capsule_main([sys.argv[1]])
clinic_main(["--make", "--srcdir"] + sys.argv[1:])
``` |
{
"source": "aachurin/stark",
"score": 2
} |
#### File: stark/server/adapters.py
```python
import asyncio
import sys
from stark.server.wsgi import RESPONSE_STATUS_TEXT
class ASGItoWSGIAdapter(object):
"""
Expose an WSGI interface, given an ASGI application.
We want this so that we can use the Werkzeug development server and
debugger together with an ASGI application.
"""
def __init__(self, asgi, raise_exceptions=False):
self.asgi = asgi
self.raise_exceptions = raise_exceptions
self.loop = asyncio.get_event_loop()
def __call__(self, environ, start_response):
return_bytes = []
message = self.environ_to_message(environ)
asgi_coroutine = self.asgi(message)
async def send(msg):
if msg['type'] == 'http.response.start':
status = RESPONSE_STATUS_TEXT[msg['status']]
headers = [
[key.decode('latin-1'), value.decode('latin-1')]
for key, value in msg['headers']
]
exc_info = sys.exc_info()
start_response(status, headers, exc_info)
elif msg['type'] == 'http.response.body':
return_bytes.append(msg.get('body', b''))
async def receive():
return {
'type': 'http.request',
'body': environ['wsgi.input'].read()
}
try:
self.loop.run_until_complete(asgi_coroutine(receive, send))
except Exception:
if self.raise_exceptions:
raise
return return_bytes
def environ_to_message(self, environ):
"""
WSGI environ -> ASGI message
"""
message = {
'method': environ['REQUEST_METHOD'].upper(),
'root_path': environ.get('SCRIPT_NAME', ''),
'path': environ.get('PATH_INFO', ''),
'query_string': environ.get('QUERY_STRING', '').encode('latin-1'),
'http_version': environ.get('SERVER_PROTOCOL', 'http/1.0').split('/', 1)[-1],
'scheme': environ.get('wsgi.url_scheme', 'http'),
'raise_exceptions': self.raise_exceptions # Not actually part of the ASGI spec
}
if 'REMOTE_ADDR' in environ and 'REMOTE_PORT' in environ:
message['client'] = [environ['REMOTE_ADDR'], int(environ['REMOTE_PORT'])]
if 'SERVER_NAME' in environ and 'SERVER_PORT' in environ:
message['server'] = [environ['SERVER_NAME'], int(environ['SERVER_PORT'])]
headers = []
if environ.get('CONTENT_TYPE'):
headers.append([b'content-type', environ['CONTENT_TYPE'].encode('latin-1')])
if environ.get('CONTENT_LENGTH'):
headers.append([b'content-length', environ['CONTENT_LENGTH'].encode('latin-1')])
for key, val in environ.items():
if key.startswith('HTTP_'):
key_bytes = key[5:].replace('_', '-').lower().encode('latin-1')
val_bytes = val.encode()
headers.append([key_bytes, val_bytes])
message['headers'] = headers
return message
```
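A self-contained wiring sketch for the adapter; the tiny `asgi_app` below follows the old-style double-callable protocol this adapter expects (`app(message)` returning an awaitable `(receive, send)` callable):
```python
import werkzeug
from stark.server.adapters import ASGItoWSGIAdapter

def asgi_app(message):
    async def inner(receive, send):
        await send({'type': 'http.response.start', 'status': 200,
                    'headers': [[b'content-type', b'text/plain']]})
        await send({'type': 'http.response.body', 'body': b'Hello from ASGI'})
    return inner

if __name__ == '__main__':
    # Same run_simple entry point the ASyncApp.serve method uses above.
    werkzeug.run_simple('127.0.0.1', 8080, ASGItoWSGIAdapter(asgi_app))
```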
#### File: stark/server/app.py
```python
import sys
import importlib
import werkzeug
from stark import exceptions
from stark.http import HTMLResponse, JSONResponse, PathParams, Response, LazyResponse
from stark.server.adapters import ASGItoWSGIAdapter
from stark.server.asgi import ASGI_COMPONENTS, ASGIReceive, ASGIScope, ASGISend
from stark.server.components import Component, ReturnValue
from stark.server.core import Route, Settings, generate_document
from stark.server.injector import ASyncInjector, Injector, BaseInjector
from stark.server.router import Router
from stark.server.staticfiles import ASyncStaticFiles, StaticFiles
from stark.server.templates import Templates
from stark.server.validation import VALIDATION_COMPONENTS
from stark.server.wsgi import RESPONSE_STATUS_TEXT, WSGI_COMPONENTS, WSGIEnviron, WSGIStartResponse
from stark.server.utils import import_path
from stark.document import Document
class App:
interface = "wsgi"
injector: BaseInjector
document: Document
router: Router
templates: Templates
statics: StaticFiles
def __init__(self, settings_module: str = 'settings', event_hooks=None):
mod = importlib.import_module(settings_module)
self.settings = Settings(mod)
static_url = self.settings.STATIC_URL
template_dirs = list(self.settings.TEMPLATE_DIRS)
static_dirs = list(self.settings.STATIC_DIRS)
schema_url = self.settings.SCHEMA_URL
docs_url = self.settings.DOCS_URL
docs_theme = self.settings.DOCS_THEME
components = self.settings.COMPONENTS
routes = self.settings.ROUTES
if docs_url:
template_dirs += [
"stark:templates",
{"apistar": f"stark:themes/{docs_theme}/templates"}
]
static_dirs += [f"stark:themes/{docs_theme}/static"]
if not static_dirs:
static_url = None
if event_hooks:
msg = "event_hooks must be a list."
assert isinstance(event_hooks, list), msg
self.debug = getattr(self.settings, "DEBUG", False)
self.propagate_exceptions = getattr(self.settings, "PROPAGATE_EXCEPTIONS", self.debug)
self.init_injector(components)
self.init_templates(template_dirs)
self.init_staticfiles(static_url, static_dirs)
self.event_hooks = event_hooks
module = importlib.import_module(routes)
routes = module.routes or []
routes += self.include_extra_routes(schema_url, docs_url, static_url)
self.init_router(routes)
self.init_document(routes)
# Ensure event hooks can all be instantiated.
self.get_event_hooks()
def include_extra_routes(self, schema_url=None, docs_url=None, static_url=None):
extra_routes = []
from stark.server.handlers import serve_documentation, serve_schema, serve_static_wsgi
if schema_url:
extra_routes += [
Route(schema_url, method='GET', handler=serve_schema, documented=False)
]
if docs_url:
extra_routes += [
Route(docs_url, method='GET', handler=serve_documentation, documented=False)
]
if static_url:
static_url = static_url.rstrip('/') + '/{+filename}'
extra_routes += [
Route(
static_url, method='GET', handler=serve_static_wsgi,
name='static', documented=False, standalone=True
)
]
return extra_routes
def init_document(self, routes):
self.document = generate_document(routes)
def init_router(self, routes):
for route in routes:
route.setup(self.injector)
self.router = Router(routes)
def init_templates(self, template_dirs):
if not template_dirs:
self.templates = None
else:
template_globals = {
'reverse_url': self.reverse_url,
'static_url': self.static_url
}
self.templates = Templates(template_dirs, template_globals)
def init_staticfiles(self, static_url, static_dirs):
if not static_dirs:
self.statics = None
else:
self.statics = StaticFiles(static_url, static_dirs)
def init_injector(self, components=None):
app_components = list(WSGI_COMPONENTS + VALIDATION_COMPONENTS)
for comp in (components or []):
if isinstance(comp, str):
comp = import_path(comp, ["components", "COMPONENTS"])
if isinstance(comp, Component):
app_components.append(comp)
elif isinstance(comp, (list, tuple)):
for c in comp:
if not isinstance(c, Component):
msg = "Could not load component %r"
raise exceptions.ConfigurationError(msg % c)
app_components += list(comp)
else:
msg = "Could not load component %r"
raise exceptions.ConfigurationError(msg % comp)
initial_components = {
'environ': WSGIEnviron,
'start_response': WSGIStartResponse,
'exc': Exception,
'app': App,
'path_params': PathParams,
'route': Route,
'response': Response,
'settings': Settings
}
self.injector = Injector(app_components, initial_components)
def get_event_hooks(self):
event_hooks = []
for hook in self.event_hooks or []:
if isinstance(hook, type):
# New style usage, instantiate hooks on requests.
event_hooks.append(hook())
else:
# Old style usage, to be deprecated on the next version bump.
event_hooks.append(hook)
on_request = [
hook.on_request for hook in event_hooks
if hasattr(hook, 'on_request')
]
on_response = [
hook.on_response for hook in reversed(event_hooks)
if hasattr(hook, 'on_response')
]
on_error = [
hook.on_error for hook in reversed(event_hooks)
if hasattr(hook, 'on_error')
]
return on_request, on_response, on_error
def static_url(self, filename):
assert self.router is not None, "Router is not initialized"
return self.router.reverse_url('static', filename=filename)
def reverse_url(self, name: str, **params):
assert self.router is not None, "Router is not initialized"
return self.router.reverse_url(name, **params)
def render_template(self, path: str, **context):
return self.templates.render_template(path, **context)
def serve(self, host, port, debug=False, **options):
self.debug = debug
if 'use_debugger' not in options:
options['use_debugger'] = debug
if 'use_reloader' not in options:
options['use_reloader'] = debug
werkzeug.run_simple(host, port, self, **options)
@staticmethod
def render_response(return_value: ReturnValue) -> Response:
if return_value is None:
return Response("No Content", 204)
if isinstance(return_value, Response):
return return_value
elif isinstance(return_value, LazyResponse):
return return_value.render_response()
elif isinstance(return_value, str):
return HTMLResponse(return_value)
return JSONResponse(return_value)
@staticmethod
def exception_handler(exc: Exception) -> Response:
if isinstance(exc, exceptions.HTTPException):
return JSONResponse(exc.detail, exc.status_code, exc.get_headers())
raise exc
@staticmethod
def error_handler() -> Response:
return JSONResponse('Server error', 500, exc_info=sys.exc_info())
def finalize_wsgi(self, response: Response, start_response: WSGIStartResponse):
if self.propagate_exceptions and response.exc_info is not None:
exc_info = response.exc_info
raise exc_info[0].with_traceback(exc_info[1], exc_info[2])
start_response(
RESPONSE_STATUS_TEXT[response.status_code],
list(response.headers),
response.exc_info
)
return [response.content]
def __call__(self, environ, start_response):
state = {
'environ': environ,
'start_response': start_response,
'settings': self.settings,
'exc': None,
'app': self,
'path_params': None,
'route': None,
'response': None,
}
method = environ['REQUEST_METHOD'].upper()
path = environ['PATH_INFO']
if self.event_hooks is None:
on_request, on_response, on_error = [], [], []
else:
on_request, on_response, on_error = self.get_event_hooks()
try:
route, path_params = self.router.lookup(path, method)
state['route'] = route
state['path_params'] = path_params
if route.standalone:
funcs = [route.handler]
else:
funcs = (
on_request +
[route.handler, self.render_response] +
on_response +
[self.finalize_wsgi]
)
return self.injector.run(funcs, state)
except Exception as exc:
try:
state['exc'] = exc
# noinspection PyTypeChecker
funcs = (
[self.exception_handler] +
on_response +
[self.finalize_wsgi]
)
return self.injector.run(funcs, state)
except Exception as inner_exc:
try:
state['exc'] = inner_exc
self.injector.run(on_error, state)
finally:
funcs = [self.error_handler, self.finalize_wsgi]
return self.injector.run(funcs, state)
class ASyncApp(App):
interface = "asgi"
def include_extra_routes(self, schema_url=None, docs_url=None, static_url=None):
extra_routes = []
from stark.server.handlers import serve_documentation, serve_schema, serve_static_asgi
if schema_url:
extra_routes += [
Route(schema_url, method='GET', handler=serve_schema, documented=False)
]
if docs_url:
extra_routes += [
Route(docs_url, method='GET', handler=serve_documentation, documented=False)
]
if static_url:
static_url = static_url.rstrip('/') + '/{+filename}'
extra_routes += [
Route(
static_url, method='GET', handler=serve_static_asgi,
name='static', documented=False, standalone=True
)
]
return extra_routes
def init_injector(self, components=None):
components = components if components else []
components = list(ASGI_COMPONENTS + VALIDATION_COMPONENTS) + components
initial_components = {
'scope': ASGIScope,
'receive': ASGIReceive,
'send': ASGISend,
'exc': Exception,
'app': App,
'path_params': PathParams,
'route': Route,
'response': Response,
'settings': Settings
}
self.injector = ASyncInjector(components, initial_components)
def init_staticfiles(self, static_url, static_dirs):
if not static_dirs:
self.statics = None
else:
self.statics = ASyncStaticFiles(static_url, static_dirs)
def __call__(self, scope):
async def asgi_callable(receive, send):
state = {
'scope': scope,
'receive': receive,
'send': send,
'exc': None,
'app': self,
'path_params': None,
'route': None
}
method = scope['method']
path = scope['path']
if self.event_hooks is None:
on_request, on_response, on_error = [], [], []
else:
on_request, on_response, on_error = self.get_event_hooks()
try:
route, path_params = self.router.lookup(path, method)
state['route'] = route
state['path_params'] = path_params
if route.standalone:
funcs = [route.handler]
else:
funcs = (
on_request +
[route.handler, self.render_response] +
on_response +
[self.finalize_asgi]
)
await self.injector.run_async(funcs, state)
except Exception as exc:
try:
state['exc'] = exc
# noinspection PyTypeChecker
funcs = (
[self.exception_handler] +
on_response +
[self.finalize_asgi]
)
await self.injector.run_async(funcs, state)
except Exception as inner_exc:
try:
state['exc'] = inner_exc
await self.injector.run_async(on_error, state)
finally:
funcs = [self.error_handler, self.finalize_asgi]
await self.injector.run_async(funcs, state)
return asgi_callable
async def finalize_asgi(self, response: Response, send: ASGISend, scope: ASGIScope):
if response.exc_info is not None:
if self.propagate_exceptions or scope.get('raise_exceptions', False):
exc_info = response.exc_info
raise exc_info[0].with_traceback(exc_info[1], exc_info[2])
await send({
'type': 'http.response.start',
'status': response.status_code,
'headers': [
[key.encode(), value.encode()]
for key, value in response.headers
]
})
await send({
'type': 'http.response.body',
'body': response.content
})
def serve(self, host, port, debug=False, **options):
self.debug = debug
if 'use_debugger' not in options:
options['use_debugger'] = debug
if 'use_reloader' not in options:
options['use_reloader'] = debug
wsgi = ASGItoWSGIAdapter(self, raise_exceptions=debug)
werkzeug.run_simple(host, port, wsgi, **options)
```
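A minimal bootstrap sketch for `App`, following the settings/route conventions in `core.py`; the module names and layout here are illustrative:
```python
# settings.py
ROUTES = "routes"          # dotted path of the module exposing `routes`

# routes.py
from stark.server.core import Route

def hello() -> dict:
    """Say hello."""
    return {"message": "hello"}

routes = [Route("/", method="GET", handler=hello)]

# run.py
from stark import App

app = App(settings_module="settings")
app.serve("127.0.0.1", 8080, debug=True)
```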
#### File: stark/server/core.py
```python
import inspect
import re
import typing
import datetime
import decimal
import uuid
from stark import http, document, exceptions
from stark.server.utils import import_path, parse_docstring
from stark.schema import (
is_field,
is_schema,
Any,
Array,
Boolean,
Date,
DateTime,
Decimal,
Float,
Integer,
Object,
String,
Time,
UUID,
Union,
Reference
)
class Settings:
ROUTES = "routes"
SCHEMA_URL = "/schema"
DOCS_URL = "/docs/"
STATIC_URL = "/static/"
DOCS_THEME = "apistar"
LOGGING = None
COMPONENTS = ()
TEMPLATE_DIRS = ()
STATIC_DIRS = ()
def __init__(self, mod):
tuple_settings = [
"COMPONENTS",
"TEMPLATE_DIRS",
"STATIC_DIRS",
]
for setting in dir(mod):
if setting.isupper():
setting_value = getattr(mod, setting)
if (setting in tuple_settings and
not isinstance(setting_value, (list, tuple))):
msg = f"The {setting} setting must be a list or a tuple."
raise exceptions.ConfigurationError(msg)
setattr(self, setting, setting_value)
def issubclass_safe(cls, classinfo):
try:
return issubclass(cls, classinfo)
except TypeError:
return False
class Route:
link = None
def __init__(self,
url: str,
method: str,
handler: typing.Union[str, typing.Callable],
name: str = None,
documented: bool = True,
standalone: bool = False,
tags: typing.Sequence[str] = None):
if isinstance(handler, str):
handler = import_path(handler)
self.url = url
self.method = method
self.handler = handler
self.name = name or handler.__name__
self.documented = documented
self.standalone = standalone
self.tags = tags
def setup(self, injector):
self.link = LinkGenerator(injector).generate_link(
self.url,
self.method,
self.handler,
self.name,
self.tags
)
class Include:
section = None
def __init__(self, url, name, routes, documented=True):
if isinstance(routes, str):
routes = import_path(routes, ["routes", "ROUTES"])
self.url = url
self.name = name
self.routes = routes
self.documented = documented
def setup(self, injector):
content = []
for item in self.routes:
item.setup(injector)
if isinstance(item, Route):
content.append(item.link)
elif isinstance(item, Include):
content.append(item.section)
self.section = document.Section(name=self.name, content=content)
PRIMITIVES = {
inspect.Parameter.empty: Any,
int: Integer,
float: Float,
str: String,
bool: Boolean,
datetime.datetime: DateTime,
datetime.date: Date,
datetime.time: Time,
decimal.Decimal: Decimal,
uuid.UUID: UUID
}
class LinkGenerator:
def __init__(self, injector):
self.injector = injector
def generate_link(self, url, method, handler, name, tags):
docstring = parse_docstring(handler.__doc__)
fields = self.generate_fields(url, method, handler)
response = self.generate_response(handler)
encoding = None
if any([f.location == "body" for f in fields]):
encoding = "application/json"
description = (docstring.short_description + "\n" + docstring.long_description).strip()
return document.Link(
url=url,
method=method,
name=name,
encoding=encoding,
fields=fields,
response=response,
description=description,
tags=tags
)
def generate_fields(self, url, method, handler):
fields = []
path_names = [
item.strip("{}").lstrip("+") for item in re.findall("{[^}]*}", url)
]
body_params = []
parameters = self.injector.resolve_validation_parameters(handler)
for name, param in parameters.items():
if name in path_names:
fields.append(self.generate_path_field(param))
elif is_schema(param.annotation):
if method in ("GET", "DELETE"):
fields += self.generate_query_fields_from_schema(param)
else:
fields.append(document.Field(name=name, location="body", schema=param.annotation))
body_params.append(param)
else:
fields += self.generate_query_fields(param)
if len(body_params) > 1:
params = "\n ".join(f"{x.name}: {x.annotation.__name__}" for x in body_params)
msg = (
f"\n\nUsing multiple body fields in {method} handler "
f"`{handler.__module__}.{handler.__name__}` is confusing.\n"
f"Use only one of the following parameters:\n {params}\n"
)
raise exceptions.ConfigurationError(msg)
return fields
@staticmethod
def generate_path_field(param):
try:
schema = PRIMITIVES[param.annotation](description=param.description)
except KeyError:
raise TypeError(
f"Annotation {param.annotation} is not suitable for path parameter `{param.name}`"
)
return document.Field(name=param.name, location="path", schema=schema)
@staticmethod
def generate_query_fields(param):
t = param.annotation
kwargs = {"description": param.description}
if t in PRIMITIVES:
schema = PRIMITIVES[t]
else:
o = getattr(t, "__origin__", t)
try:
generic = issubclass(o, (typing.Sequence, typing.Set, typing.Tuple))
except TypeError:
generic = False
if generic:
schema = Array
if issubclass(o, typing.Tuple):
if hasattr(t, "__args__") and not t._special:
if len(t.__args__) == 2 and t.__args__[1] is ...:
try:
kwargs["items"] = PRIMITIVES[t.__args__[0]]()
except KeyError:
raise TypeError(
f"Annotation `{param.name}: {param.annotation}` is not allowed"
)
else:
try:
kwargs["items"] = [PRIMITIVES[arg]() for arg in t.__args__]
except KeyError:
raise TypeError(
f"Annotation `{param.name}: {param.annotation}` is not allowed"
)
else:
kwargs["unique_items"] = issubclass(o, typing.Set)
if hasattr(t, "__args__") and not t._special:
try:
kwargs["items"] = PRIMITIVES[t.__args__[0]]()
except KeyError:
raise TypeError(
f"Annotation `{param.name}: {param.annotation}` is not allowed"
)
else:
return []
required = False
if param.default is param.empty:
required = True
elif param.default is None:
kwargs["default"] = None
kwargs["allow_null"] = True
else:
kwargs["default"] = param.default
schema = schema(**kwargs)
return [document.Field(name=param.name, location="query", required=required, schema=schema)]
@staticmethod
def generate_query_fields_from_schema(param):
schema = param.annotation.make_validator()
return [
document.Field(name=name, location="query", required=(name in schema.required), schema=field)
for name, field in schema.properties.items()
]
def generate_response(self, handler):
annotation = inspect.signature(handler).return_annotation
if annotation in (None, inspect.Signature.empty):
return document.Response(encoding="application/json", status_code=204)
annotation = self.coerce_generics(annotation)
return document.Response(encoding="application/json", status_code=200, schema=annotation)
def coerce_generics(self, t):
if is_schema(t):
return t
if t in PRIMITIVES:
return PRIMITIVES[t]()
o = getattr(t, "__origin__", t)
if o is typing.Union:
args = [self.coerce_generics(x) for x in t.__args__]
return Union(any_of=args)
if issubclass(o, (typing.Sequence, typing.Set)):
unique_items = issubclass(o, typing.Set)
if hasattr(t, "__args__") and not t._special:
arg = self.coerce_generics(t.__args__[0])
return Array(items=Reference(to=arg) if is_schema(arg) else arg,
unique_items=unique_items)
else:
return Array(unique_items=unique_items)
elif issubclass(o, typing.Mapping):
if hasattr(t, "__args__") and not t._special:
arg = self.coerce_generics(t.__args__[1])
return Object(additional_properties=Reference(to=arg) if is_schema(arg) else arg)
else:
return Object(additional_properties=True)
elif issubclass(o, typing.Tuple):
if hasattr(t, "__args__") and not t._special:
if len(t.__args__) == 2 and t.__args__[1] is ...:
arg = self.coerce_generics(t.__args__[0])
return Array(items=Reference(to=arg) if is_schema(arg) else arg)
else:
args = [
(Reference(x) if is_schema(x) else x)
for x in [self.coerce_generics(arg) for arg in t.__args__]
]
return Array(items=args)
else:
return Array()
return Any()
def generate_document(routes):
content = []
for item in routes:
if isinstance(item, Route) and item.documented:
content.append(item.link)
elif isinstance(item, Include) and item.documented:
content.append(item.section)
for link in item.section.get_links():
link.url = item.url + link.url
return document.Document(content=content)
```
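`Include` groups routes under a URL prefix, and a dotted `routes` string is resolved through `import_path` looking for a `routes`/`ROUTES` attribute. A brief illustrative sketch (the `api.routes` module is hypothetical):
```python
from stark.server.core import Include, Route

def health() -> dict:
    """Liveness probe."""
    return {"ok": True}

routes = [
    Route("/health", method="GET", handler=health),
    Include("/api", name="api", routes="api.routes"),  # hypothetical module
]
```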
#### File: stark/server/handlers.py
```python
from stark import App, http
from stark.codecs import OpenAPICodec
from stark.server.asgi import ASGIReceive, ASGIScope, ASGISend
from stark.server.wsgi import WSGIEnviron, WSGIStartResponse
def serve_schema(app: App):
codec = OpenAPICodec()
content = codec.encode(app.document)
headers = {'Content-Type': 'application/vnd.oai.openapi'}
return http.Response(content, headers=headers)
def serve_documentation(app: App):
template_name = 'apistar/index.html'
code_style = default_code_style
return app.render_template(
template_name,
document=app.document,
langs=['python', 'javascript'],
code_style=code_style
)
def serve_static_wsgi(app: App, environ: WSGIEnviron, start_response: WSGIStartResponse):
return app.statics(environ, start_response)
async def serve_static_asgi(app: App, scope: ASGIScope, receive: ASGIReceive, send: ASGISend):
instance = app.statics(scope)
await instance(receive, send)
default_code_style = """
.highlight.python .word{color:#d372e3;}
.highlight.python .string{color:#8bc76c;}
.highlight.python .attr{color:#42b0f5;}
.highlight.python .kwarg{color:#db985c;}
.highlight.python .global{color:#1fb8c4;}
"""
```
#### File: stark/server/templates.py
```python
import typing
from stark.compat import jinja2
from stark.server.utils import get_path
def get_jinja_prefix_loader(dirs):
return jinja2.PrefixLoader({
prefix: get_jinja_path_loader(path)
for prefix, path in dirs.items()
})
def get_jinja_path_loader(path):
return jinja2.FileSystemLoader(get_path(path))
class BaseTemplates:
def render_template(self, path: str, **context):
raise NotImplementedError()
class Templates(BaseTemplates):
def __init__(self,
template_dirs: typing.Union[str, list, tuple, dict],
global_context: dict = None):
if jinja2 is None:
raise RuntimeError('`jinja2` must be installed to use `Templates`.')
global_context = global_context if global_context else {}
if not isinstance(template_dirs, (list, tuple)):
template_dirs = [template_dirs]
loaders = []
for template_dir in template_dirs:
if isinstance(template_dir, dict):
loaders.append(get_jinja_prefix_loader(template_dir))
else:
loaders.append(get_jinja_path_loader(template_dir))
loader = jinja2.ChoiceLoader(loaders) if len(loaders) > 1 else loaders[0]
self.env = jinja2.Environment(autoescape=True, loader=loader)
for key, value in global_context.items():
self.env.globals[key] = value
def render_template(self, path: str, **context):
template = self.env.get_template(path)
return template.render(**context)
``` |
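Usage sketch for `Templates`; the directory paths are illustrative, and `jinja2` must be installed:
```python
from stark.server.templates import Templates

templates = Templates(
    ["./templates", {"admin": "./admin_templates"}],   # plain and prefixed loaders
    global_context={"site_name": "Example"},
)
html = templates.render_template("index.html", user="ada")
```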
{
"source": "aacienfuegos/pyattck",
"score": 3
} |
#### File: pyattck/ics/mitigation.py
```python
from .attckobject import AttckObject
class AttckMitigation(AttckObject):
"""ICS MITRE ATT&CK Mitigation object.
A child class of AttckObject
Creates objects which have been categorized as potential mitigations
Example:
You can iterate over a `mitigations` list and access specific
properties and relationship properties.
The following relationship properties are accessible:
1. techniques
1. To iterate over an `mitigations` list, do the following:
.. code-block:: python
from pyattck import Attck
attck = Attck()
for mitigation in attck.ics.mitigations:
print(mitigation.id)
print(mitigation.name)
print(mitigation.description)
# etc.
2. To access relationship properties, do the following:
.. code-block:: python
from pyattck import Attck
attck = Attck()
for mitigation in attck.ics.mitigations:
print(mitigation.id)
print(mitigation.name)
print(mitigation.description)
# etc.
for technique in mitigation.techniques:
print(technique.name)
print(technique.description)
# etc.
Arguments:
attck_obj (json) -- Takes the raw Mitre ATT&CK Json object
AttckObject (dict) -- Takes the Mitre ATT&CK Json object as a kwargs values
"""
def __init__(self, attck_obj = None, **kwargs):
"""
This class represents mitigation guidance as defined by the
ICS MITRE ATT&CK framework.
Keyword Arguments:
attck_obj {json} -- A ICS MITRE ATT&CK Framework json object (default: {None})
"""
super(AttckMitigation, self).__init__(**kwargs)
self.__attck_obj = attck_obj
self.created_by_ref = self._set_attribute(kwargs, 'created_by_ref')
self.id = self._set_id(kwargs)
self.name = self._set_attribute(kwargs, 'name')
self.description = self._set_attribute(kwargs, 'description')
self.external_reference = self._set_reference(kwargs)
self.created = self._set_attribute(kwargs, 'created')
self.modified = self._set_attribute(kwargs, 'modified')
self.stix = self._set_attribute(kwargs, 'id')
self.type = self._set_attribute(kwargs, 'type')
self.wiki = self._set_wiki(kwargs)
self.contributor = self._set_attribute(kwargs, 'contributor')
self.set_relationships(self.__attck_obj)
@property
def techniques(self):
"""
Returns all technique objects as a list that are associated with this
mitigation advice from the ICS MITRE ATT&CK Framework
Returns:
[list] -- A list of related technique objects defined within the
ICS MITRE ATT&CK Framework for a mitigation object
"""
from .technique import AttckTechnique
return_list = []
item_dict = {}
for item in self.__attck_obj['objects']:
if 'type' in item:
if item['type'] == 'attack-pattern':
item_dict[item['id']] = item
if self._RELATIONSHIPS.get(self.stix):
for item in self._RELATIONSHIPS[self.stix]:
if item in item_dict:
return_list.append(AttckTechnique(attck_obj=self.__attck_obj, **item_dict[item]))
return return_list
``` |
{
"source": "aacsspkt/autodealerappliation",
"score": 3
} |
#### File: app/auth/forms.py
```python
from django import forms
from django.contrib.auth.forms import (
UserCreationForm,
PasswordResetForm,
UserChangeForm,
SetPasswordForm,
)
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _
class LoginForm(forms.Form):
username = forms.CharField(
required=True, widget=forms.TextInput(attrs={"class": "form-control"})
)
password = forms.CharField(
required=True, widget=forms.PasswordInput(attrs={"class": "form-control"})
)
def clean(self):
cleaned_data = super().clean()
username = cleaned_data["username"]
password = cleaned_data["password"]
user = authenticate(username=username, password=password)
if user is not None:
cleaned_data["user"] = user
else:
raise ValidationError(
_("Invalid Credentials"),
code="invalid",
)
class SignUpForm(UserCreationForm):
username = forms.CharField(
required=True, widget=forms.TextInput(attrs={"class": "form-control"})
)
email = forms.EmailField(
required=True, widget=forms.EmailInput(attrs={"class": "form-control"})
)
password1 = forms.CharField(
required=True, widget=forms.PasswordInput(attrs={"class": "form-control"})
)
password2 = forms.CharField(
required=True, widget=forms.PasswordInput(attrs={"class": "form-control"})
)
class Meta:
model = User
fields = ("username", "email", "password1", "<PASSWORD>")
class CustomPasswordResetForm(PasswordResetForm):
email = forms.EmailField(
label=_("Email"),
max_length=254,
widget=forms.EmailInput(
attrs={"class": "form-control", "autocomplete": "email"}
),
)
from django.contrib.auth import password_validation
class CustomSetPasswordForm(SetPasswordForm):
new_password1 = forms.CharField(
label=_("New password"),
strip=False,
help_text=password_validation.password_validators_help_text_html(),
widget=forms.PasswordInput(
attrs={"class": "form-control mb-2", "autocomplete": "new-password"}
),
)
new_password2 = forms.CharField(
label=_("New password confirmation"),
strip=False,
widget=forms.PasswordInput(
attrs={"class": "form-control mb-2", "autocomplete": "new-password"}
),
)
class UserEditForm(forms.ModelForm):
username = forms.CharField(
required=True, widget=forms.TextInput(attrs={"class": "form-control"})
)
email = forms.EmailField(
required=True, widget=forms.EmailInput(attrs={"class": "form-control"})
)
first_name = forms.CharField(
required=True, widget=forms.TextInput(attrs={"class": "form-control"})
)
last_name = forms.CharField(
required=True, widget=forms.TextInput(attrs={"class": "form-control"})
)
class Meta:
model = User
fields = ["first_name", "last_name", "username", "email"]
```
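A hypothetical view using `LoginForm`; the template path and the `home` URL name are illustrative. Note that `clean()` stashes the authenticated user in `cleaned_data["user"]`, which the view reuses:
```python
from django.contrib.auth import login as auth_login
from django.shortcuts import redirect, render

from .forms import LoginForm

def login_view(request):
    form = LoginForm(request.POST or None)
    if request.method == "POST" and form.is_valid():
        auth_login(request, form.cleaned_data["user"])  # set in LoginForm.clean
        return redirect("home")
    return render(request, "auth/login.html", {"form": form})
```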
#### File: app/customer/forms.py
```python
import mimetypes
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _
from .models import Customer, District, State, Gender
class CustomerForm(forms.ModelForm):
citizenship_no = forms.CharField(
max_length=50,
required=True,
label="Citizenship Number:",
widget=forms.widgets.TextInput(attrs={"class": "form-control"}),
)
pan_no = forms.CharField(
max_length=50,
label="PAN Number:",
widget=forms.TextInput(attrs={"class": "form-control"}),
)
fullname = forms.CharField(
max_length=200,
required=True,
label="Customer Name:",
widget=forms.TextInput(attrs={"class": "form-control"}),
)
dob = forms.DateField(
label="Date of Birth:",
required=True,
widget=forms.TextInput(
attrs={
"class": "custom-control",
"type": "date",
},
),
)
gender = forms.ChoiceField(
choices=Gender.choices,
required=True,
label="Gender:",
widget=forms.Select(attrs={"class": "custom-select"}),
)
email = forms.EmailField(
max_length=254,
required=True,
label="Email:",
widget=forms.widgets.EmailInput(attrs={"class": "form-control"}),
)
phone = forms.CharField(
max_length=20,
required=True,
label="Phone:",
widget=forms.TextInput(attrs={"class": "form-control"}),
)
occupation = forms.CharField(
max_length=200,
required=True,
label="Occupation:",
widget=forms.TextInput(attrs={"class": "form-control"}),
)
city = forms.CharField(
max_length=200,
required=True,
label="City:",
widget=forms.TextInput(attrs={"class": "form-control"}),
)
district = forms.ChoiceField(
choices=District.choices,
label="District:",
widget=forms.Select(attrs={"class": "custom-select"}),
)
state = forms.ChoiceField(
choices=State.choices,
label="State:",
widget=forms.Select(attrs={"class": "custom-select"}),
)
country = forms.CharField(
max_length=50,
required=True,
label="Country:",
widget=forms.TextInput(attrs={"class": "form-control"}),
)
address = forms.CharField(
max_length=500,
required=True,
label="Address:",
widget=forms.Textarea(attrs={"rows": "4", "class": "form-control"}),
)
class Meta:
model = Customer
fields = (
"citizenship_no",
"pan_no",
"fullname",
"dob",
"gender",
"email",
"phone",
"occupation",
"city",
"district",
"state",
"country",
"state",
"address",
)
# # check if any errors occur here
# def save(self, commit: bool = ...):
# firstname = self.cleaned_data["firstname"]
# middlename = self.cleaned_data["middlename"]
# surname = self.cleaned_data["surname"]
# if middlename:
# fullname = f"{firstname} {middlename} {surname}"
# else:
# fullname = f"{firstname} {surname}"
# self.fullname = fullname
# return super().save(commit)
from .validators import FileValidator
class CustomerImportForm(forms.Form):
validate_file = FileValidator(
max_size=26214400,
content_types=(
"text/comma-separated-values",
"application/csv",
"text/csv",
"application/excel",
"application/vnd.ms-excel",
"application/vnd.msexcel",
"text/plain",
),
)
csvfile = forms.FileField(
allow_empty_file=False,
required=True,
widget=forms.widgets.ClearableFileInput(
attrs={
"class": "custom-file custom-file-control",
"accept": ".csv"
}
),
validators=[
validate_file,
],
)
```
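A hypothetical upload view for `CustomerImportForm`; the CSV column mapping is elided since the expected header layout isn't shown here:
```python
import csv
import io

from django.shortcuts import render

from .forms import CustomerImportForm

def import_customers(request):
    form = CustomerImportForm(request.POST or None, request.FILES or None)
    if request.method == "POST" and form.is_valid():
        decoded = form.cleaned_data["csvfile"].read().decode("utf-8")
        for row in csv.DictReader(io.StringIO(decoded)):
            ...  # map row columns onto Customer fields here
    return render(request, "customer/import.html", {"form": form})
```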
#### File: app/customer/tests.py
```python
from datetime import date
from django.test import TestCase
from customer.models import (
Customer,
Gender,
State,
District,
)
class CustomerModelTest(TestCase):
def setUp(self):
self.customer = Customer.objects.create(
citizenship_no= "1234-567-89",
pan_no = "12345",
fullname= "<NAME>",
dob = date.today(),
gender = Gender.MALE,
email = "<EMAIL>",
phone = "123456789",
occupation = "Testing",
city = "Kathmandu",
district = District.KATHMANDU,
state = State.BAGMATI_PROVINCE,
country = "Nepal",
address = "Maitighar, Kathmandu, Nepal"
)
def tearDown(self):
self.customer.delete()
def test_customer_model_string(self):
self.assertEqual(str(self.customer), "Test Customer")
def test_get_absolute_url(self):
self.assertEqual(self.customer.get_absolute_url(), f"/customers/{self.customer.id}/")
``` |
{
"source": "AadamAbrahams/covid_traffic_controller_demonstrator",
"score": 4
} |
#### File: covid_traffic_controller_demonstrator/covid_traffic_controller_demonstrator/server.py
```python
from Tkinter import *
import tkFont
import time
from omlaxtcp import TCP
def main():
"""
Generates a GUI to illustrate the number of individuals within a building.
Listens for messages on port 1234 for updates on directional movement
and temperature, and updates the GUI accordingly.
"""
window = Tk()
# Creates a new GUI window.
window.geometry("1920x1080")
# Configures display ratio for the window.
count = 0
Flag = False
fontStyle = tkFont.Font(family="Lucida Grande", size=55)
# Configure GUI text font
label = Label(window, text="There are currently \n"
+ str(count) + "\n individuals inside the shopping center.",
fg="black", bg="light blue", font=fontStyle)
# Creates a label to display number of individuals in building.
label2 = Label(window,
text="Please stand infront of the temperature sensor,"
"\n until the screen displays either a green or red "
"background, \n if you wish to enter.",
fg="black", bg="light blue", font=fontStyle)
# Creates a label to display instruction the user is requested to perform.
label.pack(expand=True, fill=BOTH)
# Link the label to the GUI window.
label2.pack(expand=True, fill=BOTH)
# Link the label to the GUI window.
TCP.server_setup(5)
# Establish device as a server and open port for listening.
while True:
window.update_idletasks()
# Updates entire GUI layout and text.
data = TCP.server_recieve(512)
# Retrieves message sent by client.
if (data == '1') and Flag:
Flag = False
label['bg'] = "light blue"
# Changes top half of GUI background to light blue.
label2['bg'] = "light blue"
# Changes bottom half of GUI background to light blue.
label2['text'] = "Please stand infront of the temperature "
"sensor, \n until the screen displays either a green or "
"red background, \n if you wish to enter."
if count == 20:
pass
else:
if count == 0:
count = count + 1
# Increment number of individuals in store.
label['text'] = "There is currently \n" + str(count)
"\n individual inside the shopping center."
# Changes top half text to new number in building.
else:
count = count + 1
# Increment number of individuals in store.
label['text'] = "There are currently \n" + str(count)
"\n individuals inside the shopping center."
# Changes top half text to new number in building.
window.update_idletasks()
# Updates entire GUI layout and text.
if data == '0':
# Checks if message reflects an individual leaving building
label['bg'] = "light blue"
# Changes top half of GUI background to light blue.
label2['bg'] = "light blue"
# Changes bottom half of GUI background to light blue.
label2['text'] = "Please stand infront of the temperature "
"sensor, \n until the screen displays either a green or "
"red background, \n if you wish to enter."
if count == 0:
pass
else:
if count == 2:
count = count - 1
# Decrement number of individuals in store.
label['text'] = "The is currently \n" + str(count)
"\n individual inside the shopping center."
# Changes top half text to new number in building.
else:
count = count - 1
# Decrement number of individuals in store.
label['text'] = "There are currently \n" + str(count)
"\n individuals inside the shopping center."
# Changes top half text to new number in building.
window.update_idletasks()
# Updates entire GUI layout and text.
if (data != '0') and (data != '1'):
# Checks if recieved message is a temperature value.
if (float(data) < 38):
# Checks if individuals temperature is regular.
Flag = True
label['bg'] = "pale green"
# Changes top half of GUI background to pale green.
label2['bg'] = "pale green"
# Changes bottom half of GUI background to pale green.
label2['text'] = "Normal temperature detected. "
"\n You may proceed."
# Changes bottom half text to indicate normal temp scan.
if (float(data) >= 38):
# Checks if individuals temperature is above the norm.
label['bg'] = "salmon"
# Changes top half of GUI background to salmon.
label2['bg'] = "salmon"
# Changes bottom half of GUI background to salmon.
label2['text'] = "Above normal temperature detected. "
"\n Access denied."
# Changes bottom half text to indicate above normal temp scan.
window.update_idletasks()
# Updates entire GUI layout and text.
time.sleep(5)
label2['text'] = "Please stand infront of the temperature "
"sensor, \n until the screen displays either a green or "
"red background, \n if you wish to enter."
# Changes bottom half text to request temperature scan.
label2['bg'] = "light blue"
# Changes top half of GUI background to light blue.
label['bg'] = "light blue"
# Changes bottom half of GUI background to light blue.
window.update_idletasks()
# Updates entire GUI layout and text.
print(data)
# Prints message recieved from client.
if __name__ == "__main__":
setup()
main()
``` |
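The wire protocol above is simple: `'1'` marks an entry, `'0'` an exit, and any other payload is parsed as a temperature. A stdlib stand-in for the hardware client (the port follows the docstring; whether `omlaxtcp` adds any message framing is not shown here, so this may need adapting):
```python
import socket

with socket.create_connection(("127.0.0.1", 1234)) as sock:
    sock.sendall(b"36.5")  # a normal temperature reading
```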
{
"source": "AadamLok/abcd",
"score": 3
} |
#### File: abcd/Backend/app.py
```python
import os
from flask import Flask, request, jsonify
from flask_mysqldb import MySQL
import random
import string
from messaging import email_message
'''
cur.execute("CREATE TABLE TempData (username VARCHAR(20), temperature smallint)")
cur.execute("CREATE TABLE user_USERNAME (dateAndtime VARCHAR(20), sensor text)")
cur.execute("CREATE TABLE login (HardWareID VARCHAR(10), username VARCHAR(20), hash VARCHAR(50), email VARCHAR(50))")
cur.execute("CREATE TABLE Emergency (username VARCHAR(20), email text)")
cur.execute("CREATE TABLE Token (username VARCHAR(20), token VARCHAR(10))")
'''
app = Flask(__name__)
app.config['MYSQL_HOST'] = '172.16.17.32'
app.config['MYSQL_USER'] = 'BackEnd'
app.config['MYSQL_PASSWORD'] = '<PASSWORD>!!'
app.config['MYSQL_DB'] = 'user_info'
app.config['MYSQL_CURSORCLASS'] = 'DictCursor'
mysql = MySQL(app)
@app.route('/')
def mainRoute():
cur = mysql.connection.cursor()
cur.close()
return "Health Belt backend is working."
'''
{
"HID": "",
"heart": [],
"temp": num
}
'''
@app.route('/upload', methods=['POST'])
def upload():
data = request.get_json()
hid = data["HID"]
cur = mysql.connection.cursor()
cur.execute("SELECT HardWareID, username FROM login WHERE HardWareID = '"+hid+"'")
rv = cur.fetchall()
if len(rv) == 0:
return "Hardware not connected"
else:
cur.execute("INSERT INTO user_" +
rv[0]["username"]+" (sensor) VALUES ('"+str(data["heart"])+"')")
cur.execute("UPDATE TempData SET temperature="+str(data['temp'])+" WHERE username='"+rv[0]["username"]+"'")
mysql.connection.commit()
cur.close()
return "Done!"
'''
{
"HID": "",
"reason": "", covid/heart-attack
}
'''
@app.route('/emergency', methods=['POST'])
def emergency():
data = request.get_json()
hid = data["HID"]
cur = mysql.connection.cursor()
cur.execute("SELECT username FROM login WHERE HardWareID='"+hid+"'")
rv = cur.fetchall()
if len(rv) == 0:
return "Hardware not connected"
else:
username = rv[0]["username"]
cur.execute("SELECT email FROM emergency WHERE username = '"+username+"'")
rv = cur.fetchall()
if len(rv) == 0:
return "No Emergency Contacts"
emails = rv[0]["email"].split()
##Reason for emails: we decided to use the users' email addresses as opposed to SMS messaging because SMS is a paid service.
##In an ideal case, we would use a paid service where we simply pass in the person's phone number, so the SMS API can resolve the user's service provider.
for email in emails:
email_message("EMERGENCY", "You are an emergency contact of '" +
username + "' for COVID-19 or a heart attack", email)
return "Done!"
'''
{
"HID":"",
"username":"",
"email": "",
"hash": ""
}
'''
@app.route('/register', methods=['GET'])
def register():
data = request.get_json()
username = data["username"]
cur = mysql.connection.cursor()
cur.execute('SELECT username FROM login WHERE username = "'+username+'"')
rv = cur.fetchall()
if len(rv) != 0:
return jsonify(error=True,token="")
else:
cur.execute("INSERT INTO login VALUES ('"+data["HID"]+"','"+username+"','"+data["hash"]+"','"+data["email"]+"')")
cur.execute("CREATE TABLE user_"+username +
" (dateAndtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP, sensor TEXT)")
cur.execute("INSERT INTO TempData VALUES ('"+username+"',32)")
letters = string.ascii_letters
_token = ''.join(random.choice(letters) for i in range(10))
cur.execute("INSERT INTO Token VALUES ('"+username+"','"+token+"')")
mysql.connection.commit()
cur.close()
return jsonify(error=False,token=_token)
'''
{
"username" : ""
}
'''
@app.route('/login', methods=['GET'])
def login():
data = request.get_json()
username = data["username"]
cur = mysql.connection.cursor()
cur.execute("SELECT hash FROM login WHERE username='"+username+"'")
rv = cur.fetchall()
if len(rv) == 0:
return jsonify(error=True,hash="")
_hash = rv[0]["hash"]
cur.close()
return jsonify(hash=_hash)
'''
{
"username" : ""
}
'''
@app.route('/getToken', methods=['GET'])
def get_token():
data = request.get_json()
username = data["username"]
cur = mysql.connection.cursor()
cur.execute("SELECT hash FROM login WHERE username='"+username+"'")
rv = cur.fetchall()
if len(rv) == 0:
return jsonify(error=True,token="")
cur.execute("SELECT token FROM Token WHERE username='"+username+"'")
rv = cur.fetchall()
_token = rv[0]["token"]
cur.close()
return jsonify(token=_token)
'''
{
"username":"",
"token":"",
"contact":"",
}
'''
@app.route('/newContact', methods=['POST'])
def newContact():
data = request.get_json()
username = data["username"]
cur = mysql.connection.cursor()
cur.execute("SELECT token FROM Token WHERE username='"+username+"'")
rv=cur.fetchall()
if len(rv) == 0:
return "Username not registered"
if rv[0]["token"] != data["token"]:
return "Unsuccessful"
cur.execute("UPDATE emergency SET email='"+data["contact"]+"'")
mysql.connection.commit()
cur.close()
return "Successful!"
'''
{
"username":"",
"token":"",
"email":""
}
'''
@app.route('/changeEmail', methods=['POST'])
def newEmail():
data = request.get_json()
username = data["username"]
cur = mysql.connection.cursor()
cur.execute("SELECT token FROM Token WHERE username='"+username+"'")
rv=cur.fetchall()
if len(rv) == 0:
return "Username not registered"
if rv[0]["token"] != data["token"]:
return "Unsuccessful"
cur.execute("UPDATE login SET email='"+data["email"]+"' WHERE username='"+username+"'")
mysql.connection.commit()
cur.close()
return "Successful!"
'''
{
"username":"",
"token":"" ,
}
'''
@app.route('/getData', methods=['GET'])
def getData():
data = request.get_json()
username = data["username"]
cur = mysql.connection.cursor()
cur.execute("SELECT token FROM Token WHERE username='"+username+"'")
rv=cur.fetchall()
if len(rv) == 0:
return "Username not registered"
if rv[0]["token"] != data["token"]:
return "Unsuccessful"
cur.execute("SELECT * FROM user_"+username+ "ORDER BY dateAndtime DESC LIMIT 10")
rv = curr.fetchall()
_heart = rv
curr.execute("SELECT temperature FROM TempData WHERE username = '"+username+"'")
rv = curr.fetchall()
_temp = rv
return jsonify(heart=_heart, temp=_temp)
if __name__ == "__main__":
app.run(debug=True)
```
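A note on the handlers above: every query splices user input directly into the SQL string, which is open to SQL injection. Below is a minimal sketch of the `/login` handler rewritten with MySQLdb-style `%s` parameter binding; same route and schema as above, shown for illustration rather than taken from the repo:
```python
@app.route('/login', methods=['GET'])
def login():
    data = request.get_json()
    cur = mysql.connection.cursor()
    # The driver binds the value itself, so user input never becomes SQL text.
    cur.execute("SELECT hash FROM login WHERE username=%s", (data["username"],))
    rv = cur.fetchall()
    cur.close()
    if len(rv) == 0:
        return jsonify(error=True, hash="")
    return jsonify(hash=rv[0]["hash"])
```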
#### File: abcd/Backend/messaging.py
```python
import smtplib
from email.message import EmailMessage
def email_message(subject, content, toRecipient):
msg = EmailMessage()
msg.set_content(content)
msg['Subject'] = subject
msg['To'] = toRecipient
user = "<EMAIL>"
msg['From'] = user
password = "<PASSWORD>"
server = smtplib.SMTP("smtp.gmail.com",587)
server.starttls()
server.login(user,password)
server.send_message(msg)
server.quit()
if __name__ == "__main__":
email_message("Hello","Emergency","<EMAIL>")
``` |
{
"source": "AadamLok/wwd",
"score": 3
} |
#### File: wwd/tests/test_whatsapp_web_driver.py
```python
from whatsapp_web_driver import WhatsappWebDriver, ChromeDriverNotWorking, MaxTimeOut
import pytest
import time
def test_WWD_init_returns_error():
with pytest.raises(ChromeDriverNotWorking):
pytest.WWD = WhatsappWebDriver(chrome_driver=" ")
def test_WWD_start():
try:
pytest.WWD = WhatsappWebDriver()
except MaxTimeOut:
pytest.fail("Webpage took too much time to load, check your internet connection.")
except:
pytest.fail("Initiating WWD raises Exception.")
def test_is_logged_in():
assert pytest.WWD.is_logged_in() == False, "Checking for web whatsapp not logged in"
start_time = time.time()
while not pytest.WWD.is_logged_in():
if time.time()-start_time > 20:
pytest.fail("Checking for web whatsapp logged in failed.")
def test_close():
assert pytest.WWD.close() == True, "Close the driver"
``` |
{
"source": "aadamson/distributed",
"score": 2
} |
#### File: distributed/dashboard/core.py
```python
from distutils.version import LooseVersion
import os
import warnings
import bokeh
from bokeh.server.server import Server
from tornado import web
from urllib.parse import urljoin
if LooseVersion(bokeh.__version__) < LooseVersion("0.13.0"):
warnings.warn(
"\nDask needs bokeh >= 0.13.0 for the dashboard."
"\nContinuing without the dashboard."
)
raise ImportError("Dask needs bokeh >= 0.13.0")
class BokehServer(object):
server_kwargs = {}
def listen(self, addr):
if self.server:
return
if isinstance(addr, tuple):
ip, port = addr
else:
port = addr
ip = None
for i in range(5):
try:
server_kwargs = dict(
port=port,
address=ip,
check_unused_sessions_milliseconds=500,
allow_websocket_origin=["*"],
use_index=False,
extra_patterns=[
(
r"/",
web.RedirectHandler,
{"url": urljoin(self.prefix.rstrip("/") + "/", r"status")},
)
],
)
server_kwargs.update(self.server_kwargs)
self.server = Server(self.apps, **server_kwargs)
self.server.start()
handlers = [
(
self.prefix + r"/statics/(.*)",
web.StaticFileHandler,
{"path": os.path.join(os.path.dirname(__file__), "static")},
)
]
self.server._tornado.add_handlers(r".*", handlers)
return
except (SystemExit, EnvironmentError) as exc:
if port != 0:
if "already in use" in str(
exc
) or "Only one usage of" in str( # Unix/Mac
exc
): # Windows
msg = (
"Port %d is already in use. "
"\nPerhaps you already have a cluster running?"
"\nHosting the diagnostics dashboard on a random port instead."
% port
)
else:
msg = (
"Failed to start diagnostics server on port %d. " % port
+ str(exc)
)
warnings.warn("\n" + msg)
port = 0
if i == 4:
raise
@property
def port(self):
return (
self.server.port
or list(self.server._http._sockets.values())[0].getsockname()[1]
)
def stop(self):
for context in self.server._tornado._applications.values():
context.run_unload_hook()
self.server._tornado._stats_job.stop()
self.server._tornado._cleanup_job.stop()
if self.server._tornado._ping_job is not None:
self.server._tornado._ping_job.stop()
# https://github.com/bokeh/bokeh/issues/5494
if LooseVersion(bokeh.__version__) >= "0.12.4":
self.server.stop()
``` |
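`BokehServer` is written as a base class: `listen()` assumes the subclass provides `apps`, `prefix`, and an initial `server` attribute. A hypothetical minimal subclass, only to make that contract visible (not a class from the distributed codebase):
```python
class MyDashboard(BokehServer):
    server_kwargs = {}  # extra keyword arguments merged into the bokeh Server

    def __init__(self, apps, prefix=""):
        self.apps = apps      # dict mapping routes to Bokeh applications
        self.prefix = prefix  # used for the "/" -> "status" redirect
        self.server = None    # listen() returns early once this is set
```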
{
"source": "aadamson/pfr-api",
"score": 3
} |
#### File: pfr-api/pfr_api/player.py
```python
import re
import pandas as pd
import requests
from bs4 import BeautifulSoup
from pfr_api.config import BASE_URL
from pfr_api.parse.parse import parse_stats_table
class Player(object):
entity_type = 'players'
def __init__(
self,
name: str,
player_id: str,
):
self._name = name
self._player_id = player_id
def _url_base(self):
return (
'{base}/{entity}/{first}/{id}'
.format(
base=BASE_URL,
entity=self.entity_type,
first=self._player_id[0],
id=self._player_id
)
)
def _gamelog_page(self, season: str = '') -> BeautifulSoup:
url = (
'{base}/gamelog/{season}'
.format(base=self._url_base(), season=season)
)
r = requests.get(url)
soup = BeautifulSoup(r.content, 'html.parser')
return soup
def _fantasy_page(self, season: str = '') -> BeautifulSoup:
url = (
'{base}/fantasy/{season}'
.format(base=self._url_base(), season=season)
)
r = requests.get(url)
soup = BeautifulSoup(r.content, 'html.parser')
return soup
def regular_season_gamelog(self, season: str = '') -> pd.DataFrame:
soup = self._gamelog_page(season)
results_table = soup.find('table', {'id': 'stats'})
columns, rows = parse_stats_table(
results_table,
stat_row_attributes={'id': re.compile(r'^stats\..*$')})
return pd.DataFrame(columns=columns, data=rows)
def playoffs_gamelog(self, season: str = '') -> pd.DataFrame:
soup = self._gamelog_page(season)
results_table = soup.find('table', {'id': 'stats_playoffs'})
columns, rows = parse_stats_table(
results_table,
stat_row_attributes={'id': re.compile(r'^stats\..*$')})
return pd.DataFrame(columns=columns, data=rows)
def fantasy(self, season: str = '') -> pd.DataFrame:
soup = self._fantasy_page(season)
results_table = soup.find('table', {'id': 'player_fantasy'})
# TODO handle weirdness with Inside 20 columns not being specific
# in data-stat field
columns, rows = parse_stats_table(results_table)
return pd.DataFrame(columns=columns, data=rows)
``` |
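A hypothetical usage sketch for `Player`; the player id below is a placeholder rather than a real pro-football-reference id, but it is formatted into URLs exactly as `_url_base` shows:
```python
# "ExamPl00" is illustrative only.
player = Player(name="Example Player", player_id="ExamPl00")
gamelog = player.regular_season_gamelog(season="2019")
print(gamelog.head())
```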
{
"source": "AadamZ5/py-controller-model",
"score": 3
} |
#### File: websocket_api/lib/data_model.py
```python
from .websocket_endpoint import WebsocketController
class Person:
def __init__(self, name, age):
self.name = name
self.age = age
class Customer(Person):
def __init__(self, name, age):
super().__init__(name, age)
class Employee(Person):
def __init__(self, name, age, employee_id):
self.employee_id = employee_id
super().__init__(name, age)
@WebsocketController.register_model
class DataModel:
def __init__(self):
self.persons = []
@WebsocketController.register_action
def add_employee(self, name, age, employee_id):
new_employee = Employee(name, age, employee_id)
self.persons.append(new_employee)
return {'employee': new_employee}
@WebsocketController.register_action
def add_customer(self, name, age):
new_customer = Customer(name, age)
self.persons.append(new_customer)
return {'customer': new_customer}
@WebsocketController.register_action
def get_persons(self):
return self.persons
```
#### File: websocket_api/lib/websocket_endpoint.py
```python
from controllermodel import GenericController
import asyncio
import websockets
import jsonpickle
class WebsocketController(GenericController):
def __init__(self, host, port):
super().__init__() #! This is important! Initialize the base class or you will get an error upon startup!
#Websocket asyncio setup. See https://websockets.readthedocs.io/en/stable/ for more information.
self._loop = asyncio.get_event_loop()
self.websocket = websockets.serve(self.handler, host, port)
print("Server running at ws://{0}:{1}".format(host, port))
self._loop.run_until_complete(self.websocket)
async def handler(self, ws, path, *args, **kw):
"""
This is the function that gets called when a new websocket connection arrives.
"""
await self.consumer_handler(ws, path, *args, **kw) # We only have a consumer handler here. For both a producer and consumer, see https://websockets.readthedocs.io/en/stable/intro.html#both
async def consumer_handler(self, ws, path, *args, **kwargs):
"""
The consumer_handler function will wait for data to be sent to us from the client, and try to find a corresponding action
to execute, which will return data to the client.
Send commands to this endpoint as `{ 'action': <<an action>>, 'data': { <<your keyword arguments>> } }`
"""
async for message in ws:
m = {} #Initialize our message dictionary
try:
m = jsonpickle.loads(message) # Load the (presumably) JSON message they sent
except Exception:
print("Error decoding message from " + str(ws.remote_address) + ". Message: " + str(message))
send = jsonpickle.dumps({"error": "Couldn't parse JSON data!"}, unpicklable=False, make_refs=False)
await ws.send(send)
else:
if(m != None):
action = m.get('action', None)
data = m.get('data', dict())
if action != None:
try:
r = self.execute_action(str(action), **data) # The main application will register functions to various commands. See if we can find one registered for the command sent.
# Note, if no function is found, we will just JSON pickle `None` which will just send a `null` back to the client.
# You may want to change this behavior by sending back an error message if the command received doesn't exist
except Exception as e:
r = {"error": str(e)}
r_json = jsonpickle.dumps(r, unpicklable=False, make_refs=False)
await ws.send(r_json)
else:
send = jsonpickle.dumps({"error": "No command to process!"}, unpicklable=False, make_refs=False)
await ws.send(send)
else:
send = jsonpickle.dumps({"error": "No data to parse!"}, unpicklable=False, make_refs=False)
await ws.send(send)
```
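For reference, a sketch of one client exchange under the protocol `consumer_handler` describes. The action name `add_customer` comes from `data_model.py`; the host, port, and argument values are illustrative:
```python
import asyncio
import json

import websockets


async def demo():
    async with websockets.connect("ws://localhost:8080") as ws:
        # Shape expected by consumer_handler:
        # {"action": <registered action>, "data": {<keyword arguments>}}
        await ws.send(json.dumps(
            {"action": "add_customer", "data": {"name": "Ada", "age": 36}}))
        print(await ws.recv())  # JSON-pickled result, or {"error": ...}


asyncio.get_event_loop().run_until_complete(demo())
```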
#### File: py-controller-model/tests/test_multimodel.py
```python
import pytest
from controllermodel import GenericController
@GenericController.register_model
class A:
def __init__(self):
self.a = "A variable"
@GenericController.register_action(action="actiona")
def myaction(self):
print(self.a)
return self.a
@GenericController.register_model
class B:
def __init__(self):
self.b = "B variable"
@GenericController.register_action(action="actionb")
def myaction(self):
print(self.b)
return self.b
def test_action():
a = A()
b = B()
gc = GenericController()
gc.connect_instance(a, b)
assert gc.execute_action('actiona') == "A variable"
assert gc.execute_action('actionb') == "B variable"
``` |
{
"source": "aadaniu/satori",
"score": 2
} |
#### File: src/backend/victorops.py
```python
import json
# -- third party --
import requests
# -- own --
from backend.common import register_backend, Backend
# -- code --
@register_backend
class VictorOpsBackend(Backend):
def send(self, users, event):
for user in users:
if 'victorops' not in user:
continue
url = user['victorops']
try:
routing_key = event['groups'][0]
except:
routing_key = 'default'
# status: PROBLEM OK EVENT FLAPPING TIMEWAIT ACK
if event['status'] in ('PROBLEM', 'EVENT'):
msg_type = 'CRITICAL'
elif event['status'] in ( 'OK', 'TIMEWAIT'):
msg_type = 'RECOVERY'
elif event['status'] == 'ACK':
msg_type = 'ACK'
else:
msg_type = 'INFO'
resp = requests.post(
url + '/' + routing_key,
headers={'Content-Type': 'application/json'},
timeout=10,
data=json.dumps({
'entity_id': event['title'],
'entity_display_name': event['title'],
'priority': event['level'],
'message_type': msg_type,
'state_message': event['text'],
}),
)
if not resp.ok:
raise Exception(resp.text)
``` |
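From the field accesses in `send`, the expected input shapes look roughly like this; inferred for illustration, not taken from satori documentation, and the endpoint URL is a placeholder:
```python
user = {"victorops": "https://example.invalid/victorops-endpoint"}  # placeholder URL
event = {
    "title": "cpu.idle low on host-1",  # becomes entity_id / entity_display_name
    "level": 1,                         # becomes priority
    "status": "PROBLEM",                # mapped to CRITICAL/RECOVERY/ACK/INFO
    "groups": ["ops"],                  # groups[0] is used as the routing key
    "text": "cpu.idle < 10 for 5m",     # becomes state_message
}
```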
{
"source": "aadarshkrishnan/SideProjects",
"score": 3
} |
#### File: aadarshkrishnan/SideProjects/bot.py
```python
from discord.ext import commands
import random
bot = commands.Bot(command_prefix= '!')
@bot.command(name = "idea", help = "Get a random side project idea")
async def idea(ctx):
await ctx.send("Ideas are hard")
topics = ['chat bot', 'cli', 'game', 'web bot', 'browser extension', 'api', 'website']
areas = ['pet care', 'doing homework', 'fitness']
idea = f'Create a new {random.choice(topics)} that helps with {random.choice(areas)} :slight_smile:'
await ctx.send(idea)
@bot.command(name = "calc", help = "Do a two number calculation where fn is +, -, /, *, **")
async def calc(ctx, x: float, fn: str, y: float):
if fn == '+':
await ctx.send(x + y)
elif fn == '-':
await ctx.send(x - y)
elif fn == '/':
await ctx.send(x / y)
elif fn == '*':
await ctx.send(x * y)
elif fn == '**':
await ctx.send(x ** y)
with open("BOT_TOKEN.txt", "r") as token_file:
TOKEN = token_file.read()
print("Token file read")
bot.run(TOKEN)
``` |
{
"source": "aadarsh-patel/spitfire",
"score": 2
} |
#### File: spitfire/scripts/crunner.py
```python
from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import object
import copy
import logging
import optparse
import os.path
import sys
import time
import traceback
import io as StringIO
from spitfire.compiler import compiler
from spitfire.compiler import options
from spitfire.compiler import util
from spitfire.compiler import visitor
from spitfire import runtime
from spitfire.runtime import runner
from spitfire.runtime import udn
# this class lets me check if placeholder caching is working properly by
# tracking the number of accesses for a single key
class ResolveCounter(object):
def __init__(self):
self._dict = {}
@property
def resolve_x(self):
return self._get_item('resolve_x')
@property
def resolve_y(self):
return self._get_item('resolve_y')
def _get_item(self, key):
if key in self._dict:
self._dict[key] += 1
else:
self._dict[key] = 1
return '%s%s' % (key, self._dict[key])
def __contains__(self, key):
return key.startswith('resolve')
def __getitem__(self, key):
if not key.startswith('resolve'):
raise KeyError(key)
return self._get_item(key)
def __getattr__(self, key):
if not key.startswith('resolve'):
raise AttributeError(key)
return self._get_item(key)
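# Example of the counting behaviour (assumed usage): each lookup of the same
# key returns a fresh suffix, so a template that fails to cache a placeholder
# renders resolve_x1, resolve_x2, ... instead of a repeated resolve_x1.
#   rc = ResolveCounter()
#   rc.resolve_x     # -> 'resolve_x1'
#   rc['resolve_x']  # -> 'resolve_x2'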
sys_modules = list(sys.modules.keys())
def reset_sys_modules():
for key in list(sys.modules.keys()):
if key not in sys_modules:
del sys.modules[key]
class TestRunner(object):
def __init__(self, spt_compiler, spt_options, spt_files):
self.compiler = spt_compiler
self.options = spt_options
self.files = spt_files
self._search_list = [
{'tier1': {'tier2': ResolveCounter()}},
{'nest': ResolveCounter()},
ResolveCounter(),
]
if self.options.test_input:
self._search_list.append(runner.load_search_list(
self.options.test_input))
self.buffer = StringIO.StringIO()
self.start_time = 0
self.finish_time = 0
self.num_tests_run = 0
self.num_tests_failed = 0
# return a copy of the search_list for each set of tests
@property
def search_list(self):
return copy.deepcopy(self._search_list)
def run(self):
self.begin()
for filename in self.files:
self.process_file(filename)
self.end()
def begin(self):
self.start_time = time.time()
def end(self):
self.finish_time = time.time()
print(file=sys.stderr)
if self.num_tests_failed > 0:
sys.stderr.write(self.buffer.getvalue())
print('-' * 70, file=sys.stderr)
print('Ran %d tests in %0.3fs' % (
self.num_tests_run, self.finish_time - self.start_time), file=sys.stderr)
print(file=sys.stderr)
if self.num_tests_failed > 0:
print('FAILED (failures=%d)' % self.num_tests_failed, file=sys.stderr)
sys.exit(1)
else:
print('OK', file=sys.stderr)
sys.exit(0)
def process_file(self, filename):
buffer = StringIO.StringIO()
reset_sys_modules()
classname = util.filename2classname(filename)
modulename = util.filename2modulename(filename)
test_output_path = os.path.join(self.options.test_output,
classname + '.txt')
if self.options.verbose:
sys.stderr.write(modulename + ' ... ')
compile_failed = False
if self.options.debug or self.options.compile:
try:
self.compiler.compile_file(filename)
except Exception as e:
compile_failed = True
print('=' * 70, file=buffer)
print('FAIL:', modulename, '(' + filename + ')', file=buffer)
print('-' * 70, file=buffer)
traceback.print_exc(None, buffer)
if self.options.debug:
if 'parse_tree' in self.options.debug_flags:
print("parse_tree:", file=buffer)
visitor.print_tree(self.compiler._parse_tree, output=buffer)
if 'analyzed_tree' in self.options.debug_flags:
print("analyzed_tree:", file=buffer)
visitor.print_tree(self.compiler._analyzed_tree,
output=buffer)
if 'optimized_tree' in self.options.debug_flags:
print("optimized_tree:", file=buffer)
visitor.print_tree(self.compiler._optimized_tree,
output=buffer)
if 'hoisted_tree' in self.options.debug_flags:
print("hoisted_tree:", file=buffer)
visitor.print_tree(self.compiler._hoisted_tree,
output=buffer)
if 'source_code' in self.options.debug_flags:
print("source_code:", file=buffer)
for i, line in enumerate(self.compiler._source_code.split(
'\n')):
print('% 3s' % (i + 1), line, file=buffer)
test_failed = False
if not self.options.skip_test:
import tests
current_output = None
raised_exception = False
try:
if self.options.debug or self.options.compile:
template_module = util.load_module_from_src(
self.compiler._source_code, filename, modulename)
else:
template_module = runtime.import_module_symbol(modulename)
except Exception as e:
# An exception here means the template is unavailable; the test
# fails.
test_failed = True
raised_exception = True
current_output = str(e)
if not test_failed:
try:
template_class = getattr(template_module, classname)
template = template_class(search_list=self.search_list)
current_output = template.main().encode('utf8')
except Exception as e:
# An exception here doesn't mean that the test fails
# necessarily since libraries don't have a class; as long as
# the expected output matches the exception, the test
# passes.
raised_exception = True
current_output = str(e)
if not test_failed:
if self.options.test_accept_result:
test_file = open(test_output_path, 'w')
test_file.write(current_output)
test_file.close()
try:
test_file = open(test_output_path)
except IOError as e:
# An exception here means that the expected output is
# unavailable; the test fails.
test_failed = True
raised_exception = True
current_output = str(e)
if test_failed:
test_output = None
else:
test_output = test_file.read()
if current_output != test_output:
test_failed = True
if self.options.debug:
print("expected output:", file=buffer)
print(test_output, file=buffer)
print("actual output:", file=buffer)
print(current_output, file=buffer)
if compile_failed or test_failed:
self.num_tests_failed += 1
if self.options.verbose:
sys.stderr.write('FAIL\n')
else:
sys.stderr.write('F')
current_output_path = os.path.join(self.options.test_output,
classname + '.failed')
f = open(current_output_path, 'w')
f.write(current_output)
f.close()
print('=' * 70, file=buffer)
print('FAIL:', modulename, '(' + filename + ')', file=buffer)
print('-' * 70, file=buffer)
print('Compare expected and actual output with:', file=buffer)
print(' '.join([' diff -u', test_output_path,
current_output_path]), file=buffer)
print('Show debug information for the test with:', file=buffer)
test_cmd = [arg for arg in sys.argv if arg not in self.files]
if '--debug' not in test_cmd:
test_cmd.append('--debug')
test_cmd = ' '.join(test_cmd)
print(' ', test_cmd, filename, file=buffer)
if raised_exception:
print('-' * 70, file=buffer)
print(current_output, file=buffer)
traceback.print_exc(None, buffer)
print(file=buffer)
self.buffer.write(buffer.getvalue())
else:
if self.options.verbose:
sys.stderr.write('ok\n')
else:
sys.stderr.write('.')
self.num_tests_run += 1
if __name__ == '__main__':
reload(sys)
sys.setdefaultencoding('utf8')
option_parser = optparse.OptionParser()
options.add_common_options(option_parser)
option_parser.add_option('-c',
'--compile',
action='store_true',
default=False)
option_parser.add_option('--skip-test', action='store_true', default=False)
option_parser.add_option(
'--test-input',
default='tests/input/search_list_data.pye',
help='input data file for templates (.pkl or eval-able file)')
option_parser.add_option('--test-output',
default='tests/output',
help="directory for output")
option_parser.add_option(
'--test-accept-result',
action='store_true',
default=False,
help='accept current code output as correct for future tests')
option_parser.add_option('--debug', action='store_true', default=False)
option_parser.add_option(
'--debug-flags',
action='store',
default='hoisted_tree,source_code',
help='parse_tree, analyzed_tree, optimized_tree, hoisted_tree, source_code')
option_parser.add_option('--enable-c-accelerator',
action='store_true',
default=False)
(spt_options, spt_args) = option_parser.parse_args()
if spt_options.debug:
spt_options.verbose = True
spt_options.debug_flags = getattr(spt_options, 'debug_flags').split(',')
else:
spt_options.debug_flags = []
udn.set_accelerator(spt_options.enable_c_accelerator, enable_test_mode=True)
spt_compiler_args = compiler.Compiler.args_from_optparse(spt_options)
spt_compiler = compiler.Compiler(**spt_compiler_args)
test_runner = TestRunner(spt_compiler, spt_options, spt_args)
test_runner.run()
``` |
{
"source": "aadarshsingh191198/AAAI-21-SDU-shared-task-1-AI",
"score": 3
} |
#### File: aadarshsingh191198/AAAI-21-SDU-shared-task-1-AI/dataset_reformatter.py
```python
import nltk
import pandas as pd
def reformat_test(x, test = False):
tok_text = nltk.word_tokenize(x['sentence'])
word_pos = nltk.pos_tag(tok_text)
return '\n'.join([f'{word} {pos} O O' for (word,pos) in word_pos])
def reformat(x):
tok_text = nltk.word_tokenize(x['sentence'])
tags = x['labels'].split()
word_pos = nltk.pos_tag(tok_text)
return '\n'.join([f'{word} {pos} O {i}' for (word,pos),i in zip(word_pos,tags)]) #Not very sure as to why the third element is O.
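# Illustrative example: for the sentence "ML is fun" with labels "B-long O O"
# this yields one CoNLL-style "token POS O tag" line per token, roughly:
#   ML NNP O B-long
#   is VBZ O O
#   fun NN O O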
def make_data(filename):
df = pd.read_csv(f'./dataset/{filename}.csv')
# train = pd.read_csv('/content/AAAI-21-SDU-shared-task-1-AI/dataset/train.csv')
if filename == 'test':
df['reformatted_data'] = df[['sentence']].apply(reformat_test,axis=1)
else:
df['reformatted_data'] = df[['sentence','labels']].apply(reformat,axis=1)
# train['reformatted_data'] = train[['sentence','labels']].apply(reformat,axis=1)
with open(f'dataset/scibert_sduai/{filename}.txt','w', encoding='utf-8') as f:
f.write('\n\n'.join(df['reformatted_data'].tolist()))
print(f'{filename} data reformatted and stored in scibert_sduai/{filename}.txt..')
if __name__ == '__main__':
nltk.download('punkt')
nltk.download('averaged_perceptron_tagger')
make_data('train')
make_data('dev')
make_data('test')
``` |
{
"source": "aadarshsingh191198/AAAI-21-SDU-shared-task-2-AD",
"score": 3
} |
#### File: aadarshsingh191198/AAAI-21-SDU-shared-task-2-AD/convert_to_csv.py
```python
import pandas as pd
import json
import os
from tqdm import tqdm
def process_file(fname):
json_list = json.load(open(fname))
for i in tqdm(range(len(json_list))):
json_list[i]['text'] = ' '.join(json_list[i]['tokens'])
json_list[i]['acronym_'] = json_list[i]['tokens'][json_list[i]['acronym']]
del json_list[i]['tokens']
del json_list[i]['acronym']
df = pd.DataFrame(json_list)
save_name = fname.split('/')[-1].split('.')[0]
save_name = os.path.join('csv_files', f'{save_name}.csv')
df.to_csv(save_name, index=False)
if __name__ == '__main__':
os.makedirs('csv_files', exist_ok=True)
process_file('./dataset/train.json')
process_file('./dataset/dev.json')
``` |
{
"source": "aadarshsingh191198/dffml",
"score": 2
} |
#### File: docs/_ext/consoletest.py
```python
import os
import abc
import sys
import time
import copy
import shlex
import signal
import atexit
import shutil
import tempfile
import contextlib
import subprocess
import importlib.util
from typing import (
Any,
Dict,
List,
Union,
)
from docutils import nodes
from docutils.nodes import Node
from docutils.parsers.rst import directives
import sphinx
from sphinx.directives.code import LiteralInclude
from sphinx.locale import __
from sphinx.ext.doctest import DocTestBuilder
from sphinx.util.docutils import SphinxDirective
# Root of DFFML source tree
ROOT_DIR = os.path.join(os.path.dirname(__file__), "..", "..")
# Load file by path
spec = importlib.util.spec_from_file_location(
"plugins", os.path.join(ROOT_DIR, "dffml", "plugins.py")
)
plugins = importlib.util.module_from_spec(spec)
spec.loader.exec_module(plugins)
class ConsoletestCommand(abc.ABC):
def __init__(self):
self.poll_until = None
self.ignore_errors = False
self.daemon = False
def __repr__(self):
return (
self.__class__.__qualname__
+ "("
+ str(
{
k: v
for k, v in self.__dict__.items()
if not k.startswith("_")
}
)
+ ")"
)
def str(self):
return repr(self)
def __enter__(self):
pass
def __exit__(self, _exc_type, _exc_value, _traceback):
pass
class VirtualEnvCommand(ConsoletestCommand):
def __init__(self, directory: str):
super().__init__()
self.directory = directory
self.old_virtual_env = None
self.old_path = None
def __eq__(self, other: "VirtualEnvCommand"):
return bool(
hasattr(other, "directory") and self.directory == other.directory
)
def run(self, ctx):
self.old_virtual_env = os.environ.get("VIRTUAL_ENV", None)
self.old_path = os.environ.get("PATH", None)
os.environ["VIRTUAL_ENV"] = os.path.abspath(
os.path.join(ctx["cwd"], self.directory)
)
os.environ["PATH"] = ":".join(
[os.path.abspath(os.path.join(ctx["cwd"], self.directory, "bin"))]
+ os.environ.get("PATH", "").split(":")
)
def __exit__(self, _exc_type, _exc_value, _traceback):
if self.old_virtual_env is not None:
os.environ["VIRTUAL_ENV"] = self.old_virtual_env
if self.old_path is not None:
os.environ["PATH"] = self.old_path
def run_commands(
cmds,
ctx,
*,
stdout: Union[str, bytes] = None,
ignore_errors: bool = False,
daemon: bool = False,
):
proc = None
procs = []
for i, cmd in enumerate(map(sub_env_vars, cmds)):
kwargs = {}
# Set stdout to system stdout so it doesn't go to the pty
kwargs["stdout"] = stdout if stdout is not None else sys.stdout
# Check if there is a previous command
if i != 0:
kwargs["stdin"] = proc.stdout
# Check if there is a next command
if i + 1 < len(cmds):
kwargs["stdout"] = subprocess.PIPE
# Check if we redirect stderr to stdout
if "2>&1" in cmd:
kwargs["stderr"] = subprocess.STDOUT
cmd.remove("2>&1")
# If not in venv ensure correct Python
if not "VIRTUAL_ENV" in os.environ and cmd[0].startswith("python"):
cmd[0] = sys.executable
# Run the command
proc = subprocess.Popen(
cmd, start_new_session=True, cwd=ctx["cwd"], **kwargs
)
proc.cmd = cmd
procs.append(proc)
# Parent (this Python process) close stdout of previous command so that
# the command we just created has exclusive access to the output.
if i != 0:
kwargs["stdin"].close()
# Wait for all processes to complete
errors = []
for i, proc in enumerate(procs):
# Do not wait for last process to complete if running in daemon mode
if daemon and (i + 1) == len(procs):
break
proc.wait()
if proc.returncode != 0:
errors.append(f"Failed to run: {cmd!r}")
if errors and not ignore_errors:
raise RuntimeError("\n".join(errors))
if daemon:
return procs[-1]
def sub_env_vars(cmd):
for env_var_name, env_var_value in os.environ.items():
for i, arg in enumerate(cmd):
for check in ["$" + env_var_name, "${" + env_var_name + "}"]:
if check in arg:
cmd[i] = arg.replace(check, env_var_value)
return cmd
def pipes(cmd):
if not "|" in cmd:
return [cmd]
cmds = []
j = 0
for i, arg in enumerate(cmd):
if arg == "|":
cmds.append(cmd[j:i])
j = i + 1
cmds.append(cmd[j:])
return cmds
class ConsoleCommand(ConsoletestCommand):
def __init__(self, cmd: List[str]):
super().__init__()
self.cmd = cmd
self.daemon_proc = None
def run(self, ctx):
if self.poll_until is None:
self.daemon_proc = run_commands(
pipes(self.cmd),
ctx,
ignore_errors=self.ignore_errors,
daemon=self.daemon,
)
else:
while True:
with tempfile.TemporaryFile() as stdout:
run_commands(
pipes(self.cmd),
ctx,
stdout=stdout,
ignore_errors=self.ignore_errors,
)
stdout.seek(0)
stdout = stdout.read().decode()
if call_poll_until(self.poll_until, stdout):
return
time.sleep(0.1)
def __exit__(self, _exc_type, _exc_value, _traceback):
# Send ctrl-c to daemon if running
if self.daemon_proc is not None:
self.daemon_proc.send_signal(signal.SIGINT)
self.daemon_proc.wait()
class PipInstallCommand(ConsoleCommand):
def __init__(self, cmd: List[str]):
super().__init__(self.fix_dffml_packages(cmd))
@staticmethod
def fix_dffml_packages(cmd):
"""
If a piece of the documentation says to install dffml or one of the
packages, we need to make sure that the version from the current branch
gets installed instead, since we don't want to test the released
version, we want to test the version of the codebase as it is.
"""
package_names_to_directory = copy.copy(
plugins.PACKAGE_NAMES_TO_DIRECTORY
)
package_names_to_directory["dffml"] = "."
for i, pkg in enumerate(cmd):
if pkg in package_names_to_directory:
directory = package_names_to_directory[pkg]
directory = os.path.join(ROOT_DIR, *directory)
directory = os.path.abspath(directory)
cmd[i] = directory
return cmd
class DockerRunCommand(ConsoleCommand):
def __init__(self, cmd: List[str]):
name, needs_removal, cmd = self.find_name(cmd)
super().__init__(cmd)
self.name = name
self.needs_removal = needs_removal
self.stopped = False
@staticmethod
def find_name(cmd):
"""
Find the name of the container we are starting (if starting as daemon)
"""
name = None
needs_removal = bool("--rm" not in cmd)
for i, arg in enumerate(cmd):
if arg.startswith("--name="):
name = arg[len("--name=") :]
elif arg == "--name" and (i + 1) < len(cmd):
name = cmd[i + 1]
return name, needs_removal, cmd
def cleanup(self):
if self.name and not self.stopped:
subprocess.check_call(["docker", "stop", self.name])
if self.needs_removal:
subprocess.check_call(["docker", "rm", self.name])
self.stopped = True
def __enter__(self):
atexit.register(self.cleanup)
def __exit__(self, _exc_type, _exc_value, _traceback):
self.cleanup()
def within_qoute(current, qoute=('"', "'")):
within = False
for i, char in enumerate(current):
context = current[i - 1 : i]
if char in qoute and not context.startswith("\\"):
within = not within
return within
def parse_commands(content):
commands = []
current = ""
for line in content:
line = line.rstrip()
if line.startswith("$ "):
if line.endswith("\\"):
current = line[2:-1]
else:
current = line[2:]
if within_qoute(current):
continue
commands.append(current)
current = ""
elif current and line.endswith("\\"):
current += line[:-1]
elif current and not line.endswith("\\"):
current += line
if within_qoute(current):
continue
commands.append(current)
current = ""
# Raise NotImplementedError if command substitution is attempted
for command in commands:
for check in ("`", "$("):
index = 0
while index != -1:
index = command.find(check, index + 1)
if index == -1:
continue
if not within_qoute(command[:index], qoute=("'")):
raise NotImplementedError(
f"Command substitution was attempted: {command}"
)
try:
commands = list(map(shlex.split, commands))
except ValueError as error:
print(commands)
raise
return commands
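# Illustrative example: "$ "-prefixed lines are gathered, backslash
# continuations joined, then tokenized with shlex:
#   parse_commands(["$ pip install \\", "    dffml"])
#   -> [["pip", "install", "dffml"]]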
def build_command(cmd):
if not cmd:
raise ValueError("Empty command")
# Handle virtualenv activation
if ".\\.venv\\Scripts\\activate" in cmd or (
len(cmd) == 2
and cmd[0] in ("source", ".")
and ".venv/bin/activate" == cmd[1]
):
return VirtualEnvCommand(".venv")
# TODO Handle cd
# Handle pip installs
if (
"pip" in cmd
and "install" in cmd
and cmd[cmd.index("pip") + 1] == "install"
):
return PipInstallCommand(cmd)
# Handle docker commands
if cmd[:2] == ["docker", "run"]:
return DockerRunCommand(cmd)
# Regular console command
return ConsoleCommand(cmd)
# set up the necessary directives
MAKE_POLL_UNTIL_TEMPLATE = """
import sys
func = lambda stdout: {func}
sys.exit(int(not func(sys.stdin.buffer.read())))
"""
def call_poll_until(func, stdout):
with tempfile.NamedTemporaryFile() as fileobj, tempfile.NamedTemporaryFile() as stdin:
fileobj.write(MAKE_POLL_UNTIL_TEMPLATE.format(func=func).encode())
fileobj.seek(0)
stdin.write(stdout.encode() if isinstance(stdout, str) else stdout)
stdin.seek(0)
return_code = subprocess.call(["python", fileobj.name], stdin=stdin)
return bool(return_code == 0)
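# Illustrative example: the predicate source is compiled into a tiny script
# whose exit status becomes the boolean result.
#   call_poll_until("b'ready' in stdout", "service ready")  # -> True
#   call_poll_until("b'ready' in stdout", "starting...")    # -> False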
class ConsoletestLiteralIncludeDirective(LiteralInclude):
def run(self) -> List[Node]:
retnodes = super().run()
retnodes[0]["consoletest-literalinclude"] = True
retnodes[0]["filepath"] = self.options.get(
"filepath", os.path.basename(retnodes[0]["source"])
).split("/")
return retnodes
ConsoletestLiteralIncludeDirective.option_spec.update(
{"filepath": directives.unchanged_required,}
)
class ConsoletestDirective(SphinxDirective):
option_spec = {
"poll-until": directives.unchanged_required,
"ignore-errors": directives.flag,
"daemon": directives.flag,
}
has_content = True
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = True
def run(self) -> List[Node]:
code = "\n".join(self.content)
nodetype = nodes.literal_block # type: Type[TextElement]
node = nodetype(
code,
code,
language="console",
consoletestnodetype=self.name,
consoletest_commands=list(
map(build_command, parse_commands(self.content))
),
)
self.set_source_info(node)
poll_until = self.options.get("poll-until", None)
ignore_errors = bool("ignore-errors" in self.options)
for command in node["consoletest_commands"]:
command.poll_until = poll_until
command.ignore_errors = ignore_errors
# Last command to be run is a daemon
daemon = bool("daemon" in self.options)
if daemon:
node["consoletest_commands"][-1].daemon = True
return [node]
class ConsoleTestBuilder(DocTestBuilder):
name = "consoletest"
epilog = __(
"Testing of consoletests in the sources finished, look at the "
"results in %(outdir)s/output.txt."
)
def init(self) -> None:
self.total_failures = 0
self.total_tries = 0
date = time.strftime("%Y-%m-%d %H:%M:%S")
self.outfile = open(
os.path.join(self.outdir, "output.txt"), "w", encoding="utf-8"
)
self.outfile.write(
(
"Results of %s builder run on %s\n"
"===========%s================%s\n"
)
% (self.name, date, "=" * len(self.name), "=" * len(date))
)
def finish(self) -> None:
# write executive summary
def s(v: int) -> str:
return "s" if v != 1 else ""
repl = (
self.total_tries,
s(self.total_tries),
self.total_failures,
s(self.total_failures),
)
self._out(
f"""
{self.name} summary
{"=" * len(self.name)}========
%5d test%s
%5d failure%s in tests
"""
% repl
)
self.outfile.close()
if self.total_failures:
self.app.statuscode = 1
@staticmethod
def condition(node: Node) -> bool:
return isinstance(node, (nodes.literal_block, nodes.comment)) and (
"consoletest_commands" in node
or "consoletest-literalinclude" in node
)
def test_doc(self, docname: str, doctree: Node) -> None:
# Get all applicable nodes
doc_nodes = list(doctree.traverse(self.condition))
if not doc_nodes:
return
print()
print(f"{self.name} testing: {docname}")
print()
self.total_tries += 1
try:
with tempfile.TemporaryDirectory() as tempdir, contextlib.ExitStack() as stack:
ctx = {"cwd": tempdir}
for node in doc_nodes: # type: Element
filename = self.get_filename_for_node(node, docname)
line_number = self.get_line_number(node)
if "consoletest-literalinclude" in node:
print()
print("Copying", node["source"], node["filepath"])
print()
shutil.copyfile(
node["source"],
os.path.join(ctx["cwd"], *node["filepath"]),
)
elif "consoletest_commands" in node:
for command in node["consoletest_commands"]:
print()
print("Running", command)
print()
stack.enter_context(command)
command.run(ctx)
print()
print("No more tempdir")
print()
except:
self.total_failures += 1
def setup(app: "Sphinx") -> Dict[str, Any]:
app.add_directive("consoletest", ConsoletestDirective)
app.add_directive(
"consoletest-literalinclude", ConsoletestLiteralIncludeDirective
)
app.add_builder(ConsoleTestBuilder)
return {"version": "0.0.1", "parallel_read_safe": True}
``` |
{
"source": "AADavin/RelativeDating",
"score": 4
} |
#### File: RelativeDating/Scripts/count_orders.py
```python
import operator as op
import functools
from ete3 import PhyloTree
import sys
def ncr(n, r):
r = min(r, n-r)
if r == 0: return 1
numer = functools.reduce(op.mul, range(n, n-r, -1))
denom = functools.reduce(op.mul, range(1, r+1))
return numer//denom
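# Example: ncr(5, 2) == 10. Used below to count the ways of interleaving two
# independently ordered sets of inner nodes.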
def count_inner(node):
n = len([x for x in node.iter_descendants() if x.kind != 0])
return n
def count_total_orders(mytree_file):
with open(mytree_file) as f:
mytree = f.readline().strip()
mytree = PhyloTree(mytree, format=1)
# We assign kinds to the nodes
# 0 To leaves
# 1 To nodes whose two children are leaves
# 2 To nodes with exactly one leaf child
# 3 To nodes whose two children are inner nodes
# Value stores the combinatorial value of the node. -1 if it is not computed yet for the node
i = 0
for node in mytree.traverse("postorder"):
if node.is_leaf():
node.add_features(kind=0, descendants=-1, value=1)
else:
i += 1
node.name = str(i)
node.add_features(kind=0, descendants=0, value=0)
children = node.get_children()
leaves = len([x for x in children if x.is_leaf()])
if leaves == 2:
node.kind = 1
node.value = 1
elif leaves == 1:
node.kind = 2
node.value = -1
elif leaves == 0:
node.kind = 3
node.value = -1
node.descendants = count_inner(node)
myroot = node.get_tree_root()
myroot.value = -1
while myroot.value == -1:
for node in mytree.traverse("postorder"):
if node.kind != 0 and node.kind != 1:
c1, c2 = node.get_children()
if c1.value == -1 or c2.value == -1:
continue
x, y = c1.descendants + 1, c2.descendants + 1
node.value = ncr(x+y,x) * (c1.value * c2.value)
print(myroot.value)
if __name__ == "__main__":
if len(sys.argv) != 2:
print ("usage: python count_total_orders.py mytree")
exit(0)
scr, mytree_file = sys.argv
count_total_orders(mytree_file)
```
#### File: RelativeDating/Scripts/generate_orders.py
```python
import ete3
import sys
import itertools
import copy
# This script generates all the possible orders for a given tree
# We name the inner nodes if required
def name_nodes(mytree):
i=0
for node in mytree.traverse():
if node.is_leaf() == False:
i+=1
node.name = str(i)
def partitions(n, k):
for c in itertools.combinations(range(n+k-1), k-1):
yield [b-a-1 for a, b in zip((-1,)+c, c+(n+k-1,))]
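# Stars-and-bars enumeration: every way of putting n balls into k ordered
# boxes. Example: list(partitions(2, 2)) -> [[0, 2], [1, 1], [2, 0]]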
def is_inner_node(node):
children = node.get_children()
if not children[0].is_leaf() and not children[1].is_leaf():
return True
else:
return False
def return_type_node(node):
# 3 Real inner node with no leaf descendants
# 2 Inner node with one leaf descendant
# 1 Inner node with two leaf descendants
children = node.get_children()
if not children[0].is_leaf() and not children[1].is_leaf():
return 3
else:
if (children[0].is_leaf() and not children[1].is_leaf()) or (not children[0].is_leaf() and children[1].is_leaf()):
return 2
else:
return 1
# First thing we count the number of inner nodes under each node.
# We write for every inner node the number of partitions
def write_partitions(mytree):
all_partitions = list()
for node in mytree.traverse(strategy="postorder"):
if node.is_leaf():
continue
node.add_feature("order",list())
if is_inner_node(node):
children = node.get_children()
left_branch = len(children[0].get_leaves())
right_branch = len(children[1].get_leaves()) - 1
# This line stores the node and the corresponding partitions:
all_partitions.append([node.name + ";" + ",".join([str(x) for x in p]) for p in partitions(right_branch,left_branch)])
return all_partitions
def get_node_order(node, boxes):
children = node.get_children()
if children[0].is_leaf():
node.order = list(children[1].order)
node.order.insert(0, node.name)
elif children[1].is_leaf():
node.order = list(children[0].order)
node.order.insert(0, node.name)
else:
# This is the general case where we are dealing with an inner node. Here we need information
# relative to the partitions
myorder = list(children[0].order) # We use the left node as a mold
# And now we insert the other nodes onto that list
# For that we iterate on the partitions
counter = 0 # We count how many nodes of the right branch we have inserted
balls_number = len(children[1].order)
if node.is_root():
pass
while counter < balls_number:
true_index = 0
for index, box in enumerate(boxes):
if box == "0":
pass
else:
for x in range(int(box)):
if true_index < len(myorder):
myorder.insert(true_index, children[1].order[counter])
else:
myorder.append(children[1].order[counter])
true_index+=1
counter += 1
true_index +=1
myorder.insert(0,node.name)
node.order = myorder
def generate_node_orders(all_partitions):
for element in itertools.product(*all_partitions):
# Now we translate the code into orders:
tree = copy.deepcopy(mytree)
root = tree.get_tree_root()
mypartitions = dict()
for partition in element:
node, boxes = partition.split(";")
mypartitions[node] = boxes.split(",")
# Now we iterate the tree filling the node orders
for node in tree.traverse(strategy="postorder"):
if node.is_leaf():
continue
node_type = return_type_node(node)
if node_type == 1:
node.order.append(node.name)
elif node_type == 2:
get_node_order(node, None)
elif node_type == 3:
get_node_order(node, mypartitions[node.name])
# Now we get the node orders of the tree
print (",".join(root.order))
def test_tree():
all_partitions = write_partitions(mytree)
generate_node_orders(all_partitions)
if __name__ == "__main__":
if len(sys.argv) != 2:
print ("usage: python generate_orders.py mytree")
exit(0)
scr,mytree_file = sys.argv
with open(mytree_file) as f:
mytree = ete3.Tree(f.readline().strip(), format=1)
all_partitions = write_partitions(mytree)
generate_node_orders(all_partitions)
```
#### File: RelativeDating/Scripts/get_node_order.py
```python
from ete3 import Tree
import sys
def get_order(tree):
mytree = Tree(tree, format=1)
distances = dict()
for mynode in mytree.traverse():
if mynode.is_leaf():
continue
one_leaf = mynode.get_leaves()[0]
dist = mynode.get_distance(one_leaf)
distances[mynode.name] = dist
node_order = sorted(distances.items(), key=lambda x: x[1])
node_order = [x[0] for x in node_order][::-1]
return ",".join(node_order)
def get_node_order(tree_file):
with open(tree_file) as f:
for line in f:
print(get_order(line.strip()))
if __name__ == "__main__":
if len(sys.argv) != 2:
print ("usage: python get_node_order.py tree_file")
exit(0)
scr, tree_file = sys.argv
get_node_order(tree_file)
```
#### File: RelativeDating/Scripts/script_tree.py
```python
def isfloat(value):
try:
float(value)
return True
except ValueError:
return False
def getbppnumber(tree,node):
return tree[node][7]
def writeBootstrap(tree,node,value):
tree[node][6] = value
def getBootstrap(tree,node):
return tree[node][6]
def addNode(tree):
id_node = 0
while tree.has_key(id_node):
id_node = id_node + 1
tree[id_node] = ["N"+str(id_node),-1,[],0,"","",""]
return id_node
def getAncestor(tree):
if tree.has_key("ancestor"):
return tree["ancestor"]
else:
return -1
def setAncestor(tree,node):
tree["ancestor"] = node
def getLength(tree,node):
return tree[node][3]
def setLength(tree,node,l):
tree[node][3] = l
def getName(tree,node):
return tree[node][0]
def setName(tree,node,name):
tree[node][0] = name
def getSpecies(tree,node):
return tree[node][5]
def writeSpecies(tree,node,annot):
tree[node][5] = annot
def getNodes(tree):
clefs = tree.keys()
c = 0
while c < len(clefs):
if (clefs[c] == "sequence" or
clefs[c] == "ancestor" or
len(tree[clefs[c]]) == 0):
del clefs[c]
else:
c = c + 1
return clefs
def getParent(tree,node):
return tree[node][1]
def getChildNumber(tree,n,c):
children = getChildren(tree,n)
if children[0] == c:
return 0
else:
return 1
def setParent(tree,node,p):
tree[node][1] = p
def addChild(tree,pere,node):
tree[pere][2].append(node)
def removeLeaf(tree,l):
#root = getRoot(tree)
#print "remove",l,writeTree(tree,root,False)
if isRoot(tree,l):
del tree[l]
else:
pere = getParent(tree,l)
if isRoot(tree,pere) and len(getChildren(tree,pere)) == 2:
#print "son of the root"
b = getBrother(tree,l)
tree[b][1] = -1
del tree[pere]
del tree[l]
elif len(getChildren(tree,pere)) == 2:
b = getBrother(tree,l)
grandpere = getParent(tree,pere)
setParent(tree,b,grandpere)
number = getChildNumber(tree,grandpere,pere)
setChild(tree,grandpere,number,b)
tree[b][3] = tree[b][3]+tree[pere][3]
del tree[pere]
del tree[l]
elif isRoot(tree,pere) and len(getChildren(tree,pere)) == 1:
del tree[l]
del tree[pere]
elif len(getChildren(tree,pere)) > 2:
number = getChildNumber(tree,pere,l)
del tree[pere][2][number]
del tree[l]
def removeNodeAndChildren(tree,node):
children = list(getChildren(tree,node))
for child in children:
removeNodeAndChildren(tree,child)
removeNode(tree,node)
def removeNode(tree,node):
# print "effacement du noeud",node
del tree[node]
def removeChildAndChildren(tree,pere,node):
numero = 0
while node != getChild(tree,pere,numero):
numero = numero + 1
del tree[pere][2][numero]
removeNodeAndChildren(tree,node)
def removeChild(tree,pere,node):
numero = 0
while node != getChild(tree,pere,numero):
numero = numero + 1
del tree[pere][2][numero]
removeNode(tree,node)
def getChild(tree,node,k):
return tree[node][2][k]
def setChild(tree,node,k,c):
tree[node][2][k] = c
def getNumberOfChildren(tree,node):
return len(tree[node][2])
def getChildren(tree,node):
return tree[node][2]
def getBrother(tree,node):
anc = getParent(tree,node)
if (getChild(tree,anc,0) == node):
return getChild(tree,anc,1)
else:
return getChild(tree,anc,0)
def isLeaf(tree,node):
return (len(getChildren(tree,node)) == 0)
def isRoot(tree,node):
return (tree[node][1] == -1)
def isDup(tree,node):
return (tree[node][4] == "D")
def getND(tree,node):
if tree[node].has_key("ND"):
return tree[node]["ND"]
else:
return ""
def lastCommonAncestor(tree,a,b):
ancestor = -1
ancestorsa = [a]
while not isRoot(tree,a):
a = getParent(tree,a)
ancestorsa.append(a)
ancestorsb = [b]
while not isRoot(tree,b):
b = getParent(tree,b)
ancestorsb.append(b)
# print ancestorsa,ancestorsb
while len(ancestorsa) > 0 and len(ancestorsb) > 0 and ancestorsa[-1] == ancestorsb[-1]:
ancestor = ancestorsa[-1]
del ancestorsa[-1]
del ancestorsb[-1]
# print "ancestor",ancestor
return ancestor
def distanceFrom(tree,a,b):
ancestor = lastCommonAncestor(tree,a,b)
distance = 0
while a != ancestor:
#print tree[a]
distance = distance + tree[a][3]
a = getParent(tree,a)
while b != ancestor:
#print tree[b]
distance = distance + tree[b][3]
b = getParent(tree,b)
return distance
def getLeaves(tree,a):
# print "getleaves",a
if isLeaf(tree,a):
return [a]
else:
#print "non feuille",child1(a),child2(a)
result = []
children = list(getChildren(tree,a))
for child in children:
result = result + getLeaves(tree,child)
return result
def writeTree(tree,a,NHX):
# print a,tree[a]
if isLeaf(tree,a):
if isRoot(tree,a):
chaine = "("
else:
chaine = ""
chaine = chaine + tree[a][0]
if tree[a][3] != -1:
#~ print tree[a][3]
chaine = chaine + ":" + str(tree[a][3])
if NHX and tree[a][5] != "":
chaine = chaine + "[&&NHX:S="+tree[a][5]+"]"
if isRoot(tree,a):
chaine = chaine + ")" + str(getBootstrap(tree,a))
else:
chaine = "("
children = list(getChildren(tree,a))
for child in children:
chaine = chaine + writeTree(tree,child,NHX)+","
chaine = chaine[:-1]+")"+str(getBootstrap(tree,a))
if (not isRoot(tree,a)) and tree[a][3] != -1:
chaine = chaine + ":" + str(tree[a][3])
if NHX and (tree[a][4] != "" or tree[a][5] != ""):
chaine = chaine + "[&&NHX:"
if tree[a][5] != "":
chaine = chaine + "S="+tree[a][5]
if tree[a][4] == "D" or tree[a][4] == "WGD":
chaine = chaine+":D=Y"
chaine = chaine + "]"
if isRoot(tree,a):
chaine = chaine + ";"
return chaine
def writeTreeNexus(tree,a,tab):
# print a,tree[a]
if isLeaf(tree,a):
if isRoot(tree,a):
chaine = "("
else:
chaine = ""
chaine = chaine + tree[a][0]
chaine = chaine + "[&!color=#"+tab[a]+"]"
if tree[a][3] != -1:
#~ print tree[a][3]
chaine = chaine + ":" + str(tree[a][3])
if isRoot(tree,a):
chaine = chaine + ")"
else:
chaine = "("
children = list(getChildren(tree,a))
for child in children:
chaine = chaine + writeTreeNexus(tree,child,tab)+","
chaine = chaine[:-1]+")"
chaine = chaine + "[&!color=#"+tab[a]+"]"
if (not isRoot(tree,a)) and tree[a][3] != -1:
chaine = chaine + ":" + str(tree[a][3])
if isRoot(tree,a):
chaine = chaine + ";"
return chaine
def getRoot(tree):
keys = getNodes(tree)
#print tree
#print keys
start = keys[0]
while (not isRoot(tree,start)):
start = getParent(tree,start)
return start
def getNodesBetween(tree,a,b):
chemin = []
ancestor = -1
ancestorsa = []
while not isRoot(tree,a):
a = getParent(tree,a)
ancestorsa.append(a)
ancestorsb = []
while not isRoot(tree,b):
b = getParent(tree,b)
ancestorsb.append(b)
while len(ancestorsa) > 0 and len(ancestorsb) > 0 and ancestorsa[-1] == ancestorsb[-1]:
ancestor = ancestorsa[-1]
del ancestorsa[-1]
del ancestorsb[-1]
# print "ancestor",ancestor
return ancestorsa+[ancestor]+ancestorsb
def isAncestor(tree,a,b):
if isRoot(tree,a):
result = True
else:
result = False
current = b
while ((not result) and (not isRoot(tree,current))):
if current == a:
result = True
else:
current = getParent(tree,current)
return result
def treeCopy(tree):
result = {}
for k in tree.keys():
if k == "ancestor" or k == "sequence":
result[k] = tree[k]
else:
result[k] = [tree[k][0],tree[k][1],list(tree[k][2]),tree[k][3],tree[k][4],tree[k][5],tree[k][6]]
return result
def changeRoot(tree,newRoot): # the new root is between newRoot and its parent NEVER TESTED
#~ print "changeroot",newRoot,getRoot(tree),getParent(tree,newRoot)
if (not isRoot(tree,newRoot)) and (not isRoot(tree,getParent(tree,newRoot))):
#~ print "changeroot"
root = getRoot(tree)
new_id = addNode(newtree)
tree[new_id][2] = [newRoot,getParent(tree,newRoot)]
tree[newRoot][1] = new_id
tree[newRoot][3] = tree[newRoot][3]/2
current = getParent(tree,newRoot)
prec = new_id
current_length = tree[newRoot][3]/2
while getParent(tree,current) != root:
if tree[current][2][0] == prec:
tree[current][2][0] = getParent(tree,current)
else:
tree[current][2][1] = getParent(tree,current)
tree[current][1] = prec
temp = current_length
current_length = tree[current][3]
tree[current][3] = temp
prec = current
current = getParent(tree,current)
if tree[current][2][0] == prec:
tree[current][2][0] = getBrother(tree,current)
else:
tree[current][2][1] = getBrother(tree,current)
tree[current][1] = prec
temp = current_length
current_length = tree[current][3]
tree[current][3] = temp
tree[getBrother(tree,current)][1] = current
tree[getBrother(tree,current)][3] = tree[getBrother(tree,current)][3] + current_length
del tree[root]
def SPR(tree,a,b):
#~ print a,b,getName(tree,a),getName(tree,b)
#~ print writeTree(tree,getParent(tree,a),False)
parent = getParent(tree,a)
great_parent = getParent(tree,getParent(tree,a))
brother = getBrother(tree,a)
tree[brother][1] = great_parent
child = getChildren(tree,great_parent)[0]
if child == getParent(tree,a):
tree[great_parent][2][0] = brother
else:
tree[great_parent][2][1] = brother
del tree[parent]
#~ print writeTree(tree,great_parent,False)
parent = getParent(tree,b)
new_node = addNode(tree)
tree[new_node][1] = parent
tree[new_node][2] = [a,b]
tree[a][1] = new_node
tree[b][1] = new_node
child = getChildren(tree,parent)[0]
if child == b:
tree[parent][2][0] = new_node
else:
tree[parent][2][1] = new_node
#~ print writeTree(tree,parent,False)
def NNI(tree,node):
if (not isRoot(tree,node)) and (not isLeaf(tree,node)):
parent = getParent(tree,node)
if isRoot(tree,parent):
brother = getBrother(tree,node)
if not isLeaf(tree,brother):
son1 = getChildren(tree,node)[0]
son2 = getChildren(tree,node)[1]
son3 = getChildren(tree,brother)[0]
son4 = getChildren(tree,brother)[1]
setChild(tree,node,1,son4)
setChild(tree,brother,1,son2)
setParent(tree,son2,brother)
setParent(tree,son4,node)
else:
brother = getBrother(tree,node)
if getChildren(tree,parent)[0] == brother:
no_brother = 0
else:
no_brother = 1
son1 = getChildren(tree,node)[0]
setChild(tree,node,0,brother)
setChild(tree,parent,no_brother,son1)
setParent(tree,son1,parent)
setParent(tree,brother,node)
def getLeavesNames(tree):
result = []
root = getRoot(tree)
leaves = getLeaves(tree,root)
for l in leaves:
result.append(getName(tree,l))
return result
def unroot(tree):
nodes = getNodes(tree)
if len(nodes) > 3:
root = getRoot(tree)
children = getChildren(tree,root)
if len(children) == 2:
new_root = children[0]
tree[new_root][1] = -1
tree[new_root][2].append(children[1])
tree[children[1]][1] = new_root
tree[children[1]][3] = tree[new_root][3] + tree[children[1]][3]
tree[children[1]][6] = max(tree[new_root][6],tree[children[1]][6])
tree[new_root][3] = -1
del tree[root]
def contractunsupported(tree,threshold):
result = 0
unroot(tree)
nodes = getNodes(tree)
#print "begin",len(nodes)
for n in nodes:
if isfloat(tree[n][6]):
tree[n][6] = float(tree[n][6])
else:
tree[n][6] = 0.0
if (not isRoot(tree,n)) and (not isLeaf(tree,n)) and (tree[n][6] < threshold):
#~ print "CONTRACTION",float(tree[n][6]),threshold,
parent = getParent(tree,n)
children = getChildren(tree,n)
for c in children:
tree[parent][2].append(c)
tree[c][1] = parent
removeChild(tree,parent,n)
result = result + 1
#nodes = getNodes(tree)
#print "end",len(nodes)
return result
def ultrametricize(tree):
root = getRoot(tree)
leaves = getLeaves(tree,root)
maximum = 0
index = -1
for l in leaves:
d = distanceFrom(tree,root,l)
if d > maximum:
maximum = d
index = l
#~ print getName(tree,l),"maximum",maximum
i = index
marque = []
while i != root:
marque.append(i)
i = getParent(tree,i)
marque.append(root)
while len(marque) < len(getNodes(tree)):
#~ print len(marque),len(getNodes(tree))
maximum_non_marque = 0
index = -1
for l in leaves:
d = distanceFrom(tree,root,l)
if (d > maximum_non_marque) and (not l in marque):
maximum_non_marque = d
index = l
#~ print getName(tree,l),"distance",distanceFrom(tree,root,l)
i = index
distance_from_marque = 0
while not i in marque:
distance_from_marque = distance_from_marque + getLength(tree,i)
i = getParent(tree,i)
ratio = (maximum - distanceFrom(tree,i,root)) / distance_from_marque
i = index
while not i in marque:
marque.append(i)
setLength(tree,i,getLength(tree,i) * ratio)
i = getParent(tree,i)
#~ else:
#~ print getName(tree,l),"distance",distanceFrom(tree,root,l)
def constructSupportFromBootstrapTrees(tree,setoftrees):
support = {}
leaves = getLeavesNames(tree)
pos = {}
for i in range(len(leaves)):
pos[leaves[i]] = i
bipartitions = {}
def complement(seq):
result = []
for s in seq.split("_")[0]:
if s == "1":
result.append("0")
else:
result.append("1")
return "".join(result)
def seq(leafset,node):
result = ["0"]*len(leaves)
for l in leafset:
result[pos[l]] = "1"
return "".join(result)
def constructBipartAndReturnLeaves(tree,node):
if isLeaf(tree,node):
return [getName(tree,node)]
else:
c = getChildren(tree,node)
tab0 = constructBipartAndReturnLeaves(tree,c[0])
tab1 = constructBipartAndReturnLeaves(tree,c[1])
result = tab0+tab1
if len(c) > 2:
tab2 = constructBipartAndReturnLeaves(tree,c[2])
result = result + tab2
if not isRoot(tree,node):
support[node] = 0
s = seq(tab0+tab1,node)
bipartitions[s] = node
bipartitions[complement(s)] = node
return result
root = getRoot(tree)
constructBipartAndReturnLeaves(tree,root)
def testBipartAndReturnLeaves(tree,node):
#print "boot"
if isLeaf(tree,node):
return [getName(tree,node)]
else:
#print "nonleafboot"
c = getChildren(tree,node)
tab0 = testBipartAndReturnLeaves(tree,c[0])
tab1 = testBipartAndReturnLeaves(tree,c[1])
result = tab0+tab1
if len(c) > 2:
tab2 = testBipartAndReturnLeaves(tree,c[2])
result = result + tab2
if not isRoot(tree,node):
s = seq(tab0+tab1,node)
#print s
                if s in bipartitions:
#print "bip trouve"
support[bipartitions[s]] = support[bipartitions[s]] + 1
#if bipartitions.has_key(complement(s)):
#support[bipartitions[complement(s)] = support[bipartitions[complement(s)]] + 1
return result
for t in setoftrees:
root = getRoot(t)
testBipartAndReturnLeaves(t,root)
if len(setoftrees) > 0:
for k in support.keys():
writeBootstrap(tree,k,support[k]/float(len(setoftrees)))
#root = getRoot(tree)
#print writeTree(tree,root,False)
def RF(arbre1,arbre2):
root1 = getRoot(arbre1)
root2 = getRoot(arbre2)
nodes1 = getNodes(arbre1)
nodes2 = getNodes(arbre2)
clades1 = []
for n in nodes1:
leaves = getLeaves(arbre1,n)
if len(leaves) > 1:
clade = []
for l in leaves:
clade.append(getName(arbre1,l).split("|")[0].split("__")[0])
clade.sort()
clades1.append(clade)
clades2 = []
for n in nodes2:
leaves = getLeaves(arbre2,n)
if len(leaves) > 1:
clade = []
for l in leaves:
clade.append(getName(arbre2,l).split("|")[0].split("__")[0])
clade.sort()
clades2.append(clade)
distance = 0
for c in clades1:
if not c in clades2:
distance = distance + 1
#print 1,c
for c in clades2:
if not c in clades1:
distance = distance + 1
#print 2,c
return distance/2
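
# Illustrative usage (added sketch): given two Newick strings over the same
# leaf set, parsed with readTree() (defined below), RF() returns the
# Robinson-Foulds distance, e.g.
#   t1 = readTree("((A:1,B:1):1,(C:1,D:1):1);")
#   t2 = readTree("((A:1,C:1):1,(B:1,D:1):1);")
#   RF(t1, t2)  # -> 2: each tree has two clades absent from the other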
def commonTriplets(arbre1,arbre2):
result = 0
triplets = {}
root1 = getRoot(arbre1)
leaves1 = getLeaves(arbre1,root1)
for n1 in range(len(leaves1)):
for n2 in range(n1+1,len(leaves1)):
for n3 in range(n2+1,len(leaves1)):
ids = [leaves1[n1],leaves1[n2],leaves1[n3]]
                ids.sort(key=lambda x: getName(arbre1, x))
names = [getName(arbre1,ids[0]),getName(arbre1,ids[1]),getName(arbre1,ids[2])]
LCA12 = lastCommonAncestor(arbre1,ids[0],ids[1])
LCA13 = lastCommonAncestor(arbre1,ids[0],ids[2])
LCA23 = lastCommonAncestor(arbre1,ids[1],ids[2])
#print LCA12,LCA13,LCA23
if LCA12 == LCA13:
triplets['_'.join(names)] = 1
if LCA12 == LCA23:
triplets['_'.join(names)] = 2
if LCA13 == LCA23:
triplets['_'.join(names)] = 3
#print names,triplets['_'.join(names)]
root2 = getRoot(arbre2)
leaves2 = getLeaves(arbre2,root2)
for n1 in range(len(leaves2)):
for n2 in range(n1+1,len(leaves2)):
for n3 in range(n2+1,len(leaves2)):
#print n1,n2,n3,result
ids = [leaves2[n1],leaves2[n2],leaves2[n3]]
                ids.sort(key=lambda x: getName(arbre2, x))
names = [getName(arbre2,ids[0]),getName(arbre2,ids[1]),getName(arbre2,ids[2])]
                if '_'.join(names) in triplets:
LCA12 = lastCommonAncestor(arbre2,ids[0],ids[1])
LCA13 = lastCommonAncestor(arbre2,ids[0],ids[2])
LCA23 = lastCommonAncestor(arbre2,ids[1],ids[2])
if LCA12 == LCA13 and triplets['_'.join(names)] == 1:
#print names,"yes",triplets['_'.join(names)]
result = result + 1
elif LCA12 == LCA23 and triplets['_'.join(names)] == 2:
#print names,"yes",triplets['_'.join(names)]
result = result + 1
elif LCA13 == LCA23 and triplets['_'.join(names)] == 3:
#print names,"yes",triplets['_'.join(names)]
result = result + 1
#else:
#print names
#else:
#print names,"not found"
return result
# structure of the tree:
# 0: name, 1: parent, 2: tab of children, 3: length, 4: isdup, 5:species, 6:bootstrap
#####################################################
#####################################################
# Traversal of one tree
#
#####################################################
#####################################################
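# Illustrative sketch (added): with the layout above, readTree("(A:1.0,B:2.0);")
# (defined just below) produces, roughly:
#   tree[1] = {0: "N1", 1: -1, 2: [2, 3], 3: 0,   4: "", 5: "", 6: "", 7: 2}  # root
#   tree[2] = {0: "A",  1: 1,  2: [],     3: 1.0, 4: "", 5: "", 6: "", 7: 0}  # leaf A
# plus a "sequence" key holding the original Newick string.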
def readTree(treeseq):
###############################################
######### TREE READING ########################
###############################################
tree = {"sequence":treeseq}
id_node = 0
nb_parenth = 0
bppnumber = 0
pile = []
t = 0
while t < len(treeseq):
if treeseq[t] == "(":
id_node = id_node + 1
nb_parenth = nb_parenth + 1
tree[id_node]={}
tree[id_node][0] = "N"+str(id_node)
tree[id_node][1] = -1
tree[id_node][2] = []
tree[id_node][3] = 0
tree[id_node][4] = ""
tree[id_node][5] = ""
tree[id_node][6] = ""
tree[id_node][7] = -1
            # [name, parent, [children], length, annotD, dotannot, bootstrap, bppnumber]
            # print "opening", tree[id_node]
if len(pile) > 0:
tree[id_node][1] = pile[-1]
pile.append(id_node)
t = t + 1
elif treeseq[t] == ")":
t = t + 1
nb_parenth = nb_parenth - 1
tree[pile[-1]][7] = bppnumber
bppnumber = bppnumber + 1
#~ print nb_parenth,"(-1)",treeseq[t:t+80]
if treeseq[t] == "@":
t = t + 1
tree["ancestor"] = pile[-1]
while (treeseq[t] != ":" and
treeseq[t] != ";" and
treeseq[t] != "[" and
treeseq[t] != ")" and
treeseq[t] != ","):
tree[pile[-1]][6] = tree[pile[-1]][6] + treeseq[t]
t = t + 1
if treeseq[t] == ":":
debut = t + 1
while treeseq[t] != "," and treeseq[t]!=")" and treeseq[t] != "[" and treeseq[t] != ";":
t = t + 1
longueur = float(treeseq[debut:t])
tree[pile[-1]][3] = longueur
while treeseq[t] != "," and treeseq[t] != ")" and treeseq[t] != "[" and treeseq[t] != ";":
t = t + 1
if treeseq[t] == "[":
debut = t + 1
t = debut + treeseq[debut:].find("]")
chaine = treeseq[debut:t]
mots = chaine.split(":")
for m in mots:
if m == "D=Y" or m == "D=T" or m == "Ev=GDup":
tree[pile[-1]][4] = "D"
if m[:2] == "S=":
tree[pile[-1]][5] = m[2:]
if m[:2] == "B=":
tree[pile[-1]][6] = m[2:]
if m[:3] == "ND=":
tree[pile[-1]]["ND"] = m[3:]
if isfloat(m):
tree[pile[-1]][6] = float(m)
t = t + 1
if treeseq[t] == ":":
debut = t + 1
while treeseq[t] != "," and treeseq[t]!=")" and treeseq[t] != "[" and treeseq[t] != ";":
t = t + 1
longueur = float(treeseq[debut:t])
tree[pile[-1]][3] = longueur
while treeseq[t] != "," and treeseq[t] != ")" and treeseq[t] != "[" and treeseq[t] != ";":
t = t + 1
del pile[-1]
if treeseq[t] == ";":
t = len(treeseq)
elif treeseq[t] == ";":
t = len(treeseq)
elif treeseq[t]==",":
t = t + 1
elif treeseq[t]==" ":
t = t + 1
        else:  # leaf name
#print "nom_de_feuille"
id_node = id_node + 1
tree[id_node] = {}
tree[id_node][1] = -1
tree[id_node][2] = []
tree[id_node][3] = 0
tree[id_node][4] = ""
tree[id_node][5] = ""
tree[id_node][6] = ""
tree[id_node][7] = bppnumber
bppnumber = bppnumber + 1
if len(pile)>0:
tree[id_node][1]=pile[-1]
pile.append(id_node)
debut = t
while (treeseq[t]!="," and
treeseq[t]!=")" and
treeseq[t]!=":" and
treeseq[t]!=";" and
treeseq[t]!="\n" and
treeseq[t] != "["):
t=t+1
nom = treeseq[debut:t].strip()
tree[pile[-1]][0] = nom
#~ print nom
if treeseq[t]==":":
debut = t + 1
while treeseq[t]!="," and treeseq[t]!=")" and treeseq[t] != "[" and treeseq[t] != ";":
t = t + 1
longueur = float(treeseq[debut:t])
tree[id_node][3] = longueur
#print "fin nom"
if treeseq[t] == "[":
debut = t + 1
t = debut + treeseq[debut:].find("]")
chaine = treeseq[debut:t]
#print chaine
mots = chaine.split(":")
for m in mots:
if m[:2] == "S=":
tree[pile[-1]][5] = m[2:]
if m[:3] == "ND=":
tree[pile[-1]]["ND"] = m[3:]
t = t + 1
if treeseq[t]==":":
debut = t + 1
while treeseq[t]!="," and treeseq[t]!=")" and treeseq[t] != "[" and treeseq[t] != ";":
t = t + 1
longueur = float(treeseq[debut:t])
tree[id_node][3] = longueur
del pile[-1]
#print tree
    # fill in the children lists
nodes = list(getNodes(tree))
for node in nodes:
if not isRoot(tree,node):
pere = getParent(tree,node)
addChild(tree,pere,node)
return tree
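
# --- Hedged usage sketch (added; Python 3 assumed, and the accessor helpers
# defined earlier in this module are assumed to behave as they are used above) ---
if __name__ == "__main__":
    demo = readTree("((A:1.0,B:2.0):0.5,C:3.0);")
    demo_root = getRoot(demo)
    for leaf in getLeaves(demo, demo_root):
        print(getName(demo, leaf), getLength(demo, leaf))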
``` |
{
"source": "aaddisonalford/processorCorrect",
"score": 2
} |
#### File: processorCorrect/examples/testPRFCorrect.py
```python
from scipy.interpolate import interp1d
from scipy.signal import savgol_filter
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm
def smoothVel(vel,spatial,filter):
#Smooth interpolated field.
velSmooth = savgol_filter(vel, filter, 3, mode='interp',axis=1)
#Change -32768 to NaNs.
velSmooth[velSmooth < -1000] = np.nan
#Create difference field
diff = vel.filled(fill_value=np.nan) - velSmooth
return velSmooth,diff
def prtCorrectNew(velField,radius,azimuth,fnyq = 0,nyqL=0,nyqH=0):
'''
+++ Description +++
This function corrects errors related to staggered-PRT processing, is copied
from ../velocityCorrection.py, and is modified to take in velField for testing.
Please see the errorCorrection function in ../velocityCorrection.py for comments and
descriptions.
'''
pointsCaught = 0
count = 0
for radFilter,azFilter in zip([11,21,5,51,71,5],[5,9,5,21,71,5]):
pointsCaughtPrev = pointsCaught
#Radial
vel = velField.copy()
r = radius
velInterp = vel
velSmooth = savgol_filter(velInterp, radFilter, 3, mode='interp',axis=1)
velSmoothRad = velSmooth
diffRad = vel - velSmooth
#### Azimuthal ####
vel = velField.copy().T
r = azimuth
velInterp = vel
velSmooth = savgol_filter(velInterp, azFilter, 3, mode='interp',axis=1)
velSmoothAz = velSmooth.T
velSmoothMean = np.nanmean(np.array([velSmoothAz,velSmoothRad]),axis=0)
diffAz = vel.T - velSmooth.T
diffMean = vel.T - velSmoothMean
diff = np.nanmax(np.array([diffRad,diffAz]),axis=0)
velNew = vel.copy().T
mu,stdAz = norm.fit(np.ma.masked_invalid(diffAz).compressed())
#Compute the standard deviation of the radial difference field.
mu,stdRad = norm.fit(np.ma.masked_invalid(diffRad).compressed())
if (max([nyqL,nyqH])) < 3*stdRad and (max([nyqL,nyqH])) < 3*stdAz:
radMask,azMask = min([nyqL,nyqH]),min([nyqL,nyqH])
else:
radMask,azMask = 3*stdRad,3*stdAz
#Fill NaNs where azimuthal difference is < 3*(standard deviation).
diffAz = np.ma.masked_where(np.abs(diffAz) < azMask,diffAz).filled(fill_value = np.nan)
#Fill NaNs where radial difference is < 3*(standard deviation).
diffRad = np.ma.masked_where(np.abs(diffRad) < radMask,diffRad).filled(fill_value = np.nan)
possibleSolutions = np.empty((16,velNew.shape[0],velNew.shape[1]))
differences = np.empty((16,velNew.shape[0],velNew.shape[1]))
possibleSolutions[0,:] = velNew.copy()
differences[0,:] = velNew.copy() - velSmoothMean
mask = np.zeros(velNew.shape)
count = 1
bound = fnyq
for n1 in [0,1,2,3]:
for n2 in [0,1,2,3]:
if ((n1 == 0) & (n2 == 0)):
continue
nyq = n1*nyqL + n2*nyqH
#### Both
velPossible = velNew.copy()
if nyq-bound < 0: limit = 0
else: limit=nyq-bound
positiveIndices = np.where(((np.isnan(diffAz) != True) & (np.isnan(diffRad) != True)) & \
(diffMean > limit) & (diffMean < nyq + bound))
mask[positiveIndices] = 1
velPossible[positiveIndices] = velPossible[positiveIndices] - nyq
negativeIndices = np.where(((np.isnan(diffAz) != True) & (np.isnan(diffRad) != True)) & \
(diffMean < -1*limit) & (diffMean > -1*(nyq + bound)))
velPossible[negativeIndices] = velPossible[negativeIndices] + nyq
mask[negativeIndices] = 1
possibleSolutions[count,:] = velPossible
count+=1
velSmoothRecompute = np.nanmean(np.array([smoothVel(np.ma.masked_where(mask==1,velNew),radius,radFilter)[0],\
smoothVel(np.ma.masked_where(mask==1,velNew).T,azimuth,azFilter)[0].T]),axis=0)
differences = np.array([velPoss - velSmoothRecompute for velPoss in possibleSolutions])
differences = np.abs(np.ma.masked_invalid(differences).filled(fill_value=0.))
azimuths,ranges = np.meshgrid(range(velSmoothMean.shape[0]),range(velSmoothMean.shape[1]),indexing='ij')
indices = tuple([np.nanargmin(differences,axis=0).flatten(),azimuths.flatten(),ranges.flatten()])
pointsCaught = pointsCaught + np.where(np.nanargmin(differences,axis=0).flatten()!=0)[0].shape[0]
finalSolution = possibleSolutions[indices].reshape(velSmoothMean.shape)
velField = finalSolution
print("TOTAL POINTS CAUGHT ",pointsCaught)
return velField
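
# --- Hedged demo (added sketch; synthetic data, Nyquist values are assumed) ---
if __name__ == "__main__":
    az = np.linspace(0.0, 359.0, 360)                 # azimuth angles (deg)
    rng = np.linspace(0.25, 60.0, 240)                # range gates (km)
    truth = 10.0 * np.sin(np.deg2rad(az))[:, None] * np.ones((1, rng.size))
    aliased = truth.copy()
    aliased[100:105, 50:55] -= 2 * 13.0               # inject a 2*nyqL processor error
    field = np.ma.masked_invalid(aliased)
    corrected = prtCorrectNew(field, rng, az, fnyq=4.0, nyqL=13.0, nyqH=19.5)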
``` |
{
"source": "aadel/sqlalchemy",
"score": 3
} |
#### File: sqlalchemy/test/requirements.py
```python
import sys
from sqlalchemy import exc
from sqlalchemy import util
from sqlalchemy.testing import exclusions
from sqlalchemy.testing.exclusions import against
from sqlalchemy.testing.exclusions import fails_if
from sqlalchemy.testing.exclusions import fails_on
from sqlalchemy.testing.exclusions import fails_on_everything_except
from sqlalchemy.testing.exclusions import LambdaPredicate
from sqlalchemy.testing.exclusions import only_if
from sqlalchemy.testing.exclusions import only_on
from sqlalchemy.testing.exclusions import skip_if
from sqlalchemy.testing.exclusions import SpecPredicate
from sqlalchemy.testing.exclusions import succeeds_if
from sqlalchemy.testing.requirements import SuiteRequirements
def no_support(db, reason):
return SpecPredicate(db, description=reason)
def exclude(db, op, spec, description=None):
return SpecPredicate(db, op, spec, description=description)
class DefaultRequirements(SuiteRequirements):
@property
def deferrable_or_no_constraints(self):
"""Target database must support deferrable constraints."""
return skip_if(
[
no_support("firebird", "not supported by database"),
no_support("mysql", "not supported by database"),
no_support("mssql", "not supported by database"),
]
)
@property
def check_constraints(self):
"""Target database must support check constraints."""
return exclusions.open()
@property
def enforces_check_constraints(self):
"""Target database must also enforce check constraints."""
return self.check_constraints + fails_on(
self._mysql_check_constraints_dont_exist,
"check constraints don't enforce on MySQL, MariaDB<10.2",
)
@property
def named_constraints(self):
"""target database must support names for constraints."""
return exclusions.open()
@property
def implicitly_named_constraints(self):
"""target database must apply names to unnamed constraints."""
return skip_if([no_support("sqlite", "not supported by database")])
@property
def foreign_keys(self):
"""Target database must support foreign keys."""
return skip_if(no_support("sqlite", "not supported by database"))
@property
def on_update_cascade(self):
"""target database must support ON UPDATE..CASCADE behavior in
foreign keys."""
return skip_if(
["sqlite", "oracle"],
"target backend %(doesnt_support)s ON UPDATE CASCADE",
)
@property
def non_updating_cascade(self):
"""target database must *not* support ON UPDATE..CASCADE behavior in
foreign keys."""
return fails_on_everything_except(
"sqlite", "oracle", "+zxjdbc"
) + skip_if("mssql")
@property
def recursive_fk_cascade(self):
"""target database must support ON DELETE CASCADE on a self-referential
foreign key"""
return skip_if(["mssql"])
@property
def deferrable_fks(self):
"""target database must support deferrable fks"""
return only_on(["oracle"])
@property
def foreign_key_constraint_option_reflection_ondelete(self):
return only_on(["postgresql", "mysql", "sqlite", "oracle"])
@property
def fk_constraint_option_reflection_ondelete_restrict(self):
return only_on(["postgresql", "sqlite", self._mysql_80])
@property
def fk_constraint_option_reflection_ondelete_noaction(self):
return only_on(["postgresql", "mysql", "sqlite"])
@property
def foreign_key_constraint_option_reflection_onupdate(self):
return only_on(["postgresql", "mysql", "sqlite"])
@property
def fk_constraint_option_reflection_onupdate_restrict(self):
return only_on(["postgresql", "sqlite", self._mysql_80])
@property
def comment_reflection(self):
return only_on(["postgresql", "mysql", "oracle"])
@property
def unbounded_varchar(self):
"""Target database must support VARCHAR with no length"""
return skip_if(
["firebird", "oracle", "mysql"], "not supported by database"
)
@property
def boolean_col_expressions(self):
"""Target database must support boolean expressions as columns"""
return skip_if(
[
no_support("firebird", "not supported by database"),
no_support("oracle", "not supported by database"),
no_support("mssql", "not supported by database"),
no_support("sybase", "not supported by database"),
]
)
@property
def non_native_boolean_unconstrained(self):
"""target database is not native boolean and allows arbitrary integers
        in its "bool" column"""
return skip_if(
[
LambdaPredicate(
lambda config: against(config, "mssql"),
"SQL Server drivers / odbc seem to change "
"their mind on this",
),
LambdaPredicate(
lambda config: config.db.dialect.supports_native_boolean,
"native boolean dialect",
),
]
)
@property
def standalone_binds(self):
"""target database/driver supports bound parameters as column expressions
without being in the context of a typed column.
"""
return skip_if(["firebird", "mssql+mxodbc"], "not supported by driver")
@property
def no_quoting_special_bind_names(self):
"""Target database will quote bound parameter names, doesn't support
EXPANDING"""
return skip_if(["oracle"])
@property
def identity(self):
"""Target database must support GENERATED AS IDENTITY or a facsimile.
Includes GENERATED AS IDENTITY, AUTOINCREMENT, AUTO_INCREMENT, or other
column DDL feature that fills in a DB-generated identifier at
INSERT-time without requiring pre-execution of a SEQUENCE or other
artifact.
"""
return skip_if(
["firebird", "oracle", "postgresql", "sybase"],
"not supported by database",
)
@property
def temporary_tables(self):
"""target database supports temporary tables"""
return skip_if(["mssql", "firebird"], "not supported (?)")
@property
def temp_table_reflection(self):
return self.temporary_tables
@property
def reflectable_autoincrement(self):
"""Target database must support tables that can automatically generate
PKs assuming they were reflected.
this is essentially all the DBs in "identity" plus PostgreSQL, which
has SERIAL support. FB and Oracle (and sybase?) require the Sequence
to be explicitly added, including if the table was reflected.
"""
return skip_if(
["firebird", "oracle", "sybase"], "not supported by database"
)
@property
def insert_from_select(self):
return skip_if(["firebird"], "crashes for unknown reason")
@property
def fetch_rows_post_commit(self):
return skip_if(["firebird"], "not supported")
@property
def non_broken_binary(self):
"""target DBAPI must work fully with binary values"""
# see https://github.com/pymssql/pymssql/issues/504
return skip_if(["mssql+pymssql"])
@property
def binary_comparisons(self):
"""target database/driver can allow BLOB/BINARY fields to be compared
against a bound parameter value.
"""
return skip_if(["oracle", "mssql"], "not supported by database/driver")
@property
def binary_literals(self):
"""target backend supports simple binary literals, e.g. an
expression like::
SELECT CAST('foo' AS BINARY)
Where ``BINARY`` is the type emitted from :class:`.LargeBinary`,
e.g. it could be ``BLOB`` or similar.
Basically fails on Oracle.
"""
# adding mssql here since it doesn't support comparisons either,
# have observed generally bad behavior with binary / mssql.
return skip_if(["oracle", "mssql"], "not supported by database/driver")
@property
def tuple_in(self):
def _sqlite_tuple_in(config):
return against(
config, "sqlite"
) and config.db.dialect.dbapi.sqlite_version_info >= (3, 15, 0)
return only_on(["mysql", "postgresql", _sqlite_tuple_in])
@property
def independent_cursors(self):
"""Target must support simultaneous, independent database cursors
on a single connection."""
return skip_if(["mssql", "mysql"], "no driver support")
@property
def independent_connections(self):
"""
Target must support simultaneous, independent database connections.
"""
# This is also true of some configurations of UnixODBC and probably
# win32 ODBC as well.
return skip_if(
[
no_support(
"sqlite",
"independent connections disabled "
"when :memory: connections are used",
),
exclude(
"mssql",
"<",
(9, 0, 0),
"SQL Server 2005+ is required for "
"independent connections",
),
]
)
@property
def memory_process_intensive(self):
"""Driver is able to handle the memory tests which run in a subprocess
and iterate through hundreds of connections
"""
return skip_if(
[
no_support("oracle", "Oracle XE usually can't handle these"),
no_support("mssql+pyodbc", "MS ODBC drivers struggle"),
self._running_on_windows(),
]
)
@property
def updateable_autoincrement_pks(self):
"""Target must support UPDATE on autoincrement/integer primary key."""
return skip_if(
["mssql", "sybase"], "IDENTITY columns can't be updated"
)
@property
def isolation_level(self):
return only_on(
("postgresql", "sqlite", "mysql", "mssql"),
"DBAPI has no isolation level support",
) + fails_on(
"postgresql+pypostgresql",
"pypostgresql bombs on multiple isolation level calls",
)
@property
def autocommit(self):
"""target dialect supports 'AUTOCOMMIT' as an isolation_level"""
return only_on(
("postgresql", "mysql", "mssql+pyodbc", "mssql+pymssql"),
"dialect does not support AUTOCOMMIT isolation mode",
)
@property
def row_triggers(self):
"""Target must support standard statement-running EACH ROW triggers."""
return skip_if(
[
# no access to same table
no_support("mysql", "requires SUPER priv"),
exclude("mysql", "<", (5, 0, 10), "not supported by database"),
# huh? TODO: implement triggers for PG tests, remove this
no_support(
"postgresql",
"PG triggers need to be implemented for tests",
),
]
)
@property
def sequences_as_server_defaults(self):
"""Target database must support SEQUENCE as a server side default."""
return only_on(
"postgresql", "doesn't support sequences as a server side default."
)
@property
def sql_expressions_inserted_as_primary_key(self):
return only_if([self.returning, self.sqlite])
@property
def computed_columns_on_update_returning(self):
return self.computed_columns + skip_if("oracle")
@property
def correlated_outer_joins(self):
"""Target must support an outer join to a subquery which
correlates to the parent."""
return skip_if(
"oracle",
'Raises "ORA-01799: a column may not be '
'outer-joined to a subquery"',
)
@property
def update_from(self):
"""Target must support UPDATE..FROM syntax"""
return only_on(
["postgresql", "mssql", "mysql"],
"Backend does not support UPDATE..FROM",
)
@property
def delete_from(self):
"""Target must support DELETE FROM..FROM or DELETE..USING syntax"""
return only_on(
["postgresql", "mssql", "mysql", "sybase"],
"Backend does not support DELETE..FROM",
)
@property
def update_where_target_in_subquery(self):
"""Target must support UPDATE (or DELETE) where the same table is
present in a subquery in the WHERE clause.
This is an ANSI-standard syntax that apparently MySQL can't handle,
such as::
UPDATE documents SET flag=1 WHERE documents.title IN
(SELECT max(documents.title) AS title
FROM documents GROUP BY documents.user_id
)
"""
return fails_if(
self._mysql_not_mariadb_103,
'MySQL error 1093 "Cant specify target table '
'for update in FROM clause", resolved by MariaDB 10.3',
)
@property
def savepoints(self):
"""Target database must support savepoints."""
return skip_if(
["sqlite", "sybase", ("mysql", "<", (5, 0, 3))],
"savepoints not supported",
)
@property
def savepoints_w_release(self):
return self.savepoints + skip_if(
["oracle", "mssql"],
"database doesn't support release of savepoint",
)
@property
def schemas(self):
"""Target database must support external schemas, and have one
named 'test_schema'."""
return skip_if(["firebird"], "no schema support")
@property
def cross_schema_fk_reflection(self):
"""target system must support reflection of inter-schema foreign keys
"""
return only_on(["postgresql", "mysql", "mssql"])
@property
def implicit_default_schema(self):
"""target system has a strong concept of 'default' schema that can
be referred to implicitly.
basically, PostgreSQL.
"""
return only_on(["postgresql"])
@property
def unique_constraint_reflection(self):
return fails_on_everything_except(
"postgresql", "mysql", "sqlite", "oracle"
)
@property
def unique_constraint_reflection_no_index_overlap(self):
return (
self.unique_constraint_reflection
+ skip_if("mysql")
+ skip_if("oracle")
)
@property
def check_constraint_reflection(self):
return fails_on_everything_except(
"postgresql",
"sqlite",
"oracle",
self._mysql_and_check_constraints_exist,
)
@property
def indexes_with_expressions(self):
return only_on(["postgresql", "sqlite>=3.9.0"])
@property
def temp_table_names(self):
"""target dialect supports listing of temporary table names"""
return only_on(["sqlite", "oracle"])
@property
def temporary_views(self):
"""target database supports temporary views"""
return only_on(["sqlite", "postgresql"])
@property
def update_nowait(self):
"""Target database must support SELECT...FOR UPDATE NOWAIT"""
return skip_if(
["firebird", "mssql", "mysql", "sqlite", "sybase"],
"no FOR UPDATE NOWAIT support",
)
@property
def subqueries(self):
"""Target database must support subqueries."""
return skip_if(exclude("mysql", "<", (4, 1, 1)), "no subquery support")
@property
def ctes(self):
"""Target database supports CTEs"""
return only_on(
[
lambda config: against(config, "mysql")
and (
config.db.dialect._is_mariadb
and config.db.dialect._mariadb_normalized_version_info
>= (10, 2)
),
"postgresql",
"mssql",
"oracle",
]
)
@property
def ctes_with_update_delete(self):
"""target database supports CTES that ride on top of a normal UPDATE
or DELETE statement which refers to the CTE in a correlated subquery.
"""
return only_on(
[
"postgresql",
"mssql",
# "oracle" - oracle can do this but SQLAlchemy doesn't support
# their syntax yet
]
)
@property
def ctes_on_dml(self):
"""target database supports CTES which consist of INSERT, UPDATE
or DELETE *within* the CTE, e.g. WITH x AS (UPDATE....)"""
return only_if(["postgresql"])
@property
def mod_operator_as_percent_sign(self):
"""target database must use a plain percent '%' as the 'modulus'
operator."""
return only_if(["mysql", "sqlite", "postgresql+psycopg2", "mssql"])
@property
def intersect(self):
"""Target database must support INTERSECT or equivalent."""
return fails_if(
["firebird", self._mysql_not_mariadb_103, "sybase"],
"no support for INTERSECT",
)
@property
def except_(self):
"""Target database must support EXCEPT or equivalent (i.e. MINUS)."""
return fails_if(
["firebird", self._mysql_not_mariadb_103, "sybase"],
"no support for EXCEPT",
)
@property
def order_by_col_from_union(self):
"""target database supports ordering by a column from a SELECT
inside of a UNION
E.g. (SELECT id, ...) UNION (SELECT id, ...) ORDER BY id
Fails on SQL Server
"""
return fails_if("mssql")
@property
def parens_in_union_contained_select_w_limit_offset(self):
"""Target database must support parenthesized SELECT in UNION
when LIMIT/OFFSET is specifically present.
E.g. (SELECT ... LIMIT ..) UNION (SELECT .. OFFSET ..)
This is known to fail on SQLite.
"""
return fails_if("sqlite")
@property
def parens_in_union_contained_select_wo_limit_offset(self):
"""Target database must support parenthesized SELECT in UNION
when OFFSET/LIMIT is specifically not present.
E.g. (SELECT ...) UNION (SELECT ..)
This is known to fail on SQLite. It also fails on Oracle
because without LIMIT/OFFSET, there is currently no step that
creates an additional subquery.
"""
return fails_if(["sqlite", "oracle"])
@property
def offset(self):
"""Target database must support some method of adding OFFSET or
equivalent to a result set."""
return fails_if(["sybase"], "no support for OFFSET or equivalent")
@property
def sql_expression_limit_offset(self):
return (
fails_if(
["mysql"],
"Target backend can't accommodate full expressions in "
"OFFSET or LIMIT",
)
+ self.offset
)
@property
def window_functions(self):
return only_if(
["postgresql>=8.4", "mssql", "oracle", "sqlite>=3.25.0"],
"Backend does not support window functions",
)
@property
def two_phase_transactions(self):
"""Target database must support two-phase transactions."""
return skip_if(
[
no_support("firebird", "no SA implementation"),
no_support("mssql", "two-phase xact not supported by drivers"),
no_support(
"oracle", "two-phase xact not implemented in SQLA/oracle"
),
no_support(
"drizzle", "two-phase xact not supported by database"
),
no_support(
"sqlite", "two-phase xact not supported by database"
),
no_support(
"sybase", "two-phase xact not supported by drivers/SQLA"
),
no_support(
"postgresql+zxjdbc",
"FIXME: JDBC driver confuses the transaction state, "
"may need separate XA implementation",
),
no_support(
"mysql",
"recent MySQL communiity editions have too many issues "
"(late 2016), disabling for now",
),
]
)
@property
def two_phase_recovery(self):
return self.two_phase_transactions + (
skip_if("mysql", "crashes on most mariadb and mysql versions")
)
@property
def views(self):
"""Target database must support VIEWs."""
return skip_if("drizzle", "no VIEW support")
@property
def empty_strings_varchar(self):
"""
target database can persist/return an empty string with a varchar.
"""
return fails_if(
["oracle"], "oracle converts empty strings to a blank space"
)
@property
def empty_strings_text(self):
"""target database can persist/return an empty string with an
unbounded text."""
return exclusions.open()
@property
def expressions_against_unbounded_text(self):
"""target database supports use of an unbounded textual field in a
WHERE clause."""
return fails_if(
["oracle"],
"ORA-00932: inconsistent datatypes: expected - got CLOB",
)
@property
def unicode_data(self):
"""target drive must support unicode data stored in columns."""
return skip_if([no_support("sybase", "no unicode driver support")])
@property
def unicode_connections(self):
"""
Target driver must support some encoding of Unicode across the wire.
"""
# TODO: expand to exclude MySQLdb versions w/ broken unicode
return skip_if(
[exclude("mysql", "<", (4, 1, 1), "no unicode connection support")]
)
@property
def unicode_ddl(self):
"""Target driver must support some degree of non-ascii symbol names."""
# TODO: expand to exclude MySQLdb versions w/ broken unicode
return skip_if(
[
no_support("oracle", "FIXME: no support in database?"),
no_support("sybase", "FIXME: guessing, needs confirmation"),
no_support("mssql+pymssql", "no FreeTDS support"),
LambdaPredicate(
lambda config: against(config, "mysql+mysqlconnector")
and config.db.dialect._mysqlconnector_version_info > (2, 0)
and util.py2k,
"bug in mysqlconnector 2.0",
),
exclude(
"mysql", "<", (4, 1, 1), "no unicode connection support"
),
]
)
@property
def emulated_lastrowid(self):
""""target dialect retrieves cursor.lastrowid or an equivalent
after an insert() construct executes.
"""
return fails_on_everything_except(
"mysql", "sqlite+pysqlite", "sqlite+pysqlcipher", "sybase", "mssql"
)
@property
def implements_get_lastrowid(self):
return skip_if([no_support("sybase", "not supported by database")])
@property
def dbapi_lastrowid(self):
""""target backend includes a 'lastrowid' accessor on the DBAPI
cursor object.
"""
return skip_if(
"mssql+pymssql", "crashes on pymssql"
) + fails_on_everything_except(
"mysql", "sqlite+pysqlite", "sqlite+pysqlcipher"
)
@property
def nullsordering(self):
"""Target backends that support nulls ordering."""
return fails_on_everything_except(
"postgresql", "oracle", "firebird", "sqlite >= 3.30.0"
)
@property
def reflects_pk_names(self):
"""Target driver reflects the name of primary key constraints."""
return fails_on_everything_except(
"postgresql", "oracle", "mssql", "sybase", "sqlite"
)
@property
def nested_aggregates(self):
"""target database can select an aggregate from a subquery that's
also using an aggregate"""
return skip_if(["mssql", "sqlite"])
@property
def array_type(self):
return only_on(
[
lambda config: against(config, "postgresql")
and not against(config, "+pg8000")
and not against(config, "+zxjdbc")
]
)
@property
def json_type(self):
return only_on(
[
lambda config: against(config, "mysql")
and (
(
not config.db.dialect._is_mariadb
and against(config, "mysql >= 5.7")
)
or (
config.db.dialect._mariadb_normalized_version_info
>= (10, 2, 7)
)
),
"postgresql >= 9.3",
self._sqlite_json,
]
)
@property
def json_index_supplementary_unicode_element(self):
# for sqlite see https://bugs.python.org/issue38749
return skip_if(
[
lambda config: against(config, "mysql")
and config.db.dialect._is_mariadb,
"sqlite",
]
)
def _sqlite_json(self, config):
if not against(config, "sqlite >= 3.9"):
return False
else:
with config.db.connect() as conn:
try:
return (
conn.scalar(
"""select json_extract('{"foo": "bar"}', """
"""'$."foo"')"""
)
== "bar"
)
except exc.DBAPIError:
return False
@property
def reflects_json_type(self):
return only_on(
[
lambda config: against(config, "mysql >= 5.7")
and not config.db.dialect._is_mariadb,
"postgresql >= 9.3",
"sqlite >= 3.9",
]
)
@property
def json_array_indexes(self):
return self.json_type + fails_if("+pg8000")
@property
def datetime_literals(self):
"""target dialect supports rendering of a date, time, or datetime as a
literal string, e.g. via the TypeEngine.literal_processor() method.
"""
return fails_on_everything_except("sqlite")
@property
def datetime(self):
"""target dialect supports representation of Python
datetime.datetime() objects."""
return exclusions.open()
@property
def datetime_microseconds(self):
"""target dialect supports representation of Python
datetime.datetime() with microsecond objects."""
return skip_if(
["mssql", "mysql", "firebird", "+zxjdbc", "oracle", "sybase"]
)
@property
def timestamp_microseconds(self):
"""target dialect supports representation of Python
datetime.datetime() with microsecond objects but only
if TIMESTAMP is used."""
return only_on(["oracle"])
@property
def datetime_historic(self):
"""target dialect supports representation of Python
datetime.datetime() objects with historic (pre 1900) values."""
return succeeds_if(["sqlite", "postgresql", "firebird"])
@property
def date(self):
"""target dialect supports representation of Python
datetime.date() objects."""
return exclusions.open()
@property
def date_coerces_from_datetime(self):
"""target dialect accepts a datetime object as the target
of a date column."""
# does not work as of pyodbc 4.0.22
return fails_on("mysql+mysqlconnector") + skip_if("mssql+pyodbc")
@property
def date_historic(self):
"""target dialect supports representation of Python
datetime.datetime() objects with historic (pre 1900) values."""
return succeeds_if(["sqlite", "postgresql", "firebird"])
@property
def time(self):
"""target dialect supports representation of Python
datetime.time() objects."""
return skip_if(["oracle"])
@property
def time_microseconds(self):
"""target dialect supports representation of Python
datetime.time() with microsecond objects."""
return skip_if(
["mssql", "mysql", "firebird", "+zxjdbc", "oracle", "sybase"]
)
@property
def precision_numerics_general(self):
"""target backend has general support for moderately high-precision
numerics."""
return exclusions.open()
@property
def precision_numerics_enotation_small(self):
"""target backend supports Decimal() objects using E notation
to represent very small values."""
# NOTE: this exclusion isn't used in current tests.
return exclusions.open()
@property
def precision_numerics_enotation_large(self):
"""target backend supports Decimal() objects using E notation
to represent very large values."""
return fails_if(
[
(
"sybase+pyodbc",
None,
None,
"Don't know how do get these values through "
"FreeTDS + Sybase",
),
("firebird", None, None, "Precision must be from 1 to 18"),
]
)
@property
def precision_numerics_many_significant_digits(self):
"""target backend supports values with many digits on both sides,
such as 319438950232418390.273596, 87673.594069654243
"""
def broken_cx_oracle(config):
return (
against(config, "oracle+cx_oracle")
and config.db.dialect.cx_oracle_ver <= (6, 0, 2)
and config.db.dialect.cx_oracle_ver > (6,)
)
return fails_if(
[
("sqlite", None, None, "TODO"),
("firebird", None, None, "Precision must be from 1 to 18"),
("sybase+pysybase", None, None, "TODO"),
]
)
@property
def precision_numerics_retains_significant_digits(self):
"""A precision numeric type will return empty significant digits,
i.e. a value such as 10.000 will come back in Decimal form with
the .000 maintained."""
return fails_if(
[
("oracle", None, None, "driver doesn't do this automatically"),
(
"firebird",
None,
None,
"database and/or driver truncates decimal places.",
),
]
)
@property
def precision_generic_float_type(self):
"""target backend will return native floating point numbers with at
least seven decimal places when using the generic Float type."""
return fails_if(
[
(
"mysql",
None,
None,
"mysql FLOAT type only returns 4 decimals",
),
(
"firebird",
None,
None,
"firebird FLOAT type isn't high precision",
),
]
)
@property
def floats_to_four_decimals(self):
return fails_if(
[
("mysql+oursql", None, None, "Floating point error"),
(
"firebird",
None,
None,
"Firebird still has FP inaccuracy even "
"with only four decimal places",
),
(
"mssql+pyodbc",
None,
None,
"mssql+pyodbc has FP inaccuracy even with "
"only four decimal places ",
),
(
"mssql+pymssql",
None,
None,
"mssql+pymssql has FP inaccuracy even with "
"only four decimal places ",
),
(
"postgresql+pg8000",
None,
None,
"postgresql+pg8000 has FP inaccuracy even with "
"only four decimal places ",
),
(
"postgresql+psycopg2cffi",
None,
None,
"postgresql+psycopg2cffi has FP inaccuracy even with "
"only four decimal places ",
),
]
)
@property
def implicit_decimal_binds(self):
"""target backend will return a selected Decimal as a Decimal, not
a string.
e.g.::
expr = decimal.Decimal("15.7563")
value = e.scalar(
select([literal(expr)])
)
assert value == expr
See :ticket:`4036`
"""
# fixed for mysqlclient in
# https://github.com/PyMySQL/mysqlclient-python/commit/68b9662918577fc05be9610ef4824a00f2b051b0
def check(config):
if against(config, "mysql+mysqldb"):
# can remove once post 1.3.13 is released
try:
from MySQLdb import converters
from decimal import Decimal
return Decimal not in converters.conversions
except:
return True
return against(
config, "mysql+mysqldb"
) and config.db.dialect._mysql_dbapi_version <= (1, 3, 13)
return exclusions.fails_on(check, "fixed for mysqlclient post 1.3.13")
@property
def fetch_null_from_numeric(self):
return skip_if(("mssql+pyodbc", None, None, "crashes due to bug #351"))
@property
def duplicate_key_raises_integrity_error(self):
return fails_on("postgresql+pg8000")
def _has_pg_extension(self, name):
def check(config):
if not against(config, "postgresql"):
return False
count = config.db.scalar(
"SELECT count(*) FROM pg_extension "
"WHERE extname='%s'" % name
)
return bool(count)
return only_if(check, "needs %s extension" % name)
@property
def hstore(self):
return self._has_pg_extension("hstore")
@property
def btree_gist(self):
return self._has_pg_extension("btree_gist")
@property
def range_types(self):
def check_range_types(config):
if not against(
config, ["postgresql+psycopg2", "postgresql+psycopg2cffi"]
):
return False
try:
config.db.scalar("select '[1,2)'::int4range;")
return True
except Exception:
return False
return only_if(check_range_types)
@property
def oracle_test_dblink(self):
return skip_if(
lambda config: not config.file_config.has_option(
"sqla_testing", "oracle_db_link"
),
"oracle_db_link option not specified in config",
)
@property
def postgresql_test_dblink(self):
return skip_if(
lambda config: not config.file_config.has_option(
"sqla_testing", "postgres_test_db_link"
),
"postgres_test_db_link option not specified in config",
)
@property
def postgresql_jsonb(self):
return only_on("postgresql >= 9.4") + skip_if(
lambda config: config.db.dialect.driver == "pg8000"
and config.db.dialect._dbapi_version <= (1, 10, 1)
)
@property
def psycopg2_native_json(self):
return self.psycopg2_compatibility
@property
def psycopg2_native_hstore(self):
return self.psycopg2_compatibility
@property
def psycopg2_compatibility(self):
return only_on(["postgresql+psycopg2", "postgresql+psycopg2cffi"])
@property
def psycopg2_or_pg8000_compatibility(self):
return only_on(
[
"postgresql+psycopg2",
"postgresql+psycopg2cffi",
"postgresql+pg8000",
]
)
@property
def percent_schema_names(self):
return skip_if(
[
(
"+psycopg2",
None,
None,
"psycopg2 2.4 no longer accepts percent "
"sign in bind placeholders",
),
(
"+psycopg2cffi",
None,
None,
"psycopg2cffi does not accept percent signs in "
"bind placeholders",
),
("mysql", None, None, "executemany() doesn't work here"),
]
)
@property
def order_by_label_with_expression(self):
return fails_if(
[
(
"firebird",
None,
None,
"kinterbasdb doesn't send full type information",
),
("postgresql", None, None, "only simple labels allowed"),
("sybase", None, None, "only simple labels allowed"),
("mssql", None, None, "only simple labels allowed"),
]
)
def get_order_by_collation(self, config):
lookup = {
# will raise without quoting
"postgresql": "POSIX",
# note MySQL databases need to be created w/ utf8mb4 charset
# for the test suite
"mysql": "utf8mb4_bin",
"sqlite": "NOCASE",
# will raise *with* quoting
"mssql": "Latin1_General_CI_AS",
}
try:
return lookup[config.db.name]
except KeyError:
raise NotImplementedError()
@property
def skip_mysql_on_windows(self):
"""Catchall for a large variety of MySQL on Windows failures"""
return skip_if(
self._has_mysql_on_windows, "Not supported on MySQL + Windows"
)
@property
def mssql_freetds(self):
return only_on(["mssql+pymssql"])
@property
def ad_hoc_engines(self):
return exclusions.skip_if(
["oracle"],
"works, but Oracle just gets tired with "
"this much connection activity",
)
@property
def no_mssql_freetds(self):
return self.mssql_freetds.not_()
@property
def pyodbc_fast_executemany(self):
def has_fastexecutemany(config):
if not against(config, "mssql+pyodbc"):
return False
if config.db.dialect._dbapi_version() < (4, 0, 19):
return False
with config.db.connect() as conn:
drivername = conn.connection.connection.getinfo(
config.db.dialect.dbapi.SQL_DRIVER_NAME
)
# on linux this is 'libmsodbcsql-13.1.so.9.2'.
# don't know what it is on windows
return "msodbc" in drivername
return only_if(
has_fastexecutemany, "only on pyodbc > 4.0.19 w/ msodbc driver"
)
@property
def python_fixed_issue_8743(self):
return exclusions.skip_if(
lambda: sys.version_info < (2, 7, 8),
"Python issue 8743 fixed in Python 2.7.8",
)
@property
def granular_timezone(self):
"""the datetime.timezone class, or SQLAlchemy's port, supports
seconds and microseconds.
SQLAlchemy ported the Python 3.7 version for Python 2, so
it passes on that. For Python 3.6 and earlier, it is not supported.
"""
return exclusions.skip_if(
lambda: sys.version_info >= (3,) and sys.version_info < (3, 7)
)
@property
def selectone(self):
"""target driver must support the literal statement 'select 1'"""
return skip_if(
["oracle", "firebird"], "non-standard SELECT scalar syntax"
)
@property
def mysql_for_update(self):
return skip_if(
"mysql+mysqlconnector",
"lock-sensitive operations crash on mysqlconnector",
)
@property
def mysql_fsp(self):
return only_if("mysql >= 5.6.4")
@property
def mysql_fully_case_sensitive(self):
return only_if(self._has_mysql_fully_case_sensitive)
@property
def mysql_zero_date(self):
def check(config):
if not against(config, "mysql"):
return False
row = config.db.execute("show variables like 'sql_mode'").first()
return not row or "NO_ZERO_DATE" not in row[1]
return only_if(check)
@property
def mysql_non_strict(self):
def check(config):
if not against(config, "mysql"):
return False
row = config.db.execute("show variables like 'sql_mode'").first()
return not row or "STRICT_TRANS_TABLES" not in row[1]
return only_if(check)
@property
def mysql_ngram_fulltext(self):
def check(config):
return (
against(config, "mysql")
and not config.db.dialect._is_mariadb
and config.db.dialect.server_version_info >= (5, 7)
)
return only_if(check)
def _mysql_80(self, config):
return (
against(config, "mysql")
and config.db.dialect._is_mysql
and config.db.dialect.server_version_info >= (8,)
)
def _mariadb_102(self, config):
return (
against(config, "mysql")
and config.db.dialect._is_mariadb
and config.db.dialect._mariadb_normalized_version_info > (10, 2)
)
def _mysql_and_check_constraints_exist(self, config):
# 1. we have mysql / mariadb and
# 2. it enforces check constraints
if exclusions.against(config, "mysql"):
if config.db.dialect._is_mariadb:
norm_version_info = (
config.db.dialect._mariadb_normalized_version_info
)
return norm_version_info >= (10, 2)
else:
norm_version_info = config.db.dialect.server_version_info
return norm_version_info >= (8, 0, 16)
else:
return False
def _mysql_check_constraints_exist(self, config):
# 1. we dont have mysql / mariadb or
# 2. we have mysql / mariadb that enforces check constraints
return not exclusions.against(
config, "mysql"
) or self._mysql_and_check_constraints_exist(config)
def _mysql_check_constraints_dont_exist(self, config):
# 1. we have mysql / mariadb and
# 2. they dont enforce check constraints
return not self._mysql_check_constraints_exist(config)
def _mysql_not_mariadb_102(self, config):
return against(config, "mysql") and (
not config.db.dialect._is_mariadb
or config.db.dialect._mariadb_normalized_version_info < (10, 2)
)
def _mysql_not_mariadb_103(self, config):
return against(config, "mysql") and (
not config.db.dialect._is_mariadb
or config.db.dialect._mariadb_normalized_version_info < (10, 3)
)
def _has_mysql_on_windows(self, config):
return (
against(config, "mysql")
and config.db.dialect._detect_casing(config.db) == 1
)
def _has_mysql_fully_case_sensitive(self, config):
return (
against(config, "mysql")
and config.db.dialect._detect_casing(config.db) == 0
)
@property
def postgresql_utf8_server_encoding(self):
return only_if(
lambda config: against(config, "postgresql")
and config.db.scalar("show server_encoding").lower() == "utf8"
)
@property
def cxoracle6_or_greater(self):
return only_if(
lambda config: against(config, "oracle+cx_oracle")
and config.db.dialect.cx_oracle_ver >= (6,)
)
@property
def oracle5x(self):
return only_if(
lambda config: against(config, "oracle+cx_oracle")
and config.db.dialect.cx_oracle_ver < (6,)
)
@property
def computed_columns(self):
return skip_if(["postgresql < 12", "sqlite", "mysql < 5.7"])
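
# --- Illustrative note (added): these properties are consumed through
# SQLAlchemy's testing plumbing; a test module might write, for example::
#
#     from sqlalchemy import testing
#
#     class SavepointTest(fixtures.TestBase):
#         __requires__ = ("savepoints",)
#
#         @testing.requires.update_from
#         def test_update_from(self):
#             ...
#
# Each property above returns an exclusion rule that skips the test, or marks
# it as an expected failure, on the named backends.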
``` |
{
"source": "aadesh11/KeyPhraseExtraction",
"score": 3
} |
#### File: KeyPhraseExtraction/keyphrase/normalize.py
```python
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from sklearn.decomposition import PCA
def standard_normalize_cosine_similarities(cosine_similarities):
"""Normalized cosine similarities"""
# normalize into 0-1 range
cosine_sims_norm = (cosine_similarities - np.min(cosine_similarities)) / (
np.max(cosine_similarities) - np.min(cosine_similarities)
)
# standardize and shift by 0.5
cosine_sims_norm = 0.5 + (cosine_sims_norm - np.mean(cosine_sims_norm)) / np.std(
cosine_sims_norm
)
return cosine_sims_norm
def max_normalize_cosine_similarities_pairwise(cosine_similarities):
"""Normalized cosine similarities of pairs which is 2d matrix of pairwise cosine similarities"""
cosine_sims_norm = np.copy(cosine_similarities)
np.fill_diagonal(cosine_sims_norm, np.NaN)
    # normalize into 0-1 range, using the NaN-diagonal copy so that
    # self-similarities are excluded from the min/max
    cosine_sims_norm = (
        cosine_sims_norm - np.nanmin(cosine_sims_norm, axis=0)
    ) / (
        np.nanmax(cosine_sims_norm, axis=0) - np.nanmin(cosine_sims_norm, axis=0)
    )
# standardize shift by 0.5
cosine_sims_norm = 0.5 + (
cosine_sims_norm - np.nanmean(cosine_sims_norm, axis=0)
) / np.nanstd(cosine_sims_norm, axis=0)
return cosine_sims_norm
def max_normalize_cosine_similarities(cosine_similarities):
"""Normalize cosine similarities using max normalization approach"""
return 1 / np.max(cosine_similarities) * cosine_similarities.squeeze(axis=1)
def get_alias_keywords(keyword_sims, keywords, threshold):
"""Find keywords in selected list that are aliases (very similar) to each other"""
    similarities = np.nan_to_num(keyword_sims, nan=0.0)
sorted_similarities = np.flip(np.argsort(similarities), 1)
aliases = []
for idx, item in enumerate(sorted_similarities):
alias_for_item = []
for i in item:
if similarities[idx, i] >= threshold:
alias_for_item.append(keywords[i])
else:
break
aliases.append(alias_for_item)
return aliases
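
# --- Hedged usage sketch (added for illustration; values are arbitrary) ---
#   sims = np.array([[1.0, 0.9, 0.2],
#                    [0.9, 1.0, 0.3],
#                    [0.2, 0.3, 1.0]])
#   norm_sims = max_normalize_cosine_similarities_pairwise(sims)
#   aliases = get_alias_keywords(norm_sims, ["ml", "machine learning", "cat"], 1.0)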
def plot(embs, texts):
pca = PCA(n_components=2)
vectors_2d = pca.fit_transform(embs)
data = pd.DataFrame(
{
"v1": vectors_2d[:, 0],
"v2": vectors_2d[:, 1],
}
)
ax = sns.scatterplot(x=data.v1, y=data.v2) # style=data.type, hue=data.type
    for i, text in enumerate(texts):
if len(text) > 20:
text = text[:20] + "..."
ax.annotate(text, (vectors_2d[i, 0], vectors_2d[i, 1]))
plt.show()
```
#### File: KeyPhraseExtraction/keyphrase/preprocess.py
```python
import re
"""
Add any pre-processing steps here.
"""
# To clean the text sentences
def clean_token_text(token_text):
tokens = token_text.split()
p = r"[^A-Za-z0-9]\Z"
tokens = [re.sub(p, "", token) for token in tokens]
tokens = [token for token in tokens if len(token) > 1]
return " ".join(tokens)
```
#### File: KeyPhraseExtraction/keyphrase/transformer_models.py
```python
from transformers import AutoModel, AutoTokenizer
class TransformerModels:
def __init__(self, model_name="xlm-roberta-base"):
self.model = AutoModel.from_pretrained(model_name)
self.tokenizer = AutoTokenizer.from_pretrained(model_name)
# Build model and get embeddings from it.
def build_model(self, texts=None):
        # texts: candidate phrase sentences, or the given sentences from which key-phrases are to be extracted
texts_tokens = self.tokenizer(texts, padding=True, return_tensors="pt")
texts_embeddings = self.model(**texts_tokens)["pooler_output"]
return texts_embeddings.detach().numpy()
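
# --- Hedged usage sketch (added; downloads model weights on first run) ---
#   tm = TransformerModels()  # defaults to "xlm-roberta-base"
#   embs = tm.build_model(["keyphrase extraction", "transformer models"])
#   embs.shape  # (2, hidden_size); hidden_size is 768 for the base model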
``` |
{
"source": "Aadesh-Baral/tasking-manager",
"score": 3
} |
#### File: api/countries/resources.py
```python
from flask_restful import Resource, current_app
from backend.services.tags_service import TagsService
class CountriesRestAPI(Resource):
def get(self):
"""
Fetch all Country tags
---
tags:
- countries
produces:
- application/json
responses:
200:
description: All Country tags returned
500:
description: Internal Server Error
"""
try:
tags = TagsService.get_all_countries()
return tags.to_primitive(), 200
except Exception as e:
error_msg = f"User GET - unhandled error: {str(e)}"
current_app.logger.critical(error_msg)
return {"Error": error_msg, "SubCode": "InternalServerError"}, 500
```
#### File: api/projects/campaigns.py
```python
from flask_restful import Resource, current_app
from schematics.exceptions import DataError
from backend.models.dtos.campaign_dto import CampaignProjectDTO
from backend.services.campaign_service import CampaignService
from backend.services.project_admin_service import ProjectAdminService
from backend.models.postgis.utils import NotFound
from backend.services.users.authentication_service import token_auth
class ProjectsCampaignsAPI(Resource):
@token_auth.login_required
def post(self, project_id, campaign_id):
"""
Assign a campaign for a project
---
tags:
- campaigns
produces:
- application/json
parameters:
- in: header
name: Authorization
description: Base64 encoded session token
required: true
type: string
default: Token sessionTokenHere==
- name: project_id
in: path
description: Unique project ID
required: true
type: integer
default: 1
- name: campaign_id
in: path
description: Unique campaign ID
required: true
type: integer
default: 1
responses:
201:
description: Campaign assigned successfully
400:
description: Client Error - Invalid Request
401:
description: Unauthorized - Invalid credentials
403:
description: Forbidden
500:
description: Internal Server Error
"""
try:
authenticated_user_id = token_auth.current_user()
if not ProjectAdminService.is_user_action_permitted_on_project(
authenticated_user_id, project_id
):
raise ValueError()
except ValueError:
return {
"Error": "User is not a manager of the project",
"SubCode": "UserPermissionError",
}, 403
try:
campaign_project_dto = CampaignProjectDTO()
campaign_project_dto.campaign_id = campaign_id
campaign_project_dto.project_id = project_id
campaign_project_dto.validate()
except DataError as e:
current_app.logger.error(f"error validating request: {str(e)}")
return {"Error": str(e), "SubCode": "InvalidData"}, 400
try:
CampaignService.create_campaign_project(campaign_project_dto)
message = "campaign with id {} assigned successfully for project with id {}".format(
campaign_id, project_id
)
return ({"Success": message}, 200)
except Exception as e:
error_msg = f"ProjectsCampaignsAPI POST - unhandled error: {str(e)}"
current_app.logger.critical(error_msg)
return {"Error": error_msg, "SubCode": "InternalServerError"}, 500
def get(self, project_id):
"""
Gets all campaigns for a project
---
tags:
- campaigns
produces:
- application/json
parameters:
- name: project_id
in: path
description: Unique project ID
required: true
type: integer
default: 1
responses:
200:
description: Campaign list returned successfully
400:
description: Client Error - Invalid Request
401:
description: Unauthorized - Invalid credentials
500:
description: Internal Server Error
"""
try:
campaigns = CampaignService.get_project_campaigns_as_dto(project_id)
return campaigns.to_primitive(), 200
except NotFound:
return {"Error": "No campaign found", "SubCode": "NotFound"}, 404
except Exception as e:
error_msg = f"Messages GET - unhandled error: {str(e)}"
current_app.logger.critical(error_msg)
return {"Error": error_msg, "SubCode": "InternalServerError"}, 500
@token_auth.login_required
def delete(self, project_id, campaign_id):
"""
Delete a campaign for a project
---
tags:
- campaigns
produces:
- application/json
parameters:
- in: header
name: Authorization
description: Base64 encoded session token
required: true
type: string
default: Token sessionTokenHere==
- name: project_id
in: path
description: Unique project ID
required: true
type: integer
default: 1
- name: campaign_id
in: path
description: Unique campaign ID
required: true
type: integer
default: 1
responses:
200:
                description: Campaign deleted successfully
400:
description: Client Error - Invalid Request
401:
description: Unauthorized - Invalid credentials
403:
description: Forbidden
500:
description: Internal Server Error
"""
try:
authenticated_user_id = token_auth.current_user()
if not ProjectAdminService.is_user_action_permitted_on_project(
authenticated_user_id, project_id
):
raise ValueError()
except ValueError:
return {
"Error": "User is not a manager of the project",
"SubCode": "UserPermissionError",
}, 403
try:
CampaignService.delete_project_campaign(project_id, campaign_id)
return {"Success": "Campaigns Deleted"}, 200
except NotFound:
return {"Error": "Campaign Not Found", "SubCode": "NotFound"}, 404
except Exception as e:
error_msg = f"ProjectsCampaignsAPI DELETE - unhandled error: {str(e)}"
current_app.logger.critical(error_msg)
return {"Error": error_msg, "SubCode": "InternalServerError"}, 500
```
#### File: api/system/statistics.py
```python
from flask_restful import Resource, current_app
from backend.services.stats_service import StatsService
from flask_restful import request
from distutils.util import strtobool
class SystemStatisticsAPI(Resource):
def get(self):
"""
Get HomePage Stats
---
tags:
- system
produces:
- application/json
parameters:
- in: query
name: abbreviated
type: boolean
description: Set to false if complete details on projects including total area, campaigns, orgs are required
default: True
responses:
200:
description: Project stats
500:
description: Internal Server Error
"""
try:
abbreviated = (
strtobool(request.args.get("abbreviated"))
if request.args.get("abbreviated")
else True
)
stats = StatsService.get_homepage_stats(abbreviated)
return stats.to_primitive(), 200
except Exception as e:
error_msg = f"Unhandled error: {str(e)}"
current_app.logger.critical(error_msg)
return {
"Error": "Unable to fetch summary statistics",
"SubCode": "InternalServerError",
}, 500
``` |
{
"source": "Aadeshkale/gcp-security",
"score": 3
} |
#### File: gcp-security/gcpsecurity/gcp_cloud_sql.py
```python
import csv
from googleapiclient import discovery
from google.oauth2 import service_account
class SqlChecks:
"""
    this class performs different checks on GCP Cloud SQL
"""
def __init__(self, sql_client, sql_instances):
self.sql_client = sql_client
self.sql_instances = sql_instances
# --- check methods ---
    # this method checks whether gcp cloud sql instances have public internet access
    def check_4_1_cloud_sql_public_access(self):
        check_id = 4.1
        description = "Check whether GCP Cloud SQL instances have public internet access"
if len(self.sql_instances) <= 0:
res = self.result_template(
check_id=check_id,
result=False,
reason="There is no gcp cloud sql instances created",
resource_list=[],
description=description
)
return res
else:
resource_list = []
for inst in self.sql_instances:
try:
                    if inst['settings']['ipConfiguration']['ipv4Enabled']:
                        resource_list.append(inst['connectionName'])
                except KeyError:
pass
if len(resource_list) > 0:
result = True
reason = "Gcp cloud sql instance has public internet access"
else:
result = False
reason = "ALL Gcp cloud sql instances does not have public internet access"
return self.result_template(check_id, result, reason, resource_list, description)
    # this method checks whether gcp cloud sql instances have auto backup
    def check_4_2_cloud_sql_backup_config(self):
        check_id = 4.2
        description = "Check whether GCP Cloud SQL instances lack auto backup"
if len(self.sql_instances) <= 0:
res = self.result_template(
check_id=check_id,
result=False,
reason="There is no gcp cloud sql instances created",
resource_list=[],
description=description
)
return res
else:
resource_list = []
for inst in self.sql_instances:
try:
if inst['settings']['backupConfiguration']['enabled']:
pass
else:
resource_list.append(inst['connectionName'])
                except KeyError:
resource_list.append(inst['connectionName'])
if len(resource_list) > 0:
result = True
reason = "Gcp cloud sql instance does not have auto backup"
else:
result = False
reason = "ALL gcp cloud sql instances have auto backup"
return self.result_template(check_id, result, reason, resource_list, description)
    # this method checks whether gcp cloud sql instances have auto scaling enabled
    def check_4_3_cloud_sql_auto_scaling(self):
        check_id = 4.3
        description = "Check whether GCP Cloud SQL instances lack auto scaling"
if len(self.sql_instances) <= 0:
res = self.result_template(
check_id=check_id,
result=False,
reason="There is no gcp cloud sql instances created",
resource_list=[],
description=description
)
return res
else:
resource_list = []
for inst in self.sql_instances:
try:
if inst['settings']['storageAutoResize']:
pass
else:
resource_list.append(inst['connectionName'])
                except KeyError:
resource_list.append(inst['connectionName'])
if len(resource_list) > 0:
result = True
reason = "gcp cloud sql instance does not have auto scaling enabled"
else:
result = False
reason = "ALL gcp cloud sql instances have auto scaling enabled"
return self.result_template(check_id, result, reason, resource_list, description)
    # this method checks whether gcp cloud sql instances have high availability in the region
    def check_4_4_cloud_sql_high_availability(self):
        check_id = 4.4
        description = "Check whether GCP Cloud SQL instances lack high availability in the region"
if len(self.sql_instances) <= 0:
res = self.result_template(
check_id=check_id,
result=False,
reason="There is no gcp cloud sql instances created",
resource_list=[],
description=description
)
return res
else:
resource_list = []
for inst in self.sql_instances:
try:
if inst['settings']['availabilityType'] == 'REGIONAL':
pass
else:
resource_list.append(inst['connectionName'])
                except KeyError:
resource_list.append(inst['connectionName'])
if len(resource_list) > 0:
result = True
reason = "gcp cloud sql instance does not have high availability in region"
else:
result = False
reason = "ALL gcp cloud sql instances have high availability in region"
return self.result_template(check_id, result, reason, resource_list, description)
    # this method checks whether gcp cloud sql instances are authorized to GAE applications
    def check_4_5_cloud_sql_gae_application(self):
        check_id = 4.5
        description = "Check whether GCP Cloud SQL instances are authorized to GAE applications"
if len(self.sql_instances) <= 0:
res = self.result_template(
check_id=check_id,
result=False,
reason="There is no gcp cloud sql instances created",
resource_list=[],
description=description
)
return res
else:
resource_list = []
for inst in self.sql_instances:
try:
if len(inst['settings']['authorizedGaeApplications']) > 0:
res = dict()
gae_list = []
for gae in inst['settings']['authorizedGaeApplications']:
gae_list.append(gae)
res[inst['name']] = gae_list
resource_list.append(res)
else:
pass
                except KeyError:
pass
if len(resource_list) > 0:
result = True
reason = "gcp cloud sql instance authorized to GEA Applications"
else:
result = False
reason = "ALL gcp cloud sql instances not authorized to GEA Applications"
return self.result_template(check_id, result, reason, resource_list, description)
    # this method checks whether gcp cloud sql instances have a failover replica
    def check_4_6_cloud_sql_fail_over_replica(self):
        check_id = 4.6
        description = "Check whether GCP Cloud SQL instances lack a failover replica"
if len(self.sql_instances) <= 0:
res = self.result_template(
check_id=check_id,
result=False,
reason="There is no gcp cloud sql instances created",
resource_list=[],
description=description
)
return res
else:
resource_list = []
for inst in self.sql_instances:
                try:
                    if not inst['failoverReplica']['available']:
                        resource_list.append(inst['connectionName'])
                except KeyError:
                    resource_list.append(inst['connectionName'])
if len(resource_list) > 0:
result = True
reason = "gcp cloud sql instance does not have fail over replica"
else:
result = False
reason = "ALL gcp cloud sql instances have fail over replica"
return self.result_template(check_id, result, reason, resource_list, description)
# --- supporting methods ---
# this method generates template for each check
def result_template(self, check_id, result, reason, resource_list, description):
template = dict()
template['check_id'] = check_id
template['result'] = result
template['reason'] = reason
template['resource_list'] = resource_list
template['description'] = description
return template
    # this method generates a csv file for check results
def generate_csv(self, all_check_result):
with open('gcp_cloud_sql.csv', 'w') as outcsv:
headers = ["check_id", "result", "reason", "resource_list", "description"]
writer = csv.DictWriter(outcsv, fieldnames=headers)
writer.writeheader()
for row in all_check_result:
writer.writerow(row)
print("Output write to:gcp_cloud_sql.csv")
class SqlResource:
"""
    this class sets different resource information to perform checks on all gcp cloud sql
"""
def __init__(self, service_account_file, project_id):
credentials = service_account.Credentials.from_service_account_file(service_account_file)
# building gcp compute client using gcp compute v1 api
self.sql_client = discovery.build('sqladmin', 'v1beta4', credentials=credentials)
self.project = project_id
    # this method returns information on all gcp cloud sql instances of the project
def all_project_instances(self):
sql_instances = []
resp = self.sql_client.instances().list(project=self.project).execute()
        if 'items' in resp:
for inst in resp['items']:
sql_instances.append(inst)
return sql_instances
class ExecuteCheckSql:
"""
    This class executes all checks and generates a report
"""
    def __init__(self, service_account_file_path, project_id):
        self.service_account_file_path = service_account_file_path
        self.project_id = project_id
    # this method executes all checks
    def perform_check(self):
        # getting resources for performing check
        resource_obj = SqlResource(service_account_file=self.service_account_file_path, project_id=self.project_id)
sql_instances = resource_obj.all_project_instances()
sql_client = resource_obj.sql_client
# initiate Checks class
check_obj = SqlChecks(sql_client=sql_client, sql_instances=sql_instances)
all_check_result = [
check_obj.check_4_1_cloud_sql_public_access(),
check_obj.check_4_2_cloud_sql_backup_config(),
check_obj.check_4_3_cloud_sql_auto_scaling(),
check_obj.check_4_4_cloud_sql_high_availability(),
check_obj.check_4_5_cloud_sql_gae_application(),
check_obj.check_4_6_cloud_sql_fail_over_replica(),
]
return all_check_result
``` |
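A hedged usage sketch for the classes above; the key-file path and project ID are placeholders, and running it requires real GCP credentials.
```python
# Run all Cloud SQL checks and dump the findings to CSV.
if __name__ == "__main__":
    checker = ExecuteCheckSql(
        service_account_file_path="service-account.json",  # placeholder
        project_id="my-gcp-project",                       # placeholder
    )
    results = checker.perform_check()
    # generate_csv does not touch instance state, so a dummy instance is
    # enough to reuse the report helper.
    SqlChecks(sql_client=None, sql_instances=[]).generate_csv(results)
```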
{
"source": "aadeshnpn/flloat",
"score": 3
} |
#### File: flloat/base/convertible.py
```python
from abc import abstractmethod, ABC
from flloat.base.formulas import Formula, BinaryOperator, CommutativeBinaryOperator
from flloat.base.nnf import NNF
from flloat.base.truths import Truth, ImpliesTruth, EquivalenceTruth
class ConvertibleFormula(Formula):
@abstractmethod
def convert(self):
raise NotImplementedError
class BaseConvertibleFormula(ConvertibleFormula, Truth, NNF, ABC):
def truth(self, *args):
return self.convert().truth(*args)
def _to_nnf(self):
return self.convert().to_nnf()
def negate(self):
return self.convert().negate()
class ImpliesConvertible(ImpliesTruth, BaseConvertibleFormula, BinaryOperator):
@property
def And(self):
raise NotImplementedError
@property
def Or(self):
raise NotImplementedError
@property
def Not(self):
raise NotImplementedError
def convert(self):
fs = self.formulas
if len(fs) > 2:
a, b = self.And(fs[:-1]), fs[-1]
else:
a, b = fs
res = self.Or([self.Not(a), b])
return res
class EquivalenceConvertible(EquivalenceTruth, BaseConvertibleFormula, CommutativeBinaryOperator, ABC):
@property
def And(self):
raise NotImplementedError
@property
def Or(self):
raise NotImplementedError
@property
def Not(self):
raise NotImplementedError
def convert(self):
fs = self.formulas
pos = self.And(fs)
neg = self.And([self.Not(f) for f in fs])
res = self.Or([pos, neg])
return res
```
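The two `convert()` methods above rewrite implication as `!a | b` and equivalence as `(a & b) | (!a & !b)`. A standalone truth-table check (no flloat imports) confirms both rewrites:
```python
from itertools import product

for a, b in product([False, True], repeat=2):
    implies_semantics = b if a else True            # truth table of a -> b
    implies_rewrite = (not a) or b                  # Or([Not(a), b])
    assert implies_rewrite == implies_semantics
    equiv_semantics = (a == b)                      # truth table of a <-> b
    equiv_rewrite = (a and b) or (not a and not b)  # Or([And(fs), And(negated fs)])
    assert equiv_rewrite == equiv_semantics
print("convert() rewrites match the truth tables")
```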
#### File: flloat/flloat/ldlf.py
```python
from abc import abstractmethod, ABC
from functools import lru_cache
from typing import Set
from flloat.base.delta import Delta, DeltaConvertibleFormula, EquivalenceDeltaConvertible, ImpliesDeltaConvertible
from flloat.base.formulas import Formula, CommutativeBinaryOperator, UnaryOperator, BinaryOperator, OperatorChildren, \
AtomicFormula
from flloat.base.symbols import Symbol, Symbols
from flloat.base.convertible import ConvertibleFormula
from flloat.base.nnf import NNF, NotNNF, DualBinaryOperatorNNF, DualNNF, AtomicNNF
from flloat.base.truths import NotTruth, AndTruth, OrTruth, Truth
from flloat.helpers import MAX_CACHE_SIZE
from flloat.flloat import to_automaton, DFAOTF
from flloat.pl import PLFormula, PLTrue, PLFalse, PLAnd, PLOr, PLAtomic
from flloat.semantics.traces import FiniteTrace, FiniteTraceTruth
from flloat.semantics.pl import PLInterpretation, PLFalseInterpretation
class RegExpTruth(Truth):
@abstractmethod
def truth(self, tr: FiniteTrace, start: int=0, end: int=0):
raise NotImplementedError
class LDLfFormula(Formula, FiniteTraceTruth, NNF, Delta):
@lru_cache(maxsize=MAX_CACHE_SIZE)
def delta(self, i: PLInterpretation, epsilon=False):
f = self.to_nnf()
if epsilon is False:
d = f._delta(i)
else:
# By definition, if epsilon=True, then the result must be either PLTrue or PLFalse
# Now, the output is a Propositional Formula with only PLTrue or PLFalse as atomics
# Hence, we just evaluate the formula with a dummy PLInterpretation
d = f._delta(None, epsilon)
d = PLTrue() if d.truth(PLFalseInterpretation()) else PLFalse()
return d
@abstractmethod
def _delta(self, i: PLInterpretation, epsilon=False):
"""apply delta function, assuming that 'self' is a LDLf formula in Negative Normal Form"""
raise NotImplementedError
def __repr__(self):
return self.__str__()
def to_automaton(self, labels: Set[Symbol] = None):
if labels is None:
labels = self.find_labels()
return to_automaton(self, labels)
class LDLfCommBinaryOperator(CommutativeBinaryOperator, LDLfFormula):
pass
class DeltaRegExp(ABC):
@abstractmethod
def delta_diamond(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
raise NotImplementedError
@abstractmethod
def delta_box(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
raise NotImplementedError
class RegExpFormula(Formula, RegExpTruth, NNF, DeltaRegExp):
    # this should never be called; it only exists to override the inherited abstract method
def negate(self):
raise NotImplementedError
class LDLfTemporalFormula(LDLfFormula):
@property
def temporal_brackets(self)->str:
raise NotImplementedError
def __init__(self, r:RegExpFormula, f:LDLfFormula):
super().__init__()
self.r = r
self.f = f
def _members(self):
return self.temporal_brackets, self.r, self.f
def __str__(self):
return self.temporal_brackets[0] + str(self.r) + self.temporal_brackets[1] + "(" + str(self.f) + ")"
def find_labels(self):
return self.f.find_labels().union(self.r.find_labels())
class LDLfTemporalFormulaNNF(LDLfTemporalFormula, DualNNF):
def _to_nnf(self):
return type(self)(self.r.to_nnf(), self.f.to_nnf())
def negate(self):
return self.Dual(self.r, LDLfNot(self.f))
class LDLfAtomic(AtomicFormula, AtomicNNF, LDLfFormula):
def __str__(self):
return AtomicFormula.__str__(self)
def find_labels(self):
return set()
class LDLfLogicalTrue(LDLfAtomic):
def __init__(self):
super().__init__(Symbols.LOGICAL_TRUE.value)
def truth(self, *args):
return True
def _to_nnf(self):
return self
def negate(self):
return LDLfLogicalFalse()
def _delta(self, i: PLInterpretation, epsilon=False):
return PLTrue()
class LDLfLogicalFalse(LDLfAtomic):
def __init__(self):
super().__init__(Symbols.LOGICAL_FALSE.value)
def truth(self, *args):
return False
def _to_nnf(self):
return self
def negate(self):
return LDLfLogicalTrue()
def _delta(self, i: PLInterpretation, epsilon=False):
return PLFalse()
class LDLfNot(NotTruth, LDLfFormula, NotNNF):
def _to_nnf(self):
neg = self.f.negate()
return neg.to_nnf()
def negate(self):
return self.f
def _delta(self, i: PLInterpretation, epsilon=False):
        # should never be called, since delta() is only applied to NNF formulas
raise Exception
class LDLfAnd(LDLfCommBinaryOperator, AndTruth, DualBinaryOperatorNNF):
def _delta(self, i:PLInterpretation, epsilon=False):
return PLAnd([f._delta(i, epsilon) for f in self.formulas])
class LDLfOr(LDLfCommBinaryOperator, OrTruth, DualBinaryOperatorNNF):
def _delta(self, i:PLInterpretation, epsilon=False):
return PLOr([f._delta(i, epsilon) for f in self.formulas])
class LDLfImplies(ImpliesDeltaConvertible, LDLfFormula):
And = LDLfAnd
Or = LDLfOr
Not = LDLfNot
class LDLfEquivalence(EquivalenceDeltaConvertible, LDLfCommBinaryOperator):
And = LDLfAnd
Or = LDLfOr
Not = LDLfNot
class LDLfDiamond(LDLfTemporalFormulaNNF, FiniteTraceTruth):
temporal_brackets = "<>"
def truth(self, i: FiniteTrace, pos: int=0):
# last + 1 in order to include the last step
return any(self.r.truth(i, pos, j) and self.f.truth(i, j) for j in range(pos, i.last()+1))
def _delta(self, i: PLInterpretation, epsilon=False):
return self.r.delta_diamond(self.f, i, epsilon)
class LDLfBox(ConvertibleFormula, LDLfTemporalFormulaNNF):
temporal_brackets = "[]"
def convert(self):
return LDLfNot(LDLfDiamond(self.r, LDLfNot(self.f)))
def truth(self, i: FiniteTrace, pos: int=0):
return self.convert().truth(i, pos)
def _delta(self, i: PLInterpretation, epsilon=False):
d = self.r.delta_box(self.f, i, epsilon)
return d
class RegExpPropositional(RegExpFormula, PLFormula):
def __init__(self, pl_formula:PLFormula):
RegExpFormula.__init__(self)
self.pl_formula = pl_formula
def truth(self, tr: FiniteTrace, start: int=0, end: int=0):
return end == start + 1 \
and end <= tr.last() \
and self.pl_formula.truth(tr.get(start))
def _members(self):
return RegExpPropositional, self.pl_formula
def __str__(self):
return str(self.pl_formula)
def _to_nnf(self):
return RegExpPropositional(self.pl_formula.to_nnf())
def negate(self):
return RegExpPropositional(self.pl_formula.negate())
def delta_diamond(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
if epsilon:
return PLFalse()
if self.pl_formula.truth(i):
return PLAtomic(_expand(f))
else:
return PLFalse()
def delta_box(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
if epsilon:
return PLTrue()
if self.pl_formula.truth(i):
return PLAtomic(_expand(f))
else:
return PLTrue()
def find_labels(self):
return self.pl_formula.find_labels()
def _find_atomics(self):
return self.pl_formula.find_atomics()
class RegExpTest(UnaryOperator, RegExpFormula):
operator_symbol = Symbols.PATH_TEST.value
# def __init__(self, f:LDLfFormula):
# RegExpFormula.__init__(self)
# UnaryOperator.__init__(self, f)
def truth(self, tr: FiniteTrace, start: int=0, end: int=0):
return start == end and self.f.truth(tr, start)
def __str__(self):
s = super().__str__()
s = s[1:] + s[0]
return s
def _to_nnf(self):
return RegExpTest(self.f.to_nnf())
def delta_diamond(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
return PLAnd([self.f._delta(i, epsilon), f._delta(i, epsilon)])
def delta_box(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
# ff = LDLfNot(self.f).to_nnf()
# dff = ff._delta(i, epsilon)
# fun = f._delta(i, epsilon)
return PLOr([LDLfNot(self.f).to_nnf()._delta(i, epsilon), f._delta(i, epsilon)])
def find_labels(self):
return self.f.find_labels()
class RegExpUnion(CommutativeBinaryOperator, RegExpFormula):
operator_symbol = Symbols.PATH_UNION.value
def __init__(self, formulas):
RegExpFormula.__init__(self)
CommutativeBinaryOperator.__init__(self, formulas)
def truth(self, tr: FiniteTrace, start: int=0, end: int=0):
return any(f.truth(tr, start, end) for f in self.formulas_set)
def _to_nnf(self):
return RegExpUnion([r.to_nnf() for r in self.formulas_set])
def delta_diamond(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
return PLOr([LDLfDiamond(r, f)._delta(i, epsilon) for r in self.formulas_set])
def delta_box(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
return PLAnd([LDLfBox(r, f)._delta(i, epsilon) for r in self.formulas_set])
class RegExpSequence(BinaryOperator, RegExpFormula):
operator_symbol = Symbols.PATH_SEQUENCE.value
def __init__(self, formulas: OperatorChildren):
RegExpFormula.__init__(self)
BinaryOperator.__init__(self, formulas)
def truth(self, tr: FiniteTrace, start: int=0, end: int=0):
f1 = self.formulas[0]
if len(self.formulas) == 2:
f2 = self.formulas[1]
else:
f2 = RegExpSequence(self.formulas[1:])
return any(f1.truth(tr, start, k) and f2.truth(tr, k, end) for k in range(start, end + 1))
def _to_nnf(self):
return RegExpSequence([r.to_nnf() for r in self.formulas])
def delta_diamond(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
res = LDLfDiamond(self.formulas[-1], f)
for r in reversed(self.formulas[:-1]):
res = LDLfDiamond(r, res)
return res._delta(i, epsilon)
def delta_box(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
res = LDLfBox(self.formulas[-1], f)
for r in reversed(self.formulas[:-1]):
res = LDLfBox(r, res)
return res._delta(i, epsilon)
class RegExpStar(UnaryOperator, RegExpFormula):
operator_symbol = Symbols.PATH_STAR.value
# def __init__(self, f):
# UnaryOperator.__init__(self, f)
# RegExpFormula.__init__(self)
def truth(self, tr: FiniteTrace, start: int=0, end: int=0):
return start == end \
or any(self.f.truth(tr, start, k) and self.truth(tr, k, end)
for k in range(start, end + 1))
def __str__(self):
s = super().__str__()
s = s[1:] + s[0]
return s
def _to_nnf(self):
return RegExpStar(self.f.to_nnf())
def delta_diamond(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
return PLOr([f._delta(i, epsilon), LDLfDiamond(self.f, F(LDLfDiamond(self, f)))._delta(i, epsilon)])
def delta_box(self, f:LDLfFormula, i: PLInterpretation, epsilon=False):
# subf = LDLfBox(self.f, T(LDLfBox(self, f)))
# k = subf._delta(i, epsilon)
# l = [f._delta(i, epsilon), subf]
# ff = PLAnd(l)
return PLAnd([f._delta(i, epsilon), LDLfBox(self.f, T(LDLfBox(self, f)))._delta(i, epsilon)])
class LDLfPropositional(DeltaConvertibleFormula, LDLfFormula):
def __init__(self, pl_formula:PLFormula):
super().__init__()
self.pl_formula = pl_formula
def convert(self):
return LDLfDiamond(RegExpPropositional(self.pl_formula), LDLfLogicalTrue())
def _members(self):
return LDLfPropositional, self.pl_formula
def find_labels(self):
return self.pl_formula.find_labels()
def __str__(self):
return str(self.convert())
class LDLfEnd(DeltaConvertibleFormula, LDLfAtomic):
def __init__(self):
super().__init__(Symbols.END.value)
def convert(self):
return LDLfBox(RegExpPropositional(PLTrue()), LDLfLogicalFalse())
class LDLfLast(DeltaConvertibleFormula, LDLfAtomic):
def __init__(self):
super().__init__(Symbols.LAST.value)
def convert(self):
return LDLfDiamond(RegExpPropositional(PLTrue()), LDLfEnd().convert())
class F(Formula, Delta, NNF):
def __init__(self, f: Formula):
super().__init__()
self.f = f
def _members(self):
return ("F", self.f)
def __str__(self):
return "_".join(map(str, self._members()))
def delta(self, i:PLInterpretation, epsilon=False):
return self._delta(i, epsilon)
def _delta(self,i:PLInterpretation, epsilon=False):
return PLFalse()
def _to_nnf(self):
return self
def negate(self):
return T(self.f)
def find_labels(self):
return super().find_labels()
class T(Formula, Delta, NNF):
def __init__(self, f: Formula):
super().__init__()
self.f = f
def _members(self):
return ("T", self.f)
def __str__(self):
return "_".join(map(str, self._members()))
def delta(self, i: PLInterpretation, epsilon=False):
return self._delta(i, epsilon)
def _delta(self, i:PLInterpretation, epsilon=False):
return PLTrue()
def _to_nnf(self):
return self
def negate(self):
return F(self.f)
def find_labels(self):
return super().find_labels()
def _expand(f:Formula):
if isinstance(f, F) or isinstance(f, T):
return _expand(f.f)
# elif isinstance(f, LDLfLogicalTrue):
# return PLTrue()
# elif isinstance(f, LDLfLogicalFalse):
# return PLFalse()
elif isinstance(f, LDLfDiamond) or isinstance(f, LDLfBox):
return type(f)(f.r, _expand(f.f))
elif isinstance(f, BinaryOperator):
return type(f)([_expand(subf) for subf in f.formulas])
# elif isinstance(f, LDLfLogicalTrue):
# return PLTrue()
# elif isinstance(f, LDLfLogicalFalse):
# return PLFalse()
else:
return f
AtomicNNF.Not = LDLfNot
LDLfAnd.Dual = LDLfOr
LDLfOr.Dual = LDLfAnd
LDLfDiamond.Dual = LDLfBox
LDLfBox.Dual = LDLfDiamond
```
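A hedged usage sketch for the classes above, built directly rather than through the parser: `<true>tt` ("a next step exists") evaluated on a two-step trace. The imports assume the package layout shown in this repository.
```python
from flloat.ldlf import LDLfDiamond, LDLfLogicalTrue, RegExpPropositional
from flloat.pl import PLTrue
from flloat.semantics.traces import FiniteTrace

formula = LDLfDiamond(RegExpPropositional(PLTrue()), LDLfLogicalTrue())
trace = FiniteTrace.from_symbol_sets([{"a"}, {"b"}])
print(formula.truth(trace, 0))  # True: position 1 is reachable in one step
print(formula.truth(trace, 1))  # False: position 1 is the last step
```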
#### File: flloat/parser/ldlf.py
```python
from flloat.base.symbols import Symbols
from flloat.base.parsing import Lexer, Parser
from flloat.ldlf import LDLfLogicalTrue, LDLfLogicalFalse, LDLfNot, LDLfOr, LDLfEquivalence, LDLfImplies, \
LDLfAnd, LDLfDiamond, LDLfBox, RegExpTest, RegExpStar, RegExpUnion, RegExpSequence, RegExpPropositional, LDLfEnd, \
LDLfLast
from flloat.pl import PLNot, PLAtomic, PLOr, PLAnd, PLImplies, PLEquivalence, PLTrue, PLFalse
from flloat.helpers import sym2regexp
class LDLfLexer(Lexer):
def __init__(self):
super().__init__()
reserved = {
'true': 'TRUE',
'false': 'FALSE',
'tt': 'TT',
'ff': 'FF',
'end': 'END',
'last': 'LAST',
}
# List of token names. This is always required
tokens = (
'ATOM',
'NOT',
'AND',
'OR',
'IMPLIES',
'EQUIVALENCE',
'TEST',
'SEQ',
'UNION',
'STAR',
'LPAREN',
'RPAREN',
'BOXLSEPARATOR',
'BOXRSEPARATOR',
'DIAMONDLSEPARATOR',
'DIAMONDRSEPARATOR',
) + tuple(reserved.values())
# Regular expression rules for simple tokens
t_NOT = sym2regexp(Symbols.NOT)
t_AND = sym2regexp(Symbols.AND)
t_OR = sym2regexp(Symbols.OR)
t_IMPLIES = sym2regexp(Symbols.IMPLIES)
t_EQUIVALENCE = sym2regexp(Symbols.EQUIVALENCE)
t_TEST = sym2regexp(Symbols.PATH_TEST)
t_SEQ = sym2regexp(Symbols.PATH_SEQUENCE)
t_UNION = sym2regexp(Symbols.PATH_UNION)
t_STAR = sym2regexp(Symbols.PATH_STAR)
t_LPAREN = sym2regexp(Symbols.ROUND_BRACKET_LEFT)
t_RPAREN = sym2regexp(Symbols.ROUND_BRACKET_RIGHT)
t_BOXLSEPARATOR = sym2regexp(Symbols.ALWAYS_BRACKET_LEFT)
t_BOXRSEPARATOR = sym2regexp(Symbols.ALWAYS_BRACKET_RIGHT)
t_DIAMONDLSEPARATOR = sym2regexp(Symbols.EVENTUALLY_BRACKET_LEFT)
t_DIAMONDRSEPARATOR = sym2regexp(Symbols.EVENTUALLY_BRACKET_RIGHT)
def t_ATOM(self, t):
r'[a-zA-Z_][a-zA-Z_0-9]*'
t.type = LDLfLexer.reserved.get(t.value, 'ATOM') # Check for reserved words
return t
# Yacc example
class LDLfParser(Parser):
def __init__(self):
lexer = LDLfLexer()
precedence = (
('left', 'EQUIVALENCE'),
('left', 'IMPLIES'),
('left', 'UNION'),
('left', 'SEQ'),
('left', 'STAR'),
('left', 'TEST'),
('left', 'OR'),
('left', 'AND'),
('right', 'DIAMONDLSEPARATOR', 'BOXLSEPARATOR'),
('left', 'DIAMONDRSEPARATOR', 'BOXRSEPARATOR'),
('right', 'NOT'),
)
super().__init__("ldlf", lexer.tokens, lexer, precedence)
# self.pl_parser = PLParser()
def p_temp_formula(self, p):
"""temp_formula : temp_formula EQUIVALENCE temp_formula
| temp_formula IMPLIES temp_formula
| temp_formula OR temp_formula
| temp_formula AND temp_formula
| BOXLSEPARATOR path BOXRSEPARATOR temp_formula
| DIAMONDLSEPARATOR path DIAMONDRSEPARATOR temp_formula
| NOT temp_formula
| TT
| FF
| END
| LAST"""
if len(p) == 2:
if p[1] == Symbols.LOGICAL_TRUE.value:
p[0] = LDLfLogicalTrue()
elif p[1] == Symbols.LOGICAL_FALSE.value:
p[0] = LDLfLogicalFalse()
elif p[1] == Symbols.END.value:
p[0] = LDLfEnd()
elif p[1] == Symbols.LAST.value:
p[0] = LDLfLast()
else:
p[0] = LDLfDiamond(RegExpPropositional(p[1]), LDLfLogicalTrue())
elif len(p) == 3:
p[0] = LDLfNot(p[2])
elif len(p) == 4:
l, o, r = p[1:]
if o == Symbols.EQUIVALENCE.value:
p[0] = LDLfEquivalence([l, r])
elif o == Symbols.IMPLIES.value:
p[0] = LDLfImplies([l, r])
elif o == Symbols.OR.value:
p[0] = LDLfOr([l, r])
elif o == Symbols.AND.value:
p[0] = LDLfAnd([l, r])
else:
raise ValueError
elif len(p) == 5:
if p[1] == Symbols.ALWAYS_BRACKET_LEFT.value:
p[0] = LDLfBox(p[2], p[4])
elif p[1] == Symbols.EVENTUALLY_BRACKET_LEFT.value:
p[0] = LDLfDiamond(p[2], p[4])
else:
raise ValueError
else:
raise ValueError
# def p_formula_propositional(self, p):
# 'formula : propositional'
# p[0] = LDLfDiamond(RegExpPropositional(p[1]), LDLfLogicalTrue())
def p_path(self, p):
"""path : path UNION path
| path SEQ path
| path STAR
| temp_formula TEST
| propositional"""
if len(p)==2:
p[0] = RegExpPropositional(p[1])
elif len(p)==3:
if p[2]==Symbols.PATH_TEST.value:
p[0] = RegExpTest(p[1])
elif p[2] == Symbols.PATH_STAR.value:
p[0] = RegExpStar(p[1])
else:
raise ValueError
elif len(p)==4:
if p[2]==Symbols.PATH_UNION.value:
p[0] = RegExpUnion([p[1], p[3]])
elif p[2] == Symbols.PATH_SEQUENCE.value:
p[0] = RegExpSequence([p[1], p[3]])
else:
raise ValueError
else:
raise ValueError
def p_propositional(self, p):
"""propositional : propositional EQUIVALENCE propositional
| propositional IMPLIES propositional
| propositional OR propositional
| propositional AND propositional
| NOT propositional
| FALSE
| TRUE
| ATOM"""
if len(p)==4:
if p[2] == Symbols.EQUIVALENCE.value:
p[0] = PLEquivalence([p[1], p[3]])
elif p[2] == Symbols.IMPLIES.value:
p[0] = PLImplies([p[1], p[3]])
elif p[2] == Symbols.OR.value:
p[0] = PLOr([p[1], p[3]])
elif p[2] == Symbols.AND.value:
p[0] = PLAnd([p[1], p[3]])
else:
raise ValueError
# else:
# p[0] = p[2]
elif len(p)==3:
p[0] = PLNot(p[2])
elif len(p)==2:
if p[1]==Symbols.TRUE.value:
p[0] = PLTrue()
elif p[1]==Symbols.FALSE.value:
p[0] = PLFalse()
else:
p[0] = PLAtomic(p[1])
else:
raise ValueError
def p_expr_paren(self, p):
"""temp_formula : LPAREN temp_formula RPAREN
path : LPAREN path RPAREN
propositional : LPAREN propositional RPAREN
"""
p[0] = p[2]
if __name__ == '__main__':
parser = LDLfParser()
while True:
try:
s = input('calc > ')
except EOFError:
break
if not s: continue
result = parser(s)
print(result)
```
#### File: flloat/semantics/traces.py
```python
from abc import abstractmethod
from typing import List, Set
from flloat.base.symbols import Symbol
from flloat.base.truths import Truth
from flloat.helpers import Hashable
from flloat.semantics.pl import PLInterpretation
class FiniteTrace(Hashable):
def __init__(self, trace: List[PLInterpretation]):
super().__init__()
self.trace = trace
def _members(self):
return tuple(self.trace)
@staticmethod
def from_symbol_sets(l: List[Set[Symbol]]):
return FiniteTrace([PLInterpretation(frozenset(s)) for s in l])
def length(self):
return len(self.trace)
def last(self):
return len(self.trace)-1
def _position_is_legal(self, position: int):
        return 0 <= position <= self.last()
def get(self, position: int) -> PLInterpretation:
assert self._position_is_legal(position)
return self.trace[position]
def segment(self, start: int, end: int):
if not self._position_is_legal(start) or not self._position_is_legal(end):
raise ValueError("Start or end position are not valid")
return FiniteTrace(self.trace[start: end])
def __str__(self):
return "Trace (length=%s)" % self.length() + "\n\t" + \
"\n\t".join("%d: {" % i + ", ".join(map(str, sorted(e))) + "}" for i, e in enumerate(self.trace))
class FiniteTraceTruth(Truth):
@abstractmethod
def truth(self, i: FiniteTrace, pos: int):
raise NotImplementedError
```
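A small sketch of `FiniteTrace`: positions are 0-based, `last()` is the length minus one, and `segment(start, end)` slices positions `start..end-1`.
```python
from flloat.semantics.traces import FiniteTrace

trace = FiniteTrace.from_symbol_sets([{"a"}, {"a", "b"}, set()])
print(trace.length())      # 3
print(trace.last())        # 2
step = trace.get(1)        # PLInterpretation for position 1
sub = trace.segment(0, 2)  # positions 0..1
print(sub.length())        # 2
```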
#### File: flloat/tests/test_misc.py
```python
import os
def test_ldlf_example_readme():
from flloat.parser.ldlf import LDLfParser
parser = LDLfParser()
formula = "<true*; A & B>tt"
parsed_formula = parser(formula)
assert str(parsed_formula) == "<((true)* ; (B & A))>(tt)" or str(parsed_formula) == "<((true)* ; (A & B))>(tt)"
assert parsed_formula.find_labels() == {c for c in "AB"}
from flloat.semantics.traces import FiniteTrace
t1 = FiniteTrace.from_symbol_sets([
{},
{"A"},
{"A"},
{"A", "B"},
{}
])
assert parsed_formula.truth(t1, 0)
t2 = FiniteTrace.from_symbol_sets([
{},
{"A"},
{"B"}
])
assert not parsed_formula.truth(t2, 0)
dfa = parsed_formula.to_automaton()
assert dfa.accepts(t1.trace)
assert not dfa.accepts(t2.trace)
def test_ltlf_example_readme():
from flloat.parser.ltlf import LTLfParser
from flloat.semantics.traces import FiniteTrace
parser = LTLfParser()
formula = "F (A & !B)"
parsed_formula = parser(formula)
t1 = FiniteTrace.from_symbol_sets([
{},
{"A"},
{"A"},
{"A", "B"}
])
assert parsed_formula.truth(t1, 0)
t2 = FiniteTrace.from_symbol_sets([
{},
{"A", "B"},
{"B"}
])
assert not parsed_formula.truth(t2, 0)
dfa = parsed_formula.to_automaton()
assert dfa.accepts(t1.trace)
assert not dfa.accepts(t2.trace)
def test_hash_consistency_after_pickling():
from flloat.parser.ltlf import LTLfParser
import pickle
parser = LTLfParser()
formula = "F (A & !B)"
old_obj = parser(formula)
h = hash(old_obj)
pickle.dump(old_obj, open("temp", "wb"))
new_obj = pickle.load(open("temp", "rb"))
assert new_obj._hash is None
assert h == hash(new_obj)
os.remove("temp")
``` |
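The pickling test above relies on a memoized-hash pattern: the cached `_hash` is dropped when the object is serialized and lazily recomputed afterwards. A generic sketch of that pattern (not flloat's exact implementation):
```python
import pickle

class Memoized:
    def __init__(self, members):
        self.members = members
        self._hash = None
    def __hash__(self):
        if self._hash is None:          # compute once, then cache
            self._hash = hash(self.members)
        return self._hash
    def __getstate__(self):
        state = self.__dict__.copy()
        state["_hash"] = None           # never persist the cache
        return state

obj = Memoized(("F", "A", "!B"))
h = hash(obj)
clone = pickle.loads(pickle.dumps(obj))
assert clone._hash is None and hash(clone) == h
```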
{
"source": "aadeshnpn/swarm",
"score": 3
} |
#### File: examples/handcoded/experiment.py
```python
from simmodel import SimForgModel, SimCTModel, SimNMModel
# from swarms.utils.jsonhandler import JsonData
from swarms.utils.graph import GraphACC
from joblib import Parallel, delayed # noqa : F401
from swarms.utils.results import SimulationResults
from swarms.utils.jsonhandler import JsonPhenotypeData
# Global variables for width and height
width = 100
height = 100
UI = False
def extract_phenotype(agents, filename, method='ratio'):
"""Extract phenotype of the learning agents.
    Sort the agents by overall fitness and then, based on the chosen
    method, extract the phenotypes of the agents.
    Method can take {'ratio', 'highest', 'sample'}.
"""
sorted_agents = sorted(
agents, key=lambda x: x.individual[0].fitness, reverse=True)
if method == 'ratio':
ratio_value = 0.4
upper_bound = ratio_value * len(agents)
        selected_agents = sorted_agents[0:int(upper_bound)]
selected_phenotype = [
agent.individual[0].phenotype for agent in selected_agents]
# return selected_phenotype
else:
selected_phenotype = [sorted_agents[0].individual[0].phenotype]
# return [sorted_agents[0].individual[0].phenotype]
# Save the phenotype to a json file
JsonPhenotypeData.to_json(selected_phenotype, filename)
# Return the phenotype
return selected_phenotype
def simulate_forg(env, iteration):
"""Test the performane of evolved behavior."""
phenotypes = env[0]
threshold = 1.0
sim = SimForgModel(
100, 100, 100, 10, iter=iteration, xmlstrings=phenotypes, pname=env[1])
sim.build_environment_from_json()
# for all agents store the information about hub
for agent in sim.agents:
agent.shared_content['Hub'] = {sim.hub}
# agent.shared_content['Sites'] = {sim.site}
simresults = SimulationResults(
sim.pname, sim.connect, sim.sn, sim.stepcnt, sim.food_in_hub(),
phenotypes[0]
)
simresults.save_phenotype()
simresults.save_to_file()
# Iterate and execute each step in the environment
for i in range(iteration):
# For every iteration we need to store the results
# Save them into db or a file
sim.step()
value = sim.food_in_hub()
foraging_percent = (
value * 100.0) / (sim.num_agents * 1)
simresults = SimulationResults(
sim.pname, sim.connect, sim.sn, sim.stepcnt, foraging_percent,
phenotypes[0]
)
simresults.save_to_file()
# print ('food at site', len(sim.food_in_loc(sim.site.location)))
# print ('food at hub', len(sim.food_in_loc(sim.hub.location)))
# print("Total food in the hub", len(food_objects))
# food_objects = sim.food_in_loc(sim.hub.location)
# for food in food_objects:
# print('simulate phenotye:', dir(food))
    success = False
    print('Foraging percent', foraging_percent)
    if foraging_percent >= threshold:
        print('Foraging success')
        success = True
    sim.experiment.update_experiment_simulation(foraging_percent, success)
# Plot the fitness in the graph
graph = GraphACC(sim.pname, 'simulation.csv')
graph.gen_plot()
def simulate_ct(env, iteration):
"""Test the performane of evolved behavior."""
phenotypes = env[0]
threshold = 1.0
sim = SimCTModel(
100, 100, 100, 10, iter=iteration, xmlstrings=phenotypes, pname=env[1])
sim.build_environment_from_json()
# for all agents store the information about hub
for agent in sim.agents:
agent.shared_content['Hub'] = {sim.hub}
# agent.shared_content['Sites'] = {sim.site}
simresults = SimulationResults(
sim.pname, sim.connect, sim.sn, sim.stepcnt, sim.food_in_hub(),
phenotypes[0]
)
simresults.save_phenotype()
simresults.save_to_file()
# Iterate and execute each step in the environment
for i in range(iteration):
# For every iteration we need to store the results
# Save them into db or a file
sim.step()
simresults = SimulationResults(
sim.pname, sim.connect, sim.sn, sim.stepcnt, sim.food_in_hub(),
phenotypes[0]
)
simresults.save_to_file()
# print ('food at site', len(sim.food_in_loc(sim.site.location)))
# print ('food at hub', len(sim.food_in_loc(sim.hub.location)))
# print("Total food in the hub", len(food_objects))
food_objects = sim.food_in_loc(sim.hub.location)
# for food in food_objects:
# print('simulate phenotye:', dir(food))
# value = sim.food_in_hub()
foraging_percent = (
len(food_objects) * 100.0) / (sim.num_agents * 2.0)
    success = False
    print('Foraging percent', foraging_percent)
    if foraging_percent >= threshold:
        print('Foraging success')
        success = True
    sim.experiment.update_experiment_simulation(len(food_objects), success)
# Plot the fitness in the graph
graph = GraphACC(sim.pname, 'simulation.csv')
graph.gen_plot()
def simulate_nm(env, iteration, N=100):
"""Test the performane of evolved behavior."""
# phenotype = agent.individual[0].phenotype
# phenotypes = extract_phenotype(agents)
phenotypes = env[0]
threshold = 1.0
sim = SimNMModel(
N, 100, 100, 10, iter=iteration, xmlstrings=phenotypes, pname=env[1])
sim.build_environment_from_json()
# for all agents store the information about hub
for agent in sim.agents:
agent.shared_content['Hub'] = {sim.hub}
# agent.shared_content['Sites'] = {sim.site}
simresults = SimulationResults(
sim.pname, sim.connect, sim.sn, sim.stepcnt, len(sim.debris_cleaned()),
phenotypes[0]
)
simresults.save_phenotype()
simresults.save_to_file()
# Iterate and execute each step in the environment
for i in range(iteration):
# For every iteration we need to store the results
# Save them into db or a file
sim.step()
simresults = SimulationResults(
sim.pname, sim.connect, sim.sn, sim.stepcnt,
len(sim.debris_cleaned()), phenotypes[0]
)
simresults.save_to_file()
# print ('food at site', len(sim.food_in_loc(sim.site.location)))
# print ('food at hub', len(sim.food_in_loc(sim.hub.location)))
# print("Total food in the hub", len(food_objects))
# food_objects = sim.food_in_loc(sim.hub.location)
# for food in food_objects:
# print('simulate phenotye:', dir(food))
value = len(sim.debris_cleaned())
foraging_percent = (
value * 100.0) / (sim.num_agents * 2.0)
    success = False
    print('Foraging percent', foraging_percent)
    if foraging_percent >= threshold:
        print('Foraging success')
        success = True
    sim.experiment.update_experiment_simulation(value, success)
# Plot the fitness in the graph
graph = GraphACC(sim.pname, 'simulation.csv')
graph.gen_plot()
def main(iter):
"""Block for the main function."""
print('=======Start=========')
# pname = '/home/aadeshnpn/Documents/BYU/HCMI/research/handcoded/nm'
pname = '/home/aadeshnpn/Documents/BYU/hcmi/hri/handcoded/ct'
# for N in range(16):
# steps = [5000 for i in range(16)]
# env = (env.phenotypes, env.pname)
# aname = pname + '/' + str(N)
env = (['123', '123'], pname)
# Parallel(n_jobs=16)(delayed(simulate_ct)(env, i) for i in steps)
# Parallel(n_jobs=16)(delayed(simulate_nm)(env, i) for i in steps)
# simulate_forg(env, 5)
# simulate_ct(env, 5)
simulate_nm(env, 5)
print('=======End=========')
if __name__ == '__main__':
# Running 50 experiments in parallel
# steps = [100000 for i in range(50)]
# Parallel(n_jobs=8)(delayed(main)(i) for i in steps)
# Parallel(n_jobs=16)(delayed(main)(i) for i in range(1000, 100000, 2000))
main(10)
```
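A toy illustration of the `'ratio'` branch in `extract_phenotype` above, using stand-in agents and skipping the JSON output; the 0.4 cut-off keeps the top 40% by fitness.
```python
class _Ind:
    def __init__(self, fitness, phenotype):
        self.fitness = fitness
        self.phenotype = phenotype

class _Agent:
    def __init__(self, fitness, phenotype):
        self.individual = [_Ind(fitness, phenotype)]

agents = [_Agent(f, 'bt-%d' % f) for f in (3, 9, 1, 7, 5)]
ranked = sorted(agents, key=lambda x: x.individual[0].fitness, reverse=True)
top = ranked[:int(0.4 * len(ranked))]
print([a.individual[0].phenotype for a in top])  # ['bt-9', 'bt-7']
```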
#### File: examples/resilience_goal/run.py
```python
from model import ViewerModel
from swarms.utils.jsonhandler import JsonPhenotypeData
# from joblib import Parallel, delayed
# Global variables for width and height
width = 800
height = 800
viewer = True
def main():
"""Block main."""
iteration = 2500
jname = '/tmp/16244729911974-all.json'
phenotype = JsonPhenotypeData.load_json_file(jname)['phenotypes']
# Create a test environment to visualize
viewer = ViewerModel(
100, width, height, 10, iter=iteration, viewer=True)
# Build the environment
viewer.build_environment_from_json()
# Create the agents in the environment from the sampled behaviors
viewer.create_agents(phenotypes=[phenotype[0]], random_init=True)
# for all agents store the information about hub
for agent in viewer.agents:
agent.shared_content['Hub'] = {viewer.hub}
# Iterate and execute each step in the environment
for i in range(iteration):
viewer.step()
# print('Execution done')
# Find if food has been deposited in the hub
# print('Cleaning Percentage', env.foraging_percent())
# print(len(env.debris_cleaned()))
# print ('food at site', len(env.food_in_loc(env.site.location)))
# print ('food at hub', len(env.food_in_loc(env.hub.location)))
if __name__ == '__main__':
main()
```
#### File: examples/resilience/simagent.py
```python
from swarms.lib.agent import Agent
import numpy as np
from swarms.utils.bt import BTConstruct
# from swarms.utils.results import Results
from py_trees import Behaviour, Blackboard
import copy  # needed by SimAgent.replace_nodes below
from py_trees.meta import inverter
import py_trees
from py_trees.composites import Sequence, Selector
from py_trees.trees import BehaviourTree
from swarms.behaviors.sbehaviors import (
NeighbourObjects, IsVisitedBefore,
IsCarrying, IsInPartialAttached, RandomWalk, Move, AvoidSObjects
# ObjectsOnGrid, IsAgentDead,
)
from swarms.behaviors.scbehaviors import (
CompositeDrop, CompositeSingleCarry, MoveTowards,
Explore, CompositeDropPartial, CompositeMultipleCarry,
NewExplore, NewMoveAway, NewMoveTowards
# , AgentDead, AvoidTrap, ObstacleStuck
)
# import py_trees
from ponyge.operators.initialisation import initialisation
from ponyge.fitness.evaluation import evaluate_fitness
from ponyge.operators.crossover import crossover
from ponyge.operators.mutation import mutation
from ponyge.operators.replacement import replacement
from ponyge.operators.selection import selection
class SimForgAgentWithout(Agent):
"""Simulation agent.
    A minimalistic behavior tree for a swarm agent
implementing carry and drop behavior.
"""
def __init__(self, name, model, xmlstring=None):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 3
self.moveable = True
self.shared_content = dict()
self.carryable = False
self.passable = True
# Define a BTContruct object
self.bt = BTConstruct(None, self)
class DummyIndividual:
def __init__(self):
self.phenotype = None
dummyind = DummyIndividual()
self.individual = [dummyind]
self.individual[0].phenotype = xmlstring
# self.bt.xmlstring = xmlstring
# self.bt.construct()
#
# neighobst = NeighbourObjects('NeighbourObjects_Obstacles')
# neighobst.setup(0, self, 'Obstacles')
# Drop branch
dseq = py_trees.composites.Sequence('DSequence')
iscarrying = IsCarrying('IsCarrying_Food')
iscarrying.setup(0, self, 'Food')
neighhub = NeighbourObjects('NeighbourObjects_Hub')
neighhub.setup(0, self, 'Hub')
notneighhub = py_trees.meta.inverter(NeighbourObjects)(
'NeighbourObjects_Hub')
notneighhub.setup(0, self, 'Hub')
drop = CompositeDrop('CompositeDrop_Food')
drop.setup(0, self, 'Food')
dseq.add_children([neighhub, drop])
# Carry branch
cseq = py_trees.composites.Sequence('CSequence')
neighsite = NeighbourObjects('NeighbourObjects_Sites')
neighsite.setup(0, self, 'Sites')
neighfood = NeighbourObjects('NeighbourObjects_Food')
neighfood.setup(0, self, 'Food')
invcarrying = py_trees.meta.inverter(IsCarrying)('IsCarrying_Food')
invcarrying.setup(0, self, 'Food')
carry = CompositeSingleCarry('CompositeSingleCarry_Food')
carry.setup(0, self, 'Food')
cseq.add_children([neighsite, neighfood, invcarrying, carry])
# Locomotion branch
# Move to site
siteseq = py_trees.composites.Sequence('SiteSeq')
sitefound = IsVisitedBefore('IsVisitedBefore_Sites')
sitefound.setup(0, self, 'Sites')
gotosite = MoveTowards('MoveTowards_Sites')
gotosite.setup(0, self, 'Sites')
# siteseq.add_children([neighobst, neightrap, sitefound, invcarrying, gotosite])
siteseq.add_children([sitefound, invcarrying, gotosite])
# siteseq.add_children([invcarrying])
# Move to hub
hubseq = py_trees.composites.Sequence('HubSeq')
gotohub = MoveTowards('MoveTowards_Hub')
gotohub.setup(0, self, 'Hub')
# hubseq.add_children([neighobst, neightrap, iscarrying, gotohub])
hubseq.add_children([iscarrying, gotohub])
sitenotfound = py_trees.meta.inverter(IsVisitedBefore)(
'IsVisitedBefore_Sites')
sitenotfound.setup(0, self, 'Sites')
explore = Explore('Explore')
explore.setup(0, self)
# randwalk = py_trees.composites.Sequence('Randwalk')
# randwalk.add_children([neighobst, neightrap, sitenotfound, explore])
# randwalk.add_children([sitenotfound, explore])
locoselect = py_trees.composites.Selector('Move')
locoselect.add_children([siteseq, hubseq, explore])
# locoselect.add_children([hubseq, randwalk])
select = py_trees.composites.Selector('Main')
select.add_children([dseq, cseq, locoselect])
self.behaviour_tree = py_trees.trees.BehaviourTree(select)
# py_trees.display.render_dot_tree(
# self.behaviour_tree.root, name=model.pname + '/forgehc')
# py_trees.logging.level = py_trees.logging.Level.DEBUG
# py_trees.display.print_ascii_tree(select)
def step(self):
# self.bt.behaviour_tree.tick()
self.behaviour_tree.tick()
def advance(self):
pass
class SimForgAgentWith(Agent):
"""Simulation agent.
    A minimalistic behavior tree for a swarm agent
implementing carry and drop behavior.
"""
def __init__(self, name, model, xmlstring=None):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 3
self.moveable = True
self.shared_content = dict()
self.carryable = False
self.passable = True
# Define a BTContruct object
self.bt = BTConstruct(None, self)
class DummyIndividual:
def __init__(self):
self.phenotype = None
dummyind = DummyIndividual()
self.individual = [dummyind]
self.individual[0].phenotype = xmlstring
# self.bt.xmlstring = xmlstring
# self.bt.construct()
#
# neighobst = NeighbourObjects('NeighbourObjects_Obstacles')
# neighobst.setup(0, self, 'Obstacles')
# Drop branch
dseq = py_trees.composites.Sequence('DSequence')
iscarrying = IsCarrying('IsCarrying_Food')
iscarrying.setup(0, self, 'Food')
neighhub = NeighbourObjects('NeighbourObjects_Hub')
neighhub.setup(0, self, 'Hub')
notneighhub = py_trees.meta.inverter(NeighbourObjects)(
'NeighbourObjects_Hub')
notneighhub.setup(0, self, 'Hub')
drop = CompositeDrop('CompositeDrop_Food')
drop.setup(0, self, 'Food')
dseq.add_children([neighhub, drop])
# ## Obstacles and Trap
# neighobs = NeighbourObjects('NeighbourObjects_Obs')
# neighobs.setup(0, self, 'Obstacle')
# neightrap = NeighbourObjects('NeighbourObjects_Trap')
# neightrap.setup(0, self, 'Traps')
# avoidobstacle = AvoidSObjects('Obstacle')
# avoidobstacle.setup(0, agent)
# avoidtrap = AvoidSObjects('Trap')
# avoidtrap.setup(0, agent, item='Traps')
# otseq = py_trees.composites.Sequence('OTSequence')
# otseq.add_children([neighobs, avoidobstacle, neightrap, avoidtrap])
# Carry branch
cseq = py_trees.composites.Sequence('CSequence')
neighsite = NeighbourObjects('NeighbourObjects_Sites')
neighsite.setup(0, self, 'Sites')
neighfood = NeighbourObjects('NeighbourObjects_Food')
neighfood.setup(0, self, 'Food')
invcarrying = py_trees.meta.inverter(IsCarrying)('IsCarrying_Food')
invcarrying.setup(0, self, 'Food')
carry = CompositeSingleCarry('CompositeSingleCarry_Food')
carry.setup(0, self, 'Food')
cseq.add_children([neighsite, neighfood, invcarrying, carry])
# Locomotion branch
# Move to site
siteseq = py_trees.composites.Sequence('SiteSeq')
sitefound = IsVisitedBefore('IsVisitedBefore_Sites')
sitefound.setup(0, self, 'Sites')
gotosite = NewMoveTowards('NewMoveTowards_Sites')
gotosite.setup(0, self, 'Sites')
# siteseq.add_children([neighobst, neightrap, sitefound, invcarrying, gotosite])
siteseq.add_children([sitefound, invcarrying, gotosite])
# siteseq.add_children([invcarrying])
# Move to hub
hubseq = py_trees.composites.Sequence('HubSeq')
gotohub = NewMoveTowards('NewMoveTowards_Hub')
gotohub.setup(0, self, 'Hub')
# hubseq.add_children([neighobst, neightrap, iscarrying, gotohub])
hubseq.add_children([iscarrying, gotohub])
sitenotfound = py_trees.meta.inverter(IsVisitedBefore)(
'IsVisitedBefore_Sites')
sitenotfound.setup(0, self, 'Sites')
explore = NewExplore('NewExplore')
explore.setup(0, self)
# randwalk = py_trees.composites.Sequence('Randwalk')
# randwalk.add_children([neighobst, neightrap, sitenotfound, explore])
# randwalk.add_children([sitenotfound, avoidt, explore])
locoselect = py_trees.composites.Selector('Move')
locoselect.add_children([siteseq, hubseq, explore])
# locoselect.add_children([hubseq, randwalk])
select = py_trees.composites.Selector('Main')
select.add_children([dseq, cseq, locoselect])
self.behaviour_tree = py_trees.trees.BehaviourTree(select)
# py_trees.display.render_dot_tree(
# self.behaviour_tree.root, name=model.pname + '/forgehc')
# py_trees.logging.level = py_trees.logging.Level.DEBUG
# py_trees.display.print_ascii_tree(select)
def step(self):
# self.bt.behaviour_tree.tick()
self.behaviour_tree.tick()
def advance(self):
pass
class EvolAgent(Agent):
"""An minimalistic swarm agent."""
def __init__(self, name, model):
"""Initialize the agent."""
super().__init__(name, model)
self.location = ()
self.phenotypes = dict()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 3
self.results = "file" # This can take 2 values. db or file
# self.exchange_time = model.random.randint(2, 4)
        # This doesn't help. Maybe only perform genetic operations when
        # agents meet 10% of the total population.
self.operation_threshold = 2
self.genome_storage = []
# Define a BTContruct object
self.bt = BTConstruct(None, self)
# self.blackboard = Blackboard()
# self.blackboard.shared_content = dict()
self.shared_content = dict()
# self.shared_content = dict(
self.carryable = False
self.passable = True
self.beta = 0.0001
self.food_collected = 0
# Grammatical Evolution part
from ponyge.algorithm.parameters import Parameters
parameter = Parameters()
parameter_list = ['--parameters', '../..,' + model.parm]
# Comment when different results is desired.
# Else set this for testing purpose
# parameter.params['RANDOM_SEED'] = name
# # np.random.randint(1, 99999999)
parameter.params['POPULATION_SIZE'] = self.operation_threshold // 2
parameter.set_params(parameter_list)
self.parameter = parameter
individual = initialisation(self.parameter, 1)
individual = evaluate_fitness(individual, self.parameter)
self.individual = individual
self.bt.xmlstring = self.individual[0].phenotype
self.bt.construct()
self.diversity_fitness = self.individual[0].fitness
self.delayed_reward = 0
# Location history
self.location_history = set()
self.timestamp = 0
self.step_count = 0
self.fitness_name = True
def get_food_in_hub(self):
"""Return food in the hub."""
grid = self.model.grid
hub_loc = self.model.hub.location
neighbours = grid.get_neighborhood(hub_loc, 10)
food_objects = grid.get_objects_from_list_of_grid('Food', neighbours)
agent_food_objects = []
for food in food_objects:
if (
food.agent_name == self.name and
food.phenotype == self.individual[0].phenotype):
agent_food_objects.append(food)
return agent_food_objects
def detect_food_carrying(self):
"""Detect if the agent is carrying food."""
if len(self.attached_objects) > 0:
            print('Food carrying', self.name, self.attached_objects)
output = py_trees.display.ascii_tree(self.bt.behaviour_tree.root)
print(output)
def store_genome(self, cellmates):
"""Store the genome from neighbours."""
# cellmates.remove(self)
# self.genome_storage += [agent.individual[0] for agent in cellmates]
for agent in cellmates:
if agent.food_collected > 0:
self.genome_storage += agent.individual
elif len(agent.attached_objects) > 0:
self.genome_storage += agent.individual
elif agent.exploration_fitness() > 10:
self.genome_storage += agent.individual
    def exchange_chromosome(self):
"""Perform genetic operations."""
# print('from exchange', self.name)
individuals = self.genome_storage
parents = selection(self.parameter, individuals)
cross_pop = crossover(self.parameter, parents)
new_pop = mutation(self.parameter, cross_pop)
new_pop = evaluate_fitness(new_pop, self.parameter)
individuals = replacement(self.parameter, new_pop, individuals)
individuals.sort(reverse=False)
self.individual = [individuals[0]]
self.individual[0].fitness = 0
self.genome_storage = []
def genetic_step(self):
"""Additional procedures called after genecti step."""
self.delayed_reward = self.individual[0].fitness
self.exchange_chromosome()
self.bt.xmlstring = self.individual[0].phenotype
self.bt.construct()
self.food_collected = 0
self.location_history = set()
self.timestamp = 0
self.diversity_fitness = self.individual[0].fitness
def overall_fitness(self):
"""Compute complete fitness.
        Goals are represented by objective functions. We use a combination
        of objective functions to define the overall fitness of the agent's
        performance.
"""
        # Use a decaying function to generate fitness.
        # Use a two-step decaying function: the first term favors
        # exploration, and as soon as food has been found, the remaining
        # terms focus on carrying and dropping the food at the hub.
self.individual[0].fitness = (1 - self.beta) * self.delayed_reward \
+ self.exploration_fitness() + self.carrying_fitness() \
+ self.food_collected
def carrying_fitness(self):
"""Compute carrying fitness.
This fitness supports the carrying behavior of
the agents.
"""
return len(self.attached_objects) * (self.timestamp)
def exploration_fitness(self):
"""Compute the exploration fitness."""
# Use exploration space as fitness values
return len(self.location_history) - 1
# New Agent methods for behavior based robotics
def sense(self):
"""Sense included in behavior tree."""
pass
def plan(self):
"""Plan not required for now."""
pass
def step(self):
"""Agent action at a single time step."""
# py_trees.logging.level = py_trees.logging.Level.DEBUG
# output = py_trees.display.ascii_tree(self.bt.behaviour_tree.root)
        # Counting variables
self.timestamp += 1
self.step_count += 1
# Increase beta
self.beta = self.step_count / self.model.iter
self.location_history.add(self.location)
# Compute the behavior tree
self.bt.behaviour_tree.tick()
# Find the no.of food collected from the BT execution
self.food_collected = len(self.get_food_in_hub())
# Computes overall fitness using Beta function
self.overall_fitness()
self.phenotypes = dict()
self.phenotypes[self.individual[0].phenotype] = (
self.individual[0].fitness)
cellmates = self.model.grid.get_objects_from_grid(
type(self).__name__, self.location)
# Create a results instance and save it to a file
"""
self.results = Results(
self.model.pname, self.model.connect, self.model.sn, self.name,
self.step_count, self.timestamp, self.beta,
self.individual[0].fitness,
self.diversity_fitness, self.exploration_fitness(),
self.food_collected, len(cellmates), self.individual[0].genome,
self.individual[0].phenotype, self.bt
)
"""
# Save the results to a db
# self.results.save_to_file()
        # Logic for genetic operations.
        # If the genome storage has enough genomes and the agent has done
        # some exploration, then perform the genetic step; OR if enough
        # time steps have passed and the agent has not done anything
        # useful, then also perform the genetic step.
storage_threshold = len(
self.genome_storage) >= (self.model.num_agents / 1.4)
if storage_threshold:
self.genetic_step()
elif (
storage_threshold is False and self.timestamp > 50 and
self.exploration_fitness() < 10):
individual = initialisation(self.parameter, 10)
individual = evaluate_fitness(individual, self.parameter)
self.genome_storage = individual
self.genetic_step()
# If neighbours found, store the genome
if len(cellmates) > 1:
self.store_genome(cellmates)
def advance(self):
"""Require for staged activation."""
pass
class SimAgent(Agent):
"""Simulation agent.
    A minimalistic behavior tree for a swarm agent
implementing carry and drop behavior.
"""
def __init__(self, name, model, xmlstring=None):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 3
# self.moveable = True
self.passable = True
self.shared_content = dict()
self.carryable = False
# Define a BTContruct object
self.bt = BTConstruct(None, self)
class DummyIndividual:
def __init__(self):
self.phenotype = None
dummyind = DummyIndividual()
self.individual = [dummyind]
self.individual[0].phenotype = xmlstring
self.bt.xmlstring = xmlstring
self.bt.construct()
# print(self.name, self.bt.xmlstring, self.bt.behaviour_tree.root)
# py_trees.logging.level = py_trees.logging.Level.DEBUG
# py_trees.display.print_ascii_tree(self.bt.behaviour_tree.root)
def step(self):
self.bt.behaviour_tree.tick()
def advance(self):
pass
def replace_nodes(self):
dummy_bt = copy.copy(self.bt)
# dummy_bt.behaviour_tree.tick()
root = dummy_bt.behaviour_tree.root
        # For now the dummy node is Move, but it could be different
        # (Dummymove is assumed to be defined elsewhere in this repo)
name = 'Dummy' + str(self.model.random.randint(0, 1000, 1)[0])
dummynode = Dummymove(name)
def replace(roots, node):
if type(node).__name__ == 'Move':
roots.replace_child(node, dummynode)
for node in root.iterate():
try:
innerroot = node.behaviour_tree.root
for innernode in innerroot.iterate():
replace(innerroot, innernode)
except AttributeError:
replace(root, node)
return dummy_bt
```
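A numeric sketch of `EvolAgent.overall_fitness` above: a `(1 - beta)`-discounted delayed reward plus exploration, carrying, and collection terms. All values below are made up for illustration.
```python
beta = 0.25            # step_count / total iterations
delayed_reward = 40.0  # fitness inherited at the last genetic step
exploration = 12       # len(location_history) - 1
carrying = 2 * 30      # len(attached_objects) * timestamp
collected = 1          # food items found in the hub

fitness = (1 - beta) * delayed_reward + exploration + carrying + collected
print(fitness)  # 103.0
```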
#### File: examples/trap/experiment.py
```python
from model import SimTrapModel
from swarms.utils.ui import UI
# from joblib import Parallel, delayed
# Global variables for width and height
width = 400
height = 400
viewer = True
def main():
# Create a test environment to visualize
env = SimTrapModel(
2, width, height, 10, seed=123)
# for all agents store the information about hub
for agent in env.agents:
agent.shared_content['Hub'] = {env.hub}
# Overiding the default viewer properties
# env.ui = UI(
# (width, height), [env.hub], env.agents,
# [env.target], food=[], traps=[], obstacles=[env.obstacles])
# Iterate and execute each step in the environment
for i in range(220):
env.step()
print(i, env.agent.location, env.agent.direction, env.agent.dead)
if __name__ == '__main__':
main()
```
#### File: examples/trap/model.py
```python
from swarms.lib.model import Model
from swarms.lib.space import Grid
from swarms.lib.objects import Obstacles, Sites, Hub, Traps
from swarms.lib.time import SimultaneousActivation
from agent import SwarmAgentAvoid
class SimTrapModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(SimTrapModel, self).__init__(seed=None)
else:
super(SimTrapModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
self.hub = Hub(id=1, location=(+145, -145), radius=5)
self.grid.add_object_to_grid(self.hub.location, self.hub)
self.target = Sites(id=2, location=(155, 155), radius=15, q_value=0.9)
self.grid.add_object_to_grid(self.target.location, self.target)
self.obstacle1 = Obstacles(id=3, location=(40, 80), radius=29)
self.grid.add_object_to_grid(self.obstacle1.location, self.obstacle1)
self.obstacle2 = Obstacles(id=4, location=(100, 15), radius=20)
self.grid.add_object_to_grid(self.obstacle2.location, self.obstacle2)
self.obstacles = [self.obstacle1, self.obstacle2]
self.trap1 = Traps(id=5, location=(-40, -40), radius=21)
self.grid.add_object_to_grid(self.trap1.location, self.trap1)
self.trap2 = Traps(id=6, location=(100, 80), radius=20)
self.grid.add_object_to_grid(self.trap2.location, self.trap2)
self.traps = [self.trap1, self.trap2]
self.agents = []
for i in range(self.num_agents):
a = SwarmAgentAvoid(i, self)
self.schedule.add(a)
x = -190 + self.random.randint(-10, 70)
y = -190 + self.random.randint(-10, 70)
a.location = (x, y)
a.direction = -2.3661944901923448
self.grid.add_object_to_grid((x, y), a)
self.agents.append(a)
self.agent = a
def step(self):
self.schedule.step()
```
#### File: examples/wealth/experiment.py
```python
from model import WealthEnvironmentModel
from swarms.utils.jsonhandler import JsonPhenotypeData
from swarms.utils.ui import UI
# from joblib import Parallel, delayed
# Global variables for width and height
width = 500
height = 500
viewer = True
def main():
"""Block main."""
iteration = 2500
# Create a test environment to visualize
env = WealthEnvironmentModel(
1, width, height, 10, seed=None)
    # For all agents, store the information about the hub
for agent in env.agents:
agent.shared_content['Hub'] = {env.hub}
# Iterate and execute each step in the environment
for i in range(iteration):
env.step()
if __name__ == '__main__':
main()
```
#### File: examples/wealth/model.py
```python
from swarms.lib.model import Model
from swarms.lib.time import SimultaneousActivation
from swarms.lib.space import Grid
from swarms.lib.objects import Obstacles, Sites, Hub, Traps
import numpy as np
from agent import SwarmAgent
# Global variables for width and height
width = 500
height = 500
class WealthEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(WealthEnvironmentModel, self).__init__(seed=None)
else:
super(WealthEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
self.hub = Hub(id=1, location=(+145, -145), radius=5)
self.grid.add_object_to_grid(self.hub.location, self.hub)
self.site = Sites(id=2, location=(155, 155), radius=15, q_value=0.9)
self.grid.add_object_to_grid(self.site.location, self.site)
self.obstacles = Obstacles(id=3, location=(40, 80), radius=29)
self.grid.add_object_to_grid(self.obstacles.location, self.obstacles)
self.traps = Traps(id=4, location=(-40, -40), radius=21)
self.grid.add_object_to_grid(self.traps.location, self.traps)
self.agents = []
for i in range(self.num_agents):
a = SwarmAgent(i, self)
self.schedule.add(a)
# Add the agent to a random grid cell
x = self.random.randint(-self.grid.width / 2, self.grid.width / 2)
y = self.random.randint(-self.grid.height / 2, self.grid.height / 2)
a.location = (x, y)
self.grid.add_object_to_grid((x, y), a)
self.agents.append(a)
def step(self):
self.schedule.step()
```
#### File: swarm/scripts/acc.py
```python
import sys
from matplotlib import pyplot as plt
import pandas as pd
import numpy as np
from matplotlib.patches import Patch
plt.style.use('fivethirtyeight')
class ResMinMaxACC:
def __init__(self, directory, fnames, title="ACC Graph with Resilience"):
self.__dict__.update(locals())
# self.directory = directory
# self.fnames = fnames
def gen_plot(self):
fig = plt.figure()
self.normal_data = self.load_file(
self.fnames[0]) # pylint: disable=E1101
self.res1_data = self.load_file(
self.fnames[1]) # pylint: disable=E1101
self.res2_data = self.load_file(
self.fnames[2]) # pylint: disable=E1101
# self.mean1 = np.nanmean(self.normal_data, axis=0)
# self.mean2 = np.nanmean(self.res1_data, axis=0)
# self.mean3 = np.nanmean(self.res2_data, axis=0)
# print (self.mean1.shape, self.mean2.shape, self.mean2.shape)
# self.sd = np.nanstd(self.data, axis=1)
# self.max_sd = self.mean + self.sd
# self.min_sd = self.mean - self.sd
ax1 = fig.add_subplot(1, 1, 1)
plt.xlim(0, 10000)
plt.ylim(0, 100)
box_data = self.normal_data.values.T
box_data = [box_data[i] for i in range(0, 9000, 1000)]
boxprops = dict(linewidth=1, color='blue')
bp1 = ax1.boxplot(
box_data, 0, whiskerprops=boxprops,
showmeans=True, meanline=True, patch_artist=True,
positions=range(0, 9000, 1000), widths=600)
for patch in bp1['boxes']:
patch.set_facecolor('blue')
#patch.set_alpha(0.4)
patch.set_edgecolor('blue') # or try 'black'
patch.set_linewidth(1)
ax1.set_xlabel('Iteration')
ax1.set_ylabel('Performance')
ax1.set_title('Ideal Sensors', fontsize=13)
box_data = self.res1_data.values.T
box_data = [box_data[i] for i in range(0, 9000, 1000)]
fig1 = plt.figure()
ax2 = fig1.add_subplot(1, 1, 1)
bp2 = ax2.boxplot(
box_data, 0, whiskerprops=boxprops,
showmeans=True, meanline=True, patch_artist=True,
positions=range(0, 9000, 1000), widths=450)
for patch in bp2['boxes']:
patch.set_facecolor('blue')
#patch.set_alpha(0.4)
patch.set_edgecolor('blue') # or try 'black'
patch.set_linewidth(1)
ax2.set_xlabel('Iteration')
ax2.set_ylabel('Performance')
ax2.set_title('50% Sensor Failure', fontsize=13)
fig.tight_layout()
fig1.tight_layout()
fig.savefig(self.directory + '/acc_res.pdf') # pylint: disable=E1101
fig.savefig(self.directory + '/acc_res.png') # pylint: disable=E1101
fig1.savefig(self.directory + '/acc_res1.pdf') # pylint: disable=E1101
fig1.savefig(self.directory + '/acc_res1.png') # pylint: disable=E1101
plt.close(fig)
plt.close(fig1)
def load_file(self, fname):
# try:
data = pd.read_csv(
self.directory + '/' + fname, sep='|', # pylint: disable=E1101
skipinitialspace=True)
return data
# except FileNotFoundError:
# exit()
def save_step_graph(self, filename, fields):
pass
class ResMinMaxACC1:
def __init__(self, directory, fnames, title="ACC Graph with Resilience"):
self.__dict__.update(locals())
# self.directory = directory
# self.fnames = fnames
def gen_plot(self):
fig = plt.figure()
self.normal_data = self.load_file(
self.fnames[0]) # pylint: disable=E1101
self.res1_data = self.load_file(
self.fnames[1]) # pylint: disable=E1101
self.res2_data = self.load_file(
self.fnames[2]) # pylint: disable=E1101
self.mean1 = np.nanmean(self.normal_data, axis=0)
self.mean2 = np.nanmean(self.res1_data, axis=0)
self.mean3 = np.nanmean(self.res2_data, axis=0)
# print (self.mean1.shape, self.mean2.shape, self.mean2.shape)
# self.sd = np.nanstd(self.data, axis=1)
# self.max_sd = self.mean + self.sd
# self.min_sd = self.mean - self.sd
ax1 = fig.add_subplot(1, 1, 1)
        print(self.normal_data.shape)
plt.xlim(0, 10000)
box_data = self.normal_data.values.T
box_data = [box_data[i] for i in range(1000, 9500, 1000)]
boxprops = dict(linewidth=1, color='pink')
bp1 = ax1.boxplot(
box_data, 0, whiskerprops=boxprops,
showmeans=True, meanline=True, patch_artist=True,
positions=range(1000, 9500, 1000), widths=600)
box_data = self.res1_data.values.T
box_data = [box_data[i] for i in range(1000, 9500, 1000)]
boxprops = dict(linewidth=1, color='lightblue')
bp2 = ax1.boxplot(
box_data, 0, whiskerprops=boxprops,
showmeans=True, meanline=True, patch_artist=True,
positions=range(1000, 9500, 1000), widths=450)
box_data = self.res2_data.values.T
box_data = [box_data[i] for i in range(1000, 9500, 1000)]
boxprops = dict(linewidth=1, color='lightgreen')
bp3 = ax1.boxplot(
            box_data, 0, whiskerprops=boxprops,
showmeans=True, meanline=True, patch_artist=True,
positions=range(1000, 9500, 1000), widths=250)
for patch in bp1['boxes']:
patch.set_facecolor('pink')
patch.set_alpha(0.4)
patch.set_edgecolor('pink') # or try 'black'
patch.set_linewidth(1)
for patch in bp2['boxes']:
patch.set_facecolor('lightblue')
patch.set_alpha(0.6)
patch.set_edgecolor('lightblue') # or try 'black'
patch.set_linewidth(1)
for patch in bp3['boxes']:
patch.set_facecolor('lightgreen')
patch.set_alpha(0.6)
patch.set_edgecolor('lightgreen') # or try 'black'
patch.set_linewidth(1)
ax1.set_xlabel('Iteration')
ax1.set_ylabel('Performance')
ax1.set_title('Single-Source Foraging\nActuator Failure')
plt.tight_layout()
ax1.legend(
[bp1['boxes'][0], bp2['boxes'][0],bp3['boxes'][0]],
['Normal','Failure 1','Failure 2'], loc='upper left')
fig.savefig(self.directory + '/acc_res.pdf') # pylint: disable=E1101
fig.savefig(self.directory + '/acc_res.png') # pylint: disable=E1101
plt.close(fig)
def load_file(self, fname):
# try:
data = pd.read_csv(
self.directory + '/' + fname, sep='|', # pylint: disable=E1101
skipinitialspace=True)
return data
# except FileNotFoundError:
# exit()
def save_step_graph(self, filename, fields):
pass
def main():
"""Parse args and call graph module."""
filenames = sys.argv[1]
fdir = sys.argv[2]
filenames = filenames.split(',')
# print (filenames)
graph = ResMinMaxACC(fdir, filenames, "Single-Source Foraging")
graph.gen_plot()
if __name__ == '__main__':
main()
```
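The plotting classes above overlay several boxplot series on one axis by passing explicit `positions` and progressively narrower `widths`, then colouring the boxes through `patch_artist`. A minimal standalone sketch of that technique with synthetic data (all names and values here are illustrative):
```python
import numpy as np
from matplotlib import pyplot as plt

rng = np.random.default_rng(0)
positions = list(range(0, 9000, 1000))
data_a = [rng.normal(60, 8, 50) for _ in positions]
data_b = [rng.normal(45, 8, 50) for _ in positions]

fig, ax = plt.subplots()
# Wider boxes drawn first, narrower boxes drawn on top at the same x.
bp_a = ax.boxplot(data_a, positions=positions, widths=600, patch_artist=True)
bp_b = ax.boxplot(data_b, positions=positions, widths=450, patch_artist=True)
for patch in bp_a['boxes']:
    patch.set_facecolor('pink')
for patch in bp_b['boxes']:
    patch.set_facecolor('lightblue')
ax.legend([bp_a['boxes'][0], bp_b['boxes'][0]], ['Normal', 'Failure'])
ax.set_xlabel('Iteration')
ax.set_ylabel('Performance')
plt.show()
```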
#### File: swarms/behaviors/sbehaviors.py
```python
import numpy as np
from py_trees.trees import BehaviourTree
from py_trees.behaviour import Behaviour
from py_trees.composites import Sequence, Selector, Parallel
from py_trees import common, blackboard
import py_trees
from swarms.utils.distangle import get_direction, check_intersect
from swarms.lib.objects import Pheromones, Signal, Cue
import os
import matplotlib
# If there is no $DISPLAY, fall back to the non-interactive Agg backend
if os.name == 'posix' and "DISPLAY" not in os.environ:
matplotlib.use('Agg')
import matplotlib.pyplot as plt
class ObjectsStore:
"""Static class to search.
    This class provides a find method to search through the
    Behavior Tree blackboard and the agent content.
"""
@staticmethod
def find(blackboard_content, agent_content, name, agent_name):
"""Let this method implement search.
This method find implements a search through
blackboard dictionary. If the object is not found
in blackboard, then agent content is searched.
"""
try:
if name is not None:
objects = blackboard_content[name]
return list(objects)
else:
return list(blackboard_content.values())
except KeyError:
try:
objects = agent_content[name]
return list(objects)
except KeyError:
return []
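# Illustrative lookup semantics (not part of the original source): the
# per-tick blackboard is consulted first, then the agent's long-term
# memory, and a miss in both yields an empty list rather than an exception:
#   ObjectsStore.find({'Sites': {site}}, {}, 'Sites', 'a0')  -> [site]
#   ObjectsStore.find({}, {'Hub': {hub}}, 'Hub', 'a0')       -> [hub]
#   ObjectsStore.find({}, {}, 'Food', 'a0')                  -> []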
class NeighbourObjects(Behaviour):
"""Sense behavior for the agents.
Inherits the Behaviors class from py_trees. This
behavior implements the sense function for the agents. This allows
    the agents to sense the nearby environment based on their
sense radius.
"""
def __init__(self, name):
"""Init method for the sense behavior."""
super(NeighbourObjects, self).__init__(name)
def setup(self, timeout, agent, item):
"""Have defined the setup method.
This method defines the other objects required for the
behavior. Agent is the actor in the environment,
item is the name of the item we are trying to find in the
environment and timeout defines the execution time for the
behavior.
"""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.WRITE)
def initialise(self):
"""Everytime initialization. Not required for now."""
pass
def receive_signals(self):
"""Receive signals from other agents.
Since this is the primary behavior for the agents to sense
the environment, we include the receive signal method here.
The agents will be able to
sense the environment and check if
it receives any signals from other agents.
"""
def update(self):
"""
Sense the neighborhood.
This method gets the grid values based on the current location and
        radius. The grids are used to search the environment. If the agent
        finds any objects, they are stored in the behavior tree blackboard, which
is a dictionary with sets as values.
"""
# if self.item is None:
# grids = self.agent.model.grid.get_neighborhood(
# self.agent.location, self.agent.radius*4)
# else:
grids = self.agent.model.grid.get_neighborhood(
self.agent.location, self.agent.radius)
objects = self.agent.model.grid.get_objects_from_list_of_grid(
self.item, grids)
# Need to reset blackboard contents after each sense
self.blackboard.neighbourobj = dict()
if len(objects) >= 1:
if self.agent in objects:
objects.remove(self.agent)
if len(objects) >= 1:
for item in objects:
name = type(item).__name__
                    # If the item is not carryable, its location
                    # and properties do not change, so we can commit its
                    # information to memory
# if item.carryable is False and item.deathable is False:
if name in ['Sites', 'Hub', 'Boundary']:
try:
self.agent.shared_content[name].add(item)
except KeyError:
self.agent.shared_content[name] = {item}
else:
# name = name + str(self.agent.name)
try:
self.blackboard.neighbourobj[name].add(item)
except KeyError:
self.blackboard.neighbourobj = dict()
self.blackboard.neighbourobj[name] = {item}
return common.Status.SUCCESS
else:
return common.Status.FAILURE
else:
return common.Status.FAILURE
class NeighbourObjectsDist(Behaviour):
"""Sense behavior for the agents.
Inherits the Behaviors class from py_trees. This
behavior implements the sense function for the agents. This allows
    the agents to sense the nearby environment based on their
sense radius.
"""
def __init__(self, name):
"""Init method for the sense behavior."""
super(NeighbourObjectsDist, self).__init__(name)
def setup(self, timeout, agent, item):
"""Have defined the setup method.
This method defines the other objects required for the
behavior. Agent is the actor in the environment,
item is the name of the item we are trying to find in the
environment and timeout defines the execution time for the
behavior.
"""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.WRITE)
def initialise(self):
"""Everytime initialization. Not required for now."""
pass
def receive_signals(self):
"""Receive signals from other agents.
Since this is the primary behavior for the agents to sense
the environment, we include the receive signal method here.
The agents will be able to
sense the environment and check if
it receives any signals from other agents.
"""
def update(self):
"""
Sense the neighborhood.
This method gets the grid values based on the current location and
        radius. The grids are used to search the environment. If the agent
        finds any objects, they are stored in the behavior tree blackboard, which
is a dictionary with sets as values.
"""
# if self.item is None:
# grids = self.agent.model.grid.get_neighborhood(
# self.agent.location, self.agent.radius*4)
# else:
# grids = self.agent.model.grid.get_neighborhood(
# self.agent.location, self.agent.radius)
grids = []
# for i in range(1, self.agent.model.grid.grid_size):
status = common.Status.FAILURE
for i in range(0, self.agent.radius):
x = int(self.agent.location[0] + np.cos(
self.agent.direction) * i)
y = int(self.agent.location[1] + np.sin(
self.agent.direction) * i)
new_location, direction = self.agent.model.grid.check_limits(
(x, y), self.agent.direction)
# grids += self.agent.model.grid.get_neighborhood(new_location, 1)
limits, grid = self.agent.model.grid.find_grid(new_location)
# print(self.agent.name, grid, self.name, round(self.agent.direction, 2), self.id, limits)
objects = self.agent.model.grid.get_objects(
self.item, grid)
# print('nighbourdist', grid, objects, self.agent.location, (new_location), limits)
# Need to reset blackboard contents after each sense
self.blackboard.neighbourobj = dict()
if len(objects) >= 1:
if self.agent in objects:
objects.remove(self.agent)
for item in objects:
name = type(item).__name__
                    # If the item is not carryable, its location
                    # and properties do not change, so we can commit its
                    # information to memory
# if item.carryable is False and item.deathable is False:
# name = name + str(self.agent.name)
if item.passable is False:
try:
self.blackboard.neighbourobj[name].add(item)
except KeyError:
self.blackboard.neighbourobj[name] = {item}
# if status == common.Status.SUCCESS:
# pass
# else:
status = common.Status.SUCCESS
return status
return status
class GoTo(Behaviour):
"""GoTo behavior for the agents.
Inherits the Behaviors class from py_trees. This
behavior implements the GoTo function for the agents. This allows
    the agents to direct themselves towards the object they want to reach. This
    behavior is only concerned with direction alignment, not with movement.
"""
def __init__(self, name):
"""Init method for the GoTo behavior."""
super(GoTo, self).__init__(name)
# self.blackboard = Blackboard()
# self.blackboard.neighbourobj = dict()
def setup(self, timeout, agent, item):
"""Have defined the setup method.
This method defines the other objects required for the
behavior. Agent is the actor in the environment,
item is the name of the item we are trying to find in the
environment and timeout defines the execution time for the
behavior.
"""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Everytime initialization. Not required for now."""
pass
def update(self):
"""
        Go towards the object of interest.

        This method uses the ObjectsStore class to search the
        objects sensed before and the agent's shared storage. If the agent
        finds the object of interest in the store, the direction to the
        object is computed and the agent's direction is set to it.
"""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)
            if len(objects) > 1:
                objects = self.agent.model.random.choice(objects)
            else:
                objects = objects[0]
self.agent.direction = get_direction(
objects.location, self.agent.location) % (2 * np.pi)
return common.Status.SUCCESS
except (AttributeError, IndexError):
return common.Status.FAILURE
# Behavior defined to move towards something
class Towards(Behaviour):
"""Towards behaviors.
Changes the direction to go towards the object.
"""
def __init__(self, name):
"""Initialize."""
super(Towards, self).__init__(name)
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
def initialise(self):
"""Pass."""
pass
def update(self):
"""Nothing much to do."""
return common.Status.SUCCESS
# Behavior defined to move away from something
class Away(Behaviour):
"""Away behavior."""
def __init__(self, name):
"""Initialize."""
super(Away, self).__init__(name)
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
def initialise(self):
"""Pass."""
pass
def update(self):
"""Compute direction and negate it."""
self.agent.direction = (self.agent.direction + np.pi) % (2 * np.pi)
return common.Status.SUCCESS
# Behavior defined for Randomwalk
class RandomWalk(Behaviour):
"""Random walk behaviors."""
def __init__(self, name):
"""Initialize."""
super(RandomWalk, self).__init__(name)
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
def initialise(self):
"""Pass."""
pass
def update(self):
"""Compute random direction and set it to agent direction."""
delta_d = self.agent.model.random.normal(0, .1)
self.agent.direction = (self.agent.direction + delta_d) % (2 * np.pi)
return common.Status.SUCCESS
class IsMoveable(Behaviour):
"""Check is the item is moveable."""
def __init__(self, name):
"""Initialize."""
super(IsMoveable, self).__init__(name)
# self.blackboard = Blackboard()
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Get the object and check its movelable property."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)
for obj in objects:
                if not obj.moveable:
return common.Status.FAILURE
return common.Status.SUCCESS
except (AttributeError, IndexError):
return common.Status.FAILURE
# Behavior defined to move
class Move(Behaviour):
"""Actually move the agent.
Move the agent with any other object fully attached or
partially attached to the agent.
"""
def __init__(self, name):
"""Initialize."""
super(Move, self).__init__(name)
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
self.dt = 0.1
def initialise(self):
"""Pass."""
pass
def update_signals(self, old_loc, new_loc):
"""Signal also move along with agents.
Signal is created by the agent. It has certain broadcast radius. It
moves along with the agent. So this move behavior should also be
responsible to move the signals.
"""
try:
for signal in self.agent.signals:
if self.agent.model.grid.move_object(
old_loc, signal, new_loc):
pass
else:
return False
except IndexError:
pass
return True
def update_partial_attached_objects(self):
"""Move logic for partially attached objects."""
try:
for item in self.agent.partial_attached_objects:
                acceleration = self.agent.force / item.agents[self.agent]
                velocity = (acceleration * self.dt) / len(item.agents)
direction = self.agent.direction
"""
if np.cos(direction) > 0:
x = int(np.ceil(
item.location[0] + np.cos(direction) * velocity))
y = int(np.ceil(
item.location[1] + np.sin(direction) * velocity))
else:
x = int(np.floor(
item.location[0] + np.cos(direction) * velocity))
y = int(np.floor(
item.location[1] + np.sin(direction) * velocity))
"""
x = int(self.agent.location[0] + np.cos(
direction) * velocity)
y = int(self.agent.location[1] + np.sin(
direction) * velocity)
# object_agent = list(item.agents.keys())
# indx = self.agent.model.random.randint(0, len(object_agent))
# object_agent = object_agent[indx]
object_agent = self.agent
# new_location, direction
# = object_agent.model.grid.check_limits(
# (x, y), direction)
new_location = (x, y)
object_agent.model.grid.move_object(
item.location, item, new_location)
self.agent.direction = direction
item.location = new_location
return True
except (IndexError, ValueError):
return False
def update(self):
"""Move logic for agent and fully carried object."""
# Partially carried object
if not self.update_partial_attached_objects():
self.agent.accleration = self.agent.force / self.agent.get_weight()
self.agent.velocity = self.agent.accleration * 1.0
# print(self.agent.direction, self.agent.velocity, self.agent.location)
x = int(np.round(self.agent.location[0] + np.cos(
self.agent.direction) * self.agent.velocity))
y = int(np.round(self.agent.location[1] + np.sin(
self.agent.direction) * self.agent.velocity))
new_location, direction = self.agent.model.grid.check_limits(
(x, y), self.agent.direction)
# print('from move', self.name, self.agent.location, new_location, direction)
if self.agent.model.grid.move_object(
self.agent.location, self.agent, new_location):
# Now the agent location has been updated, update the signal grids
if not self.update_signals(self.agent.location, new_location):
return common.Status.FAILURE
self.agent.location = new_location
self.agent.direction = direction
# Full carried object moves along the agent
for item in self.agent.attached_objects:
item.location = self.agent.location
else:
return common.Status.FAILURE
else:
new_location = self.agent.partial_attached_objects[0].location
for agent in self.agent.partial_attached_objects[0].agents.keys():
if agent.model.grid.move_object(
agent.location, agent,
new_location):
agent.location = new_location
else:
return common.Status.FAILURE
# Now the agent location has been updated, update the signal grids
if not self.update_signals(self.agent.location, new_location):
return common.Status.FAILURE
return common.Status.SUCCESS
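# Worked example (illustrative) of the kinematics above: for a fully carried
# load, acceleration = force / get_weight() and velocity = acceleration * 1.0,
# so force = 10 on a total weight of 5 gives a 2-unit step along the heading.
# A partially attached object instead moves by
# (force / capacity_used) * dt / n_agents per carrier, so with dt = 0.1 and
# two carriers each tick advances it a correspondingly smaller step.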
# Behavior defined for do-not-move
class DoNotMove(Behaviour):
"""Stand still behaviors."""
def __init__(self, name):
"""Initialize."""
super(DoNotMove, self).__init__(name)
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
def initialise(self):
"""Pass."""
pass
def update(self):
"""Update agent moveable property."""
self.agent.moveable = False
return common.Status.SUCCESS
# Behavior to check carryable attribute of an object
class IsCarryable(Behaviour):
"""Check carryable attribute of the item."""
def __init__(self, name):
"""Initialize."""
super(IsCarryable, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Check carryable property."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
if objects.carryable:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except (AttributeError, IndexError):
return common.Status.FAILURE
# Behavior to check carryable attribute of an object
class IsDropable(Behaviour):
"""Check dropable property."""
def __init__(self, name):
"""Initialize."""
super(IsDropable, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Check the dropable attribute."""
status = common.Status.FAILURE
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)
if len(objects) >= 1:
for obj in objects:
if status == common.Status.SUCCESS:
break
                    if obj.dropable:
status = common.Status.SUCCESS
return status
else:
return common.Status.SUCCESS
except (AttributeError, IndexError):
return common.Status.SUCCESS
# Behavior defined to check if the item is carryable on its own
class IsSingleCarry(Behaviour):
"""Single carry behavior."""
def __init__(self, name):
"""Initialize."""
super(IsSingleCarry, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic to check if the object can be carried by single agent."""
# Logic to carry
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
if objects.weight:
if self.agent.get_capacity() > objects.calc_relative_weight():
return common.Status.SUCCESS
else:
return common.Status.FAILURE
else:
return common.Status.FAILURE
except (AttributeError, IndexError):
return common.Status.FAILURE
# Behavior defined to check if the item is carryable on its own or not
class IsMultipleCarry(Behaviour):
"""Multiple carry behaviour."""
def __init__(self, name):
"""Initialize."""
super(IsMultipleCarry, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for multiple carry by checking the weights."""
try:
# Logic to carry
# objects = self.blackboard.neighbourobj[self.thing].pop()
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
if objects.weight:
if self.agent.get_capacity() < objects.weight:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
else:
return common.Status.FAILURE
except (AttributeError, IndexError):
return common.Status.FAILURE
class IsCarrying(Behaviour):
"""Condition check if the agent is carrying something."""
def __init__(self, name):
"""Initialize."""
super(IsCarrying, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for object carrying check."""
try:
things = []
for item in self.agent.attached_objects:
things.append(type(item).__name__)
if self.item in set(things):
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except (AttributeError, IndexError):
return common.Status.FAILURE
# Behavior defined to drop the items currently carrying
class Drop(Behaviour):
"""Drop behavior to drop items which is being carried."""
def __init__(self, name):
"""Initialize."""
super(Drop, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic to drop the item."""
try:
# Get the objects from the actuators
objects = list(filter(
lambda x: type(x).__name__ == self.item,
self.agent.attached_objects))[0]
# Grid
grid = self.agent.model.grid
static_grids = grid.get_neighborhood(self.agent.location, self.agent.radius)
envobjects = self.agent.model.grid.get_objects_from_list_of_grid(None, static_grids)
dropped = False
for obj in envobjects:
if type(obj).__name__ in ['Hub', 'Boundary', 'Obstacles']:
dropped = True
obj.dropped_objects.append(objects)
self.agent.attached_objects.remove(objects)
objects.agent_name = self.agent.name
break
if not dropped:
self.agent.model.grid.add_object_to_grid(objects.location, objects)
self.agent.attached_objects.remove(objects)
objects.agent_name = self.agent.name
# Temporary fix
# Store the genome which activated the single carry
try:
# objects.phenotype['drop'] =
# self.agent.individual[0].phenotype
objects.phenotype = {
self.agent.individual[0].phenotype: self.agent.individual[
0].fitness}
return common.Status.SUCCESS
except AttributeError:
pass
# objects.agents.remove(self.agent)
return common.Status.SUCCESS
except (AttributeError, IndexError):
return common.Status.FAILURE
class DropPartial(Behaviour):
"""Drop behavior for partially attached object."""
def __init__(self, name):
"""Initialize."""
super(DropPartial, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic to drop partially attached object."""
try:
objects = list(filter(
lambda x: type(x).__name__ == self.item,
self.agent.partial_attached_objects))[0]
objects.agents.pop(self.agent)
self.agent.partial_attached_objects.remove(objects)
# If the agent is last to drop reduce the size of the
# food to the half the size of the hub. This indicates
# that the food has been deposited to the hub
if len(objects.agents) == 0:
self.agent.model.grid.remove_object_from_grid(
objects.location, objects)
objects.radius = int(self.agent.model.hub.radius / 2)
objects.location = self.agent.model.hub.location
self.agent.model.grid.add_object_to_grid(
objects.location, objects)
try:
objects.phenotype = {
self.agent.individual[0].phenotype: self.agent.individual[
0].fitness}
return common.Status.SUCCESS
except AttributeError:
pass
return common.Status.SUCCESS
except (AttributeError, IndexError):
return common.Status.FAILURE
# Behavior defined to carry the items found
class SingleCarry(Behaviour):
"""Carry behavior."""
def __init__(self, name):
"""Initialize."""
super(SingleCarry, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Carry logic to carry the object by the agent."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
self.agent.attached_objects.append(objects)
self.agent.model.grid.remove_object_from_grid(
objects.location, objects)
objects.agent_name = self.agent.name
# Add the agent to the object dict
# objects.agents[self.agent] = self.agent.get_capacity()
# Temporary fix
# Store the genome which activated the single carry
try:
objects.phenotype = {
self.agent.individual[0].phenotype: self.agent.individual[
0].fitness}
except AttributeError:
pass
return common.Status.SUCCESS
except (AttributeError, IndexError):
return common.Status.FAILURE
except ValueError:
self.agent.attached_objects.remove(objects)
return common.Status.FAILURE
class InitiateMultipleCarry(Behaviour):
"""Behavior to initiate multiple carry process."""
def __init__(self, name):
"""Initialize."""
super(InitiateMultipleCarry, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic to initiaite multiple carry process."""
try:
# objects = self.blackboard.neighbourobj[self.thing].pop()
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
            relative_weight = objects.calc_relative_weight()
            if relative_weight > 0:
                if relative_weight - self.agent.get_capacity() >= 0:
                    capacity_used = self.agent.get_capacity()
                else:
                    capacity_used = relative_weight
                # Update the partial attached object
                self.agent.partial_attached_objects.append(objects)
                # Update the object so that it knows this agent
                # has attached to it
                objects.agents[self.agent] = capacity_used
            else:
                # Redistribute the weights to all the attached objects
                average_weight = objects.redistribute_weights()
                self.agent.partial_attached_objects.append(objects)
                objects.agents[self.agent] = average_weight
            # Store the genome which activated the multiple carry
            try:
                objects.phenotype = {
                    self.agent.individual[0].phenotype: self.agent.individual[
                        0].fitness}
            except AttributeError:
                pass
            return common.Status.SUCCESS
except (KeyError, AttributeError, IndexError):
return common.Status.FAILURE
class IsInPartialAttached(Behaviour):
"""Condition to check if the object is in partially attached list."""
def __init__(self, name):
"""Initialize."""
super(IsInPartialAttached, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic to check if the object is in partially attached list."""
# objects = self.blackboard.neighbourobj[self.thing].pop()
try:
things = []
for item in self.agent.partial_attached_objects:
things.append(type(item).__name__)
objects = list(filter(
lambda x: type(x).__name__ == self.item,
self.agent.partial_attached_objects))[0]
if self.item in set(things) and \
self.agent in objects.agents:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except IndexError:
return common.Status.FAILURE
class IsEnoughStrengthToCarry(Behaviour):
"""Condition to check if the agent has enough strength to carry."""
def __init__(self, name):
"""Initialize."""
super(IsEnoughStrengthToCarry, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic to check if the agent has enough strength to carry."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
if self.agent.get_capacity() >= objects.calc_relative_weight():
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except IndexError:
return common.Status.FAILURE
class IsMotionTrue(Behaviour):
"""Condition to check is the object is moving."""
def __init__(self, name):
"""Initialize."""
super(IsMotionTrue, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic to check if the object is moving."""
try:
if self.agent.partial_attached_objects[0].motion is True:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except (AttributeError, IndexError):
return common.Status.FAILURE
class IsVisitedBefore(Behaviour):
"""Condition to check is the object is visited before."""
def __init__(self, name):
"""Initialize."""
super(IsVisitedBefore, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic to check is the object is visited before."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
if objects:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except (AttributeError, IndexError):
return common.Status.FAILURE
class MultipleCarry(Behaviour):
"""Multiple carry behavior."""
def __init__(self, name):
"""Initialize."""
super(MultipleCarry, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for multiple carry."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
self.agent.model.grid.remove_object_from_grid(
objects.location, objects)
return common.Status.SUCCESS
except IndexError:
return common.Status.FAILURE
# Lets start some communication behaviors
class SignalDoesNotExists(Behaviour):
"""Signal exists behavior.
This behavior enables agents to check it that signal already exists.
"""
def __init__(self, name):
"""Initialize."""
super(SignalDoesNotExists, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for sending signal."""
try:
# Find the object the agent is trying to signal
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
if len(self.agent.signals) > 0:
                # Check the agent's signals array for its existence
signal_objects = []
for signal in self.agent.signals:
signal_objects.append(signal.object_to_communicate)
if objects not in signal_objects:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
else:
return common.Status.SUCCESS
except (IndexError, AttributeError):
return common.Status.FAILURE
# Lets start some communication behaviors
class IsSignalActive(Behaviour):
"""Is Signal active?
    This behavior enables agents to check whether a signal is already active.
"""
def __init__(self, name):
"""Initialize."""
super(IsSignalActive, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for sending signal."""
try:
            # Check whether the agent currently has any active signals.
if len(self.agent.signals) > 0:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except (IndexError, AttributeError):
return common.Status.FAILURE
class SendSignal(Behaviour):
"""Signalling behavior.
This behavior enables agents to send signals about the information they
have gathered. The information could be about location of site, hub, food,
obstacles and others.
"""
def __init__(self, name):
"""Initialize."""
super(SendSignal, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for sending signal."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
# Initialize the signal object
signal = Signal(
id=self.agent.name, location=self.agent.location,
radius=self.agent.radius, object_to_communicate=objects)
# Add the signal to the grids so it could be sensed by
# other agents
self.agent.model.grid.add_object_to_grid(
self.agent.location, signal)
# Append the signal object to the agent signal list
self.agent.signals.append(signal)
return common.Status.SUCCESS
except (IndexError, AttributeError):
return common.Status.FAILURE
class ReceiveSignal(Behaviour):
"""Receive signals from other agents.
Since this is the primary behavior for the agents to sense
the environment, we include the receive signal method here.
The agents will be able to sense the environment and check if
it receives any signals from other agents.
"""
def __init__(self, name):
"""Initialize."""
super(ReceiveSignal, self).__init__(name)
def setup(self, timeout, agent, item='Signal'):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for receiving signal."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)
# Extract the information from the signal object and
# store into the agent memory
objects = [obj for obj in objects if obj.id != self.agent.name][0]
objects = objects.communicated_object
name = type(objects).__name__
try:
self.agent.shared_content[name].add(objects)
except KeyError:
self.agent.shared_content[name] = {objects}
return common.Status.SUCCESS
except (IndexError, AttributeError):
return common.Status.FAILURE
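# Round trip (illustrative summary): SendSignal wraps an object of interest
# in a Signal placed at the sender's location; any agent whose sensed grids
# overlap that Signal can run ReceiveSignal, which unwraps the communicated
# object and commits it to the receiver's shared_content memory.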
class CueDoesNotExists(Behaviour):
"""Cue does not exists behavior.
This behavior enables agents to check if that cue already exists.
"""
def __init__(self, name):
"""Initialize."""
super(CueDoesNotExists, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for cue checking."""
try:
# Find the object the agent is trying to cue
grids = self.agent.model.grid.get_neighborhood(
self.agent.location, self.agent.radius)
cue_objects = self.agent.model.grid.get_objects_from_list_of_grid(
'Cue', grids)
if len(cue_objects) > 0:
                # Check the agent's cue list for its existence
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
cue_in_list = [
cue.object_to_communicate for cue in cue_objects]
if objects not in cue_in_list:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
else:
return common.Status.SUCCESS
except (IndexError, AttributeError):
return common.Status.FAILURE
# Communication behaviors related to cue
class DropCue(Behaviour):
"""Drop cue in the environment.
This is a communication behavior where a physical object
is placed in the environment which gives a particular information
to the agents sensing this cue.
"""
def __init__(self, name):
"""Initialize."""
super(DropCue, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for dropping cue."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
# Initialize the cue object
cue = Cue(
id=self.agent.name, location=self.agent.location,
radius=self.agent.radius, object_to_communicate=objects)
# Add the cue to the grids so it could be sensed by
# other agents
self.agent.model.grid.add_object_to_grid(
cue.location, cue)
# We just drop the cue on the environment and don't keep track
# of it. Instead of using cue here we can derive a class from cue
            # and call it pheromone
return common.Status.SUCCESS
except (IndexError, AttributeError):
return common.Status.FAILURE
class PickCue(Behaviour):
"""Pick cue in the environment.
This is a communication behavior where the information from the cue
    object in the environment is picked up.
"""
def __init__(self, name):
"""Initialize."""
super(PickCue, self).__init__(name)
def setup(self, timeout, agent, item='Cue'):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for pickup cue."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
            # Get information from the cue. For now, the agent orients
            # its direction towards the object that is communicated
self.agent.direction = get_direction(
objects.communicated_location, self.agent.location)
objects = objects.communicated_object
name = type(objects).__name__
try:
self.agent.shared_content[name].add(objects)
except KeyError:
self.agent.shared_content[name] = {objects}
return common.Status.SUCCESS
except (IndexError, AttributeError):
return common.Status.FAILURE
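# Signals versus cues (illustrative summary): a Signal stays registered with
# its sender and moves along with it (see Move.update_signals), whereas a Cue
# is dropped at a fixed location and forgotten by the sender; in both cases
# the receiver extracts the communicated object into shared_content.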
class AvoidSObjects(Behaviour):
"""Avoid Static objects in the environment.
    This is an avoidance behavior where the agent avoids
    static objects that are not passable.
"""
def __init__(self, name):
"""Initialize."""
super(AvoidSObjects, self).__init__(name)
def setup(self, timeout, agent, item='Obstacles'):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for avoid static objects."""
# try:
item = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
# alpha = get_direction(self.agent.location, objects.location)
# theta = self.agent.direction
# angle_diff = theta-alpha
# print('From', self.agent.name, self.name, item, self.agent.direction, item.location, item.radius)
x = int(np.ceil(self.agent.location[0] + np.cos(
self.agent.direction) * self.agent.radius))
y = int(np.ceil(self.agent.location[1] + np.sin(
self.agent.direction) * self.agent.radius))
# agent_A = y - self.agent.location[1]
# agent_B = self.agent.location[0] - x
# agent_C = agent_A * self.agent.location[0] + agent_B * self.agent.location[1]
# print('agetn ABC', agent_A, agent_B, agent_C)
# obj_x2 = int(np.ceil(item.location[0] + (np.cos(np.pi/4) * (item.radius))))
# obj_y2 = int(np.ceil(item.location[1] + (np.sin(np.pi/4) * (item.radius))))
# obj_loc_2 = self.agent.model.grid.find_upperbound((obj_x2, obj_y2))
# # print('obj x2', obj_x2, obj_y2, obj_loc_2)
# obj_x0 = int(np.floor(item.location[0] + (np.cos(np.pi/4 + np.pi) * (item.radius))))
# obj_y0 = int(np.floor(item.location[1] + (np.sin(np.pi/4 + np.pi) * (item.radius))))
# obj_loc_0 = self.agent.model.grid.find_lowerbound((obj_x0, obj_y0))
# # print('obj x1', obj_x0, obj_y0, obj_loc_0)
# obj_loc_1 = (obj_loc_2[0], obj_loc_0[1])
# obj_loc_3 = (obj_loc_0[0], obj_loc_2[1])
grids = self.agent.model.grid.get_neighborhood(item.location, item.radius)
points = [self.agent.model.grid.grid_reverse[grid] for grid in grids]
p1s, p2s = zip(*points)
x1s, y1s = zip(*p1s)
x2s, y2s = zip(*p2s)
x1 = min(x1s)
y1 = min(y1s)
x2 = max(x2s)
y2 = max(y2s)
# print(grids, [self.agent.model.grid.grid_reverse[grid] for grid in grids])
intersect = False
# for grid in grids:
# p1, p2 = self.agent.model.grid.grid_reverse[grid]
# x1, y1 = p1
# x2, y2 = p2
lines = [
[(x1, y1), (x2, y1)],
[(x2, y1), (x2, y2)],
[(x2, y2), (x1, y2)],
[(x1, y2), (x1, y1)]
]
# print('agent ray', self.agent.location, (x,y))
# print('rectangle obstacle',lines)
# plt.plot([self.agent.location[0], x], [self.agent.location[1], y], 'r--')
# plt.plot([lines[0][0][0], lines[0][1][0]], [lines[0][0][1], lines[0][1][1]],'b.-')
# plt.plot([lines[1][0][0], lines[1][1][0]], [lines[1][0][1], lines[1][1][1]],'b.-')
# plt.plot([lines[2][0][0], lines[2][1][0]], [lines[2][0][1], lines[2][1][1]],'b.-')
# plt.plot([lines[3][0][0], lines[3][1][0]], [lines[3][0][1], lines[3][1][1]],'b.-')
# # plt.xticks(range(-20, 20, 1))
# # plt.yticks(range(-20, 20, 1))
# plt.show()
for line in lines:
intersect = check_intersect(self.agent.location, (x, y), line[0], line[1])
if intersect:
dx = line[1][0] - line[0][0]
dy = line[1][1] - line[0][1]
self.agent.direction = np.arctan2(dy,dx)
break
# if intersect:
# break
# line_A = line[1][1] - line[0][1]
# line_B = line[0][0] - line[1][0]
# line_C = line_A * line[0][0] + line_B * line[0][1]
# slope = round(agent_A * line_B - line_A * agent_B, 2)
# # print('slope', slope)
# if slope == 0.0:
# break
# else:
# intersection_x = int((line_B * agent_C - agent_B * line_C) / slope)
# intersection_y = int((agent_A * line_C - line_A * agent_C) / slope)
# print('itersection point', intersection_x, intersection_y, self.agent.location, x, y, line)
# if (
# (intersection_x <= x) and ( intersection_x >= self.agent.location[0]) and
# (intersection_y <= y) and ( intersection_y >= self.agent.location[1])):
# # ((intersection_x <= line[1][0]) and ( intersection_x >= line[0][0]) and
# # (intersection_y <= line[1][1]) and ( intersection_y >= line[0][1]))):
# direction = np.arctan2(line[1][1] - line[0][1], line[1][0] - line[0][0])
# print('computed direction', direction)
# self.agent.direction = (direction + 2*np.pi) % (2*np.pi)
# break
# direction = self.agent.direction + np.pi/2
# self.agent.direction = direction % (2 * np.pi)
# print(self.agent.name, direction, self.agent.direction)
return common.Status.SUCCESS
# except (IndexError, AttributeError):
# return common.Status.FAILURE
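# Geometry note (illustrative): the obstacle's occupied grids are collapsed
# into a single bounding rectangle (x1, y1)-(x2, y2); the agent casts a ray
# of length radius along its heading, and on the first rectangle edge that
# the ray intersects it re-aims parallel to that edge
# (direction = arctan2(dy, dx)) so it slides around the obstacle.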
# Behavior to check if the agent avoided obj
class DidAvoidedObj(Behaviour):
"""Logic to check if the agent avoided the objects."""
def __init__(self, name):
"""Initialize."""
super(DidAvoidedObj, self).__init__(name)
def setup(self, timeout, agent, item):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Check if it can sense the object and its direction."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)[0]
alpha = get_direction(self.agent.location, objects.location)
theta = self.agent.direction
angle_diff = np.abs(theta-alpha)
if angle_diff < np.pi/2:
return common.Status.FAILURE
else:
return common.Status.SUCCESS
except (AttributeError, IndexError):
return common.Status.SUCCESS
# Behavior to check if the agent can move
class CanMove(Behaviour):
"""Logic to check if the agent can move in the intended direction."""
def __init__(self, name):
"""Initialize."""
super(CanMove, self).__init__(name)
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
self.item = item
def initialise(self):
"""Pass."""
pass
def update(self):
"""Check if it can sense the object and its direction."""
try:
if (self.agent.moveable and self.agent.dead is not True):
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except (AttributeError, IndexError):
return common.Status.FAILURE
# Pheromone related behaviors
class DropPheromone(Behaviour):
"""Drop pheromone in the environment.
This is a communication behavior where a pheromone object
is placed in the environment which gives a direction to follow for the
agents.
"""
def __init__(self, name, attractive=True):
"""Initialize."""
super(DropPheromone, self).__init__(name)
self.attractive = attractive
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
self.item = item
# self.blackboard = blackboard.Client(name=str(agent.name))
# self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
self.blackboard = blackboard.Client(name='Pheromones')
self.blackboard.register_key(key='pheromones', access=common.Access.WRITE)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for dropping pheromone."""
try:
            # Initialize the pheromone object
            pheromone = Pheromones(
                id=self.agent.name, location=self.agent.location,
                radius=self.agent.radius, attractive=self.attractive,
                direction=self.agent.direction)
            pheromone.passable = self.attractive
            # Add the pheromone to the grids so it could be sensed by
            # other agents
            self.agent.model.grid.add_object_to_grid(
                pheromone.location, pheromone)
            self.blackboard.pheromones.append(pheromone)
            return common.Status.SUCCESS
except (IndexError, AttributeError):
return common.Status.FAILURE
# Pheromone related behaviors
class SensePheromone(Behaviour):
"""Sense pheromone in the environment.
This is a communication behavior where pheromones are sensed from
the environment and a direction is computed to follow.
"""
def __init__(self, name, attractive=True):
"""Initialize."""
super(SensePheromone, self).__init__(name)
        self.attractive = attractive
def setup(self, timeout, agent, item='Pheromones'):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for dropping pheromone."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)
repulsive = False
for obj in objects:
if obj.attractive is False:
repulsive = True
break
if not repulsive:
                # Compute the strength-weighted mean direction of the pheromones
angles = [[
np.sin(obj.direction), np.cos(obj.direction), obj.strength[obj.current_time]] for obj in objects]
sin, cos, weight = zip(*angles)
sin, cos, weight = np.array(sin), np.array(cos), np.array(weight)
direction = np.arctan2(sum(sin * weight), sum(cos * weight))
self.agent.direction = direction % (2 * np.pi)
else:
direction = self.agent.direction + np.pi/2
self.agent.direction = direction % (2 * np.pi)
return common.Status.SUCCESS
except (IndexError, AttributeError):
return common.Status.FAILURE
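# Math note: the attractive branch computes a strength-weighted circular
# mean, direction = arctan2(sum(w * sin(theta)), sum(w * cos(theta))), which
# averages headings correctly across the 0 / 2*pi wrap-around where a plain
# arithmetic mean of angles would fail.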
class PheromoneExists(Behaviour):
"""Check if pheromone exists at the location where agent is.
This is a communication behavior where pheromones are sensed from
the environment and a direction is computed to follow.
"""
def __init__(self, name):
"""Initialize."""
super(PheromoneExists, self).__init__(name)
def setup(self, timeout, agent, item='Pheromones'):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Logic for dropping pheromone."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)
objects = [obj for obj in objects if obj.id == self.agent.name]
            if len(objects) >= 1:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except (IndexError, AttributeError):
return common.Status.FAILURE
class IsAgentDead(Behaviour):
"""Check if agent is dead.
"""
def __init__(self, name):
"""Initialize."""
super(IsAgentDead, self).__init__(name)
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
self.item = item
def initialise(self):
"""Pass."""
pass
def update(self):
"""Check agent is dead"""
try:
if self.agent.dead:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except (IndexError, AttributeError):
return common.Status.FAILURE
class IsAttractivePheromone(Behaviour):
"""Check if the pheromone is attractive.
"""
def __init__(self, name):
"""Initialize."""
super(IsAttractivePheromone, self).__init__(name)
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Check if the pheromone is attractive or repulsive."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)
repulsive = False
for obj in objects:
if obj.attractive is False:
repulsive = True
break
if not repulsive:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except (IndexError, AttributeError):
return common.Status.FAILURE
class IsRepulsivePheromone(Behaviour):
"""Check if the pheromone is attractive.
"""
def __init__(self, name):
"""Initialize."""
super(IsRepulsivePheromone, self).__init__(name)
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
self.item = item
self.blackboard = blackboard.Client(name=str(agent.name))
self.blackboard.register_key(key='neighbourobj', access=common.Access.READ)
def initialise(self):
"""Pass."""
pass
def update(self):
"""Check if the pheromone is attractive or repulsive."""
try:
objects = ObjectsStore.find(
self.blackboard.neighbourobj, self.agent.shared_content,
self.item, self.agent.name)
# print('repusive pheroment', objects, objects[0].attractive)
repulsive = False
for obj in objects:
if obj.attractive is False:
repulsive = True
break
if repulsive:
return common.Status.SUCCESS
else:
return common.Status.FAILURE
except (IndexError, AttributeError):
return common.Status.FAILURE
# Dummy node
class DummyNode(Behaviour):
"""Dummy node.
BT node that always returns Success.
"""
def __init__(self, name):
"""Initialize."""
super(DummyNode, self).__init__(name)
def setup(self, timeout, agent, item=None):
"""Setup."""
self.agent = agent
self.item = item
def initialise(self):
"""Pass."""
pass
def update(self):
"""Nothing much to do."""
return common.Status.SUCCESS
```
#### File: swarms/lib/space.py
```python
import math
import numpy as np
import re
class Grid:
"""Grid class.
Class that defines grid strucutre having attribute
Width, height and grid size.
"""
# pylint: disable=too-many-instance-attributes
# Nine is reasonable in this grid class
def __init__(self, width, height, grid_size=10):
"""Constructors for grid.
Args:
width: total width
height: total height
grid_size: granularity of the size of grid
Attributes:
x_limit: x-axis length in both direction
y_limit: y-axis length in both direction
grid: dictionary object which value is adjacent points
of grid and its value is the grid name
grid_objects: dictionary object which value is the grid name
and its value is the list of environment objects
"""
self.width = width
self.height = height
self.x_limit = width / 2
self.y_limit = height / 2
self.grid_size = grid_size
self.grid = dict()
self.grid_reverse = dict()
self.grid_objects = dict()
# self.width_fix = int(self.x_limit % self.grid_size)
# self.height_fix = int(self.y_limit % self.grid_size)
# If the width or height is not comptiable with grid size
if self.x_limit % self.grid_size != 0 \
or self.y_limit % self.grid_size != 0:
print("Grid size invalid")
exit(1)
# Create list for x cordinate & y cordinate to create grid
list_xcords = np.arange(
-self.width / 2, self.width / 2, self.grid_size).tolist()
list_ycords = np.arange(
-self.height / 2, self.height / 2, self.grid_size).tolist()
indx = 1
for ycord in list_ycords:
for xcord in list_xcords:
x_1 = xcord
y_1 = ycord
x_2 = xcord + self.grid_size
y_2 = ycord + self.grid_size
self.grid[(x_1, y_1), (x_2, y_2)] = indx
self.grid_reverse[indx] = (x_1, y_1), (x_2, y_2)
self.grid_objects[indx] = []
indx += 1
self.grid_len = indx - 1
def modify_points(self, point):
"""Modify poitns if the location line in the grid line."""
x, y = point[0], point[1]
if point[0] % self.grid_size == 0:
x = point[0] + 1
if point[1] % self.grid_size == 0:
y = point[1] + 1
if point[0] >= self.x_limit:
x = point[0] - self.grid_size + 1
if point[1] >= self.y_limit:
y = point[1] - self.grid_size + 1
return (x, y)
def find_lowerbound(self, point):
"""Find the lower bound from the point."""
point = self.find_upperbound(point)
return (point[0] - self.grid_size, point[1] - self.grid_size)
def find_upperbound(self, point):
"""Find the upper bound from the point."""
point = self.modify_points(point)
return (point[0] + self.grid_size - 1 * (
point[0] % self.grid_size), point[1] + self.grid_size - 1 * (
point[1] % self.grid_size))
def find_grid(self, point):
"""Find the grid based on the point passed."""
grid_key = (self.find_lowerbound(point), self.find_upperbound(point))
try:
return grid_key, self.grid[grid_key]
except KeyError:
print('KeyError', 'No grid key for ', grid_key)
exit()
def get_horizontal_neighbours(self, center_grid, scale, width_scale):
"""Get the neighboring horizontal grids."""
valid_horizontal_start = (math.floor(
(center_grid - 1) / width_scale) * width_scale) + 1
valid_horizontal_end = math.ceil(
center_grid / width_scale) * width_scale
if(center_grid - scale) < valid_horizontal_start:
horizontal_start = valid_horizontal_start
else:
horizontal_start = center_grid - scale
if(center_grid + scale + 1) > valid_horizontal_end:
horizontal_end = valid_horizontal_end + 1
else:
horizontal_end = center_grid + scale + 1
horizontal_grid = list(range(horizontal_start, horizontal_end, 1))
return horizontal_grid
# Find the adjacent grid based on radius
def get_neighborhood(self, point, radius):
"""Get the neighboring grids."""
all_grid = []
center_grid_key, center_grid = self.find_grid(point)
if self.grid_size >= radius:
return [center_grid]
else:
scale = int(radius / self.grid_size)
width_scale = int(self.width / self.grid_size)
horizontal_grid = self.get_horizontal_neighbours(
center_grid, scale, width_scale)
vertical_grid = list(range(
center_grid - scale * width_scale, center_grid +
1 + scale * width_scale, width_scale))
h_v_grid = []
for grid in vertical_grid:
h_v_grid += self.get_horizontal_neighbours(
grid, scale, width_scale)
all_grid = h_v_grid + horizontal_grid
all_grid = [grid for grid in all_grid if grid > 0 and
grid <= self.grid_len]
return list(set(all_grid))
def add_object_to_grid(self, point, objects):
"""Add object to a given grid."""
grid_values = self.get_neighborhood(point, objects.radius)
# print('add object to grid',grid_values, objects)
for grid in grid_values:
# gridobjects = self.get_objects(None, grid)
# for gobject in gridobjects:
# if not re.match('.*Agent.*' , type(gobject).__name__):
# if gobject.deathable and re.match('.*Agent.*' , type(objects).__name__):
# objects.dead = True
# print(grid, objects)
self.grid_objects[grid].append(objects)
# Remove object to the given grid
def remove_object_from_grid(self, point, objects):
"""Remove object from the given grid."""
grid_values = self.get_neighborhood(point, objects.radius)
for grid in grid_values:
self.grid_objects[grid].remove(objects)
def move_object(self, point, objects, newpoint):
"""Move object from the give grid to new grid."""
grid_key, grid_value = self.find_grid(point)
new_grid_key, new_grid_value = self.find_grid(newpoint)
# print('move object', point, newpoint, grid_value, new_grid_value)
if grid_value != new_grid_value:
if re.match('.*Agent.*' , type(objects).__name__) and objects.dead:
return False
elif re.match('.*Agent.*' , type(objects).__name__) and not objects.dead:
# print(point, newpoint, grid_value, new_grid_value)
if self.check_grid_deathable_constraints(new_grid_value):
objects.dead = True
objects.moveable = False
self.remove_object_from_grid(point, objects)
self.add_object_to_grid(newpoint, objects)
return True
else:
if self.check_grid_objects_constraints(new_grid_value):
self.remove_object_from_grid(point, objects)
self.add_object_to_grid(newpoint, objects)
return True
else:
return False
else:
if self.check_grid_objects_constraints(new_grid_value):
self.remove_object_from_grid(point, objects)
self.add_object_to_grid(newpoint, objects)
return True
else:
return False
else:
return True
# Check limits for the environment boundary
def check_limits(self, i, d):
"""Check the location is valid."""
x, y = i
if x > (self.width / 2):
x = x - (x - self.x_limit) - 2
d = np.pi + d
elif x < (self.width / 2) * -1:
x = x - (x + self.x_limit) + 2
d = np.pi + d
if y > (self.height / 2):
y = y - (y - self.y_limit) - 2
d = np.pi + d
elif y < (self.height / 2) * -1:
y = y - (y + self.y_limit) + 2
d = np.pi + d
return ((int(x), int(y)), d % (2*np.pi))
def check_grid_objects_constraints(self, new_grid_value):
"""Check the constraints on the next location."""
# grid_key, grid_value = self.find_grid(source_obj.location)
# new_grid_key, new_grid_value = self.find_grid(next_loc)
passable = True
objects_in_next_grid = self.get_objects(None, new_grid_value)
for obj in objects_in_next_grid:
if not obj.passable:
passable = False
break
return passable
def check_grid_deathable_constraints(self, new_grid_value):
"""Check the constraints on the next location."""
# grid_key, grid_value = self.find_grid(source_obj.location)
# new_grid_key, new_grid_value = self.find_grid(next_loc)
dead = False
objects_in_next_grid = self.get_objects(None, new_grid_value)
# print('dead const', objects_in_next_grid)
for obj in objects_in_next_grid:
try:
if obj.deathable:
dead = True
break
except:
pass
return dead
# Using fancy search to find the object in the particular grid
def get_objects(self, object_name, grid_value):
"""Use fancy search to find objects in a grid."""
if object_name is not None:
return list(filter(
lambda x: type(x).__name__ == object_name,
self.grid_objects[grid_value]))
else:
return list(filter(
lambda x: type(x).__name__ != 'list',
self.grid_objects[grid_value]))
def get_objects_from_grid(self, object_name, point):
"""Get objects from grid given a location."""
grid_key, grid_value = self.find_grid(point)
return self.get_objects(object_name, grid_value)
def get_objects_from_list_of_grid(self, object_name, grid_list):
"""Get list of objects from grid list."""
object_list = []
for grid in grid_list:
object_list += self.get_objects(object_name, grid)
return object_list
```
#### File: swarms/utils/db.py
```python
import sys
import psycopg2 as pgsql
class Connect():
"""Class to connect DB."""
def __init__(
self, dbname, username, passwd, hostname, sport=5432,
racflag=False):
"""Constructor."""
self.dbname = dbname
self.username = username
self.passwd = <PASSWORD>
self.hostname = hostname
self.sport = sport
self.racflag = racflag
self.__name__ = 'Connect'
def tns_connect(self):
"""Connect with tnscon string."""
try:
connect = pgsql.connect(
database=self.dbname, user=self.username, password=<PASSWORD>,
host=self.hostname, port=self.sport)
except pgsql.DatabaseError:
print(
"Unexpected error:", sys.exc_info(), "Function name:",
self.__name__)
else:
return connect
class Execute():
"""Class to execute different oracle statements."""
def __init__(self, con):
"""Constructor."""
self.con = con
self.cursor = self.con.cursor()
def execute_statement(self, statement):
"""Execute pgsql statement."""
try:
self.cursor.execute(statement)
except pgsql.Error as e:
# error,=e.args
print("Code:", e.pgcode)
print("Message:", e.pgerror)
print(sys.exc_info(), "Function name:execute_statement")
self.con.rollback()
else:
self.con.commit()
return self.cursor
def execute_statement_bind(self, statement, bindvars):
"""Execute oracle statement using bind vars."""
try:
self.cursor.execute(statement, bindvars)
except pgsql.Error as e:
# error,= e.args
print("Code:", e.pgcode)
print("Message:", e.pgerror)
print(sys.exc_info(), "Function name:execute_statement_bind")
self.con.rollback()
else:
self.con.commit()
return bindvars['sn']
def execute_function(self, *args, **kwargs):
"""Execute a pl/sql function with variable args."""
funct_args = []
for a in args:
# print a
funct_args.append(a)
for k, v in kwargs.items():
print("%s =%s" % (k, v))
if k == "function_name":
functname = v
try:
print("Function name:", functname, "Function Args:", funct_args)
# logger.info("Procedure arguments:"+proc_args)
output = self.cursor.callproc(functname, funct_args)
# output = output.fetchall()
except pgsql.DatabaseError:
print("Function error", sys.exc_info())
return False
else:
self.con.commit()
return int(output)
def execute_proc(self, *args, **kwargs):
"""Execute a pl/sql procedure with variable args."""
proc_args = []
for a in args:
print(a)
proc_args.append(a)
for k, v in kwargs.items():
print("%s =%s" % (k, v))
if k == "proc_name":
procname = v
try:
print("Proc Args:", proc_args)
# logger.info("Procedure arguments:"+proc_args)
self.cursor.callproc(procname, proc_args)
except pgsql.DatabaseError:
print("Procedure error")
return False
else:
self.con.commit()
return True
def excute_spool(self, **kwargs):
"""Execute pl/sql spool."""
for k, v in kwargs.items():
if k == "bulkid":
# bulkid = v
pass
elif k == "spoolname":
# spoolname = v
pass
elif k == "query":
query = v
# stat = "SELECT "
output = self.execute_statement(query)
return output
def close(self):
"""Close the db cursor."""
self.cursor.close()
class Trimstrdb():
"""Preprocess the queries."""
@staticmethod
def trimdb(inputstr, trimlen=3999):
"""Trim query method.
Trips the query to 3999 character and replacs single quotes.
"""
trimstr = inputstr[0:trimlen]
trimstr = trimstr.replace("'", "''")
return trimstr
class Dbexecute():
"""Execute queries in DB."""
def __init__(self, conn):
"""Constructor.
It takes db connection as argument.
"""
self.conn = conn
self.trimstrobj = Trimstrdb()
def insert_experiment(
self, id, N, seed, expname, iter, width, height, grid):
"""Insert into experiment table."""
exestat = Execute(self.conn)
output = 0
try:
exestat.cursor.execute("""INSERT INTO experiment(id, agent_size,
randomseed, experiment_name, iteration, width, height,
grid_size) VALUES (
%s, %s, %s, %s, %s, %s, %s,
%s);""", (id, N, seed, expname, iter, width, height, grid))
output = exestat.cursor.execute(
"SELECT sn from experiment where id=" + "'" + str(id) +
"'")
output = exestat.cursor.fetchall()
self.conn.commit()
exestat.close()
except pgsql.Error:
print(
"Unexpected error function insert_experiment:", sys.exc_info())
return False
else:
return int(output[0][0])
def insert_experiment_simulation(
self, id, N, seed, expname, iter, width, height, grid, phenotype):
"""Insert into experiment table."""
exestat = Execute(self.conn)
output = 0
try:
exestat.cursor.execute("""INSERT INTO experiment(id, agent_size,
randomseed, experiment_name, iteration, width, height,
grid_size, phenotype) VALUES (
%s, %s, %s, %s, %s, %s, %s, %s, %s);""", (
id, N, seed, expname, iter, width, height, grid, phenotype)
)
output = exestat.cursor.execute(
"SELECT sn from experiment where id=" + "'" + str(id) +
"'")
output = exestat.cursor.fetchall()
self.conn.commit()
exestat.close()
except pgsql.Error:
print(
"Unexpected error function insert_experiment:", sys.exc_info())
return False
else:
return int(output[0][0])
def insert_experiment_details(self, data_list):
"""Insert into experiment_details table."""
exestat = Execute(self.conn)
data = data_list
try:
exestat.cursor.execute("""INSERT INTO experiment_details(exp_id,
agent_name, step, time_step, beta, fitness, diversity,
explore, forage, neighbours, genotype, phenotype, bt)
VALUES (
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);""", (
data[0], data[1], data[2], data[3], data[4], data[5],
data[6], data[7], data[8], data[9], data[10], data[11],
data[12])
)
# output = exestat.cursor.execute(
# "SELECT sn from session_info where id=" + "'" + session_id +
# "'")
# output = exestat.cursor.fetchall()
self.conn.commit()
exestat.close()
return True
except pgsql.Error:
print(
"Unexpected error function insert_experiment_details:",
sys.exc_info())
return False
# else:
# return int(output[0][0])
def insert_experiment_best(self, data_list):
"""Insert into experiment_best table."""
exestat = Execute(self.conn)
data = data_list
try:
exestat.cursor.execute("""INSERT INTO experiment_best(exp_id,
agent_name, heading, step, beta, fitness, diversity, explore,
forage, phenotype) VALUES (
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);""", (
data[0], data[1], data[2], data[3], data[4], data[5],
data[6], data[7], data[8], data[9])
)
self.conn.commit()
exestat.close()
return True
except pgsql.Error:
print(
"Unexpected error function insert_experiment_best:",
sys.exc_info())
return False
# else:
# return int(output[0][0])
def retrieve_info(self, query):
"""Reterive info from the db."""
cur = self.conn.cursor()
try:
cur.execute(query)
data = cur.fetchall()
self.conn.commit()
self.conn.close()
return data
except pgsql.Error:
print("Unexptected error function:", sys.exc_info())
return False
def update_table_info(self, query):
"""Update the table in db."""
exestat = Execute(self.conn)
try:
exestat.cursor.execute(query)
self.conn.commit()
exestat.close()
except pgsql.Error:
print("Unexpected error function update info:", sys.exc_info())
return False
else:
return True
def execute_query(self, query):
"""Execute a custom query."""
exestat = Execute(self.conn)
try:
exestat.cursor.execute(query)
self.conn.commit()
exestat.close()
except pgsql.Error:
print("Unexpected error function execute query:", sys.exc_info())
return False
else:
return True
```
#### File: swarm/test/test_db.py
```python
import unittest
from swarms.utils.db import Connect, Dbexecute
class TestGrid(unittest.TestCase):
"""Test calss for database."""
def setUp(self):
"""Set up required stuffs."""
self.dbname = 'swarm'
self.username = 'swarm'
self.passwd = '<PASSWORD>'
self.hostname = 'localhost'
self.connect = Connect(
self.dbname, self.username, self.passwd, self.hostname
)
def test_connection(self):
"""Test connection to db."""
# This will return connection object
tnsconnect = self.connect.tns_connect()
# Check if the connection is valid
self.assertEqual(1, tnsconnect.status)
# Check if its connected to the right db with right parameters
tns_parms = {
'host': 'localhost',
'krbsrvname': 'postgres',
'options': '',
'tty': '',
'dbname': 'swarm',
'target_session_attrs': 'any',
'sslmode': 'prefer',
'port': '5432',
'user': 'swarm',
'sslcompression': '1'}
self.assertDictEqual(tns_parms, tnsconnect.get_dsn_parameters())
def test_insert_experiment(self):
"""Test insert statement to db."""
tnsconnect = self.connect.tns_connect()
dbexec = Dbexecute(tnsconnect)
sn = dbexec.insert_experiment(20150101024)
self.assertEqual('int', type(sn).__name__)
# After insertion is done need to delete the values as well
retval = dbexec.execute_query("DELETE from experiment where sn=" + str(sn))
self.assertEqual(True, retval)
def test_insert_experiment_details(self):
"""Test insert statement for experiment details table.
Since this table has a foreign key from experiment table. First
we need to populate experiment table.
"""
tnsconnect = self.connect.tns_connect()
dbexec = Dbexecute(tnsconnect)
sn = dbexec.insert_experiment(20150101025)
data_list = [sn, 1, 45, 7, 0.99, 80, 78, 45, 2, 5, '[1, 0, 1 ,1, 0]', '<xml>', 'none']
retval = dbexec.insert_experiment_details(data_list)
self.assertEqual(True, retval)
# After insertion is done first need to delete in child table
retval = dbexec.execute_query(
"DELETE from experiment_details where exp_id=" + str(sn))
self.assertEqual(True, retval)
# After child records deleted, safely delete parent
retval = dbexec.execute_query(
"DELETE from experiment where sn=" + str(sn))
self.assertEqual(True, retval)
def test_insert_experiment_best(self):
"""Test insert statement for experiment best table.
Since this table has a foreign key from experiment table. First
we need to populate experiment table.
"""
tnsconnect = self.connect.tns_connect()
dbexec = Dbexecute(tnsconnect)
sn = dbexec.insert_experiment(20150101025)
data_list = [sn, 1, 'MEAN', 7, 0.99, 80, 78, 45, 2, '<xml>']
retval = dbexec.insert_experiment_best(data_list)
self.assertEqual(True, retval)
# After insertion is done first need to delete in child table
retval = dbexec.execute_query(
"DELETE from experiment_best where exp_id=" + str(sn))
self.assertEqual(True, retval)
# After child records deleted, safely delete parent
retval = dbexec.execute_query(
"DELETE from experiment where sn=" + str(sn))
self.assertEqual(True, retval)
def test_update_experiment(self):
"""Test update statement for experiment table.
Since this table to update the end time we first need to
populate the values.
"""
tnsconnect = self.connect.tns_connect()
dbexec = Dbexecute(tnsconnect)
sn = dbexec.insert_experiment(20150101025)
# Update end time
retval = dbexec.execute_query(
"UPDATE experiment \
set end_date=timezone('utc'::text, now()) where sn=" + str(sn))
self.assertEqual(True, retval)
retval = dbexec.execute_query(
"DELETE from experiment where sn=" + str(sn))
self.assertEqual(True, retval)
```
#### File: swarm/test/test_full_bt.py
```python
from swarms.lib.agent import Agent
# from swarms.objects import Sites
from swarms.lib.model import Model
from swarms.lib.time import SimultaneousActivation
from swarms.lib.space import Grid
from unittest import TestCase
from swarms.utils.bt import BTConstruct
import py_trees
from py_trees import Blackboard
import numpy as np
# import xml.etree.ElementTree as ET
from swarms.behaviors.sbehaviors import ( # noqa: F401
IsCarryable, IsSingleCarry, SingleCarry,
NeighbourObjects, IsMultipleCarry, IsInPartialAttached,
InitiateMultipleCarry, IsEnoughStrengthToCarry,
Move, GoTo, IsMotionTrue, RandomWalk, IsMoveable,
MultipleCarry, Away, Towards, DoNotMove
)
from ponyge.operators.initialisation import initialisation
from ponyge.fitness.evaluation import evaluate_fitness
from ponyge.operators.crossover import crossover
from ponyge.operators.mutation import mutation
from ponyge.operators.replacement import replacement
from ponyge.operators.selection import selection
# Global variables for width and height
width = 100
height = 100
class GEBTAgent(Agent):
""" An minimalistic GE agent """
def __init__(self, name, model):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 3
# self.exchange_time = model.random.randint(2, 4)
# This doesn't help. Maybe only perform genetic operations when
# an agents meet 10% of its total population
# """
self.operation_threshold = 2
self.genome_storage = []
# Define a BTContruct object
self.bt = BTConstruct(None, self)
self.blackboard = Blackboard()
self.blackboard.shared_content = dict()
self.shared_content = dict()
# Grammatical Evolution part
from ponyge.algorithm.parameters import Parameters
parameter = Parameters()
parameter_list = ['--parameters', 'swarm.txt']
# Comment when different results is desired.
# Else set this for testing purpose
parameter.params['RANDOM_SEED'] = name
# np.random.randint(1, 99999999)
parameter.params['POPULATION_SIZE'] = self.operation_threshold // 2
parameter.set_params(parameter_list)
self.parameter = parameter
individual = initialisation(self.parameter, 1)
individual = evaluate_fitness(individual, self.parameter)
self.individual = individual
self.bt.xmlstring = self.individual[0].phenotype
self.bt.construct()
def step(self):
# """
# Doing this is equivalent of using behavior tree with four classes
# in this order, Move, HasMoney, NeighbourCondition, ShareMoney
# self.move()
# execute BT
py_trees.logging.level = py_trees.logging.Level.DEBUG
# output = py_trees.display.ascii_tree(self.bt.behaviour_tree.root)
# print ('bt tree', output, self.individual[0].phenotype)
self.bt.behaviour_tree.tick()
cellmates = self.model.grid.get_objects_from_grid(
'GEBTAgent', self.location)
# print (cellmates)
if len(self.genome_storage) >= self.operation_threshold:
self.exchange_chromosome(cellmates)
self.bt.xmlstring = self.individual[0].phenotype
self.bt.construct()
if len(cellmates) > 1:
self.store_genome(cellmates)
def advance(self):
pass
def move(self):
new_location = ()
x = int(self.location[0] + np.cos(self.direction) * self.speed)
y = int(self.location[1] + np.sin(self.direction) * self.speed)
new_location, direction = self.model.grid.check_limits(
(x, y), self.direction)
self.model.grid.move_object(self.location, self, new_location)
self.location = new_location
self.direction = direction
def store_genome(self, cellmates):
# cellmates.remove(self)
self.genome_storage += [agent.individual[0] for agent in cellmates]
def exchange_chromosome(self, cellmates):
print('from exchange', self.name)
individuals = self.genome_storage
parents = selection(self.parameter, individuals)
cross_pop = crossover(self.parameter, parents)
new_pop = mutation(self.parameter, cross_pop)
new_pop = evaluate_fitness(new_pop, self.parameter)
individuals = replacement(self.parameter, new_pop, individuals)
individuals.sort(reverse=False)
self.individual = [individuals[0]]
self.genome_storage = []
class GEEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(GEEnvironmentModel, self).__init__(seed=None)
else:
super(GEEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
for i in range(self.num_agents):
a = GEBTAgent(i, self)
self.schedule.add(a)
# Add the agent to a random grid cell
# x = self.random.randint(
# -self.grid.width / 2, self.grid.width / 2)
x = 0
# y = self.random.randint(
# -self.grid.height / 2, self.grid.height / 2)
y = 0
a.location = (x, y)
self.grid.add_object_to_grid((x, y), a)
a.operation_threshold = 2 # self.num_agents // 10
def step(self):
self.schedule.step()
class TestGEBTSmallGrid(TestCase):
def setUp(self):
self.environment = GEEnvironmentModel(10, 100, 100, 10, 123)
for i in range(2):
self.environment.step()
# for agent in self.environment.schedule.agents:
# self.target_phenotype = agent.individual[0].phenotype
# self.target_fitness = agent.individual[0].fitness
# print(
# 'Step', i, agent.name, agent.individual[0].fitness,
# agent.location)
# def test_target_string(self):
# self.assertEqual('<?xml version="1.0" encoding="UTF-8"?><Sequence><Sequence><Sequence><cond>IsMoveable</cond><cond>IsMupltipleCarry</cond><act>RandomWalk</act></Sequence> <Sequence><cond>IsMotionTrue</cond><cond>IsMoveable</cond><cond>IsMotionTrue</cond><act>SingleCarry</act></Sequence></Sequence> <Selector><cond>IsMotionTrue</cond><cond>IsCarryable</cond><cond>IsMupltipleCarry</cond><act>GoTo</act></Selector></Sequence>', self.target_phenotype)
def test_one_traget(self):
self.assertEqual(14.285714285714285, self.environment.schedule.agents[0].individual[0].fitness)
```
#### File: swarm/test/test_ge_stringmatch.py
```python
from unittest import TestCase
from swarms.lib.agent import Agent
from swarms.lib.model import Model
from swarms.lib.time import SimultaneousActivation # RandomActivation, StagedActivation
from swarms.lib.space import Grid
from ponyge.operators.initialisation import initialisation
from ponyge.fitness.evaluation import evaluate_fitness
from ponyge.operators.crossover import crossover
from ponyge.operators.mutation import mutation
from ponyge.operators.replacement import replacement
from ponyge.operators.selection import selection
import numpy as np
# Global variables for width and height
width = 100
height = 100
class GEAgent(Agent):
""" An minimalistic GE agent """
def __init__(self, name, model):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 3
# self.exchange_time = model.random.randint(2, 4)
# This doesn't help. Maybe only perform genetic operations when
# an agents meet 10% of its total population
# """
self.operation_threshold = 10
self.genome_storage = []
# Grammatical Evolution part
from ponyge.algorithm.parameters import Parameters
parameter = Parameters()
# list_params_files = ['string_match.txt', 'regression.txt', 'classification.txt']
parameter_list = ['--parameters', 'string_match_dist.txt']
parameter.params['RANDOM_SEED'] = 1234 #np.random.randint(1, 99999999)
parameter.params['POPULATION_SIZE'] = self.operation_threshold // 2
parameter.set_params(parameter_list)
self.parameter = parameter
individual = initialisation(self.parameter, 1)
individual = evaluate_fitness(individual, self.parameter)
self.individual = individual
def step(self):
# """
# Doing this is equivalent of using behavior tree with four classes
# in this order, Move, HasMoney, NeighbourCondition, ShareMoney
self.move()
cellmates = self.model.grid.get_objects_from_grid('GEAgent', self.location)
if len(self.genome_storage) >= self.operation_threshold:
self.exchange_chromosome(cellmates)
if len(cellmates) > 1:
self.store_genome(cellmates)
def advance(self):
pass
def move(self):
new_location = ()
x = int(self.location[0] + np.cos(self.direction) * self.speed)
y = int(self.location[1] + np.sin(self.direction) * self.speed)
new_location, direction = self.model.grid.check_limits((x, y), self.direction)
self.model.grid.move_object(self.location, self, new_location)
self.location = new_location
self.direction = direction
def store_genome(self, cellmates):
# cellmates.remove(self)
self.genome_storage += [agent.individual[0] for agent in cellmates]
def exchange_chromosome(self, cellmates):
individuals = self.genome_storage
parents = selection(self.parameter, individuals)
cross_pop = crossover(self.parameter, parents)
new_pop = mutation(self.parameter, cross_pop)
new_pop = evaluate_fitness(new_pop, self.parameter)
individuals = replacement(self.parameter, new_pop, individuals)
individuals.sort(reverse=True)
self.individual = [individuals[0]]
self.genome_storage = []
class TestGESmallGrid(TestCase):
def setUp(self):
self.environment = GEEnvironmentModel(100, 100, 100, 10, 123)
for i in range(200):
self.environment.step()
self.one_target = False
for agent in self.environment.schedule.agents:
self.target = agent.individual[0].phenotype
if agent.individual[0].phenotype == 'Hello':
self.one_target = True
def test_target_string(self):
self.assertEqual(self.target, 'Hello')
def test_one_traget(self):
self.assertEqual(self.one_target, True)
class GEEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(GEEnvironmentModel, self).__init__(seed=None)
else:
super(GEEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
for i in range(self.num_agents):
a = GEAgent(i, self)
self.schedule.add(a)
# Add the agent to a random grid cell
x = self.random.randint(-self.grid.width / 2, self.grid.width / 2)
y = self.random.randint(-self.grid.height / 2, self.grid.height / 2)
a.location = (x, y)
self.grid.add_object_to_grid((x, y), a)
a.operation_threshold = self.num_agents // 10
# exit()
def step(self):
self.schedule.step()
```
#### File: swarm/test/test_swarm_new_behaviors.py
```python
from unittest import TestCase
from swarms.lib.agent import Agent
from swarms.lib.model import Model
from swarms.lib.time import SimultaneousActivation
from swarms.lib.space import Grid
from swarms.behaviors.sbehaviors import (
GoTo, RandomWalk, NeighbourObjects,
Away, Towards, DoNotMove, Move, AvoidSObjects
)
from swarms.behaviors.scbehaviors import (
AvoidTrapObstaclesBehaviour, NewMoveTowards, NewExplore,
NewMoveAway, Explore
)
from swarms.lib.objects import Sites, Hub, Obstacles, Traps
import py_trees
from py_trees import Blackboard
import numpy as np
from py_trees.meta import failure_is_success
# Class to tets Passable attribute for agents
class SwarmAgentGoTo(Agent):
""" An minimalistic behavior tree for swarm agent implementing goto
behavior
"""
def __init__(self, name, model):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 5
self.moveable = True
self.shared_content = dict()
root = py_trees.composites.Sequence("Sequence")
self.blackboard = Blackboard()
self.blackboard.shared_content = dict()
self.shared_content[type(model.target).__name__] = {model.target}
low = GoTo('1')
low.setup(0, self, type(model.target).__name__)
high = Move('2')
high.setup(0, self)
root.add_children([low, high])
self.behaviour_tree = py_trees.trees.BehaviourTree(root)
def step(self):
self.behaviour_tree.tick()
class GoToSwarmEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(GoToSwarmEnvironmentModel, self).__init__(seed=None)
else:
super(GoToSwarmEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
self.obstacle = Obstacles(id=2, location=(9, 9), radius=5)
self.target = Sites(id=1, location=(45, 45), radius=5, q_value=0.5)
self.grid.add_object_to_grid(self.target.location, self.target)
self.grid.add_object_to_grid(self.obstacle.location, self.obstacle)
for i in range(self.num_agents):
a = SwarmAgentGoTo(i, self)
self.schedule.add(a)
x = -45
y = -30
a.location = (x, y)
a.direction = -2.3561944901923448
self.grid.add_object_to_grid((x, y), a)
self.agent = a
def step(self):
self.schedule.step()
class TestGoToSwarmSmallGrid(TestCase):
def setUp(self):
self.environment = GoToSwarmEnvironmentModel(1, 100, 100, 10, 123)
for i in range(25):
self.environment.step()
# print(i, self.environment.agent.location)
def test_agent_path(self):
self.assertEqual(self.environment.agent.location, (-1, 7))
# Class to tets the avoid behavior for the agent
class SwarmAgentAvoid(Agent):
""" An minimalistic behavior tree for swarm agent implementing goto
behavior
"""
def __init__(self, name, model):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 5
self.moveable = True
self.carryable = False
self.shared_content = dict()
root = py_trees.composites.Sequence("Sequence")
self.blackboard = Blackboard()
self.blackboard.shared_content = dict()
self.shared_content[type(model.target).__name__] = {model.target}
low = GoTo('1')
low.setup(0, self, type(model.target).__name__)
medium = failure_is_success(AvoidTrapObstaclesBehaviour)('2')
medium.setup(0, self, None)
high = Move('3')
high.setup(0, self)
# root.add_children([low, medium, med, high])
root.add_children([low, medium, high])
self.behaviour_tree = py_trees.trees.BehaviourTree(root)
# py_trees.display.print_ascii_tree(root)
# py_trees.logging.level = py_trees.logging.Level.DEBUG
def step(self):
self.behaviour_tree.tick()
class AvoidSwarmEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(AvoidSwarmEnvironmentModel, self).__init__(seed=None)
else:
super(AvoidSwarmEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
self.obstacle = Obstacles(id=2, location=(9, 9), radius=5)
self.target = Sites(id=1, location=(45, 45), radius=5, q_value=0.5)
self.grid.add_object_to_grid(self.target.location, self.target)
self.grid.add_object_to_grid(self.obstacle.location, self.obstacle)
for i in range(self.num_agents):
a = SwarmAgentAvoid(i, self)
self.schedule.add(a)
x = -30
y = -41
a.location = (x, y)
a.direction = -2.3561944901923448
self.grid.add_object_to_grid((x, y), a)
self.agent = a
def step(self):
self.schedule.step()
class TestAvoidSwarmSmallGrid(TestCase):
def setUp(self):
self.environment = AvoidSwarmEnvironmentModel(1, 100, 100, 10, 123)
for i in range(77):
self.environment.step()
# print(i, self.environment.agent.location)
def test_agent_path(self):
self.assertEqual(self.environment.agent.location, (45, 45))
def test_agent_grid(self):
self.assertIsInstance(
self.environment.grid.get_objects_from_grid('Sites',self.environment.agent.location)[0], Sites)
# Class to tets agent dead in trap
class SwarmAgentTrap(Agent):
""" An minimalistic behavior tree for swarm agent implementing goto
behavior
"""
def __init__(self, name, model):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 5
self.moveable = True
self.shared_content = dict()
root = py_trees.composites.Sequence("Sequence")
self.blackboard = Blackboard()
self.blackboard.shared_content = dict()
self.shared_content[type(model.target).__name__] = {model.target}
low = GoTo('1')
low.setup(0, self, type(model.target).__name__)
high = Move('2')
high.setup(0, self)
root.add_children([low, high])
self.behaviour_tree = py_trees.trees.BehaviourTree(root)
def step(self):
self.behaviour_tree.tick()
class TrapSwarmEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(TrapSwarmEnvironmentModel, self).__init__(seed=None)
else:
super(TrapSwarmEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
self.target = Traps(id=1, location=(20,20), radius=8)
self.grid.add_object_to_grid(self.target.location, self.target)
for i in range(self.num_agents):
a = SwarmAgentTrap(i, self)
self.schedule.add(a)
x = -45
y = -45
a.location = (x, y)
a.direction = -2.3561944901923448
self.grid.add_object_to_grid((x, y), a)
self.agent = a
def step(self):
self.schedule.step()
class TestTrapSwarmSmallGrid(TestCase):
def setUp(self):
self.environment = TrapSwarmEnvironmentModel(1, 100, 100, 10, 123)
for i in range(50):
self.environment.step()
# print(i, self.environment.agent.location, self.environment.agent.dead)
def test_agent_path(self):
self.assertEqual(self.environment.agent.location, (22, 20))
def test_agent_dead(self):
self.assertEqual(self.environment.agent.dead, True)
# Class to tets the avoid trap behavior for the agent
class SwarmAgentAvoidTrap(Agent):
""" An minimalistic behavior tree for swarm agent implementing goto
behavior
"""
def __init__(self, name, model):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 5
self.moveable = True
self.carryable = False
self.shared_content = dict()
root = py_trees.composites.Sequence("Sequence")
self.blackboard = Blackboard()
self.blackboard.shared_content = dict()
self.shared_content[type(model.target).__name__] = {model.target}
low = GoTo('1')
low.setup(0, self, type(model.target).__name__)
medium = failure_is_success(AvoidTrapObstaclesBehaviour)('2')
medium.setup(0, self)
high = Move('3')
high.setup(0, self)
# root.add_children([low, medium, med, high])
root.add_children([low, medium, high])
self.behaviour_tree = py_trees.trees.BehaviourTree(root)
# py_trees.display.print_ascii_tree(root)
# py_trees.logging.level = py_trees.logging.Level.DEBUG
def step(self):
self.behaviour_tree.tick()
class AvoidTrapSwarmEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(AvoidTrapSwarmEnvironmentModel, self).__init__(seed=None)
else:
super(AvoidTrapSwarmEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
self.trap = Traps(id=2, location=(9, 9), radius=5)
self.target = Sites(id=1, location=(45, 45), radius=5, q_value=0.5)
self.grid.add_object_to_grid(self.target.location, self.target)
self.grid.add_object_to_grid(self.trap.location, self.trap)
for i in range(self.num_agents):
a = SwarmAgentAvoidTrap(i, self)
self.schedule.add(a)
x = -45
y = -45
a.location = (x, y)
a.direction = -2.3561944901923448
self.grid.add_object_to_grid((x, y), a)
self.agent = a
def step(self):
self.schedule.step()
class TestAvoidTrapSwarmSmallGrid(TestCase):
def setUp(self):
self.environment = AvoidTrapSwarmEnvironmentModel(1, 100, 100, 10, 123)
for i in range(120):
self.environment.step()
# print(i, self.environment.agent.location, self.environment.agent.dead)
def test_agent_path(self):
self.assertEqual(self.environment.agent.location, (45, 45))
def test_agent_goal(self):
self.assertIsInstance(
self.environment.grid.get_objects_from_grid(
'Sites',self.environment.agent.location)[0], Sites)
# Class to tets the avoid trap behavior for the agent
class SwarmAgentAvoidTrapNew(Agent):
""" An minimalistic behavior tree for swarm agent implementing goto
behavior
"""
def __init__(self, name, model):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 5
self.moveable = True
self.carryable = False
self.shared_content = dict()
root = py_trees.composites.Sequence("Sequence")
self.blackboard = Blackboard()
self.blackboard.shared_content = dict()
self.shared_content[type(model.target).__name__] = {model.target}
low = NewMoveTowards('1')
low.setup(0, self, type(model.target).__name__)
# root.add_children([low, medium, med, high])
root.add_children([low])
self.behaviour_tree = py_trees.trees.BehaviourTree(root)
# py_trees.display.print_ascii_tree(root)
# py_trees.logging.level = py_trees.logging.Level.DEBUG
def step(self):
self.behaviour_tree.tick()
class AvoidTrapNewSwarmEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(AvoidTrapNewSwarmEnvironmentModel, self).__init__(seed=None)
else:
super(AvoidTrapNewSwarmEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
self.trap = Traps(id=2, location=(9, 9), radius=5)
self.target = Sites(id=1, location=(45, 45), radius=5, q_value=0.5)
self.grid.add_object_to_grid(self.target.location, self.target)
self.grid.add_object_to_grid(self.trap.location, self.trap)
for i in range(self.num_agents):
a = SwarmAgentAvoidTrapNew(i, self)
self.schedule.add(a)
x = -45
y = -45
a.location = (x, y)
a.direction = -2.3561944901923448
self.grid.add_object_to_grid((x, y), a)
self.agent = a
def step(self):
self.schedule.step()
class TestAvoidTrapNewSwarmSmallGrid(TestCase):
def setUp(self):
self.environment = AvoidTrapNewSwarmEnvironmentModel(1, 100, 100, 10, 123)
for i in range(120):
self.environment.step()
# print(i, self.environment.agent.location, self.environment.agent.dead)
def test_agent_path(self):
self.assertEqual(self.environment.agent.location, (45, 45))
def test_agent_goal(self):
self.assertIsInstance(
self.environment.grid.get_objects_from_grid(
'Sites',self.environment.agent.location)[0], Sites)
# Class to tets the avoid trap behavior for the agent
class SwarmAgentExploreNew(Agent):
""" An minimalistic behavior tree for swarm agent implementing goto
behavior
"""
def __init__(self, name, model):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 5
self.moveable = True
self.carryable = False
self.shared_content = dict()
root = py_trees.composites.Sequence("Sequence")
self.blackboard = Blackboard()
self.blackboard.shared_content = dict()
self.shared_content[type(model.target).__name__] = {model.target}
low = NewExplore('1')
low.setup(0, self)
# root.add_children([low, medium, med, high])
root.add_children([low])
self.behaviour_tree = py_trees.trees.BehaviourTree(root)
# py_trees.display.print_ascii_tree(root)
# py_trees.logging.level = py_trees.logging.Level.DEBUG
def step(self):
self.behaviour_tree.tick()
class ExploreNewSwarmEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(ExploreNewSwarmEnvironmentModel, self).__init__(seed=None)
else:
super(ExploreNewSwarmEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
self.obstacle = Obstacles(id=2, location=(9, 9), radius=5)
self.target = Sites(id=1, location=(45, 45), radius=5, q_value=0.5)
self.grid.add_object_to_grid(self.target.location, self.target)
self.grid.add_object_to_grid(self.obstacle.location, self.obstacle)
for i in range(self.num_agents):
a = SwarmAgentExploreNew(i, self)
self.schedule.add(a)
x = 25
y = 25
a.location = (x, y)
a.direction = -2.3561944901923448
self.grid.add_object_to_grid((x, y), a)
self.agent = a
def step(self):
self.schedule.step()
class TestExploreNewSwarmSmallGrid(TestCase):
def setUp(self):
self.environment = ExploreNewSwarmEnvironmentModel(1, 100, 100, 10, 123)
for i in range(50):
self.environment.step()
# print(i, self.environment.agent.location)
def test_agent_path(self):
self.assertEqual(self.environment.agent.location, (34, -33))
# Class to tets the avoid trap behavior for the agent
class SwarmAgentMoveAway(Agent):
""" An minimalistic behavior tree for swarm agent implementing
move away behavior.
"""
def __init__(self, name, model):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 5
self.moveable = True
self.carryable = False
self.shared_content = dict()
root = py_trees.composites.Sequence("Sequence")
self.blackboard = Blackboard()
self.blackboard.shared_content = dict()
self.shared_content[type(model.target).__name__] = {model.target}
low = NewMoveAway('1')
# low = NewMoveTowards('1')
low.setup(0, self, type(model.target).__name__)
# root.add_children([low, medium, med, high])
root.add_children([low])
self.behaviour_tree = py_trees.trees.BehaviourTree(root)
# py_trees.display.print_ascii_tree(root)
# py_trees.logging.level = py_trees.logging.Level.DEBUG
def step(self):
self.behaviour_tree.tick()
class MoveAwaySwarmEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(MoveAwaySwarmEnvironmentModel, self).__init__(seed=None)
else:
super(MoveAwaySwarmEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
self.obstacle = Obstacles(id=2, location=(9, 9), radius=5)
self.target = Sites(id=1, location=(45, 45), radius=5, q_value=0.5)
self.grid.add_object_to_grid(self.target.location, self.target)
self.grid.add_object_to_grid(self.obstacle.location, self.obstacle)
for i in range(self.num_agents):
a = SwarmAgentMoveAway(i, self)
self.schedule.add(a)
x = 35
y = 35
a.location = (x, y)
a.direction = -2.3561944901923448
self.grid.add_object_to_grid((x, y), a)
self.agent = a
def step(self):
self.schedule.step()
class TestMoveAwaySwarmSmallGrid(TestCase):
def setUp(self):
self.environment = MoveAwaySwarmEnvironmentModel(1, 100, 100, 10, 123)
for i in range(75):
self.environment.step()
# print(i, self.environment.agent.location, self.environment.agent.dead)
def test_agent_path(self):
self.assertEqual(self.environment.agent.location, (-48, -48))
class SwarmAgentExplore(Agent):
""" An minimalistic behavior tree for swarm agent implementing goto
behavior
"""
def __init__(self, name, model):
super().__init__(name, model)
self.location = ()
self.direction = model.random.rand() * (2 * np.pi)
self.speed = 2
self.radius = 5
self.moveable = True
self.carryable = False
self.shared_content = dict()
root = py_trees.composites.Sequence("Sequence")
self.blackboard = Blackboard()
self.blackboard.shared_content = dict()
self.shared_content[type(model.target).__name__] = {model.target}
low = Explore('1')
low.setup(0, self)
# root.add_children([low, medium, med, high])
root.add_children([low])
self.behaviour_tree = py_trees.trees.BehaviourTree(root)
# py_trees.display.print_ascii_tree(root)
# py_trees.logging.level = py_trees.logging.Level.DEBUG
def step(self):
self.behaviour_tree.tick()
class ExploreSwarmEnvironmentModel(Model):
""" A environemnt to model swarms """
def __init__(self, N, width, height, grid=10, seed=None):
if seed is None:
super(ExploreSwarmEnvironmentModel, self).__init__(seed=None)
else:
super(ExploreSwarmEnvironmentModel, self).__init__(seed)
self.num_agents = N
self.grid = Grid(width, height, grid)
self.schedule = SimultaneousActivation(self)
self.obstacle = Obstacles(id=2, location=(9, 9), radius=15)
self.target = Sites(id=1, location=(45, 45), radius=5, q_value=0.5)
self.grid.add_object_to_grid(self.target.location, self.target)
self.grid.add_object_to_grid(self.obstacle.location, self.obstacle)
for i in range(self.num_agents):
a = SwarmAgentExplore(i, self)
self.schedule.add(a)
x = -35
y = -35
a.location = (x, y)
a.direction = -2.3561944901923448
self.grid.add_object_to_grid((x, y), a)
self.agent = a
def step(self):
self.schedule.step()
class TestExploreSwarmSmallGrid(TestCase):
def setUp(self):
self.environment = ExploreSwarmEnvironmentModel(1, 100, 100, 10, 123)
for i in range(100):
self.environment.step()
# print(i, self.environment.agent.location)
def test_agent_path(self):
self.assertEqual(self.environment.agent.location, (15, -11))
``` |
{
"source": "aadesousa/2048-AI",
"score": 3
} |
#### File: aadesousa/2048-AI/puzzle.py
```python
import time
import random
from tkinter import Frame, Label, CENTER
import logic
import constants as c
from threading import Thread
class GameGrid(Frame):
def __init__(self):
Frame.__init__(self)
self.grid()
self.master.title('2048')
self.commands = {c.KEY_UP: logic.up, c.KEY_DOWN: logic.down,
c.KEY_LEFT: logic.left, c.KEY_RIGHT: logic.right,
c.KEY_UP_ALT: logic.up, c.KEY_DOWN_ALT: logic.down,
c.KEY_LEFT_ALT: logic.left,
c.KEY_RIGHT_ALT: logic.right}
# self.gamelogic = gamelogic
self.grid_cells = []
self.init_grid()
self.init_matrix()
self.update_grid_cells()
self.after(0, self.d)
self.mainloop()
def sx(self, m):
return (sorted([item for sublist in m for item in sublist], reverse = True))[0]
def d(self):
while logic.game_state(self.matrix) != 'lose':
#for x in range(100000):
mlist = ["w", "s", "a", "d"]
dic= {0 : logic.up(self.matrix)[0],
1 : logic.down(self.matrix)[0],
2 : logic.left(self.matrix)[0],
3 : logic.right(self.matrix)[0]}
up = logic.up(self.matrix)[0]
down = logic.down(self.matrix)[0]
left = logic.left(self.matrix)[0]
right = logic.right(self.matrix)[0]
actt=[self.sx(up), self.sx(down), self.sx(left), self.sx(right)]
max_val = max(actt)
maxact=[i for i, x in enumerate(actt) if x == max_val]
acttt= []
#time.sleep(1)
for maxx in maxact:
if logic.game_state(dic[maxx]) != 'lose':
acttt.append(maxact.index(maxx))
#max_val = max(act)
#actt = [i for i, x in enumerate(act) if x == max_val]
if len(acttt) > 0:
self.key_down(mlist[random.choice(acttt)])
elif len(actt) == 0:
self.key_down(random.choice(mlist))
#time.sleep(.5)
if logic.game_state(dic[0]) == 'lose' and logic.game_state(dic[1]) == 'lose' and logic.game_state(dic[2]) == 'lose' and logic.game_state(dic[3]) == 'lose':
logic.new_game(4)
def init_grid(self):
background = Frame(self, bg=c.BACKGROUND_COLOR_GAME,
width=c.SIZE, height=c.SIZE)
background.grid()
for i in range(c.GRID_LEN):
grid_row = []
for j in range(c.GRID_LEN):
cell = Frame(background, bg=c.BACKGROUND_COLOR_CELL_EMPTY,
width=c.SIZE / c.GRID_LEN,
height=c.SIZE / c.GRID_LEN)
cell.grid(row=i, column=j, padx=c.GRID_PADDING,
pady=c.GRID_PADDING)
t = Label(master=cell, text="",
bg=c.BACKGROUND_COLOR_CELL_EMPTY,
justify=CENTER, font=c.FONT, width=5, height=2)
t.grid()
grid_row.append(t)
self.grid_cells.append(grid_row)
def gen(self):
return random.randint(0, c.GRID_LEN - 1)
def init_matrix(self):
self.matrix = logic.new_game(4)
self.history_matrixs = list()
self.matrix = logic.add_two(self.matrix)
self.matrix = logic.add_two(self.matrix)
def update_grid_cells(self):
for i in range(c.GRID_LEN):
for j in range(c.GRID_LEN):
new_number = self.matrix[i][j]
if new_number == 0:
self.grid_cells[i][j].configure(
text="", bg=c.BACKGROUND_COLOR_CELL_EMPTY)
else:
self.grid_cells[i][j].configure(text=str(
new_number), bg=c.BACKGROUND_COLOR_DICT[new_number],
fg=c.CELL_COLOR_DICT[new_number])
self.update_idletasks()
def key_down(self, event):
key = repr(event)
if key == c.KEY_BACK and len(self.history_matrixs) > 1:
self.matrix = self.history_matrixs.pop()
self.update_grid_cells()
print('back on step total step:', len(self.history_matrixs))
elif key in self.commands:
self.matrix, done = self.commands[repr(event)](self.matrix)
if done:
self.matrix = logic.add_two(self.matrix)
# record last move
self.history_matrixs.append(self.matrix)
self.update_grid_cells()
return self.matrix
done = False
if logic.game_state(self.matrix) == 'win':
self.grid_cells[1][1].configure(
text="You", bg=c.BACKGROUND_COLOR_CELL_EMPTY)
self.grid_cells[1][2].configure(
text="Win!", bg=c.BACKGROUND_COLOR_CELL_EMPTY)
if logic.game_state(self.matrix) == 'lose':
self.grid_cells[1][1].configure(
text="You", bg=c.BACKGROUND_COLOR_CELL_EMPTY)
self.grid_cells[1][2].configure(
text="Lose!", bg=c.BACKGROUND_COLOR_CELL_EMPTY)
def generate_next(self):
index = (self.gen(), self.gen())
while self.matrix[index[0]][index[1]] != 0:
index = (self.gen(), self.gen())
self.matrix[index[0]][index[1]] = 2
gamegrid = GameGrid()
#mlist= ["w", "a", "s", "d"]
#key_down(mlist[random.randint(0, 3)])
``` |
{
"source": "aadharna/GVGAI_GYM",
"score": 3
} |
#### File: gvgai/gvgai/__init__.py
```python
import re
import os
from gym.envs.registration import register
from gvgai.client.gvgai_client import GVGAIClient
dir = os.path.dirname(__file__)
gamesPath = os.path.normpath(os.path.join(dir, '../../../games'))
games = os.listdir(gamesPath)
def get_games_path():
return gamesPath
for game in games:
gamePath = os.path.join(gamesPath, game)
if (os.path.isdir(gamePath)):
game_dir_regex = '(?P<name>.+)_v(?P<version>\d+)'
game_parts = re.search(game_dir_regex, game)
name = game_parts.group('name')
version = game_parts.group('version')
# Register all the levels which are in the directory
level_filenames = [lvl for lvl in os.listdir(gamePath) if 'lvl' in lvl]
for level_filename in level_filenames:
level = level_filename.split('.')[0]
level = level.replace('_', '-')
register(
id=f'gvgai-{level}-v{version}',
entry_point='gvgai.gym:GVGAI_Env',
kwargs={'environment_id': level},
# max_episode_steps=2000
)
level = f'{name}-custom'
# Register the custom environment so any levels can be passed in
register(
id=f'gvgai-{level}-v{version}',
entry_point='gvgai.gym:GVGAI_Env',
kwargs={'environment_id': level},
# max_episode_steps=2000
)
```
#### File: gvgai/utils/level_data_generator.py
```python
import logging
import random
class LevelGenerator():
"""
Generates random level data
"""
def __init__(self, name):
self._name = name
self._logger = logging.getLogger(name)
def generate(self, num_levels, config):
raise NotImplemented
class SokobanGenerator(LevelGenerator):
"""
wwwwwwwwwwwww
w........w..w
w...1.......w
w...A.1.w.0ww
www.w1..wwwww
w.......w.0.w
w.1........ww
w..........ww
wwwwwwwwwwwww
"""
def __init__(self):
super().__init__("Sokoban")
def _get_sprite(self, prob_wall, prob_box, prob_hole):
sprite_select = random.uniform(0, 1)
# Wall
if sprite_select < prob_wall:
return 'w'
# Box
if sprite_select < prob_box + prob_wall:
return '1'
# Hole
if sprite_select < prob_box + prob_wall + prob_hole:
return '0'
# Empty space
return '.'
def _generate_single(self, config):
prob_wall = config['prob_wall']
prob_box = config['prob_box']
prob_hole = config['prob_hole']
assert prob_wall + prob_box + prob_hole < 1.0, 'Probabilities must not sum larger than 1'
width = config['width']
height = config['height']
level_string_array = []
# Randomly place walls
for h in range(height):
row_string_array = []
for w in range(width):
if w == 0 or h == 0 or h == height-1 or w == width-1:
row_string_array.append('w')
else:
row_string_array.append(self._get_sprite(prob_wall, prob_box, prob_hole))
level_string_array.append(row_string_array)
# Add the agent within the outer walls
x,y = random.randint(1, width-2), random.randint(1, height-2)
level_string_array[y][x] = 'A'
level_string_array = [''.join(r) for r in level_string_array]
return '\n'.join(level_string_array)
def generate(self, num_levels, config):
for level in range(num_levels):
yield self._generate_single(config)
``` |
{
"source": "aadharsh0428-prog/url_shortener_finn",
"score": 3
} |
#### File: aadharsh0428-prog/url_shortener_finn/encode.py
```python
import pyshorteners #Framework to shorten URLs.
from functools import cache #Framework to use caching in Python.
@cache #Decorator function to cache below method.
def url_short(url): #Function to shorten URLs. Long URLs are passed as inputs.
x = pyshorteners.Shortener() #Object is created to access pyshorteners' functions.
k=x.tinyurl.short(url) #Step where shortened URL is returned as a string.
return k
```
#### File: aadharsh0428-prog/url_shortener_finn/validate.py
```python
import validators #Framework to check for whether URL is valid or not.
from marshmallow import ValidationError #Framework to echo Validation Error.
from functools import cache #Framework to use caching in Python.
@cache #Decorator function to cache below method.
def check(url): #Functionn to check if URL is valid or not.
if not url:
raise ValidationError("Such input data is not accepted") #If URL entered is empty or unsatisfactory datatype.
else:
if not validators.url(url): #Condition to check if URL is valid or not
raise ValidationError("Enter a valid URL")
else:
return True
``` |
{
"source": "Aadhik611/Mono-craft",
"score": 3
} |
#### File: python/src/core.py
```python
from ursina import *
from ursina.prefabs.first_person_controller import FirstPersonController
import threading, random
import esp, client as clientlib
# $Connection portal
IP = "192.168.100.245"
RECV_HEADER = 1024
# Recv thread controls
global recv_thread_status
recv_thread_status = True
global latest_recv_message
latest_recv_message = ""
client = clientlib.app(IP, 80, "|")
client.connect()
app = Ursina()
class Voxel(Button):
def __init__(self, position=(0, 0, 0)):
super().__init__(
parent=scene,
position=position,
model="cube",
origin_y=0.5,
texture="white_cube",
color=color.rgb(0, random.randrange(200, 240), 8),
highlight_color=color.lime,
)
def input(self, key):
if self.hovered:
if key == "left mouse down":
new_block_pos = self.position + mouse.normal
client.send(
f"CREATE-BLOCK {str(int(new_block_pos.x))} {str(int(new_block_pos.y))} {str(int(new_block_pos.z))} "
)
# voxel = Voxel(position=new_block_pos)
if key == "right mouse down":
destroy(self)
def constant_recv():
while recv_thread_status:
print("Thread still running!")
latest_recv_message = str(client.recv(RECV_HEADER).decode("utf-8"))
print(latest_recv_message)
if latest_recv_message.startswith("CREATE-BLOCK "):
print(latest_recv_message[len("CREATE-BLOCK ") :])
new_voxel_pos = latest_recv_message[len("CREATE-BLOCK ") :].split(" ")
print(new_voxel_pos)
print(new_voxel_pos[0], new_voxel_pos[1], new_voxel_pos[2])
new_voxel = Voxel(
position=(
int(new_voxel_pos[0]),
int(new_voxel_pos[1]),
int(new_voxel_pos[2]),
)
)
print(
f"[RECV-FROM-SERVER]: added new block at {latest_recv_message[len('CREATE-BLOCK '):]}"
)
constant_recv_thread = threading.Thread(target=constant_recv)
constant_recv_thread.start()
def input(key):
    global recv_thread_status  # without this, the assignment below only creates a local variable
    if key == "q":
        recv_thread_status = False
client.close()
app.closeWindow()
for z in range(8):
for x in range(8):
voxel = Voxel(position=(x, 0, z))
player = FirstPersonController()
app.run()
```
#### File: python/src/esp.py
```python
import time
class esp:
def __init__(self, connection):
self.client = connection
def blink(self, delay=0.2):
self.client.send("LED")
time.sleep(delay)
self.client.send("LED")
def blink_times(self, delay=0.2, times=2):
for i in range(times):
self.blink(delay=delay)
``` |
{
"source": "Aadhith-Ujesh/online_class_attending_bot",
"score": 3
} |
#### File: Aadhith-Ujesh/online_class_attending_bot/bot.py
```python
import pyautogui
import datetime
def attender(team,driver,t3):
from selenium import webdriver
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
# driver = webdriver.Chrome(r"H:\msteamsbot\chromedriver_win32\chromedriver.exe")
driver.get("https://login.microsoftonline.com/common/oauth2/v2.0/authorize?response_type=id_token&scope=openid%20profile&client_id=5e3ce6c0-2b1f-4285-8d4b-75ee78787346&redirect_uri=https%3A%2F%2Fteams.microsoft.com%2Fgo&state=eyJpZCI6ImFhNzZiYzE1LTg4ZDAtNDlmMi1hMDllLTUwZGNkZjZjNzU2MiIsInRzIjoxNjMzNDQ5NTYyLCJtZXRob2QiOiJyZWRpcmVjdEludGVyYWN0aW9uIn0%3D&nonce=a2db4be8-ec7b-44fb-a89e-953c6dad8ecf&client_info=1&x-client-SKU=MSAL.JS&x-client-Ver=1.3.4&client-request-id=13704c37-bf4c-4fc1-9f99-b55993562eb0&response_mode=fragment&sso_reload=true")
driver.maximize_window()
time.sleep(1)
uname = WebDriverWait(driver,50).until(
EC.presence_of_element_located((By.ID, 'i0116'))
)
time.sleep(2)
uname.send_keys("<EMAIL>")
nextbutton = WebDriverWait(driver,50).until(
EC.presence_of_element_located((By.ID, 'idSIButton9'))
)
nextbutton.click()
time.sleep(2)
passwd = WebDriverWait(driver,50).until(
EC.presence_of_element_located((By.ID, 'i0118'))
)
passwd.send_keys("<PASSWORD>@")
loginbutton = WebDriverWait(driver,50).until(
EC.presence_of_element_located((By.ID, 'idSIButton9'))
)
loginbutton.click()
time.sleep(2)
no = WebDriverWait(driver,50).until(
EC.presence_of_element_located((By.ID, 'idBtn_Back'))
)
no.click()
time.sleep(1)
try:
finalLogin = WebDriverWait(driver,5).until(
EC.presence_of_element_located((By.ID, 'i0116'))
)
finalLogin.send_keys("2<PASSWORD> <EMAIL>")
time.sleep(1)
finalNext = WebDriverWait(driver,5).until(
EC.presence_of_element_located((By.ID, 'idSIButton9'))
)
finalNext.click()
except:
cl = WebDriverWait(driver,50).until(
EC.presence_of_element_located((By.CLASS_NAME, 'table'))
)
cl.click()
time.sleep(15)
time.sleep(2)
dismiss = (driver.find_elements_by_class_name("action-button "))
for i in range(len(dismiss)):
val = dismiss[i].get_attribute("Title")
if(val == "Dismiss"):
dismiss[i].click()
pyautogui.moveTo(300, 300)
parentElement1 = driver.find_elements_by_class_name("team-card")
time.sleep(2)
pyautogui.scroll(-800)
parentElement2 = driver.find_elements_by_class_name("team-card")
time.sleep(2)
pyautogui.scroll(-800)
parentElement3 = driver.find_elements_by_class_name("team-card")
time.sleep(2)
pyautogui.scroll(-800)
parentElement4 = driver.find_elements_by_class_name("team-card")
time.sleep(2)
parentElement = set()
parentElement = set.union(set(parentElement1),set(parentElement2),set(parentElement3),set(parentElement4))
parentElement = list(parentElement)
for ele in range(len(parentElement)):
elementList = parentElement[ele].find_element_by_class_name("team-name")
print(elementList.text)
for ele in range(len(parentElement)):
elementList = parentElement[ele].find_element_by_class_name("team-name")
if(elementList.text == team):
parentElement[ele].click()
break
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\extensionicon.PNG', confidence=0.8)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\extensionicon.PNG', confidence=0.7)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\Screenshot (46).png', confidence=0.8)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(2)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\Screenshot (46).png', confidence=0.7)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(2)
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\onlyscreen.PNG', confidence=0.8)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\onlyscreen.PNG', confidence=0.7)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\system.PNG', confidence=0.8)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\system.PNG', confidence=0.7)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\startrecording.PNG', confidence=0.8)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\startrecording.PNG', confidence=0.7)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\chrometab.PNG', confidence=0.8)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\chrometab.PNG', confidence=0.7)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\microsoft-teams.PNG', confidence=0.7)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\microsoft-teams.PNG', confidence=0.6)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\share.PNG', confidence=0.8)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\share.PNG', confidence=0.7)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
k = 1
while(k<30):
try:
joinbutton = driver.find_element_by_class_name("ts-calling-join-button")
joinbutton.click()
break
except:
time.sleep(60)
driver.refresh()
k+=1
try:
img_location = pyautogui.locateOnScreen('H:/msteamsbot/allowbutton.PNG', confidence=0.6)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
except:
img_location = pyautogui.locateOnScreen('H:/msteamsbot/allowbutton.PNG', confidence=0.5)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
time.sleep(5)
try:
img_location = pyautogui.locateOnScreen('H:/msteamsbot/vdoff.PNG', confidence=0.9)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
except:
img_location = pyautogui.locateOnScreen('H:/msteamsbot/vdoff.PNG', confidence=0.8)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
time.sleep(2)
join = driver.find_element_by_class_name("join-btn")
join.click()
time.sleep(2)
while(1):
now = datetime.datetime.now()
cur = now.strftime("%H:%M")
arr1 = cur.split(":")
arr1 = list(map(int,arr1))
t1 = (arr1[0]*3600) + (arr1[1]*60)
print(t3-t1)
if(t1>=t3):
#exit class
pyautogui.moveTo(400, 400, 2)
time.sleep(1)
hangup = driver.find_element_by_id("hangup-button")
hangup.click()
time.sleep(2)
break
else:
time.sleep(60)
#save the video
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\stopsharing.PNG', confidence=0.6)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(2)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\stopsharing.PNG', confidence=0.5)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(2)
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\continue.PNG', confidence=0.6)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\continue.PNG', confidence=0.5)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(1)
try:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\save.PNG', confidence=0.6)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(2)
except:
img_location = pyautogui.locateOnScreen('H:\msteamsbot\save.PNG', confidence=0.5)
image_location_point = pyautogui.center(img_location)
x, y = image_location_point
pyautogui.click(x, y)
time.sleep(2)
time.sleep(100)
# try:
# img_location = pyautogui.locateOnScreen('H:\msteamsbot\maximize.PNG', confidence=0.8)
# image_location_point = pyautogui.center(img_location)
# x, y = image_location_point
# pyautogui.click(x, y)
# time.sleep(2)
# except:
# img_location = pyautogui.locateOnScreen('H:\msteamsbot\maximize.PNG', confidence=0.7)
# image_location_point = pyautogui.center(img_location)
# x, y = image_location_point
# pyautogui.click(x, y)
# time.sleep(2)
# try:
# img_location = pyautogui.locateOnScreen('H:\msteamsbot\quit.PNG', confidence=0.8)
# image_location_point = pyautogui.center(img_location)
# x, y = image_location_point
# pyautogui.click(x, y)
# time.sleep(1)
# except:
# img_location = pyautogui.locateOnScreen('H:\msteamsbot\quit.PNG', confidence=0.7)
# image_location_point = pyautogui.center(img_location)
# x, y = image_location_point
# pyautogui.click(x, y)
# time.sleep(1)
# time.sleep(1000000)
profile = driver.find_element_by_id("personDropdown")
profile.click()
time.sleep(1)
signout = driver.find_element_by_id("logout-button")
signout.click()
logout = WebDriverWait(driver,50).until(
EC.presence_of_element_located((By.CLASS_NAME, 'table'))
)
logout.click()
time.sleep(1)
return
```
#### File: Aadhith-Ujesh/online_class_attending_bot/scheduleparser.py
```python
from datetime import datetime
def sche():
day = datetime.today().weekday()
print(datetime.today().weekday())
schedule = []
if(day == 0):
k = "monday.txt"
elif(day == 1):
k = "tuesday.txt"
elif(day == 2):
k = "wednesday.txt"
elif(day == 3):
k = "thursday.txt"
elif(day == 4):
        k = "friday.txt"
    else:
        # Weekend: no schedule file exists, so return the empty schedule instead of raising NameError on k
        return schedule
    with open(k, "r") as file:
data = file.readlines()
for line in data:
word = line.split("=")
print(word)
j = word[0]
j = j.strip()
word[0] = j
k = word[1]
k = k.strip()
word[1] = k
m = word[2]
m = m.strip()
word[2] = m
schedule.append(word)
print(schedule)
return schedule
sche()
``` |
{
"source": "aadhithya/gan-zoo-pytorch",
"score": 3
} |
#### File: gan-zoo-pytorch/models/dcgan.py
```python
import torch
import torch.nn as nn
from torch import autograd
import os
from torch.optim.adam import Adam
from base.model import BaseGAN
from models.modules.net import NetG, NetD
from utils.utils import init_weight
class DCGAN(BaseGAN):
"""
Deep Convolutional GAN https://arxiv.org/pdf/1511.06434.pdf
"""
def __init__(self, cfg, writer):
super().__init__(cfg, writer)
self.netG = NetG(
z_dim=self.cfg.z_dim,
out_ch=self.cfg.img_ch,
norm_layer=nn.BatchNorm2d,
final_activation=torch.tanh,
)
self.netD = NetD(
self.cfg.img_ch,
norm_layer=nn.BatchNorm2d,
final_activation=torch.sigmoid,
)
self.netG.apply(init_weight)
self.netD.apply(init_weight)
# * DCGAN Alternating optimization
self.n_critic = 1
self.bce_loss = nn.BCELoss()
self._update_model_optimizers()
def _update_model_optimizers(self):
self.netG = self.netG.to(self.cfg.device)
self.netD = self.netD.to(self.cfg.device)
self.optG = Adam(
self.netG.parameters(), lr=self.cfg.lr.g, betas=(0.5, 0.999)
)
self.optD = Adam(
self.netD.parameters(), lr=self.cfg.lr.d, betas=(0.5, 0.999)
)
self.netG.train()
self.netD.train()
def generator_step(self, data):
self.optG.zero_grad()
# noise = self.sample_noise()
fake_logits = self.netD(self.fake_images)
loss = self.bce_loss(fake_logits, torch.ones_like(fake_logits))
loss.backward()
self.optG.step()
self.metrics["gen-loss"] += [loss.item()]
def critic_step(self, data):
self.optD.zero_grad()
real_images = data[0].float().to(self.cfg.device)
real_logits = self.netD(real_images).view(-1)
real_loss = self.bce_loss(real_logits, torch.ones_like(real_logits))
noise = self.sample_noise()
self.fake_images = self.netG(noise)
fake_logits = self.netD(self.fake_images).view(-1)
fake_loss = self.bce_loss(fake_logits, torch.zeros_like(fake_logits))
loss = real_loss + fake_loss
loss.backward(retain_graph=True)
self.optD.step()
self.metrics["discriminator-loss"] += [loss.item()]
``` |
{
"source": "AadhithyanBalaji/amazon-review-wordcloud",
"score": 3
} |
#### File: AadhithyanBalaji/amazon-review-wordcloud/extract-prep-cloud.py
```python
from urllib.request import urlopen;
from bs4 import BeautifulSoup;
import pandas as pd;
from wordcloud import WordCloud, STOPWORDS ;
import matplotlib.pyplot as plt ;
from joblib import Parallel, delayed;
import random;
import time;
import threading;
import os;
product_id = "B07HCXQZ4P";
reviews = [];
def ScrapePage(pagenum):
reviewpageurl = url +str(pagenum);
exceptionstatus = True;
scrape_attempt = 0;
while exceptionstatus and scrape_attempt < 3:
try:
print("\nExtracting reviews for :"+str(pagenum)+", ATTEMPT : "+str(scrape_attempt));
html = urlopen(reviewpageurl);
soup = BeautifulSoup(html,'lxml');
review_text = soup.find_all("span",class_="a-size-base review-text");
for review in review_text:
reviews.append(review.get_text());
df = pd.DataFrame(reviews);
if not os.path.isfile(r'\review_'+product_id+'.csv'):
df.to_csv(r'\review_'+product_id+'.csv', header=False,index=False)
else: # else it exists so append without writing the header
with open(r'\review_'+product_id+'.csv', 'a') as f:
df.to_csv(f, header=False,index=False)
exceptionstatus = False;
except Exception as e:
print('\n Exception at : '+str(pagenum)+'-'+str(e));
scrape_attempt+=1;
sleeptime = random.randint(1,3);
print('\n'+ str(threading.current_thread())+'Sleeping for : '+str(sleeptime));
time.sleep(sleeptime);
def ExtractWords(val):
cw= ' ';
print('val :'+val);
# typecaste each val to string
val = str(val).lower();
# split the value
tokens = val.split();
for words in tokens:
cw = cw + words + ' ';
print('cw after :'+cw);
return str(cw);
#------------------Getting the number of pages in review
url = "https://www.amazon.in/product-reviews/"+product_id+"/ref=cm_cr_getr_d_paging_btm_2?showViewpoints=1&pageNumber=";
html = urlopen(url+"1");
soup = BeautifulSoup(html,'lxml');
page_buttons = soup.find_all("li",class_="page-button");
pagecount = int(page_buttons[len(page_buttons)-1].get_text().replace(",",""));
#-----------------Identifying the number of threads needed to scrape the reviews
if pagecount < 10:
threadcount = pagecount;
else:
threadcount = pagecount / 10;
threadcount = int(threadcount);
threadcount = 8;
print('\nThreads created :'+str(threadcount));
#-----------------Scraping the reviews multi threaded
Parallel(n_jobs=threadcount,prefer="threads")(delayed(ScrapePage)(i) for i in range(1,pagecount));
print('\n Scraping complete!');
#-----------------Saving the scraped reviews in to a csv through dataframe
#df = pd.DataFrame(reviews);
#df.to_csv(r'C:\Users\Aadhithyan_Balaji\Desktop\review_'+product_id+'.csv');
df = pd.read_csv(r"c:\users\aadhithyan_balaji\desktop\review_"+product_id+".csv", encoding ="latin-1",usecols=[1])
print(df);
comment_words = ' ';
stopwords = set(STOPWORDS)
# iterate through the csv file
comment_words = comment_words + str(Parallel(n_jobs=threadcount,prefer="threads")(delayed(ExtractWords)(frame) for frame in df));
print('\nExtracted words from the review!\nPreparing the word cloud...') ;
wordcloud = WordCloud(width = 800, height = 800,
background_color ='white',
stopwords = stopwords,
min_font_size = 10).generate(comment_words)
print('\nPlotting word cloud') ;
# plot the WordCloud image
plt.figure(figsize = (8, 8), facecolor = None)
plt.imshow(wordcloud)
plt.axis("off")
plt.tight_layout(pad = 0)
plt.show()
``` |
{
"source": "aadhithya/pytorch-yolo-v1",
"score": 3
} |
#### File: aadhithya/pytorch-yolo-v1/loss.py
```python
import torch
import torch.nn as nn

from utils import get_iou  # IoU helper used below; defined in utils.py
class YOLOv1Loss(nn.Module):
def __init__(self, S=7, B=2, C=20):
"""
__init__ initialize YOLOv1 Loss.
Args:
S (int, optional): split_size. Defaults to 7.
B (int, optional): number of boxes. Defaults to 2.
C (int, optional): number of classes. Defaults to 20.
"""
super().__init__()
self.mse = nn.MSELoss(reduction="sum")
self.S = S
self.B = B
self.C = C
self.l_noobl = 0.5
self.l_coord = 5
def forward(self, predictions, target):
        predictions = predictions.reshape(-1, self.S, self.S, self.C + self.B * 5)
iou_b1 = get_iou(predictions[...,21:25], target[...,21:25])
iou_b2 = get_iou(predictions[...,26:30], target[...,21:25])
ious = torch.stack([iou_b1, iou_b2], 0)
_, max_iou = torch.max(ious, dim=0)
        exists_box = target[..., 20].unsqueeze(3)  # objectness indicator: 1 where the cell contains an object
# * Box Coordinates Loss
# Select the bounding boxes with highest IoU
box_predictions = exists_box * (
(
max_iou * predictions[..., 26:30] +
(1 - max_iou) * predictions[..., 21:25]
)
)
# Select targets which has an object
box_targets = exists_box * target[...,21:25]
box_predictions[...,2:4] = torch.sign(box_predictions[...,2:4]) * torch.sqrt(
torch.abs(box_predictions[..., 2:4]) + 1e-6
)
box_targets[..., 2:4] = torch.sqrt(box_targets[..., 2:4])
box_loss = self.mse(
torch.flatten(box_predictions, end_dim=-2),
torch.flatten(box_targets, end_dim=-2)
)
# * Object Losss
pred_box = (
max_iou * predictions[..., 25:26] +
(1-max_iou) * predictions[..., 20:21]
)
object_loss = self.mse(
torch.flatten(exists_box * pred_box),
torch.flatten(exists_box * target[..., 20:21])
)
# * No Object Loss
# For the first box
        no_object_loss = self.mse(
torch.flatten((1-max_iou) * predictions[...,20:21], start_dim=1),
torch.flatten((1-max_iou) * target[...,20:21], start_dim=1)
)
# For the second box
        no_object_loss += self.mse(
torch.flatten(max_iou * predictions[...,25:26], start_dim=1),
torch.flatten(max_iou * target[...,20:21], start_dim=1)
)
# * Class prediction Loss
class_loss = self.mse(
torch.flatten(exists_box * predictions[...,:20], end_dim=-2),
torch.flatten(exists_box * target[...,:20], end_dim=-2)
)
# * Total Loss
loss = (
self.l_coord * box_loss
+ object_loss
            + self.l_noobl * no_object_loss
+ class_loss
)
return loss
```
#### File: aadhithya/pytorch-yolo-v1/utils.py
```python
import torch
def get_iou(box1, box2, box_format="midpoint"):
"""
Calculates intersection over union
Parameters:
boxes1 (tensor): Predictions of Bounding Boxes (BATCH_SIZE, 4)
box2 (tensor): Correct Labels of Boxes (BATCH_SIZE, 4)
box_format (str): midpoint/corners, if boxes (x,y,w,h) or (x1,y1,x2,y2)
Returns:
tensor: Intersection over union for all examples
"""
if box_format == "corners":
# * box 1
box1_x1 = box1[...,0:1]
box1_y1 = box1[...,1:2]
box1_x2 = box1[...,2:3]
box1_y2 = box1[...,3:4]
# * box 2
box2_x1 = box2[...,0:1]
box2_y1 = box2[...,1:2]
box2_x2 = box2[...,2:3]
box2_y2 = box2[...,3:4]
elif box_format == "midpoint":
box1_x1 = box1[..., 0:1] - box1[..., 2:3] / 2
box1_y1 = box1[..., 1:2] - box1[..., 3:4] / 2
box1_x2 = box1[..., 0:1] + box1[..., 2:3] / 2
box1_y2 = box1[..., 1:2] + box1[..., 3:4] / 2
box2_x1 = box2[..., 0:1] - box2[..., 2:3] / 2
box2_y1 = box2[..., 1:2] - box2[..., 3:4] / 2
box2_x2 = box2[..., 0:1] + box2[..., 2:3] / 2
box2_y2 = box2[..., 1:2] + box2[..., 3:4] / 2
else:
raise NotImplementedError(f"OOPs! {box_format} not supported!")
x1 = torch.max(box1_x1, box2_x1)
y1 = torch.max(box1_y1, box2_y1)
x2 = torch.min(box1_x2, box2_x2)
y2 = torch.min(box1_y2, box2_y2)
intersection = (x2 - x1).clamp(0) * (y2 - y1).clamp(0)
box1_area = abs((box1_x2 - box1_x1) * (box1_y2 - box1_y1))
box2_area = abs((box2_x2 - box2_x1) * (box2_y2 - box2_y1))
    return intersection / (box1_area + box2_area - intersection + 1e-6)
def nonmax_suppression(bboxes, iou_threshold, prob_threshold, box_format="midpoint"):
# bboxes: [[class, pblty, x1, y1, x2, y2],[...],...,[...]]
bboxes = [box for box in bboxes if box[1] > prob_threshold]
bboxes = sorted(bboxes, key=lambda x:x[1], reverse=True)
bboxes_after_nms = []
while bboxes:
        chosen_box = bboxes.pop(0)
# * keep boxes that don't belong to the same class as chosen_box or
# * add the boxes if the iou with chosen box is less than threshold
bboxes = [
box for box in bboxes
if box[0] != chosen_box or
get_iou(
torch.tensor(chosen_box[2:]),
torch.tensor(box[2:]),
box_format=box_format
) < iou_threshold
]
bboxes_after_nms.append(chosen_box)
return bboxes_after_nms
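
if __name__ == "__main__":
    # Added sanity check (not part of the original repo): two 1x1 boxes in
    # midpoint format overlap in a quarter square, so IoU = 0.25 / 1.75 ~ 0.1429.
    b1 = torch.tensor([0.5, 0.5, 1.0, 1.0])  # center (0.5, 0.5) -> corners (0,0)-(1,1)
    b2 = torch.tensor([1.0, 1.0, 1.0, 1.0])  # center (1.0, 1.0) -> corners (0.5,0.5)-(1.5,1.5)
    print(get_iou(b1, b2, box_format="midpoint"))  # -> tensor([0.1429])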
``` |
{
"source": "aadhitthyaa/Customer-Analytics",
"score": 3
} |
#### File: aadhitthyaa/Customer-Analytics/SAP_CustomerAnalytics.py
```python
import pandas as pd
import sapdi
ws = sapdi.get_workspace(name='Hackathon')
dc = ws.get_datacollection(name='01_Rating_and_Review')
with dc.open('/unsupervised/nivea_uk.csv').get_reader() as reader:
df = pd.read_csv(reader)
# Nivea_uk file
df
# Importing nivea_us file
import pandas as pd
import sapdi
ws = sapdi.get_workspace(name='Hackathon')
dc = ws.get_datacollection(name='01_Rating_and_Review')
with dc.open('/unsupervised/nivea_us.csv').get_reader() as reader:
df1 = pd.read_csv(reader)
# Nivea us file
df1
# Reviews from Amazon
import pandas as pd
import sapdi
ws = sapdi.get_workspace(name='Hackathon')
dc = ws.get_datacollection(name='01_Rating_and_Review')
with dc.open('/unsupervised/nivea_uk_amazon.csv').get_reader() as reader:
df2 = pd.read_csv(reader)
# Merging the 3 files
dataframe = pd.concat([df,df1,df2])
# Dataframe with us,uk and amazon data
dataframe
#Installing nltk lib
pip install -U --user nltk
# Loading the required components
import nltk
nltk.download()
# Stopwords function
from nltk.corpus import stopwords
# Stem function
from nltk.stem import PorterStemmer
# Defining the stop words
stopwords=stopwords.words('english')
# Examining the stop words
print(stopwords)
# Creating a column of reviews without stopwords
dataframe['without_stopwords'] = dataframe['content'].apply(lambda x: ' '.join([word for word in x.split() if word not in (stopwords)]))
# Examining the created dataframe
dataframe
# Stemming the review content
ps = PorterStemmer()
dataframe['stemmed_content'] = dataframe['without_stopwords'].apply(lambda x: ' '.join([ps.stem(word) for word in x.split()]))
# Examining the created dataframe
dataframe
# Importing nltk library for lemmetization
import nltk
nltk.download('wordnet')
# Lemmetizing the reviews
from nltk.stem.wordnet import WordNetLemmatizer
lmtzr = WordNetLemmatizer()
dataframe['lemmatized_text'] = dataframe['without_stopwords'].apply(lambda x: ' '.join([lmtzr.lemmatize(word,'v') for word in x.split() ]))
# Examining the dataframe
dataframe
# Tokenize function
from nltk import tokenize
import nltk
nltk.download('punkt')
# Tokenizing the lemmatized reviews
dataframe['tokenized_text'] = dataframe.apply(lambda row: nltk.sent_tokenize(row['lemmatized_text']), axis=1)
dataframe
pip install --user -U gensim
# Loading gensim library and functions for LDA
import gensim
from gensim import corpora
# Creating tokenized words for matrix to be included in LDA
dataframe['tokenized_words'] = pd.Series(dataframe['lemmatized_text']).apply(lambda x: x.split())
dataframe
# Defining a dictionary to store tokenized words
dictionary = corpora.Dictionary(dataframe['tokenized_words'])
# Creating a matrix
doc_term_matrix = [dictionary.doc2bow(rev) for rev in dataframe['tokenized_words']]
# Creating the object for LDA model using gensim library
LDA = gensim.models.ldamodel.LdaModel
# Building LDA model
lda_model = LDA(corpus=doc_term_matrix, id2word=dictionary, num_topics=10, random_state=100,
chunksize=1000, passes=50)
# Exploring the LDA model topics
lda_model.print_topics()
# Installing libraries for visualizing the topics
pip install pyldavis
# Visualization of the topics generated by LDA model
import pyLDAvis.gensim
pyLDAvis.enable_notebook()
vis = pyLDAvis.gensim.prepare(lda_model, doc_term_matrix, dictionary)
vis
#Sentiment analysis on each review
import nltk
nltk.download('vader_lexicon')
from nltk.sentiment.vader import SentimentIntensityAnalyzer
analyzer = SentimentIntensityAnalyzer()
sentences = dataframe['lemmatized_text'].to_list()
for sentence in sentences:
senti = analyzer.polarity_scores(sentence)
print(sentence)
print(senti,'\n')
if senti['compound'] >= 0.05 :
print("Positive",'\n')
elif senti['compound'] <= - 0.05 :
print("Negative",'\n')
else :
print("Neutral",'\n')
# Sentiment analysis on each sentence in review
import nltk
nltk.download('vader_lexicon')
from nltk.sentiment.vader import SentimentIntensityAnalyzer
analyzer = SentimentIntensityAnalyzer()
sentences = dataframe['tokenized_text']
sentences1 = sentences.explode()
for sentence in sentences1:
senti = analyzer.polarity_scores(sentence)
print(sentence)
print(senti,'\n')
if senti['compound'] >= 0.05 :
print("Positive",'\n')
elif senti['compound'] <= - 0.05 :
print("Negative",'\n')
else :
print("Neutral",'\n')
# Sentiment analysis on each review in dataframe
dataframe1 = dataframe
from nltk.sentiment.vader import SentimentIntensityAnalyzer
analyzer = SentimentIntensityAnalyzer()
def get_sentiment(row, **kwargs):
sentiment_score = analyzer.polarity_scores(row)
positive_meter = round((sentiment_score['pos'] * 10), 2)
negative_meter = round((sentiment_score['neg'] * 10), 2)
return positive_meter if kwargs['k'] == 'positive' else negative_meter
dataframe1['positive'] = dataframe1.lemmatized_text.apply(get_sentiment, k='positive')
dataframe1['negative'] = dataframe1.lemmatized_text.apply(get_sentiment, k='negative')
# Creating a column for sentiment analysis based on positive and negative review
dataframe1.loc[dataframe1['positive'] < dataframe1['negative'], 'Sentiment'] = 'Negative'
dataframe1.loc[dataframe1['positive'] > dataframe1['negative'] , 'Sentiment'] = 'Positive'
dataframe1.loc[dataframe1['positive'] == dataframe1['negative'] , 'Sentiment'] = 'Neutral'
# Final dataframe
dataframe1.head()
# Sentiment analysis for each sentence in review in dataframe
dataframe2 = dataframe.explode('tokenized_text')
from nltk.sentiment.vader import SentimentIntensityAnalyzer
analyzer = SentimentIntensityAnalyzer()
def get_sentiment(row, **kwargs):
sentiment_score = analyzer.polarity_scores(row)
positive_meter = round((sentiment_score['pos'] * 10), 2)
negative_meter = round((sentiment_score['neg'] * 10), 2)
return positive_meter if kwargs['k'] == 'positive' else negative_meter
dataframe2['positive'] = dataframe2.tokenized_text.apply(get_sentiment, k='positive')
dataframe2['negative'] = dataframe2.tokenized_text.apply(get_sentiment, k='negative')
# Creating a column for sentiment analysis based on positive and negative review
dataframe2.loc[dataframe2['positive'] < dataframe2['negative'], 'Sentiment'] = 'Negative'
dataframe2.loc[dataframe2['positive'] > dataframe2['negative'] , 'Sentiment'] = 'Positive'
dataframe2.loc[dataframe2['positive'] == dataframe2['negative'] , 'Sentiment'] = 'Neutral'
# Final dataframe
dataframe2.head()
``` |
{
"source": "aadhityasw/Competitive-Programs",
"score": 4
} |
#### File: questions/q102_array_leaders/code.py
```python
def leaders(A,N):
lead = []
ma = A[-1]
for num in reversed(A) :
if num >= ma :
ma = num
lead.append(ma)
return reversed(lead)
    # The code below would produce the same answer but runs noticeably slower,
    # because concatenating two lists costs more than an append followed by a reverse.
""" lead = [ma] + lead
return lead"""
import math
def main():
T=int(input())
while(T>0):
N=int(input())
A=[int(x) for x in input().strip().split()]
A=leaders(A,N)
for i in A:
print(i,end=" ")
print()
T-=1
if __name__=="__main__":
main()
```
#### File: questions/q109_stock_buy_sell/code.py
```python
class Solution:
def stockBuySell(self, A, n):
arr = []
i = 0
while i < n :
while i < n-1 and A[i+1] <= A[i] :
i += 1
buy = i
while i < n-1 and A[i+1] >= A[i] :
i += 1
sell = i
i += 1
if A[sell] > A[buy] :
arr.append([buy, sell])
return arr
```
#### File: questions/q117_linked_list_loop/code.py
```python
def detectLoop(head):
ptr = head
ptr2 = head
while True :
if ptr is None or ptr2 is None or ptr2.next is None :
return False
ptr = ptr.next
ptr2 = ptr2.next.next
if ptr is ptr2 :
return True
# Node Class
class Node:
def __init__(self, data): # data -> value stored in node
self.data = data
self.next = None
# Linked List Class
class LinkedList:
def __init__(self):
self.head = None
self.tail = None
# creates a new node with given value and appends it at the end of the linked list
def insert(self, val):
if self.head is None:
self.head = Node(val)
self.tail = self.head
else:
self.tail.next = Node(val)
self.tail = self.tail.next
#connects last node to node at position pos from begining.
def loopHere(self,pos):
if pos==0:
return
walk = self.head
for i in range(1,pos):
walk = walk.next
self.tail.next = walk
if __name__ == '__main__':
for _ in range(int(input())):
n = int(input())
LL = LinkedList()
for i in input().split():
LL.insert(int(i))
LL.loopHere(int(input()))
print(detectLoop(LL.head))
```
#### File: questions/q118_linked_list_loop_removal/code.py
```python
def removeLoop(head):
ptr = head
ptr2 = head
while True :
if ptr is None or ptr2 is None or ptr2.next is None :
return
ptr = ptr.next
ptr2 = ptr2.next.next
if ptr is ptr2 :
loopNode = ptr
break
ptr = loopNode.next
count = 1
while ptr is not loopNode :
ptr = ptr.next
count += 1
ptr = head
ptr1 = head
ptr2 = head.next
while count > 1 :
ptr2 = ptr2.next
ptr1 = ptr1.next
count -= 1
while ptr is not ptr2 :
ptr = ptr.next
ptr2 = ptr2.next
ptr1 = ptr1.next
ptr1.next = None
```
#### File: questions/q134_maximum_stream_subarray/brute_force.py
```python
def max_of_subarrays(arr,n,k):
'''
you can use collections module here.
:param a: given array
:param n: size of array
:param k: value of k
:return: A list of required values
'''
temp = arr[:k]
ma = 0
res = [max(temp)]
for i in range(k, n) :
temp = temp[1:] + [arr[i]]
res.append(max(temp))
return res
```
#### File: questions/q134_maximum_stream_subarray/code.py
```python
from collections import deque
def max_of_subarrays(arr,n,k):
'''
you can use collections module here.
:param a: given array
:param n: size of array
:param k: value of k
:return: A list of required values
'''
queue = deque()
res = []
# To enter the initial k elements
for i in range(k) :
while queue and arr[queue[-1]] <= arr[i] :
queue.pop()
queue.append(i)
for i in range(k, n) :
res.append(arr[queue[0]])
# Remove elements out of range
while queue and queue[0] < (i-k) :
queue.popleft()
# Remove all elements smaller than arr[i]
while queue and arr[queue[-1]] <= arr[i] :
queue.pop()
queue.append(i)
res.append(arr[queue[0]])
return res
```
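A quick usage check of the deque-based solution above (my addition, not part of the original repo; assumes `max_of_subarrays` is in scope):
```python
print(max_of_subarrays([1, 3, -1, -3, 5, 3, 6, 7], 8, 3))
# -> [3, 3, 5, 5, 6, 7]: the maximum of each length-3 window
```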
#### File: questions/q141_vertical_traversal_binary_tree/code.py
```python
def verticalOrder(root):
pos = []
neg = []
queue = [(root, 0)]
while len(queue) > 0 :
ptr, H = queue.pop(0)
if ptr.left :
queue.append((ptr.left, H-1))
if ptr.right :
queue.append((ptr.right, H+1))
if H >= 0 :
if len(pos) > H :
pos[H].append(ptr.data)
elif len(pos) == H :
pos.append([ptr.data])
else :
H_p = (-1 * H) - 1
if len(neg) > H_p :
neg[H_p].append(ptr.data)
elif len(neg) == H_p :
neg.append([ptr.data])
arr = []
for i in range(len(neg)-1, -1, -1) :
arr.extend(neg[i])
for i in range(len(pos)) :
arr.extend(pos[i])
return arr
```
#### File: questions/q142_binary_tree_spiral_level_order/code.py
```python
def findSpiral(root):
if root is None :
return []
cur_level = [root]
next_level = []
arr = []
level = 0
while len(cur_level) > 0 :
for i in range(len(cur_level)) :
ptr = cur_level[i]
if ptr.left :
next_level.append(ptr.left)
if ptr.right :
next_level.append(ptr.right)
if level % 2 == 0 :
for i in range(len(cur_level)-1, -1, -1) :
arr.append(cur_level[i].data)
else :
for i in range(len(cur_level)) :
arr.append(cur_level[i].data)
cur_level = next_level
next_level = []
level += 1
return arr
```
#### File: questions/q143_binary_tree_connect_level_nodes/code.py
```python
def connect(root):
'''
:param root: root of the given tree
:return: none, just connect accordingly.
'''
if root is None :
return
cur_level = [root]
next_level = []
while len(cur_level) > 0 :
for i in range(len(cur_level)) :
ptr = cur_level[i]
if ptr.left :
next_level.append(ptr.left)
if ptr.right :
next_level.append(ptr.right)
for i in range(len(cur_level)-1) :
cur_level[i].nextRight = cur_level[i+1]
        cur_level[-1].nextRight = None  # rightmost node of the level has no successor
cur_level = next_level
next_level = []
import sys
sys.setrecursionlimit(50000)
from collections import deque
# Tree Node
class Node:
def __init__(self, val):
self.right = None
self.data = val
self.left = None
self.nextRight = None
# Function to Build Tree
def buildTree(s):
#Corner Case
if(len(s)==0 or s[0]=="N"):
return None
# Creating list of strings from input
# string after spliting by space
ip=list(map(str,s.split()))
# Create the root of the tree
root=Node(int(ip[0]))
size=0
q=deque()
# Push the root to the queue
q.append(root)
size=size+1
# Starting from the second element
i=1
while(size>0 and i<len(ip)):
# Get and remove the front of the queue
currNode=q[0]
q.popleft()
size=size-1
# Get the current node's value from the string
currVal=ip[i]
# If the left child is not null
if(currVal!="N"):
# Create the left child for the current node
currNode.left=Node(int(currVal))
# Push it to the queue
q.append(currNode.left)
size=size+1
# For the right child
i=i+1
if(i>=len(ip)):
break
currVal=ip[i]
# If the right child is not null
if(currVal!="N"):
# Create the right child for the current node
currNode.right=Node(int(currVal))
# Push it to the queue
q.append(currNode.right)
size=size+1
i=i+1
return root
def InOrder(root):
'''
:param root: root of the given tree.
:return: None, print the space separated in order Traversal of the given tree.
'''
if root is None: # check if the root is none
return
InOrder(root.left) # do in order of left child
print(root.data, end=" ") # print root of the given tree
InOrder(root.right) # do in order of right child
def printSpecial(root):
leftmost_node = root
while leftmost_node :
curr_node = leftmost_node
leftmost_node = None
if curr_node.left :
leftmost_node = curr_node.left
elif curr_node.right :
leftmost_node = curr_node.right
print(curr_node.data,end=" ")
while curr_node.nextRight :
print(curr_node.nextRight.data,end=" ")
curr_node = curr_node.nextRight
print()
if __name__=="__main__":
t=int(input())
for _ in range(0,t):
s=input()
root=buildTree(s)
connect(root)
printSpecial(root)
InOrder(root)
print()
```
#### File: questions/q156_merge_k_sorted_linked_list/code.py
```python
import heapq
class Node:
def __init__(self,x):
self.data = x
self.next = None
def mergeKLists(arr,K) :
heap = []
for i in range(K) :
heap.append((arr[i].data, i, arr[i]))
heapq.heapify(heap)
head = Node(-1)
ptr = head
while len(heap) > 0 :
(v, i, node) = heapq.heappop(heap)
ptr.next = node
node = node.next
ptr = ptr.next
ptr.next = None
if node is not None :
heapq.heappush(heap, (node.data, i, node))
ptr = head.next
head.next = None
head = ptr
return head
class LinkedList:
def __init__(self):
self.head=None
self.tail=None
def add(self,x):
if self.head is None:
self.head=Node(x)
self.tail=self.head
else:
self.tail.next=Node(x)
self.tail=self.tail.next
def printList(head):
walk = head
while walk:
print(walk.data, end=' ')
walk=walk.next
print()
if __name__=="__main__":
for _ in range(int(input())):
n=int(input())
line=[int(x) for x in input().strip().split()]
heads=[]
index=0
for i in range(n):
size=line[index]
index+=1
newList = LinkedList()
for _ in range(size):
newList.add(line[index])
index+=1
heads.append(newList.head)
merged_list = mergeKLists(heads,n)
printList(merged_list)
```
#### File: questions/q167_swap_equal_sum_arrays/code.py
```python
class Solution:
def findSwapValues(self,a, n, b, m):
summA = sum(a)
summB = sum(b)
diff = (summA - summB)
if diff % 2 != 0 :
return -1
diff = diff / 2
# We need to find num1 in a and num2 in b such that
# summA - num1 + num2 = summB - num2 + num1
# Which brings us to
# num1 - num2 = (summA - summB) / 2
        # The two-pointer scan below assumes both arrays are in sorted order
        a.sort()
        b.sort()
        i = 0
        j = 0
while i < n and j < m :
d = a[i] - b[j]
if d == diff :
return 1
elif d < diff :
i += 1
else :
j += 1
return -1
```
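A small check of the derivation above (my addition, not part of the original repo): with A = [4, 1, 2, 1, 1, 2] (sum 11) and B = [3, 6, 3, 3] (sum 15), swapping 1 and 3 makes both sums 13, so the method should return 1.
```python
# Assumes the Solution class above is in scope.
print(Solution().findSwapValues([4, 1, 2, 1, 1, 2], 6, [3, 6, 3, 3], 4))  # -> 1
```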
#### File: questions/q168_depth_first_traversal/code.py
```python
class Solution:
def recursiveDFS(self, visited, adj, u) :
if u not in visited :
visited.append(u)
for v in adj[u] :
self.recursiveDFS(visited, adj, v)
def dfsOfGraph(self, V, adj):
visited = []
self.recursiveDFS(visited, adj, 0)
return visited
if __name__ == '__main__':
T=int(input())
for i in range(T):
V, E = map(int, input().split())
adj = [[] for i in range(V)]
for _ in range(E):
u, v = map(int, input().split())
adj[u].append(v)
ob = Solution()
ans = ob.dfsOfGraph(V, adj)
for i in range(len(ans)):
print(ans[i], end = " ")
print()
```
#### File: questions/q172_array_distinct_elements_count_window/code.py
```python
import collections
class Solution:
def countDistinct(self, A, N, K):
freq = collections.Counter(A[:K])
count = len(freq.keys())
arr = [count]
for i in range(K, N) :
freq[A[i-K]] -= 1
if freq[A[i-K]] == 0 :
count -= 1
if A[i] in freq :
if freq[A[i]] == 0 :
count += 1
freq[A[i]] += 1
else :
freq[A[i]] = 1
count += 1
arr.append(count)
return arr
if __name__=='__main__':
t = int(input())
for i in range(t):
n,k = list(map(int, input().strip().split()))
arr = list(map(int, input().strip().split()))
res = Solution().countDistinct(arr, n, k)
for i in res:
print (i, end = " ")
print ()
```
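A quick usage check of the sliding-window counter above (my addition, assuming the Solution class is in scope):
```python
print(Solution().countDistinct([1, 2, 1, 3, 4, 2, 3], 7, 4))
# -> [3, 4, 4, 3]: distinct counts of windows [1,2,1,3], [2,1,3,4], [1,3,4,2], [3,4,2,3]
```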
#### File: questions/q174_longest_consecutive_subsequence/code.py
```python
class Solution:
# arr[] : the input array
# N : size of the array arr[]
#Function to return length of longest subsequence of consecutive integers.
def findLongestConseqSubseq(self,arr, N):
arr.sort()
maximum = 0
for i in range(N) :
j = i + 1
c = 1
while j < N and (arr[j] - arr[j-1] < 2) :
if arr[j-1] != arr[j] :
c += 1
j += 1
maximum = max(maximum, c)
return maximum
import atexit
import io
import sys
_INPUT_LINES = sys.stdin.read().splitlines()
input = iter(_INPUT_LINES).__next__
_OUTPUT_BUFFER = io.StringIO()
sys.stdout = _OUTPUT_BUFFER
@atexit.register
def write():
sys.__stdout__.write(_OUTPUT_BUFFER.getvalue())
if __name__ == '__main__':
t = int(input())
for tt in range(t):
n=int(input())
a = list(map(int, input().strip().split()))
print(Solution().findLongestConseqSubseq(a,n))
```
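A quick check of the sort-and-scan approach above (my addition, assuming the Solution class is in scope):
```python
print(Solution().findLongestConseqSubseq([2, 6, 1, 9, 4, 5, 3], 7))  # -> 6, for the run 1..6
```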
#### File: questions/q179_equal_array/code.py
```python
class Solution:
#Function to check if two arrays are equal or not.
def check(self,A,B,N):
board = {}
count = 0
for i in A :
if i in board :
if board[i] == 0 :
count += 1
board[i] += 1
else :
count += 1
board[i] = 1
#print(count, board)
for i in B :
if i in board :
if board[i] == 0 :
return False
board[i] -= 1
if board[i] == 0 :
count -= 1
else :
return False
#print(count, board)
if count != 0 :
return False
return True
if __name__=='__main__':
t=int(input())
for tc in range(t):
N=int(input())
A = [int(x) for x in input().replace(' ',' ').strip().split(' ')]
B = [int(x) for x in input().replace(' ',' ').strip().split(' ')]
ob=Solution()
if ob.check(A,B,N):
print(1)
else:
print(0)
```
#### File: questions/q182_character_equal_frequency_one_removal/code.py
```python
import collections
class Solution:
def getCount(self, n, freq) :
# Find the number of characters with freq=1
count = 0
for _, v in freq.items() :
if v == n :
count += 1
return count
def sameFreq(self, s):
freq = collections.Counter(s)
if len(list(freq.keys())) <= 1 :
return 1
dis = list(set(freq.values()))
# If we have many frequencies, removing one won't fix it
if len(dis) > 2 :
return 0
# If all letters already have same frequency, no need to remove
if len(dis) == 1 :
return 1
        # Removal works if exactly one character sits one above the common frequency,
        # or if exactly one character has frequency 1 and can be removed entirely
if ((abs(dis[0] - dis[1]) == 1) and (self.getCount(max(dis[0], dis[1]), freq) == 1)) or ((1 in dis) and (self.getCount(1, freq) == 1)) :
return 1
else :
return 0
if __name__ == "__main__":
T=int(input())
for _ in range(T):
s = input()
ob = Solution()
answer = ob.sameFreq(s)
if answer:
print(1)
else:
print(0)
```
#### File: questions/q188_find_path_grid/code.py
```python
class Solution:
def getCandidates(self, r, c) :
positions = [
(r-1, c),
(r, c-1),
(r, c+1),
(r+1, c)
]
return [(i, j) for (i, j) in positions if 0<=i<self.M and 0<=j<self.N and self.grid[i][j] > 0 and not self.visited[i][j]]
def is_Possible(self, grid):
self.grid = grid
self.M = len(grid)
self.N = len(grid[0])
self.visited = [[False for _ in range(self.N)] for _ in range(self.M)]
stack = []
s = None
for i in range(self.M) :
for j in range(self.N) :
if grid[i][j] == 1 :
s = [i, j]
break
if s is not None :
break
# Depth First Search
if grid[s[0]][s[1]] == 1 :
stack.append((s[0], s[1]))
self.visited[s[0]][s[1]] = True
while len(stack) > 0 :
(r, c) = stack.pop()
for (i, j) in self.getCandidates(r, c) :
stack.append((i, j))
self.visited[i][j] = True
if grid[i][j] == 2 :
return True
return False
if __name__ == '__main__':
T=int(input())
for i in range(T):
n = int(input())
grid = []
for _ in range(n):
a = list(map(int, input().split()))
grid.append(a)
obj = Solution()
ans = obj.is_Possible(grid)
if(ans):
print("1")
else:
print("0")
```
#### File: questions/q191_special_matrix/code.py
```python
import bisect

class Solution:
def minMoves (self, N, matrix):
st = []
for i in range(50):
x = (i*(i+1))/2
st.append(int(x))
ans = 1e9
for i in range(N):
c = 0
for j in range(N):
high = bisect.bisect(st,matrix[i][j])
low = 0
if high:
low = high-1
c += min(matrix[i][j] - st[low] , st[high] - matrix[i][j])
ans = min(ans , c)
for i in range(N):
c = 0
for j in range(N):
high = bisect.bisect(st,matrix[j][i])
low = 0
if high:
low = high-1
c += min(matrix[j][i] - st[low] , st[high] - matrix[j][i])
ans = min(ans , c)
return ans
```
#### File: questions/q195_missing_and_repeating_in_array/code.py
```python
class Solution:
def findTwoElement( self,arr, n):
        # x = repeating value, y = missing value
        x_minus_y = sum(arr) - (n * (n + 1) / 2)  # sum(arr) - sum(1..n) = x - y
        x_2_minus_y_2 = int(sum([i ** 2 for i in arr]) - (n * (n + 1) * (2 * n + 1) / 6))  # x^2 - y^2
        x_plus_y = int(x_2_minus_y_2 / x_minus_y)  # (x^2 - y^2) / (x - y) = x + y
        x = int((x_plus_y + x_minus_y) / 2)
        y = x_plus_y - x
return [x, y]
if __name__ == '__main__':
tc=int(input())
while tc > 0:
n=int(input())
arr=list(map(int, input().strip().split()))
ob = Solution()
ans=ob.findTwoElement(arr, n)
print(str(ans[0])+" "+str(ans[1]))
tc=tc-1
```
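A standalone sanity check of the two algebraic identities used above (my addition): for arr = [1, 3, 3] with n = 3, the repeating value is 3 and the missing one is 2.
```python
arr, n = [1, 3, 3], 3
x_minus_y = sum(arr) - n * (n + 1) // 2                                 # 7 - 6  = x - y = 1
x2_minus_y2 = sum(i * i for i in arr) - n * (n + 1) * (2 * n + 1) // 6  # 19 - 14 = x^2 - y^2 = 5
x_plus_y = x2_minus_y2 // x_minus_y                                     # (x^2 - y^2) / (x - y) = 5
x = (x_plus_y + x_minus_y) // 2  # repeating -> 3
y = x_plus_y - x                 # missing   -> 2
print(x, y)  # -> 3 2
```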
#### File: questions/q197_choose_and_swap/code.py
```python
class Solution:
def chooseandswap (self, A):
opt = 'a'
fir = A[0]
arr = [0]*26
for s in A :
arr[ord(s)-97] += 1
i = 0
while i < len(A) :
if opt > 'z' :
break
while opt < fir :
if opt in A :
ans = ""
for s in A :
if s == opt :
ans += fir
elif s == fir :
ans += opt
else :
ans += s
return ans
opt = chr(ord(opt) + 1)
opt = chr(ord(opt) + 1)
while i < len(A) and A[i] <= fir :
i += 1
if i < len(A) :
fir = A[i]
return A
if __name__ == '__main__':
ob = Solution()
t = int (input ())
for _ in range (t):
A = input()
ans = ob.chooseandswap(A)
print(ans)
```
#### File: questions/q198_lru_page_faults/code.py
```python
class Solution:
def pageFaults(self, N, C, pages):
cache_size = 0
page_faults = 0
cache = {i : 0 for i in range(1001)}
end = 0
start = 0
while end < N :
ele = pages[end]
if cache[ele] > 0 :
cache[ele] += 1
else :
cache[ele] = 1
cache_size += 1
page_faults += 1
while cache_size > C :
cache[pages[start]] -= 1
if cache[pages[start]] == 0 :
cache_size -= 1
start += 1
end += 1
return page_faults
if __name__ == '__main__':
t = int (input ())
for _ in range (t):
N = int(input())
pages = input().split()
for itr in range(N):
pages[itr] = int(pages[itr])
C = int(input())
ob = Solution()
print(ob.pageFaults(N, C, pages))
```
#### File: questions/q204_geek_collects_balls/code.py
```python
class Solution:
def maxBalls(self, N, M, a, b):
i = 0
j = 0
s1 = 0
s2 = 0
ans = 0
while i < N and j < M :
if a[i] == b[j] :
while i < N-1 and a[i] == a[i+1] :
s1 += a[i]
i += 1
while j < M-1 and b[j] == b[j+1] :
s2 += b[j]
j += 1
ans += max(s1, s2) + a[i]
i += 1
j += 1
s1 = 0
s2 = 0
elif a[i] < b[j] :
s1 += a[i]
i += 1
elif b[j] < a[i] :
s2 += b[j]
j += 1
if i < N :
s1 += sum(a[i:])
elif j < M :
s2 += sum(b[j:])
ans += max(s1, s2)
return ans
if __name__ == '__main__':
t = int(input())
for _ in range(t):
N, M = [int(x) for x in input().split()]
a = input().split()
b = input().split()
for i in range(N):
a[i] = int(a[i])
for i in range(M):
b[i] = int(b[i])
ob = Solution()
print(ob.maxBalls(N, M, a, b))
```
#### File: questions/q221_box_stacking/code.py
```python
class Solution:
def formArray(self, height, width, length, n) :
"""
Given the dimensions of the boxes, returns an array of all the possible combinations of the boxes by tilting.
Each combination has max of 6 possibilities, and so the returned array will have maximum of 6*n elements, and will be sorted.
Each element will be (area, length, width, height).
"""
# Initialize the array
arr = []
# Fill the array
for i in range(n) :
# Width and Length is in the base
arr.append(((width[i]*length[i]), max(length[i], width[i]), min(length[i], width[i]), height[i]))
# Width and height is in the base
if height[i] != length[i] :
arr.append(((width[i]*height[i]), max(height[i], width[i]), min(height[i], width[i]), length[i]))
# Length and height is in the base
if height[i] != width[i] :
arr.append(((height[i]*length[i]), max(length[i], height[i]), min(length[i], height[i]), width[i]))
# Sort the array
arr.sort()
# Return the sorted array
return arr
def isLessThan(self, ele1, ele2) :
"""
Takes two elements of the formed array, and returns True if `ele1 < ele2` based on the conditions.
If `ele1 < ele2`, it means that `ele1` can be stacked on top of `ele2`.
Each `ele` is (area, length, width, height).
And Say ele1 < ele2 only if :
ele1.area < ele2.area
ele1.length < ele2.length
ele1.width < ele2.width
"""
if (ele1[0] < ele2[0]) and (ele1[1] < ele2[1]) and (ele1[2] < ele2[2]) :
return True
return False
#Your task is to complete this function
#Function should return an integer denoting the required answer
def maxHeight(self,height, width, length, n):
# Form the sorted array of all combinations of the boxes. Sorted in increasing order
# Each element : (base_area, length, width, height)
arr = self.formArray(height, width, length, n)
num_combinations = len(arr)
# Form the overall stacked_heights array
stacked_height = [ele[3] for ele in arr]
max_stacked_height = arr[0][3]
# Loop through all combinations of boxes to find the right combination
for i in range(1, num_combinations) :
for j in range(i-1, -1, -1) :
if self.isLessThan(arr[j], arr[i]) :
stacked_height[i] = max(stacked_height[i], (stacked_height[j] + arr[i][3]))
max_stacked_height = max(max_stacked_height, stacked_height[i])
# Return the maximum height that can be formed
return max_stacked_height
if __name__=='__main__':
t = int(input())
for i in range(t):
n = int(input())
arr = [int(x) for x in input().split()]
i=0
height=[]
width=[]
length=[]
for i in range(0,3*n,3):
height.append(arr[i])
width.append(arr[i+1])
length.append(arr[i+2])
ob=Solution()
print(ob.maxHeight(height, width, length, n))
```
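The textbook test case for this DP (my addition; expected answer 60, assuming the Solution class above is in scope):
```python
# Boxes as (height, width, length): (4,6,7), (1,2,3), (4,5,6), (10,12,32)
print(Solution().maxHeight([4, 1, 4, 10], [6, 2, 5, 12], [7, 3, 6, 32], 4))  # expected 60
```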
#### File: questions/q226_egg_dropping/code.py
```python
class Solution:
#Function to find minimum number of attempts needed in
#order to find the critical floor.
def eggDrop(self,n, k):
        # table[j] = number of floors that can be covered with j eggs using the moves taken so far
table = [0] * (n+1)
i = 0
# While we have not yet reached the required floor's reach, we continue
while table[n] < k :
# We find the new reach if we can take another step
for j in range(n, 0, -1) :
table[j] += table[j-1] + 1
i += 1
return i
import atexit
import io
import sys
if __name__ == '__main__':
test_cases = int(input())
for cases in range(test_cases):
n,k = map(int,input().strip().split())
ob=Solution()
print(ob.eggDrop(n,k))
```
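A worked instance of the moves-based DP above (my addition): with 2 eggs, table[2] after m iterations equals m(m+1)/2, so covering 10 floors needs the smallest m with m(m+1)/2 >= 10, i.e. m = 4.
```python
# Assumes the Solution class above is in scope.
print(Solution().eggDrop(2, 10))  # -> 4, since 4*5/2 = 10
```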
#### File: questions/q227_optimal_game_strategy/dynamic_programming.py
```python
def optimalStrategyOfGame(arr, n):
table = [[(0, 0) for _ in range(n)] for _ in range(n)]
i = n
while i > 0 :
for j in range(i) :
# We fill the r'th row and c'th column in this iteration
r = j
c = j + n - i
if r == c :
# If we are filling the diagonal elements
table[r][c] = (arr[r], 0)
else :
# All other cases
# We choose the first number or arr[r] for this option
option_1 = arr[r] + table[r+1][c][1]
# We choose the last number or arr[c] for this option
option_2 = arr[c] + table[r][c-1][1]
# We fill the table according to the results of these options
if option_1 >= option_2 :
# If taking the first value is favorable, then that value is for player 1
# And the player 2 recieves the favorable position for the remaining array
table[r][c] = (
option_1,
table[r+1][c][0]
)
else :
# If taking the last value is favorable, then that value is for player 1
# And the player 2 recieves the favorable position for the remaining array
table[r][c] = (
option_2,
table[r][c-1][0]
)
# After every iteration of this loop, we fill one less number in the table
# This is because we fill only the upper diagonal portion of the table matrix
i -= 1
#for row in table :
# print(row)
# Return the score of player 1 for the whole array
return table[0][n-1][0]
import atexit
import io
import sys
if __name__ == '__main__':
test_cases = int(input())
for cases in range(test_cases):
n = int(input())
arr = list(map(int,input().strip().split()))
print(optimalStrategyOfGame(arr,n))
```
#### File: questions/q22_prison_labor_dodgers/q22.py
```python
def solution(x, y) :
if len(x) > len(y) :
num = (set(x) - set(y)).pop()
elif len(y) > len(x) :
num = (set(y) - set(x)).pop()
print(num)
solution([14, 27, 1, 4, 2, 50, 3, 1], [2, 4, -4, 3, 1, 1, 14, 27, 50])
```
#### File: questions/q231_shortest_common_supersequence/code.py
```python
class Solution:
    def longestCommonSubseq(self, S1, S2, n, m):  # computes the longest common *subsequence*
table = [[0 for _ in range(m+1)] for _ in range(n+1)]
for i in range(1, n+1) :
for j in range(1, m+1) :
if S1[i-1] == S2[j-1] :
table[i][j] = 1 + table[i-1][j-1]
else :
table[i][j] = max(table[i-1][j], table[i][j-1])
return table[n][m]
#Function to find length of shortest common supersequence of two strings.
def shortestCommonSupersequence(self, X, Y, m, n):
        lcs = self.longestCommonSubseq(X, Y, m, n)
return m + n - lcs
if __name__ == '__main__':
t=int(input())
for tcs in range(t):
X,Y=input().split()
print(Solution().shortestCommonSupersequence(X,Y,len(X),len(Y)))
```
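A worked example of the m + n - LCS formula (my addition): X = "abcd" and Y = "xycd" share the subsequence "cd" (length 2), so the shortest common supersequence has length 4 + 4 - 2 = 6 (e.g. "abxycd").
```python
# Assumes the Solution class above is in scope.
print(Solution().shortestCommonSupersequence("abcd", "xycd", 4, 4))  # -> 6
```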
#### File: questions/q239_k_element_two_sorted_array/code.py
```python
class Solution:
def kthElement(self, arr1, arr2, n, m, k):
# If we are left with one number to choose, we return the minimum of the two arrays
if k == 1 :
if n > 0 and m > 0 :
return min(arr1[0], arr2[0])
if n > 0 :
return arr1[0]
return arr2[0]
if n == 0 :
return arr2[k-1]
if m == 0 :
return arr1[k-1]
# Find the middle point
mid1 = n // 2
mid2 = m // 2
# If the k'th number lies to the right half of the arrays
if mid1 + mid2 + 2 <= k :
# If the middle number of arr1 is greater, then we constrict the search to the right portion of the other array and this array remains the same
if arr1[mid1] > arr2[mid2] :
return self.kthElement(arr1, arr2[mid2+1:], n, m-mid2-1, k-mid2-1)
# On the other hand if the middle element of arr2 is greater, then we only constrict the search to right portion of arr1
else :
return self.kthElement(arr1[mid1+1:], arr2, n-mid1-1, m, k-mid1-1)
# If the k'th number lies to the left half of the arrays
else :
# If the middle number of arr1 is greater, we constrict the search to left half of this array
if arr1[mid1] > arr2[mid2] :
return self.kthElement(arr1[:mid1], arr2, mid1, m, k)
# If the middle number of arr2 is greater, we constrict the search to left half of this array
else :
return self.kthElement(arr1, arr2[:mid2], n, mid2, k)
def main():
T = int(input())
while(T > 0):
sz = [int(x) for x in input().strip().split()]
n, m, k = sz[0], sz[1], sz[2]
a = [int(x) for x in input().strip().split()]
b = [int(x) for x in input().strip().split()]
ob = Solution()
print(ob.kthElement( a, b, n, m, k))
T -= 1
if __name__ == "__main__":
main()
```
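A check on a standard input (my addition): merging [2,3,6,7,9] and [1,4,8,10] gives [1,2,3,4,6,7,8,9,10], whose 5th element is 6.
```python
# Assumes the Solution class above is in scope.
print(Solution().kthElement([2, 3, 6, 7, 9], [1, 4, 8, 10], 5, 4, 5))  # -> 6
```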
#### File: questions/q23_dont_get_volunteered/q23.py
```python
def possible_actions(state) :
actions = []
# Top Left
if (state % 8) > 0 and (state // 8) > 1 :
actions.append((((state // 8) - 2) * 8) + (state % 8) - 1)
# Top Right
if (state % 8) < 7 and (state // 8) > 1 :
actions.append((((state // 8) - 2) * 8) + (state % 8) + 1)
# Bottom Left
if (state % 8) > 0 and (state // 8) < 6 :
actions.append((((state // 8) + 2) * 8) + (state % 8) - 1)
# Bottom Right
if (state % 8) < 7 and (state // 8) < 6 :
actions.append((((state // 8) + 2) * 8) + (state % 8) + 1)
# Left Top
if (state % 8) > 1 and (state // 8) > 0 :
actions.append((((state // 8) - 1) * 8) + (state % 8) - 2)
# Left Bottom
if (state % 8) > 1 and (state // 8) < 7 :
actions.append((((state // 8) + 1) * 8) + (state % 8) - 2)
# Right Top
if (state % 8) < 6 and (state // 8) > 0 :
actions.append((((state // 8) - 1) * 8) + (state % 8) + 2)
# Right Bottom
if (state % 8) < 6 and (state // 8) < 7 :
actions.append((((state // 8) + 1) * 8) + (state % 8) + 2)
# Removing invalid values
for action in list(actions) :
if action < 0 or action > 63 :
actions.remove(action)
return actions
def solution(src, dest) :
if src == dest :
return 0
frontier = [(src, 0)]
visited = []
while len(frontier) > 0 :
step = frontier[0]
frontier = frontier[1:]
visited.append(step[0])
actions = possible_actions(step[0])
if dest in actions :
return (step[1] + 1)
for action in possible_actions(step[0]) :
if action not in visited :
frontier.append((action, step[1]+1))
print(solution(0, 63))
```
#### File: questions/q245_quick_sort/code.py
```python
class Solution:
#Function to sort a list using quick sort algorithm.
def quickSort(self,arr,low,high):
if low < high :
pt = self.partition(arr, low, high)
self.quickSort(arr, low, pt-1)
self.quickSort(arr, pt+1, high)
def partition(self,arr,low,high):
pos = low-1
pivot = high
for i in range(low, high) :
if arr[i] <= arr[pivot] :
pos += 1
arr[pos], arr[i] = arr[i], arr[pos]
arr[pivot], arr[pos+1] = arr[pos+1], arr[pivot]
return pos+1
if __name__ == "__main__":
t=int(input())
for i in range(t):
n=int(input())
arr=list(map(int,input().split()))
Solution().quickSort(arr,0,n-1)
for i in range(n):
print(arr[i],end=" ")
print()
```
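Minimal in-place usage of the Lomuto-partition quicksort above (my addition, assuming the Solution class is in scope):
```python
arr = [4, 1, 3, 9, 7]
Solution().quickSort(arr, 0, len(arr) - 1)
print(arr)  # -> [1, 3, 4, 7, 9]
```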
#### File: questions/q246_generate_ip_address/code.py
```python
class Solution:
def generate(self, s, level, prev) :
"""
Utility function to generate and print IP address from given string.
Parameters
----------
s - the remaining string left to create IP addresses
level - the current level we are in, it ranges from 0 to 4 for IpV4 format
prev - the portion of generated ip address generated until now
"""
# Base Case
if level == self.totalLevels :
# If the string starts with 0 but the number is not zero we return
# If the number is greater than 255, we return
if int(s) >= 256 or (s[0] == '0' and len(s) > 1) :
return
self.ans.append((prev + s))
return
# Take i characters from s for every iteration
for i in range(1, (len(s)-(self.totalLevels - level) + 1)) :
# If the string starts with 0 but the number is not zero we return
if s[0] == '0' and i > 1 :
return
# We proceed only if we have extracted a number in appropriate range for current level
if int(s[:i]) < 256 :
self.generate(s[i:], level+1, (prev + s[:i] + '.'))
def genIp(self, s):
self.ans = []
self.totalLevels = 4
self.generate(s, 1, "")
return self.ans
if __name__=='__main__':
t = int(input())
for i in range(t):
s = input().strip()
res = Solution().genIp(s)
res.sort()
for u in res:
print(u)
```
#### File: questions/q247_word_boggle/code.py
```python
class Node :
"""
This is a Trie Node, and can be used to form a trie.
"""
def __init__(self, numUniqueSyllables = 256):
"""
Used to define the Trie Node.
Parameters
----------
        numUniqueSyllables - Number of unique syllables of the language being used; defaults to 256 so that any single-byte character code from ord() can index directly into the children array.
"""
self.is_leaf = False
self.children = [None] * numUniqueSyllables
self.num_unique_syllables = numUniqueSyllables
self.word_count = 0
self.added = False
def checkPresence(self, character) :
"""
Given a character returns if the character is present in this node's children or not
Parameter
---------
character - the character which needs to be checked
"""
encoded_character = ord(character)
return (self.children[encoded_character] is not None)
def getChildNode(self, character) :
"""
Given a character returns the child with this value.
Parameter
---------
character - the character whose node needs to be returned
"""
encoded_character = ord(character)
return self.children[encoded_character]
class Trie :
def __init__(self):
self.root = Node()
def insertWord(self, word) :
"""
A customized function designed to enter only the capital letters of the words as nodes.
Parameters
----------
word - the word to be inserted into the trie
"""
# Initialze a pointer to point to the root node
cur_node = self.root
for ch in word :
if cur_node.children[ord(ch)] is None :
cur_node.children[ord(ch)] = Node()
cur_node = cur_node.children[ord(ch)]
# Mark the end node as a leaf
cur_node.is_leaf = True
cur_node.word_count += 1
class Solution:
def getCandidatePositions(self, r, c, board) :
"""
Given the current position and the board, determines the candidate positions for the next step
Parameters
----------
r - the row position
c - the column position
board - the board
"""
# Find the dimensions of the board
l, w = len(board), len(board[0])
# List out the possible positions
possible_positions = [
(r-1, c-1), (r-1, c), (r-1, c+1), (r, c-1),
(r, c+1), (r+1, c-1), (r+1, c), (r+1, c+1)
]
# Among these possible positions, find the candidates
candidate_positions = []
for position in possible_positions :
(i, j) = position
if (0 <= i < l) and (0 <= j < w) and (not self.visited[i][j]) :
candidate_positions.append(position)
# Return the list of candidate positions
return candidate_positions
def search(self, position, board, node, s) :
"""
Searches if the character in the current position is present in the child's node or not, if so then recurses to find the remaining part of the word.
Parameters
----------
position - the (row, column) positions of the current location in the board
board - the board
node - the current node in the trie
s - the currently formed string
"""
# Extract the positions
r, c = position
# If the current board position has been marked as visited then return
if self.visited[r][c] :
return
# Check if word can be formed using this character
if node.checkPresence(board[r][c]) :
# Get the child node
child = node.getChildNode(board[r][c])
# If the child node is a leaf node, then add the currently forned string as a found word
if (child.is_leaf) and (not child.added) :
# Add how many ever instances of the word is present in the dictionary
for _ in range(child.word_count) :
self.found_words.append(s+board[r][c])
child.added = True
# Mark the current character in board as visited
self.visited[r][c] = True
for next_position in self.getCandidatePositions(r, c, board) :
self.search(next_position, board, child, s+board[r][c])
# Mark the current character in board as not visited again, as we are backtracking and persuing another path
self.visited[r][c] = False
def wordBoggle(self,board,dictionary):
if len(board) == 0 or len(dictionary) == 0 :
return []
# Create a Trie structure and insert all the words into this
trie = Trie()
for word in dictionary :
trie.insertWord(word)
# Create an array to store the found words
self.found_words = []
# Fetch the co-ordinates of the board
r, c = len(board), len(board[0])
# Create a visited array
self.visited = [[False for _ in range(c)] for _ in range(r)]
# Perform the search
for i in range(r) :
for j in range(c) :
self.search((i, j), board, trie.root, "")
# Returns the found words
return list(self.found_words)
if __name__ == "__main__":
t=int(input())
for _ in range(t):
N=int(input())
dictionary=[x for x in input().strip().split()]
line=input().strip().split()
R=int(line[0])
C=int(line[1])
board=[]
for _ in range(R):
board.append( [x for x in input().strip().split()] )
obj = Solution()
found = obj.wordBoggle(board,dictionary)
        if len(found) == 0:
print(-1)
continue
found.sort()
for i in found:
print(i,end=' ')
print()
```
#### File: questions/q248_rotate_bits/code.py
```python
class Solution:
def rotate(self, N, D):
D = D%16
val1 = ((N << D) % (2 ** 16)) ^ int(N // (2 ** (16 - D)))
#val1 = (N << D) | (N >> (16 - D))
val2 = (N >> D) ^ int((2 ** (16 - D)) * (N % (2 ** D)))
return [val1, val2]
if __name__ == '__main__':
t = int(input())
for _ in range(t):
n, d = input().strip().split(" ")
n, d = int(n), int(d)
ob = Solution()
ans = ob.rotate(n, d)
print(ans[0])
print(ans[1])
```
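The arithmetic in `rotate` is an indirect way of expressing 16-bit rotation. A minimal mask-based cross-check (the helper `rotate16` is hypothetical, and assumes the Solution class above is in scope):
```python
def rotate16(n, d):
    # Rotate a 16-bit value left and right by d positions using shifts and a mask.
    d %= 16
    mask = 0xFFFF
    left = ((n << d) | (n >> (16 - d))) & mask
    right = ((n >> d) | (n << (16 - d))) & mask
    return [left, right]

assert rotate16(229, 3) == Solution().rotate(229, 3)  # both give [1832, 40988]
```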
#### File: questions/q249_rat_in_maze/code.py
```python
class Solution:
def findCandidates(self, arr, n, cur_position) :
x, y = cur_position
# This order resolves the problem of sorting the final results
options = [
(x+1, y, 'D'), (x, y-1, 'L'),
(x, y+1, 'R'), (x-1, y, 'U')
]
candidates = []
for option in options :
if (0 <= option[0] < n) and (0 <= option[1] < n) and (not self.visited[option[0]][option[1]]) and (arr[option[0]][option[1]] != 0) :
candidates.append(option)
return candidates
def findValidPaths(self, arr, n, cur_position, cur_path) :
x, y = cur_position
self.visited[x][y] = True
if x == n-1 == y :
self.validPaths.append(cur_path)
else :
for candidate in self.findCandidates(arr, n, cur_position) :
direction = candidate[2]
self.findValidPaths(arr, n, candidate[:2], cur_path+direction)
self.visited[x][y] = False
def findPath(self, m, n):
# If the first cell is zero we return saying that no path exists
if m[0][0] == 0 :
return []
self.validPaths = []
self.visited = [[False for _ in range(n)] for _ in range(n)]
# Find the paths
self.findValidPaths(m, n, (0, 0), "")
return self.validPaths
#{
# Driver Code Starts
#Initial Template for Python 3
if __name__=='__main__':
t = int(input())
for i in range(t):
n = list(map(int, input().strip().split()))
arr = list(map(int, input().strip().split()))
matrix = [[0 for i in range(n[0])]for j in range(n[0])]
k=0
for i in range(n[0]):
for j in range(n[0]):
matrix[i][j] = arr[k]
k+=1
ob = Solution()
result = ob.findPath(matrix, n[0])
if len(result) == 0 :
print(-1)
else:
for x in result:
print(x,end = " ")
print()
```
#### File: questions/q24_en_route_salute/q24.py
```python
def solution(s) :
rights = 0
salutes = 0
for ch in s :
if ch == '>' :
rights += 1
elif ch == '<' :
salutes += (rights * 2)
return salutes
print(solution("--->-><-><-->-"))
```
#### File: questions/q261_is_sudoku_valid/code.py
```python
class Solution:
def isValid(self, mat):
numbers_row = [{nu : False for nu in range(1, 10)} for _ in range(9)]
numbers_col = [{nu : False for nu in range(1, 10)} for _ in range(9)]
# Check for each row and column
for i in range(9) :
for j in range(9) :
# Check for each row
if mat[i][j] != 0 and numbers_row[i][mat[i][j]] :
return 0
else :
numbers_row[i][mat[i][j]] = True
# Check for each column
if mat[j][i] != 0 and numbers_col[i][mat[j][i]] :
return 0
else :
numbers_col[i][mat[j][i]] = True
# Check for all mini grids
numbers = [{nu : False for nu in range(1, 10)} for _ in range(9)]
ptr = 0
for i in range(0, 9, 3) :
for j in range(0, 9, 3) :
for k in range(9) :
r = i + k // 3
c = j + k % 3
if mat[r][c] != 0 and numbers[ptr][mat[r][c]] :
return 0
else :
numbers[ptr][mat[r][c]] = True
ptr += 1
return 1
#{
# Driver Code Starts
#Initial Template for Python 3
if __name__ == '__main__':
t = int(input())
for _ in range(t):
arr = input().split()
mat = [[0]*9 for x in range(9)]
for i in range(81):
mat[i//9][i%9] = int(arr[i])
ob = Solution()
print(ob.isValid(mat))
# } Driver Code Ends
```
#### File: questions/q263_merge_k_sorted_arrays/using_arrays.py
```python
class Solution:
#Function to merge k sorted arrays.
def mergeKArrays(self, arr, K):
# Initialize the pointers for each array
pointers = [0] * K
# Initialize an array to store the sorted elements
sorted_elements = []
# Add the elements into this array till it is complete with k^2 elements
while len(sorted_elements) < (K*K) :
# Create an array of all the elements to compare
comparing_elements = [(arr[i][pointers[i]], i) for i in range(K) if pointers[i] < K]
# Find the minimum element and the array it belongs to from these elements
(min_ele, min_arr) = min(comparing_elements)
# Increment the pointer of the array with the minimum element
pointers[min_arr] += 1
# Store this minimum element
sorted_elements.append(min_ele)
# Return the sorted elements array
return sorted_elements
if __name__=="__main__":
t=int(input())
for _ in range(t):
n=int(input())
numbers=[[ 0 for _ in range(n) ] for _ in range(n) ]
line=input().strip().split()
for i in range(n):
for j in range(n):
numbers[i][j]=int(line[i*n+j])
        ob = Solution()
merged_list=ob.mergeKArrays(numbers, n)
for i in merged_list:
print(i,end=' ')
print()
```
#### File: questions/q264_prerequisite_task/code.py
```python
class Solution:
def hasCycle(self, v, path) :
"""
Given the path taken until now, and the vertex, checks if there are any cycles
"""
# If the vertex is already present in the path, then cycle is present
if path[v] :
return True
# Mark the current vertex in the path
path[v] = True
# Traverse through all tasks that can be done now
for neighbor in self.graph[v] :
# If the neighbor is present in the current path, there is a cycle
if path[neighbor] :
return True
# Keep searching for cycles
if not self.visited[neighbor] and self.hasCycle(neighbor, path) :
return True
# Mark the current vertex as not in path
path[v] = False
# Mark the current vertex that it has been cleared of no cycles
self.visited[v] = True
# Return that there is no cycle
return False
def isPossible(self,N,prerequisites):
# Initialize an array to serve as a graph
# Each element is a list of prerequisites to the task in index i
self.graph = [[] for _ in range(N)]
# Fill the graph with the pre-requisites
for i, j in prerequisites :
self.graph[j].append(i)
# Perform Cycle detection
self.visited = [False]*N
for i in range(N) :
if not self.visited[i] :
if self.hasCycle(i, [False]*N) :
return False
# If there is no cycle
return True
if __name__ == '__main__':
test_cases = int(input())
for cases in range(test_cases) :
N=int(input())
P=int(input())
prerequisites=[]
for _ in range(P):
pair = [int(x) for x in input().split()]
prerequisites.append(pair)
ob=Solution()
if(ob.isPossible(N,prerequisites)):
print("Yes")
else:
print("No")
```
#### File: questions/q269_maximum_sum_non_negative_subarray/code.py
```python
class Solution:
# @param A : list of integers
# @return a list of integers
def maxset(self, A):
max_sum = -1 * float("inf")
elements = []
n = len(A)
pos = 0
while pos < n :
while pos< n and A[pos] < 0 :
pos += 1
start = pos
s = 0
while pos < n and A[pos] >= 0 :
s += A[pos]
pos += 1
            if s > max_sum :
max_sum = s
elements = A[start : pos]
elif s == max_sum :
if pos-start+1 > len(elements) :
elements = A[start : pos]
elif pos-start+1 == len(elements) and A[start] < elements[0] :
elements = A[start : pos]
return elements
```
#### File: questions/q276_rearrange_array/code.py
```python
class Solution:
def get(self, A, n, i) :
return A[i] % n
# @param A : list of integers
# Modify the array A which is passed by reference.
# You do not need to return anything in this case.
def arrange(self, A):
n = len(A)
for i in range(n) :
original_number = self.get(A, n, i)
A[i] = (self.get(A, n, original_number) * n) + original_number
for i in range(n) :
A[i] = A[i] // n
return A
```
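A quick usage sketch for the encoding trick above: during the first pass each slot holds (new_value * n + old_value), so the old value stays recoverable via modulo until the second pass divides it away. The input here is made up:
```python
arr = [3, 2, 0, 1]              # a permutation of 0..n-1
print(Solution().arrange(arr))  # [1, 0, 3, 2], i.e. each arr[i] becomes the old arr[arr[i]]
```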
#### File: questions/q277_n_digit_number_less_than_k/binary_search.py
```python
class Solution:
def __init__(self):
self.store = {}
# @param A : list of integers
# @param B : integer
# @param C : integer
# @return an integer
def solve(self, A, B, C, level=0):
# If number of digits needed is zero, then we return 0
if B == 0 :
return 0
n = len(A)
if C < 0 :
return 0
# If the array is empty return or not enough elements are present
if n == 0 :
return 0
# If the answer is present in the store, then return it
        if level > 0 and (B, C) in self.store :
return self.store[(B, C)]
# If the number of digits is one, then it is the base case
if B == 1 :
# Find the position until which it is possible to choose
front = 0
end = n-1
mid = -1
while front < end :
mid = (front+end) // 2
if A[mid] < C :
front = mid+1
elif A[mid] > C :
end = mid-1
else :
front = mid-1
end = mid-1
mid = mid-1
return mid+1
"""# Loop till we find a number equal or greater than C
i = -1
while i < n-1 and A[i+1] < C :
i += 1
# Return the number of digits that can be useful
return i+1"""
# If the number of digits required is more, then we perform recursive disintergration of the problem
# Initialize a variable to store the count of all cases
total_count = 0
# Binary search till 10*i is less than C so that another level is possible
start = 0
# In the zero'th level the first number cannot be 0
if level == 0 :
while A[start] == 0 :
start += 1
# Perform Binary Search
front = start
end = n-1
mid = -1
while front < end :
mid = (front+end) // 2
val = ((10**(B-1))*A[mid])
if val < C :
front = mid+1
elif val > C :
end = mid-1
else :
front = mid
end = mid
# Add for the previous elements, they will have full amount of combinations
total_count += (n ** (B-1)) * (mid-start)
# Find for the mid+1 th choice
cur_count = self.solve(A, B-1, C-((10**(B-1))*A[mid]), level+1)
total_count += cur_count
"""while i < n :
if ((10**(B-1))*A[i]) <= C :
cur_count = self.solve(A, B-1, C-((10**(B-1))*A[i]), level+1)
total_count += cur_count
i += 1"""
# Store this value
self.store[(B, C)] = total_count
# Return the total count from all sub-parts
return total_count
#A = [ 0, 1, 2, 3, 4, 5, 7, 8, 9 ]
#B = 9
#C = 51822
#A = [0, 1, 2, 5]
#B = 2
#C = 21
A = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 ]
B = 9
C = 661261993
ob = Solution()
print(ob.solve(A, B, C))
```
#### File: questions/q278_square_root_integer/egg_drop_style.py
```python
class Solution:
# @param A : integer
# @return an integer
def sqrt(self, A):
x = 0
base = 10**3
while base>0:
while (x+base)**2<=A: x+= base
base //= 2
return x
```
#### File: questions/q281_matrix_median/code.py
```python
class Solution:
def findPositionInArray(self, element, array) :
"""
Given an array and a element, returns the count of numbers less than the element.
"""
# If the element is less than the least, we return 0
if element < array[0] :
return 0
# Find the length of the array
n = len(array)
# Perform the binary search
front = 0
rear = n-1
while front < rear :
mid = (front + rear + 1) // 2
if array[mid] <= element :
front = mid
else :
rear = mid - 1
# Return the count
return front + 1
def getCountLessThan(self, mid, A) :
"""
Given a matrix and a element, returns the count of numbers less than the element across all rows.
"""
# Initialize the count
count = 0
# Find the count for each row
for row in A :
count += self.findPositionInArray(mid, row)
# Return this overall count
return count
# @param A : list of list of integers
# @return an integer
def findMedian(self, A):
# Find the dimensions of the matrix
m = len(A)
if m == 0 :
return
n = len(A[0])
# Find the goal position where we will find the median
# We add 1 because we are given that (n*m) is odd
goal = ((m * n) // 2) + 1
# Find the minimum and maximum element in the matrix
min_ele = float("inf")
max_ele = -1 * float("inf")
for row in A :
min_ele = min(min_ele, row[0])
max_ele = max(max_ele, row[-1])
# Perform binary search till we reach the goal position
front = min_ele
rear = max_ele
while front < rear :
# Find the mid position
mid = (front + rear) // 2
# In each row find the count of numbers less than mid
count = self.getCountLessThan(mid, A)
# Compare this count with our goal and make the decisions
if count < goal :
front = mid + 1
else :
rear = mid
return front
```
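A small usage sketch, assuming the Solution class above; every row is sorted and n*m is odd, as the method requires:
```python
A = [[1, 3, 5],
     [2, 6, 9],
     [3, 6, 9]]
print(Solution().findMedian(A))  # 5, the 5th smallest of the 9 values
```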
#### File: questions/q285_divide_integers/code.py
```python
class Solution:
# @param A : integer
# @param B : integer
# @return an integer
def divide(self, dividend, divisor):
INT_MAX = 2**31 - 1
INT_MIN = -2**31
res = 0
p = abs(dividend)
q = abs(divisor)
if divisor == 0 or (divisor == 1 and dividend >= INT_MAX) :
return INT_MAX
if dividend <= INT_MIN and divisor == -1 :
return INT_MAX
if abs(divisor) == 1 :
return dividend * divisor
while p >= q :
c = 0
while p > (q << c) :
c += 1
res += 1 << (c -1)
p -= q << (c - 1)
if (dividend > 0 and divisor > 0) or (dividend < 0 and divisor < 0) :
return res
else :
return -res
```
#### File: questions/q286_single_number_among_thrice/xor_unsing_bits.py
```python
class Solution:
def xorBaseThree(self, a, b) :
"""
Given two numbers returns their xor base 3
"""
ans = 0
power = 0
while a > 0 or b > 0 :
ans += ((3**power) * (((a%3) + (b%3)) % 3))
power += 1
a = a // 3
b = b // 3
#print("XOR", a, b, ans)
return ans
# @param A : tuple of integers
# @return an integer
def singleNumber(self, A):
res = None
for num in A :
if res is None :
res = num
else :
res = self.xorBaseThree(res, num)
#print(res)
return res
```
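The digit-wise sum mod 3 cancels every value that appears exactly three times, so only the single value survives. A tiny sanity check, assuming the Solution class above (non-negative inputs only):
```python
A = (5, 5, 5, 7, 7, 7, 42)
print(Solution().singleNumber(A))  # 42
```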
#### File: questions/q286_single_number_among_thrice/xor_using_strings.py
```python
class Solution:
def __init__(self) :
self.store_base3 = {}
def convertToBaseThree(self, n) :
"""
Given a number returns its base 3 format
"""
if n in self.store_base3 :
return self.store_base3[n]
s = ""
num = n
while num > 0 :
a = num % 3
s = str(a) + s
num = num // 3
self.store_base3[n] = s
#print("base conversion", n, s)
return s
def xorBaseThree(self, a, b) :
"""
Given two base 3 strings, returns its XOR to base 3
"""
s = ""
i = len(a) - 1
j = len(b) - 1
while i >= 0 or j >= 0 :
if i >= 0 and j >= 0 :
temp = (int(a[i]) + int(b[j])) % 3
elif i >= 0 :
temp = int(a[i])
else :
temp = int(b[j])
s = str(temp) + s
i -= 1
j -= 1
#print("XOR", a, b, s)
return s
# @param A : tuple of integers
# @return an integer
def singleNumber(self, A):
res = ""
for num in A :
if res == "" :
res = self.convertToBaseThree(num)
else :
res = self.xorBaseThree(res, self.convertToBaseThree(num))
res = int(res, 3)
return res
```
#### File: questions/q287_minimum_xor_value/code.py
```python
class Solution:
# @param A : list of integers
# @return an integer
def findMinXor(self, A):
n = len(A)
A.sort()
min_val = float("inf")
for i in range(n-1) :
min_val = min(min_val, A[i]^A[i+1])
return min_val
```
#### File: questions/q289_integer_to_roman/code.py
```python
class Solution:
# @param A : integer
# @return a strings
def intToRoman(self, A):
integers = [1000, 500, 100, 50, 10, 5, 1]
romans = ["M", "D", "C", "L", "X", "V", "I"]
num = A
s = ""
i = 0
while i < 7 and num > 0 :
digit = num // integers[i]
if 0 < digit < 4 :
s += romans[i]*digit
elif digit == 4 :
s += romans[i] + romans[i-1]
elif digit == 5 :
s += romans[i-1]
elif 5 < digit < 9 :
s += romans[i-1] + romans[i]*(digit - 5)
elif digit == 9 :
s += romans[i] + romans[i-2]
num = num % integers[i]
i += 2
return s
```
#### File: questions/q291_insert_front_to_form_palindrome/using_loops.py
```python
class Solution:
# @param A : string
# @return an integer
def solve(self, A):
if len(A) <= 1:
return 0
i, j, j_start = 0, len(A)-1, len(A)-1
while i < j:
if A[i] == A[j]:
i += 1
j -= 1
else:
j = j_start - 1
j_start = j
i = 0
return len(A) - 1 - j_start
```
#### File: questions/q292_pair_with_given_difference/simple_storage.py
```python
class Solution:
def findPair(self, arr, L,N):
store = set()
for num in arr :
if num in store :
return True
store.add(num - N)
store.add(num + N)
return False
if __name__ == '__main__':
t = int(input())
for _ in range(t):
L,N = [int(x) for x in input().split()]
arr = [int(x) for x in input().split()]
solObj = Solution()
if(solObj.findPair(arr,L, N)):
print(1)
else:
print(-1)
```
#### File: questions/q293_three_number_sum/code.py
```python
class Solution:
# @param A : list of integers
# @param B : integer
# @return an integer
def threeSumClosest(self, A, B):
A.sort()
n = len(A)
min_diff = float("inf")
closest_value = float("inf")
for i in range(n-2) :
front = i+1
rear = n-1
while front < rear :
cur_sum = A[i] + A[front] + A[rear]
if cur_sum == B :
return cur_sum
if abs(B - cur_sum) < min_diff :
min_diff = abs(B - cur_sum)
closest_value = cur_sum
if cur_sum < B :
front += 1
else :
rear -= 1
return closest_value
```
#### File: questions/q294_three_sum_zero/two_pointers.py
```python
class Solution:
# @param A : list of integers
# @return a list of list of integers
def threeSum(self, A):
ans = set()
A.sort()
n = len(A)
for i in range(n-2) :
front = i+1
rear = n-1
while front < rear :
cur_sum = A[i] + A[front] + A[rear]
if cur_sum == 0 :
ans.add((A[i], A[front], A[rear]))
front += 1
elif cur_sum < 0 :
front += 1
else :
rear -= 1
ans = list(ans)
ans.sort()
return ans
```
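A usage sketch for the sorted two-pointer search above; the set deduplicates triplets reached from different anchor indices:
```python
print(Solution().threeSum([-1, 0, 1, 2, -1, -4]))  # [(-1, -1, 2), (-1, 0, 1)]
```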
#### File: questions/q305_largest_rectangle_histogram/code.py
```python
class Solution:
# @param A : list of integers
# @return an integer
def largestRectangleArea(self, A):
height = A
height.append(0)
stack = [-1]
ans = 0
for i in range(len(height)):
while height[i] < height[stack[-1]]:
h = height[stack.pop()]
w = i - stack[-1] - 1
ans = max(ans, h * w)
stack.append(i)
return ans
```
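A quick usage sketch for the monotonic-stack solver above. Note that it appends a sentinel 0 to the input list in place, so pass a copy if the original list must survive:
```python
heights = [2, 1, 5, 6, 2, 3]
print(Solution().largestRectangleArea(list(heights)))  # 10 (the bars of height 5 and 6)
```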
#### File: questions/q307_gray_code/code.py
```python
class Solution:
def __init__(self) :
self.store = []
def recursiveTraverse(self, s, pos) :
"""
Given a character array and a pos, changes bit in that position
"""
if pos >= len(s) :
return
self.recursiveTraverse(s, pos+1)
if s[pos] == '0' :
s[pos] = '1'
else :
s[pos] = '0'
self.store.append(int("".join(s),2))
self.recursiveTraverse(s, pos+1)
# @param A : integer
# @return a list of integers
def grayCode(self, A):
s = ['0']*A
self.store.append(int("".join(s),2))
self.recursiveTraverse(s, 0)
return self.store
```
#### File: questions/q313_fraction/code.py
```python
class Solution:
# @param numerator : integer
# @param denominator : integer
# @return a string
def fractionToDecimal(self, n, d):
# If result is a whole number
if n % d == 0 :
return str(n // d)
# Initialize variable to store result
ans = ''
# Prepare the sign
if (abs(n)/n) * (abs(d)/d) < 0 :
ans += '-'
n = abs(n)
d = abs(d)
# Add the integer part (left part to the decimal point)
ans += str(n // d) + '.'
n = n % d
# Now n < d, and we find the fractional value
# Check for cycles (recurring decimals) using fast and slow pointers
slow = n
fast = n
while fast != 0 :
slow = (slow * 10) % d
fast = (fast * 10) % d
if fast != 0 :
fast = (fast * 10) % d
if fast == slow :
break
if fast == 0 :
# No cycle in the list
slow = n
while slow != 0 :
ans += str((slow * 10) // d)
slow = (slow * 10) % d
else :
# If a cycle is present
# Save the pre-cycle elements
slow = n
while slow != fast :
ans += str((slow * 10) // d)
slow = (slow * 10) % d
fast = (fast * 10) % d
# Now add the recurring portion
ans += '('
while True :
ans += str((slow * 10) // d)
slow = (slow * 10) % d
if slow == fast :
break
ans += ')'
return ans
ob = Solution()
A = -1
B = -2147483648
print(ob.fractionToDecimal(A, B))
```
#### File: questions/q316_circle_drawing/code.py
```python
def dist(n, pos) :
i, j = pos
return ((i-n)**2 + (j-n)**2)
n = int(input())
matrix = [[' ' for _ in range(2*n + 1)] for _ in range(2*n + 1)]
for i in range(2*n+1) :
for j in range(2*n+1) :
if dist(n, (i, j)) <= n**2 + 1 :
matrix[i][j] = '.'
for row in matrix :
print(" ".join(row))
```
#### File: questions/q318_first_n_numbers_xor/code.py
```python
def computeXOR(n) :
# if n is multiple of 4
if n % 4 == 0 :
return n
# If n % 4 gives remainder 1
if n % 4 == 1 :
return 1
# If n%4 gives remainder 2
if n % 4 == 2 :
return n + 1
# If n%4 gives remainder 3
return 0
n = int(input())
print(computeXOR(n))
```
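A brute-force check of the n % 4 pattern, assuming computeXOR above is in scope; the XOR of 1..n cycles with period 4 because each pair (2k, 2k+1) differs only in the last bit:
```python
from functools import reduce
for k in range(1, 100):
    assert computeXOR(k) == reduce(lambda a, b: a ^ b, range(1, k + 1))
```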
#### File: questions/q322_longest_subarray_length/code.py
```python
class Solution:
# @param A : list of integers
# @return an integer
def solve(self, A):
summ = 0
n = len(A)
table = {}
max_leng = 0
for i, num in enumerate(A) :
if num == 0 :
summ -= 1
else :
summ += 1
if summ == 1 :
max_leng = i+1
if summ not in table :
table[summ] = i
if summ-1 in table :
max_leng = max(max_leng, (i - table[summ-1]))
return max_leng
```
#### File: questions/q324_longest_increasing_decreasing_subsequence/memoization.py
```python
class Solution:
def forward(self, A, ind) :
# If we are out of bounds, we return the current count
if ind >= len(A) :
return 0
# If present in the store, we return it
if self.forward_store[ind] is not None :
return self.forward_store[ind]
count = 0
for i in range(ind) :
if A[i] < A[ind] :
count = max(
count,
self.forward(A, i)
)
count += 1
self.forward_store[ind] = count
return count
def reverse(self, A, ind) :
# If we are out of bounds, we return the current count
if ind >= len(A) :
return 0
# If present in the store, we return it
if self.reverse_store[ind] is not None :
return self.reverse_store[ind]
count = 0
for i in range(ind+1, len(A)) :
if A[i] < A[ind] :
count = max(
count,
self.reverse(A, i)
)
count += 1
self.reverse_store[ind] = count
return count
# @param A : tuple of integers
# @return an integer
def longestSubsequenceLength(self, A):
n = len(A)
self.forward_store = [None]*n
self.reverse_store = [None]*n
for i in range(n) :
if self.forward_store[i] is None :
self.forward(A, i)
if self.reverse_store[i] is None :
self.reverse(A, i)
count = 0
for i in range(n) :
count = max(
count,
self.forward_store[i] + self.reverse_store[i] - 1
)
#print(self.forward_store)
#print(self.reverse_store)
return count
```
#### File: questions/q326_repeating_subsequence/code.py
```python
class Solution:
# @param A : string
# @return an integer
def anytwo(self, A):
n = len(A)
if n <= 1 :
return 0
table = [[0 for _ in range(n+1)] for _ in range(n+1)]
for i in range(1, n+1) :
for j in range(1, n+1) :
if A[i-1] == A[j-1] and i != j :
table[i][j] = 1 + table[i-1][j-1]
elif i != j :
table[i][j] = max(table[i-1][j], table[i][j-1])
if table[i][j] > 1 :
return 1
return 0
```
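A usage sketch for the check above: it computes an LCS of the string with itself while forbidding matches at the same index, so any table value above 1 means some subsequence of length at least 2 repeats:
```python
ob = Solution()
print(ob.anytwo("abab"))  # 1: "ab" occurs twice as a subsequence
print(ob.anytwo("abc"))   # 0: nothing repeats
```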
#### File: questions/q336_bst_preorder_verification/code.py
```python
class Solution:
def canRepresentBST(self, arr, N):
stack = []
root = float("-inf")
for i in range(N) :
if arr[i] < root :
return 0
while len(stack) > 0 and stack[-1] < arr[i] :
root = stack.pop()
stack.append(arr[i])
return 1
import sys
sys.setrecursionlimit(10**6)
if __name__ == '__main__':
t = int(input())
for _ in range (t):
N = int(input())
arr = input().split()
for itr in range(N):
arr[itr] = int(arr[itr])
ob = Solution()
print(ob.canRepresentBST(arr, N))
```
#### File: questions/q339_robot_fair/code.py
```python
def shiftLeft(direction) :
left_shift = {
'N' : 'W',
'E' : 'N',
'S' : 'E',
'W' : 'S'
}
return left_shift[direction]
def shiftRight(direction) :
right_shift = {
'N' : 'E',
'E' : 'S',
'S' : 'W',
'W' : 'N'
}
return right_shift[direction]
def makeMove(position, direction) :
if direction == 'N' :
return (position[0]-1, position[1])
elif direction == 'E' :
return (position[0], position[1]+1)
elif direction == 'W' :
return (position[0], position[1]-1)
elif direction == 'S' :
return (position[0]+1, position[1])
def validateMove(position, matrix) :
r, c = len(matrix), len(matrix[0])
if (0 <= position[0] < r) and (0 <= position[1] < c) and (matrix[position[0]][position[1]] is None) :
return True
return False
def findPath(matrix, position, direction, path, final_positions) :
if len(path) == 0 :
final_positions.add(tuple(position))
#print("end", position)
return
#print(position, path, direction)
matrix[position[0]][position[1]] = 'Y'
cur_option = path[0]
if cur_option == 'L' :
findPath(matrix, position, shiftLeft(direction), path[1:], final_positions)
elif cur_option == 'R' :
findPath(matrix, position, shiftRight(direction), path[1:], final_positions)
elif cur_option == 'F' :
new_position = makeMove(position, direction)
if validateMove(new_position, matrix) :
findPath(matrix, new_position, direction, path[1:], final_positions)
matrix[position[0]][position[1]] = None
row, col = map(int, input().strip().split())
matrix = []
for i in range(row) :
cur_row = [None if ch == '.' else ch for ch in list(input().strip().split())]
matrix.append(cur_row)
initial_position = list(map(int, input().strip().split()))
num_path = int(input())
path = input().strip()
final_positions = set()
available_directions = ['N', 'S', 'E', 'W']
for dire in available_directions :
findPath(matrix, initial_position, dire, path, final_positions)
final_positions = list(final_positions)
final_positions.sort()
for x, y in final_positions :
print(x, y)
```
#### File: questions/q347_thesaurus/code.py
```python
def populateCounts(end) :
"""
Given the maximum number through which the table needs to be populated, we form the table.
Parameters
----------
end - the extent of continuous missing letters in string (or the length of the table needed)
Return
------
table - with i'th index pointing to the number of possible combinations possible for 'i' missing letters
"""
# Initialize a table
# Element at index i : (edge_same[i], edge_different[i])
table = [(None, None), (25, 24)]
# Populate the table till the given number
for i in range(2, end + 1) :
table.append((
(25 * table[i-1][1]),
((24 * table[i-1][1]) + (1 * table[i-1][0]))
))
# Return the populated table
return table
def getMissingConfiguration(s) :
"""
    Given a string, returns the counts of consecutive runs of missing ('?') characters in the string, as a list.
    Each entry also carries a flag indicating whether the edge characters around the run are equal.
"""
# Initialize variables for computing and storing the values
missing_counts = []
n = len(s)
# Iterate through the loop and find all the missing letter configurations
i = 1
while i < n-1 :
if s[i] == '?' :
c = 0
left_edge = s[i-1]
while i < n-1 and s[i] == '?' :
c += 1
i += 1
right_edge = s[i]
missing_counts.append((
                c, (left_edge == right_edge)
))
else :
i += 1
    # Return the missing configuration
return missing_counts
def solveInterrior(s) :
"""
Given a string with defined edge characters, we find the number of possible ways in which we can fill the blanks.
Assumption : First and last characters are filled
Parameters
----------
s - the string with defined edge letters
Return
------
count - the total number of possible ways to fill the blanks for this edge defined string
"""
# Get the missing letters configuration from processing the string
missing_count_configuration = getMissingConfiguration(s)
# Initialize a variable for storing the overall count
total_count = 1
if len(missing_count_configuration) > 0 :
        # Find the maximum number of consecutive missing characters
max_missing_count = max(missing_count_configuration, key=lambda x : x[0])[0]
# Fill the DP table
table = populateCounts(max_missing_count)
# Compute for every missing configuration
for (num_missing, same_edge) in missing_count_configuration :
if same_edge :
total_count *= table[num_missing][0]
else :
total_count *= table[num_missing][1]
return total_count
def solve(s) :
"""
Given a string with blanks, returns the number of possible ways to fill the blanks with some conditions
Parameter
---------
s - the string
Return
------
total_count - the total number of possible ways to fill the blanks
"""
if len(s) == 0 :
return 0
if len(s) == 1 :
if s == '?' :
return 26
return 1
# Check for repeated characters
n = len(s)
for i in range(1, n) :
if (s[i] != '?') and (s[i-1] != '?') and (s[i] == s[i-1]) :
return 0
# First and last characters filled
if (s[0] != '?') and (s[-1] != '?') :
# If first and last characters are not equal
if (s[0] != s[-1]) :
return 0
# If first and last characters are equal
return solveInterrior(s)
# If only first character is filled
if s[0] != '?' :
# Copy the first character to the last
s = s[:-1] + s[0]
return solveInterrior(s)
# If only the last character is filled
if s[-1] != '?' :
# Copy the last character to the first
s = s[-1] + s[1:]
return solveInterrior(s)
# If both first and last characters are missing
# If the string is just two characters long, and both are missing
if s == '??' :
return 0
# Add the 2nd and the 2nd last characters if they are empty
# The edge letters cannot be equal to these
avoid_sets = set()
if s[1] != '?' :
avoid_sets.add(s[1])
if s[-2] != '?' :
avoid_sets.add(s[-2])
# Variable to store the overall count
total_count = 0
# For every other letter possible, find the count
for i in range(97, 123) :
        ch = chr(i)  # chr(), not ord(): we need the letter itself for the membership test and concatenation
if ch not in avoid_sets :
total_count += solveInterrior(ch + s[1:-1] + ch)
# Return the total count
return total_count
print(solve("abcd"))
print(solve("abc?"))
print(solve("a?za"))
print(solve("abca"))
print(solve("a??ba"))
print(solve("a???c?b?"))
print(solve("a????cb?"))
print(solve("a???c??b?"))
```
#### File: questions/q351_largest_BST_subtree_size/code.py
```python
def recurse(root) :
if root is None :
return (None, None, 0)
if root.left is None and root.right is None :
return (root.data, root.data, 1)
if root.left and root.right :
left = False
left_part = recurse(root.left)
(l1, r1, s1) = left_part
if r1 is not None and r1 < root.data :
left = True
right = False
right_part = recurse(root.right)
(l2, r2, s2) = right_part
if l2 is not None and l2 > root.data :
right = True
if left and right :
return (l1, r2, s1+s2+1)
return (None, None, max(s1, s2))
if root.left :
left_part = recurse(root.left)
(l1, r1, s1) = left_part
if r1 is not None and r1 < root.data :
return (l1, root.data, s1+1)
return (None, None, s1)
if root.right :
right_part = recurse(root.right)
(l2, r2, s2) = right_part
if l2 is not None and l2 > root.data :
return (root.data, r2, s2+1)
return (None, None, s2)
# Return the size of the largest sub-tree which is also a BST
def largestBst(root):
_, _, size = recurse(root)
return size
import sys
sys.setrecursionlimit(1000000)
from collections import deque
# Tree Node
class Node:
def __init__(self, val):
self.right = None
self.data = val
self.left = None
# Function to Build Tree
def buildTree(s):
# Corner Case
if (len(s) == 0 or s[0] == "N"):
return None
# Creating list of strings from input
# string after spliting by space
ip = list(map(str, s.split()))
# Create the root of the tree
root = Node(int(ip[0]))
size = 0
q = deque()
# Push the root to the queue
q.append(root)
size = size + 1
# Starting from the second element
i = 1
while size > 0 and i < len(ip):
# Get and remove the front of the queue
currNode = q[0]
q.popleft()
size = size - 1
# Get the current node's value from the string
currVal = ip[i]
# If the left child is not null
if (currVal != "N"):
# Create the left child for the current node
currNode.left = Node(int(currVal))
# Push it to the queue
q.append(currNode.left)
size = size + 1
# For the right child
i = i + 1
if (i >= len(ip)):
break
currVal = ip[i]
# If the right child is not null
if (currVal != "N"):
# Create the right child for the current node
currNode.right = Node(int(currVal))
# Push it to the queue
q.append(currNode.right)
size = size + 1
i = i + 1
return root
if __name__ == "__main__":
t = int(input())
for _ in range(0, t):
s = input()
root = buildTree(s)
print(largestBst(root))
```
#### File: questions/q359_preorder_to_binary_tree/code.py
```python
def constructTree(pre, preLN, n):
stack = []
# Add the first node as the root node
root = Node(pre[0])
stack.append(root)
i = 1
while i < n :
node = Node(pre[i])
while len(stack) > 0 and stack[-1].left and stack[-1].right :
stack.pop()
if stack[-1].left is None :
stack[-1].left = node
else :
stack[-1].right = node
if preLN[i] == 'N' :
stack.append(node)
i += 1
return root
class Node:
def __init__(self,val):
self.data = val
self.left = None
self.right = None
def printInorder(root):
if not root:
return
printInorder(root.left)
print(root.data,end=' ')
printInorder(root.right)
if __name__ == '__main__':
test_cases = int(input())
for cases in range(test_cases):
n = int(input()) # number of nodes in tree
pre = list(map(int, input().strip().split())) # nodes
preln=list(map(str, input().strip().split())) # leaf or not
# construct the tree according to given list
root=constructTree(pre, preln, n)
printInorder(root)
print()
```
#### File: questions/q372_best_time_buy_sell_stock_multiple_transactions/code.py
```python
class Solution:
def maxProfit(self, prices) -> int:
profit = 0
n = len(prices)
i = 0
while i < n :
# Find the lowest price to buy
while i+1 < n and prices[i] > prices[i+1] :
i += 1
buy = i
# Find the highest price to sell
while i+1 < n and prices[i] < prices[i+1] :
i += 1
sell = i
# Add the profit of our current transaction
profit += (prices[sell] - prices[buy])
i += 1
# Return the overall profit obtained
return profit
```
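A quick usage sketch for the valley-to-peak scan above; summing every ascent equals taking every profitable transaction:
```python
print(Solution().maxProfit([7, 1, 5, 3, 6, 4]))  # 7 = (5 - 1) + (6 - 3)
```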
#### File: questions/q381_row_column_sorted_matrix_search/code.py
```python
class Solution:
#Function to search a given number in row-column sorted matrix.
def search(self,matrix, n, m, x):
i = 0
j = m-1
while i < n and j >= 0 :
if matrix[i][j] < x :
# Ignore the row, and proceed to the next
i += 1
elif matrix[i][j] > x :
# Ignore the column, and proceed to the previous
j -= 1
else :
return True
return False
if __name__ == '__main__':
t = int(input())
for _ in range(t):
size = input().strip().split()
r = int(size[0])
c = int(size[1])
line = input().strip().split()
matrix = [ [0 for _ in range(c)] for _ in range(r) ]
for i in range(r):
for j in range(c):
matrix[i][j] = int( line[i*c+j] )
target = int(input())
obj = Solution()
if (obj.search(matrix,r,c,target)):
print(1)
else:
print(0)
```
#### File: questions/q39_chef_and_card_game/q39.py
```python
def findsum(n) :
if n < 10 :
return n
else :
summ = 0
while(n > 0) :
a = n % 10
summ += a
n = int(n // 10)
return summ
test = int(input())
for tes in range(test) :
n = int(input())
points = [0, 0]
for round in range(n) :
a, b = map(int, input().strip().split())
a = findsum(a)
b = findsum(b)
if a > b :
points[0] += 1
elif b > a :
points[1] += 1
else :
points[0] += 1
points[1] += 1
if points[0] > points[1] :
print("0", points[0])
elif points[1] > points[0] :
print("1", points[1])
else :
print("2", points[0])
```
#### File: questions/q48_unit_gcd/q48.py
```python
"""
def gcd(a, b):
if (b == 0):
return a
return gcd(b, a%b)
testcase = int(input())
for tes in range(testcase) :
n = int(input())
arr = []
for i in range(2, n+1, 2) :
arr.append([i])
leng = len(arr)
for i in range(1, n+1, 2) :
flag1 = False
for j in range(leng) :
flag2 = True
for k in arr[j] :
if gcd(i, k) != 1 and i!=1 and k!=1 :
flag2 = False
break
if flag2 :
arr[j].append(i)
flag1 = True
break
if not flag1 :
arr.append([i])
leng += 1
print(len(arr))
for ele in arr :
print(len(ele), end=' ')
for i in ele :
print(i, end=' ')
print()
"""
# Method 3
"""
def gcd(a, b):
while True :
if (b == 0):
return a
c = b
b = a%b
a = c
testcase = int(input())
for tes in range(testcase) :
n = int(input())
arr = []
left = [i for i in range(3, n+1, 2)]
for i in range(2, n+1, 2) :
arr.append([i])
leng = len(arr)
if n > 1 :
arr[0].append(1)
if n == 1 :
arr.append([1])
num = 0
np = 3
while len(left) > 0 :
if num == 0 or num > n :
num = left[0]
pnum = num
i = 0
p = np
np += 2
if num in left :
left.remove(num)
flag1 = False
while i<len(arr) :
flag2 = True
for j in arr[i] :
if num%j==0 or ( j!=1 and gcd(num, j) != 1) :
flag2 = False
break
if flag2 :
arr[i].append(num)
num = (p*pnum)
p += 2
flag1 = True
break
i += 1
if not flag1 :
arr.append([i])
num = (p*pnum)
p += 2
else :
num = (p*pnum)
p += 2
print(len(arr))
for ele in arr :
print(len(ele), end=' ')
for i in ele :
print(i, end=' ')
print()
"""
```
#### File: questions/q66_median_sorted_array/q66.py
```python
class Solution:
def findMedianSortedArrays(self, nums1, nums2) :
nums = []
while (len(nums1) and len(nums2)) :
if nums1[0] < nums2[0] :
nums.append(nums1[0])
nums1 = nums1[1:]
else :
nums.append(nums2[0])
nums2 = nums2[1:]
if len(nums1) :
nums += nums1
else :
nums += nums2
if len(nums) % 2 == 0 :
median = (nums[(len(nums) // 2) - 1] + nums[len(nums) // 2]) / 2
else :
median = nums[len(nums) // 2]
return median
#%%
class Solution1:
def findMedianSortedArrays(self, nums1, nums2) :
m, n = len(nums1), len(nums2)
# Ensures that (n, nums2) is the longer array
if m > n :
m, nums1, n, nums2 = n, nums2, m, nums1
# If one array is empty
if m == 0 :
if n % 2 == 0 :
return (nums2[n // 2] + nums2[(n // 2) - 1]) / 2
return nums2[n // 2]
left_half_length = (m + n + 1) // 2
# nums1 can contribute to either 0 up until all of its elements to the left half
aMinCount = 0
aMaxCount = m
while (aMinCount <= aMaxCount) :
aCount = aMinCount + int((aMaxCount - aMinCount) / 2)
bCount = left_half_length - aCount
if aCount > 0 and nums1[aCount - 1] > nums2[bCount] :
aMaxCount = aCount - 1
elif aCount < m and nums2[bCount - 1] > nums1[aCount] :
aMinCount = aCount + 1
else :
if aCount == 0 :
leftHalfEnd = nums2[bCount - 1]
elif bCount == 0 :
leftHalfEnd = nums1[aCount - 1]
else :
leftHalfEnd = max(nums1[aCount - 1], nums2[bCount - 1])
if (m + n) % 2 :
# If the length of merged array is odd
return (leftHalfEnd)
else :
if aCount == m :
rightHalfStart = nums2[bCount]
elif bCount == n :
rightHalfStart = nums1[aCount]
else :
rightHalfStart = min(nums1[aCount], nums2[bCount])
return (leftHalfEnd + rightHalfStart) / 2.0
```
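A tiny cross-check between the two approaches above: the O(m+n) merge in Solution and the O(log(min(m, n))) partition search in Solution1 should agree. The inputs here are made up:
```python
a, b = [1, 3], [2, 4, 5]
print(Solution().findMedianSortedArrays(a, b))   # 3
print(Solution1().findMedianSortedArrays(a, b))  # 3
```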
#### File: questions/q69_palindrome_number/brute_force.py
```python
class Solution:
def isPalindrome(self, x: int) -> bool:
if x < 0 :
return False
arr = []
while x > 0 :
arr.append(x % 10)
x = x // 10
right_side_start = (len(arr) // 2) + (0 if len(arr)%2==0 else 1)
left = (len(arr) // 2) - 1
for i in range(right_side_start, len(arr)) :
if left < 0 or arr[left] != arr[i] :
return False
left -= 1
return True
```
#### File: questions/q71_string_to_integer/q71.py
```python
class Solution:
def myAtoi(self, s: str) -> int:
i = 0
num = 0
while i < len(s) and s[i] == ' ' :
i += 1
sign = 1
if i < len(s) and s[i] == '-' :
sign = -1
i += 1
elif i < len(s) and s[i] == '+' :
sign = 1
i += 1
while i < len(s) and 48 <= ord(s[i]) <= 57 :
num = num*10 + ord(s[i])-48
i += 1
if sign == 1 and num > (2**31 - 1) :
return (2**31 - 1)
elif sign == -1 and num > (2**31) :
return -1*(2**31)
return num*sign
```
#### File: questions/q76_strongly_connected_components/kosaraju.py
```python
from collections import defaultdict
def DFS(adj, vertex, visited, stack) :
if visited[vertex] :
return
visited[vertex] = True
for i in adj[vertex] :
if not visited[i] :
DFS(adj, i, visited, stack)
stack.append(vertex)
def reverse_graph(adj, V) :
opp_graph = {}
for u in adj :
if u not in opp_graph :
opp_graph[u] = []
for v in adj[u] :
if v in opp_graph :
opp_graph[v].append(u)
else :
opp_graph[v] = [u]
return opp_graph
def DFS2(adj, vertex, visited) :
visited[vertex] = True
for v in adj[vertex] :
if not visited[v] :
DFS2(adj, v, visited)
def countSCCs (adj, V):
visited = [False for i in range(V)]
stack = []
for i in range(V) :
DFS(adj, i, visited, stack)
reversed_adj = reverse_graph(adj, V)
visited = [False for i in range(V)]
num_SCC = 0
while len(stack) > 0 :
vertex = stack[-1]
stack = stack[:-1]
if not visited[vertex] :
DFS2(reversed_adj, vertex, visited)
num_SCC += 1
return num_SCC
def creategraph(e, arr, graph) :
    # e is the number of edges; arr lists them as flattened (u, v) pairs
    i = 0
    while i < 2 * e :
graph[arr[i]].append(arr[i+1])
i += 2
t = int(input())
for i in range(t):
n,e = list(map(int, input().strip().split()))
arr = list(map(int, input().strip().split()))
graph = defaultdict(list)
creategraph(e, arr, graph)
print (countSCCs(graph, n))
```
#### File: questions/q87_inversion_array/brute_force.py
```python
def inversionCount(a,n):
count = 0
for i in range(n) :
for j in range(i) :
if a[i] < a[j] :
count += 1
return count
import atexit
import io
import sys
_INPUT_LINES = sys.stdin.read().splitlines()
input = iter(_INPUT_LINES).__next__
_OUTPUT_BUFFER = io.StringIO()
sys.stdout = _OUTPUT_BUFFER
@atexit.register
def write():
sys.__stdout__.write(_OUTPUT_BUFFER.getvalue())
if __name__=='__main__':
t = int(input())
for tt in range(t):
n = int(input())
a = list(map(int, input().strip().split()))
print(inversionCount(a,n))
```
#### File: questions/q87_inversion_array/code.py
```python
def merge_sort_and_count(arr, start, end) :
count = 0
if start >= end :
return 0
# Splitting Process
mid = (start + end) // 2
count += merge_sort_and_count(arr, start, mid)
count += merge_sort_and_count(arr, mid+1, end)
# Merging Process
temp = []
i = start
j = mid+1
while i <= mid and j <= end :
if arr[i] > arr[j] :
count += mid-i+1
temp.append(arr[j])
j += 1
else :
temp.append(arr[i])
i += 1
while i <= mid :
temp.append(arr[i])
i += 1
while j <= end :
temp.append(arr[j])
j += 1
for i in range(len(temp)) :
arr[start+i] = temp[i]
return count
def inversionCount(a,n):
count = merge_sort_and_count(a, 0, n-1)
return count
import atexit
import io
import sys
_INPUT_LINES = sys.stdin.read().splitlines()
input = iter(_INPUT_LINES).__next__
_OUTPUT_BUFFER = io.StringIO()
sys.stdout = _OUTPUT_BUFFER
@atexit.register
def write():
sys.__stdout__.write(_OUTPUT_BUFFER.getvalue())
if __name__=='__main__':
t = int(input())
for tt in range(t):
n = int(input())
a = list(map(int, input().strip().split()))
print(inversionCount(a,n))
```
#### File: questions/q90_roman_to_integer/code.py
```python
def romanToDecimal(s):
num = 0
i = 0
translation = {
'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000
}
roman = ['I', 'V', 'X', 'L', 'C', 'D', 'M']
while i < len(s) :
if i<len(s)-1 and roman.index(s[i]) <= roman.index(s[i+1]) :
temp_sum = translation[s[i]]
temp_num = s[i]
i += 1
while i < len(s) and s[i] == temp_num :
temp_sum += translation[s[i]]
i += 1
while i < len(s) and roman.index(s[i]) > roman.index(temp_num) :
temp_sum = translation[s[i]] - temp_sum
temp_num = s[i]
i += 1
num += temp_sum
else :
num += translation[s[i]]
i += 1
return num
if __name__=='__main__':
t = int(input())
for _ in range(t):
print(romanToDecimal(str(input())))
``` |
{
"source": "aadhityasw/Data-Structures",
"score": 4
} |
#### File: Heap/Max-Heap/MaxHeap-Library.py
```python
import heapq
class MaxHeap :
    def __init__(self, arr = []) :
        """
        Initializes the heap from an optional iterable.
        Values are stored negated so that heapq's min-heap behaves as a max-heap.
        """
        self.heap = [(-1 * ele) for ele in arr]
        heapq.heapify(self.heap)
    def insert(self, ele) :
        """
        Inserts an element into the max-heap.
        """
        heapq.heappush(self.heap, (-1 * ele))
    def extractMaxElement(self) :
        """
        Removes and returns the maximum(root) element from the heap.
        """
        return (-1 * heapq.heappop(self.heap))
# Runner code
heapObj = MaxHeap()
heapObj.insert(3)
heapObj.insert(2)
heapObj.insert(15)
heapObj.insert(5)
heapObj.insert(4)
heapObj.insert(45)
print(heapObj.extractMaxElement())
print(heapObj.extractMaxElement())
print(heapObj.extractMaxElement())
``` |
{
"source": "aadhityasw/GymKhanna",
"score": 2
} |
#### File: GymKhaana/gymnasium/models.py
```python
from django.db import models
from django.utils import timezone
class Equipmenttype(models.Model) :
name = models.CharField(max_length=200)
description = models.TextField(max_length=500, blank=True, null=True)
class Meta:
verbose_name = "Equipment Type"
verbose_name_plural = "Equipment Types"
def __str__(self):
return self.name
class Equipment(models.Model) :
name = models.CharField(max_length=200, verbose_name='Model Name')
date_of_purchase = models.DateField()
equipment_type = models.ForeignKey(Equipmenttype, on_delete=models.CASCADE, null=True, related_name='equipments')
detail = models.TextField(max_length=500, blank=True, null=True)
class Meta:
verbose_name = "Equipment"
verbose_name_plural = "Equipments"
def __str__(self):
        stri = self.name + " - " + (self.equipment_type.name if self.equipment_type else "Unassigned")  # equipment_type is nullable
return stri
class AMC(models.Model) :
equipment = models.ForeignKey('Equipment', on_delete=models.CASCADE, related_name='amc')
start_date = models.DateField()
renewal_date = models.DateField()
count = models.IntegerField(verbose_name='Count of Previous AMC\'s', default=0)
    price = models.FloatField(default=0.0)  # FloatField takes no max_length option
class Meta:
verbose_name = "AMC"
verbose_name_plural = "AMC"
def __str__(self):
return self.equipment.name
class Package(models.Model) :
name = models.CharField(max_length=200)
    price = models.FloatField()  # FloatField takes no max_length option
duration = models.IntegerField(default=1)
class Meta:
verbose_name = "Package"
verbose_name_plural = "Packages"
def __str__(self):
stri = self.name + " - " + str(self.duration) + " months"
return (stri)
class GymClass(models.Model) :
name = models.CharField(max_length=200)
timings = models.CharField(max_length=100)
class Meta:
verbose_name = "Gym class"
verbose_name_plural = "Gym classes"
def __str__(self):
stri = self.name
return (stri)
class Membership(models.Model) :
name = models.ForeignKey(
'users.CustomerProfile',
on_delete=models.CASCADE,
related_name="customer_membership",
)
deadline = models.DateTimeField()
package = models.ForeignKey('Package', on_delete=models.SET_NULL, related_name="membership_for_package", null=True)
gym_class = models.ForeignKey('GymClass', on_delete=models.SET_NULL, related_name="membership_for_gym_class", null=True)
class Meta :
verbose_name = "Membership"
verbose_name_plural = "Memberships"
def __str__(self):
return_string = str(self.name) + " - " + str(self.package)
return (return_string)
class Notification(models.Model) :
gym_class = models.ManyToManyField(GymClass, related_name="notification_for_gym_class")
author = models.ForeignKey(
'users.CustomUser',
on_delete=models.CASCADE,
related_name="trainer",
limit_choices_to=
models.Q(role='T') | models.Q(role='M') | models.Q(role='A')
)
content = models.TextField(max_length=500)
expiry = models.DateTimeField(verbose_name="Expiry for Notification", default=None)
class Meta :
verbose_name = "Notification"
verbose_name_plural = "Notifications"
def __str__(self):
return (str(self.id))
class Announcement(models.Model) :
author = models.ForeignKey(
'users.CustomUser',
on_delete=models.CASCADE,
limit_choices_to={'role' : 'M'})
content = models.TextField(max_length=500)
# Use (timezone.now() + timezone.timedelta(1)) to add 1 day to current time.
expiry = models.DateTimeField(verbose_name="Expiry for Announcement", default=None)
class Meta :
verbose_name = "Announcement"
verbose_name_plural = "Announcements"
def __str__(self):
return (str(self.author))
"""class Payment(models.Model) :
customer = models.ForeignKey('users.CustomerProfile', on_delete=models.CASCADE, related_name="customer_membership")
payment_id = models.IntegerField()
reciept = models.FileField()"""
```
#### File: GymKhaana/users/models.py
```python
from django.db import models
from django.contrib.auth.models import AbstractUser, UserManager
from gymnasium.models import Equipmenttype
class CustomUserManager(UserManager) :
pass
class CustomUser(AbstractUser) :
role_choices = [('C', 'Customer'), ('T', 'Trainer'), ('M', 'Manager'), ('A', 'Administrator')]
role = models.CharField(max_length=1, choices=role_choices, default='C')
objects = CustomUserManager()
def __str__(self):
return (self.username)
class CustomerProfile(models.Model) :
account = models.ForeignKey('CustomUser', on_delete=models.CASCADE, related_name="customer_profile_account", limit_choices_to={'role' : 'C'})
full_name = models.CharField(max_length=100, null=False, blank=False, verbose_name="Full Name", default='-')
    reg_no = models.CharField(max_length=15, null=False, blank=False, verbose_name='Registration Number', default='-')
mobile = models.IntegerField(null=False, blank=False)
medical_history = models.TextField(max_length=1000, blank=False)
gender = models.CharField(max_length=1, choices=[('M', 'Male'), ('F', 'Female'), ('O', 'Other')], default='O')
age = models.IntegerField(null=False, blank=False)
weight = models.IntegerField(null=False, blank=False)
allergies = models.TextField(max_length=500)
address = models.TextField(max_length=500)
gym_package = models.ForeignKey('gymnasium.Package', on_delete=models.SET_NULL, related_name="customer_profile_for_package", null=True, blank=True, default=None)
gym_class = models.ForeignKey('gymnasium.GymClass', on_delete=models.SET_NULL, related_name="customer_profile_for_gym_class", null=True, blank=True, default=None)
equipment_interest = models.ManyToManyField(Equipmenttype, related_name="customer_profile_for_equipment_interest")
def __str__(self):
return str(self.full_name)
class ManagerProfile(models.Model) :
account = models.ForeignKey('CustomUser', on_delete=models.CASCADE, related_name="manager_profile_account", limit_choices_to={'role' : 'M'})
full_name = models.CharField(max_length=100, null=False, blank=False, verbose_name="Full Name", default='-')
mobile = models.IntegerField(null=False, blank=False)
gender = models.CharField(max_length=1, choices=[('M', 'Male'), ('F', 'Female'), ('O', 'Other')], default='O')
address = models.TextField(max_length=500)
age = models.IntegerField(null=False, blank=False)
def __str__(self):
return str(self.full_name)
class TrainerProfile(models.Model) :
account = models.ForeignKey('CustomUser', on_delete=models.CASCADE, related_name="trainer_profile_account", limit_choices_to={'role' : 'T'})
full_name = models.CharField(max_length=100, null=False, blank=False, verbose_name="Full Name", default='-')
mobile = models.IntegerField(null=False, blank=False)
gender = models.CharField(max_length=1, choices=[('M', 'Male'), ('F', 'Female'), ('O', 'Other')], default='O')
address = models.TextField(max_length=500)
age = models.IntegerField(null=False, blank=False)
medical_history = models.TextField(max_length=1000, blank=True)
gym_class = models.ManyToManyField('gymnasium.GymClass', related_name="allocated_trainers", blank=True)
def __str__(self):
return str(self.full_name)
``` |
{
"source": "aadhityasw/VIT-Labs",
"score": 4
} |
#### File: IoT_Domain_Analyst_ECE_3502/Lab_1/minkowski.py
```python
def minkowski(a, b, p) :
summ = 0
n = len(a)
for i in range(n) :
        summ += abs(b[i]-a[i])**p  # absolute difference: otherwise odd p lets positive and negative terms cancel
summ = summ ** (1/p)
return summ
a = [0, 3, 4, 5]
b = [7, 6, 3, -1]
p=3
print(minkowski(a, b, p))
``` |
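A cross-check against SciPy's implementation (assumes scipy is installed); with the absolute differences fixed above, the two should agree:
```python
from scipy.spatial import distance
print(distance.minkowski(a, b, p))  # matches minkowski(a, b, p)
```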
{
"source": "AADHRY2019/GP",
"score": 2
} |
#### File: GP/model/rvqa.py
```python
import numpy as np
import tensorflow as tf
class Randomvqa(object):
def __init__(self,config):
self.word_dim = config['word_dim']
self.vocab_num = config['vocab_num']
self.pretrained_embedding = config['pretrained_embedding']
self.appear_dim = config['appear_dim']
self.frame_num = config['frame_num']
self.motion_dim = config['motion_dim']
self.clip_num = config['clip_num']
self.common_dim = config['common_dim']
self.answer_num = config['answer_num']
self.logit = None
def build_inference(self):
with tf.name_scope('input'):
self.appear = tf.placeholder(
tf.float32, [None, self.frame_num, self.appear_dim], 'appear')
self.motion = tf.placeholder(
tf.float32, [None, self.clip_num, self.motion_dim], 'motion')
self.question_encode = tf.placeholder(
tf.int64, [None, None], 'question_encode')
with tf.variable_scope('init'):
shape = tf.shape(self.question_encode)
batch_size = shape[0]
with tf.name_scope('output'):
self.logit = tf.zeros([batch_size,self.answer_num])+1.0/self.answer_num
self.prediction =tf.zeros([batch_size],dtype = tf.int64)
def build_loss(self, reg_coeff):
"""Compute loss and acc."""
with tf.name_scope('answer'):
self.answer_encode = tf.placeholder(
tf.int64, [None], 'answer_encode')
answer_one_hot = tf.one_hot(
self.answer_encode, self.answer_num)
with tf.name_scope('loss'):
log_loss = tf.losses.log_loss(
answer_one_hot, self.logit, scope='log_loss')
reg_loss = tf.add_n(
tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES), name='reg_loss')
# fix channel selection
self.loss = log_loss + reg_coeff * reg_loss
with tf.name_scope("acc"):
correct = tf.equal(self.prediction, self.answer_encode)
self.acc = tf.reduce_mean(tf.cast(correct, "float"))
``` |
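A minimal, hypothetical usage sketch for the random-answer baseline above, written for TF1-style graph mode like the class itself; every config value below is made up to match the keys read in __init__:
```python
config = {
    'word_dim': 300, 'vocab_num': 4000, 'pretrained_embedding': None,
    'appear_dim': 2048, 'frame_num': 20, 'motion_dim': 4096,
    'clip_num': 20, 'common_dim': 256, 'answer_num': 1000,
}
model = Randomvqa(config)
model.build_inference()
# build_loss sums the REGULARIZATION_LOSSES collection, so at least one
# regularized variable must exist in the graph before this call succeeds.
model.build_loss(reg_coeff=1e-5)
```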
{
"source": "Aadi0902/AirSim",
"score": 2
} |
#### File: AK testing/PWM model/PWMtest.py
```python
import numpy as np
import airsim
from airsim import Vector3r
import time
import xlrd
import control
import matrixmath
import gain_matrix_calculator as calK
from scipy import signal
from squaternion import Quaternion
import control.matlab
from scipy.spatial.transform import Rotation as R
class PWMtest:
def main(self):
multirotorClient = airsim.MultirotorClient()
multirotorClient.confirmConnection()
multirotorClient.enableApiControl(True)
pwm = 0.6
state = multirotorClient.getMultirotorState()
initialTime = state.timestamp/1000000000
for ind in range(5):
print("Iteration: %d" %(ind))
multirotorClient.moveByMotorPWMsAsync(pwm, pwm, pwm, pwm, 2).join()
state = multirotorClient.getMultirotorState()
FinalTime = state.timestamp/1000000000
print("Time: %f" %(FinalTime - initialTime))
print("Out")
time.sleep(20)
print("Hover")
multirotorClient.hoverAsync().join()
time.sleep(10)
class LQRtestPWM:
def main(self):
#Time step
Ts = 0.1
# Maximum angular velocity
max_angular_vel = 6393.667 * 2 * np.pi / 60
#Final state
x_bar = np.array([[10.0],
[10.0],
[10.0],
[0.0],
[0.0],
[0.0],
[0.0],
[0.0],
[0.0],
[0.0],
[0.0],
[0.0]])
#Gain matrix
K, u_bar = calK.gainMatrix(Ts,max_angular_vel)
        # Set up the AirSim multirotor client
multirotorClient = airsim.MultirotorClient()
multirotorClient.confirmConnection()
multirotorClient.enableApiControl(True)
vehicleClient = airsim.VehicleClient()
state = multirotorClient.getMultirotorState()
print(state.kinematics_estimated.position)
# Arm the drone
print("arming the drone...")
multirotorClient.armDisarm(True)
if state.landed_state == airsim.LandedState.Landed:
print("taking off...")
multirotorClient.takeoffAsync().join()
else:
multirotorClient.hoverAsync().join()
time.sleep(2)
# Declare u matrix 4 x 1
# u = [0,
# 0,
# 0,
# 0]
# pwm = np.array([0,
# 0,
# 0,
# 0])
print("Controls start")
#time.sleep(2)
#multirotorClient.moveByMotorPWMsAsync(1, 1, 1, 1,3).join()
#newX = [[],[],[],[],[],[],[],[],[],[],[],[]]
# Start step loop
for index in range(1000):
            # Re-initialize u for every iteration
# u = [0,
# 0,
# 0,
# 0]
# Get state of the multiorotor
state = multirotorClient.getMultirotorState()
state = state.kinematics_estimated
initialState = state.position
#Convert from quaternion to euler angle
#euler = ls.quaternion_to_euler(state.orientation.x_val,state.orientation.y_val, state.orientation.z_val,state.orientation.w_val)
q = R.from_quat([state.orientation.x_val,
state.orientation.y_val,
state.orientation.z_val,
state.orientation.w_val])
e = q.as_euler('zyx')
# q = Quaternion(state.orientation.w_val,
# state.orientation.x_val,
# state.orientation.y_val,
# state.orientation.z_val)
# e = q.to_euler()
# rotationMatrix = np.linalg.inv([[0, 1, 0],
# [1, 0, 0],
# [0, 0, -1]])
# position = [[state.position.x_val],
# [state.position.y_val],
# [state.position.z_val]]
# linear_velocity = [[state.linear_velocity.x_val],
# [state.linear_velocity.x_val],
# [state.linear_velocity.z_val]]
#Store the current state of multirotor in x
#e[2] = e[2] + np.pi if e[2]<=np.pi else e[2] - np.pi
x = np.array([[state.position.x_val],
[-state.position.y_val],
[-state.position.z_val],
[e[0]],
[-e[1]],
[-e[2]],
[state.linear_velocity.x_val],
[-state.linear_velocity.y_val],
[-state.linear_velocity.z_val],
[state.angular_velocity.x_val],
[-state.angular_velocity.y_val],
[-state.angular_velocity.z_val]])
# Compute u
u = np.dot(K, x_bar-x) + u_bar
#print(np.dot(K, x_bar - x))
#squared_angular_velocity = u_bar
# pwmHover = 0.5937
# # Compute required pwm signal
# sq_ctrl_hover = (pwmHover * max_angular_vel)**2
#sq_ctrl_delta = np.dot(K, x_bar - x)
sq_ctrl = [max(u[0][0], 0.0),
max(u[1][0], 0.0),
max(u[2][0], 0.0),
max(u[3][0], 0.0)] # max is just in case norm of sq_ctrl_delta is too large (can be negative)
pwm1 = min((np.sqrt(sq_ctrl[0])/max_angular_vel),1.0)
pwm2 = min((np.sqrt(sq_ctrl[1])/max_angular_vel),1.0)
pwm3 = min((np.sqrt(sq_ctrl[2])/max_angular_vel),1.0)
pwm4 = min((np.sqrt(sq_ctrl[3])/max_angular_vel),1.0)
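            # Illustrative numbers for the mapping above (assumed, not from the
            # source): with max_angular_vel ~= 669.4 rad/s, sq_ctrl[0] = 160000
            # rad^2/s^2 gives sqrt(160000)/669.4 ~= 0.60, close to the hover
            # PWM of ~0.5937 mentioned in the commented-out block above.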
#pwm = np.sqrt(max(squared_angular_velocity + (pwmHover*max_angular_vel)**2, 0)) / max_angular_vel
multirotorClient.moveByMotorPWMsAsync(pwm4, pwm1, pwm3, pwm2,Ts).join()
#multirotorClient.moveToPositionAsync(x_bar[0], x_bar[1], x_bar[2], 0, 1200,
#airsim.DrivetrainType.MaxDegreeOfFreedom, airsim.YawMode(False,0), -1, 1).join()
#multirotorClient.moveByMotorPWMsAsync(pwmHover, pwmHover, pwmHover, pwmHover, Ts).join()
# print(x_bar[0][0])
# multirotorClient.moveToPositionAsync(x_bar[0][0], x_bar[1][0], -x_bar[2][0], 1.0).join()
state = multirotorClient.getMultirotorState()
state = state.kinematics_estimated
# print(state)
time.sleep(10)
print("Free fall")
        multirotorClient.moveByMotorPWMsAsync(0, 0, 0, 0, 10).join()
time.sleep(10)
print("disarming...")
multirotorClient.armDisarm(False)
multirotorClient.enableApiControl(False)
print("done.")
def quaternion_to_euler(self,x, y, z, w):
r = R.from_quat([x,y,z,w])
r = r.as_euler('xyz')
# import math
# t0 = +2.0 * (w * x + y * z)
# t1 = +1.0 - 2.0 * (x ** 2 + y ** y)
# X = math.atan2(t0, t1)
# t2 = +2.0 * (w * y - z * x)
# t2 = +1.0 if t2 > +1.0 else t2
# t2 = -1.0 if t2 < -1.0 else t2
# Y = math.asin(t2)
# t3 = +2.0 * (w * z + x * y)
# t4 = +1.0 - 2.0 * (y * y + z * z)
# Z = math.atan2(t3, t4)
return r[0], r[1], r[2]
ls = LQRtestPWM()
print(ls.quaternion_to_euler(0.7071068, 0, 0, 0.7071068))
ls.main()
``` |
{
"source": "Aadi775/python-chat-bot",
"score": 3
} |
#### File: Aadi775/python-chat-bot/lib.py
```python
import requests
from time import sleep
from random import randint,choice
from bs4 import BeautifulSoup
import sys
import os
import json
import math
help_options = ["Answer few of your questions.", "Roll a dice.", "Toss a coin", "Subtract number", "Add numbers" , "find factorial",'riddle']
def find_database_path():
relative_path = sys.argv[0]
letter_list = [x for x in relative_path]
slashindex = []
lix = ["\ "]
if lix[0][0] not in letter_list:
return "database.json"
else:
for item in letter_list:
if item == lix[0][0]:
indexx = letter_list.index(lix[0][0])
slashindex.append(indexx)
letter_list[indexx] = "a"
return relative_path[0:slashindex[-1]]+"\database.json"
def load_database():
path = find_database_path()
if os.path.exists(path):
with open(path, "r") as jsonFile:
data = json.load(jsonFile)
tdata = data["tdata"]
with open(path, "w") as jsonFile:
json.dump(data,jsonFile,indent=4)
else:
initial_data = {
"tdata":{},
"user_review":[]
}
with open(path, "w") as jsonFile:
json.dump(initial_data,jsonFile,indent=4)
with open(path, "r") as jsonFile:
data = json.load(jsonFile)
tdata = data["tdata"]
with open(path, "w") as jsonFile:
json.dump(data,jsonFile,indent=4)
return tdata
def square():
n=int(input("What number you want to square:") )
print(n*n)
def squareroot():
x=int(input("What number you want to find square root of:") )
print(math.sqrt(x))
def helpx():
for item in help_options:
print(item)
def save_database(data):
path = find_database_path()
with open(path,"r") as jsonFile:
data1 = json.load(jsonFile)
data1["tdata"] = data
with open(path, "w") as jsonFile:
json.dump(data1,jsonFile,indent=4)
def search():
name=input("what you want to search: ")
URL="https://en.wikipedia.org/wiki/"+name
heders = { "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36 OPR/72.0.3815.211"}
page = requests.get(URL,heders)
soup= BeautifulSoup(page.content, 'html.parser')
title=soup.findAll("p")
print(title[2].text)
print(title[3].text)
def fact(n):
if n==0:
return 1
return n*fact(n-1)
def roll_a_dice ():
print("your number is ", end="", flush=True)
sleep(1)
print(".", end="", flush=True)
sleep(1)
print(".", end="", flush=True)
sleep(1)
print(".", end="", flush=True)
sleep(1)
print(randint(1, 6))
def sub():
t = input("do you want to subtract numbers say yes or no ").lower()
if t == "yes":
n1 = int(input("give me first number"))
n2 = int(input("give me second number"))
print(n1-n2)
def add():
t = input("do you want to add numbers say yes or no ").lower()
if t == "yes":
n = int(input("who many numbers do you want add "))
y=0
for i in range(n):
x= int(input('your number'))
y+=x
print(y)
def toss():
print(choice(["Heads", "Tails"]))
def dumy():
numP=int(input("who many paras you want: ") )
URL="https://www.lipsum.com/feed/html"
heders = { "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36 OPR/72.0.3815.211"}
page = requests.get(URL,heders)
soup= BeautifulSoup(page.content, 'html.parser')
title=soup.findAll("p")
for i in range(numP):
print (title[i].text)
def dumytext():
numwords=int(input('num words you want'))
for i in range(numwords):
numletters=randint(2,6)
x = ["q","w","e","r","t","y",'u','i','o','p','a','s','d','f','g','h','j','k','l','z','x','c','v','b','n','m']
WORD=''
for i in range(numletters):
cl=choice(x)
WORD=WORD+cl
print(WORD, end=' ')
def sayhi():
hi= ["hi" , "hey" , "hello" , "hope you are good" , "how are you " , "how is your day" , "hi there","hello!" , "I'm good!" , "fine! how about you ?" , "hello friend" , "hope you are good too!"]
    print(choice(hi))
def riddle():
URL="https://www.prodigygame.com/main-en/blog/riddles-for-kids/"
heders = { "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36 OPR/72.0.3815.211"}
page = requests.get(URL,heders)
soup= BeautifulSoup(page.content, 'html.parser')
div=soup.find_all('div')
divindex=[]
questions=[]
answers=[]
for diva in div[35:]:
q=diva.findAll("strong")
e=diva.findAll("p")
for pa in q:
pa=pa.text
if pa[1]=='.' or pa[2]=='.':
questions.append(pa)
for em in e:
em=em.text
if em[6]==':':
answers.append(em)
ran=choice(range(len(answers)))
print(questions[ran][2:])
print(answers[ran][7:])
``` |
{
"source": "aadibajpai/ccextractor-web",
"score": 2
} |
#### File: ccextractor-web/daemon/parsers.py
```python
import json
import argparse
class ParseJob():
def __init__(self, job_file):
self.job_config = {}
with open(job_file, 'r', encoding="utf-8") as f:
self.job_config = json.load(f)
self.ccextractor_executable = self.job_config['executable_path']
self.filename = self.job_config['filename']
self.job_number = self.job_config['job_number']
self.parameters = self.job_config['parameters']
self.platform = self.job_config['platform']
self.token = self.job_config['token']
self.output_file_extension = self.job_config['output_file_extension']
def get_job_config(self):
return self.job_config
class ParseParameters():
    def __init__(self, argv):
        self.parameters = {}
        while argv:
            if argv[0][0] == '-':
                self.parameters[argv[0]] = argv[1]
            argv = argv[1:]
        self.job_dir = self.parameters['-jobDir']
        self.output_dir = self.parameters['-outputDir']
        self.archive_dir = self.parameters['-archiveDir']
        self.ccextractor_binaries_dir = self.parameters['-ccextractorBinariesDir']
        self.log_dir = self.parameters['-logDir']
        self.report_url = self.parameters['-reportURL']
    def get_raw_parameters(self):
        return self.parameters
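# Hypothetical invocation sketch (flag names match the keys read above):
#   params = ParseParameters(['-jobDir', '/jobs', '-outputDir', '/out',
#                             '-archiveDir', '/archive',
#                             '-ccextractorBinariesDir', '/opt/ccextractor',
#                             '-logDir', '/logs',
#                             '-reportURL', 'https://example.invalid/report'])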
class ParseCCExtractorParameters():
def __init__(self, params):
params = json.loads(params)
self.params_list = []
for key, value in params.items():
self.params_list.append(key)
if value:
self.params_list.append(value)
``` |
{
"source": "aadibajpai/common-coalition-convert",
"score": 3
} |
#### File: aadibajpai/common-coalition-convert/main.py
```python
from selenium import webdriver
def main():
with open("routine.txt", "r") as routine:
url_in = routine.readline()[1:]
url_out = routine.readline()[1:]
driver_in = webdriver.Firefox()
driver_in.get(url_in)
driver_out = webdriver.Firefox()
driver_out.get(url_out)
input("Please log in on both windows. Press Enter once done.\n")
for line in routine:
if(line[0:1] == "i"):
exec_routine(driver_in, line[2:])
elif(line[0:1] == "o"):
exec_routine(driver_out, line[2:])
def exec_routine(driver, command):
if(command[0:1] == "g"):
driver.get(command[2:])
if __name__ == '__main__':
main()
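# A hypothetical routine.txt consumed above (layout inferred from the parsing
# code, not taken from the repo): the first two lines carry the two base URLs
# (readline()[1:] skips their leading tag character), and every later line is
# '<window> <command> <url>', where the window is 'i' or 'o' and 'g' means get:
#   i g https://source.example.org/records
#   o g https://target.example.org/import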
``` |
{
"source": "aadibajpai/sample-platform",
"score": 2
} |
#### File: aadibajpai/sample-platform/manage.py
```python
import os
import unittest
from exceptions import CCExtractorEndedWithNonZero, MissingPathToCCExtractor
from flask_script import Command, Manager
from mod_regression.controllers import mod_regression
from mod_regression.update_regression import update_expected_results
from run import app
manager = Manager(app)
@manager.add_command
class UpdateResults(Command):
"""
Update results for the present samples with new ccextractor version.
Pass path to CCExtractor binary as the first argument. Example, `python manage.py update /path/to/ccextractor`
"""
name = 'update'
capture_all_args = True
def run(self, remaining):
"""Driver function for update subcommand."""
if len(remaining) == 0:
print('path to ccextractor is missing')
raise MissingPathToCCExtractor
path_to_ccex = remaining[0]
print('path to ccextractor: ' + str(path_to_ccex))
if not update_expected_results(path_to_ccex):
print('update function errored')
raise CCExtractorEndedWithNonZero
print('update function finished')
return 0
if __name__ == '__main__':
manager.run()
```
#### File: sample-platform/mod_auth/forms.py
```python
from __future__ import annotations
from typing import Any, Callable, Optional, Type
from flask_wtf import FlaskForm
from wtforms import PasswordField, SelectField, StringField, SubmitField
from wtforms.fields.html5 import EmailField
from wtforms.fields.simple import PasswordField
from wtforms.validators import DataRequired, Email, ValidationError
import mod_auth.models
from mod_auth.models import Role, User
def unique_username(form, field) -> None:
"""
Check if a user already exists with this name.
:param form: The form which is being passed in
:type form: Form
:param field: The data value for the 'name' inserted by new User
:type field : StringField
"""
user = User.query.filter(User.name == field.data).first()
if user is not None:
raise ValidationError('There is already a user with this name')
def valid_password(form: CompleteSignupForm, field: PasswordField) -> None:
"""
Check for validity of a password.
:param form: The form which is being passed in
:type form: Form
:param field: The data value for the 'password' inserted by User
:type field : PasswordField
"""
from run import config
min_pwd_len = int(config['MIN_PWD_LEN'])
max_pwd_len = int(config['MAX_PWD_LEN'])
pass_size = len(field.data)
if pass_size == 0:
raise ValidationError('new password cannot be empty')
if pass_size < min_pwd_len or pass_size > max_pwd_len:
raise ValidationError(
'Password needs to be between {min_pwd_len} and {max_pwd_len} characters long (you entered {char})'.format(
min_pwd_len=min_pwd_len, max_pwd_len=max_pwd_len, char=pass_size)
)
def email_not_in_use(has_user_field: bool = False) -> Callable:
"""
Check if the passed email is already in use.
:param has_user_field : Whether an email has an existing User (False by
default)
:type has_user_field : boolean
"""
def _email_not_in_use(form, field):
user_id = -1 if not has_user_field else form.user.id
# Check if email is not already in use
user = User.query.filter(User.email == field.data).first()
if user is not None and user.id != user_id and len(field.data) > 0:
raise ValidationError('This address is already in use')
return _email_not_in_use
class LoginForm(FlaskForm):
"""Render form for User to enter Log in credentials."""
email = EmailField('Email', [
DataRequired(message='Email address is not filled in'),
Email(message='Entered value is not a valid email address')
])
password = PasswordField('Password', [DataRequired(message='Password cannot be empty.')])
submit = SubmitField('Login')
class SignupForm(FlaskForm):
"""Sign up form for new Users."""
email = EmailField('Email', [
DataRequired(message='Email address is not filled in'),
Email(message='Entered value is not a valid email address')
])
submit = SubmitField('Register')
class DeactivationForm(FlaskForm):
"""Deactivate existing account."""
submit = SubmitField('Deactivate account')
class RoleChangeForm(FlaskForm):
"""Change the Role."""
role = SelectField('Select a role', [DataRequired(message='Role is not filled in.')], coerce=str)
submit = SubmitField('Change role')
class CompleteSignupForm(FlaskForm):
"""Complete Sign up form for new users."""
name = StringField('Name', [DataRequired(message='Name is not filled in.')])
password = PasswordField('Password', [DataRequired(message='Password is not filled in.'), valid_password])
password_repeat = PasswordField('Repeat password', [DataRequired(message='Repeated password is not filled in.')])
submit = SubmitField('Register')
@staticmethod
def validate_password_repeat(form: CompleteSignupForm, field: PasswordField) -> None:
"""
Validate if the repeated password is the same as 'password'.
:param form: The form which is being passed in
:type form: CompleteSignupForm
:param field : The data value for the 'password' entered by User
:type field : PasswordField
"""
if field.data != form.password.data:
raise ValidationError('The password needs to match the new password')
class AccountForm(FlaskForm):
"""Form for editing current Account."""
def __init__(self, formdata=None, obj=None, prefix='', *args, **kwargs) -> None:
super(AccountForm, self).__init__(formdata=formdata, obj=obj, prefix=prefix, *args, **kwargs)
self.user = obj
current_password = PasswordField('Current password', [DataRequired(message='current password cannot be empty')])
    new_password = PasswordField('New password')
new_password_repeat = PasswordField('Repeat new password')
name = StringField('Name', [DataRequired(message='Name is not filled in.')])
email = EmailField('Email', [
DataRequired(message='email address is not filled in'),
Email(message='entered value is not a valid email address'),
email_not_in_use(True)
])
submit = SubmitField('Update account')
@staticmethod
def validate_current_password(form, field) -> None:
"""
Validate current password entered with the password stored in database.
:param form: The form which is being passed in
:type form: AccountForm
:param field: The data value for the 'password' entered by User
:type field : PasswordField
"""
if form.user is not None:
if not form.user.is_password_valid(field.data):
raise ValidationError('Invalid password')
else:
raise ValidationError('User instance not passed to form validation')
@staticmethod
def validate_new_password(form, field) -> None:
"""
Validate the new password entered.
:param form: The form which is being passed in
:type form: AccountForm
:param field: The data value for the 'password' entered by User
:type field : PasswordField
"""
if len(field.data) == 0 and len(form.new_password_repeat.data) == 0:
return
valid_password(form, field)
@staticmethod
def validate_new_password_repeat(form, field) -> None:
"""
Validate new password repeat and checks if it matches 'new_password'.
:param form: The form which is being passed in
:type form: AccountForm
:param field: The data value for the 'password' entered by User
:type field : PasswordField
"""
if form.email is not None:
# Email form is present, so it's optional
if len(field.data) == 0 and len(form.new_password.data) == 0:
return
if field.data != form.new_password.data:
raise ValidationError('The password needs to match the new password')
class ResetForm(FlaskForm):
"""Form for resetting password."""
email = EmailField('Email', [
DataRequired(message='Email address is not filled in'),
Email(message='Entered value is not a valid email address')
])
submit = SubmitField('Request reset instructions')
class CompleteResetForm(FlaskForm):
"""Reset password form after clicking on the link in the email."""
password = PasswordField('Password', [DataRequired(message='Password is not filled in.'), valid_password])
password_repeat = PasswordField('Repeat password', [DataRequired(message='Repeated password is not filled in.')])
submit = SubmitField('Reset password')
@staticmethod
def validate_password_repeat(form, field) -> None:
"""
Validate new password repeat and checks if it matches 'password'.
:param form: The form which is being passed in
:type form: CompleteResetForm
:param field: The data value for the 'password' entered by User
:type field : PasswordField
"""
if field.data != form.password.data:
raise ValidationError('The password needs to match the new password')
```
#### File: sample-platform/mod_auth/models.py
```python
import string
from typing import Any, Dict, Tuple, Type
from passlib.apps import custom_app_context as pwd_context
from sqlalchemy import Column, Integer, String, Text
import database
from database import Base, DeclEnum
class Role(DeclEnum):
"""Roles available for users."""
admin = "admin", "Admin"
user = "user", "User"
contributor = "contributor", "Contributor"
tester = "tester", "Tester"
class User(Base):
"""Model for an user."""
__tablename__ = 'user'
__table_args__ = {'mysql_engine': 'InnoDB'}
id = Column(Integer, primary_key=True)
name = Column(String(50), unique=True)
email = Column(String(255), unique=True, nullable=True)
github_token = Column(Text(), nullable=True)
password = Column(String(255), unique=False, nullable=False)
role = Column(Role.db_type())
def __init__(self, name, role=Role.user, email=None, password='', github_token=None) -> None:
"""
Parametrized constructor for the User model.
:param name: The value of the 'name' field of User model
:type name: str
:param role: The value of the 'role' field of User model
:type role: Role
:param email: The value of the 'email' field of User model (None by
default)
:type email: str
:param password: The value of the 'password' field of User model (
empty by default)
:type password: str
"""
self.name = name
self.email = email
self.password = password
self.role = role
self.github_token = github_token
def __repr__(self) -> str:
"""
Represent a User Model by its 'name' Field.
:return str(name): Returns the string containing 'name' field
of the User model
:rtype str(name): str
"""
return '<User {name}>'.format(name=self.name)
@staticmethod
def generate_hash(password: str) -> str:
"""
Generate a Hash value for a password.
:param password: The password to be hashed
:type password: str
:return : The hashed password
:rtype : str
"""
# Go for increased strength no matter what
return pwd_context.encrypt(password, category='admin')
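        # Round-trip sketch with passlib's custom_app_context (note: encrypt is
        # a legacy alias of hash in recent passlib releases):
        #   hashed = User.generate_hash('secret')
        #   pwd_context.verify('secret', hashed)  # -> True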
@staticmethod
def create_random_password(length=16) -> str:
"""
Create a random password of default length 16.
:param length: If parameter is passed, length will be the parameter.
16 by default
:type length: int
:return : Randomly generated password
:rtype : str
"""
chars = string.ascii_letters + string.digits + '!@#$%^&*()'
import os
return ''.join(chars[ord(os.urandom(1)) % len(chars)] for i in range(length))
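        # A more idiomatic sketch using the standard library (assumes
        # Python 3.6+; not the repo's implementation):
        #   import secrets
        #   return ''.join(secrets.choice(chars) for _ in range(length))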
def is_password_valid(self, password) -> Any:
"""
Check the validity of the password.
:param password: The password to be validated
:type password: str
:return : Validity of password
:rtype : boolean
"""
return pwd_context.verify(password, self.password)
def update_password(self, new_password) -> None:
"""
Update the password to a new one.
:param new_password: The new password to be updated
:type new_password: str
"""
self.password = self.generate_hash(new_password)
@property
def is_admin(self):
"""
        Verify whether a User is an admin.
        :return : True if the User has the admin role
:rtype: boolean
"""
return self.role == Role.admin
def has_role(self, name) -> Any:
"""
Check whether the User has a particular role.
:param name: Role of the user
:type name: str
:return : Checks whether a User has 'name' role
:rtype: boolean
"""
return self.role.value == name or self.is_admin
```
#### File: sample-platform/mod_sample/models.py
```python
from datetime import datetime
from typing import Any, Dict, Type
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Text
from sqlalchemy.orm import relationship
import database
from database import Base, DeclEnum
class Sample(Base):
"""Model to store and manage sample."""
__tablename__ = 'sample'
__table_args__ = {'mysql_engine': 'InnoDB'}
id = Column(Integer, primary_key=True)
sha = Column(String(128), unique=True)
extension = Column(String(64), nullable=False)
original_name = Column(Text(), nullable=False)
extra_files = relationship('ExtraFile', back_populates='sample')
tests = relationship('RegressionTest', back_populates='sample')
upload = relationship('Upload', uselist=False, back_populates='sample')
def __init__(self, sha, extension, original_name) -> None:
"""
Parametrized constructor for the Sample model.
:param sha: The value of the 'sha' field of Sample model
:type sha: str
:param extension: The value of the 'extension' field of Sample model
:type extension: str
:param original_name: The value of the 'original_name' field of Sample model
:type original_name: str
"""
self.sha = sha
self.extension = extension
self.original_name = original_name
def __repr__(self) -> str:
"""
Represent a Sample Model by its 'sha' Field.
        :return: Returns the string containing 'sha' field of the Sample model
:rtype: str
"""
return '<Sample {hash}>'.format(hash=self.sha)
@property
def filename(self):
"""Return the full filename of the sample."""
extension = ("." + self.extension) if len(self.extension) > 0 else ""
return "{sha}{extension}".format(sha=self.sha, extension=extension)
class ExtraFile(Base):
"""Model to store and manage sample extra data."""
__tablename__ = 'sample_extra'
__table_args__ = {'mysql_engine': 'InnoDB'}
id = Column(Integer, primary_key=True)
sample_id = Column(Integer, ForeignKey('sample.id', onupdate="CASCADE", ondelete="CASCADE"))
sample = relationship('Sample', uselist=False, back_populates='extra_files')
original_name = Column(Text(), nullable=False)
extension = Column(String(64), nullable=False)
def __init__(self, sample_id, extension, original_name) -> None:
"""
Parametrized constructor for the ExtraFile model.
        :param sample_id: The value of the 'sample_id' field of ExtraFile model
:type sample_id: int
:param extension: The value of the 'extension' field of ExtraFile model
:type extension: str
:param original_name: The value of the 'original_name' field of ExtraFile model
:type original_name: str
"""
self.sample_id = sample_id
self.extension = extension
self.original_name = original_name
def __repr__(self) -> str:
"""
Represent a ExtraFile Model by its 'sample_id' Field.
        :return: Returns the string containing 'sample_id' field of the ExtraFile model
:rtype: str
"""
return '<Sample extra for {id}>'.format(id=self.sample_id)
@property
def short_name(self, length=5): # type: ignore
"""
Return the short name of an additional file.
:param length: How many characters of the hash should be retained for the short name? Defaults to 5.
:type length: int
:return: A short name consisting of the first x characters of the hash, the id and the file extension.
:rtype: str
"""
return "{short}_{id}.{extension}".format(
short=self.sample.sha[:length], id=self.id,
extension=self.extension
)
@property
def filename(self):
"""
Return filename.
:return: Returns the full name of the file using the hash, id and file extension.
:rtype: str
"""
extension = ("." + self.extension) if len(self.extension) > 0 else ""
return "{sha}_{id}{extension}".format(sha=self.sample.sha, id=self.id, extension=extension)
class ForbiddenExtension(Base):
"""Model to store and manage forbidden extensions."""
__tablename__ = 'extension_forbidden'
__table_args__ = {'mysql_engine': 'InnoDB'}
extension = Column(String(32), primary_key=True)
def __init__(self, extension) -> None:
"""
Parametrized constructor for the ForbiddenExtension model.
:param extension: The value of the 'extension' field of ForbiddenExtension model
:type extension: str
"""
self.extension = extension
def __repr__(self) -> str:
"""
Represent a ForbiddenExtension Model by its 'extension' Field.
:return: Returns the string containing 'extension' field of the ForbiddenExtension model
:rtype: str
"""
return '<Forbidden extension {extension}>'.format(extension=self.extension)
class ForbiddenMimeType(Base):
"""Model to store and manage forbidden mimetype."""
__tablename__ = 'mimetype_forbidden'
__table_args__ = {'mysql_engine': 'InnoDB'}
mimetype = Column(String(64), primary_key=True)
def __init__(self, mimetype) -> None:
"""
Parametrized constructor for the ForbiddenMimeType model.
:param mimetype: The value of the 'mimetype' field of ForbiddenMimeType model
:type mimetype: str
"""
self.mimetype = mimetype
def __repr__(self) -> str:
"""
Represent a ForbiddenMimeType Model by its 'mimetype' Field.
:return: Returns the string containing 'mimetype' field of the ForbiddenMimeType model
:rtype: str
"""
return '<Forbidden MimeType {mime}>'.format(mime=self.mimetype)
class Issue(Base):
"""Model to store and manage sample issue."""
__tablename__ = 'sample_issue'
__table_args__ = {'mysql_engine': 'InnoDB'}
id = Column(Integer, primary_key=True)
sample_id = Column(Integer, ForeignKey('sample.id', onupdate="CASCADE",
ondelete="CASCADE"))
sample = relationship('Sample', uselist=False)
issue_id = Column(Integer, nullable=False)
title = Column(Text(), nullable=False)
user = Column(Text(), nullable=False)
created_at = Column(DateTime(timezone=True), nullable=False)
status = Column(Text(), nullable=False)
def __init__(self, sample_id, issue_id, date, title, user, status) -> None:
"""
Parametrized constructor for the Issue model.
:param sample_id: The value of the 'sample_id' field of Issue model
:type sample_id: int
:param issue_id: The value of the 'issue_id' field of Issue model
:type issue_id: int
:param date: The value of the 'created_at' field of Issue model
:type date: datetime
:param title: The value of the 'title' field of Issue model
:type title: str
:param user: The value of the 'user' field of Issue model
:type user: str
:param status: The value of the 'status' field of Issue model
:type status: str
"""
self.sample_id = sample_id
self.issue_id = issue_id
self.created_at = datetime.strptime(date, '%Y-%m-%dT%H:%M:%SZ')
self.title = title
self.user = user
self.status = status
```
#### File: tests/test_auth/TestForms.py
```python
from flask import g
from wtforms.validators import ValidationError
from mod_auth.forms import unique_username, valid_password
from mod_auth.models import User
from tests.base import BaseTestCase
class Field:
def __init__(self, data):
self.data = data
class TestForm(BaseTestCase):
def test_unique_username(self):
"""
Test that username is always unique.
"""
user = User(name="thealphadollar")
g.db.add(user)
g.db.commit()
user_field = Field("thealphadollar")
with self.assertRaises(ValidationError):
unique_username(None, user_field)
def test_empty_invalid_password(self):
"""
Test validation fail for zero length password.
"""
pass_field = Field("")
with self.assertRaises(ValidationError):
valid_password(None, pass_field)
def test_less_than_min_length_invalid_password(self):
"""
Test validation fail for password of length less than min length.
"""
pass_field = Field("".join(['x' * (int(self.app.config['MIN_PWD_LEN']) - 1)]))
with self.assertRaises(ValidationError):
valid_password(None, pass_field)
def test_more_than_max_length_invalid_password(self):
"""
Test validation fail for password of length more than max length.
"""
pass_field = Field("".join(['x' * (int(self.app.config['MAX_PWD_LEN']) + 1)]))
with self.assertRaises(ValidationError):
valid_password(None, pass_field)
def test_valid_password(self):
"""
Test validation pass for valid password.
"""
pass_field = Field("".join(['x' * (int(self.app.config['MAX_PWD_LEN']))]))
valid_password(None, pass_field)
```
#### File: sample-platform/tests/TestUtility.py
```python
from unittest import mock
from tests.base import BaseTestCase
class TestUtility(BaseTestCase):
@mock.patch('utility.path')
def test_serve_file_download(self, mock_path):
"""
Test function serve_file_download.
"""
from utility import serve_file_download
response = serve_file_download('to_download', 'folder', 'accl_folder')
self.assert200(response)
self.assertEqual(2, mock_path.join.call_count)
mock_path.getsize.assert_called_once_with(mock_path.join())
```
#### File: aadibajpai/sample-platform/utility.py
```python
from os import path
from typing import Any
from flask import make_response
ROOT_DIR = path.dirname(path.abspath(__file__))
def serve_file_download(file_name, file_folder, xaccel_folder,
subfolder='', content_type='application/octet-stream') -> Any:
"""
Endpoint to serve file download.
    :param file_name: name of the file
    :type file_name: str
    :param file_folder: folder within the sample repository that holds the file
    :type file_folder: str
    :param xaccel_folder: folder used to build the nginx X-Accel-Redirect path
    :type xaccel_folder: str
    :param subfolder: optional subfolder inside file_folder, defaults to ''
    :type subfolder: str, optional
    :param content_type: content type of the file, defaults to 'application/octet-stream'
    :type content_type: str, optional
    :return: response, the file download
    :rtype: Flask response
"""
from run import config
file_path = path.join(config.get('SAMPLE_REPOSITORY', ''), file_folder, subfolder, file_name)
response = make_response()
response.headers['Content-Description'] = 'File Transfer'
response.headers['Cache-Control'] = 'no-cache'
response.headers['Content-Type'] = content_type
response.headers['Content-Disposition'] = 'attachment; filename={name}'.format(name=file_name)
response.headers['Content-Length'] = path.getsize(file_path)
response.headers['X-Accel-Redirect'] = '/' + path.join(xaccel_folder, subfolder, file_name)
return response
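    # The X-Accel-Redirect header hands the actual transfer off to nginx; a
    # matching internal location block is assumed to exist, e.g. (hypothetical):
    #   location /<xaccel_folder>/ { internal; alias <SAMPLE_REPOSITORY>/; }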
``` |
{
"source": "AadiCool/variable_CNN",
"score": 3
} |
#### File: AadiCool/variable_CNN/CNN_general_train.py
```python
import numpy as np
from datetime import datetime
cnn_layer_types = ["CONV", "MAXPOOL"]
# ( layer type , x_length , y_length , zero_padding, no of mask ) zero_padding and no of mask not applicable for MAXPOOL
cnn_layer_info = []
ndelst = inpt_dim = [] # contains the node numbers in FC layer
mask_depth = [] # contains the mask depths of each layer
epoch_itr = optLyr = hydLyr = 0
lrn_rate = nrm_fac = 0.0
read_wt = 0
instructions_file = "instructions.txt"
data_input_file = "data_input_train.txt"
data_output_file = "data_output_train.txt"
weight_file = ""
f_ins = open(instructions_file, "r")
lns = f_ins.readlines()
# reading the instructions from the instruction files
try:
lrn_rate = float(lns[0].strip(' \n')) # first line should be learning rate
epoch_itr = int(lns[1].strip(' \n')) # second line should contain no of iterations
inpt_dim = lns[2].strip(' \n').split(' ') # third line should contain the input matrix dimensions
inpt_dim = [int(inpt_dim[i]) for i in range(len(inpt_dim))]
if (len(inpt_dim) == 3):
mask_depth.append(inpt_dim[2])
else:
mask_depth.append(1)
optLyr = int(lns[3].strip(' \n')) # fourth line should contain no of nodes in output layer
nrm_fac = float(lns[4].strip(' \n')) # fifth line should contain normalization factor
hydLyr = int(lns[5].strip(' \n')) # sixth line should contain no of hidden layer
ndelst.extend(
[int(x) for x in lns[6].strip(' \n').split(' ')]) # seventh line should contain no of nodes in hidden layer
ndelst.append(optLyr)
read_wt_ln = lns[7].strip(' \n')
if (int(read_wt_ln[0]) == 1):
weight_file = (read_wt_ln.split(' '))[1]
read_wt = 1
for i in range(8, len(lns)): # From eighth line the convolutions and pooling instructions are given
intgs = lns[i].strip(' \n').split(' ')
operate = cnn_layer_types.index(intgs[0])
if (operate == 0): # check for convolution or pooling
cnn_layer_info.append((operate, int(intgs[1]), int(intgs[2]), int(intgs[3]), int(intgs[4])))
mask_depth.append(int(intgs[4]))
else:
cnn_layer_info.append((operate, int(intgs[1]), int(intgs[2])))
mask_depth.append(mask_depth[-1])
except:
print("Wrong Instruction list .. Exitting code")
exit(1)
f_ins.close()
# checking whether convolution operations are correct or not
def check_input():
row, col = inpt_dim[0], inpt_dim[1]
for i in range(len(cnn_layer_info)):
pad = 0 # the pad applied
if (cnn_layer_info[i][0] == 0):
pad = cnn_layer_info[i][3]
row = row - cnn_layer_info[i][1] + 2 * pad + 1
col = col - cnn_layer_info[i][2] + 2 * pad + 1
return row, col
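# Worked example of the size arithmetic in check_input (illustrative): a 28x28
# input through a 5x5 CONV with pad 2 stays 28x28 (28 - 5 + 2*2 + 1 = 28), and
# a following 2x2 MAXPOOL (stride 1 in this implementation) yields 27x27.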
row, col = check_input()
if (row <= 0 or col <= 0): # row and column should be positive to be valid
print("Invalid Convolution and pooling layers .. Exitting code")
exit(1)
inpLyr = row * col * mask_depth[-1] # no of input nodes for the fully connected layer
ndelst.insert(0, inpLyr)
# printing the layer informations
print(" Learn Rate = " + str(lrn_rate))
print(" No of epoch iterations = " + str(epoch_itr))
print(" No of input layer node = " + str(inpLyr))
print(" No of output layer node = " + str(optLyr))
print(" No of normalization = " + str(nrm_fac))
for i in range(len(cnn_layer_info)):
pad = 0
no_mask = None
if (cnn_layer_info[i][0] == 0):
pad = cnn_layer_info[i][3]
no_mask = cnn_layer_info[i][4]
print(" " + cnn_layer_types[cnn_layer_info[i][0]] + " " + str(cnn_layer_info[i][1]) + "X" + str(
cnn_layer_info[i][2]) + " pad " + str(pad) + " no of masks " + str(no_mask))
print(" No of Hidden layers = " + str(hydLyr))
print(" No of nodes in the hidden layers = ", end="")
for i in range(1, len(ndelst) - 1):
print(str(ndelst[i]), end=" ")
print("")
train_input = []
train_input_data = []
train_output = []
no_of_input_data = 0
# accepting input in the specified format and also the output
f_in = open(data_input_file, "r")
f_out = open(data_output_file, "r")
for lns in f_in:
intgs = [(float(x)) for x in lns.strip(' \n').split()]
if (len(intgs) == 0):
train_input.append(np.array(train_input_data))
train_input_data = []
no_of_input_data += 1
continue
train_input_data.append(np.multiply(1.0 / nrm_fac, intgs))
f_in.close()
for lns in f_out:
intgs = [float(x) for x in lns.split()]
train_output.append(intgs)
f_out.close()
def make_conv_mask(dep, row, col): # creating the mask for the convolution
return np.random.rand(dep, row, col) - .5 * np.ones(shape=(dep, row, col), dtype=float)
def make_max_pool(dep, row, col): # creating a dummy mask of same shape -- no use
return np.zeros(shape=(dep, row, col), dtype=float)
# for max pool, the positions of the maximum wrt to the weight mask is stored
def create_masks(): # returning the masks for the convolution
cnn_masks = [] # contains all the corelation masks for each layer
func_dict = {0: make_conv_mask, 1: make_max_pool} # the functions acc to masks
for i in range(len(cnn_layer_info)):
lyr_cnn_msk = [] # contains the mask for each layers
        if (cnn_layer_info[i][0] != 1):  # create masks for CONV layers
for k in range(mask_depth[i + 1]): # creating specified no of masks in each layer
lyr_cnn_msk.append(
func_dict[cnn_layer_info[i][0]](mask_depth[i], cnn_layer_info[i][1], cnn_layer_info[i][2]))
else:
lyr_cnn_msk.append(
func_dict[cnn_layer_info[i][0]](mask_depth[i], cnn_layer_info[i][1], cnn_layer_info[i][2]))
cnn_masks.append(lyr_cnn_msk)
return cnn_masks
#read weights and masks from a file
def read_masks_wts():
f_wt = open(weight_file, "r")
lns = f_wt.readlines()
c = 0
wtmtx = [] # the array of the corresponding weight matrices
masks_list = [] # the convolution masks
for i in range(len(cnn_layer_info)):
if( cnn_layer_info[i][0] == 0 ):
masks_list_lyr = []
for j in range(cnn_layer_info[i][-1]):
masks = np.zeros(shape=(mask_depth[i],cnn_layer_info[i][1],cnn_layer_info[i][2]), dtype=float)
for row in range(len(masks[0])):
row_ln = [x for x in lns[c].strip(' \n').split('\t')]
c+=1
for dep in range(len(masks)):
mtx_row = [(float(x)) for x in row_ln[dep].strip(' \n').split(' ')]
for col in range(len(masks[0][0])):
masks[dep][row][col] = mtx_row[col]
masks_list_lyr.append(masks)
c+=1
c+=1
else:
masks_list_lyr = []
masks = np.zeros(shape=(mask_depth[i], cnn_layer_info[i][1], cnn_layer_info[i][2]), dtype=float)
c = c + 3 + len(masks)
masks_list_lyr.append(masks)
masks_list.append(masks_list_lyr)
c+=1
for i in range(hydLyr + 1):
wt = [] # the weights
for j in range(0, ndelst[i + 1]):
intgs = [(float(x)) for x in lns[c].split()]
wt.append(np.array(intgs))
c += 1
wtmtx.append(np.array(wt))
c += 2
f_wt.close()
return wtmtx, masks_list
# creates the initial weights for the FC layer
def create_initial_wts():
wtmtx = [] # initial weight matrix list
for i in range(1, len(ndelst), 1):
# creating zero-centered weights
wtmtx.append(
np.random.rand(ndelst[i], ndelst[i - 1]) - .5 * np.ones(shape=(ndelst[i], ndelst[i - 1]), dtype=float))
return wtmtx
# used for adding zero pad as necessary
def add_padding(inpt, p):
opt_arr = np.zeros((len(inpt), len(inpt[0]) + 2 * p, len(inpt[0][0]) + 2 * p), dtype=float)
opt_arr[:, p:len(inpt[0]) + p, p:len(inpt[0][0]) + p] = inpt
return opt_arr
# used for removing the pad
def remove_pad(inpt, p):
return inpt[:, p:len(inpt[0]) - p, p:len(inpt[0][0]) - p]
def sigmoid(z):
# sigmoid function
return 1 / (1 + np.exp(-z))
def sigmoidPrime(z):
# gradient of sigmoid function
return np.exp(-z) / ((1 + np.exp(-z)) ** 2)
# used for applying convolution for CONV layers
def convolute(mask, inpt, opt_dep):
row = len(inpt[0]) - len(mask[0][0]) + 1
col = len(inpt[0][0]) - len(mask[0][0][0]) + 1
result = np.zeros(shape=(opt_dep, row, col), dtype=float)
for k in range(opt_dep):
for i in range(row):
for j in range(col):
result[k][i][j] = np.sum(
np.multiply(mask[k], inpt[:, i:(i + len(mask[0][0])), j:j + len(mask[0][0][0])]))
return result
# used for applying MAX Pool layers
def convolute_max_pool(mask, inpt, dep):
row = len(inpt[0]) - len(mask[0]) + 1
col = len(inpt[0][0]) - len(mask[0][0]) + 1
# print("row "+str(row))
# print("col " + str(col))
max_pos = np.zeros(shape=(dep, row, col), dtype=float)
result = np.zeros(shape=(dep, row, col), dtype=float)
for k in range(dep):
for i in range(row):
for j in range(col):
a = inpt[k, i:i + len(mask[0]), j:j + len(mask[0][0])]
pos = np.unravel_index(np.argmax(a, axis=None), a.shape)
                max_pos[k][i][j] = pos[0] * len(mask[0][0]) + pos[1]  # stores the window argmax as a row-major flattened index
result[k][i][j] = np.amax(a)
return max_pos, result
# performs the forward pass of the CONV and MAXPOOL layers
def forword_cnn(inpt, cnn_masks):
inpt_list = []
for i in range(len(cnn_layer_info)):
if (cnn_layer_info[i][0] == 1): # special treatment for MAXPOOL layers
# print(str(len(inpt[0])) + " in forward_cnn1")
inpt_list.append(inpt)
cnn_masks[i][0] = make_max_pool(mask_depth[i], cnn_layer_info[i][1], cnn_layer_info[i][2])
cnn_masks[i][0], inpt = convolute_max_pool(cnn_masks[i][0], inpt, mask_depth[i])
# print(str(len(inpt[0])) + " in forward_cnn2")
else:
if (cnn_layer_info[i][0] == 0): # adding padding for CONV layers
inpt = add_padding(inpt, cnn_layer_info[i][-2])
inpt_list.append(inpt)
inpt = convolute(cnn_masks[i], inpt, mask_depth[i + 1])
inpt_list.append(inpt)
return inpt_list, cnn_masks
# performs the forward pass of the FC layer
def forward_pass(wtmtx, lyrs):
lyrs_list = [] # the layers contained in a list
lyrs_list_no_sgm = [] # the layers before the sigmoid is applied
lyrs_list.append(lyrs)
lyrs_list_no_sgm.append(lyrs)
for i in range(0, len(ndelst) - 1):
lyrs_list_no_sgm.append(np.matmul(wtmtx[i], lyrs))
lyrs = sigmoid(lyrs_list_no_sgm[-1])
lyrs_list.append(lyrs)
return lyrs_list, lyrs_list_no_sgm
# calculating mask gradient for CONV
def calc_mask_grad(mask, opt_lyr_grad, inpt_lyr):
mask_grad = np.zeros(shape=(len(mask), len(mask[0]), len(mask[0][0])), dtype=float)
for k in range(len(inpt_lyr)): # calculating mask gradient layer-wise
grad_2d = np.zeros(shape=(len(mask[0]), len(mask[0][0])), dtype=float)
for i in range(len(mask[0])):
for j in range(len(mask[0][0])):
grad_2d[i][j] = np.sum(
np.multiply(opt_lyr_grad, inpt_lyr[k, i:i + len(opt_lyr_grad), j:j + len(opt_lyr_grad[0])]))
mask_grad[k, :, :] = grad_2d
return mask_grad
# calculating layer gradients at each position for CONV
def jugar_grad(mask, opt_grad, i1, j1):
res = 0.0
for i in range(i1, i1 - len(mask), -1):
for j in range(j1, j1 - len(mask[0]), -1):
            try:  # indices past the end raise IndexError and are skipped
                if (i < 0 or j < 0):  # skip negative indices (they would wrap around)
continue
res += opt_grad[i][j] * mask[i1 - i][j1 - j]
except:
pass
return res
# calculating layer gradients for CONV
def cnn_lyr_grad(mask_list, opt_lyr_grad, inpt_lyr):
inpt_lyr_grad = np.zeros(shape=(len(inpt_lyr), len(inpt_lyr[0]), len(inpt_lyr[0][0])), dtype=float)
for k in range(len(mask_list)):
mask = mask_list[k]
opt_grad = opt_lyr_grad[k]
for k1 in range(len(inpt_lyr)):
for i1 in range(len(inpt_lyr[0])):
for j1 in range(len(inpt_lyr[0][0])):
inpt_lyr_grad[k1][i1][j1] += jugar_grad(mask[k1], opt_grad, i1, j1)
return inpt_lyr_grad
# calculating layer gradients for MAX_POOL
def jugar_grad_max_pool(pos_mask, opt_grad, i1, j1, row_mask, col_mask):
res = 0.0
for i in range(i1, i1 - row_mask, -1):
for j in range(j1, j1 - col_mask, -1):
            try:  # indices past the end raise IndexError and are skipped
                if (i < 0 or j < 0):  # skip negative indices (they would wrap around)
continue
                mask = np.zeros(shape=(row_mask, col_mask), dtype=float)
                idx = int(pos_mask[i1 - i][j1 - j])
                rw = idx // col_mask  # decode the row-major flattened index
                cl = idx % col_mask
                mask[rw][cl] = 1.0
res += opt_grad[i][j] * mask[i1 - i][j1 - j]
except:
pass
return res
# calculating layer gradients for MAX_POOL
def cnn_lyr_grad_max_pool(pos_mask_list, opt_lyr_grad, inpt_lyr):
inpt_lyr_grad = np.zeros(shape=(len(inpt_lyr), len(inpt_lyr[0]), len(inpt_lyr[0][0])), dtype=float)
row_mask = len(inpt_lyr[0]) - len(opt_lyr_grad[0]) + 1
col_mask = len(inpt_lyr[0][0]) - len(opt_lyr_grad[0][0]) + 1
for k1 in range(len(inpt_lyr)):
pos_mask = pos_mask_list[k1]
opt_grad = opt_lyr_grad[k1]
for i1 in range(len(inpt_lyr[0])):
for j1 in range(len(inpt_lyr[0][0])):
inpt_lyr_grad[k1][i1][j1] = jugar_grad_max_pool(pos_mask, opt_grad, i1, j1, row_mask, col_mask)
return inpt_lyr_grad
# calculates the backward pass of the CONV and MAXPOOL layers
def backward_cnn(inpt_list, cnn_masks, last_lyr_grad):
mask_grad_list = []
layer_grad_list = []
layer_grad_list.append(last_lyr_grad)
for i in range(1, len(cnn_masks) + 1):
if (cnn_layer_info[-1 * i][0] == 0):
mask_grad_lyr = []
for j in range(len(cnn_masks[-1 * i])):
mask_grad_lyr.append(
calc_mask_grad(cnn_masks[-1 * i][j], layer_grad_list[-1][j], inpt_list[-1 * i - 1]))
mask_grad_list.append(mask_grad_lyr)
lyr_grad = cnn_lyr_grad(cnn_masks[-1 * i], layer_grad_list[-1], inpt_list[-1 * i - 1])
layer_grad_list.append(remove_pad(lyr_grad, cnn_layer_info[-1 * i][-2]))
inpt_list[-1 * i - 1] = remove_pad(inpt_list[-1 * i - 1], cnn_layer_info[-1 * i][-2])
elif (cnn_layer_info[-1 * i][0] == 1):
layer_grad_list.append(
cnn_lyr_grad_max_pool(cnn_masks[-1 * i][0], layer_grad_list[-1], inpt_list[-1 * i - 1]))
mask_grad_list.append(cnn_masks[-1 * i]) # adding dummy gradients to maintain indices
mask_grad_list = mask_grad_list[::-1]
layer_grad_list = layer_grad_list[::-1]
return mask_grad_list, layer_grad_list
# performs the cost function of the entire network
def cost_func(final_lyr, label):
for i in range(len(final_lyr)):
final_lyr[i] = final_lyr[i] - label[i] # difference between the required labels
err = np.linalg.norm(final_lyr) ** 2 # taking the squares
return final_lyr, err
# performs the backpropagation of the FC layer
def backprop(wtmtx, lyrs, lyrs_list_no_sgm):
lyr_grad = [] # gradient for the corresponding layers
wt_grad = [] # gradient for the weight matrices
opt_lyr = np.multiply(2, lyrs[-1]) # gradient from the error function
x = sigmoidPrime(np.array(lyrs_list_no_sgm[-1])) # gradient while passing the sigmoid layer
opt_lyr = np.multiply(opt_lyr, x) # final output layer gradient with weights multiplied
lyr_grad.append(opt_lyr)
for i in range(2, len(lyrs) + 1):
x = np.matmul(lyr_grad[-1], np.transpose(lyrs[-1 * i]))
wt_grad.append(x)
opt_lyr = np.matmul(np.transpose(wtmtx[1 - i]), lyr_grad[-1])
opt_lyr = np.multiply(opt_lyr, sigmoidPrime(np.array(lyrs_list_no_sgm[-1 * i])))
lyr_grad.append(opt_lyr)
wt_grad = wt_grad[::-1] # reversing the array
lyr_grad = lyr_grad[::-1] # reversing the array
return wt_grad, lyr_grad
# update the CONV and the MAXPOOL layers masks
def cnn_update_masks(masks, masks_grad):
global lrn_rate
new_masks = []
for i in range(len(masks)):
if (cnn_layer_info[i][0] == 1):
new_masks.append(masks[i])
else:
new_masks_lyr = []
for j in range(len(masks[i])):
new_masks_lyr.append(masks[i][j] + np.multiply(lrn_rate * (-1), masks_grad[i][j]))
new_masks.append(new_masks_lyr)
return new_masks
# updating the new weight matrix as per gradient of the FC layer
def wt_update(wtx_grad_dt_pts, wtx):
global lrn_rate
return np.add(wtx, np.multiply(lrn_rate * (-1), wtx_grad_dt_pts[0]))
#used for calculating gradients over all the data points
def run(cnn_masks, wtmx, k):
mask_grad_dt_pts = []
wt_grad_dt_pts = []
err_total = 0.0
for i in range(no_of_input_data):
inptt = np.array(train_input[i]).reshape(mask_depth[0], len(train_input[i]), len(train_input[i][0]))
inp, msk = forword_cnn(inptt, cnn_masks)
inp_last = np.array(inp[-1])
sgm, no_sgm = forward_pass(wtmx, inp_last.reshape(inpLyr, 1))
sgm[-1], err = cost_func(sgm[-1], train_output[i])
err_total += err # taking up for the total error
wt_grad, lyrs_grad = backprop(wtmx, sgm, no_sgm)
fst_lyr_grad = np.array(lyrs_grad[0]).reshape(inp_last.shape)
msk_grad, inp_grad = backward_cnn(inp, msk, fst_lyr_grad)
wt_grad_dt_pts.append(wt_grad)
mask_grad_dt_pts.append(msk_grad)
if (i != 0):
wt_grad_dt_pts[0] = np.add(wt_grad_dt_pts[0], wt_grad_dt_pts[1]) # the zeroth element is the sum
wt_grad_dt_pts = wt_grad_dt_pts[:1] # discarding the next element, the grad weight for that data point
for i in range(len(mask_grad_dt_pts[0])):
for j in range(len(mask_grad_dt_pts[0][i])):
mask_grad_dt_pts[0][i][j] = np.add(mask_grad_dt_pts[0][i][j], mask_grad_dt_pts[1][i][j])
mask_grad_dt_pts = mask_grad_dt_pts[:1] # discarding the next element, the grad mask for that data point
wtmx = wt_update(wt_grad_dt_pts, wtmx)
cnn_masks = cnn_update_masks(cnn_masks, mask_grad_dt_pts[0])
print("The error for the epoch " + str(k) + " " + str(err_total), end="")
return wtmx, cnn_masks, err_total
# used for copying CNN masks
def copy_cnn_mask(cnn_masks):
mask_new = []
for i in range(len(cnn_masks)):
mask_lyr_new = []
for j in range(len(cnn_masks[i])):
mask_lyr_new.append(np.copy(cnn_masks[i][j]))
mask_new.append(mask_lyr_new)
return mask_new
# used for executing the code and calculating the final masks and weights over all epochs
def execute():
print(" ")
global read_wt
if( read_wt == 0):
wtmx = create_initial_wts()
cnn_masks = create_masks()
else:
wtmx, cnn_masks = read_masks_wts()
tmstart = datetime.now()
wtmx, cnn_masks, err_prev = run(cnn_masks, wtmx, 1) # performing first iteration
tmend = datetime.now()
print(" Tiem required = " + str((tmend - tmstart).total_seconds()))
wtmx_min_err = np.copy(wtmx)
cnn_masks_min_err = copy_cnn_mask(cnn_masks)
for i in range(1, epoch_itr):
tmstart = datetime.now()
wtmx, cnn_masks, err_total = run(cnn_masks, wtmx, i + 1)
tmend = datetime.now()
print(" Tiem required = "+str((tmend-tmstart).total_seconds()))
if (err_total < err_prev): # taking the weight matrix for minimum error
wtmx_min_err = np.copy(wtmx)
cnn_masks_min_err = copy_cnn_mask(cnn_masks)
err_prev = err_total
print("\n The minimum error is " + str(err_prev))
return wtmx_min_err, cnn_masks_min_err
def write_Matrix(wtmtx,cnn_masks): #writing the weight matrices to a file
f=open("weightMatrix.txt","w")
#write the CONV and MAXPOOL masks
for i1 in range(len(cnn_masks)):
for j1 in range(len(cnn_masks[i1])):
if(cnn_layer_info[i1][0] == 0):
mask = cnn_masks[i1][j1]
else:
mask = np.zeros(shape=(len(cnn_masks[i1][j1]), cnn_layer_info[i1][1], cnn_layer_info[i1][2]), dtype=float)
for row in range(len(mask[0])):
for dep in range(len(mask)):
for col in range(len(mask[0][0])):
f.write(str(mask[dep][row][col])+" ")
f.write("\t")
f.write("\n")
f.write("\n")
f.write("\n")
f.write("\n")
# write the FC weights
for i in range(len(wtmtx)):
for j in range(len(wtmtx[i])):
for k in range(len(wtmtx[i][j])):
f.write( str(wtmtx[i][j][k]) +" " )
f.write("\n")
f.write("\n\n")
f.close()
wtmtx, cnn_msks = execute()
write_Matrix(wtmtx, cnn_msks)
``` |
{
"source": "AadiCool/variable_Neural_Net",
"score": 3
} |
#### File: variable_Neural_Net/Full_General/NN_general_train.py
```python
import numpy as np
ndelst = []
epoch_itr = inpLyr = optLyr = nrm_fac = hydLyr = 0
lrn_rate = 0.0
read_wt = 0
instructions_file = "instructions.txt"
data_input_file = "data_input_train.txt"
data_output_file = "data_output_train.txt"
weight_file = ""
f=open( instructions_file , "r")
lns = f.readlines()
try:
lrn_rate = float( lns[0].strip( ' \n' ) ) #first line should be learning rate
epoch_itr = int( lns[1].strip( ' \n' ) ) #second line should contain no of iterations
inpLyr = int( lns[2].strip( ' \n' ) ) #third line should contain no of nodes in input layer
optLyr = int( lns[3].strip( ' \n' ) ) #fourth line should contain no of nodes in output layer
nrm_fac = float( lns[4].strip( ' \n' ) ) #fifth line should contain normalization factor
hydLyr = int( lns[5].strip( ' \n' ) ) #sixth line should contain no of hidden layer
ndelst.append(inpLyr)
ndelst.extend( [ int(x) for x in lns[6].strip( ' \n' ).split(' ')] ) #seventh line should contain no of nodes in hidden layer
ndelst.append(optLyr)
read_wt_ln = lns[7].strip( ' \n' )
if ( int( read_wt_ln[0] ) == 1 ):
weight_file = ( read_wt_ln.split(' ') )[1]
read_wt = 1
f.close()
except:
print("Wrong Instruction list .. Exitting code")
exit(1)
print(" Learn Rate = "+str(lrn_rate) )
print(" No of epoch iterations = "+str(epoch_itr) )
print(" No of input layer node = "+str(inpLyr) )
print(" No of output layer node = "+str(optLyr) )
print(" No of normalization = "+str(nrm_fac) )
print(" No of Hidden layers = "+str(hydLyr) )
print(" No of nodes in the hidden layers = " , end="")
for i in range(1,len( ndelst) - 1 ):
print( str(ndelst[i]) , end=" ")
print("")
train_input = []
train_output = []
no_of_input_data = 0
#accepting input in the specified format and also the output
f_in = open( data_input_file ,"r")
f_out = open( data_output_file ,"r")
for lns in f_in:
intgs = [ ( float(x) ) for x in lns.split() ]
train_input.append( np.multiply( 1.0/nrm_fac , intgs ) )
no_of_input_data += 1
f_in.close()
for lns in f_out:
intgs = [ float(x) for x in lns.split() ]
train_output.append( intgs )
f_out.close()
def read_weights(): # used for reading weights from a file
f_wt = open( weight_file , "r")
lns = f_wt.readlines()
c = 0
wtmtx = [] # the array of the corresponding weight matrices
for i in range(hydLyr + 1):
wt = [] # the weights
for j in range(0, ndelst[i + 1]):
intgs = [(float(x)) for x in lns[c].split()]
wt.append(np.array(intgs))
c += 1
wtmtx.append(np.array(wt))
c += 2
f_wt.close()
return wtmtx
def create_initial_wts():
wtmtx = [] #initial weight matrix list
for i in range(1,len(ndelst),1):
#creating zero-centered weights
wtmtx.append( np.random.rand( ndelst[i], ndelst[i-1] ) - .5 * np.ones( shape=( ndelst[i], ndelst[i-1] ) ,dtype=float ) )
return wtmtx
def sigmoid(z):
#sigmoid function
return 1/(1+np.exp(-z))
def sigmoidPrime(z):
#gradient of sigmoid function
return np.exp(-z)/((1+np.exp(-z))**2)
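# Derivation check: sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)), which expands
# to exp(-z) / (1 + exp(-z))**2, exactly the closed form returned above.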
def forward_pass( wtmtx ,lyrs ):
lyrs_list = [] #the layers contained in a list
lyrs_list_no_sgm = [] #the layers before the sigmoid is applied
lyrs_list.append(lyrs)
lyrs_list_no_sgm.append(lyrs)
for i in range(0 , len(ndelst)-1 ):
lyrs_list_no_sgm.append( np.matmul( wtmtx[i] , lyrs ) )
lyrs = sigmoid( lyrs_list_no_sgm[-1] )
lyrs_list.append( lyrs )
return lyrs_list , lyrs_list_no_sgm
def cost_func( final_lyr , label):
for i in range( len( final_lyr ) ):
final_lyr[i] = final_lyr[i] - label[i] # difference between the required labels
err = np.linalg.norm( final_lyr ) ** 2 # taking the squares
return final_lyr , err
def backprop( wtmtx , lyrs , lyrs_list_no_sgm ):
lyr_grad = [] # gradient for the corresponding layers
wt_grad = [] # gradient for the weight matrices
opt_lyr = np.multiply( 2 , lyrs[-1] ) # gradient from the error function
x=sigmoidPrime( np.array( lyrs_list_no_sgm[-1] ) ) # gradient while passing the sigmoid layer
opt_lyr = np.multiply( opt_lyr , x ) # final output layer gradient with weights multiplied
lyr_grad.append(opt_lyr)
for i in range( 2, len(lyrs)+1 ):
x = np.matmul( lyr_grad[-1] , np.transpose( lyrs[-1*i] ) )
wt_grad.append( x )
opt_lyr = np.matmul( np.transpose( wtmtx[ 1-i ] ), lyr_grad[ -1 ] )
opt_lyr = np.multiply( opt_lyr , sigmoidPrime( np.array( lyrs_list_no_sgm[-1*i] ) ) )
lyr_grad.append( opt_lyr )
wt_grad = wt_grad[::-1] #reversing the array
lyr_grad = lyr_grad[::-1] #reversing the array
return wt_grad , lyr_grad
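    # Shape sanity check (illustrative): with layers [inpLyr, h, optLyr],
    # wt_grad[0] is (h, inpLyr) and wt_grad[1] is (optLyr, h), matching wtmtx,
    # so wt_update can apply lrn_rate * gradient elementwise.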
def wt_update( wtx_grad_dt_pts , wtx ): #updating the new weight matrix as per gradient
return np.add( wtx , np.multiply( lrn_rate*(-1) , wtx_grad_dt_pts[0] ) )
def run( wtmx , k ):
wt_grad_dt_pts = [] #the gradient of the weights for different data points
err_total = 0 #total error for all the data points
for i in range( no_of_input_data ):
sgm, no_sgm = forward_pass( wtmx , np.array( train_input[i] ).reshape(inpLyr, 1) )
sgm[-1], err = cost_func( sgm[-1], train_output[i] )
err_total += err # taking up for the total error
wt_grad , lyrs_grad = backprop( wtmx, sgm, no_sgm )
wt_grad_dt_pts.append( wt_grad )
if ( i!=0 ):
wt_grad_dt_pts[0] = np.add( wt_grad_dt_pts[0] , wt_grad_dt_pts[1] ) #the zeroth element is the sum
wt_grad_dt_pts = wt_grad_dt_pts[:1] #discarding the next element, the grad weight for that data point
print( "The error for the epoch "+str(k) + " " + str(err_total) )
return wt_update( wt_grad_dt_pts , wtmx ) , err_total
def execute():
print( " ")
global lrn_rate
global read_wt
if ( read_wt == 1 ):
wtmx = read_weights()
else:
wtmx = create_initial_wts()
wtmx , err_prev = run( wtmx, 1) #performing first iteration
wtmx_min_err = np.copy( wtmx )
for i in range(1 , epoch_itr):
wtmx , err_total = run( wtmx , i+1)
if ( err_total < err_prev ): # taking the weight matrix for minimum error
wtmx_min_err = np.copy( wtmx )
err_prev = err_total
print("\n The minimum error is "+str( err_prev ))
return wtmx_min_err
def predict( wtmx , input_lyr , result ):
for i in range(0 , len(ndelst)-1 ):
input_lyr = sigmoid( np.matmul( wtmx[i] , input_lyr ) )
max_index = 0
result = np.argmax( result ) #taking maximum no as the label
for i in range(1 , len(input_lyr)):
if ( input_lyr[i] > input_lyr[max_index] ):
max_index = i
if ( max_index == result ):
return True
else:
return False
wtmtx = execute()
crct_cnt = 0
print("\nTrain set prediction ")
for i in range( no_of_input_data ):
if ( predict( wtmtx , train_input[i] , train_output[i]) ):
crct_cnt += 1
print( "\n No of correct predictions is "+str(crct_cnt) )
f=open("weightMatrix.txt","w")
for i in range(len(wtmtx)):
for j in range(len(wtmtx[i])):
for k in range(len(wtmtx[i][j])):
f.write( str(wtmtx[i][j][k]) +" " )
f.write("\n")
f.write("\n\n")
``` |
{
"source": "Aadigoel/python-daily-pills",
"score": 3
} |
#### File: day-001/src/app2.py
```python
from flask import Flask
import logging
app = Flask(__name__)
logging.basicConfig(filename='app.log',level=logging.DEBUG)
@app.route("/")
def hello():
app.logger.info('Main request successfull')
return "Hello World!"
if __name__ == "__main__":
app.run(host='0.0.0.0', debug=True)
``` |
{
"source": "aadiharan99/LC-Diagnosis",
"score": 3
} |
#### File: aadiharan99/LC-Diagnosis/data_extractor.py
```python
import pandas as pd
import tciaclient
from tciaclient import *
# import skimage as image
import numpy as np
import matplotlib.pyplot as plt
import os,shutil
import zipfile,pathlib
os.chdir('/Volumes/Aadi WD/Capstone_Project')
#### defining a helper function #####
def getResponse(response):
    if response.getcode() != 200:
raise ValueError("Server returned an error")
else:
return response.read()
#### defining a function for image retrieval and returning the list of all the paths ######
def image_retriever(json_df):
pwd_=pathlib.Path('/Volumes/Aadi WD/Capstone_Project')
series_UID=[i for i in json_df.SeriesInstanceUID]
patient_id=[j for j in json_df.PatientID]
    ### retrieving images for all SeriesInstanceUIDs
folder_list=[]
for num in range(len(series_UID)):
response=client_connection.get_image(series_UID[num],downloadPath=pwd_,zipFileName="images.zip")
fid=zipfile.ZipFile("images.zip")
fid.extractall(patient_id[num])
folder_list.append(str(pwd_)+"/"+str(patient_id[num]))
print(str(num)+'/'+str(len(series_UID)))
return folder_list
##### defining a function to unpack images from their individual directories into the directory for that particular disease ####
def image_unpacker(str_path):
path_list=os.listdir(str_path)
sub_folder_list=[]
for subpath in path_list:
sub_folder_list.append(os.path.join(str_path,subpath))
# print(sub_folder_list)
for image_files in sub_folder_list:
image_folder_list=os.listdir(image_files)
for files in image_folder_list:
# print(files)
shutil.move(os.path.join(image_files,files),os.path.join(str_path,files))
return os.listdir(str_path)
#defining basic parameters for API call#
baseUrl="https://services.cancerimagingarchive.net/services/v4"
resource = "TCIA"
#### establishing connection to the TCIA API ####
client_connection=tciaclient.TCIAClient(baseUrl=baseUrl,resource=resource)
#### extracting images part1 #####
response=client_connection.get_series(collection="Lung-PET-CT-Dx",modality="PT")
str_response=getResponse(response)
series_df=pd.io.json.read_json(str_response)
# series_df=series_df[series_df.BodyPartExamined=="CHEST"]
images_list_1=image_retriever(series_df)
for path_ in images_list_1:
if "A" in path_:
shutil.move(path_,"Adenocarcinoma")
if "B" in path_:
shutil.move(path_,"Small Cell Carcinoma")
if "G" in path_:
shutil.move(path_,"Squamous Cell Carcinoma")
#### extracting images part2: Adding to Adenocarcinoma images #######
response2=client_connection.get_series(collection="TCGA-LUAD",modality="PT")
str_response2=getResponse(response2)
series_df2=pd.io.json.read_json(str_response2)
series_df2=series_df2[series_df2.BodyPartExamined=="LUNG"]
tcga_image_paths=image_retriever(series_df2)
tcga_image_paths=list(set(tcga_image_paths))
for a_paths in tcga_image_paths:
if "TCGA" in a_paths:
shutil.move(a_paths,"Adenocarcinoma")
tcga_image_paths.pop(tcga_image_paths.index(a_paths))
#### extracting images part3: Adding to Squamous cell carcinoma images ######
response3=client_connection.get_series(collection="TCGA-LUSC",modality="PT")
str_response3=getResponse(response3)
series_df3=pd.io.json.read_json(str_response3)
series_df3=series_df3[series_df3.BodyPartExamined=="LUNG"]
tcga_scimage_paths=image_retriever(series_df3)
tcga_scimage_paths=list(set(tcga_scimage_paths))
for i in range(len(tcga_scimage_paths)):
shutil.move(tcga_scimage_paths[i],"Squamous Cell Carcinoma")
# #### extracting images part4: Adding to squamous cell carcinoma images ######
# response4=client_connection.get_series(collection="CPTAC-LSCC",modality="PT")
# str_response4=getResponse(response4)
# series_df4=pd.io.json.read_json(str_response4)
# series_df4=series_df4[series_df4.BodyPartExamined=="LUNG"]
# cptac_sc_paths=image_retriever(series_df4)
# cptac_sc_paths=list(set(cptac_sc_paths))
# for i in range(len(cptac_sc_paths)):
# shutil.move(cptac_sc_paths[i],"Squamous Cell Carcinoma")
# image_unpacker('Adenocarcinoma')
# image_unpacker('Small Cell Carcinoma')
``` |
{
"source": "Aadil101/credit-card",
"score": 3
} |
#### File: Aadil101/credit-card/credit-card.py
```python
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from webdriver_manager.chrome import ChromeDriverManager
import imaplib, email
from bs4 import BeautifulSoup
import re
from time import sleep
import pandas as pd
import math
import yaml
import sys
# options
with open('config.yml', 'r') as stream:
try:
config = yaml.safe_load(stream)
except yaml.YAMLError as exc:
print(exc)
options = webdriver.ChromeOptions()
options.add_argument("user-data-dir={}".format(config['user-data-dir']))
# driver
driver = webdriver.Chrome(ChromeDriverManager().install(), options=options); sleep(5)
# login
def login():
# function to search for a key value pair
def search(client, key, value):
result, _bytes = client.search(None, key, '"{}"'.format(value))
return _bytes
# login to CC
driver.get(config['url']['login']); sleep(5)
driver.find_element_by_name('userid').send_keys(config['cc']['member-number']); sleep(5)
driver.find_element_by_name('password').send_keys(config['cc']['password']); sleep(5)
driver.find_element_by_xpath("//button[@type='submit']").send_keys(Keys.RETURN); sleep(10)
if driver.current_url == config['url']['mfa']:
driver.find_element_by_name('sendOTP').send_keys(Keys.RETURN); sleep(60)
# login to Gmail
client = imaplib.IMAP4_SSL('imap.gmail.com')
try:
client.login(config['email']['address'], config['email']['password'])
except:
print('Oops. You may have to enable "less secure app access" for your Gmail account. Please see README.md for instructions')
sys.exit(1)
_ = client.select('Inbox')
uids = search(client, 'FROM', config['cc']['mfa-email'])[0].split()
_, latest_email_bytes = client.fetch(uids[-1], '(RFC822)')
latest_email_text = str(latest_email_bytes[0][1])
soup = BeautifulSoup(latest_email_text, 'lxml')
pattern_1 = re.compile(r'passcode is (\d+)')
code = pattern_1.findall(soup.find_all(text=pattern_1)[0])[0]
# 2-factor
driver.find_element_by_id('mfaCodeInputField').send_keys(code); sleep(5)
driver.find_element_by_name('registerDevice').send_keys(Keys.RETURN); sleep(5)
driver.refresh(); sleep(5)
if driver.current_url == config['url']['home']:
return
else:
print("Oops. Driver's current URL does not match home URL you provided in config.yml")
login()
# get new monthly balance
def get_new_balance():
try:
driver.find_element_by_xpath("//a[@aria-label='e-Statements']").send_keys(Keys.RETURN); sleep(5)
except:
driver.find_element_by_class_name('navbar-toggle').send_keys(Keys.RETURN); sleep(5)
driver.find_element_by_link_text("e-Statements").click(); sleep(5)
driver.switch_to.frame(driver.find_element_by_tag_name("iframe")); sleep(10)
driver.find_elements_by_xpath('//input[@type="submit"]')[1].click(); sleep(5)
table = driver.find_element_by_xpath("//table[@summary='Summary of Account Activity']")
html = table.get_attribute('outerHTML')
table_pd = pd.read_html(html, index_col=0)[0]
new_balance = float(table_pd.loc['New Balance', 1])
since_date = driver.find_element_by_xpath('//select[@name="HistoryID"]').find_elements_by_xpath('//option')[0].text.strip(); sleep(5)
driver.get(config['url']['home']); sleep(5)
return new_balance, since_date
new_balance, since_date = get_new_balance()
# check if monthly balance has already been paid
def is_new_balance_paid():
driver.find_element_by_xpath('//a[@title="{}"]'.format(config['to-account'])).click(); sleep(5)
try:
driver.find_element_by_id('dLabeldate_range').click(); sleep(5)
except:
driver.maximize_window()
driver.find_element_by_id('dLabeldate_range').click(); sleep(5)
driver.find_element_by_xpath('//input[@type="text"]').click(); sleep(5)
driver.find_element_by_xpath('//input[@type="text"]').send_keys(since_date); sleep(5)
driver.find_element_by_id('date_range_go').click(); sleep(5)
table = driver.find_element_by_xpath('//table[@class="table cardlytics_history_table"]')
html = table.get_attribute('outerHTML')
table_pd = pd.read_html(html)[0]
payments = table_pd[table_pd['Description'].str.contains('PAYMENT')]
payment_amounts = payments['Amount'].apply(lambda x: x[1:x.index('Applied') if 'Applied' in x else len(x)]).astype(float).tolist()
driver.back(); sleep(5)
return new_balance in payment_amounts
is_paid = is_new_balance_paid()
# if monthly balance hasn't been paid, make transfer
def move_money(from_account, to_account, amount):
driver.find_element_by_id('transferLinkaccounts').click(); sleep(5)
from_dropdown, to_dropdown = driver.find_elements_by_class_name('dropdown'); sleep(5)
accounts = from_dropdown.find_elements_by_xpath('//div[starts-with(@id, "listAccountDescription")]')
from_dropdown_accounts, to_dropdown_accounts = accounts[:3], accounts[3:]
from_dropdown.click(); sleep(5)
from_dropdown_account_description_2_balance = dict(zip([item.text for item in from_dropdown_accounts], [float(item.text[1:].replace(',', '')) for item in driver.find_elements_by_xpath('//span[starts-with(@id, "accountBalance")]') if item.text != '']))
from_account_balance = from_dropdown_account_description_2_balance[from_account]
from_account_i = list(from_dropdown_account_description_2_balance.keys()).index(from_account)
if from_account_balance >= config['from-account-keep']+amount:
from_dropdown_accounts[from_account_i].click(); sleep(5)
to_dropdown.click(); sleep(5)
to_dropdown_account_description_2_balance = dict(zip([item.text for item in to_dropdown_accounts], [float(item.text[1:].replace(',', '')) for item in driver.find_elements_by_xpath('//span[starts-with(@id, "accountBalance")]') if item.text != '']))
to_account_i = list(to_dropdown_account_description_2_balance.keys()).index(to_account)
to_dropdown_accounts[to_account_i].click(); sleep(5)
try:
driver.find_element_by_id('otherAmountRadio').click(); sleep(5)
driver.find_element_by_id('otherAmountValue').send_keys(str(amount)); sleep(5)
except:
driver.find_element_by_id('amountInputField').send_keys(str(amount)); sleep(5)
driver.find_element_by_id('makeTransfer').click(); sleep(10)
driver.find_element_by_id('transfersConfirmationConfirmButton').click(); sleep(10)
driver.find_element_by_id('accountsButton').click(); sleep(5)
return True
else:
cursory_amount = math.ceil(config['from-account-keep'] + amount - from_account_balance)
driver.get(config['url']['home']); sleep(5)
success = move_money(config['pool-account'], from_account, cursory_amount)
if success:
return move_money(from_account, to_account, amount)
else:
return False
if not is_paid:
success = move_money(config['from-account'], config['to-account'], new_balance)
if not success:
        print('Transfer failed even after topping up from the pool account.')
# done!
driver.quit()
``` |
{
"source": "Aadil-Eyasim/prac4",
"score": 3
} |
#### File: Aadil-Eyasim/prac4/prac4.py
```python
import busio
import digitalio
import board
import adafruit_mcp3xxx.mcp3008 as MCP
from adafruit_mcp3xxx.analog_in import AnalogIn
import RPi.GPIO as GPIO
import threading
import time
import math
#create the spi bus
spi = busio.SPI(clock=board.SCK, MISO=board.MISO, MOSI=board.MOSI)
# create the cs (chip select)
cs = digitalio.DigitalInOut(board.D5)
# create the mcp object
mcp = MCP.MCP3008(spi, cs)
# create an analog input channel on pin 1
chan1 = AnalogIn(mcp, MCP.P1) #for Temp sensor
chan2 = AnalogIn(mcp, MCP.P2) #for LDR
#print('Raw ADC Value: ', chan.value)
#print('ADC Voltage: ' + str(chan.voltage) + 'V')
toggle_btn = 23
def setup():
GPIO.setmode(GPIO.BCM)
GPIO.setup(toggle_btn, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.add_event_detect(toggle_btn, GPIO.FALLING, callback=toggle_btn_pressed, bouncetime=300)
pass
t = 10
start = time.time()
def print_runtime_temp_thread():
thread = threading.Timer(t, print_runtime_temp_thread)
thread.daemon = True
thread.start()
end = time.time() #get the end time
runtime = math.trunc(end-start)
temp = round(((chan1.voltage - 0.5)*100), 2)
print('{:<12s} {:<15d} {:<5.2f} {:<6s} {:<13d}'.format(str(runtime)+'s', chan1.value, temp, 'C', chan2.value))
pass
def toggle_btn_pressed(toggle_btn):
global t
if GPIO.event_detected(toggle_btn):
if t==5:
t=1
elif t==10:
t=5
else:
t=10
return t
pass
if __name__ == "__main__":
setup()
print('{:<12s} {:<15s} {:<12s} {:<15s}'.format('Runtime','Temp Reading', 'Temp', 'Light Reading'))
print_runtime_temp_thread()
# Tell our program to run indefinitely
while True:
pass
``` |
{
"source": "aadiljamal/project",
"score": 3
} |
#### File: project/util/img_to_npy.py
```python
import os
import numpy as np
import cv2
image_dir = './image_dir'
label_file = 'val.txt'
image_height=224
image_width=224
image_chans=3
classes=3
one_hot=True
#one_hot=True
output_file = 'dataset.npz'
compress = True
#compress = False
def center_crop(image,out_height,out_width):
image_height, image_width = image.shape[:2]
offset_height = (image_height - out_height) // 2
offset_width = (image_width - out_width) // 2
image = image[offset_height:offset_height+out_height, offset_width:offset_width+out_width,:]
return image
def resize_maintain_aspect(image,target_h,target_w):
image_height, image_width = image.shape[:2]
if image_height > image_width:
new_width = target_w
new_height = int(image_height*(target_w/image_width))
else:
new_height = target_h
new_width = int(image_width*(target_h/image_height))
image = cv2.resize(image,(new_width,new_height),interpolation=cv2.INTER_CUBIC)
return image
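# Illustrative sizing (made-up numbers): a 480x640 (HxW) input with a 224x224
# target resizes the shorter side to 224 (640 -> int(640*224/480) = 298 wide),
# and center_crop() then trims the longer side back down to 224.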
def main():
# open & read text file that lists all images and their labels
f = open(label_file, 'r')
listImages = f.readlines()
f.close()
# make placeholder arrays
x = np.ndarray(shape=(len(listImages),image_height,image_width,3), dtype=np.float32, order='C')
if (one_hot):
y = np.ndarray(shape=(len(listImages),classes), dtype=np.uint32, order='C')
else:
y = np.ndarray(shape=(len(listImages)), dtype=np.uint32, order='C')
for i in range(len(listImages)):
image_name,label = listImages[i].split()
# open image to numpy array
img = cv2.imread(os.path.join(image_dir,image_name))
# resize
img = resize_maintain_aspect(img,image_height,image_width)
# center crop to target height & width
img = center_crop(img,image_height,image_width)
# switch to RGB from BGR
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
# normalize then write into placeholder
x[i] = (img/255.0).astype(np.float32)
if (one_hot):
label_1hot = np.zeros(classes,dtype=np.uint32,order='C')
np.put(label_1hot,int(label),1)
y[i] = label_1hot
else:
y[i] = int(label)
if (compress):
np.savez_compressed(output_file, x=x, y=y)
else:
np.savez(output_file, x=x, y=y)
print(' Saved to',output_file)
# now load back in and unpack
train_f = np.load(output_file)
x_train = train_f['x']
y_train = train_f['y']
# this should print 2 identical integers
_,label_0 = listImages[7].split()
print(label_0, np.argmax(y_train[7]))
return
if __name__ == '__main__':
main()
```
#### File: project/util/import numpy as np.py
```python
import numpy as np
import cairocffi as cairo
def vector_to_raster(vector_images, side=28, line_diameter=16, padding=16, bg_color=(0,0,0), fg_color=(1,1,1)):
"""
padding and line_diameter are relative to the original 256x256 image.
"""
original_side = 256.
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, side, side)
ctx = cairo.Context(surface)
ctx.set_antialias(cairo.ANTIALIAS_BEST)
ctx.set_line_cap(cairo.LINE_CAP_ROUND)
ctx.set_line_join(cairo.LINE_JOIN_ROUND)
ctx.set_line_width(line_diameter)
# scale to match the new size
# add padding at the edges for the line_diameter
# and add additional padding to account for antialiasing
total_padding = padding * 2. + line_diameter
new_scale = float(side) / float(original_side + total_padding)
ctx.scale(new_scale, new_scale)
ctx.translate(total_padding / 2., total_padding / 2.)
raster_images = []
for vector_image in vector_images:
# clear background
ctx.set_source_rgb(*bg_color)
ctx.paint()
bbox = np.hstack(vector_image).max(axis=1)
offset = ((original_side, original_side) - bbox) / 2.
offset = offset.reshape(-1,1)
centered = [stroke + offset for stroke in vector_image]
# draw strokes, this is the most cpu-intensive part
ctx.set_source_rgb(*fg_color)
for xv, yv in centered:
ctx.move_to(xv[0], yv[0])
for x, y in zip(xv, yv):
ctx.line_to(x, y)
ctx.stroke()
data = surface.get_data()
raster_image = np.copy(np.asarray(data)[::4])
raster_images.append(raster_image)
return raster_images
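# Illustrative usage (the stroke data is made up): each vector image is a list
# of strokes, each stroke a pair of x/y coordinate arrays in the original
# 256x256 space, e.g. a single diagonal stroke:
#   doodle = [np.array([[0, 255], [0, 255]])]
#   bitmaps = vector_to_raster([doodle])  # -> list of flat side*side uint8 arrays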
``` |
{
"source": "aadilk97/hw01",
"score": 3
} |
#### File: hw01/test/test_func.py
```python
def inc(x):
return x + 1
def mul(x, y):
return x * y
def test_answer():
assert inc(3) == 4
assert mul(3, 4) == 12
``` |
{
"source": "AadilLatif/HELICS-Examples",
"score": 2
} |
#### File: python/timing-demo/runner.py
```python
import libtmux
def main():
session = libtmux.Server().list_sessions()[0]
session.attached_window.split_window(vertical=False)
for i, p in enumerate(session.attached_window.children):
p.clear()
p.send_keys("python ./timing-federate{}.py".format(i+1))
p.enter()
if __name__ == "__main__":
main()
```
#### File: python/timing-demo/timing-federate2.py
```python
import helics as h
def get_input(grantedtime):
valid_input = False
while not valid_input:
print(
"Enter request_time (int) (and value_to_send (float)) [e.g.: 4, 10.0]: ",
end="",
)
string = input()
string = string.strip()
request_time_str = string.replace(",", " ").split(" ")[0]
try:
request_time = int(request_time_str)
if request_time <= grantedtime:
raise RuntimeError("Cannot proceed here because invalid input.")
except:
print(
"request_time has to be an 'int' and has to be greater than grantedtime."
)
valid_input = False
continue
else:
valid_input = True
try:
value_to_send = (
string.replace(request_time_str, "").strip().strip(",").strip()
)
except:
value_to_send = None
valid_input = True
continue
try:
value_to_send = str(value_to_send)
except:
print("value_to_send must be a str or be blank")
valid_input = False
continue
else:
valid_input = True
return request_time, value_to_send
def create_value_federate(deltat=1.0, fedinitstring="--federates=1 --tick=0"):
fedinfo = h.helicsCreateFederateInfo()
h.helicsFederateInfoSetCoreName(fedinfo, "TestB Federate")
h.helicsFederateInfoSetCoreTypeFromString(fedinfo, "zmq")
h.helicsFederateInfoSetCoreInitString(fedinfo, fedinitstring)
h.helicsFederateInfoSetTimeProperty(fedinfo, h.helics_property_time_delta, deltat)
h.helicsFederateInfoSetIntegerProperty(fedinfo, h.helics_property_int_log_level, 1)
fed = h.helicsCreateCombinationFederate("TestB Federate", fedinfo)
return fed
def destroy_value_federate(fed):
h.helicsFederateFinalize(fed)
state = h.helicsFederateGetState(fed)
h.helicsFederateFree(fed)
h.helicsCloseLibrary()
def main():
fed = create_value_federate()
pubid = h.helicsFederateRegisterGlobalTypePublication(
fed, "federate2-to-federate1", "string", ""
)
subid = h.helicsFederateRegisterSubscription(fed, "federate1-to-federate2", "")
epid = h.helicsFederateRegisterGlobalEndpoint(fed, "endpoint2", "")
h.helicsInputSetDefaultNamedPoint(subid, "", 0)
print("Entering execution mode")
h.helicsFederateEnterExecutingMode(fed)
grantedtime = -1
while True:
try:
stop_at_time, value_to_send = get_input(grantedtime)
except KeyboardInterrupt:
print("")
break
while grantedtime < stop_at_time:
print(">>>>>>>> Requesting time = {}".format(stop_at_time))
grantedtime = h.helicsFederateRequestTime(fed, stop_at_time)
grantedtime = int(grantedtime)
if grantedtime != stop_at_time:
value = h.helicsSubscriptionGetKey(subid)
print("Interrupt value '{}' from Federate 1".format(value))
print("<<<<<<<< Granted Time = {}".format(grantedtime))
assert (
grantedtime == stop_at_time
), "stop_at_time = {}, grantedtime = {}".format(stop_at_time, grantedtime)
if value_to_send is not None and value_to_send != "":
print("Sending '{}' to Federate 1".format(value_to_send))
h.helicsPublicationPublishString(pubid, str(value_to_send))
h.helicsEndpointSendMessageRaw(epid, "endpoint1", str(value_to_send))
value = h.helicsSubscriptionGetKey(subid)
print("Received value '{}' from Federate 1".format(value))
while h.helicsEndpointHasMessage(epid):
value = h.helicsEndpointGetMessage(epid)
print(
"Received message '{}' at time {} from Federate 1".format(
value.data, value.time
)
)
print("----------------------------------")
destroy_value_federate(fed)
if __name__ == "__main__":
main()
```
#### File: advanced/advanced_orchestration/plot_timeseries.py
```python
import numpy as np
import matplotlib.pyplot as plt
plt.style.use('ggplot') # this was just used for the examples
# data
t = np.linspace(0,100,100)
y = 5 * np.sin(t/10) + 4*np.random.randn(100*150).reshape(150, 100)
y_ = 5 * np.sin(t/10) + 4*np.random.randn(100*4000).reshape(4000, 100)
t__ = np.linspace(0,100,6)
y__ = 5 * np.sin(t__/10) + 4*np.random.randn(6*4000).reshape(4000, 6)
# leftover notebook-style inspection of shapes and percentile bands
t.shape
y.shape
perc1 = np.percentile(y, np.linspace(1, 50, num=10, endpoint=False), axis=0)  # .shape
n = 10
np.percentile(y, np.linspace(1, 50, num=n, endpoint=False), axis=0)
np.percentile(y, np.linspace(50, 99, num=n+1)[1:], axis=0)
# credit goes to this thread:
# https://github.com/arviz-devs/arviz/issues/2#issuecomment-310468720
def tsplot(x, y, n=20, percentile_min=1, percentile_max=99, color='r', plot_mean=True, plot_median=False, line_color='k', **kwargs):
# calculate the lower and upper percentile groups, skipping 50 percentile
perc1 = np.percentile(y, np.linspace(percentile_min, 50, num=n, endpoint=False), axis=0)
perc2 = np.percentile(y, np.linspace(50, percentile_max, num=n+1)[1:], axis=0)
if 'alpha' in kwargs:
alpha = kwargs.pop('alpha')
else:
alpha = 1/n
# fill lower and upper percentile groups
for p1, p2 in zip(perc1, perc2):
plt.fill_between(x, p1, p2, alpha=alpha, color=color, edgecolor=None)
if plot_mean:
plt.plot(x, np.mean(y, axis=0), color=line_color)
if plot_median:
plt.plot(x, np.median(y, axis=0), color=line_color)
return plt.gca()
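# The calls below exercise tsplot(): n=100 draws a smooth gradient of nested
# percentile bands, n=5 a coarse one, and n=1 shades a single band (the IQR
# for percentiles 25-75, or a 90% interval for 5-95).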
tsplot(t, y, n=100, percentile_min=2.5, percentile_max=97.5, plot_median=True, plot_mean=False, color='g', line_color='navy')
plt.plot()
plt.show()
tsplot(t, y, n=5, percentile_min=2.5, percentile_max=97.5, plot_median=True, plot_mean=False, color='g', line_color='navy')
# IQR
tsplot(t, y_, n=1, percentile_min=25, percentile_max=75, plot_median=False, plot_mean=False, color='g', line_color='navy', alpha=0.3)
# 90% interval
tsplot(t, y_, n=1, percentile_min=5, percentile_max=95, plot_median=True, plot_mean=False, color='g', line_color='navy', alpha=0.3)
y_.shape
```
#### File: fundamental_message_comm/filter_federate/Battery.py
```python
import helics as h
import logging
import numpy as np
import matplotlib.pyplot as plt
logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.DEBUG)
def destroy_federate(fed):
'''
As part of ending a HELICS co-simulation it is good housekeeping to
formally destroy a federate. Doing so informs the rest of the
federation that it is no longer a part of the co-simulation and they
should proceed without it (if applicable). Generally this is done
when the co-simulation is complete and all federates end execution
at more or less the same wall-clock time.
:param fed: Federate to be destroyed
:return: (none)
'''
status = h.helicsFederateDisconnect(fed)
h.helicsFederateFree(fed)
h.helicsCloseLibrary()
logger.info('Federate finalized')
def get_new_battery(numBattery):
'''
    Using hard-coded probabilities, a distribution of batteries with fixed
    sizes is generated. The number of batteries is a user-provided parameter.
:param numBattery: Number of batteries to generate
:return
listOfBatts: List of generated batteries
'''
# Probabilities of a new EV charging at the specified level.
lvl1 = 0.2
lvl2 = 0.2
lvl3 = 0.6
listOfBatts = np.random.choice([25,62,100],numBattery,p=[lvl1,lvl2,
lvl3]).tolist()
return listOfBatts
if __name__ == "__main__":
np.random.seed(2608)
########## Registering federate and configuring from JSON################
fed = h.helicsCreateValueFederateFromConfig("BatteryConfig.json")
federate_name = h.helicsFederateGetName(fed)
logger.info(f'Created federate {federate_name}')
print(f'Created federate {federate_name}')
sub_count = h.helicsFederateGetInputCount(fed)
logger.debug(f'\tNumber of subscriptions: {sub_count}')
pub_count = h.helicsFederateGetPublicationCount(fed)
logger.debug(f'\tNumber of publications: {pub_count}')
# Diagnostics to confirm JSON config correctly added the required
# publications and subscriptions
subid = {}
sub_name = {}
for i in range(0, sub_count):
subid[i] = h.helicsFederateGetInputByIndex(fed, i)
sub_name[i] = h.helicsSubscriptionGetTarget(subid[i])
logger.debug(f'\tRegistered subscription---> {sub_name[i]}')
pubid = {}
pub_name = {}
for i in range(0, pub_count):
pubid[i] = h.helicsFederateGetPublicationByIndex(fed, i)
pub_name[i] = h.helicsPublicationGetName(pubid[i])
logger.debug(f'\tRegistered publication---> {pub_name[i]}')
############## Entering Execution Mode ##################################
h.helicsFederateEnterExecutingMode(fed)
logger.info('Entered HELICS execution mode')
hours = 24*7 # one week
total_interval = int(60 * 60 * hours)
update_interval = int(h.helicsFederateGetTimeProperty(
fed,
h.HELICS_PROPERTY_TIME_PERIOD))
grantedtime = 0
# Define battery physics as empirical values
socs = np.array([0, 1])
effective_R = np.array([8, 150])
batt_list = get_new_battery(pub_count)
current_soc = {}
for i in range (0, pub_count):
current_soc[i] = (np.random.randint(0,60))/100
# Data collection lists
time_sim = []
current = []
soc = {}
# As long as granted time is in the time range to be simulated...
while grantedtime < total_interval:
# Time request for the next physical interval to be simulated
requested_time = (grantedtime+update_interval)
logger.debug(f'Requesting time {requested_time}\n')
grantedtime = h.helicsFederateRequestTime (fed, requested_time)
logger.debug(f'Granted time {grantedtime}')
for j in range(0,sub_count):
logger.debug(f'Battery {j+1} time {grantedtime}')
# Get the applied charging voltage from the EV
charging_voltage = h.helicsInputGetDouble((subid[j]))
logger.debug(f'\tReceived voltage {charging_voltage:.2f} from input'
f' {h.helicsSubscriptionGetTarget(subid[j])}')
# EV is fully charged and a new EV is moving in
            # This is indicated by the charger removing the charging voltage
            # when it thinks the EV is full
if charging_voltage == 0:
new_batt = get_new_battery(1)
batt_list[j] = new_batt[0]
current_soc[j] = (np.random.randint(0,80))/100
charging_current = 0
# Calculate charging current and update SOC
R = np.interp(current_soc[j], socs, effective_R)
logger.debug(f'\tEffective R (ohms): {R:.2f}')
charging_current = charging_voltage / R
logger.debug(f'\tCharging current (A): {charging_current:.2f}')
added_energy = (charging_current * charging_voltage * \
update_interval/3600) / 1000
logger.debug(f'\tAdded energy (kWh): {added_energy:.4f}')
current_soc[j] = current_soc[j] + added_energy / batt_list[j]
logger.debug(f'\tSOC: {current_soc[j]:.4f}')
# Publish out charging current
h.helicsPublicationPublishDouble(pubid[j], charging_current)
logger.debug(f'\tPublished {pub_name[j]} with value '
f'{charging_current:.2f}')
# Store SOC for later analysis/graphing
if subid[j] not in soc:
soc[subid[j]] = []
soc[subid[j]].append(float(current_soc[j]))
# Data collection vectors
time_sim.append(grantedtime)
current.append(charging_current)
# Cleaning up HELICS stuff once we've finished the co-simulation.
destroy_federate(fed)
# Printing out final results graphs for comparison/diagnostic purposes.
xaxis = np.array(time_sim)/3600
y = []
for key in soc:
y.append(np.array(soc[key]))
plt.figure()
fig, axs = plt.subplots(5, sharex=True, sharey=True)
fig.suptitle('SOC of each EV Battery')
axs[0].plot(xaxis, y[0], color='tab:blue', linestyle='-')
axs[0].set_yticks(np.arange(0,1.25,0.5))
axs[0].set(ylabel='Batt1')
axs[0].grid(True)
axs[1].plot(xaxis, y[1], color='tab:blue', linestyle='-')
axs[1].set(ylabel='Batt2')
axs[1].grid(True)
axs[2].plot(xaxis, y[2], color='tab:blue', linestyle='-')
axs[2].set(ylabel='Batt3')
axs[2].grid(True)
axs[3].plot(xaxis, y[3], color='tab:blue', linestyle='-')
axs[3].set(ylabel='Batt4')
axs[3].grid(True)
axs[4].plot(xaxis, y[4], color='tab:blue', linestyle='-')
axs[4].set(ylabel='Batt5')
axs[4].grid(True)
plt.xlabel('time (hr)')
#for ax in axs():
# ax.label_outer()
plt.show()
``` |
{
"source": "aadilmehdis/oppia",
"score": 2
} |
#### File: core/domain/visualization_registry.py
```python
import inspect
import os
from extensions.visualizations import models
import feconf
import utils
class Registry(object):
"""Registry of all visualizations."""
# Dict mapping visualization class names to their classes.
visualizations_dict = {}
@classmethod
def _refresh_registry(cls):
cls.visualizations_dict.clear()
# Add new visualization instances to the registry.
for name, clazz in inspect.getmembers(models, inspect.isclass):
if name.endswith('_test') or name == 'BaseVisualization':
continue
ancestor_names = [
base_class.__name__ for base_class in inspect.getmro(clazz)]
if 'BaseVisualization' not in ancestor_names:
continue
cls.visualizations_dict[clazz.__name__] = clazz
@classmethod
def get_full_html(cls):
"""Returns the HTML bodies for all visualizations."""
js_directives = utils.get_file_contents(os.path.join(
feconf.VISUALIZATIONS_DIR, 'visualizations.js'))
return '<script>%s</script>\n' % (js_directives)
@classmethod
def get_visualization_class(cls, visualization_id):
"""Gets a visualization class by its id (which is also its class name).
The registry will refresh if the desired class is not found. If it's
still not found after the refresh, this method will throw an error.
"""
if visualization_id not in cls.visualizations_dict:
cls._refresh_registry()
if visualization_id not in cls.visualizations_dict:
raise TypeError(
'\'%s\' is not a valid visualization id.' % visualization_id)
return cls.visualizations_dict[visualization_id]
@classmethod
def get_all_visualization_ids(cls):
"""Gets a visualization class by its id
(which is also its class name).
"""
if not cls.visualizations_dict:
cls._refresh_registry()
return cls.visualizations_dict.keys()
``` |
{
"source": "aadilmughal786/ACulator-Tkinter",
"score": 4
} |
#### File: aadilmughal786/ACulator-Tkinter/ACulator.py
```python
import tkinter
from math import *
from built_in import *
from user_define import *
#------------------- developer variables ----------------------------------
developer = "<NAME>"
version = "v1.0"
#--------------------------- main window(root) ----------------------------
root = tkinter.Tk()
root.title("ACulator")
root.resizable(0,0)
#------------------------- global variable --------------------------
font_btn_num = "bold 11"
font_entry = "bold 16"
entry_var = tkinter.StringVar()
#------------------------------- functions ----------------------
def WantExit():
def exitt():
root1.destroy()
root.destroy()
root1 = tkinter.Tk()
root1.title("Want Exit?")
root1.resizable(0,0)
root1.geometry("300x100")
    Msg = tkinter.Label(root1,text = "Do you want to exit?",font = "20")
    Msg.place(x = 70,y = 20)
    CancelButton = tkinter.Button(root1,text = "Cancel",command = root1.destroy)
    CancelButton.place(x = 70,y = 60)
YesButton = tkinter.Button(root1,text = "Yes",command = exitt)
YesButton.place( x =190,y = 60)
root1.mainloop()
def char_print(symbol):
entry_var.set(entry_var.get()+symbol)
def facto():
if len(entry_var.get())==0:
entry_var.set("Enter Somthing")
return 0
try:
if int(entry_var.get())>25:
entry_var.set("Too Long")
else:
entry_var.set(str(factorial(int(entry_var.get()))))
except:
entry_var.set("Error")
def allclear():
if len(entry_var.get())==0:
entry_var.set("Nothing to clean")
return 0
else:
entry_var.set("")
def square_root():
if len(entry_var.get())==0:
entry_var.set("Enter Somthing")
return 0
try:
entry_var.set(str(sqrt(float(entry_var.get()))))
except:
entry_var.set("Error")
def percent():
if len(entry_var.get())==0:
entry_var.set("Enter Somthing")
return 0
try:
entry_var.set(str(float(eval(entry_var.get()))/100))
except:
entry_var.set("Error")
def equal():
if len(entry_var.get())==0:
entry_var.set("Enter Somthing")
return 0
try:
entry_var.set(eval(entry_var.get()))
except:
entry_var.set("Error")
#------------------------- menu -----------------------------------------
menubar = tkinter.Menu(root)
menubar.add_command(label = "About")
menubar.add_command(label = "Exit",command = WantExit)
root.config(menu = menubar)
#------------------ Entry for input ---------------------------
EntryInput = tkinter.Entry(root,font = font_entry,justify = tkinter.RIGHT,textvariable = entry_var)
EntryInput.grid(row = 0,column = 0,columnspan = 5,sticky="nsew",padx = 3,pady = (3,3))
#------------------ first Buttons Row ---------------------------
ButtonFactorial = tkinter.Button(root,text = "x!" ,width = 6,command = facto)
ButtonFactorial.grid(row = 3,column = 0,sticky="nsew",ipady = 3,padx = (3,0))
ParaOpen = tkinter.Button(root,text = "(",width = 6,command = lambda:char_print("("))
ParaOpen.grid(row = 3,column = 1,sticky="nsew")
ParaClose = tkinter.Button(root,text = ")",width = 6,command = lambda:char_print(")"))
ParaClose.grid(row = 3,column = 2,sticky="nsew")
PerButton = tkinter.Button(root,text = "%",width = 6,command = percent)
PerButton.grid(row = 3,column = 3,sticky="nsew")
AllClear = tkinter.Button(root,text = "AC",fg = "red",font = font_btn_num,width = 6,command = allclear)
AllClear.grid(row = 3,column = 4,sticky="nsew",padx = (0,3))
#------------------ second Buttons Row ---------------------------
PiButton = tkinter.Button(root,text = "pi",command = lambda:char_print("pi"))
PiButton.grid(row = 4,column = 0,sticky="nsew",ipady = 3,padx = (3,0))
ButtonSeven = tkinter.Button(root,text = "7",font = font_btn_num,command = lambda:char_print("7"))
ButtonSeven.grid(row = 4,column = 1,sticky="nsew")
ButtonEight = tkinter.Button(root,text = "8",font = font_btn_num,command = lambda:char_print("8"))
ButtonEight.grid(row = 4,column = 2,sticky="nsew")
ButtonNine = tkinter.Button(root,text = "9",font = font_btn_num,command = lambda:char_print("9"))
ButtonNine.grid(row = 4,column = 3,sticky="nsew")
DivButton = tkinter.Button(root,text = "/",command = lambda:char_print("/"))
DivButton.grid(row = 4,column = 4,sticky="nsew",padx = (0,3))
#------------------ third Buttons Row ---------------------------
ValueOfe = tkinter.Button(root,text = "e",command = lambda:char_print("e"))
ValueOfe.grid(row = 5,column = 0,sticky="nsew",ipady = 3,padx = (3,0))
ButtonFour = tkinter.Button(root,text = "4",font = font_btn_num,command = lambda:char_print("4"))
ButtonFour.grid(row = 5,column = 1,sticky="nsew")
ButtonFive = tkinter.Button(root,text = "5",font = font_btn_num,command = lambda:char_print("5"))
ButtonFive.grid(row = 5,column = 2,sticky="nsew")
ButtonSix = tkinter.Button(root,text = "6",font = font_btn_num,command = lambda:char_print("6"))
ButtonSix.grid(row = 5,column = 3,sticky="nsew")
MulButton = tkinter.Button(root,text = "*",command = lambda:char_print("*"))
MulButton.grid(row = 5,column = 4,sticky="nsew",padx = (0,3))
#------------------ forth Buttons Row ---------------------------
ButtonSqrt = tkinter.Button(root,text = "√",command = square_root)
ButtonSqrt.grid(row = 6,column = 0,sticky="nsew",ipady = 3,padx = (3,0))
ButtonOne = tkinter.Button(root,text = "1",font = font_btn_num,command = lambda:char_print("1"))
ButtonOne.grid(row = 6,column = 1,sticky="nsew")
ButtonTwo = tkinter.Button(root,text = "2",font = font_btn_num,command = lambda:char_print("2"))
ButtonTwo.grid(row = 6,column = 2,sticky="nsew")
ButtonThree = tkinter.Button(root,text = "3",font = font_btn_num,command = lambda:char_print("3"))
ButtonThree.grid(row = 6,column = 3,sticky="nsew")
SubButton = tkinter.Button(root,text = "-",command = lambda:char_print("-"))
SubButton.grid(row = 6,column = 4,sticky="nsew",padx = (0,3))
#------------------ fifth Buttons Row ---------------------------
DotButton = tkinter.Button(root,text = ".",command = lambda:char_print("."))
DotButton.grid(row = 7,column = 0,sticky="nsew",ipady = 3,padx = (3,0),pady = (0,3))
ButtonZero = tkinter.Button(root,text = "0",font = font_btn_num,command = lambda:char_print("0"))
ButtonZero.grid(row = 7,column = 1,sticky="nsew",pady = (0,3))
EvalButton = tkinter.Button(root,text = "=",font = font_btn_num,command = equal)
EvalButton.grid(row = 7,column = 2 ,columnspan = 2,sticky="nsew",pady = (0,3))
AddButton = tkinter.Button(root,text = "+",command = lambda:char_print("+"))
AddButton.grid(row = 7,column = 4,sticky="nsew",padx = (0,3),pady = (0,3))
#------------------- event loop (for event handling) -----------------------------
root.mainloop()
``` |
{
"source": "aadil-srivastava01/haystack",
"score": 3
} |
#### File: haystack/database/elasticsearch.py
```python
from elasticsearch import Elasticsearch
from elasticsearch.helpers import scan
from haystack.database.base import BaseDocumentStore
class ElasticsearchDocumentStore(BaseDocumentStore):
def __init__(
self,
host="localhost",
username="",
password="",
index="document",
search_fields="text",
text_field="text",
name_field="name",
doc_id_field="document_id",
tag_fields=None,
custom_mapping=None,
):
self.client = Elasticsearch(hosts=[{"host": host}], http_auth=(username, password))
# if no custom_mapping is supplied, use the default mapping
if not custom_mapping:
custom_mapping = {
"mappings": {
"properties": {
name_field: {"type": "text"},
text_field: {"type": "text"},
doc_id_field: {"type": "text"},
}
}
}
# create an index if not exists
self.client.indices.create(index=index, ignore=400, body=custom_mapping)
self.index = index
# configure mappings to ES fields that will be used for querying / displaying results
if type(search_fields) == str:
search_fields = [search_fields]
self.search_fields = search_fields
self.text_field = text_field
self.name_field = name_field
self.tag_fields = tag_fields
self.doc_id_field = doc_id_field
def get_document_by_id(self, id):
query = {"filter": {"term": {"_id": id}}}
result = self.client.search(index=self.index, body=query)["hits"]["hits"]
if result:
document = {
"id": result[self.doc_id_field],
"name": result[self.name_field],
"text": result[self.text_field],
}
else:
document = None
return document
def get_document_by_name(self, name):
query = {"filter": {"term": {self.name_field: name}}}
result = self.client.search(index=self.index, body=query)["hits"]["hits"]
if result:
document = {
"id": result[self.doc_id_field],
"name": result[self.name_field],
"text": result[self.text_field],
}
else:
document = None
return document
def get_document_ids_by_tags(self, tags):
term_queries = [{"terms": {key: value}} for key, value in tags.items()]
query = {"query": {"bool": {"must": term_queries}}}
result = self.client.search(index=self.index, body=query, size=10000)["hits"]["hits"]
doc_ids = []
for hit in result:
doc_ids.append(hit["_id"])
return doc_ids
def write_documents(self, documents):
for d in documents:
self.client.index(index=self.index, body=d)
def get_document_count(self):
result = self.client.count()
count = result["count"]
return count
def get_all_documents(self):
result = scan(self.client, query={"query": {"match_all": {}}}, index=self.index)
documents = []
for hit in result:
documents.append(
{
"id": hit["_source"][self.doc_id_field],
"name": hit["_source"][self.name_field],
"text": hit["_source"][self.text_field],
}
)
return documents
def query(self, query, top_k=10, candidate_doc_ids=None):
# TODO:
# for now: we keep the current structure of candidate_doc_ids for compatibility with SQL documentstores
# midterm: get rid of it and do filtering with tags directly in this query
body = {
"size": top_k,
"query": {
"bool": {
"must": [{"multi_match": {"query": query, "type": "most_fields", "fields": self.search_fields}}]
}
},
}
if candidate_doc_ids:
body["query"]["bool"]["filter"] = [{"terms": {"_id": candidate_doc_ids}}]
result = self.client.search(index=self.index, body=body)["hits"]["hits"]
paragraphs = []
meta_data = []
for hit in result:
paragraphs.append(hit["_source"][self.text_field])
meta_data.append(
{
"paragraph_id": hit["_id"],
"document_id": hit["_source"][self.doc_id_field],
"document_name": hit["_source"][self.name_field],
}
)
return paragraphs, meta_data
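# Minimal usage sketch (assumes a reachable Elasticsearch node; the host,
# index name and sample document below are illustrative, not from this file):
#   store = ElasticsearchDocumentStore(host="localhost", index="document")
#   store.write_documents([{"name": "doc1", "text": "hello world",
#                           "document_id": "1"}])
#   paragraphs, meta_data = store.query("hello", top_k=5)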
``` |
{
"source": "AadilVarsh/pyflames",
"score": 3
} |
#### File: pyflames/examples/test3.py
```python
try:
import os
import sys
currentdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.dirname(currentdir)
sys.path.append(parentdir)
except:
pass
from flask import Flask, render_template, request
from flames import Flames
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
return render_template("index.html")
@app.route('/getflame', methods=['GET', 'POST'])
def getflame():
if request.method == 'POST':
nameA = request.form['nameA']
nameB = request.form['nameB']
if nameA.strip() != nameB.strip():
f = Flames(nameA, nameB)
r = f.find_it()
return render_template('flame.html', status=r.title())
else:
return render_template('error.html', error="You can't FUCK yourself")
if __name__ == '__main__':
app.run(debug=True, port=9090)
``` |
{
"source": "AadilVarsh/tyrant",
"score": 3
} |
#### File: tyrant/cogs/fruit_vs_vegetables.py
```python
import asyncio
from disnake.ext.commands import Bot, Cog
from tyrant import constants
class FruitVsVegetables(Cog):
"""Assign fruit and vegetable roles."""
def __init__(self, bot: Bot):
"""Initialize this cog with the Bot instance."""
self.bot = bot
self.locks = {}
@Cog.listener()
async def on_raw_reaction_add(self, payload):
"""Distribute fruit or vegetable role, when appropriate."""
if payload.channel_id == constants.Channels.fruit_vs_vegetables:
# Acquire a lock for this user
if payload.user_id not in self.locks:
self.locks[payload.user_id] = asyncio.Lock()
lock = self.locks[payload.user_id]
# If it's already locked, just do nothing. The code
# below will clean up and exit with a clean state.
if lock.locked():
return
async with lock:
# Get the other info we need
channel = await self.bot.fetch_channel(payload.channel_id)
guild = self.bot.get_guild(payload.guild_id)
member = await guild.fetch_member(payload.user_id)
emoji = payload.emoji
# Get the role ID from the emoji
fruit_role_id = constants.EMOJI_TO_ROLE[emoji.name]
team_id = constants.EMOJI_TO_TEAM[emoji.name]
fruit_role = guild.get_role(fruit_role_id)
team_role = guild.get_role(team_id)
# Get rid of old roles, assign the new ones
await member.remove_roles(*[role for role in member.roles if role.id in constants.ALL_FRUIT_AND_VEG_ROLES])
await member.add_roles(fruit_role, team_role)
# Finally, remove all other reactions than this one
fruit_message = await channel.fetch_message(constants.Messages.fruit_role_assignment)
veg_message = await channel.fetch_message(constants.Messages.veg_role_assignment)
reactions = fruit_message.reactions + veg_message.reactions
for reaction in reactions:
# Do not remove the reaction we're currently adding
if reaction.custom_emoji:
if reaction.emoji.name == emoji.name:
continue
else:
if str(emoji) == str(reaction.emoji):
continue
# Otherwise, remove the emoji.
users = await reaction.users().flatten()
if member in users:
await reaction.remove(member)
@Cog.listener()
async def on_raw_reaction_remove(self, payload):
"""Remove fruit and veg roles, when appropriate."""
if payload.channel_id == constants.Channels.fruit_vs_vegetables:
# Acquire a lock for this user
if payload.user_id not in self.locks:
self.locks[payload.user_id] = asyncio.Lock()
lock = self.locks[payload.user_id]
async with lock:
guild = self.bot.get_guild(payload.guild_id)
member = await guild.fetch_member(payload.user_id)
emoji = payload.emoji
# Get the role ID from the emoji
fruit_role_id = constants.EMOJI_TO_ROLE[emoji.name]
team_id = constants.EMOJI_TO_TEAM[emoji.name]
team_role = guild.get_role(team_id)
# Remove all fruit and veg roles from the member
for role in member.roles:
if role.id == fruit_role_id and role.id in constants.ALL_FRUIT_AND_VEG_ROLES:
await member.remove_roles(role, team_role)
def setup(bot: Bot) -> None:
"""
This function is called automatically when this cog is loaded by the bot.
    Its only purpose is to load the cog above and to pass the Bot instance into it.
"""
bot.add_cog(FruitVsVegetables(bot))
``` |
{
"source": "aadimangla/StumbleUpon-Evergreen-Classification-Challenge",
"score": 3
} |
#### File: StumbleUpon-Evergreen-Classification-Challenge/Model 1/model1.py
```python
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
from sklearn.model_selection import train_test_split
#Pre-processing Libraries and downloads
import nltk
import re
nltk.download('words')
nltk.download('stopwords')
nltk.download('punkt')
from nltk.corpus import stopwords
stop_words = set(stopwords.words('english'))
stop_words = list(stop_words)
from nltk.stem.porter import PorterStemmer
porter = PorterStemmer()
from nltk import word_tokenize
import string
print(string.punctuation)
words = set(nltk.corpus.words.words())
words = list(words)
def remove(tokens):
    # strip any digits from each token
    pattern = '[0-9]'
    tokens = [re.sub(pattern, '', i) for i in tokens]
    return tokens
#def boilerplate_model(ds='train1.csv'):
#Importing Training and Validation Dataset
dataset = pd.read_csv('train1.csv')
# Boilerplate Data
boilerplate_data = dataset.iloc[:,[2,-1]].values
text_list = list(boilerplate_data[:,0])
#Importing Test Dataset
test_ds = pd.read_csv('test.csv')
boilerplate_test_data = test_ds.iloc[:,2].values
test_text_list = list(boilerplate_test_data)
# Train Validation Split
train, val = train_test_split(boilerplate_data, test_size=0.2)
# Pre-Processing training text
def cleaning_text(text):
cleaned_text = []
len_data = len(text)
for i in range(0,len_data):
sentence = text[i]
sentence = sentence.lower()
sentence_p = "".join([char for char in sentence if char not in string.punctuation])
sentence_words = word_tokenize(sentence_p)
sentence_filtered = [word for word in sentence_words if word not in stop_words]
sentence_stemmed = [porter.stem(word) for word in sentence_filtered]
#sentence_processed = list(w for w in sentence_stemmed if w in words)
#print(sentence_stemmed)
sentence_stemmed = remove(sentence_stemmed)
listToStr = ' '.join([str(elem) for elem in sentence_stemmed])
#print(listToStr)
cleaned_text.append(listToStr)
return cleaned_text
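# Illustrative: cleaning_text(["The Cats are Running!!"]) yields ["cat run"]
# after lowercasing, punctuation stripping, stop-word removal, Porter stemming
# and digit removal.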
train_text= list(train[:,0])
train_cleaned_text = cleaning_text(train_text)
# Pre-Processing validation text
val_text= list(val[:,0])
val_cleaned_text = cleaning_text(val_text)
# Pre-Processing test data
test_cleaned_text = cleaning_text(test_text_list)
# Tokenization of textual data
train_targets = list(train[:,1])
train_targets = np.array(train_targets)
val_targets = list(val[:,1])
val_targets = np.array(val_targets)
tokenizer = Tokenizer(num_words=20000,oov_token='<OOV>',split=' ')
tokenizer.fit_on_texts(train_cleaned_text)
word_index = tokenizer.word_index
train_text_list = []
train_text_list = tokenizer.texts_to_sequences(train_cleaned_text)
train_text_list = pad_sequences(train_text_list,padding='pre',truncating='post',maxlen=20)
# Tokenization of Validation data
val_text_list = tokenizer.texts_to_sequences(val_cleaned_text)
val_text_list = pad_sequences(val_text_list,padding='pre',truncating='post',maxlen=20)
# Tokenization of Test Data
test_text_list = tokenizer.texts_to_sequences(test_cleaned_text)
test_text_list = pad_sequences(test_text_list,padding='pre',truncating='post',maxlen=20)
#Model
model = tf.keras.Sequential([
tf.keras.layers.Embedding(20000, 64,input_length=20),
tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(64, return_sequences=True)),
tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(32)),
tf.keras.layers.Dropout(0.5),
tf.keras.layers.Dense(32, activation='relu'),
tf.keras.layers.Dense(1, activation='sigmoid')
])
model.compile(loss='mae', optimizer=tf.keras.optimizers.RMSprop(
learning_rate=0.003, rho=0.9, momentum=0.2, epsilon=1e-07, centered=False,
name='RMSprop'
), metrics=['accuracy','AUC',tf.keras.metrics.Precision(),tf.keras.metrics.Recall()])
NUM_EPOCHS = 25
history = model.fit(train_text_list,train_targets, epochs=NUM_EPOCHS, validation_data=(val_text_list,val_targets),use_multiprocessing=True)
#Result
results = model.evaluate(val_text_list, val_targets)
# Prediction on Test Data
results = model.predict(test_text_list)
results = np.array(results)
results = np.round(results)
prediction = pd.DataFrame(results, columns=['predictions']).to_csv('prediction.csv')
#model.save('model1.h5')
```
#### File: StumbleUpon-Evergreen-Classification-Challenge/Model 2/model2.py
```python
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import keras
from sklearn.model_selection import train_test_split
from utils import boilerplate_model
#for creating feature column
from tensorflow.keras import layers
from tensorflow import feature_column
from os import getcwd
#Importing Training and Validation Dataset
ds = pd.read_csv('train1.csv')
#Importing Test Data
ds_test = pd.read_csv('test.csv')
# Pre processing test data
X_test = ds_test.iloc[:,3].values
X_test.reshape(1,-1)
from sklearn.preprocessing import LabelEncoder,OneHotEncoder
labelencoder_X=LabelEncoder()
X_test=labelencoder_X.fit_transform(X_test)
ds_test['alchemy_category'] = X_test
ds_test['alchemy_category_score'] = np.array(ds_test['alchemy_category_score'])
test_results = pd.read_csv('prediction.csv')
test_text_results = test_results.iloc[:,2].values
ds_test.pop('boilerplate')
ds_test.pop('url')
ds_test.pop('urlid')
ds_test.pop('news_front_page')
ds_test['boilerplate'] = np.array(test_text_results,dtype=float)
ds_test.info()
# Encoding categorical Variable
X = ds.iloc[:,3].values
X.reshape(1,-1)
from sklearn.preprocessing import LabelEncoder,OneHotEncoder
labelencoder_X=LabelEncoder()
X=labelencoder_X.fit_transform(X)
ds['alchemy_category'] = X
#Getting Boilerplate results using boilerplate Model
text_results = boilerplate_model()
text_results = np.array(text_results)
ds.pop('boilerplate')
ds.pop('url')
ds.pop('urlid')
ds.pop('news_front_page')
ds['boilerplate'] = text_results
ds.info()
train, val = train_test_split(ds, test_size=0.2)
print(len(train), 'train examples')
print(len(val), 'validation examples')
#def df_to_dataset(dataframe, shuffle=True, batch_size=32):
# dataframe = dataframe.copy()
# labels = dataframe.pop('label')
# ds = tf.data.Dataset.from_tensor_slices((dict(dataframe), labels))
# if shuffle:
# ds = ds.shuffle(buffer_size=len(dataframe))
# ds = ds.batch(batch_size)
# return ds
train.info()
train_X = train
train_Y = np.array(train.pop('label'))
val_X= val
val_Y = np.array(val.pop('label'))
#batch_size = 64 # A small batch sized is used for demonstration purposes
#train_ds = df_to_dataset(train, batch_size=batch_size)
#val_ds = df_to_dataset(val, shuffle=False,batch_size=batch_size)
## Creating Feature Layer
#feature_columns = []
#
## Numeric Cols.
## Create a list of numeric columns. Use the following list of columns
## that have a numeric datatype:
#numeric_columns = ['alchemy_category','alchemy_category_score','avglinksize', 'commonlinkratio_1','commonlinkratio_2','commonlinkratio_3','commonlinkratio_4', 'compression_ratio','embed_ratio', 'framebased','frameTagRatio', 'hasDomainLink','html_ratio','image_ratio','is_news','lengthyLinkDomain', 'linkwordscore','non_markup_alphanum_characters','numberOfLinks','numwords_in_url','parametrizedLinkRatio','spelling_errors_ratio','boilerplate']
#
#for header in numeric_columns:
# # Create a numeric feature column out of the header.
# numeric_feature_column = tf.feature_column.numeric_column(header)
#
# feature_columns.append(numeric_feature_column)
#
##feature_layer = tf.keras.layers.DenseFeatures(feature_columns)
# MODEL
model = tf.keras.Sequential([
tf.keras.layers.Dense(64, input_dim=23, kernel_initializer='he_uniform', activation='tanh'),
tf.keras.layers.Dense(128, activation='selu'),
tf.keras.layers.Dense(256, activation='tanh'),
tf.keras.layers.Dropout(0.5),
tf.keras.layers.Dense(128, activation='selu'),
tf.keras.layers.Dense(64, activation='tanh'),
tf.keras.layers.Dense(1, activation='sigmoid')
])
model.compile(loss='mae', optimizer=tf.keras.optimizers.RMSprop(
learning_rate=0.05, rho=0.9, momentum=0.2, epsilon=1e-07, centered=False,
name='RMSprop'
), metrics=['accuracy','AUC',tf.keras.metrics.Precision(),tf.keras.metrics.Recall()])
NUM_EPOCHS = 50
history = model.fit(x=train_X,y=train_Y, epochs=NUM_EPOCHS,validation_data=(val_X,val_Y))
results = model.predict(ds_test)
results = np.array(results)
results = np.round(results)
prediction = pd.DataFrame(results, columns=['predictions']).to_csv('prediction1.csv')
``` |
{
"source": "aadimator/exercism",
"score": 3
} |
#### File: python/leap/leap.py
```python
def is_leap_year(year):
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
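# Illustrative checks (not part of the original exercise file):
#   is_leap_year(2000) -> True   (divisible by 400)
#   is_leap_year(1900) -> False  (divisible by 100 but not by 400)
#   is_leap_year(1996) -> True   (divisible by 4 but not by 100)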
``` |
{
"source": "aadimator/leetcode",
"score": 4
} |
#### File: aadimator/leetcode/3.Longest Substring Without Repeating Characters.py
```python
class Solution:
def lengthOfLongestSubstring(self, s):
"""
:type s: str
:rtype: int
"""
longest = ""
curr = ""
for c in s:
if (c in curr):
pos = curr.find(c) + 1
curr = curr[pos:]
curr += c
if (len(curr) >= len(longest)):
longest = curr
return len(longest)
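# Illustrative: Solution().lengthOfLongestSubstring("abcabcbb") == 3 ("abc")
# and "bbbbb" -> 1; on a repeat, curr is sliced past the first occurrence so
# it always holds the current repeat-free window.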
``` |
{
"source": "aadi-mishra/robotino",
"score": 2
} |
#### File: libraries/Motor/move_straight.py
```python
import rospy
from geometry_msgs.msg import Twist
from geometry_msgs.msg import Pose
import math
def move():
    # Completion sketch: the node name, publish rate, and speed below are
    # assumptions, not from the original (truncated) file.
    rospy.init_node('move_straight', anonymous=True)
    pub_vel = rospy.Publisher('ref_vel', Twist, queue_size=10)
    rate = rospy.Rate(10)
    vel = Twist()
    vel.linear.x = 0.1  # constant forward speed (m/s)
    while not rospy.is_shutdown():
        pub_vel.publish(vel)
        rate.sleep()
``` |
{
"source": "aadiraju/animetrics",
"score": 3
} |
#### File: analysis/scripts/project_functions.py
```python
import pandas as pd
import numpy as np
import html
import warnings
import random
warnings.filterwarnings("ignore")
#Process the anime.csv file
def load_and_process_anime(path):
#Deal with unnecessary data and missing values
df1 = pd.read_csv(path)
df1 = df1.drop(columns='members').loc[~((df1['episodes'] == 'Unknown') | (df1['episodes'] == 'NaN') | (df1['rating'].isna()))].dropna(axis=0)
#Process some data, and remove any rows with the 'Hentai' to keep things family-friendly
df1 = df1.assign(genre = lambda x: x.genre.str.split(",")).sort_values("anime_id").loc[~(df1['genre'].isin(['Hentai']))].reset_index(drop=True)
df1['genre'] = df1['genre'].map(lambda x: random.sample(x,len(x)))
df1['episodes'] = pd.to_numeric(df1['episodes'])
    df1['name'] = df1['name'].map(html.unescape) # converts names encoded with HTML entities into plain unicode
return df1
#process the rating.csv file
def load_and_process_ratings(path,whitelist):
#Deal with unnecessary data and missing values
df1 = pd.read_csv(path)
df1 = df1.loc[~((df1['rating'] == -1 ) | (df1['rating'].isna()))].dropna(axis=0) #Ditch the rows with a rating of -1 which means the user watched it but didn't rate it
#Process some data, and remove any rows with the 'Hentai' to keep things family-friendly
df1 = df1.loc[(df1['anime_id'].isin(whitelist))].reset_index(drop=True)
return df1
``` |
{
"source": "aadishgoel2013/Algos-with-Python",
"score": 4
} |
#### File: aadishgoel2013/Algos-with-Python/FibAndFact.py
```python
def fact_iterative(n):
    ans = 1
    for i in range(1, n+1):  # must include n itself, or this computes (n-1)!
        ans *= i
    return ans
def fact(n):
    if n <= 1:  # base case also covers n == 0
        return 1
    else:
        return n*fact(n-1)
def fib(n):
if n<2:
return n
else:
return fib(n-1)+fib(n-2)
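# Illustrative: fact(5) == 120 and fib(10) == 55. The naive fib is exponential
# time; a memoized sketch (the name fib_fast is ours, not from this file):
#   from functools import lru_cache
#   @lru_cache(maxsize=None)
#   def fib_fast(n): return n if n < 2 else fib_fast(n-1) + fib_fast(n-2)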
```
#### File: aadishgoel2013/Algos-with-Python/mergeSort.py
```python
def merge(a,b):
data=[]
x,y=len(a),len(b)
i=j=0
while i<x and j<y:
if a[i]<b[j]:
data.append(a[i])
i+=1
else:
data.append(b[j])
j+=1
while i<x:
data.append(a[i])
i+=1
while j<y:
data.append(b[j])
j+=1
return data
def sort(a):
n=len(a)
if n==1:
return a
else:
return merge(sort(a[:n//2]),sort(a[n//2:]))
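# Illustrative: sort([5, 2, 4, 1]) returns [1, 2, 4, 5]; merge() runs in
# O(x + y), so the halving recursion gives the usual O(n log n) merge sort.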
a = list(map(int,input().split()))
data = sort(a)
print(data)
``` |
{
"source": "aadishgoel2013/CrackingTheCodingInterview",
"score": 4
} |
#### File: CrackingTheCodingInterview/Chapter1/InplaceRotateMatrixBy90Degree.py
```python
def TransposeMatrix(size, data):
''' To Inplace Transpose Matrix '''
for i in range(size):
for j in range(i+1,size):
data[i][j],data[j][i] = data[j][i],data[i][j]
return data
def RotateMatrixBy90Degree(size, data):
'''To Rotate Matrix By 90 Degree'''
for i in range(size//2):
data[i],data[size-i-1]=data[size-i-1],data[i]
data = TransposeMatrix(size, data)
return data
if __name__=='__main__':
size = int(input("Enter Size: "))
data = [ list(map(int, input().split())) for i in range(size) ]
ans = RotateMatrixBy90Degree(size, data)
print('-----')
for line in ans:
print(*line)
''' Output
Enter Size: 3
1 2 3
4 5 6
7 8 9
-----
7 4 1
8 5 2
9 6 3
'''
``` |
{
"source": "aadishgoel2013/Cryptography-Codes",
"score": 4
} |
#### File: aadishgoel2013/Cryptography-Codes/galoisField.py
```python
from functools import reduce
ir = '1011' #x^3+x+1 is an irreducible polynomial over GF(2)
n=3 #GF(p^n):- GF(2^3) n is 3
def mul(a,b,ir):
x,y = bin(a)[2:],bin(b)[2:]
mid = [int(y+'0'*index,2) for index,item in enumerate(x[::-1]) if item!='0']
if not len(mid): mid=[0,0]
ans= m = bin(reduce(lambda x,y:x^y,mid))[2:]
if len(m)>len(ir):
ans = int(ir+'0',2)^int(m,2) #Xoring With Shifted IR
ans = bin(int(ir,2)^int(bin(ans)[2:],2))[2:] #Reducing One Degree more
elif len(m)==len(ir):
ans = bin(int(ir,2)^int(m,2))[2:] #Reducing One Degree
return(ans)
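# Worked example: mul(3, 7, ir) -> '10', i.e. (x+1)(x^2+x+1) = x^3+1 = x
# after reduction mod x^3+x+1 over GF(2)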
add=[[int(bin(i)[2:])^int(bin(j)[2:]) for j in range(2**n)] for i in range(2**n)]
mult=[[mul(i,j,ir) for j in range(2**n)] for i in range(2**n)]
print('Addition Table')
for line in add:
print(*line)
print('Multiplication Table')
for line in mult:
print(*line)
``` |
{
"source": "aadishgoel2013/OperatingSystemAlgos",
"score": 3
} |
#### File: OperatingSystemAlgos/Monitors/monitorsDinningPhiloshper.py
```python
import random
class abc:
def __init__(s):
s.states = {0:'Thinking', 1:'Hungry', 2:'Eating'} #legend for the codes stored in s.state
s.self = [0]*5
s.state = [0]*5
def pickup(s,i):
s.state[i]=1 #Hungry
s.test(i)
if s.state[i] != 2:
s.wait(s.self,i)
def putdown(s,i):
s.state[i]=0 #Thinking
s.test((i+4)%5)
s.test((i+1)%5)
def test(s,i):
if(s.state[((i+4)%5)]!=2 and s.state[((i+1)%5)]!=2 and s.state[i]==1 ):
s.state[i]=2
s.signal(s.self,i)
def wait(s,self,i):
if not s.self[i]: s.self[i]=1
def signal(s,self,i):
if s.self[i]:
s.self[i]=0
s.pickup(i)
##x = abc()
##print(x.self)
##print(x.state)
##print('**********************************************************************')
##x.pickup(0)
##x.pickup(2)
##x.pickup(1)
##print(x.self)
##print(x.state)
##print('**********************************************************************')
##x.putdown(0)
##x.putdown(2)
##print(x.self)
##print(x.state)
##print('**********************************************************************')
a = abc()
for i in range(100):
v = random.randint(0,1)
r = random.randint(0,4)
if not v: a.pickup(r)
else: a.putdown(r)
print('**********************************************************************')
print(a.self)
print(a.state)
``` |
{
"source": "aadishJ01/La-Robo-Liga-Workshops",
"score": 4
} |
#### File: Workshop 1 - PyBullet and Control Algorithms/PID Controller/drone_pid.py
```python
import matplotlib
import numpy as np
import pybullet as p
import time
import math
import cv2
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib import style
import pandas as pd
import csv
from threading import Thread
## Function to create a live plot of drone's height
def plot():
style.use('fivethirtyeight')
fig = plt.figure()
ax1 = fig.add_subplot(1,1,1)
def animate(i):
data = pd.read_csv('src/data.csv')
x =data['time']
y = data['height']
r = data['required_height']
ax1.clear()
ax1.plot(x , r , label= 'Target Height')
ax1.plot(x , y , label= 'Current Height')
ax1.legend(loc = 'upper left')
ani = animation.FuncAnimation(fig, animate, interval=2)
plt.show()
## Pybullet Simulation
def runSimulation():
fieldnames = ["time" , "height", "required_height"]
with open('src/data.csv','w') as csv_file:
csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
csv_writer.writeheader()
p_id = p.connect(p.GUI) #Loading the simulation
p.setGravity(0, 0, -10) #Setting the gravity
plane = p.loadURDF("src/plane.urdf") #Loading the plane
dronePos = [0,0,0.2] #Initial Position of the drone
drone = p.loadURDF("src/quadrotor.urdf", dronePos) #Loading the drone
def callback(): #Dummy function for the track-bars
pass
#P-D gains to be adjusted
cv2.namedWindow('controls') #Creating Track-Bars that can be used to adjust the PID values in real time.
cv2.createTrackbar('P', 'controls', 0, 500, callback) #Setting the lower and upper limits on track-bars
cv2.createTrackbar('I', 'controls', 0, 500, callback) #Creating three different track-bars for each P-I-D
cv2.createTrackbar('D', 'controls', 0, 500, callback)
P=cv2.getTrackbarPos('P', 'controls')/10 #Loading the PID constants from the trackbars
I=cv2.getTrackbarPos('I', 'controls')/1000
D=5*cv2.getTrackbarPos('D', 'controls')
#press escape key to execute
k=cv2.waitKey(1) & 0xFF #This is needed to keep the track-bars active in real time
#P, D = 0.1, 0.5
desired_state = 3 #This is the desired state that we want the drone to reach. That is a height of 3 meters
#Select the simulation window and Press ENTER to execute
t=0
while(True):
if t == 0:
p.configureDebugVisualizer(p.COV_ENABLE_GUI,0)
p.resetDebugVisualizerCamera(cameraDistance=3.5,
cameraYaw= 0,
cameraPitch= 0,
cameraTargetPosition = [0.0,0.0,desired_state] )
k=cv2.waitKey(1) #This is written to make the taskbars operate in real time
keycode = p.getKeyboardEvents() #Getting the keyboard events through PyBullet
if keycode.get(p.B3G_RETURN) == 1: #If ENTER key is pressed then the simulation executes
integral = 0 #Resetting all the PID terms to 0 at the start of the simulation
derivative = 0
prev_error = 0
t = 0 #Also setting the time to 0
p.resetSimulation() #Resetting the simulation
p.setGravity(0, 0, -10)
plane = p.loadURDF("src/plane.urdf") #Loading the plane and drone again
dronePos = [0,0,0.1]
drone = p.loadURDF("src/quadrotor.urdf", dronePos)
state = p.getBasePositionAndOrientation(drone)[0][2] #Getting the state to calculate error. In this case, it is the height of the drone
p.createConstraint(drone, -1, -1, -1, p.JOINT_PRISMATIC, [0,0,1], [0,0,0], [0,0,0]) #Constraining the drone to move along Z-axis only
p.stepSimulation() #Stepping the simulation by a step
## Saving the height of the drone and time to csv file to make a live plot
while(True):
with open('src/data.csv' , 'a') as csv_file:
csv_writer = csv.DictWriter(csv_file, fieldnames = fieldnames)
info = {
"time" : t,
"height" : state,
"required_height" : desired_state
}
csv_writer.writerow(info)
P=cv2.getTrackbarPos('P', 'controls')/10 #Get P from trackbar; dividing by 10 maps the 0-500 slider range to the desired 0-50 range
I=cv2.getTrackbarPos('I', 'controls')/1000 #Get I from trackbar; dividing by 1000 maps the 0-500 slider range to the small values the integral term needs
D=5*cv2.getTrackbarPos('D', 'controls') #Get D from trackbar; multiplying by 5 gives an effective range of 0-2500
'''
Scaling factors are determined experimentally: the track-bars only return
integer values from 0-500, so we scale each reading into the range the
corresponding gain actually needs. For example, if the track-bar is at 100
but I should be around 0.1, we divide by 1000 to get the final value.
'''
k=cv2.waitKey(1) #This is necessary to keep the track-bars active
t+=0.01 #Keeping track of time into the simulation
state = p.getBasePositionAndOrientation(drone)[0][2] #Getting the state, i.e. the current altitude of the drone
error = state - desired_state #The error is the difference between current state and desired state
derivative = error - prev_error #The D term is the difference between the current and previous error. As the simulation is called at regular intervals, we don't divide by time; it gives us the rate at which the error is changing.
prev_error = error #Updating the prev error for using in next loop
if(p.getBaseVelocity(drone)[0][2]<0.01): #Integrating/Summing the error for I gain only when drone is almost stationary, as we only want the steady state error for integration term.
integral += error #Summing up the error
pid = P * error + D * derivative + I * integral #Calculating the upthrust to be given to the drone by multiplying error with different gains and adding
action = -pid #Action is the negative of our PID output; this is experimental
print("The height is {}".format(state))
p.applyExternalForce(drone, -1, [0,0,action], [0,0,0], p.WORLD_FRAME) #Applying the resultant force as an upthrust on the drone.
p.stepSimulation() #Stepping the simulation
time.sleep(1./240.)
keycode = p.getKeyboardEvents() #Getting the keyboard events through PyBullet
if keycode.get(ord('r')) == 1: #Resetting the simulation when 'r' is pressed
print("Episode finished after {} timesteps".format(t+1))
p.resetSimulation()
p.setGravity(0, 0, -10)
t = 0
state = 0
with open('src/data.csv','w') as csv_file:
csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
csv_writer.writeheader()
plane = p.loadURDF("src/plane.urdf")
dronePos = [0,0,0.2]
drone = p.loadURDF("src/quadrotor.urdf", dronePos)
p.stepSimulation()
break
Thread(target=runSimulation).start()
plot()
``` |
{
"source": "aadishjain2911/CHASS",
"score": 3
} |
#### File: CHASS/chass/forloop.py
```python
import subprocess
from chass.locate_ifs import locate_ifs
from chass.locate_loops import locate_loops
def forloop (thepassedfile, start_index, end_index ,index,variable_info,params):
f = open(thepassedfile,"r+")
g = open("forloop" + str(index) + ".sh", "w+")
lines = f.readlines()
mylines = ""
i=0
before_end=end_index-1
after_start = start_index+1
for j in lines:
# if(i==after_start):
# mylines = j+"\necho 'iteration' >> forloop"+str(index)+".txt"
if(i==before_end):
mylines =j
for(a,b,c) in variable_info:
mylines=mylines+"\necho ${"+a+"[*]}>>"+"forloop"+str(index)+".txt\n"
else:
mylines=j
g.write(mylines)
i=i+1
f.close()
g.close()
temporary = open("garbage_file.txt","a")
temporary.flush()
subprocess.Popen(["bash","forloop"+str(index)+".sh"]+params,stdout=temporary,stderr=subprocess.STDOUT)
```
#### File: CHASS/chass/if_else.py
```python
import subprocess
#func_find_var finds the variables present in a conditional statement
#it takes the_str as a parameter, which is just the string in which we have to find the variables
def func_find_var(the_str):
l=[]
s=""
i=0
while i<len(the_str):
if(the_str[i]=='$'):
j=i+1
s=""
while j<len(the_str) and the_str[j]!=' ':
s=s+the_str[j]
j+=1
if s: l.append(s) #also captures a variable that ends the string
i=j+1
continue
else:
i+=1
return l
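# e.g. func_find_var("if [ $x -gt $y ]") returns ['x', 'y']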
#include imports for fors,whiles,dountils
#include imports for position of functions
#posns is positions of if_else
#params is the parameter list for the script
#thefile is a copy of the script of the user
#the function if_else creates text files corresponding to the if/elif statements having information on the variables used in the if/elif condition.
def if_else(thefile,position_of_fors,position_of_funct,whiles,dountils,posns,params):
tot_list=[]
f=open(thefile,"r+")
e=[]
lines = f.readlines()
for i in lines:
tot_list.append(0)
rev_=[]
for i in lines:
rev_.append(0)
for i in position_of_fors:
rev_[i[0]]+=1
rev_[i[1]]-=1
for i in position_of_funct:
rev_[i[1]]+=1
rev_[i[2]]-=1
for i in whiles:
rev_[i[0]]+=1
rev_[i[1]]-=1
for i in dountils:
rev_[i[0]]+=1
rev_[i[1]]-=1
for i in range(1,len(lines)):
tot_list[i]=tot_list[i-1]+rev_[i]
it=0
w1=0
w2=0
b=[]
for i in lines:
b.append(0)
i=0
while i<len(posns):
#print("uipty")
if(tot_list[posns[i][0]]>0):
i+=1
continue
g=open(str(posns[i][0])+"rand.sh","w+")
e.append(str(posns[i][0])+"rand.sh")
it=0
while it<posns[i][0]:
g.write(lines[it])
it+=1
for t in range(0,len(posns[i])-1):
l=func_find_var(lines[posns[i][t]])
b[posns[i][t]]=1
for k in l:
g.write("\necho "+str(k)+" $"+str(k)+">>ifrand"+str(posns[i][t])+".txt\n")
while it<len(lines):
if(b[it]==0):
g.write(lines[it])
else:
l=func_find_var(lines[it])
g.write("\necho >"+"ifrand"+str(it)+".txt\n")
for k in l:
g.write("\necho "+str(k)+" $"+str(k)+" >>ifrand"+str(it)+".txt\n")
#print(type(lines))
g.write(lines[it])
it+=1
i+=1
g.close()
for i in range(0,len(e)):
temporary = open("garbage_file.txt","a")
temporary.flush()
subprocess.Popen(["bash",e[i]]+params,stdout=temporary,stderr=subprocess.STDOUT)
temporary.close()
```
#### File: CHASS/chass/locate_commands.py
```python
from chass.get_variable import get_variable
# This function takes the preprocessed file as input and returns a list of tuples
# Each tuple's first element gives the line number and the second element gives the name of the command
# Only classifies a limited number of commands, namely echo, cd, mkdir, pwd, sed, awk, touch, mv, rm and cp
def locate_commands(file) :
myfile = open(file,"r")
line_number = 0
commands = []
string = ""
for line in myfile.readlines() :
new_line = line
command = get_variable(line,0,"forward")
if command :
if command == "sed" :
commands.append((line_number,"sed"))
if "-n " in line :
new_line = line.replace("-n "," ")
if "--quiet" in line :
new_line = line.replace("--quiet","")
if "--silent" in line :
new_line = line.replace("--silent","")
if " >> " in line:
new_line = new_line.replace(line[line.index(" >> "): ]," \n")
if ">>" in line:
new_line = new_line.replace(line[line.index(">>"): ]," \n")
flag1 = True
flag2 = True
for i in range(len(line)) :
character = line[i]
if character == "'" : flag1 = not flag1
elif character == '"' : flag2 = not flag2
elif character == '>' and flag1 and flag2 :
new_line = new_line.replace(line[i:]," \n")
break
elif command == "awk" :
commands.append((line_number,"awk"))
elif command == "cd" :
new_line = new_line.replace(new_line,"\n")
commands.append((line_number,"cd"))
elif command == "mkdir" :
commands.append((line_number,"mkdir"))
elif command == "pwd" :
commands.append((line_number,"pwd"))
elif command == "echo" :
commands.append((line_number,"echo"))
elif command == "touch" :
commands.append((line_number,"touch"))
elif command == "mv" :
commands.append((line_number,"mv"))
elif command == "rm" :
commands.append((line_number,"rm"))
elif command == "cp" :
new_line = new_line.replace(new_line,"\n")
commands.append((line_number,"cp"))
if " sed " in line :
commands.append((line_number,"sed"))
if "-n " in line :
new_line = line.replace("-n "," ")
if "--quiet" in line :
new_line = line.replace("--quiet","")
if "--silent" in line :
new_line = line.replace("--silent","")
if " >> " in line:
new_line = new_line.replace(line[line.index(" >> "): ]," \n")
if ">>" in line:
new_line = new_line.replace(line[line.index(">>"): ]," \n")
flag1 = True
flag2 = True
for i in range(len(line)) :
character = line[i]
if character == "'" : flag1 = not flag1
elif character == '"' : flag2 = not flag2
elif character == '>' and flag1 and flag2 :
new_line = new_line.replace(line[i:]," \n")
break
if " awk " in line :
commands.append((line_number,"awk"))
if " cd " in line :
new_line = new_line.replace(new_line,"\n")
commands.append((line_number,"cd"))
if " mkdir " in line :
commands.append((line_number,"mkdir"))
if " pwd " in line :
commands.append((line_number,"pwd"))
if " echo " in line :
commands.append((line_number,"echo"))
if " touch " in line :
commands.append((line_number,"touch"))
if " mv " in line :
commands.append((line_number,"mv"))
if " rm " in line :
commands.append((line_number,"rm"))
if " cp " in line :
new_line = new_line.replace(new_line,"\n")
commands.append((line_number,"cp"))
if ">>" in line:
new_line = new_line.replace(line[line.index(">>"): ]," \n")
string+=new_line
line_number+=1
new_file = open(file,"w")
new_file.write(string)
return commands
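# Hypothetical usage (the returned pairs depend on the script's contents; note
# that the function also rewrites the file in place with redirections stripped):
# commands = locate_commands("copy_of_script.sh") # e.g. [(0, 'echo'), (2, 'cd')]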
``` |
{
"source": "Aadit-Ambadkar/pyribs",
"score": 3
} |
#### File: ribs/emitters/_iso_line_emitter.py
```python
import numpy as np
from numba import jit
from ribs.emitters._emitter_base import EmitterBase
class IsoLineEmitter(EmitterBase):
"""Emits solutions that are nudged towards other archive solutions.
If the archive is empty, calls to :meth:`ask` will generate solutions from
an isotropic Gaussian distribution with mean ``x0`` and standard deviation
``iso_sigma``. Otherwise, to generate each new solution, the emitter selects
a pair of elites :math:`x_i` and :math:`x_j` and samples from
.. math::
x_i + \\sigma_{iso} \\mathcal{N}(0,\\mathcal{I}) +
\\sigma_{line}(x_j - x_i)\\mathcal{N}(0,1)
This emitter is based on the Iso+LineDD operator presented in `Vassiliades
2018 <https://arxiv.org/abs/1804.03906>`_.
Args:
archive (ribs.archives.ArchiveBase): An archive to use when creating and
inserting solutions. For instance, this can be
:class:`ribs.archives.GridArchive`.
x0 (array-like): Center of the Gaussian distribution from which to
sample solutions when the archive is empty.
iso_sigma (float): Scale factor for the isotropic distribution used when
generating solutions.
line_sigma (float): Scale factor for the line distribution used when
generating solutions.
bounds (None or array-like): Bounds of the solution space. Solutions are
clipped to these bounds. Pass None to indicate there are no bounds.
Alternatively, pass an array-like to specify the bounds for each
dim. Each element in this array-like can be None to indicate no
bound, or a tuple of ``(lower_bound, upper_bound)``, where
``lower_bound`` or ``upper_bound`` may be None to indicate no bound.
batch_size (int): Number of solutions to return in :meth:`ask`.
seed (int): Value to seed the random number generator. Set to None to
avoid a fixed seed.
"""
def __init__(self,
archive,
x0,
iso_sigma=0.01,
line_sigma=0.2,
bounds=None,
batch_size=64,
seed=None):
self._rng = np.random.default_rng(seed)
self._batch_size = batch_size
self._x0 = np.array(x0, dtype=archive.dtype)
self._iso_sigma = archive.dtype(iso_sigma)
self._line_sigma = archive.dtype(line_sigma)
EmitterBase.__init__(
self,
archive,
len(self._x0),
bounds,
)
@property
def x0(self):
"""numpy.ndarray: Center of the Gaussian distribution from which to
sample solutions when the archive is empty."""
return self._x0
@property
def iso_sigma(self):
"""float: Scale factor for the isotropic distribution used when
generating solutions."""
return self._iso_sigma
@property
def line_sigma(self):
"""float: Scale factor for the line distribution used when generating
solutions."""
return self._line_sigma
@property
def batch_size(self):
"""int: Number of solutions to return in :meth:`ask`."""
return self._batch_size
@staticmethod
@jit(nopython=True)
def _ask_solutions_numba(parents, iso_gaussian, line_gaussian, directions):
"""Numba helper for calculating solutions."""
return parents + iso_gaussian + line_gaussian * directions
@staticmethod
@jit(nopython=True)
def _ask_clip_helper(solutions, lower_bounds, upper_bounds):
"""Numba version of clip."""
return np.minimum(np.maximum(solutions, lower_bounds), upper_bounds)
def ask(self):
"""Generates ``batch_size`` solutions.
If the archive is empty, solutions are drawn from an isotropic Gaussian
distribution centered at ``self.x0`` with standard deviation
``self.iso_sigma``. Otherwise, each solution is drawn as described in
this class's docstring.
Returns:
``(batch_size, solution_dim)`` array -- contains ``batch_size`` new
solutions to evaluate.
"""
iso_gaussian = self._rng.normal(
scale=self._iso_sigma,
size=(self._batch_size, self.solution_dim),
).astype(self.archive.dtype)
if self.archive.empty:
solutions = np.expand_dims(self._x0, axis=0) + iso_gaussian
else:
parents = [
self.archive.get_random_elite()[0]
for _ in range(self._batch_size)
]
directions = [(self.archive.get_random_elite()[0] - parents[i])
for i in range(self._batch_size)]
line_gaussian = self._rng.normal(
scale=self._line_sigma,
size=(self._batch_size, 1),
).astype(self.archive.dtype)
solutions = self._ask_solutions_numba(np.asarray(parents),
iso_gaussian, line_gaussian,
np.asarray(directions))
return self._ask_clip_helper(solutions, self.lower_bounds,
self.upper_bounds)
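# Hypothetical usage sketch (the GridArchive arguments are illustrative; see
# the pyribs docs for the exact constructor signature):
# archive = GridArchive(dims=[20, 20], ranges=[(-1, 1), (-1, 1)])
# emitter = IsoLineEmitter(archive, x0=[0.0, 0.0], batch_size=4)
# solutions = emitter.ask() # (4, 2) array of candidate solutions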
``` |
{
"source": "Aadit-Bhojgi/Automated-Attendance-System",
"score": 3
} |
#### File: Aadit-Bhojgi/Automated-Attendance-System/DigitRecognizer.py
```python
import math
import cv2
import numpy as np
import tensorflow as tf
from scipy import ndimage
import os
from MNIST_data import input_data
class Recognizer:
def __init__(self):
pass
@staticmethod
def shift(img, sx, sy):
rows, cols = img.shape
M = np.float32([[1, 0, sx], [0, 1, sy]])
shifted = cv2.warpAffine(img, M, (cols, rows))
return shifted
@staticmethod
def getBestShift(img):
cy, cx = ndimage.measurements.center_of_mass(img)
rows, cols = img.shape
shiftx = np.round(cols / 2.0 - cx).astype(int)
shifty = np.round(rows / 2.0 - cy).astype(int)
return shiftx, shifty
def TrainRecognizer(self):
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
self.x = tf.placeholder("float", [None, 784])
# we need our weights for our neural net
W = tf.Variable(tf.zeros([784, 10]))
# and the biases
b = tf.Variable(tf.zeros([10]))
self.y = tf.nn.softmax(tf.matmul(self.x, W) + b)
self.y_ = tf.placeholder("float", [None, 10])
cross_entropy = -tf.reduce_sum(self.y_ * tf.log(self.y))
train_step = tf.train.GradientDescentOptimizer(0.01).minimize(cross_entropy)
init = tf.initialize_all_variables()
self.sess = tf.Session()
self.sess.run(init)
# use 1000 batches with a size of 100 each to train our network
for i in range(1000):
batch_xs, batch_ys = mnist.train.next_batch(100)
# run the train_step function with the given image values (x) and the real output (y_)
self.sess.run(train_step, feed_dict={self.x: batch_xs, self.y_: batch_ys})
correct_prediction = tf.equal(tf.argmax(self.y, 1), tf.argmax(self.y_, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
# print('Probability: ' + str(self.sess.run(self.accuracy,
# feed_dict={self.x: mnist.test.images, self.y_: mnist.test.labels})))
def TestRecognizer(self, directory, images_list):
# create an array where we can store our input pictures
images = np.zeros((len(images_list), 784))
# and the correct values
correct_vals = np.zeros((len(images_list), 10))
index = 0
# we want to test our own images from images_list
for no in images_list:
# read the image
gray = cv2.imread(directory + '\\' + no, cv2.IMREAD_GRAYSCALE)
os.remove(directory + '\\' + no)
# resize the images and invert it (black background)
gray = cv2.resize(255 - gray, (28, 28))
(thresh, gray) = cv2.threshold(gray, 128, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)
while np.sum(gray[0]) == 0:
gray = gray[1:]
while np.sum(gray[:, 0]) == 0:
gray = np.delete(gray, 0, 1)
while np.sum(gray[-1]) == 0:
gray = gray[:-1]
while np.sum(gray[:, -1]) == 0:
gray = np.delete(gray, -1, 1)
rows, cols = gray.shape
if rows > cols:
factor = 20.0 / rows
rows = 20
cols = int(round(cols * factor))
gray = cv2.resize(gray, (cols, rows))
else:
factor = 20.0 / cols
cols = 20
rows = int(round(rows * factor))
gray = cv2.resize(gray, (cols, rows))
colsPadding = (int(math.ceil((28 - cols) / 2.0)), int(math.floor((28 - cols) / 2.0)))
rowsPadding = (int(math.ceil((28 - rows) / 2.0)), int(math.floor((28 - rows) / 2.0)))
gray = np.lib.pad(gray, (rowsPadding, colsPadding), 'constant')
shiftx, shifty = self.getBestShift(gray)
shifted = self.shift(gray, shiftx, shifty)
gray = shifted
# save the processed images
# cv2.imwrite(directory + '\\' + 'changed'+no, gray)
"""
all images in the training set have a range from 0-1
and not from 0-255, so we divide our flattened image
(a one-dimensional vector with our 784 pixels)
to use the same 0-1 based range
"""
flatten = gray.flatten() / 255.0
"""
we need to store the flattened image and generate
the correct_vals array
correct_val for the first digit (9) would be
[0,0,0,0,0,0,0,0,0,1]
"""
images[index] = flatten
# correct_val = np.zeros((10))
# correct_val[no] = 1
# correct_vals[index] = correct_val
index += 1
"""
the prediction will be an array with one value
per input image, showing the predicted digit
"""
prediction = tf.argmax(self.y, 1)
"""
we want to run the prediction and the accuracy function
using our generated arrays (images and correct_vals)
"""
return self.sess.run(prediction, feed_dict={self.x: images})
# print(self.sess.run(self.accuracy, feed_dict={self.x: images, self.y_: correct_vals}))
if __name__ == '__main__':
recognizer = Recognizer()
recognizer.TrainRecognizer()
print(recognizer.TestRecognizer('Images', ['5.png']))
``` |
{
"source": "Aadit-Bhojgi/PyCloud",
"score": 3
} |
#### File: Aadit-Bhojgi/PyCloud/AutomatedDownloader.py
```python
import calendar
import datetime
import inspect
import os
import pickle
import win32api
from pyicloud import *
from pyicloud.exceptions import PyiCloudAPIResponseError, PyiCloudFailedLoginException,\
PyiCloudServiceNotActivatedErrror
from requests.exceptions import ConnectionError
import sanityCheck
class Cmd:
def __init__(self):
self.username = self.password = ''
self.list_year, self.list_month, self.list_images, self.count = [], [], [], 0
# To check the Path of the script
self.path = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) # script directory
# Variable Initialization for log file
self.message = self.time = self.today = self.phone = ''
self.dev = 0
def get_password_username(self):
# Deciphering your password
a = open(self.path + '/Automation/Credentials/Credentials.txt', 'r+')
raw = a.read().split('*/*/')
count = 1
s1 = ''
l1 = []
for i in raw[0]:
if i == '%':
l1.append(int(s1) - count)
count += 1
s1 = ''
continue
s1 += i
for i in l1:
self.password += chr(i)
# Deciphering your username
l2 = []
raw1 = raw[1].split('@*@')
username = str(raw1[0])
size = int(raw1[1].split('ved')[0])
s = ''
for i in username:
if i == '@':
l2.append(int(s) - size)
size -= 1
s = ''
continue
s += i
for i in l2:
self.username += chr(i)
self.dev = int(raw1[1].split('ved')[1])
def download_photos(self):
try:
# Deciphering Username and password
self.get_password_username()
# Authentication
self.api = PyiCloudService(self.username, self.password)
self.user_name = str(self.api.devices[self.dev]).split(":")[1].strip(' ')
# Sanity Check
self.sanity = sanityCheck.Sanity(self.user_name)
self.sanity.check()
except (PyiCloudAPIResponseError, PyiCloudFailedLoginException): # a raise inside an except clause would escape sibling handlers, so catch both here
error = 'Invalid email/password combination.\nPlease try again.'
win32api.MessageBox(0, error, 'PyCloud - Message', 0x00000000L + 0x00000010L + 0x00020000L)
except ConnectionError:
error = 'Internet is not working.\nPlease try again.'
win32api.MessageBox(0, error, 'PyCloud - Message', 0x00000000L + 0x00000010L + 0x00020000L)
except:
win32api.MessageBox(0, "Some problem occurred.\nPlease try again.\nTry deleting the folder(with your "
"device's name)", 'PyCloud - Message', 0x00000000L + 0x00000010L + 0x00020000L)
# Reading input data
try:
with open(self.path + '/' + self.user_name + '/AppData/AppData.dat', "rb") as f:
self.list_year = pickle.load(f)
self.list_month = pickle.load(f)
self.list_images = pickle.load(f)
self.count = pickle.load(f)
f.close()
except IOError:
if not os.path.exists(self.user_name + '/AppData'):
folder = self.user_name + '/AppData'
os.makedirs(os.path.join(self.path, folder))
return
try:
# Downloading All Photos from user's iCloud account.
for photo in self.api.photos.all:
if photo.filename not in self.list_images:
download = photo.download()
info = str(photo.created).split()[0].split('-')
year = str(info[0])
month = calendar.month_name[int(info[1])]
if year not in self.list_year:
os.makedirs(os.path.join(self.path + '/' + self.user_name + '/Photos', year))
self.list_year.append(year)
if (month + '_' + year) not in self.list_month:
os.makedirs(os.path.join(self.path + '/' + self.user_name + '/Photos', year, month))
self.list_month.append((month + '_' + year))
self.list_images.append(photo.filename)
with open(self.path + '/' + self.user_name + '/Photos/{}/{}/{}'.format(year, month, photo.filename),
'wb+') as opened_file:
opened_file.write(download.raw.read())
opened_file.close()
self.message += '{} downloaded, Date of creation: {} {}, {}\n'.format(photo.filename, info[2],
calendar.month_abbr[int(info[1])],
info[0])
print '{} downloaded, Date of creation: {} {}, {}'.format(photo.filename, info[2],
calendar.month_abbr[int(info[1])],
info[0])
self.count += 1
with open(self.path + '/' + self.user_name + '/AppData/AppData.dat', "wb") as f:
pickle.dump(self.list_year, f)
pickle.dump(self.list_month, f)
pickle.dump(self.list_images, f)
pickle.dump(self.count, f)
f.close()
except PyiCloudServiceNotActivatedErrror:
error = 'Please log into https://icloud.com/ to\nmanually finish setting up your iCloud service' \
'\n(AUTHENTICATION_FAILED)'
win32api.MessageBox(0, error, 'PyCloud - Message', 0x00000000L + 0x00000010L + 0x00020000L)
self.phone = 'Device: ' + str(self.api.devices[self.dev]).split(':')[0] + '\n' + \
'User:' + str(self.api.devices[self.dev]).split(':')[1]
self.today = list(str(datetime.date.today()).split('-')) # gets today's date
self.time = list(str(datetime.datetime.now().time()).split(':')) # gets current time
self.time = self.time[0] + ':' + self.time[1]
self.message += 'Total Images: {}, Date of Download: {} {}, {} at {}\n'.format(self.count,
self.today[2],
calendar.month_abbr[
int(self.today[1])
],
self.today[0],
self.time)
print 'Total Images: {}, Date of Download: {} {}, {} at {}'.format(self.count,
self.today[2],
calendar.month_abbr[
int(self.today[1])
],
self.today[0],
self.time)
# self.send_mail(self.username, str(self.today[2]), calendar.month_abbr[int(self.today[1])], str(self.today[0]))
# Saving application Log to a text file
log_file = open(self.path + '/' + self.user_name + '/AppData/AppLog.txt', 'a+')
log_file.writelines(self.message + '--------------------------------------------------------------\n')
log_file.close()
win32api.MessageBox(0, self.phone + '\n' + self.message, 'PyCloud - Message',
0x00000001L + 0x00000040L + 0x00020000L)
if __name__ == '__main__':
Cmd().download_photos()
``` |
{
"source": "Aadit-Bhojgi/PyPher",
"score": 3
} |
#### File: Aadit-Bhojgi/PyPher/Decipher.py
```python
class Decipher:
def __init__(self, data='', password=''):
self.data = data
self.password = password
self.result = False
def decipher_password(self):
# For authentication(Deciphering your password)..
helper = self.data.split('*/*/')
count, separator, compare, helper_list = 1, '', '', []
for i in helper[0]:
if i == '%':
helper_list.append(int(separator) - count)
count += 1
separator = ''
continue
separator += i
for i in helper_list:
compare += chr(i)
if compare == self.password:
self.result = True
return self.result, helper
def decipher_message(self, helper):
# Deciphering your message
helper_list = []
helper_msg = helper[1].split('@*@')
message = str(helper_msg[0])
size = int(helper_msg[1])
separator, deciphered = '', ''
for i in message:
if i == '@':
helper_list.append(int(separator) - size)
size -= 1
separator = ''
continue
separator += i
for i in helper_list:
deciphered += chr(i)
return deciphered
if __name__ == '__main__':
pass
``` |
{
"source": "aaditep/aadioptimize",
"score": 4
} |
#### File: aadioptimize/aadioptimize/optimize.py
```python
import numpy.matlib as mat
import numpy as np
N = 1
def initDE(N_p,lb,ub,prob):
"""
Initializes parameters for differential evolution
Parameters
----------
N_p : int
Population size
lb : int
Lower bound of the search space
ub : int
Upper bound of the search space
prob : function
The objective function
Returns
-------
lb : numpy.ndarray
Returns the lower bound as a numpy array
ub : numpy.ndarray
Returns the upper bound as a numpy array
f : numpy.ndarray
Returns vector for fitness function values
fu : numpy.ndarray
Returns empty vector for trial fitness function values
D : int
Returns the number of decision variables for the crossover process
U : numpy.ndarray
Returns matrix for trial solutions
P : numpy.ndarray
Returns randomly generated matrix of target vectors
"""
lb = np.full(N_p,lb)
ub = np.full(N_p,ub)
f = np.zeros((N_p,1)) #empty vector for fitness function
fu = np.zeros((N_p,1))#newly created trial vector
D = len(lb) # Determining amount of decision variables
U = np.zeros((N_p,D)) #Matrix for storing trial solutions
#Initial random population
P = mat.repmat(lb,N_p,1)+mat.repmat((ub-lb),N_p,1)*np.random.rand(len(ub-lb),N_p)
for p in np.arange(N_p):
f[p]=prob(P[p,])
return lb,ub,f,fu,D,U,P
#This function starts the mutation process and generates a donorvector
def mutation(i,N_p,t,T,P,N_vars,F_min,F_const):
"""
Function that generates a donor vector. If there are >= 3 search variables, the
adaptive scaling factor is used; otherwise the constant one. It generates
candidates for the donor vector by randomly choosing rows from the initial matrix,
excluding the i-th row.
Parameters
----------
i : int
Number of the row in matrix
N_p : int
Population size
t : int
Iteration index
T : int
Total number of iterations
P : numpy.ndarray
Matrix of target vectors
N_vars : int
Number of search variables
F_min : optional (float,int)
The minimum value of scaling factor. Used when N_vars >= 3
F_const : optional (float,int)
The constant value of scaling factor
Returns
-------
V : numpy.ndarray
The donor vector
"""
#Adaptive scaling factor
if N_vars >= 3:
F=F_min*2**np.exp(1-(T/(T+1-t)))
else:
F = F_const
#candidates are assigned without the i-th element
candidates= np.delete(np.arange(N_p), np.where(np.arange(N_p)==i))
#3 target vectors are picked out randomly for the donorvector generator
cand_rand=np.random.choice(candidates,3,replace= False)
X1=P[cand_rand[0],]
X2=P[cand_rand[1],]
X3=P[cand_rand[2],]
#Donorvctor generator
V= X1 + F*(X2-X3)
return V
#this function evaluates donor vector and uses parts of it which fit better
def crossover(f,P_c_min,P_c_max,i,D,V,P,U):
"""
Crossover function for differential evolution. This function uses an adaptive crossover rate.
The minimum and maximum of its range are set by the user. It decides whether or not to use the
donor vector's j-th element in the U matrix.
Parameters
----------
f : numpy.ndarray
The fitness function array
P_c_min : optional(float/integer)
Minimum crossover rate value for adaptive crossover rate
P_c_max : optional(float/integer)
Maximum crossover rate value for adaptive crossover rate
i : int
Row number
D : int
The amount of decision variables for the crossover process
V : numpy.ndarray
The donor vector
P : numpy.ndarray
Matrix of initial target vectors
U : numpy.ndarray
Matrix of trial solutions
Returns
-------
U : numpy.ndarray
Returns the U matrix with new trial solutions.
"""
#ADAPTIVE Crossover
if f[i] < np.mean(f):
P_c = P_c_min + (P_c_max-P_c_min)*((f[i]-np.mean(f))/(np.max(f)-np.mean(f)))
else:
P_c = P_c_min
delta = np.random.randint(0,D-1)
for j in np.arange(D):
if np.random.uniform(0,1) <= P_c or delta == j:
U[i,j] = V[j]
else:
U[i,j]=P[i,j]
return U
#this function bounds the vector and replaces the old target vector with new if better
def boundgreed(N,j,U,P,f,fu,ub,lb,prob):
"""
This function bounds the vector elements according to the bounds set by the user. If a bound
is violated, the element is replaced by either the lower or upper bound. Then greedy selection
is performed: the objective function is evaluated at the new vector and compared to the previous
value, and if the new value is smaller, the corresponding row of the target vector matrix is
replaced by the new vector.
Parameters
----------
N : int
Number of evaluations counter
j : int
Row index of the trial vector
U : numpy.ndarray
Matrix of trial vectors
P : numpy.ndarray
Matrix of target vectors
f : numpy.ndarray
Target vectors' Fitness function array.
fu : numpy.ndarray
Trial vectors' Fitness function array.
ub : numpy.ndarray
Upperbound
lb : numpy.ndarray
Lowerbound
prob : function
The objective function
Returns
-------
f : numpy.ndarray
New trial vectors' fitness function value that will be used in next iteration
P : numpy.ndarray
New trial vector matrix that will be used in next iteration
"""
U[j]=np.minimum(U[j], ub)
U[j]=np.maximum(U[j], lb)
##
fu[j]=prob(U[j])
N = N+1
if fu[j] < f[j]:
P[j]= U[j]
f[j]=fu[j]
return N,f,P
#distance from known location
def distance(known_loc,found_loc,N_vars):
"""
Function that uses the Pythagorean theorem to calculate the distance between the found point
and a known location. NB: this function is not used by the main program, so it must
be called separately.
Parameters
----------
known_loc : numpy.ndarray
Known location that is given by the user
found_loc : numpy.ndarray
Found location with the program
N_vars : int
Number of search variables
Returns
-------
dist : float
Returns the distance between the points.
"""
undersqrt=np.zeros(N_vars)
for i in (np.arange(N_vars)):
undersqrt[i] =(known_loc[i]-found_loc[i])**2
dist = np.sqrt(sum(undersqrt))
return dist
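# e.g. distance([0, 0], [3, 4], 2) -> 5.0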
def main(N,N_p,T,lb,ub,prob,N_vars,F_min,F_const,P_c_min,P_c_max):
"""
Differential evolution optimizer. It takes all the parameters and uses them to find the
global optimum of the objective function. At least it tries. The number of fitness-function
evaluations is 1+(N_p*T).
Parameters
----------
N : int
Number of evaluations counter
N_p : int
Number of population
T : int
Number of iterations
lb : int
Lower bound of search space
ub : int
Upper bound of search space
prob : function
Function for objective function
N_vars : int
Number of search variables
F_min : optional (int/float)
Minimum value for the scaling factor
F_const : optional (int/float)
Constant value for the scaling factor
P_c_min : optional (int/float)
Minimum value of Crossover rate
P_c_max : optional (int/float)
Maximum value of crossover rate
Raises
------
Exception
Raised when the population size (N_p) is less than 4
Returns
-------
best_of_f : numpy.ndarray
Returns the best value of the objective function
globopt : numpy.ndarray
Returns global optimum location
"""
lb,ub,f,fu,D,U,P = initDE(N_p,lb,ub,prob)
if N_p < 4:
raise Exception("Sorry, there must be atleast a population of 4. Reccomended 20")
for t in np.arange(T):
for i in np.arange(N_p):
V = mutation(i,N_p,t,T,P,N_vars,F_min,F_const)
U=crossover(f,P_c_min,P_c_max,i,D,V,P,U)
for j in np.arange(N_p):
N,f,P = boundgreed(N,j,U,P,f,fu,ub,lb,prob)
#if N == 500:
#break
best_of_f= min(f)
globopt = P[f.argmin()]
return N,best_of_f, globopt[:N_vars]
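# Hypothetical call (argument values mirror those used in the test suite):
# N, best_f, opt = main(N=1, N_p=20, T=100, lb=-5, ub=5, prob=rosenbrock,
# N_vars=2, F_min=0.5, F_const=0.7, P_c_min=0.4, P_c_max=0.9)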
```
#### File: aadioptimize/tests/test_optimize.py
```python
import unittest
from aadioptimize import optimize
from funcs import funcs
import numpy as np
class TestOptimize(unittest.TestCase):
def test_distance(self):
# check that the distance calculator works across a variety of dimensions
dist = optimize.distance([1,1],[1,1],2)
self.assertEqual(dist, 0)
dist = optimize.distance([1,1,0],[1,1,1],3)
self.assertEqual(dist,1)
dist = optimize.distance([-1,-1,-1],[0,-1,-1],3)
self.assertEqual(dist,1)
dist = optimize.distance([-1,-1,-1,-1,-1],[8,-1,-1,-1,-1],5)
self.assertEqual(dist,9)
def test_funcs(self):
#Check that the test functions themselves are correct
self.assertEqual(funcs.rosenbrock([1,1]),0)
self.assertEqual(round(funcs.kenny([-0.54719,-1.54719]),3),-1.913)
def test_initDE(self):
#check that the function returns arrays with the correct shapes and values
np.random.seed(10)
N_p = 5
lb = -5
ub= 5
prob = funcs.rosenbrock
lb,ub,f,fu,D,U,P = optimize.initDE(N_p,lb,ub,prob)
#lowerbound and upperbound
self.assertEqual(len(lb),5)
self.assertEqual(len(ub),5)
#fitness function
self.assertIsNot(f,np.zeros)
self.assertEqual(len(f),5)
#trial fitness function
self.assertEqual(len(fu),5)
#Decision variables
self.assertEqual(D,5)
#U must be empty
self.assertFalse(U.all())
#P must be all True
self.assertTrue(P.all())
#Testing with random seed if the arrays are correct
arr1 = P
arr2 = np.array([[ 2.71320643, -4.79248051, 1.33648235, 2.48803883, -0.01492988],
[-2.75203354, -3.01937135, 2.60530712, -3.30889163, -4.11660186],
[ 1.85359818, 4.53393346, -4.96051734, 0.12192263, 3.12620962],
[ 1.12526067, 2.21755317, -2.08123932, 4.17774123, 2.14575783],
[ 0.42544368, -3.57829952, -1.2665924 , 1.74133615, -0.58166826]])
self.assertIsNone(np.testing.assert_almost_equal(arr1, arr2))
#Testing fitness function values
arr3 = f
arr4 = np.array(
[[14774.83290809],
[11235.36973715],
[ 121.31257991],
[ 90.52077474],
[ 1413.5651541 ]])
self.assertIsNone(np.testing.assert_almost_equal(arr3, arr4))
def test_mutation(self):
np.random.seed(10)
i = 5
N_p =5
t =5
T=50
N_vars = 4
P =np.array([[ 2.71320643, -4.79248051, 1.33648235, 2.48803883, -0.01492988],
[-2.75203354, -3.01937135, 2.60530712, -3.30889163, -4.11660186],
[ 1.85359818, 4.53393346, -4.96051734, 0.12192263, 3.12620962],
[ 1.12526067, 2.21755317, -2.08123932, 4.17774123, 2.14575783],
[ 0.42544368, -3.57829952, -1.2665924 , 1.74133615, -0.58166826]])
F_min =0.5
F_const = 0.7
V= optimize.mutation(i,N_p,t,T,P,N_vars,F_min,F_const)
self.assertEqual(len(V),5)
def test_crossover(self):
np.random.seed(10)
f =np.array(
[[14774.83290809],
[11235.36973715],
[ 121.31257991],
[ 90.52077474],
[ 1413.5651541 ]])
P_c_min=0.5
P_c_max=0.9
i=4
D=5
V=[2,4,3,1.5,2.5]
P =np.array([[ 2.71320643, -4.79248051, 1.33648235, 2.48803883, -0.01492988],
[-2.75203354, -3.01937135, 2.60530712, -3.30889163, -4.11660186],
[ 1.85359818, 4.53393346, -4.96051734, 0.12192263, 3.12620962],
[ 1.12526067, 2.21755317, -2.08123932, 4.17774123, 2.14575783],
[ 0.42544368, -3.57829952, -1.2665924 , 1.74133615, -0.58166826]])
U = np.zeros((5,5))
U = optimize.crossover(f,P_c_min,P_c_max,i,D,V,P,U)
self.assertFalse(U.all())
def test_boundgreed(self):
N_p =5
N=5
j=4
U =np.array([[ 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. ],
[ 2. , 4. , -1.2665924 , 1.74133615, -0.58166826]])
P =np.array([[ 2.71320643, -4.79248051, 1.33648235, 2.48803883, -0.01492988],
[-2.75203354, -3.01937135, 2.60530712, -3.30889163, -4.11660186],
[ 1.85359818, 4.53393346, -4.96051734, 0.12192263, 3.12620962],
[ 1.12526067, 2.21755317, -2.08123932, 4.17774123, 2.14575783],
[ 0.42544368, -3.57829952, -1.2665924 , 1.74133615, -0.58166826]])
f =np.array(
[[14774.83290809],
[11235.36973715],
[ 121.31257991],
[ 90.52077474],
[ 1413.5651541 ]])
fu =np.zeros((N_p,1))
prob = funcs.rosenbrock
lb = np.full(N_p,5)
ub = np.full(N_p,5)
N,f,P = optimize.boundgreed(N,j,U,P,f,fu,ub,lb,prob)
self.assertEqual(len(f),5)
self.assertEqual(len(P),5)
self.assertTrue(P.any())
def test_main(self):
N=4
N_p= 5
T =50
lb =5
ub=5
prob = funcs.rosenbrock
N_vars =2
F_min = 0.5
F_const = 0.6
P_c_min = 0.5
P_c_max = 0.8
N,best_f,globopt= optimize.main(N,N_p,T,lb,ub,prob,N_vars,F_min,F_const,P_c_min,P_c_max)
self.assertEqual(len(best_f),1)
self.assertEqual(len(globopt),2)
if __name__ == '__main__':
unittest.main()
```
#### File: aadioptimize/funcs/funcs.py
```python
import numpy as np
def rosenbrock(X):
x = X[0]
y = X[1]
f = ((1-x)**2 + 100*(y-x**2)**2)
return f
def kenny(X):
x=X[0]
y=X[1]
F = np.sin(x+y) +(x-y)**2-(1.5*x)+(2.5*y)+1
return F
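# Known values (see the test suite): rosenbrock([1, 1]) == 0;
# kenny([-0.54719, -1.54719]) is approximately -1.913 (kenny is the McCormick function)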
``` |
{
"source": "aaditgupta21/aaditgupta-tri3",
"score": 4
} |
#### File: code/week2/factorial.py
```python
class Factorial:
def __call__(self, number):
if number == 0:
return 1
elif number == 1:
return 1
else:
return number * self(number-1)
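# e.g. Factorial()(5) -> 120, via the recursive __call__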
def tester():
num = int(input("Enter a number for factorial: "))
try:
fac = Factorial()
print("The factorial is", fac(num))
except:
print("Sorry, something went wrong.")
if __name__ == "__main__":
tester()
``` |