{
"source": "7Bpencil/mygit",
"score": 3
} |
#### File: mygit/commands/branch.py
```python
import argparse
import logging
from colorama import Fore
from textwrap import dedent
from mygit.state import State
from mygit.constants import Constants
from mygit.command import Command
from mygit.backend import remove_branch, create_new_branch_from_current, create_new_branch_from_commit, show_branches
class Branch(Command):
def __init__(self, subparsers: argparse._SubParsersAction, commands_dict: dict):
command_description = dedent(
'''
Bunch of tools for branching
Usage examples:
mygit branch -r dev remove branch dev
Note: you can't remove head/nonexistent branch
mygit branch -l show all branches
mygit branch -a expl y76ec54... create new branch with name expl,
that will point to commit y76ec54...
Note: you can't create branch from nonexistent commit
you can't create branch with already existent name
mygit branch -a expl HEAD create new branch with name expl,
that will point to head commit
''')
super().__init__("branch", command_description, subparsers, commands_dict)
def _add_arguments(self, command_parser: argparse.ArgumentParser):
branch_group = command_parser.add_mutually_exclusive_group()
branch_group.add_argument("-r", "--remove", nargs=1,
metavar="branch",
help="removes specified branch")
branch_group.add_argument("-l", "--list", action='store_true',
default=False,
help="shows all branches")
branch_group.add_argument("-a", "--add_from_commit", nargs=2,
metavar="checksum",
help="creates new branch from commit")
def work(self, namespace: argparse.Namespace, constants: Constants, state: State):
if namespace.remove is not None:
remove_branch(namespace.remove[0], constants)
elif namespace.add_from_commit is not None:
if namespace.add_from_commit[1] == "HEAD":
create_new_branch_from_current(namespace.add_from_commit[0], constants)
else:
create_new_branch_from_commit(namespace.add_from_commit[0], namespace.add_from_commit[1], constants)
elif namespace.list:
show_branches(constants)
else:
logging.warning(Fore.YELLOW + "write arguments")
```
#### File: mygit/commands/checkout.py
```python
import argparse
from textwrap import dedent
from mygit.state import State
from mygit.constants import Constants
from mygit.command import Command
from mygit.backend import create_new_branch_from_current_and_checkout, checkout_to_branch
class Checkout(Command):
def __init__(self, subparsers: argparse._SubParsersAction, commands_dict: dict):
command_description = dedent(
'''
Restore workspace state so it becomes identical to another branch's recorded state
Usage examples:
mygit checkout expl restore expl branch workspace
Note: you can't checkout with indexed but uncommitted changes
Note: you can't checkout to current/nonexistent branch
mygit checkout -n expl creates new branch expl from HEAD and checkouts to it.
Note: it will not change your workspace or index
''')
super().__init__("checkout", command_description, subparsers, commands_dict)
def _add_arguments(self, command_parser: argparse.ArgumentParser):
command_parser.add_argument("branch", nargs=1)
command_parser.add_argument('-n', '--new_branch', action='store_true',
default=False)
def work(self, namespace: argparse.Namespace, constants: Constants, state: State):
if namespace.new_branch:
create_new_branch_from_current_and_checkout(namespace.branch[0], constants)
else:
checkout_to_branch(namespace.branch[0], constants, state)
```
#### File: mygit/commands/merge.py
```python
import argparse
from textwrap import dedent
from mygit.state import State
from mygit.constants import Constants
from mygit.command import Command
from mygit.backend import merge
class Merge(Command):
def __init__(self, subparsers: argparse._SubParsersAction, commands_dict: dict):
command_description = dedent(
'''
Fast-forward HEAD to another branch state (if it's possible)
Usage examples:
mygit merge dev merge commits from dev into HEAD
Note: fast-forward is possible only if HEAD commit's line
is subset of branch commit's line
''')
super().__init__("merge", command_description, subparsers, commands_dict)
def _add_arguments(self, command_parser: argparse.ArgumentParser):
command_parser.add_argument("merge_branch", nargs=1)
def work(self, namespace: argparse.Namespace, constants: Constants, state: State):
merge(namespace.merge_branch[0], constants, state)
```
#### File: mygit/tests/test_vcs.py
```python
import mygit.backend as backend
import mygit.main as mygit
import pytest
import tempfile
from test_utils import *
from hashlib import sha1
from mygit.constants import Constants
from mygit.state import State
from pathlib import Path
from shlex import split as shlex_split
from zlib import decompress, compress
class TestVCS:
def setup_class(self):
self.cwd = tempfile.TemporaryDirectory()
self.cwd_path = Path(self.cwd.name)
self.constants = Constants(self.cwd_path)
def teardown_class(self):
self.cwd.cleanup()
pass
def teardown_method(self, method):
clean_directory(self.cwd_path)
pass
def test_functional(self):
"""Init new repository, create test file and save it in vcs"""
mygit.main(self.cwd_path, shlex_split("init")) # init new repository
test_file_name = "readme.md"
test_file_path = self.cwd_path / test_file_name
with Path.open(test_file_path, "w") as test_file: # create test file
test_file.write("hello world")
with Path.open(test_file_path, "rb") as test_file:
content = compress(test_file.read(), -1)
test_file_checksum = sha1(content).hexdigest()
state = get_current_state(self.constants)
assert state.status_not_indexed_paths == ['modified: readme.md']
mygit.main(self.cwd_path, shlex_split("index readme.md")) # index test file
state = get_current_state(self.constants)
assert state.status_not_indexed_paths == []
assert test_file_path in state.current_indexed_paths
assert state.current_indexed_paths.get(test_file_path) == test_file_checksum
mygit.main(self.cwd_path, shlex_split("commit created_readme")) # save test file in vcs
state = get_current_state(self.constants)
assert state.current_indexed_paths == {}
assert (self.constants.mygit_objects_path / test_file_checksum).exists() # was file really compressed & saved?
``` |
{
"source": "7bruno/getnet-py",
"score": 2
} |
#### File: getnet/errors/business_error.py
```python
from getnet.errors.request_error import RequestError
class BusinessError(RequestError):
@property
def details(self):
return self.response.json().get("details")[0]
@property
def payment_id(self):
return self.details.get("payment_id")
@property
def authorization_code(self):
return self.details.get("authorization_code")
@property
def terminal_nsu(self):
return self.details.get("terminal_nsu")
@property
def acquirer_transaction_id(self):
return self.details.get("acquirer_transaction_id")
@property
def status(self):
return self.details.get("status")
```
#### File: payments/credit/delayed.py
```python
class CreditConfirm:
def __init__(self, confirm_date, message):
self.confirm_date = confirm_date
self.message = message
class DelayedResponse:
def __init__(self, payment_id: str, seller_id: str, amount: int, currency: str, order_id: str, status: str, credit_confirm: dict):
self.payment_id = payment_id
self.seller_id = seller_id
self.amount = amount
self.currency = currency
self.order_id = order_id
self.status = status
self.credit_confirm = CreditConfirm(**credit_confirm)
class DelayedAdjustResponse:
def __init__(
self,
payment_id: str,
seller_id: str,
amount: int,
currency: str,
order_id: str,
status: str,
authorization_code: str,
authorized_at: str,
reason_code: str,
reason_message: str,
acquirer: str,
soft_descriptor: str,
terminal_nsu: str,
acquirer_transaction_id: str,
adjustment_acquirer_transactiion_id: str
):
self.payment_id = payment_id
self.seller_id = seller_id
self.amount = amount
self.currency = currency
self.order_id = order_id
self.status = status
self.authorization_code = authorization_code
self.authorized_at = authorized_at
self.reason_code = reason_code
self.reason_message = reason_message
self.acquirer = acquirer
self.soft_descriptor = soft_descriptor
self.terminal_nsu = terminal_nsu
self.acquirer_transaction_id = acquirer_transaction_id
self.adjustment_acquirer_transactiion_id = adjustment_acquirer_transactiion_id
```
#### File: payments/credit/service.py
```python
from typing import Union
from uuid import UUID
from getnet.services.payments import Customer
from getnet.services.payments import OrderItem
from getnet.services.payments.marketplace_subseller import MarketplaceSubseller
from getnet.services.payments.credit.delayed import DelayedResponse
from getnet.services.payments.credit.credit import Credit
from getnet.services.payments.credit.credit_cancel import CreditCancelPaymentResponse
from getnet.services.payments.credit.credit_response import CreditPaymentResponse
from getnet.services.payments.order import Order
from getnet.services.service import Service
from getnet.services.utils import Device
class CreditPaymentService(Service):
path = "/v1/payments/credit"
def create(
self,
amount: int,
currency: str,
order: Order,
credit: Credit,
customer: Customer,
device: Device = None,
) -> CreditPaymentResponse:
data = {
"seller_id": self._client.seller_id,
"amount": amount,
"currency": currency,
"order": order.as_dict(),
"customer": customer.as_dict(),
"credit": credit.as_dict(),
}
if device is not None:
data["device"] = device.as_dict()
response = self._post(self._format_url(), json=data)
return CreditPaymentResponse(**response)
def cancel(self, payment_id: Union[UUID, str]) -> CreditCancelPaymentResponse:
response = self._post(
self._format_url(path="/{payment_id}/cancel",
payment_id=str(payment_id))
)
return CreditCancelPaymentResponse(**response)
def marketplaceCreditCreate(
self,
marketplace_subseller_payments: list,
amount: int,
order: Order,
customer: Customer,
credit: Credit,
currency: str = "BRL",
device: Device = None,
**kwargs
):
data = {
"seller_id": self._client.seller_id,
"amount": amount,
"currency": currency,
"order": order.as_dict(),
"customer": customer.as_dict(),
"credit": credit.as_dict()
}
if marketplace_subseller_payments is not None:
data["marketplace_subseller_payments"] = marketplace_subseller_payments
if device is not None:
data["device"] = device.as_dict()
data["credit"].pop("authenticated")
data["customer"]["billing_address"] = data["customer"].pop("address")
data["customer"].pop("observation")
phone = data["customer"].pop("phone_number")
cellphone = data["customer"].pop("celphone_number")
data["customer"].pop("seller_id")
data["customer"].pop("birth_date")
data["customer"].setdefault(
"name", f'{data["customer"]["first_name"]} {data["customer"]["last_name"]}')
data["shippings"] = [{
"first_name": data["customer"]["first_name"],
"name": f'{data["customer"]["first_name"]} {data["customer"]["last_name"]}',
"email":data["customer"]["email"],
"phone_number":cellphone,
"shipping_amount": 0,
"address":data["customer"]["billing_address"]
}]
response = self._post(self._format_url(), json=data)
if "callback" in kwargs.keys():
kwargs["callback"](data, response, self._format_url())
return CreditPaymentResponse(**response)
def delayed(self, payment_id: Union[UUID, str], amount: int, marketplace_subseller_payments: list = None, **kwargs):
data = {
"amount": amount,
}
if marketplace_subseller_payments:
data["marketplace_subseller_payments"] = marketplace_subseller_payments
response = self._post(self._format_url() +
f'/{payment_id}/confirm', json=data)
if "callback" in kwargs.keys():
kwargs["callback"](data, response, self._format_url() +
f'/{payment_id}/confirm')
return DelayedResponse(**response)
def delayedAdjust(self, payment_id: Union[UUID, str], amount: int, currency: str = "BRL", marketplace_subseller_payments: list = None, **kwargs):
data = {
"amount": amount,
"currency": currency
}
if marketplace_subseller_payments:
data["marketplace_subseller_payments"] = marketplace_subseller_payments
response = self._post(self._format_url() +
f'/{payment_id}/adjustment', json=data)
if "callback" in kwargs.keys():
kwargs["callback"](data, response, self._format_url() +
f'/{payment_id}/adjustment')
return DelayedAdjustResponse(**response)
```
#### File: services/payments/marketplace_subseller.py
```python
from .order_items import OrderItem
class MarketplaceSubseller:
def __init__(self, subseller_sales_amount: int, subseller_id: int):
self.subseller_sales_amount = subseller_sales_amount
self.subseller_id = subseller_id
self.order_items = []
def add_order_item(self, order_item: OrderItem):
self.order_items.append(order_item.as_dict())
def as_dict(self):
return self.__dict__.copy()
```
#### File: services/plans/period.py
```python
from enum import Enum, unique
from typing import Union
@unique
class PeriodType(Enum):
"""PeriodType is the enum with Plan Period options"""
YEARLY = "yearly"
MONTHLY = "monthly"
BIMONTHLY = "bimonthly"
QUARTERLY = "quarterly"
SEMESTERLY = "semesterly"
SPECIFIC = "specific"
class Period:
"""Period represents the Plan Period entity"""
type: PeriodType
billing_cycle: int
specific_cycle_in_days: int
def __init__(
self,
type: Union[PeriodType, str],
billing_cycle: int,
specific_cycle_in_days: int = None,
):
"""
Args:
type: (PeriodType)
billing_cycle (int):
specific_cycle_in_days (int):
"""
if isinstance(type, str):
try:
type = PeriodType[type.upper()]
except Exception:
raise AttributeError("Invalid Type")
if type == PeriodType.SPECIFIC and specific_cycle_in_days is None:
raise AttributeError(
"'specific_cycle_in_days' required if type is specific"
)
self.type = type
self.billing_cycle = billing_cycle
self.specific_cycle_in_days = specific_cycle_in_days
def as_dict(self):
"""Format the data as dict to be sent to Getnet"""
data = self.__dict__.copy()
data["type"] = self.type.value
data.pop("specific_cycle_in_days")
if self.type == PeriodType.SPECIFIC:
data["specific_cycle_in_days"] = self.specific_cycle_in_days
return data
```
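For clarity, here is a brief hypothetical usage of the `Period` class above (the import path is taken from plan.py's own import); the billing values are invented for illustration:
```python
# Hypothetical usage of Period; the values are made up for the example.
from getnet.services.plans.period import Period, PeriodType

monthly = Period(type="monthly", billing_cycle=12)
print(monthly.as_dict())
# -> {'type': 'monthly', 'billing_cycle': 12}

specific = Period(type=PeriodType.SPECIFIC, billing_cycle=12, specific_cycle_in_days=45)
print(specific.as_dict())
# -> {'type': 'specific', 'billing_cycle': 12, 'specific_cycle_in_days': 45}
```
Note that `specific_cycle_in_days` is only serialized when the period type is `specific`, mirroring the `as_dict` logic above.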
#### File: services/plans/plan.py
```python
from enum import Enum, unique
from typing import Union, List, Optional
from getnet.services.plans.period import Period
@unique
class ProductType(Enum):
CASH_CARRY = "cash_carry"
DIGITAL_CONTENT = "digital_content"
DIGITAL_GOODS = "digital_goods"
GIFT_CARD = "gift_card"
PHYSICAL_GOODS = "physical_goods"
RENEW_SUBS = "renew_subs"
SHAREWARE = "shareware"
SERVICE = "service"
class Plan:
seller_id: str
name: str
description: str
amount: int
currency: str
payment_types: List[str] = ("credit_card",)
sales_tax: int = 0
product_type: Optional[Union[ProductType, str]]
period: Period
def __init__(
self,
name: str,
amount: int,
currency: str,
payment_types: List[str] = ("credit_card",),
sales_tax: int = 0,
description: Optional[str] = None,
product_type: Optional[ProductType] = None,
seller_id: Optional[str] = None,
period: Optional[Union[Period, dict]] = None,
):
if isinstance(product_type, str):
try:
product_type = ProductType[product_type.upper()]
except Exception:
raise AttributeError("Invalid Product Type")
self.product_type = product_type
self.name = name
self.description = description
self.amount = amount
self.currency = currency
self.payment_types = payment_types
self.sales_tax = sales_tax
self.seller_id = seller_id
self.period = period if isinstance(period, Period) else Period(**period)
def __str__(self):
return self.name
def __eq__(self, other):
return (self.seller_id, self.name, self.product_type) == (
other.seller_id,
other.name,
other.product_type,
)
def as_dict(self):
data = self.__dict__.copy()
data["product_type"] = self.product_type.value
period = data.pop("period")
data["period"] = period.as_dict()
return data
```
#### File: services/subscriptions/credit.py
```python
from typing import Union
from getnet.services.customers import Address
from getnet.services.payments.credit import Credit as BaseCredit
class Credit(BaseCredit):
billing_address: Address
def __init__(self, billing_address: Union[Address, dict] = None, **kwargs):
self.billing_address = (
billing_address
if isinstance(billing_address, Address) or billing_address is None
else Address(**billing_address)
)
super(Credit, self).__init__(**kwargs)
def as_dict(self):
data = {
"transaction_type": self.transaction_type,
"number_installments": self.number_installments,
"card": self.card._as_dict(),
}
if self.billing_address is not None:
data["billing_address"] = self.billing_address.as_dict()
if self.soft_descriptor is not None:
data["soft_descriptor"] = self.soft_descriptor
return data
```
#### File: getnet-py/getnet/utils.py
```python
from requests.models import Response
from getnet import errors
class handler_request:
def __init__(self, client, logger):
"""
Args:
client:
"""
self.client = client
self.logger = logger
def __enter__(self):
if self.client.access_token_expired():
self.client.auth()
return self.logger
def __exit__(self, exc_type, exc_val, exc_tb):
"""
Args:
exc_type:
exc_val:
exc_tb:
"""
pass
def handler_request_exception(response: Response):
"""
Args:
response (Response):
"""
status_code = response.status_code
data = response.json()
if "details" in data and len(data.get("details")) > 0:
data = data.get("details")[0]
kwargs = {
"error_code": data.get("error_code")
or data.get("error")
or str(data.get("status_code")),
"description": data.get("description_detail")
or data.get("description")
or data.get("error_description")
or data.get("message"),
"response": response,
}
message = "{} {} ({})".format(
kwargs.get("error_code"),
kwargs.get("description"),
response.url,
)
if status_code == 400:
return errors.BadRequest(message, **kwargs)
elif status_code == 402:
return errors.BusinessError(message, **kwargs)
elif status_code == 404:
return errors.NotFound(message, **kwargs)
elif status_code == 500:
return errors.ServerError(message, **kwargs)
elif status_code == 503:
return errors.ServiceUnavailable(message, **kwargs)
elif status_code == 504:
return errors.GatewayTimeout(message, **kwargs)
else:
return errors.RequestError(message, **kwargs)
```
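A minimal sketch of how `handler_request_exception` above maps a response to an error class. The `Response` object is stubbed by hand (a pattern often used in tests), and the URL and JSON payload are invented for illustration:
```python
# Hypothetical sketch: stub a 402 response and map it to a BusinessError.
from requests.models import Response
from getnet.utils import handler_request_exception

resp = Response()
resp.status_code = 402
resp.url = "https://api.example.com/v1/payments/credit"  # placeholder URL
resp._content = b'{"details": [{"status": "DENIED", "error_code": "PAYMENTS-402", "description": "Not approved"}]}'

err = handler_request_exception(resp)
print(type(err).__name__)  # -> BusinessError
print(err.details)         # first entry of the "details" list in the body
```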
#### File: services/cards/test_card.py
```python
import pytest
from getnet.services.cards.card import Card
from getnet.services.token.card_token import CardToken
def test_invalid_expiration_month(card_sample: dict):
with pytest.raises(TypeError):
card_sample["expiration_month"] = 13
Card(**card_sample)
def test_invalid_expiration_year(card_sample: dict):
with pytest.raises(TypeError):
card_sample["expiration_year"] = 100
Card(**card_sample)
def test_invalid_customer_id(card_sample: dict):
with pytest.raises(TypeError):
card_sample["customer_id"] = "1" * 101
Card(**card_sample)
def test_invalid_security_code2(card_sample: dict):
with pytest.raises(TypeError):
card_sample["security_code"] = "12"
Card(**card_sample)
def test_invalid_security_code5(card_sample: dict):
with pytest.raises(TypeError):
card_sample["security_code"] = "12345"
Card(**card_sample)
def test_number_token_as_str(card_sample: dict):
card_sample["number_token"] = "12345"
card = Card(**card_sample)
assert isinstance(card.number_token, CardToken)
assert "12345" == card.number_token.number_token
def test_invalid_brand(card_sample: dict):
with pytest.raises(TypeError):
card_sample["brand"] = "12345"
Card(**card_sample)
def test_as_dict(card_sample: dict):
card = Card(**card_sample)
assert card_sample == card._as_dict()
```
#### File: services/cards/test_card_response.py
```python
import pytest
from getnet.services.cards.card_response import CardResponse
def test_invalid_without_card_id(card_response_sample: dict):
with pytest.raises(TypeError):
card_response_sample.pop("card_id")
CardResponse(**card_response_sample)
def testAsDict(card_response_sample: dict):
card = CardResponse(**card_response_sample)
value = card._as_dict()
assert "used_at" not in value
assert "created_at" not in value
assert "updated_at" not in value
```
#### File: getnet/services/conftest.py
```python
import os
import pytest
from pytest_mock import MockFixture
from getnet import Client, Environment
@pytest.fixture
def client_mock(mocker: MockFixture):
return mocker.patch("getnet.Client")
@pytest.fixture(scope="module")
def client():
return Client(
os.environ.get("GETNET_SELLER_ID"),
os.environ.get("GETNET_CLIENT_ID"),
os.environ.get("GETNET_CLIENT_SECRET"),
Environment.HOMOLOG,
)
```
#### File: services/customers/test_service.py
```python
from getnet import Client
from getnet.services.customers import Service, Customer
from getnet.services.service import ResponseList
def test_create(
client_mock: Client, customer_sample: dict, customer_response_sample: dict
):
client_mock.post.return_value = customer_response_sample
service = Service(client_mock)
customer = service.create(Customer(**customer_sample))
assert isinstance(customer, Customer)
assert customer_sample.get("customer_id") == customer.customer_id
def test_all(client_mock: Client, customer_response_sample: dict):
client_mock.get.return_value = {
"customers": [
customer_response_sample,
customer_response_sample,
customer_response_sample,
],
"page": 1,
"limit": 100,
"total": 3,
}
service = Service(client_mock)
customers = service.all()
assert isinstance(customers, ResponseList)
assert 1 == customers.page
assert 3 == customers.total
assert customer_response_sample.get("customer_id") == customers[0].customer_id
def test_get(client_mock: Client, customer_response_sample: dict):
client_mock.get.return_value = customer_response_sample
service = Service(client_mock)
customer = service.get(customer_response_sample.get("customer_id"))
assert isinstance(customer, Customer)
assert customer_response_sample.get("customer_id") == customer.customer_id
client_mock.get.assert_called_once_with(
"/v1/customers/{}".format(customer_response_sample.get("customer_id"))
)
```
#### File: services/plans/test_integration.py
```python
import pytest
from getnet.errors import NotFound
from getnet.services.plans import Service, Plan
from getnet.services.service import ResponseList
@pytest.mark.vcr
def test_create(client, plan_sample):
plan = Service(client).create(Plan(**plan_sample))
assert isinstance(plan, Plan)
assert plan.plan_id is not None
@pytest.mark.vcr
def test_get(client, plan_sample):
created_plan = Service(client).create(Plan(**plan_sample))
plan = Service(client).get(created_plan.plan_id)
assert isinstance(plan, Plan)
assert created_plan == plan
assert created_plan.plan_id == plan.plan_id
@pytest.mark.vcr
def test_invalid_get(client):
with pytest.raises(NotFound) as excinfo:
Service(client).get("14a2ce5d-ebc3-49dc-a516-cb5239b02285")
assert excinfo.value.error_code == "404"
@pytest.mark.vcr
def test_all(client):
plans = Service(client).all()
assert isinstance(plans, ResponseList)
assert plans.page == 1
assert plans.limit == 100
assert plans.total is not None
@pytest.mark.vcr
def test_all_not_found(client):
plans = Service(client).all(name="foobarTest123")
assert plans.total == 0
@pytest.mark.vcr
def test_update(client, plan_sample):
created_plan = Service(client).create(Plan(**plan_sample))
plan1 = Service(client).update(
created_plan.plan_id, "FooBar #1", created_plan.description
)
assert plan1.name == "FooBar #1"
plan2 = Service(client).update(created_plan, "FooBar #2")
assert plan2.name == "FooBar #2"
created_plan.name = "FooBar #3"
plan3 = Service(client).update(created_plan)
assert plan3.name == "FooBar #3"
@pytest.mark.vcr
def test_update_status(client, plan_sample):
created_plan = Service(client).create(Plan(**plan_sample))
assert created_plan.is_active is True
plan = Service(client).update_status(created_plan.plan_id, False)
assert plan.is_active is False
```
#### File: services/token/test_card_number.py
```python
import unittest
import pytest
from getnet.services.token import CardNumber
def test_invalid_card_number():
with pytest.raises(AttributeError):
CardNumber("123", "123")
def test_invalid_customer_id():
with pytest.raises(AttributeError):
CardNumber("5155901222280001", "a" * 101)
def test_get_as_dict():
object = CardNumber("5155901222280001", "customer_21081826")
assert {
"card_number": "5155901222280001",
"customer_id": "customer_21081826",
} == object.as_dict()
```
#### File: services/token/test_service.py
```python
import unittest
from unittest.mock import patch
import pytest
from pytest_mock import MockFixture
from getnet.services.token import Service, CardNumber
from getnet.services.token.card_token import CardToken
@pytest.fixture
def card_number():
return CardNumber("5155901222280001", "customer_21081826")
def test_token_generate(client_mock, card_number):
client_mock.post.return_value = {"number_token": "<PASSWORD>"}
service = Service(client_mock)
token = service.generate(card_number)
assert isinstance(token, CardToken)
assert "<PASSWORD>" == token.number_token
```
#### File: services/utils/test_device.py
```python
import ipaddress
import pytest
from getnet.services.utils import Device
class TestDevice:
def test_invalid_device_id(self):
with pytest.raises(TypeError):
Device("127.0.0.1", "A" * 81)
def test_invalid_ipaddress(self):
with pytest.raises(ipaddress.AddressValueError):
Device("127.0.0.300", "ABC")
def test_get_as_dict(self):
object = Device("127.0.0.3", "ABC")
assert {"ip_address": "127.0.0.3", "device_id": "ABC"} == object.as_dict()
``` |
{
"source": "7B-Things/braille-signage-generator",
"score": 3
} |
#### File: 7B-Things/braille-signage-generator/braille_signage_generator.py
```python
import cadquery as cq
# Change the text inside the quotes and a plate
# with the appropriate dots will be generated.
text = "cadquery"
# Braille dot geometry (do not change unless you understand the consequences)
horizontal_interdot = 2.5
vertical_interdot = 2.5
horizontal_intercell = 7.6
vertical_interline = 10.2
dot_height = 0.9
dot_diameter = 1.6
# The base plate thickness
base_thickness = 1.5
# Make sure that only lowercase letters are processed
text = text.lower()
# Map from character to positions.
# Based on https://en.wikipedia.org/wiki/Braille_Patterns
# The positions are
# as follows:
# 1 4
# 2 5
# 3 6
# 7 8
char_point_map = {
" ": "",
"a": "1",
"b": "1,2",
"c": "1,4",
"d": "1,4,5",
"e": "1,5",
"f": "1,2,4",
"g": "1,2,4,5",
"h": "1,2,5",
"i": "2,4",
"j": "2,4,5",
"k": "1,3",
"l": "1,2,3",
"m": "1,3,4",
"n": "1,3,4,5",
"o": "1,3,5",
"p": "1,2,3,4",
"q": "1,2,3,4,5",
"r": "1,2,3,5",
"s": "2,3,4",
"t": "2,3,4,5",
"u": "1,3,6",
"v": "1,2,3,6",
"w": "2,4,5,6",
"x": "1,4,3,6",
"y": "1,3,4,5,6",
"z": "1,3,5,6"
}
def get_plate_width(text):
"""
    Determines the width of the plate based on the number of characters.
"""
return (len(text) * (horizontal_interdot + horizontal_intercell))
def get_plate_height(text):
"""
    Determines the height of the plate based on the number of characters.
"""
max_len = max([len(t) for t in text])
return (2 * horizontal_interdot +
horizontal_interdot +
(max_len - 1) * horizontal_intercell)
# Generate the braille plate
plate = (cq.Workplane().rect(get_plate_width(text), get_plate_height(text) + vertical_interdot * 2.0, centered=False)
.extrude(base_thickness))
# Add the dot points to the plate
pnts = []
i = 1
offset = 0
for char in text:
locs = char_point_map[char]
loc_list = locs.split(",")
for loc in loc_list:
if loc == "1":
pnts.append((horizontal_interdot * i + offset, vertical_interdot * 4))
elif loc == "2":
pnts.append((horizontal_interdot * i + offset, vertical_interdot * 3))
elif loc == "3":
pnts.append((horizontal_interdot * i + offset, vertical_interdot * 2))
elif loc == "7":
pnts.append((horizontal_interdot * i + offset, vertical_interdot * 1))
elif loc == "4":
pnts.append((horizontal_interdot * i + horizontal_interdot + offset, vertical_interdot * 4))
elif loc == "5":
pnts.append((horizontal_interdot * i + horizontal_interdot + offset, vertical_interdot * 3))
elif loc == "6":
pnts.append((horizontal_interdot * i + horizontal_interdot + offset, vertical_interdot * 2))
elif loc == "8":
pnts.append((horizontal_interdot * i + horizontal_interdot + offset, vertical_interdot * 1))
offset += horizontal_intercell
i += 1
plate = plate.faces(">Z").workplane().pushPoints(pnts).circle(dot_diameter / 2.0).extrude(dot_height).faces(">Z").fillet(dot_diameter / 2.0 - 0.151)
show_object(plate)
``` |
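As a small, hypothetical sanity check of the `char_point_map` table above, the snippet below counts the raised dots each character needs; it assumes it is appended to the script after the map is defined:
```python
# Hypothetical check, appended after char_point_map is defined above.
word = "cadquery"
dots_per_char = {c: len(char_point_map[c].split(",")) if char_point_map[c] else 0
                 for c in word}
print(dots_per_char)                       # e.g. "c" -> 2 (dots 1 and 4)
print("total dots:", sum(dots_per_char.values()))
```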
{
"source": "7B-Things/push-button-switch",
"score": 2
} |
#### File: 7B-Things/push-button-switch/switch.py
```python
import cadquery as cq
#import component.leaf_spring
def make_leaf_spring():
result=cq
result=result.Workplane("XY").workplane(invert=True,centerOption="CenterOfBoundBox").tag("Change")
result=result.moveTo(-37.5,0).hLine(75,forConstruction=False).vLine(10,forConstruction=False).threePointArc(point1=(10,20),point2=(5,20),forConstruction=False).hLine(-10,forConstruction=False).threePointArc(point1=(-32.5,13),point2=(-37.5,10),forConstruction=False).close()
result=result.extrude(10,combine=True,clean=True,both=False,taper=0.0)
result=result.faces("<Y").workplane(invert=False,centerOption="CenterOfBoundBox")
result=result.circle(1.0,forConstruction=False)
result=result.cutThruAll(clean=True,taper=0.0)
return result
switch = cq.Assembly()
switch.add(make_leaf_spring(), name="leaf spring")
switch = switch.toCompound()
show_object(switch)
``` |
{
"source": "7-carmen/CineLaravel",
"score": 3
} |
#### File: CineLaravel/scraping/filmAffinity.py
```python
import sys
import time
import os
import traceback
import urllib.request, re
from bs4 import BeautifulSoup
# Function that prints a progress bar
def printProgress (iteration, total, prefix = '', suffix = '', decimals = 1, barLength = 100):
"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
barLength - Optional : character length of bar (Int)
"""
formatStr = "{0:." + str(decimals) + "f}"
percent = formatStr.format(100 * (iteration / float(total)))
filledLength = int(round(barLength * iteration / float(total)))
bar = '█' * filledLength + '-' * (barLength - filledLength)
sys.stdout.write('\r%s |%s| %s%s %s' % (prefix, bar, percent, '%', suffix)),
if iteration == total:
sys.stdout.write('\n')
sys.stdout.flush()
# Function that reads the contents of a movie-list file
def extraer_lista(file):
f = open(file, "r", encoding="utf8")
l = f.read()
f.close()
return l
# Function that extracts movies from the FilmAffinity search results page
def extraer_peliculas(titulo, director, anyo):
def abrir_url(url, file):
try:
f = urllib.request.urlretrieve(url, filename=file)
return file
except Exception:
print("Error al conectarse a la página de búsqueda de películas")
traceback.print_exc()
return None
fichero = "scraping/pelicula"
link = "http://www.filmaffinity.com/es/advsearch.php?stext="+titulo+"&stype[]=title&fromyear="+anyo+"&toyear="+anyo
if abrir_url(link, fichero):
l = extraer_lista(fichero)
if l:
soup = BeautifulSoup(l, 'html.parser')
peliculas = soup.findAll("div", "movie-card movie-card-1")
directorPelicula = ""
for p in peliculas:
for g in p.find("div", "mc-director").div.findAll("span","nb"):
if director!="":
directorPelicula = directorPelicula+", "+g.a.get('title')
else:
directorPelicula = g.a.get('title')
if director in directorPelicula:
titulo = p.find("div", "mc-title").a.string
link = p.find("div", "mc-title").a.get('href')
poster = p.find("div","mc-poster").img.get('src')
rating = p.find("div", "mr-rating").find("div","avgrat-box").string
info_peliculas = extraer_info_peliculas(titulo, link, poster, rating)
break
return info_peliculas
# Function that extracts a movie's details from its FilmAffinity page
def extraer_info_peliculas(titulo, link, poster, rating):
def abrir_url(url, file):
try:
f = urllib.request.urlretrieve(url, filename=file)
return file
except Exception:
print("Error al conectarse a la página de información de la película")
traceback.print_exc()
return None
fichero = "scraping/info_pelicula"
link = link
if abrir_url(link, fichero):
l = extraer_lista(fichero)
if l:
soup = BeautifulSoup(l, 'html.parser')
info_pelicula = soup.find("div", "z-movie").find("div", {"id": "left-column"}).find("dl", "movie-info")
actores = ""
director = ""
fecha = info_pelicula.find("dd", {"itemprop": "datePublished"}).string
try:
duracion = info_pelicula.find("dd", {"itemprop": "duration"}).string
except:
duracion = ""
for d in info_pelicula.find("dd", "directors").findAll("a"):
director = director + d.get('title') + ", "
director = director[0:len(director) - 2]
for a in info_pelicula.findAll("span", {"itemprop": "name"}):
actores = actores+a.string+", "
actores = actores[len(director)+2:len(actores)-2]
genero = info_pelicula.find("span", {"itemprop": "genre"}).a.string
try:
sipnosis = info_pelicula.find("dd", {"itemprop": "description"}).string
except:
sipnosis = ""
return [titulo, poster, rating, fecha, duracion, director, actores, genero, sipnosis]
# Generates the Movies table seeder
def moviesSeeder(peliculasArray):
moviesSeederFile = open("database/seeds/MoviesTableSeeder.php", "w", encoding="utf8")
lineasFichero = "<?php\n\n"\
"use App\movie;\n"\
"use Illuminate\Database\Seeder;\n\n"\
"class MoviesTableSeeder extends Seeder\n"\
"{\n"\
" /**\n"\
" * Run the database seeds.\n"\
" *\n"\
" * @return void\n"\
" */\n"\
" public function run()\n"\
" {\n"\
" movie::truncate();\n"
moviesSeederFile.write(lineasFichero)
for pelicula in peliculasArray:
lineasFichero = " DB::table('movies')->insert([\n"\
" 'cartel'=>'"+pelicula[1]+"',\n"\
" 'nombre'=>'"+pelicula[0]+"',\n"\
" 'duracion'=>'"+pelicula[4]+"',\n"\
" 'anyo'=>'"+pelicula[3]+"',\n"\
" 'sinopsis'=>'"+pelicula[8].replace("'","\\'")+"',\n"\
" 'rating'=>'"+pelicula[2]+"'\n"\
" ]);\n"
moviesSeederFile.write(lineasFichero)
lineasFichero = " }\n}"
moviesSeederFile.write(lineasFichero)
moviesSeederFile.close()
# Generates the Generos (genres) table seeder
def generosSeeder(peliculasArray):
generosSeederFile = open("database/seeds/GenerosTableSeeder.php", "w", encoding="utf8")
lineasFichero = "<?php\n\n"\
"use App\genero;\n"\
"use Illuminate\Database\Seeder;\n\n"\
"class GenerosTableSeeder extends Seeder\n"\
"{\n"\
" /**\n"\
" * Run the database seeds.\n"\
" *\n"\
" * @return void\n"\
" */\n"\
" public function run()\n"\
" {\n"\
" genero::truncate();\n"
generosSeederFile.write(lineasFichero)
generos_ya_incluidos = []
for pelicula in peliculasArray:
#time.sleep(5)
if pelicula[7] not in generos_ya_incluidos:
lineasFichero = " DB::table('generos')->insert([\n"\
" 'nombre'=>'"+pelicula[7]+"'\n"\
" ]);\n"
generos_ya_incluidos.append(pelicula[7])
generosSeederFile.write(lineasFichero)
lineasFichero = " }\n}"
generosSeederFile.write(lineasFichero)
generosSeederFile.close()
# Generates the People table seeder
def peopleSeeder(peliculasArray):
peopleSeederFile = open("database/seeds/PeopleTableSeeder.php", "w", encoding="utf8")
lineasFichero = "<?php\n\n"\
"use App\people;\n"\
"use Illuminate\Database\Seeder;\n\n"\
"class PeopleTableSeeder extends Seeder\n"\
"{\n"\
" /**\n"\
" * Run the database seeds.\n"\
" *\n"\
" * @return void\n"\
" */\n"\
" public function run()\n"\
" {\n"\
" people::truncate();\n"
peopleSeederFile.write(lineasFichero)
personas_ya_incluidas = []
for pelicula in peliculasArray:
directores = pelicula[5].split(", ")
actores = pelicula[6].split(", ")
        # Add each movie's directors
for director in directores:
if director not in personas_ya_incluidas:
lineasFichero = " DB::table('people')->insert([\n"\
" 'name'=>'"+director.replace("'","\\'")+"',\n"\
" 'imagen'=>''\n"\
" ]);\n"
personas_ya_incluidas.append(director)
peopleSeederFile.write(lineasFichero)
        # Add each movie's actors
for actor in actores:
if actor not in personas_ya_incluidas:
lineasFichero = " DB::table('people')->insert([\n"\
" 'name'=>'"+actor.replace("'","\\'")+"',\n"\
" 'imagen'=>''\n"\
" ]);\n"
personas_ya_incluidas.append(actor)
peopleSeederFile.write(lineasFichero)
lineasFichero = " }\n}"
peopleSeederFile.write(lineasFichero)
peopleSeederFile.close()
# Script entry point: run the scraping
peliculas = extraer_lista("scraping/peliculas.txt")
i = 0
printProgress(i, 19, prefix='Progress:', suffix='Complete', barLength=50)
num_errores = 0
f = open("scraping/peliculas_extraidas.txt", "w", encoding="utf8")
peliculasArray = peliculas.splitlines()
resultado_extraccion = []
for pelicula in peliculasArray:
#time.sleep(5)
titulo = eval(pelicula)[0].replace(" ","%20")
anyo = eval(pelicula)[1]
director = eval(pelicula)[2]
try:
extraccion = extraer_peliculas(titulo, director, anyo)
resultado_extraccion.append(extraccion)
f.write(str(extraccion))
f.write("\n")
except UnboundLocalError:
num_errores = num_errores + 1
printProgress(i, 19, prefix='Progress:', suffix='Complete', barLength=50)
i = i + 1
f.close()
print("\nNúmero de errores: "+str(num_errores))
# Write the Seeder files
moviesSeeder(resultado_extraccion)
generosSeeder(resultado_extraccion)
peopleSeeder(resultado_extraccion)
# Remove the auxiliary files
os.remove("scraping/info_pelicula")
os.remove("scraping/pelicula")
``` |
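The format of scraping/peliculas.txt is not included in this excerpt; judging from the `eval()` calls in the loop above, each line appears to be a Python literal holding title, year and director. The line below is an invented example of that assumed format:
```python
# Hypothetical line of scraping/peliculas.txt, matching how the loop above parses it.
pelicula = '("The Matrix", "1999", "Lilly Wachowski")'
titulo = eval(pelicula)[0].replace(" ", "%20")  # -> "The%20Matrix"
anyo = eval(pelicula)[1]                        # -> "1999"
director = eval(pelicula)[2]                    # -> "Lilly Wachowski"
```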
{
"source": "7CD/LambdaMART",
"score": 2
} |
#### File: src/data/dataset.py
```python
import os
import tarfile
import gdown
import numpy as np
import pandas as pd
from scipy.sparse import csr_matrix, save_npz, load_npz
from sklearn.datasets import load_svmlight_file
def download_dataset(url, path):
if not os.path.exists(path):
gdown.download(url, path, quiet=False)
def process_dataset(raw_dataset_path, processed_dataset_dir):
train_csr_path = os.path.join(processed_dataset_dir, 'train_csr.npz')
train_y_path = os.path.join(processed_dataset_dir, 'train_y.npy')
train_qid_path = os.path.join(processed_dataset_dir, 'train_qid.npy')
test_csr_path = os.path.join(processed_dataset_dir, 'test_csr.npz')
test_y_path = os.path.join(processed_dataset_dir, 'test_y.npy')
test_qid_path = os.path.join(processed_dataset_dir, 'test_qid.npy')
if all(map(os.path.exists, [train_csr_path, train_y_path, train_qid_path, \
test_csr_path, test_y_path, test_qid_path])):
return
tar = tarfile.open(raw_dataset_path, "r:gz")
raw_dataset_dir_path = os.path.dirname(raw_dataset_path)
tar.extractall(raw_dataset_dir_path)
tar.close()
extracted_dir_path = os.path.join(raw_dataset_dir_path, 'l2r')
train_data_path = os.path.join(extracted_dir_path, 'train.txt.gz')
train_csr, train_y, train_qid = load_svmlight_file(train_data_path, query_id=True)
# There are some invalid samples in training data
docs_by_query = pd.DataFrame({'doc_index' : np.arange(len(train_y)),
'labels' : train_y,
'query' : train_qid},
index=train_qid)
good_indexes = []
for query in set(train_qid):
try:
if len(set(docs_by_query.loc[query].values[:, 1])) > 1:
good_indexes.extend(docs_by_query.loc[query, 'doc_index'].values)
except:
continue
train_csr = train_csr[good_indexes]
train_qid = train_qid[good_indexes]
train_y = train_y[good_indexes]
test_data_path = os.path.join(extracted_dir_path, 'test.txt.gz')
test_csr, test_y, test_qid = load_svmlight_file(test_data_path, query_id=True)
save_npz(train_csr_path, train_csr)
np.save(train_y_path, train_y)
np.save(train_qid_path, train_qid)
save_npz(test_csr_path, test_csr)
np.save(test_y_path, test_y)
np.save(test_qid_path, test_qid)
def get_dataset(url, raw_dataset_path, processed_dataset_dir):
download_dataset(url, raw_dataset_path)
process_dataset(raw_dataset_path, processed_dataset_dir)
train_csr_path = os.path.join(processed_dataset_dir, 'train_csr.npz')
train_y_path = os.path.join(processed_dataset_dir, 'train_y.npy')
train_qid_path = os.path.join(processed_dataset_dir, 'train_qid.npy')
test_csr_path = os.path.join(processed_dataset_dir, 'test_csr.npz')
test_y_path = os.path.join(processed_dataset_dir, 'test_y.npy')
test_qid_path = os.path.join(processed_dataset_dir, 'test_qid.npy')
train_csr = load_npz(train_csr_path)
train_y = np.load(train_y_path)
train_qid = np.load(train_qid_path)
test_csr = load_npz(test_csr_path)
test_y = np.load(test_y_path)
test_qid = np.load(test_qid_path)
return (train_csr, train_y, train_qid), (test_csr, test_y, test_qid)
```
#### File: src/kaggle/utils.py
```python
import kaggle
import numpy as np
import pandas as pd
def order_docs(doc_scores, qids):
assert len(doc_scores) == len(qids)
doc_ids = np.arange(1, len(doc_scores) + 1)
ordered_docs = np.zeros(len(doc_scores), dtype=np.int32)
qid_prev = qids[0]
i_prev = 0
for i, qid_i in enumerate(qids):
if qid_i != qid_prev:
ordered_docs[i_prev:i] = doc_ids[np.argsort(-doc_scores[i_prev:i]) + i_prev]
i_prev = i
qid_prev = qid_i
ordered_docs[i_prev:] = doc_ids[np.argsort(-doc_scores[i_prev:]) + i_prev]
return ordered_docs
def submit(prediction, submission_path):
sample = pd.read_csv('data/raw/l2r/sample.made.fall.2019')
docs = order_docs(prediction, sample['QueryId'].values)
sample['DocumentId'] = docs
sample.to_csv(submission_path, index=False)
kaggle.api.competition_submit(submission_path, '', 'learning-to-rank-made-fall-2019')
```
#### File: src/pipelines/evaluate.py
```python
import argparse
import json
import os
import joblib
import numpy as np
from scipy.sparse import load_npz
import yaml
from src.evaluate.evaluate import evaluate
def evaluate_model(config_path):
config = yaml.safe_load(open(config_path))
model = joblib.load(config['evaluate']['model_path'])
X = load_npz(config['evaluate']['X_path'])
y = np.load(config['evaluate']['y_path'])
qid = np.load(config['evaluate']['qid_path'])
ndcg = evaluate(model, X, y, qid)
metrics_path = config['evaluate']['metrics_path']
json.dump(
obj={'ndcg': ndcg},
fp=open(metrics_path, 'w')
)
print(f'NDCG score file saved to : {metrics_path}')
if __name__ == '__main__':
args_parser = argparse.ArgumentParser()
args_parser.add_argument('--config', dest='config', required=True)
args = args_parser.parse_args()
evaluate_model(config_path=args.config)
```
#### File: src/ranking/utils.py
```python
import numpy as np
def group_by_ids(a, group_ids):
a, ids = np.array(a), np.array(group_ids)
id_bounds_idxs = [0]
id_bounds_idxs.extend((ids[1:] != ids[:-1]).nonzero()[0] + 1)
id_bounds_idxs.append(len(ids))
a_layed_out = []
for i in range(len(id_bounds_idxs) - 1):
a_layed_out.append(a[id_bounds_idxs[i] : id_bounds_idxs[i + 1]])
return a_layed_out
``` |
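A brief, hypothetical usage of `group_by_ids` above (the import path is assumed to mirror the other `src.*` imports): it splits an array into per-query chunks wherever consecutive group ids change:
```python
# Hypothetical usage of group_by_ids; the data values are made up.
import numpy as np
from src.ranking.utils import group_by_ids

scores = [0.2, 0.9, 0.1, 0.7, 0.3]
qids = [101, 101, 102, 102, 102]
chunks = group_by_ids(scores, qids)
print([c.tolist() for c in chunks])  # -> [[0.2, 0.9], [0.1, 0.7, 0.3]]
```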
{
"source": "7clem/discordBots",
"score": 3
} |
#### File: 7clem/discordBots/devine.py
```python
import discord
import random
import sqlite3
import sys
import math
#from botStateMachine import BotStateMachine
def listeNombres(s):
mots = s.split()
print(mots)
nombres = []
for m in mots:
try:
n = int(m.strip('.,!?'))
print("trouvé", n)
nombres.append(n)
except:
pass
return nombres
def contient(chaine, sub):
if type(sub) == str:
return chaine.find(sub) != -1
else:
for s in sub:
if chaine.find(s) != -1:
return True
return False
class DevineJoueur(discord.Client):
def __init__(self):
super().__init__()
self.intervale = []
self.essai = 0
self.joue = False
async def on_ready(self):
print("Le bot discord est actif.")
def divise(self):
self.essai = math.floor(sum(self.intervale) / 2)
def tropHaut(self):
self.intervale = [self.intervale[0], self.essai]
def tropBas(self):
self.intervale = [self.essai, self.intervale[1]]
def lireIntervale(self, s):
self.joue = True
nbs = listeNombres(s)
if len(nbs) == 2:
self.intervale = nbs
self.joue = True
self.divise()
async def on_message(self, msg):
if (msg.author == self.user):
return
cont = msg.content.lower()
print(cont)
prefix = "Ok, j'ai choisi un nombre au hazard entre ".lower()
if cont.startswith(prefix):
self.lireIntervale(cont)
elif contient(cont,"gagné") or contient(cont, "trouvé"):
self.joue = False
return
else:
if cont.isnumeric():
self.essai = int(cont)
                if self.essai > self.intervale[0]:
                    self.intervale[0] = self.essai
                if self.essai > self.intervale[1]:
                    self.intervale[1] = self.essai
self.divise()
elif contient(cont, ["haut", "élévé"]) or cont == 'h':
self.tropHaut()
elif contient(cont, ["bas", "petit"]) or cont == 'b':
self.tropBas()
self.divise()
await msg.channel.send(str(self.essai))
def __str__(self):
i = str(self.intervale)
e = str(self.essai)
j = str(self.joue)
return f"DevineJoueur intervale={i}, essai={e}, joue={j}"
if __name__ == "__main__":
bot = DevineJoueur()
#etat = BotStateMachine(bot)
discordBotToken = ""
with open(sys.argv[1]) as dbtFile:
discordBotToken = dbtFile.readlines()[0]
bot.run(discordBotToken)
``` |
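To clarify how the bot seeds its search interval, here is a hypothetical call to `listeNombres` with the kind of prompt `on_message` looks for (the exact wording of the other bot's message is assumed):
```python
# Hypothetical illustration of listeNombres; the prompt text is assumed.
msg = "Ok, j'ai choisi un nombre au hazard entre 1 et 100. Devine !"
print(listeNombres(msg.lower()))  # -> [1, 100]
```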
{
"source": "7CLewis/PiRate",
"score": 4
} |
#### File: PiRate/backend/splitTests.py
```python
class dictRules:
    """
    A special class for getting and setting multiple dictionaries
    simultaneously. This class is not meant to be instantiated
    on its own, but rather in response to a slice operation on UniformDictList.
    """
    def __init__(self, parent, slice):
        self.parent = parent
        self.range = range(*slice.indices(len(parent)))
    def keys(self):
        # Keys shared by every dictionary covered by the slice
        keySet = None
        for i in self.range:
            keys = set(self.parent[i].keys())
            keySet = keys if keySet is None else keySet & keys
        return keySet if keySet is not None else set()
    def __getitem__(self, key):
        return [self.parent[k][key] for k in self.range]
    def __setitem__(self, key, val):
        for k in self.range:
            self.parent[k][key] = val
    def update(self, *args, **kargs):
        for k in self.range:
            self.parent[k].update(*args, **kargs)
class UniformDictList(list):
    def __getitem__(self, key):
        # Slicing returns a dictRules view; integer indexing falls back to list behaviour
        if isinstance(key, slice):
            return dictRules(self, key)
        return super().__getitem__(key)
```
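A minimal usage sketch of the two classes above (relying on the dunder-based indexing fixed here): a slice of a UniformDictList yields a dictRules view that reads and writes the same key across several dictionaries at once. The sample data is invented:
```python
# Hypothetical usage of UniformDictList / dictRules; the data values are made up.
rows = UniformDictList([{"host": "10.0.0.1", "open": 3},
                        {"host": "10.0.0.2", "open": 5},
                        {"host": "10.0.0.3", "open": 0}])

view = rows[0:2]            # dictRules view over the first two dicts
print(view["host"])         # -> ['10.0.0.1', '10.0.0.2']
view["scanned"] = True      # sets the key on both dicts at once
print(sorted(view.keys()))  # keys shared by the sliced dicts
```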
#### File: PiRate/controller/main.py
```python
from datetime import datetime
import os
import sys
import json
# import every '.py' in the 'tests' directory and record its name in 'tests' as a string
dir = os.path.dirname(__file__) # Get the current directory of this file
tests = [x[:-3] for x in os.listdir(os.path.join(dir,"tests")) if x.endswith(".py")]
tests.remove("__init__") # __init__ is not a scan!
for test in tests: exec("from tests." + test + " import " + test)
# these globals are needed for the test_list function because of how exec() works
name = ""
desc = ""
oses = ""
test_val = ""
test_desc = ""
ip = ""
def main():
"""
Manually run tests and view the results.
Run this file with the '--help' flag to see the usage guide.
"""
args = sys.argv[1:] # ignore the filename arg
if not args or args[0] == "--help":
print("Manual scanning:")
print(" --list: Print out a list of runnable tests")
print(" --ip ip_addr test_id0 test_id1 ...: Scan the host at ip_addr with tests id0, id1, ... and print the results")
elif args[0] == "--list":
        # json.dumps on the full list already yields a proper JSON array
        # (and avoids shadowing the built-in 'list')
        print(json.dumps(test_list()))
elif args[0] == "--ip":
ip = args[1]
test_ids = args[2].split(',')
results = execute_tests(ip, test_ids) # Execute the tests and store the results
print(json.dumps(results))
def test_list():
"""
Retrieve a list of tests as json objects.
See the README for details on the formatting.
"""
test_json_objs = []
for test_id, test_name in enumerate(tests):
exec("name = "+test_name+".name", globals())
exec("desc = "+test_name+".desc", globals())
exec("oses = "+test_name+".oses", globals())
test = {
"test_id": str(test_id),
"test_name": name,
"test_description": desc,
"operating_systems": oses,
}
test_json_objs.append(test)
return test_json_objs
def execute_tests(ip, test_ids):
"""
Execute a list of tests and return a list of json results.
See the README for details on the format of the json results.
@param ip The ip address of the host to scan
@param test_ids A list of ids of tests to run, must correspond to ids returned by test_list()
"""
results = []
for result_id, test_id in enumerate(test_ids):
test_name = tests[int(test_id)]
exec("test_val, test_desc = " + test_name + ".scan('" + ip + "')", globals())
test_result = {
"id": str(result_id+1),
"test_id": str(test_id),
"value": test_val,
"description": test_desc,
"date": datetime.now().strftime("%b %d %Y %H:%M:%S"),
}
results.append(test_result)
return results
if __name__ == "__main__":
main()
```
#### File: controller/tests/bluekeep.py
```python
import subprocess
import os
class bluekeep:
"""
Scan object for the bluekeep vulnerability (CVE 2019-0708).
"""
name = "Bluekeep"
desc = "Scans for the bluekeep vulnerability"
oses = "Windows"
def scan (host_ip):
"""
Scan for the Bluekeep vulnerability according to the Pirate Scan API.
@param host_ip The ip address of the host to scan for the vulnerability
"""
scriptpath = os.path.dirname(os.path.realpath(__file__))
rdpscan_cmd = scriptpath + "/rdpscan"
output = subprocess.run([rdpscan_cmd, "-v", host_ip], stdout=subprocess.PIPE)
output = output.stdout.decode("UTF-8")
if "VULNERABLE" in output:
return ("Failure.", "Host is vulnerable, rdpscan output: " + output)
else:
return ("Success.", "Host is not vulnerable, rdpscan output: " + output)
```
#### File: controller/tests/eternal_blue.py
```python
import nmap
class eternal_blue:
"""
Scan object for the eternal blue vulnerability (CVE MS17-010).
"""
name = "Eternal Blue"
desc = "Scans for the eternal blue vulnerability"
oses = "Windows"
def scan (host_ip):
"""
Scan for the Eternal Blue vulnerability according to the Pirate Scan API.
@param host_ip The ip address of the host to scan for the vulnerability
"""
scanner = nmap.PortScanner()
scanner.scan(hosts=host_ip, ports="445", arguments="--script smb-vuln-ms17-010")
if not scanner.all_hosts():
return ("Failure", "No hosts detected.")
host = scanner.all_hosts()[0]
if "hostscript" not in scanner[host].keys():
return ("Success", "Host is not vulnerable to Eternal Blue (CVE MS17-010).")
else:
nmap_output = scanner[host]["hostscript"][0]["output"]
return ("Failure", "Host is vulnerable to Eternal Blue (CVE MS17-010).\n" + nmap_output)
```
#### File: controller/tests/full_port_scan.py
```python
import nmap
class full_port_scan:
"""
Pirate scan object for all TCP ports (0-65535).
"""
name = "Full Port Scan"
desc = "Scan TCP ports 0-65535 and interrogate those open ports for the services running on them."
oses = "Any"
def scan(host_ip):
"""
Scan all TCP ports (0-65535) and determine if they are open.
If they are open, they are interrogated for the services running on those ports.
@param host_ip The ip address of the host to scan
"""
scanner = nmap.PortScanner()
scanner.scan(host_ip, "0-65535")
if not scanner.all_hosts():
return ("Failure", "Host cannot be found.")
host = scanner.all_hosts()[0]
if "tcp" not in scanner[host].all_protocols():
return ("Success", "No open TCP ports found on host.")
open_ports = {}
scanned_ports = scanner[host]["tcp"].keys()
for port in scanned_ports:
port_name = scanner[host]["tcp"][port]["name"]
port_prod = scanner[host]["tcp"][port]["product"]
open_ports[port] = port_name + ", " + port_prod
return ("Success", "Open TCP ports: " + str(open_ports))
``` |
{
"source": "7cm-diameter/bnet",
"score": 3
} |
#### File: bnet/bnet/actor.py
```python
import numpy as np
from nptyping import NDArray
import bnet.utype as ut
class UniformActor(ut.Actor):
def __init__(self, response_times: ut.ResponseTimes):
self.__rt = response_times
def choose_action(self, weights: NDArray[1, float], *args,
**kwargs) -> ut.Node:
probs = ut.ChoiceMethod.Uniform(weights)
n = len(weights)
return np.random.choice(n, p=probs)
def take_time(self, i: ut.Node) -> ut.ResponseTime:
return self.__rt[i]
class SoftmaxActor(ut.Actor):
def __init__(self, response_times: ut.ResponseTimes):
self.__rt = response_times
def choose_action(self, weights: NDArray[1, float], *args,
**kwargs) -> ut.Node:
beta = kwargs.get("beta", 1.)
probs = ut.ChoiceMethod.Softmax(weights, beta)
n = len(weights)
return np.random.choice(n, p=probs)
def take_time(self, i: ut.Node) -> ut.ResponseTime:
return self.__rt[i]
class PropotionalActor(ut.Actor):
def __init__(self, response_times: ut.ResponseTimes):
self.__rt = response_times
def choose_action(self, weights: NDArray[1, float], *args,
**kwargs) -> ut.Node:
probs = ut.ChoiceMethod.Propotional(weights)
n = len(weights)
return np.random.choice(n, p=probs)
def take_time(self, i: ut.Node) -> ut.ResponseTime:
return self.__rt[i]
```
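A minimal sketch of how the actors above are used on their own: an actor is constructed with per-node response times, turns a weight vector into choice probabilities, and reports the response time of the chosen node. The numbers are invented:
```python
# Hypothetical usage of SoftmaxActor; weights and response times are made up.
import numpy as np
from bnet.actor import SoftmaxActor

actor = SoftmaxActor(np.array([0.5, 1.0, 1.5]))  # response time per node
weights = np.array([2.0, 1.0, 0.5])

node = actor.choose_action(weights, beta=2.0)    # node index sampled from a softmax over the weights
print(node, actor.take_time(node))
```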
#### File: bnet/bnet/utype.py
```python
from abc import ABCMeta, abstractmethod
from enum import Enum
from typing import Any, Dict, List, Tuple
from nptyping import NDArray
from bnet.util import propotional_allocation, softmax, uniform
Node = int
Edge = Tuple[Node, Node]
Path = List[Node]
Weight = float
WeightVector = NDArray[1, Weight]
WeightMatrix = NDArray[2, Weight]
RewardValue = float
RewardValues = NDArray[1, RewardValue]
ResponseTime = float
ResponseTimes = NDArray[1, ResponseTime]
Action = int
Parameters = Dict[str, Any]
class ChoiceMethod(Enum):
Uniform = uniform
Propotional = propotional_allocation
Softmax = softmax
class Network(metaclass=ABCMeta):
@abstractmethod
def construct_network(self, weights: WeightMatrix, method: ChoiceMethod,
*args, **kwargs):
pass
@abstractmethod
def find_path(self, start: Node, goal: Node) -> List[Path]:
pass
@property
@abstractmethod
def network(self) -> "Network":
pass
class Learner(metaclass=ABCMeta):
@abstractmethod
def update(self, i: Node, j: Node, reward: RewardValue, *args, **kwargs):
pass
@property
@abstractmethod
def weights(self) -> WeightMatrix:
pass
class Actor(metaclass=ABCMeta):
@abstractmethod
def choose_action(self, weights: NDArray[1, float], *args,
**kwargs) -> Node:
pass
@abstractmethod
def take_time(self, i: Node) -> ResponseTime:
pass
class Agent(Network, Learner, Actor):
pass
```
#### File: bnet/hbtfmt/model.py
```python
from typing import List
import bnet.utype as ut
import numpy as np
from bnet.actor import PropotionalActor
from bnet.learner import SarsaLearner, TDLearner
from bnet.network import WeightAgnosticNetwork
from bnet.util import exp_rng
from nptyping import NDArray
class HQAgent(ut.Agent):
def __init__(self, q_opeant: float, q_other: float, n: int):
q_ = np.full(n, q_other)
q_[0] = q_opeant
self.__weights: NDArray[2, float] = np.outer(q_, q_)
self.__actor = PropotionalActor(np.zeros(n))
self.__net = WeightAgnosticNetwork()
def update(self):
pass
@property
def weights(self) -> ut.WeightMatrix:
return self.__weights
def construct_network(self, mindeg: int):
self.__net.construct_network(mindeg, self.weights,
ut.ChoiceMethod.Propotional)
def find_path(self, start: ut.Node, goal: ut.Node) -> List[ut.Path]:
return self.__net.find_path(start, goal)
@property
def network(self) -> ut.Network:
return self.__net
def choose_action(self, weights: ut.RewardValues) -> ut.Node:
return self.__actor.choose_action(weights)
def take_time(self, i: ut.Node) -> ut.ResponseTime:
return self.__actor.take_time(i)
class TDAgent(ut.Agent):
def __init__(self, n: int, alpha: float, rt: float):
self.__learner = TDLearner(n, alpha)
rts = exp_rng(rt, n, 0.1)
self.__actor = PropotionalActor(rts)
self.__net = WeightAgnosticNetwork()
def update(self, i: ut.Node, j: ut.Node, reward: ut.RewardValue):
self.__learner.update(i, j, reward)
@property
def weights(self) -> ut.WeightMatrix:
return self.__learner.weights
def construct_network(self, mindeg: int):
self.__net.construct_network(mindeg, self.weights,
ut.ChoiceMethod.Propotional)
def find_path(self, start: ut.Node, goal: ut.Node) -> List[ut.Path]:
return self.__net.find_path(start, goal)
@property
def network(self) -> ut.Network:
return self.__net
def choose_action(self, weights: ut.RewardValues) -> ut.Node:
return self.__actor.choose_action(weights)
def take_time(self, i: ut.Node) -> ut.ResponseTime:
return self.__actor.take_time(i)
class SarsaAgent(ut.Agent):
def __init__(self, n: int, alpha: float, rt: float):
self.__learner = SarsaLearner(n, alpha, 0.9)
rts = exp_rng(rt, n, 0.1)
self.__actor = PropotionalActor(rts)
self.__net = WeightAgnosticNetwork()
def update(self, i: ut.Node, j: ut.Node, reward: ut.RewardValue,
k: ut.Node):
rt = self.take_time(k)
self.__learner.update(i, j, reward, rt, k)
@property
def weights(self) -> ut.WeightMatrix:
return self.__learner.weights
def construct_network(self, mindeg: int):
self.__net.construct_network(mindeg, self.weights,
ut.ChoiceMethod.Propotional)
def find_path(self, start: ut.Node, goal: ut.Node) -> List[ut.Path]:
return self.__net.find_path(start, goal)
@property
def network(self) -> ut.Network:
return self.__net
def choose_action(self, weights: ut.RewardValues) -> ut.Node:
return self.__actor.choose_action(weights)
def take_time(self, i: ut.Node) -> ut.ResponseTime:
return self.__actor.take_time(i)
```
#### File: hbtfmt/sims/sarsa_variable_interval.py
```python
import numpy as np
from bnet.env import ConcurrentSchedule
from hbtfmt.model import SarsaAgent
from hbtfmt.util import to_onehot
def train_under_variable_interval(agent: SarsaAgent, env: ConcurrentSchedule):
n, _ = agent.weights.shape
# make the value of rewards uniform so that the agent explores randomly
pseudo_rewards = np.ones(n)
prev_response = agent.choose_action(pseudo_rewards)
curr_response = agent.choose_action(pseudo_rewards)
while not env.finished():
onehot_action = to_onehot(curr_response, n)
stepsize = onehot_action.copy()
stepsize[0] = agent.take_time(curr_response)
rewards = env.step(stepsize, onehot_action)
reward = rewards[curr_response]
next_action = agent.choose_action(pseudo_rewards)
agent.update(prev_response, curr_response, reward, next_action)
prev_response = curr_response
curr_response = next_action
return None
if __name__ == '__main__':
from typing import List, Tuple
from bnet.env import FixedRatio, VariableInterval
from bnet.util import create_data_dir
from hbtfmt.util import (ConfigClap, format_data2, free_access,
generate_rewards, parameter_product2)
clap = ConfigClap()
config = clap.config()
params = parameter_product2(config)
data_dir = create_data_dir(__file__, "bnet")
filepath = data_dir.joinpath(config.filename)
results: List[Tuple[float, int, float, int, int, float, float]] = []
for param in params:
interval, n, ro, training = param
rewards_baseline = generate_rewards(1., ro, n)
rewards_test = generate_rewards(0., ro, n)
print(
f"interval = {interval}, n = {n}, ro = {ro}, training = {training}"
)
for i in range(config.loop_per_condition):
agent = SarsaAgent(n, 0.01, 2.5)
variable_interval = VariableInterval(interval, training, .1, 1.)
fixed_ratio = [FixedRatio(1, 10000, ro) for _ in range(n - 1)]
concurrent_schedule = ConcurrentSchedule([variable_interval] +
fixed_ratio)
train_under_variable_interval(agent, concurrent_schedule)
agent.construct_network(2)
operant_baseline, total_baseline = \
free_access(agent, rewards_baseline,
n, config.loop_per_simulation)
operant_test, total_test = \
free_access(agent, rewards_test,
n, config.loop_per_simulation)
            proportion_baseline = operant_baseline / total_baseline
            proportion_test = operant_test / total_test
            operant_degree = int(agent.network.degree[0])
            median_degree = int(np.median(agent.network.degree))
            result = param + (operant_degree, median_degree,
                              proportion_baseline, proportion_test)
results.append(result)
output_data = format_data2(results)
output_data.to_csv(filepath, index=False)
``` |
{
"source": "7cm-diameter/CDRL",
"score": 2
} |
#### File: CDRL/cdrl/run_qlearning.py
```python
if __name__ == '__main__':
from argparse import ArgumentParser
from pathlib import Path
import numpy as np
from numpy.typing import NDArray
from pandas import DataFrame
from cdrl.env import NArmBandit
from cdrl.model import QLearner
from cdrl.simulation import Result, run_block
parser = ArgumentParser()
parser.add_argument("--file", "-f")
parser.add_argument("--alpha", "-a", type=float, default=0.1)
parser.add_argument("--beta", "-b", type=float, default=1.)
parser.add_argument("--weight", "-w", type=float, default=2.)
args = parser.parse_args()
NUMBER_OF_ARMS = 2
BLOCKSIZE = 150
probs = np.array([[1., 1.], [1., 0.], [0., 0.], [0.5, 0.5]])
agent = QLearner(args.alpha, args.beta, NUMBER_OF_ARMS)
env = NArmBandit(NUMBER_OF_ARMS)
def run(prob: NDArray[np.float_]) -> Result:
env.set_probs(prob)
return run_block(agent, env, BLOCKSIZE)
result = np.vstack(list(map(run, probs)))
ret = DataFrame(result,
columns=[
"lprob", "rprob", "reward", "action", "lq", "rq", "lc",
"rc", "lp", "rp"
])
datadir = Path(__file__).absolute().parent.parent.joinpath("data")
if not datadir.exists():
datadir.mkdir()
datapath = datadir.joinpath(args.file)
ret.to_csv(datapath, index=False)
``` |
{
"source": "7cm-diameter/dlsys",
"score": 3
} |
#### File: dlsys/dlsys/model.py
```python
from math import isclose
from typing import Any, List, Optional, Tuple
import numpy as np
from nptyping import NDArray
Buffer = NDArray[1, Any]
class FIFOMemory(object):
def __init__(self, memsize: int):
self.__memsize = memsize
self.__usage = 0
self.__buffer: Buffer = np.array([])
@property
def memsize(self) -> int:
return self.__memsize
@property
def contents(self) -> Buffer:
return self.__buffer
def add(self, new_val: Any):
if self.__usage >= self.__memsize:
self.__buffer = np.delete(self.__buffer, 0)
self.__usage -= 1
self.__buffer = np.append(self.__buffer, new_val)
self.__usage += 1
class DualSysyem(object):
def __init__(self, theta: float, alpha_p: float, alpha_n: float,
tau: float, I: float, C: float, memsize: int,
cycle_length: int):
# model parameters
self.theta = theta
self.alpha_p = alpha_p
self.alpha_n = alpha_n
self.tau = tau
self.I = I
self.C = C
        self.__memsize = memsize
        self.__cycle_length = cycle_length
# hidden variables
self.__rk = 0.
self.__rk_bar = 0.
self.__gk = 0.
self.__hkt = 0.
self.__hk = 0.
# memories
self.__behavior_memory = FIFOMemory(memsize)
self.__reward_memory = FIFOMemory(memsize)
# Used in the second term of the right-hand side of Eq. 6
self.__hkt_memory = FIFOMemory(cycle_length)
self.__k = -memsize + 1
self.__t = 0.
self.__response_count = 0
self.__reward_count = 0
@property
def k(self) -> int:
if self.__k < 0:
return 0
return self.__k
@property
def rk(self) -> float:
return self.__rk
@property
def rk_bar(self) -> float:
return self.__rk_bar
@property
def gk(self) -> float:
return self.__gk
@property
def hkt(self) -> float:
return self.__hkt
@property
def hk(self) -> float:
return self.__hk
@property
def t(self) -> float:
return self.__t
def update_memory(self, b: int, r: int):
self.__behavior_memory.add(b)
self.__reward_memory.add(r)
self.__k += 1
def update_rk(self):
# eq. 1
b = self.__behavior_memory.contents
r = self.__reward_memory.contents
b_bar = np.mean(b)
r_bar = np.mean(r)
b_sd = np.std(b)
r_sd = np.std(r)
        n = self.__memsize
rk = np.sum((b - b_bar) * (r - r_bar)) / (n * b_sd * r_sd)
if np.isnan(rk):
rk = 0.
self.__rk = rk
return rk
def update_rk_bar(self):
# eq. 2
if self.__k > 0:
beta = 1 / self.__k
self.__rk_bar += beta * (self.__rk - self.__rk_bar)
def update_gk(self):
# eq. 3
rk = self.__rk
rk_bar = self.__rk_bar
self.__gk = self.theta * rk + (1 - self.theta) * rk_bar
def update_hkt(self, PE: float):
# eq. 4
alpha = self.alpha_n
if PE > 0:
alpha = self.alpha_p
self.__hkt += alpha * PE
self.__hkt_memory.add(self.__hkt)
def compute_prediction_error(self, reward: float) -> float:
# eq. 5
if reward > 0:
self.__reward_count += 1
return reward - (self.__hkt + self.__gk)
def update_hk(self):
# eq. 6
# the implementation was made to fit the expected model behavior
# since `h_k` diverges when implemented as described in the paper.
# following two lines are the implementation as described in the paper.
# sum_hkt_in_cycle = np.sum(self.__hkt_memory.contents)
# self.__hk += sum_hkt_in_cycle
# this implementation shows the expected behavior
self.__hk = self.__hkt
def compute_response_probability(self) -> float:
p = 1 / (1 + np.exp(-self.tau *
(self.I * self.__gk + self.__hk - self.C)))
return p
def emit_response(self, p: float) -> bool:
response = np.random.uniform() <= p
self.__response_count += response
return response
def step(self, step_size: float) -> Optional[Tuple[float, float]]:
self.__t += step_size
        if self.__t >= self.__cycle_length:
b, r = self.__response_count, self.__reward_count
self.update_memory(b, r)
self.update_rk()
self.update_rk_bar()
self.update_gk()
self.update_hk()
self.__response_count = 0
self.__reward_count = 0
self.__t = 0.
return self.__gk, self.__hk
return None
def learn_from_dataset(self, dataset: List[Tuple[float, float]],
stepsize: float):
row_of_result: List[Tuple[float, float]] = []
for irt, reward in dataset:
while not isclose(irt, 0., abs_tol=1e-3) and irt > 0.:
irt -= stepsize
if isclose(irt, 0, abs_tol=1e-3) or irt <= 0.:
_ = self.emit_response(1.)
rpe = self.compute_prediction_error(reward)
else:
rpe = 0.
self.update_hkt(rpe)
gk_hk = self.step(stepsize)
if gk_hk is not None:
row_of_result.append(gk_hk)
return row_of_result
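# Illustrative usage sketch (the parameter values below are assumptions, not values
# taken from the paper the comments refer to):
# model = DualSysyem(theta=0.5, alpha_p=0.1, alpha_n=0.05, tau=1.0,
#                    I=1.0, C=0.5, memsize=10, cycle_length=60)
# dataset = [(2.0, 1.0), (1.5, 0.0), (3.0, 1.0)]  # (inter-response time, reward) pairs
# trajectory = model.learn_from_dataset(dataset, stepsize=0.1)  # one (g_k, h_k) per cycle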
``` |
{
"source": "7cm-diameter/grgr",
"score": 2
} |
#### File: grgr/dev/__init__.py
```python
from typing import Any, Dict, Iterable, List, Optional
import grgr.dev.typing as tp
from grgr import _R
from numpy import ndarray
from pandas import DataFrame
from rpy2.robjects import conversion, default_converter, numpy2ri, pandas2ri
def _is_ndarray(x: Any) -> bool:
return isinstance(x, ndarray)
def _is_dataframe(x: Any) -> bool:
return isinstance(x, DataFrame)
def _is_ggplot(x: Any) -> bool:
return isinstance(x, tp.Show)
def assign_in_R(varname: str, v: Any):
if _is_ggplot(v):
_R(f"{varname} <- {v.tor()}")
return None
if _is_ndarray(v):
with conversion.localconverter(default_converter + numpy2ri.converter):
v = conversion.py2rpy(v)
if _is_dataframe(v):
with conversion.localconverter(default_converter +
pandas2ri.converter):
v = conversion.py2rpy(v)
_R.assign(varname, v)
def _id_to_alphabet(x: Any) -> str:
id_ = str(id(x))
return "".join((map(lambda i: chr(ord("@") + int(i) + 1), str(id_))))
def _format_as_kwarg(k: str, v: Any, ignored: List[str]) -> Optional[str]:
if v is None or k in ignored:
return None
if _is_ndarray(v) or _is_dataframe(v) or _is_ggplot(v):
varname = _id_to_alphabet(v)
assign_in_R(varname, v)
return f"{k}={varname}"
return f"{k}={v}"
def _format_as_posarg(v: Any) -> str:
if _is_ndarray(v) or _is_dataframe(v) or _is_ggplot(v):
varname = _id_to_alphabet(v)
assign_in_R(varname, v)
        return varname
return str(v)
def _filter_none(x: Iterable[Optional[Any]]) -> filter:
return filter(lambda x_: x_ is not None, x)
def dict_to_rargs(d: Dict, ignored: List[str] = []) -> Optional[tp.RCode]:
    # the following names are always ignored by default; build a new list so that
    # neither the default argument nor the caller's list is mutated between calls
    ignored = ignored + ["ignores", "self", "args", "kwargs"]
_fargs: map[Optional[str]] = map(
lambda kv: _format_as_kwarg(kv[0], kv[1], ignored), d.items())
fargs = list(_filter_none(_fargs))
s = ",".join(fargs)
if len(s) == 0:
return None
return s
def iter_to_rargs(x: Iterable) -> Optional[tp.RCode]:
fargs: map[str] = map(_format_as_posarg, x)
s = ",".join(fargs)
if len(s) == 0:
return None
return s
```
#### File: grgr/ggplot2/__init__.py
```python
from typing import Optional, Tuple, Union
from grgr import _R
from grgr.dev import dict_to_rargs
from grgr.dev.typing import T, U
from grgr.ggplot2.basic import Aesthetic, GGPlot
from grgr.ggplot2.facet import Facet
from grgr.ggplot2.layer import Layer
from grgr.ggplot2.scale import Appearance
from grgr.ggplot2.theme import Theme, ThemeElement
from numpy import array, ndarray, str_
from numpy.typing import NDArray
from pandas import DataFrame
# Basics
def ggplot(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return GGPlot(data, mapping, **kwargs)
def aes(x: Optional[Union[str, ndarray]] = None,
y: Optional[Union[str, ndarray]] = None,
**kwargs) -> Aesthetic:
return Aesthetic(x, y, **kwargs)
def ggsave(filename: str,
plot: Optional[GGPlot] = None,
width: Optional[int] = None,
height: Optional[int] = None,
dpi: Optional[int] = None,
**kwargs):
s = str()
pyargs = locals()
pyargs.update(**kwargs)
rargs = dict_to_rargs(pyargs, ["s"])
rcode = f"ggsave({rargs})"
_R(rcode)
# Layer
def geom_abline(slope: float = 1., intercept: float = 0.) -> Layer:
return Layer("geom_abline", slope=slope, intercept=intercept)
def geom_hline(yintercept: float) -> Layer:
return Layer("geom_hline", yintercept=yintercept)
def geom_vline(xintercept: float) -> Layer:
return Layer("geom_hline", xintercept=xintercept)
def geom_bar(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_bar", data, mapping, **kwargs)
def geom_boxplot(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_boxplot", data, mapping, **kwargs)
def geom_density(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_density", data, mapping, **kwargs)
def geom_density_2d(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_density_2d", data, mapping, **kwargs)
def geom_histogram(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_histogram", data, mapping, **kwargs)
def geom_errorbar(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_errorbar", data, mapping, **kwargs)
def geom_line(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_line", data, mapping, **kwargs)
def geom_point(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_point", data, mapping, **kwargs)
def geom_ribbon(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_ribbon", data, mapping, **kwargs)
def geom_area(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_area", data, mapping, **kwargs)
def geom_violin(data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
return Layer("geom_violin", data, mapping, **kwargs)
# Scales
def labs(title: Optional[str] = None,
subtitle: Optional[str] = None,
caption: Optional[str] = None,
tag: Optional[str] = None,
alt: Optional[str] = None,
alt_insight: Optional[str] = None,
**kwargs) -> Appearance:
return Appearance("labs",
title=title,
subtitle=subtitle,
caption=caption,
tag=tag,
alt=alt,
alt_insight=alt_insight,
**kwargs)
def xlab(label: str) -> Appearance:
return Appearance("xlab", label=label)
def ylab(label: str) -> Appearance:
return Appearance("xlab", label=label)
def ggtitle(label, subtitle: Optional[str] = None) -> Appearance:
return Appearance("ggtitle", label=label, subtitle=subtitle)
def lims(x: Optional[Tuple[T, T]], y: Optional[Tuple[U, U]]) -> Appearance:
return Appearance("lims", x=array(x), y=array(y))
def xlim(x: Tuple[T, T]) -> Appearance:
return Appearance("xlim", array(x))
def ylim(y: Tuple[T, T]) -> Appearance:
return Appearance("ylim", array(y))
def scale_color_continuous(colorscale: str = '"gradient"') -> Appearance:
return Appearance("scale_color_continuous", type=colorscale)
def scale_fill_continuous(colorscale: str = '"gradient"') -> Appearance:
return Appearance("scale_fill_continuous", type=colorscale)
def scale_color_discrete(colorscale: str = '"gradient"') -> Appearance:
return Appearance("scale_color_discrete", type=colorscale)
def scale_fill_discrete(colorscale: str = '"gradient"') -> Appearance:
return Appearance("scale_fill_discrete", type=colorscale)
def scale_color_gradient(low: str, high: str, **kwargs) -> Appearance:
return Appearance("scale_color_gradient", low=low, high=high, **kwargs)
def scale_fill_gradient(low: str, high: str, **kwargs) -> Appearance:
return Appearance("scale_fill_gradient", low=low, high=high, **kwargs)
def scale_color_gradient2(low: str, mid: str, high: str,
**kwargs) -> Appearance:
return Appearance("scale_color_gradient2",
low=low,
mid=mid,
high=high,
**kwargs)
def scale_fill_gradient2(low: str, mid: str, high: str,
**kwargs) -> Appearance:
return Appearance("scale_fill_gradient2",
low=low,
mid=mid,
high=high,
**kwargs)
def scale_color_gradientn(colors: NDArray[str_], **kwargs) -> Appearance:
return Appearance("scale_color_gradientn", colors=colors, **kwargs)
def scale_fill_gradientn(colors: NDArray[str_], **kwargs) -> Appearance:
return Appearance("scale_fill_gradientn", colors=colors, **kwargs)
# Facets
def facet_grid(*args, **kwargs) -> Facet:
return Facet("facet_grid", *args, **kwargs)
def facet_wrap(*args, **kwargs) -> Facet:
return Facet("facet_wrap", *args, **kwargs)
# Themes
def theme(**kwargs):
return Theme("theme", **kwargs)
def theme_bw(**kwargs):
return Theme("theme_bw", **kwargs)
def theme_classic(**kwargs):
return Theme("theme_classic", **kwargs)
def margin(top: float = 0.,
right: float = 0.,
bottom: float = 0.,
left: float = 0.,
unit: str = "pt") -> ThemeElement:
return ThemeElement("margin", t=top, r=right, b=bottom, l=left, unit=unit)
def element_blank():
return ThemeElement("element_blank")
def element_rect(fill: Optional[str] = None,
color: Optional[str] = None,
size: Optional[float] = None,
linetype: Optional[str] = None,
**kwargs):
return ThemeElement("element_rect",
fill=fill,
color=color,
size=size,
linetype=linetype,
**kwargs)
def element_line(color: Optional[str] = None,
size: Optional[float] = None,
linetype: Optional[str] = None,
**kwargs):
return ThemeElement("element_line",
color=color,
size=size,
linetype=linetype,
**kwargs)
def element_text(family: Optional[str] = None,
color: Optional[str] = None,
size: Optional[float] = None,
angle: Optional[float] = None,
**kwargs):
return ThemeElement("element_text",
family=family,
color=color,
size=size,
angle=angle,
**kwargs)
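# Illustrative composition sketch (assumptions: `df` is a pandas DataFrame with "x" and
# "y" columns, and GGPlot supports `+` composition the same way Layer does; string
# arguments are wrapped in extra quotes so they reach R as string literals):
# p = ggplot(df, aes("x", "y")) + geom_point() + labs(title='"demo"') + theme_bw()
# ggsave('"demo.png"', plot=p, width=12, height=8)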
```
#### File: grgr/ggplot2/layer.py
```python
from copy import copy
from typing import Optional
import grgr.dev.typing as tp
from grgr.dev import dict_to_rargs
from grgr.ggplot2.basic import Aesthetic
from pandas import DataFrame
class Layer(tp.Join):
def __init__(self,
name: str,
data: Optional[DataFrame] = None,
mapping: Optional[Aesthetic] = None,
**kwargs):
self._s = str()
pyargs = locals()
pyargs.update(kwargs)
rargs = dict_to_rargs(pyargs, ["name"])
if rargs is not None:
self._s = f"{name}({rargs})"
else:
self._s = f"{name}()"
def __repr__(self) -> str:
return self._s
def __add__(self, other: tp.Join) -> "Layer":
clone = copy(self)
clone._s += f" + \n {other.tor()}"
return clone
def tor(self) -> tp.RCode:
return self._s
```
#### File: grgr/tests/test_theme.py
```python
from typing import Any, Dict
import pytest
from grgr.ggplot2.theme import Theme, ThemeElement
@pytest.mark.parametrize("kwargs, answer", [
({
"foo": '"bar"'
}, 'test(foo="bar")'),
({
"foo_bar": '"bar"'
}, 'test(foo.bar="bar")'),
({
"foo": '"bar"',
"foo_bar": '"bar"'
}, 'test(foo="bar",foo.bar="bar")'),
({}, "test()"),
])
def test_theme(kwargs: Dict[str, Any], answer: str):
assert Theme("test", **kwargs).tor() == answer
@pytest.mark.parametrize("kwargs, answer", [
({
"foo": '"bar"'
}, 'test(foo="bar")'),
({
"foo_bar": '"bar"'
}, 'test(foo_bar="bar")'),
({
"foo": '"bar"',
"foo_bar": '"bar"'
}, 'test(foo="bar",foo_bar="bar")'),
({}, "test()"),
])
def test_theme_element(kwargs: Dict[str, Any], answer: str):
assert ThemeElement("test", **kwargs).tor() == answer
``` |
{
"source": "7cm-diameter/playground",
"score": 3
} |
#### File: playground/agent/flush_message.py
```python
from amas.agent import Agent
from comprex.util import timestamp
RECV = "RECV"
SEND = "SEND"
async def flush_message_for(agent: Agent, duration: float):
print(timestamp(f"Ignore messages for {duration} sec"))
while duration >= 0.:
s, _ = timestamp(None)
await agent.recv(duration)
e, _ = timestamp(None)
duration -= e - s
print(timestamp("Now, starting to receive messages"))
async def recv_message(agent: Agent):
await flush_message_for(agent, 5)
while True:
addr, mess = await agent.recv()
if mess == 0:
break
print(f"{mess} from {addr}")
async def send_mess(agent: Agent, t: float, n: int):
for _ in range(n):
await agent.sleep(t)
agent.send_to(RECV, 1)
agent.send_to(RECV, 0)
if __name__ == '__main__':
from amas.connection import Register
from amas.env import Environment
sender = Agent(SEND).assign_task(send_mess, t=1., n=10)
receiver = Agent(RECV).assign_task(recv_message)
rgst = Register([sender, receiver])
env = Environment([sender, receiver])
env.run()
```
#### File: playground/disagree/sim.py
```python
import argparse as ap
from itertools import chain
from pathlib import Path
from typing import List, Tuple
import numpy as np
import yaml
from nptyping import NDArray
class Config(dict):
    def __init__(self, path: str):
        self.__path = path
        with open(path, "r") as f:
            d: dict = yaml.safe_load(f)
        self.update(d)
@property
def narms(self) -> int:
return self["number-of-arms"]
@property
def nlearner(self) -> int:
return self["number-of-learners"]
@property
def weight(self) -> float:
return self["weight"]
@property
def initpreds(self) -> Tuple[float, float]:
return tuple(self["range-initial-prediction"])
@property
def rangelr(self) -> Tuple[float, float]:
return tuple(self["range-learning-rate"])
@property
def reward_probs(self) -> List[List[float]]:
return self["reward-probabilities"]
@property
def trial(self) -> List[int]:
return self["number-of-trial"]
@property
def filename(self) -> str:
return self["filename"]
class ConfigClap(object):
def __init__(self):
self._parser = ap.ArgumentParser()
self._parser.add_argument("--yaml",
"-y",
help="path to configuration file (`yaml`)",
type=str)
self._args = self._parser.parse_args()
def config(self) -> Config:
yml = self._args.yaml
return Config(yml)
def softmax(x: NDArray[1, float]) -> NDArray[1, float]:
x_ = np.exp(x)
return x_ / np.sum(x_)
class DisAgreeAgent(object):
    def __init__(self, narms: int, num_learner: int,
                 initpreds: Tuple[float, float],
                 rangelr: Tuple[float, float], weight: float):
        self._k = narms
        self._n = num_learner
self._w = weight
self._preds = np.random.uniform(*initpreds, (narms, num_lerner))
self._lrs = np.random.uniform(*rangelr, (narms, num_lerner))
def update(self, rewards: NDArray[1, float], action: NDArray[1, int]):
tds = rewards.reshape(self._k, -1) - self._preds
self._preds += self._lrs * tds * action.reshape(self._k, -1)
def ensemble_preds(self) -> NDArray[1, float]:
return np.mean(self._preds, axis=1)
def disagreements(self) -> NDArray[1, float]:
return self._w * np.var(self._preds, axis=1)
def choose_action(self, preds: NDArray[1, float],
curiosities: NDArray[1, float]) -> NDArray[1, int]:
val = preds + curiosities
probs = softmax(val)
action = np.random.choice(self._k, p=probs)
return np.identity(self._k)[action]
class NArmBandit(object):
def __init__(self, probs: NDArray[1, float]):
self._probs = probs
def rewards(self):
return np.random.binomial(1, self._probs)
def to_drow(probs: List[float], preds: NDArray[1, float],
disagreements: NDArray[1, float], reward: List[float]) -> Tuple:
return tuple(chain(probs, preds, disagreements, reward))
def colnames(narms: int) -> List[str]:
probs = [f"prob_{i}" for i in range(narms)]
preds = [f"pred_{i}" for i in range(narms)]
disagreements = [f"disagreement_{i}" for i in range(narms)]
return list(chain(probs, preds, disagreements, ["reward"]))
def get_current_dir(relpath: str) -> Path:
return Path(relpath).absolute()
def create_data_dir(relpath: str, parent: str):
cur_dir = get_current_dir(relpath)
target_dir = cur_dir
while not target_dir.stem == parent:
target_dir = target_dir.parent
data_dir = target_dir.joinpath("data")
if not data_dir.exists():
data_dir.mkdir()
return data_dir
if __name__ == '__main__':
from pandas import DataFrame
config = ConfigClap().config()
# parameters for agent
rangelr = config.rangelr
initpreds = config.initpreds
weight = config.weight
# parameters for environment
narms = config.narms
numlearner = config.nlearner
trials = config.trial
probs = config.reward_probs
agent = DisAgreeAgent(narms, numlearner, initpreds, rangelr, weight)
results: List[Tuple] = []
for trial, prob in zip(trials, probs):
print(f"reward probabilities: {prob}")
env = NArmBandit(np.array(prob))
for t in range(1, trial + 1):
preds = agent.ensemble_preds()
disagreements = agent.disagreements()
action = agent.choose_action(preds, disagreements)
rewards = env.rewards()
agent.update(rewards, action)
if t % 10 == 0:
print(f"{t} - preds: {preds} - curiosity: {disagreements}")
reward = np.max(action * rewards)
results.append(to_drow(prob, preds, disagreements, [reward]))
output_data = DataFrame(results, columns=colnames(narms))
data_dir = create_data_dir(__file__, "disagree")
filepath = data_dir.joinpath(config.filename)
output_data.to_csv(filepath, index=False)
``` |
{
"source": "7coil/simpleguitk",
"score": 3
} |
#### File: simpleguitk/simpleplot/plot.py
```python
from __future__ import division
try:
import matplotlib.pyplot
except ImportError:
print('matplotlib must be installed in order to use SimplePlot!')
raise
def plot_lines(framename, width, height, xlabel, ylabel, datasets,
points=False, legends=[]):
matplotlib.pyplot.title(framename)
matplotlib.pyplot.xlabel(xlabel)
matplotlib.pyplot.ylabel(ylabel)
marker = None
if points:
marker = 'o'
for i in range(len(datasets)):
label = ''
if i < len(legends):
label = legends[i]
(xdata, ydata) = ([], [])
if type(datasets[i]) is dict:
for x, y in sorted(datasets[i].items()):
xdata.append(x)
ydata.append(y)
elif type(datasets[i]) is list:
for x, y in datasets[i]:
xdata.append(x)
ydata.append(y)
else:
raise TypeError('Type %s currently unsupported' %
type(datasets[i]))
matplotlib.pyplot.plot(xdata, ydata, label=label, marker=marker)
if len(legends):
matplotlib.pyplot.legend(loc='upper right')
matplotlib.pyplot.show()
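# Illustrative usage sketch (the data values are made up; each dataset may be a dict of
# x -> y pairs or a list of (x, y) tuples, as handled above):
# plot_lines('Example plot', 400, 300, 'x', 'y',
#            [{0: 1, 1: 2, 2: 4}, [(0, 0), (1, 1), (2, 8)]],
#            points=True, legends=['dict series', 'list series'])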
``` |
{
"source": "7Cortez7/instagram-giveaway-bot",
"score": 3
} |
#### File: 7Cortez7/instagram-giveaway-bot/browser.py
```python
from selenium import webdriver
import time
import userdata as udata
import random
randomUsers = set()
class Browser:
def __init__(self, link):
self.link = link
self.browser = webdriver.Chrome()
        self.Instagram()
        self.Login()
        self.goFollowers()
def Instagram(self):
self.browser.get(self.link)
time.sleep(2)
def goFollowers(self):
self.browser.find_element_by_xpath("//*[@id=\"react-root\"]/section/main/div/header/section/ul/li[2]/a").click()
time.sleep(5)
        self.scrollDown()
followers = self.browser.find_elements_by_css_selector("._7UhW9.xLCgt.qyrsm.KV-D4.se6yk.T0kll")
for follower in followers:
randomUsers.add(follower.text)
print("Çekiliş başlıyor! {totaluser} kişi katılmaya hak kazandı.".format(totaluser = len(randomUsers)))
time.sleep(5)
randomUsersList = list(randomUsers)
print("Kazanan:", random.choice(randomUsersList))
time.sleep(5)
exit()
def scrollDown(self):
jsCode = """
page = document.querySelector(".isgrP");
page.scrollTo(0, page.scrollHeight);
var pageEnd = page.scrollHeight;
return pageEnd;
"""
pageEnd = self.browser.execute_script(jsCode)
while True:
end = pageEnd
time.sleep(1)
pageEnd = self.browser.execute_script(jsCode)
if end == pageEnd:
break
def Login(self):
username = self.browser.find_element_by_name("username")
password = self.browser.find_element_by_name("password")
loginBtn = self.browser.find_element_by_css_selector("#loginForm > div > div:nth-child(3) > button > div")
username.send_keys(udata.username)
password.send_keys(udata.password)
time.sleep(1)
loginBtn.click()
time.sleep(2)
self.browser.get(self.link + udata.username)
time.sleep(2)
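# Illustrative usage sketch (the base URL is an assumption; Login() later appends
# udata.username to it to open the giveaway profile):
# Browser("https://www.instagram.com/")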
``` |
{
"source": "7CStudio/phone_book_manager",
"score": 2
} |
#### File: phone_book_manager/phone_book_manager/base_views.py
```python
from flask import request, current_app
from flask.views import MethodView
import status
from .utils import get_token, HTTPException
from . import services
class AuthMixin:
def authenticate(self):
token = get_token()
self.user = services.get_user_by_access_token(
access_token=token)
if not self.user:
raise HTTPException(status=status.HTTP_401_UNAUTHORIZED)
def authenticate_app(self):
token = get_token()
if token != current_app.config['APP_TOKEN']:
raise HTTPException(status=status.HTTP_403_FORBIDDEN)
class BaseAPIView(MethodView, AuthMixin):
def dispatch_request(self, *args, **kwargs):
method = request.method
meth = getattr(self, method.lower(), None)
assert meth is not None, 'Unimplemented method %r' % method
resp = meth(*args, **kwargs)
return resp
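# Illustrative sketch of a concrete view built on these classes (the endpoint name and
# response body are assumptions about the rest of the project):
# class ContactListView(BaseAPIView):
#     def get(self):
#         self.authenticate()  # raises HTTPException(401) when the token is invalid
#         return {'contacts': []}, status.HTTP_200_OK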
```
#### File: 7CStudio/phone_book_manager/tasks.py
```python
import os
from invoke import task
@task
def clean(ctx):
ctx.run("rm -rf **/*.pyc")
@task
def test(ctx, pdb=False):
cmd = "py.test -v -s"
if pdb:
cmd += ' --pdb'
ctx.run(cmd, pty=True)
ctx.run("py.test --cov-report term-missing --cov=phone_book_manager test_api.py") # noqa
ctx.run('flake8 .')
@task
def create_schema(ctx):
from phone_book_manager import create_app, db
from app import get_default_settings
os.environ['SQLALCHEMY_ECHO'] = 'True'
create_app(get_default_settings())
db.create_all()
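# Illustrative CLI usage sketch (assumes the `invoke` tool is installed; with invoke's
# default auto-dashing, the create_schema task is exposed as `create-schema`):
#   inv clean
#   inv test --pdb
#   inv create-schema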
``` |
{
"source": "7da-dev/censo7da_serve",
"score": 2
} |
#### File: censo7da_serve/census/models.py
```python
from django.db import models
from uuid import uuid4
from django.utils.translation import ugettext_lazy as _
# Create your models here.
class Inst(models.Model):
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
year = models.CharField(_('year'), max_length=4, blank=True)
inst_id = models.CharField(_('inst id'), max_length=20, blank=True)
inst_e_id = models.CharField(_('inst e id'), max_length=20, null=True, blank=True)
inst_type = models.CharField(_('type'), max_length=20, blank=True)
inst_title = models.CharField(_('title'), max_length=20, blank=True)
inst_name_l = models.CharField(_('name l'), max_length=120, blank=True)
inst_name_s = models.CharField(_('name s'), max_length=20, blank=True)
is_active = models.BooleanField(_('active'), default=True)
fiscal_id = models.CharField(_('fiscal id'), max_length=20, blank=True)
inst_e2_id = models.CharField(_('inst e2 id'), max_length=20, null=True, blank=True)
birth_date = models.DateField(_('birth date'), null=True, blank=True)
photo = models.ImageField(
_('photo'), upload_to='inst', default='inst/default.png',
null=True, blank=True)
created = models.DateTimeField(_('created'), auto_now_add=True, blank=True)
modified = models.DateTimeField(_('modified'), auto_now=True, blank=True)
class Meta:
verbose_name = _('inst')
verbose_name_plural = _('insts')
unique_together = (
('year','fiscal_id', 'inst_name_s'),
)
def __str__(self):
return '%s %s' % (self.fiscal_id, self.inst_name_s, )
class Camp(models.Model):
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
year = models.CharField(_('year'), max_length=4, blank=True)
camp_id = models.CharField(_('camp id'), max_length=20, blank=True)
inst_id = models.CharField(_('inst id'), max_length=20, null=True, blank=True)
camp_type = models.CharField(_('type'), max_length=20, blank=True)
camp_title = models.CharField(_('title'), max_length=20, blank=True)
camp_name_l = models.CharField(_('name l'), max_length=120, blank=True)
camp_name_s = models.CharField(_('name s'), max_length=20, blank=True)
is_active = models.BooleanField(_('active'), default=True)
resp_id = models.CharField(_('resp id'), max_length=20, null=True, blank=True)
account = models.CharField(_('account'), max_length=20, null=True, blank=True)
birth_date = models.DateField(_('birth date'), null=True, blank=True)
photo = models.ImageField(
_('photo'), upload_to='camp', default='camp/default.png',
null=True, blank=True)
created = models.DateTimeField(_('created'), auto_now_add=True, blank=True)
modified = models.DateTimeField(_('modified'), auto_now=True, blank=True)
class Meta:
verbose_name = _('camp')
verbose_name_plural = _('camps')
unique_together = (
( 'year','camp_name_s'),
)
def __str__(self):
return '%s %s' % (self.camp_name_l, self.camp_name_s, )
class Unit(models.Model):
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
year = models.CharField(_('year'), max_length=4, blank=True)
unit_id = models.CharField(_('unit id'), max_length=20, blank=True)
camp_id = models.CharField(_('camp id'), max_length=20, null=True, blank=True)
unit_type = models.CharField(_('type'), max_length=20, blank=True)
unit_title = models.CharField(_('title'), max_length=20, blank=True)
unit_name_l = models.CharField(_('name l'), max_length=120, blank=True)
unit_name_s = models.CharField(_('name s'), max_length=20, blank=True)
is_active = models.BooleanField(_('active'), default=True)
resp_id = models.CharField(_('resp id'), max_length=20, null=True, blank=True)
account = models.CharField(_('account'), max_length=20, null=True, blank=True)
created = models.DateTimeField(_('created'), auto_now_add=True, blank=True)
modified = models.DateTimeField(_('modified'), auto_now=True, blank=True)
class Meta:
verbose_name = _('unit')
verbose_name_plural = _('units')
def __str__(self):
return '%s %s' % (self.unit_name_l, self.unit_name_s, )
```
#### File: censo7da_serve/users/models.py
```python
from uuid import uuid4
from django.db import models
from django.contrib.auth.models import AbstractUser
#from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.db.models.signals import post_save
from django.dispatch import receiver
# Create your models here.
'''
class Profile(models.Model):
OTHER = 0
NID = 1
FOREING_CARD = 2
CERTIFICATE_BIRTH = 3
IDENTITY_TYPE_CHOICES = (
(NID, _('n.i.d.')),
(FOREING_CARD, _('foreign card')),
(CERTIFICATE_BIRTH, _('certificate birth')),
(OTHER, _('other')),
)
# id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
user = models.OneToOneField(
User, on_delete=models.CASCADE)
identity_type = models.PositiveSmallIntegerField(
choices=IDENTITY_TYPE_CHOICES, default=NID, null=True, blank=True)
identity_num = models.CharField(
_('identity num'), max_length=20, null=True, blank=True)
location = models.CharField(
_('location'), max_length=30, null=True, blank=True)
birth_date = models.DateField(_('birth date'), null=True, blank=True)
photo = models.ImageField(
_('photo'), upload_to='persons', default='persons/default.png',
blank=True)
detail = models.TextField(verbose_name=_('detail'), null=True, blank=True)
def __str__(self):
return self.user.username
class Meta:
verbose_name = _('profile')
verbose_name_plural = _('profiles')
@receiver(post_save, sender=User)
def create_or_update_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
instance.profile.save()
'''
class Person(models.Model):
OTHER = 0
NID = 1
FOREING_CARD = 2
CERTIFICATE_BIRTH = 3
IDENTITY_TYPE_CHOICES = (
(NID, _('n.i.d.')),
(FOREING_CARD, _('foreign card')),
(CERTIFICATE_BIRTH, _('certificate birth')),
(OTHER, _('other')),
)
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
identity_type = models.PositiveSmallIntegerField(
_('identity type'), choices=IDENTITY_TYPE_CHOICES, default=NID,
null=True, blank=True)
identity_num = models.CharField(
_('identity num'), max_length=20, null=True, blank=True)
first_name = models.CharField(_('first name'), max_length=30, blank=True)
last_name = models.CharField(_('last name'), max_length=150, blank=True)
email = models.EmailField(_('email address'), blank=True)
is_active = models.BooleanField(_('active'), default=True)
birth_date = models.DateField(_('birth date'), null=True, blank=True)
photo = models.ImageField(
_('photo'), upload_to='persons', default='persons/default.png',
blank=True)
created = models.DateTimeField(_('created'), auto_now_add=True, blank=True)
modified = models.DateTimeField(_('modified'), auto_now=True, blank=True)
class Meta:
verbose_name = _('person')
verbose_name_plural = _('persons')
unique_together = (
('first_name', 'last_name', 'identity_type', 'identity_num'),
('identity_type', 'identity_num'),
)
def __str__(self):
return '%s %s' % (self.first_name, self.last_name, )
class User(AbstractUser):
id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
person = models.OneToOneField(
'Person', on_delete=models.CASCADE, verbose_name=_('person'),
null=True, blank=True)
detail = models.TextField(_('detail'), blank=True)
last_did = models.CharField(
_('last did'), max_length=250, null=True, blank=True)
class Meta:
verbose_name = _('user')
verbose_name_plural = _('users')
def __str__(self):
return '%s' % (self.username)
'''
@receiver(post_save, sender=User)
def create_or_update_user_person(sender, instance, created, **kwargs):
if created:
Person.objects.create(user=instance)
instance.person.save()
'''
``` |
{
"source": "7dev7urandom/mcblend",
"score": 2
} |
#### File: mcblend/mcblend/panel.py
```python
from typing import List, Optional
from dataclasses import dataclass
from .custom_properties import EffectTypes
import bpy
from bpy.props import (
StringProperty, IntProperty, BoolProperty, FloatProperty,
FloatVectorProperty, CollectionProperty, EnumProperty, PointerProperty,
IntVectorProperty
)
from .operator_func.texture_generator import (
list_mask_types_as_blender_enum, UvMaskTypes,
list_mix_mask_modes_as_blender_enum)
# GUI
# UV-groups names list
class OBJECT_UL_NusiqMcblendUVGroupList(bpy.types.UIList):
'''GUI item used for drawing list of names of UV-groups.'''
def draw_item(
self, context, layout, data, item, icon, active_data,
active_propname):
'''
Drawing OBJECT_NusiqMcblendUvGroupProperties in a list.
:param context: the contexts of operator
:param layout: layout in which the object is drawn
:param data: the RNA object containing the collection
:param item: the item currently drawn in the collection
:param icon: not used - "the "computed" icon for the item" (?)
:param active_data: the RNA object containing the active property for the
collection.
:param active_propname: the name of the active property.
For more info see the UI Template called: "UI List Simple".
'''
# pylint: disable=arguments-differ, unused-argument
if self.layout_type in {'DEFAULT', 'COMPACT', 'CENTER'}:
# No rename functionality:
# layout.label(text=item.name, translate=False)
# With rename functionality:
layout.prop(item, "name", text="", emboss=False)
# UV-group panel
@dataclass
class _UIStackItem():
'''
Object used in OBJECT_PT_NusiqMcblendUVGroupPanel for saving the
information about nested UV-groups in stack data structure.
'''
ui: Optional[bpy.types.UILayout] # None if parent is collapsed
depth: int
class OBJECT_PT_NusiqMcblendUVGroupPanel(bpy.types.Panel):
'''Panel used for editing UV-groups.'''
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = 'scene'
bl_label = "Mcblend UV groups"
def draw_colors(self, mask, mask_index: int, col: bpy.types.UILayout):
'''Draws colors of UV-mask.'''
box = col.box()
row = box.row()
row.label(text='Colors')
op_props = row.operator(
"object.nusiq_mcblend_add_uv_mask_color", text="", icon='ADD')
op_props.mask_index = mask_index
colors_len = len(mask.colors)
for color_index, color in enumerate(mask.colors):
row = box.row()
row.prop(color, "color", text="")
up_down_row = row.row(align=True)
# Move down
if color_index - 1 >= 0:
op_props = up_down_row.operator(
"object.nusiq_mcblend_move_uv_mask_color", icon='TRIA_UP',
text='')
op_props.mask_index = mask_index
op_props.move_from = color_index
op_props.move_to = color_index - 1
# Move up
if color_index + 1 < colors_len:
op_props = up_down_row.operator(
"object.nusiq_mcblend_move_uv_mask_color", icon='TRIA_DOWN',
text='')
op_props.mask_index = mask_index
op_props.move_from = color_index
op_props.move_to = color_index + 1
# Delete button
op_props = row.operator(
"object.nusiq_mcblend_remove_uv_mask_color", icon='X', text='')
op_props.mask_index = mask_index
op_props.color_index = color_index
def draw_stripes(self, mask, mask_index: int, col: bpy.types.UILayout):
'''Draws stripes of UV-mask.'''
box = col.box()
row = box.row()
row.label(text='Stripes')
op_props = row.operator(
"object.nusiq_mcblend_add_uv_mask_stripe", text="", icon='ADD')
op_props.mask_index = mask_index
stripes_len = len(mask.stripes)
for stripe_index, stripe in enumerate(mask.stripes):
row = box.row()
if (
mask.relative_boundaries and
mask.mask_type != UvMaskTypes.GRADIENT_MASK.value):
# Gradient mask always uses absolute values
row.prop(stripe, "width_relative")
else:
row.prop(stripe, "width")
row.prop(stripe, "strength")
up_down_row = row.row(align=True)
# Move down
if stripe_index - 1 >= 0:
op_props = up_down_row.operator(
"object.nusiq_mcblend_move_uv_mask_stripe", icon='TRIA_UP',
text='')
op_props.mask_index = mask_index
op_props.move_from = stripe_index
op_props.move_to = stripe_index - 1
# Move up
if stripe_index + 1 < stripes_len:
op_props = up_down_row.operator(
"object.nusiq_mcblend_move_uv_mask_stripe", icon='TRIA_DOWN',
text='')
op_props.mask_index = mask_index
op_props.move_from = stripe_index
op_props.move_to = stripe_index + 1
# Delete button
op_props = row.operator(
"object.nusiq_mcblend_remove_uv_mask_stripe", icon='X',
text='')
op_props.mask_index = mask_index
op_props.stripe_index = stripe_index
def draw_mask_properties(
self, mask, index: int, col: bpy.types.UILayout, *,
colors=False, interpolate=False,
normalize=False, p1p2=False, stripes=False,
relative_boundaries=False, expotent=False, strength=False,
            hard_edge=False, horizontal=False, seed=False, color=False,
children=False, mode=False):
'''Draws properties of UV-mask.'''
if colors:
self.draw_colors(mask, index, col) # colors
if interpolate:
col.prop(mask, "interpolate")
if normalize:
col.prop(mask, "normalize")
if p1p2:
row = col.row()
if mask.relative_boundaries:
row.prop(mask, "p1_relative")
row = col.row()
row.prop(mask, "p2_relative")
else:
row.prop(mask, "p1")
row = col.row()
row.prop(mask, "p2")
if relative_boundaries:
col.prop(mask, "relative_boundaries")
if stripes:
self.draw_stripes(mask, index, col) # stripes
if expotent:
col.prop(mask, "expotent")
if strength:
col.row().prop(mask, "strength")
if hard_edge:
col.prop(mask, "hard_edge")
if horizontal:
col.prop(mask, "horizontal")
if seed:
row = col.row()
row.prop(mask, "use_seed")
if mask.use_seed:
row.prop(mask, "seed")
if color:
col.prop(mask.color, "color")
if mode:
col.prop(mask, "mode")
if children:
col.prop(mask, "children")
def draw_mask(
self, mask, index: int, masks_len: int,
ui_stack: List[_UIStackItem]):
'''
Draws whole UV-mask gui with additional GUI items for navigation
between masks like buttons for moving and removing masks.
'''
col = None
# If parent is collapsed don't draw anything
if ui_stack[-1].ui is not None:
col = ui_stack[-1].ui
box = col.box()
# box.scale_x = True
col = box.column()
row = col.row()
if mask.ui_collapsed:
row.prop(
mask, "ui_collapsed", text="", icon='DISCLOSURE_TRI_RIGHT',
emboss=False)
else:
row.prop(
mask, "ui_collapsed", text="", icon='DISCLOSURE_TRI_DOWN',
emboss=False)
row.label(text=f'{mask.mask_type}')
up_down_row = row.row(align=True)
# Move down
if index - 1 >= 0:
op_props = up_down_row.operator(
"object.nusiq_mcblend_move_uv_mask", icon='TRIA_UP',
text='')
op_props.move_from = index
op_props.move_to = index - 1
# Move up
if index + 1 < masks_len:
op_props = up_down_row.operator(
"object.nusiq_mcblend_move_uv_mask", icon='TRIA_DOWN',
text='')
op_props.move_from = index
op_props.move_to = index + 1
# Hide button
if mask.ui_hidden:
row.prop(
mask, "ui_hidden", text="", icon='HIDE_ON',
emboss=False)
else:
row.prop(
mask, "ui_hidden", text="", icon='HIDE_OFF',
emboss=False)
# Delete button
op_props = row.operator(
"object.nusiq_mcblend_remove_uv_mask", icon='X', text='')
op_props.target = index
# Drawing the mask itself unless collapsed
if not mask.ui_collapsed:
if mask.mask_type == UvMaskTypes.COLOR_PALLETTE_MASK.value:
if len(ui_stack) > 1:
col.label(
text="This mask can't be put inside mix mask",
icon='ERROR')
else:
self.draw_mask_properties(
mask, index, col,
colors=True, interpolate=True, normalize=True)
if mask.mask_type == UvMaskTypes.GRADIENT_MASK.value:
self.draw_mask_properties(
mask, index, col,
p1p2=True, stripes=True, relative_boundaries=True,
expotent=True)
if mask.mask_type == UvMaskTypes.ELLIPSE_MASK.value:
self.draw_mask_properties(
mask, index, col,
p1p2=True, relative_boundaries=True, expotent=True,
strength=True, hard_edge=True)
if mask.mask_type == UvMaskTypes.RECTANGLE_MASK.value:
self.draw_mask_properties(
mask, index, col,
p1p2=True, relative_boundaries=True, expotent=True,
strength=True, hard_edge=True)
if mask.mask_type == UvMaskTypes.STRIPES_MASK.value:
self.draw_mask_properties(
mask, index, col,
stripes=True, relative_boundaries=True, horizontal=True)
if mask.mask_type == UvMaskTypes.RANDOM_MASK.value:
self.draw_mask_properties(
mask, index, col,
strength=True, expotent=True, seed=True)
if mask.mask_type == UvMaskTypes.COLOR_MASK.value:
self.draw_mask_properties(mask, index, col, color=True)
if mask.mask_type == UvMaskTypes.MIX_MASK.value:
self.draw_mask_properties(
mask, index, col,
children=True, strength=True, expotent=True,
mode=True)
if mask.mask_type == UvMaskTypes.MIX_MASK.value and col is not None:
# mask.children+1 because it counts itself as a member
if not mask.ui_collapsed:
ui_stack.append(_UIStackItem(
col.box(), mask.children+1))
else:
ui_stack.append(_UIStackItem(
None, mask.children+1))
def draw(self, context):
'''Draws whole UV-group panel.'''
col = self.layout.column(align=True)
# Add group
row = col.row()
row.operator(
"object.nusiq_mcblend_add_uv_group", text="New UV group",
icon='ADD'
)
row_import_export = col.row()
row_import_export.operator(
"object.nusiq_mcblend_import_uv_group_operator",
text="Import UV group", icon='IMPORT'
)
active_uv_group_id = bpy.context.scene.nusiq_mcblend_active_uv_group
uv_groups = bpy.context.scene.nusiq_mcblend_uv_groups
col.template_list(
listtype_name="OBJECT_UL_NusiqMcblendUVGroupList",
list_id="", dataptr=context.scene,
propname="nusiq_mcblend_uv_groups",
active_dataptr=context.scene,
active_propname="nusiq_mcblend_active_uv_group")
if active_uv_group_id < len(uv_groups):
active_uv_group = uv_groups[active_uv_group_id]
# Delete group
row.operator(
"object.nusiq_mcblend_remove_uv_group",
text="Delete this UV group", icon='X')
row_import_export.operator(
"object.nusiq_mcblend_export_uv_group_operator",
text="Export UV group", icon='EXPORT'
)
# Select side
row = col.row()
row.label(text='Side:')
row.prop(
context.scene, "nusiq_mcblend_active_uv_groups_side",
text="")
col.separator()
col.operator(
'object.nusiq_mcblend_copy_uv_group_side',
text='Copy current UV face', icon='DUPLICATE')
# Add mask
col.operator_menu_enum(
"object.nusiq_mcblend_add_uv_mask", "mask_type",
text="Add mask", icon="ADD")
# Draw selected side
sides = [
active_uv_group.side1, active_uv_group.side2,
active_uv_group.side3, active_uv_group.side4,
active_uv_group.side5, active_uv_group.side6
]
masks = sides[
int(context.scene.nusiq_mcblend_active_uv_groups_side)]
# Stack of UI items to draw in
ui_stack: List[_UIStackItem] = [
_UIStackItem(col, 0)]
for i, mask in enumerate(masks):
col.separator(factor=0.5)
self.draw_mask(mask, i, len(masks), ui_stack)
# Remove empty ui containers from top of ui_stack
while len(ui_stack) > 1: # Except the first one
ui_stack[-1].depth -= 1
if ui_stack[-1].depth <= 0:
ui_stack.pop()
else:
break
# Event group panel
class OBJECT_UL_NusiqMcblendEventsList(bpy.types.UIList):
'''GUI item used for drawing list of names of events.'''
def draw_item(
self, context, layout, data, item, icon, active_data,
active_propname):
'''
Drawing OBJECT_NusiqMcblendEventGroupProperties in a list.
:param context: the contexts of operator
:param layout: layout in which the object is drawn
:param data: the RNA object containing the collection
:param item: the item currently drawn in the collection
:param icon: not used - "the "computed" icon for the item" (?)
:param active_data: the RNA object containing the active property for the
collection.
:param active_propname: the name of the active property.
'''
# pylint: disable=arguments-differ, unused-argument
if self.layout_type in {'DEFAULT', 'COMPACT', 'CENTER'}:
# No rename functionality:
# layout.label(text=item.name, translate=False)
# With rename functionality:
layout.prop(item, "name", text="", emboss=False)
class OBJECT_PT_NusiqMcblendEventsPanel(bpy.types.Panel):
'''Panel used for editing events.'''
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = 'scene'
bl_label = "Mcblend events"
def draw_effect(self, effect, index: int, col: bpy.types.UILayout):
'''Draw single effect in the event'''
# If parent is collapsed don't draw anything
box = col.box()
col = box.column()
row = col.row()
row.label(text=f'{effect.effect_type}')
# Delete button
op_props = row.operator(
"object.nusiq_mcblend_remove_effect", icon='X', text='')
op_props.effect_index = index
if effect.effect_type == EffectTypes.PARTICLE_EFFECT.value:
col.prop(effect, "effect", text="Effect")
col.prop(effect, "locator", text="Locator")
col.prop(effect, "pre_effect_script", text="Pre effect script")
col.prop(effect, "bind_to_actor", text="Bind to actor")
elif effect.effect_type == EffectTypes.SOUND_EFFECT.value:
col.prop(effect, "effect", text="Effect")
def draw(self, context):
'''Draws whole event group panel.'''
col = self.layout.column(align=True)
row = col.row()
events = bpy.context.scene.nusiq_mcblend_events
active_event_id = bpy.context.scene.nusiq_mcblend_active_event
col.template_list(
listtype_name="OBJECT_UL_NusiqMcblendEventsList",
list_id="",
dataptr=bpy.context.scene, propname="nusiq_mcblend_events",
active_dataptr=bpy.context.scene,
active_propname="nusiq_mcblend_active_event")
row.operator(
"object.nusiq_mcblend_add_event", text="New event",
icon='ADD')
if 0 <= active_event_id < len(events):
row.operator(
"object.nusiq_mcblend_remove_event",
text="Delete this UV group", icon='X')
event = events[active_event_id]
effects = event.effects
col.operator_menu_enum(
"object.nusiq_mcblend_add_effect", "effect_type",
text="Add effect", icon="ADD")
if len(effects) > 0:
for i, effect in enumerate(effects):
col.separator(factor=0.5)
self.draw_effect(effect, i, col)
# Custom object properties panel
class OBJECT_PT_NusiqMcblendObjectPropertiesPanel(bpy.types.Panel):
'''Panel used for editing custom model object properties.'''
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = 'object'
bl_label = "Mcblend object properties"
@classmethod
def poll(cls, context):
if context.active_object:
return (
context.active_object.type == "MESH" or
context.active_object.type == "EMPTY")
return False
def draw(self, context):
col = self.layout.column(align=True)
if context.mode == "OBJECT" and context.object is not None:
object_properties = context.object.nusiq_mcblend_object_properties
col.prop(object_properties, "is_bone", text="Export as bone")
col.prop(object_properties, "mesh_type", text="")
if context.object.type == 'MESH':
col.prop(object_properties, "mirror", text="Mirror")
if object_properties.uv_group != '':
col.label(text=f'UV Group: {object_properties.uv_group}')
else:
col.label(text="This object doesn't have a UV group")
col.prop(object_properties, "inflate", text="Inflate")
# Model export panel
class OBJECT_PT_NusiqMcblendExportPanel(bpy.types.Panel):
'''
Panel used for launching the model export operator and changing its
settings.
'''
# pylint: disable=unused-argument
bl_label = "Export bedrock model"
bl_category = "Mcblend"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
def draw(self, context):
col = self.layout.column(align=True)
# col.prop(context.scene.nusiq_mcblend, "path", text="")
col.prop(
context.scene.nusiq_mcblend, "model_name", text="Name"
)
col.prop(
context.scene.nusiq_mcblend, "visible_bounds_width",
text="Visible bounds width"
)
col.prop(
context.scene.nusiq_mcblend, "visible_bounds_height",
text="Visible bounds height"
)
col.prop(
context.scene.nusiq_mcblend, "visible_bounds_offset",
text="Visible bounds offset"
)
self.layout.row().operator(
"object.nusiq_mcblend_export_operator", text="Export model"
)
# Model import panel
class OBJECT_PT_NusiqMcblendImportPanel(bpy.types.Panel):
'''Panel used for launching the model import operator.'''
# pylint: disable=unused-argument
bl_label = "Import bedrock model"
bl_category = "Mcblend"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
def draw(self, context):
self.layout.row().operator(
"object.nusiq_mcblend_import_operator", text="Import model"
)
# Animation export panel
class OBJECT_PT_NusiqMcblendExportAnimationPanel(bpy.types.Panel):
'''
Panel used launching the animation export operator and changing its
settings.
'''
# pylint: disable=unused-argument
bl_label = "Export bedrock animation"
bl_category = "Mcblend"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
def draw(self, context):
col = self.layout.column(align=True)
row = col.row()
row.operator(
"object.nusiq_mcblend_add_animation", text="New animation"
)
active_anim_id = bpy.context.scene.nusiq_mcblend_active_animation
anims = bpy.context.scene.nusiq_mcblend_animations
if active_anim_id < len(anims):
row.operator(
"object.nusiq_mcblend_remove_animation",
text="Remove this animation"
)
col.operator_menu_enum(
"object.nusiq_mcblend_list_animations", "animations_enum",
text="Select animation"
)
active_anim = anims[active_anim_id]
col.prop(active_anim, "name", text="Name")
col.prop(active_anim, "skip_rest_poses", text="Skip rest poses")
col.prop(active_anim, "single_frame", text="Export as pose")
if active_anim.single_frame:
col.prop(bpy.context.scene, "frame_current", text="Frame")
else:
col.prop(active_anim, "loop", text="Loop")
col.prop(active_anim, "anim_time_update",
text="anim_time_update")
col.prop(bpy.context.scene, "frame_start", text="Frame start")
col.prop(bpy.context.scene, "frame_end", text="Frame end")
col.operator(
"object.nusiq_mcblend_export_animation_operator",
text="Export animation")
# UV-mapper panel
class OBJECT_PT_NusiqMcblendSetUvsPanel(bpy.types.Panel):
'''
Panel used for launching the UV-mapping operator and changing its settings.
'''
# pylint: disable=unused-argument
bl_label = "Set bedrock UVs"
bl_category = "Mcblend"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
def draw(self, context):
col = self.layout.column(align=True)
col.prop(
context.scene.nusiq_mcblend, "texture_width", text="Texture width")
col.prop(
context.scene.nusiq_mcblend, "texture_height",
text="Texture height")
col.prop(
context.scene.nusiq_mcblend, "allow_expanding",
text="Allow texture expanding")
col.prop(
context.scene.nusiq_mcblend, "generate_texture",
text="Generate Texture")
if context.scene.nusiq_mcblend.generate_texture:
col.prop(
context.scene.nusiq_mcblend, "texture_template_resolution",
text="Template resolution")
self.layout.row().operator(
"object.nusiq_mcblend_map_uv_operator", text="Set minecraft UVs")
# "Other" operators panel
class OBJECT_PT_NusiqMcblendOperatorsPanel(bpy.types.Panel):
'''
Panel that gives the user access to various operators used by Mcblend.
'''
# pylint: disable=unused-argument
bl_label = "Operators"
bl_category = "Mcblend"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
def draw(self, context):
col = self.layout.column()
col.operator(
"object.nusiq_mcblend_toggle_mirror_operator",
text="Toggle mirror for UV mapping"
)
col.operator(
"object.nusiq_mcblend_uv_group_operator",
text="Set the UV group"
)
col.operator(
"object.nusiq_mcblend_clear_uv_group_operator",
text="Clear UV group"
)
col.operator(
"object.nusiq_mcblend_toggle_is_bone_operator",
text="Toggle export as bones"
)
col.operator(
"object.nusiq_mcblend_set_inflate_operator",
text="Inflate"
)
col.operator(
"object.nusiq_mcblend_round_dimensions_operator",
text="Round dimensions"
)
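# Minimal registration sketch (Mcblend's actual registration code lives elsewhere in the
# add-on; shown only to illustrate how these UIList/Panel classes become visible in Blender):
# def register():
#     for cls in (OBJECT_UL_NusiqMcblendUVGroupList, OBJECT_PT_NusiqMcblendUVGroupPanel):
#         bpy.utils.register_class(cls)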
```
#### File: mcblend/tests/test_animation_export.py
```python
import os
import shutil
import json
from pathlib import Path
import typing as tp
import pytest
from .common import blender_run_script
def make_comparison_files(
tmp: str, scene_name: str, blend_file_path: str,
) -> tp.Tuple[tp.Dict, str, tp.Dict]:
    '''
    Opens the blend file, selects the scene and exports its animation to the
    given tmp path.
    Returns the result JSON as a dictionary, the path to the newly created
    file, and the expected result JSON as a dictionary.
    '''
tmp = os.path.abspath(tmp)
target = os.path.join(tmp, f'{scene_name}.animation.json')
expected_result_path = (
f'./tests/data/test_animation_export/{scene_name}.animation.json'
)
script = os.path.abspath('./blender_scripts/export_animation.py')
blend_file_path = os.path.abspath(blend_file_path)
    # Windows uses weird path separators
tmp = tmp.replace('\\', '/')
target = target.replace('\\', '/')
script = script.replace('\\', '/')
# Create tmp if not exists
Path(tmp).mkdir(parents=True, exist_ok=True)
# Run blender actions
blender_run_script(
script, scene_name, target, blend_file_path=blend_file_path
)
# Return results
with open(target, 'r') as f:
target_dict = json.load(f)
with open(expected_result_path, 'r') as f:
expected_result = json.load(f)
    return target_dict, target, expected_result
# PYTEST FUNCTIONS
SCENES = [
'ObjectAnimation', 'ArmatureAnimation', 'issue71'
# 'BattleMech'
]
def setup_module(module):
'''Runs before tests'''
tmp_path = "./.tmp/test_animation_export"
if os.path.exists(tmp_path):
shutil.rmtree(tmp_path)
@pytest.fixture(params=SCENES)
def scene(request):
return request.param
# TESTS
def test_animation_export(scene):
result_dict, result_path, expected_result = make_comparison_files(
"./.tmp/test_animation_export", scene,
'./tests/data/tests_project.blend'
)
assert result_dict == expected_result
``` |
{
"source": "7digital/python-7digital-api",
"score": 3
} |
#### File: python-7digital-api/lib/oauth7digital.py
```python
import httplib
import oauth
from lockerEndpoint import Locker
class Oauth7digital(object):
key = None
SERVER = 'api.7digital.com'
REQUEST_TOKEN_URL = 'https://%s/1.2/oauth/requesttoken' % SERVER
ACCESS_TOKEN_URL = 'https://%s/1.2/oauth/accesstoken' % SERVER
    LOCKER_ENDPOINT_URL = 'http://%s/1.2/user/locker' % SERVER
def __init__(self, key, secret, access_token = None):
self.key = key
self.secret = secret
self.access_token = access_token
def request_token(self):
print '\nOAUTH STEP 1'
oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.__consumer(), http_url = self.REQUEST_TOKEN_URL, parameters={})
        print '\nMESSAGE:: %s' % oauth_request
oauth_request.sign_request(self.__signature_method(), self.__consumer(), None)
resp = self.__fetch_response(oauth_request, self.__secure_connection())
token = oauth.OAuthToken.from_string(resp)
return token
def authorize_request_token(self, token):
AUTHORIZATION_URL = 'https://account.7digital.com/%s/oauth/authorise' % self.key
print '\nOAUTH STEP 2'
auth_url="%s?oauth_token=%s" % (AUTHORIZATION_URL, token.key)
# auth url to go to
print 'Authorization URL:\n%s' % auth_url
oauth_verifier = raw_input('Please go to the above URL and authorize the app. Hit return when you have been authorized: ')
return True
def request_access_token(self, token):
print '\nOAUTH STEP 3'
oauth_request = self.__sign_oauth_request(token, self.ACCESS_TOKEN_URL)
resp = self.__fetch_response(oauth_request, self.__secure_connection())
token = oauth.OAuthToken.from_string(resp)
return token
def get_user_locker(self):
resp = self.__get_locker()
        return Locker(resp).get_contents()
def get_artist_from_user_locker(self):
resp = self.__get_locker()
return Locker(resp).get_artists()
def get_releases_from_user_locker(self):
resp = self.__get_locker()
return Locker(resp).get_releases()
def get_tracks_from_user_locker(self):
resp = self.__get_locker()
return Locker(resp).get_tracks()
def get_locker(self):
resp = self.__get_locker()
return Locker(resp).get_contents()
def __get_locker(self):
oauth_request = self.__sign_oauth_request(self.access_token, self.LOCKER_ENDPOINT_URL)
resp = self.__fetch_response(oauth_request, self.__connection())
return resp
def __sign_oauth_request(self, token, url_end_point):
oauth_request = oauth.OAuthRequest.from_consumer_and_token(self.__consumer(), token=token, http_url = url_end_point, parameters={})
oauth_request.sign_request(self.__signature_method(), self.__consumer(), token)
return oauth_request
def __consumer(self):
return oauth.OAuthConsumer(self.key, self.secret)
def __signature_method(self):
return oauth.OAuthSignatureMethod_HMAC_SHA1()
def __secure_connection(self):
return httplib.HTTPSConnection(self.SERVER)
def __connection(self):
return httplib.HTTPConnection(self.SERVER)
def __fetch_response(self, oauth_request, connection):
url = oauth_request.to_url()
connection.request(oauth_request.http_method, url)
response = connection.getresponse()
result = response.read()
return result
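
# --- Hedged usage sketch (not part of the original file) ---
# The OAuth 1.0a flow implied by the methods above; 'consumer_key'/'consumer_secret'
# are placeholders for real 7digital credentials:
#   api = Oauth7digital('consumer_key', 'consumer_secret')
#   req_token = api.request_token()                       # step 1: request token
#   api.authorize_request_token(req_token)                # step 2: user authorises in browser
#   access_token = api.request_access_token(req_token)    # step 3: exchange for access token
#   locker = Oauth7digital('consumer_key', 'consumer_secret', access_token).get_locker()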
```
#### File: 7digital/python-7digital-api/lockerEndpoint.py
```python
import os
from xml.dom.minidom import parseString
class Locker(object):
def __init__(self, xml_response):
self.xml_response = parseString(xml_response)
def get_contents(self):
results = []
locker_items = self.xml_response.getElementsByTagName('lockerRelease')
for item in locker_items:
results.append(LockerItem(item))
return results
def get_artists(self):
results = []
artist_nodes = self.xml_response.getElementsByTagName('artist')
for artist in artist_nodes:
results.append(LockerArtist(artist))
return results
def get_releases(self):
results = []
release_nodes = self.xml_response.getElementsByTagName('release')
for release in release_nodes:
results.append(LockerRelease(release))
return results
def get_tracks(self):
results = []
track_nodes = self.xml_response.getElementsByTagName('lockerTrack')
for track in track_nodes:
results.append(LockerTrack(track))
return results
class _LockerBase(object):
def extract(self, node, name, index = 0):
"""Extracts a value from the xml string"""
try:
nodes = node.getElementsByTagName(name)
if len(nodes):
if nodes[index].firstChild:
return nodes[index].firstChild.data.strip()
else:
return None
except:
return None
class LockerItem(_LockerBase):
def __init__(self, xml):
self.release = LockerRelease(xml.getElementsByTagName('release')[0])
self.tracks = self.__get_release_tracks(xml.getElementsByTagName('lockerTrack'))
def __get_release_tracks(self, tracks):
result = []
for track in tracks:
result.append(LockerTrack(track))
return result
class LockerTrack(_LockerBase):
def __init__(self, xml):
self.track = Track(xml.getElementsByTagName('track')[0])
self.remaining_downloads = self.extract(xml, 'remainingDownloads')
self.purchaseDate = self.extract(xml, 'purchaseDate')
self.download_urls = self.__get_download_urls(xml.getElementsByTagName('downloadUrls'))
def __get_download_urls(self, urls):
result = []
for url in urls:
result.append(DownloadUrls(url))
return result
class DownloadUrls(_LockerBase):
def __init__(self, xml):
self.url = self.extract(xml, 'url')
self.format = Format(xml.getElementsByTagName('format')[0])
class Format(_LockerBase):
def __init__(self, xml):
self.id = xml.getAttribute('id')
self.file_format = self.extract(xml, 'fileFormat')
self.bit_rate = self.extract(xml, 'bitRate')
class Track(_LockerBase):
def __init__(self, xml):
self.id = xml.getAttribute('id')
self.title = self.extract(xml, 'title')
self.version = self.extract(xml, 'version')
self.artist = LockerArtist(xml.getElementsByTagName('artist')[0])
self.url = self.extract(xml, 'url')
class LockerRelease(_LockerBase):
def __init__(self, xml):
self.id = xml.getAttribute('id')
self.title = self.extract(xml, 'title')
self.type = self.extract(xml, 'type')
self.artist = LockerArtist(xml.getElementsByTagName('artist')[0])
self.url = self.extract(xml, 'url')
self.image = self.extract(xml, 'image')
self.release_date = self.extract(xml, 'releaseDate')
class LockerArtist(_LockerBase):
def __init__(self, xml):
self.id = xml.getAttribute('id')
self.name = self.extract(xml, 'name')
self.url = self.extract(xml, 'url')
``` |
{
"source": "7dudtj/BOJ_myCode",
"score": 4
} |
#### File: Implementation/9095_1, 2, 3 더하기/9095_1, 2, 3 더하기.py
```python
def sumott(n):
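    # number of ways to write n as an ordered sum of 1, 2 and 3,
    # hard-coded for 1 <= n <= 10 (the tribonacci-like sequence 1, 2, 4, 7, 13, ...)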
if n == 1:
return 1
elif n == 2:
return 2
elif n == 3:
return 4
elif n == 4:
return 7
elif n == 5:
return 13
elif n == 6:
return 24
elif n == 7:
return 44
elif n == 8:
return 81
elif n == 9:
return 149
elif n == 10:
return 274
T = int(input())
for i in range(T):
a = int(input())
print(sumott(a))
``` |
{
"source": "7dudtj/Multi_Chatting_Program",
"score": 3
} |
#### File: Multi_Chatting_Program/client/client3.py
```python
import socket
import threading
import socket as sc
import sys
import PyQt5.QtWidgets
from PyQt5.QtCore import QCoreApplication
from PyQt5 import uic
import os
ui_form = uic.loadUiType("client.ui")[0]
class QtWindow(PyQt5.QtWidgets.QMainWindow, ui_form):
def __init__(self):
super().__init__()
self.setupUi(self)
self.socket = None
self.userName = None
self.isRun = False
self.allChat = ''
self.sendButton.clicked.connect(self.sendMessage)
self.accessButton.clicked.connect(self.connect)
self.exitButton.clicked.connect(self.quit)
self.sendFileButton.clicked.connect(self.sendFile)
def connect(self):
if self.isRun:
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', self.userName + '님, 이미 연결중입니다.', PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
else:
try:
ip = str(self.inputIp.toPlainText()).strip()
port = int(str(self.inputPort.toPlainText()).strip())
self.userName = self.inputName.toPlainText().strip()
self.socket = sc.socket(sc.AF_INET, sc.SOCK_STREAM)
self.socket.connect((ip, port))
self.socket.sendall(self.userName.encode(encoding='utf-8'))
self.isRun = True
thread = threading.Thread(target=self.receive)
thread.daemon = True
thread.start()
print("서버와 연결했습니다")
except socket.gaierror:
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', "잘못된 IP와 PORT입니다.", PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
except Exception as e:
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', str(e) + " " + str(e.__class__), PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
def receive(self):
try:
print('수신 thread 가동')
while True:
print('수신 대기중')
message = self.socket.recv(1024).decode()
if message == "/text":
message = self.socket.recv(1024).decode()
self.chatBox.append(str(message) + "\n")
elif message == "/file":
print("서버가 파일 보냄")
try:
nowdir = os.getcwd()
fileName = self.socket.recv(1024).decode()
print("받은 파일이름: "+str(fileName))
fileName = "To"+self.userName+"_"+fileName
data = self.socket.recv(1024).decode()
print("파일 내용: "+str(data))
with open(nowdir + "\\" + fileName, 'w') as f:
f.write(str(data))
f.close()
print("파일 저장 완료")
except FileNotFoundError:
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', '작성하신 파일명과 일치하는 파일이 존재하지 않습니다.',
PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
self.inputFileName.setPlainText("")
except Exception as e:
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', "파일 수신 실패: " + str(e),
PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
print(e.__class__)
print(e)
else:
message = self.socket.recv(1024).decode()
self.chatBox.append(str(message) + "\n")
except Exception as e:
self.isRun = False
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', str(e), PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
def sendMessage(self, e):
if self.isRun:
message = self.inputMsg.toPlainText()
            # guard against pressing the send button with no message entered
if message == "":
return
self.inputMsg.setPlainText("")
message = message.encode(encoding='utf-8')
try:
self.socket.sendall("/text".encode(encoding='utf-8'))
self.socket.sendall(message)
print("메시지 전송")
except Exception as e:
self.isRun = False
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', str(e), PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
else:
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', '먼저 서버의 IP, PORT, 그리고 사용자 이름을 입력하고 접속해주세요.', PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
self.inputMsg.setPlainText("")
def sendFile(self):
if self.isRun:
try:
nowdir = os.getcwd()
fileName = self.inputFileName.toPlainText().strip()
newfileName = "From"+self.userName+"_"+fileName
self.socket.sendall("/file".encode(encoding='utf-8'))
self.socket.sendall(newfileName.encode(encoding='utf-8'))
with open(nowdir + "\\" + fileName, 'r') as f:
data = f.read(1024)
f.close()
self.socket.sendall(data.encode(encoding="utf-8"))
print("파일 전송 완료")
except FileNotFoundError:
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', '작성하신 파일명과 일치하는 파일이 존재하지 않습니다.', PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
self.inputFileName.setPlainText("")
except Exception as e:
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', str(e), PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
print(e.__class__)
print(e)
else:
PyQt5.QtWidgets.QMessageBox.question(self, 'Message', '먼저 서버의 IP, PORT, 그리고 사용자 이름을 입력하고 접속해주세요.', PyQt5.QtWidgets.QMessageBox.Yes,
PyQt5.QtWidgets.QMessageBox.NoButton)
self.inputFileName.setPlainText("")
def quit(self):
        # notify the server that we are disconnecting
        if self.isRun:
            message = "/stop".encode(encoding='utf-8')
            self.socket.sendall(message)
        # close the connection to the server
        if self.socket:
            self.socket.close()
        # close the window
QCoreApplication.instance().quit()
sys.exit(1)
def closeEvent(self, event):
self.deleteLater()
def main():
app = PyQt5.QtWidgets.QApplication(sys.argv)
window = QtWindow()
window.setWindowTitle("채팅")
window.show()
app.exec_()
main()
```
#### File: Multi_Chatting_Program/server/server.py
```python
import socket
import threading
import os
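# Message protocol between client and server, as implemented by the handlers below:
#   - on connect, the client first sends its user name
#   - "/text" followed by a message: broadcast the message to every user
#   - "/file" followed by the file name and then the file contents: relay a file
#   - "/stop": the client announces a normal disconnect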
################################################################
class Room: # Room class
def __init__(self):
        self.chatUsers = [] # list of users currently in the chat room
        self.waitUsers = [] # list of users waiting to reconnect (waiting queue)
    def add_chatUser(self, c): # add a user to the chat room
        self.chatUsers.append(c)
    def add_waitUser(self, c): # add a user to the waiting queue
        self.waitUsers.append(c)
    def del_chatUser(self, c): # remove a user from the chat room (normal exit)
        c.soc.close()
        self.chatUsers.remove(c)
    def del_waitUser(self, c): # remove a user from the waiting queue
        c.soc.close()
        self.waitUsers.remove(c)
    def sendMsgAll(self, msg): # send a message to every user in the chat room
for i in self.chatUsers:
print(str(i.userName)+"에게 전송")
i.sendMsg(msg)
################################################################
################################################################
class chatClient: # chat user class
    def __init__(self, r, soc):
        self.room = r # the chat room (Room) object
        self.userName = None # user name
        self.soc = soc # socket for 1:1 communication with this user
def readMsg(self):
        # receive the user's nickname
self.userName = self.soc.recv(1024).decode()
print(str(self.userName) + "한테서 받은 메세지: " + str(self.userName))
        # check whether this is a reconnecting user
        reconnect = False
        for i in self.room.waitUsers: # 'i' is a user in the waiting queue
            if (i.userName == self.userName):
                reconnect = True
                self.room.del_waitUser(i) # remove from the waiting queue
        # re-entering the chat room
        if (reconnect):
            msg = self.userName + '님이 재접속했습니다.'
        # entering the chat room for the first time
else:
msg = self.userName + '님이 입장하셨습니다.'
self.room.sendMsgAll('/text')
self.room.sendMsgAll(msg)
        # main loop: receive the user's chat messages
while True:
try:
                msg = self.soc.recv(1024).decode() # read the chat sent by the user
print(str(self.userName)+"한테서 받은 메세지: "+str(msg))
                # if it is a stop message, exit the loop
if msg == '/stop':
outmember = self.userName
                    self.room.del_chatUser(self) # leave the chat room
self.room.sendMsgAll('/text')
self.room.sendMsgAll(str(outmember)+"님이 퇴장하셨습니다.")
break
                # text message received
                elif msg == '/text':
                    msg = self.soc.recv(1024).decode()
                    msg = self.userName + ': ' + msg
                    self.room.sendMsgAll('/text')
                    self.room.sendMsgAll(msg) # broadcast the message to every user
                # file received
                elif msg == '/file':
                    # receive the file from the user
                    nowdir = os.getcwd()
                    fileName = self.soc.recv(1024).decode() # receive the file name
                    data = self.soc.recv(1024).decode() # receive the file contents
print("파일 수신 완료")
with open(nowdir + "\\" + fileName, 'w') as f:
f.write(str(data))
f.close()
print("파일 저장 완료")
                    # forward the file to every user
self.room.sendMsgAll('/text')
self.room.sendMsgAll(self.userName+"님이 파일을 보냈습니다.")
with open(nowdir + "\\" + fileName, 'r') as f:
data = f.read(1024)
f.close()
self.room.sendMsgAll('/file')
self.room.sendMsgAll(fileName)
self.room.sendMsgAll(data)
print("파일 전송 완료")
os.remove(nowdir + "\\" + fileName)
print("서버측 파일 삭제 완료")
            except Exception as e: # if an error occurs
                outuserName = self.userName # name of the user who dropped
                self.room.del_chatUser(self) # remove from the chat room
                self.room.add_waitUser(self) # move to the waiting queue
self.room.sendMsgAll('/text')
self.room.sendMsgAll(str(outuserName)+"님이 접속이 끊겼습니다.")
print(str(self.userName)+": 접속이 끊겼습니다.\n"+str(e))
break
def sendMsg(self, msg):
print("\""+msg+"\"")
self.soc.sendall(msg.encode(encoding='utf-8'))
################################################################
################################################################
class ChatServer:
ip = 'localhost'
port = 8080
def __init__(self):
        self.server_soc = None # server (listening) socket
        self.room = Room() # chat room, shared with every user
def open(self):
self.server_soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server_soc.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.server_soc.bind((ChatServer.ip, ChatServer.port))
self.server_soc.listen()
def run(self):
self.open()
print('서버 시작')
        while True: # wait for incoming user connections
print('client 접속 대기중')
client_soc, addr = self.server_soc.accept()
print(addr, '접속 성공')
            c = chatClient(self.room, client_soc) # create the user object
            self.room.add_chatUser(c) # add the user to the chat room
th = threading.Thread(target=c.readMsg)
th.start()
        # the server never shuts down,
        # so the socket stays open and keeps running
################################################################
def main():
    cs = ChatServer() # create the server
    cs.run() # start the server
main()
``` |
{
"source": "7e4/chiadog",
"score": 3
} |
#### File: chiadog/src/config.py
```python
import logging
from pathlib import Path
from typing import Optional
# lib
import yaml
class Config:
def __init__(self, config_path: Path):
if not config_path.is_file():
raise ValueError(f"Invalid config.yaml path: {config_path}")
with open(config_path, "r") as config_file:
self._config = yaml.safe_load(config_file)
def _get_child_config(self, key: str, required: bool = True) -> Optional[dict]:
if key not in self._config.keys():
if required:
raise ValueError(f"Invalid config - cannot find {key} key")
else:
return None
return self._config[key]
def get_config(self):
return self._config
def get_notifier_config(self):
return self._get_child_config("notifier")
def get_chia_logs_config(self):
return self._get_child_config("chia_logs")
def get_log_level_config(self):
return self._get_child_config("log_level")
def get_keep_alive_monitor_config(self):
return self._get_child_config("keep_alive_monitor", required=False)
def get_daily_stats_config(self):
return self._get_child_config("daily_stats")
def check_keys(required_keys, config) -> bool:
for key in required_keys:
if key not in config.keys():
logging.error(f"Incompatible configuration. Missing {key} in {config}.")
return False
return True
``` |
{
"source": "7empestx/cactus",
"score": 3
} |
#### File: cactus/src/Client.py
```python
import Account as ac
class Client:
def __init__(self, account):
self.account = account
self.login_url = "https://auth.riotgames.com/api/v1/authorization"
self.login_session_body = {
"claims": "",
"acr_values": "urn:riot:bronze",
"redirect_uri": "http://localhost/redirect",
"client_id": "riot-client",
"nonce": 1,
"response_type": "token id_token",
"scope": "openid link ban lol_region",
}
self.login_token_body = {
"type": "auth",
"language": "em_US",
"remember": "false",
"region": self.account.region,
"username": self.account.username,
"password": <PASSWORD>,
}
self.purchase_info_url = ""
self.purchase_info_headers = {}
self.name_check_url = ""
self.name_check_headers = {}
self.change_name_url = ""
self.change_name_body = ""
self.change_name_referer = ""
self.change_name_headers = {}
def Build(self):
if self.account.region == "NA1":
self.purchase_info_url = "https://na.store.leagueoflegends.com/storefront/v3/history/purchase?language=en_GB"
self.name_check_url = f"https://na.store.leagueoflegends.com/storefront/v3/summonerNameChange/verify/{self.account.alias}"
self.change_name_url = "https://na.store.leagueoflegends.com/storefront/v3/summonerNameChange/purchase?language=en_GB"
self.change_name_referer = "https://na.store.leagueoflegends.com/storefront/ui/v1/app.html?language=en_GB&port=52684&clientRegion=na2&selectedItems=&page=featured&recipientSummonerId="
elif self.account.region == "EUN1":
self.purchase_info_url = "https://eun.store.leagueoflegends.com/storefront/v3/history/purchase?language=en_GB"
self.name_check_url = f"https://eun.store.leagueoflegends.com/storefront/v3/summonerNameChange/verify/{self.account.alias}"
self.change_name_url = "https://eun.store.leagueoflegends.com/storefront/v3/summonerNameChange/purchase?language=en_GB"
self.change_name_referer = (
f"https://eun.store.leagueoflegends.com/storefront/ui/v1/app.html?language=en_GB&port=52684&clientRegion="
f"{self.account.region}&selectedItems=&page=featured&recipientSummonerId="
)
elif self.account.region == "EUW1":
self.purchase_info_url = "https://euw.store.leagueoflegends.com/storefront/v3/history/purchase?language=en_GB"
self.name_check_url = f"https://euw.store.leagueoflegends.com/storefront/v3/summonerNameChange/verify/{self.account.alias}"
self.change_name_url = "https://euw.store.leagueoflegends.com/storefront/v3/summonerNameChange/purchase?language=en_GB"
self.change_name_referer = (
f"https://euw.store.leagueoflegends.com/storefront/ui/v1/app.html?language=en_GB&port=52684&clientRegion="
f"{self.account.region}&selectedItems=&page=featured&recipientSummonerId="
)
def UpdateAccountID(self):
self.change_name_body = '{"summonerName":"'
self.change_name_body += self.account.alias
self.change_name_body += '","accountId":'
self.change_name_body += self.account.account_id
self.change_name_body += ',"items":[{"inventoryType":"SUMMONER_CUSTOMIZATION","itemId":1,"ipCost":13900,"rpCost":null,"quantity":1}]}'
def UpdateAccessToken(self):
self.purchase_info_headers = {
"User-Agent": "RiotClient/18.0.0 (lol-store)",
"Accept": "application/json",
"Authorization": "Bearer " + self.account.access_token,
}
self.name_check_headers = {
"User-Agent": "RiotClient/18.0.0 (lol-store)",
"Accept": "application/json",
"Authorization": "Bearer " + self.account.access_token,
}
self.change_name_headers = {
"User-Agent": "RiotClient/18.0.0 (lol-store)",
"Accept": "application/json",
"Content-type": "application/json",
"Authorization": "Bearer " + self.account.access_token,
"Referer": self.change_name_referer,
}
``` |
{
"source": "7enTropy7/BipedalWalker",
"score": 3
} |
#### File: BipedalWalker/ARS/policy.py
```python
import numpy as np
class Policy():
def __init__(self,num_observations,num_actions,lr,num_dirs,num_dirs_best,noise):
self.theta = np.zeros((num_actions,num_observations))
self.learning_rate = lr
self.num_directions = num_dirs
self.num_best_directions = num_dirs_best
self.noise = noise
def evaluate(self,state, delta=None, direction=None):
if direction is None:
return self.theta.dot(state)
elif direction == "positive":
return (self.theta+self.noise*delta).dot(state)
else:
return (self.theta-self.noise*delta).dot(state)
def sample_deltas(self):
return [np.random.randn(*self.theta.shape) for i in range(self.num_directions)]
def update(self,rollouts,sigma_r):
step = np.zeros(self.theta.shape)
for positive_reward,negative_reward,d in rollouts:
step += (positive_reward-negative_reward)*d
self.theta += self.learning_rate/(self.num_best_directions * sigma_r)*step
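
# --- Hedged usage sketch (not part of the original repo) ---
# One Augmented Random Search update step could look like this, assuming an
# `explore(env, policy, delta, direction)` helper that runs one episode and
# returns its total reward:
#   deltas = policy.sample_deltas()
#   pos = [explore(env, policy, d, "positive") for d in deltas]
#   neg = [explore(env, policy, d, "negative") for d in deltas]
#   sigma_r = np.array(pos + neg).std()
#   best = sorted(range(len(deltas)), key=lambda k: max(pos[k], neg[k]), reverse=True)
#   rollouts = [(pos[k], neg[k], deltas[k]) for k in best[:policy.num_best_directions]]
#   policy.update(rollouts, sigma_r)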
``` |
{
"source": "7enTropy7/ravml",
"score": 3
} |
#### File: ravml/base/__init__.py
```python
class Base(object):
def __init__(self, **kwargs):
self._params = {}
self.set_params(**kwargs)
def set_params(self, **kwargs):
self._params.update(**kwargs)
@property
def params(self):
return self._params
def fit(self, X, y):
pass
def predict(self, X):
pass
```
#### File: ravml/cluster/kmeans.py
```python
import matplotlib.pyplot as plt
import ravop.core as R
from ravcom import ravdb
from ravop.core import Tensor, Scalar, square_root
class KMeans(object):
def __init__(self, **kwargs):
self.params = kwargs
self._points = None
self._label = None
self.centroids = None
self.k = None
def set_params(self, **kwargs):
self.params.update(**kwargs)
def get_params(self):
return self.params
def fit(self, X, k=3, iter=10):
self._points = R.Tensor(X, name="points")
self.k = k
self.centroids = self.initialize_centroids()
# inform_server()
self._label = self.closest_centroids(self.centroids)
self.update_centroids()
for i in range(iter):
print('iteration', i)
self.update_centroids()
self._label = self.closest_centroids(self.centroids)
# inform_server()
while self._label.status != "computed":
pass
def initialize_centroids(self):
return R.random(self._points, size=self.k)
def closest_centroids(self, centroids):
centroids = R.expand_dims(centroids, axis=1)
return R.argmin(square_root(R.sub(self._points, centroids).pow(Scalar(2)).sum(axis=2)))
def update_centroids(self):
gather = self._points.gather(R.find_indices(self._label, Tensor([0]))).mean(axis=1)
for i in range(1, self.k):
ind = R.find_indices(self._label, Tensor([i]))
gat = R.gather(self._points, ind).mean(axis=1)
gather = R.concat(gather, gat)
self.centroids = gather.reshape(shape=[self.k, len(self._points.output[0])])
# inform_server()
def plot(self):
fig, axs = plt.subplots(1)
axs.scatter(self._points.output[:, 0], self._points.output[:, 1], c=self._label.output)
# axs.scatter(self.centroids.output[:, 0], self.centroids.output[:, 1] ,'X', color="black",markersize=10)
plt.show()
@property
def Points(self):
if self._points is None:
self._points = ravdb.get_ops_by_name(op_name="points", graph_id=self.id)[0]
print(self._points.id)
if self._points.status == "computed":
return self._points.output
else:
raise Exception("Need to complete the prediction first")
@property
def label(self):
if self._label is None:
self._label = ravdb.get_ops_by_name(op_name="label", graph_id=self.id)[0]
print(self._label.id)
if self._label.status == "computed":
return self._label.output
else:
raise Exception("Need to complete the prediction first")
class MiniBatchKMeans(object):
def __init__(self, **kwargs):
self.params = kwargs
self.X = None
self._points = None
self._label = None
self.centroids = None
self.batch_size = None
self.k = None
def set_params(self, **kwargs):
self.params.update(**kwargs)
def get_params(self):
return self.params
def initialize_centroids(self):
cen = R.random(self._points, size=self.k)
while cen.status != 'computed':
pass
return cen
def Mini_batch(self, points, batch_size):
mb = R.random(points, size=batch_size)
return mb
def closest_centroids(self, points, centroids):
centroids = R.expand_dims(centroids, axis=1)
return R.argmin(R.square_root(R.sum(R.square(R.sub(points, centroids)), axis=2)))
def update_centroids(self, points, label):
while label.status != 'computed':
pass
if 0 in label.output:
gather = R.gather(points, R.find_indices(label, Tensor([0]))).mean(axis=1)
else:
gather = R.gather(self.centroids, Tensor([0])).expand_dims(axis=0)
for i in range(1, self.k):
if i in label.output:
ind = R.find_indices(label, Tensor([i]))
gat = R.gather(points, ind).mean(axis=1)
else:
gat = R.gather(self.centroids, Tensor([i])).expand_dims(axis=0)
gather = R.concat(gather, gat)
while gat.status != 'computed':
pass
return gather.reshape(shape=[self.k, len(self._points.output[0])])
def fit(self, X, k, iter=5, batch_size=None):
# inform_server()
self._points = Tensor(X)
self.k = k
self.iter = iter
self.batch_size = batch_size
self.centroids = self.initialize_centroids()
# self._label=self.closest_centroids(self.points,self.centroids)
points = self.Mini_batch(self._points, batch_size=batch_size)
_label = self.closest_centroids(points, self.centroids)
print(3)
self.centroids = self.update_centroids(points, _label)
# inform_server()
for i in range(iter):
print('iteration', i)
points = self.Mini_batch(self._points, batch_size=self.batch_size)
_label = self.closest_centroids(points, self.centroids)
self.centroids = self.update_centroids(points, _label)
# inform_server()
self._label = self.closest_centroids(self._points, self.centroids)
while self._label.status != "computed":
pass
return self._label
def plot(self):
fig, axs = plt.subplots(1)
axs.scatter(self._points.output[:, 0], self._points.output[:, 1], c=self._label.output)
# axs.scatter(self.centroids.output[:, 0], self.centroids.output[:, 1] ,'X', color="black",markersize=10)
plt.show()
@property
def points(self):
if self._points is None:
self._points = ravdb.get_ops_by_name(op_name="points", graph_id=self.id)[0]
print(self._points.id)
if self._points.status == "computed":
return self._points.output
else:
raise Exception("Need to complete the prediction first")
@property
def label(self):
if self._label is None:
self._label = ravdb.get_ops_by_name(op_name="label", graph_id=self.id)[0]
print(self._label.id)
if self._label.status == "computed":
return self._label.output
else:
raise Exception("Need to complete the prediction first")
```
#### File: ravml/linear/linear_regression.py
```python
import matplotlib.pyplot as plt
import ravop.core as R
from ravcom import inform_server
inform_server()
class LinearRegression():
def __init__(self,x_points,y_points,theta):
self.raw_X = x_points
self.raw_y = y_points
self.m = R.Scalar(self.raw_y.shape[0])
self.X = R.Tensor(self.raw_X.tolist())
self.y = R.Tensor(self.raw_y.tolist())
self.theta = R.Tensor(theta.tolist())
def compute_cost(self):
residual = self.X.dot(self.theta).sub(self.y)
while residual.status != 'computed':
pass
return (R.Scalar(1).div(R.Scalar(2).multiply(self.m))).multiply(residual.dot(residual.transpose()))
def gradient_descent(self, alpha, num_iters):
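        # Vectorized batch gradient descent update:
        #   theta := theta - (alpha / m) * X^T (X @ theta - y)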
alpha_ = R.Scalar(alpha)
for e in range(num_iters):
residual = self.X.dot(self.theta).sub(self.y)
while residual.status != 'computed':
pass
temp = self.theta.sub((alpha_.div(self.m)).multiply(self.X.transpose().dot(residual)))
while temp.status!='computed':
pass
self.theta = temp
print('Iteration : ',e)
op_theta = self.theta()
print('Theta found by gradient descent: intercept={0}, slope={1}'.format(op_theta[0],op_theta[1]))
return self.theta
def plot_graph(self,optimal_theta):
optimal_theta = optimal_theta()
fig, ax = plt.subplots()
ax.plot(self.raw_X[:,1], self.raw_y[:,0], 'o', label='Raw Data')
ax.plot(self.raw_X[:,1], self.raw_X.dot(optimal_theta), linestyle='-', label='Linear Regression')
plt.ylabel('Profit')
plt.xlabel('Population of City')
legend = ax.legend(loc='upper center', shadow=True)
plt.show()
``` |
{
"source": "7enTropy7/ravop",
"score": 3
} |
#### File: ravop/core/ftp_client.py
```python
from ftplib import FTP
from ..config import FTP_SERVER_URL
from ..globals import globals as g
class FTPClient:
def __init__(self, host, user, passwd):
self.ftp = FTP(host)
# self.ftp.set_debuglevel(2)
self.ftp.set_pasv(True)
self.ftp.login(user, passwd)
def download(self, filename, path):
print('Downloading')
self.ftp.retrbinary('RETR ' + path, open(filename, 'wb').write)
print("Downloaded")
def upload(self, filename, path):
self.ftp.storbinary('STOR ' + path, open(filename, 'rb'), blocksize=g.ftp_upload_blocksize)
def list_server_files(self):
self.ftp.retrlines('LIST')
def close(self):
self.ftp.quit()
def get_client(username, password):
print("FTP User credentials:", FTP_SERVER_URL, username, password)
return FTPClient(host=FTP_SERVER_URL, user=username, passwd=password)
def check_credentials(username, password):
try:
FTPClient(host=FTP_SERVER_URL, user=username, passwd=password)
return True
except Exception as e:
print("Error:{}".format(str(e)))
return False
``` |
{
"source": "7enTropy7/Rummy_RL",
"score": 2
} |
#### File: Federated Rummy/Server/socketio_server.py
```python
import socketio
from aiohttp import web
import pickle
import pydealer
import sys
from utils import aggregate_models
sio = socketio.AsyncServer(cors_allowed_origins="*", async_mode='aiohttp', async_handlers=True)
app = web.Application()
sio.attach(app)
@sio.event
async def connect(sid, environ):
print('Connected: ', sid)
@sio.event
async def disconnect(sid):
print('Disconnected: ', sid)
server_status = 'standby'
client_statuses = {'client_A': 'standby', 'client_B': 'standby', 'client_C': 'standby'}
current_player = 'client_A'
ftp_client_statuses = {'client_A': 'standby', 'client_B': 'standby', 'client_C': 'standby'}
ftp_server_status = 'standby'
hands = {'client_A': None, 'client_B': None, 'client_C': None}
deck = pydealer.Deck()
deck.shuffle()
table_top_card = deck.deal(1)[0]
deck_top_card = None
global_done = False
game_number = 1
@sio.on('client_status')
async def client_status(sid, data):
global server_status, client_statuses, hands, deck, current_player, table_top_card, deck_top_card, global_done, game_number, ftp_client_statuses, ftp_server_status
client_statuses[data['client_name']] = data['client_status']
ftp_client_statuses[data['client_name']] = data['ftp_client_status']
has_played = data['has_played']
global_done = data['global_done']
flag_deck_top_card = data['flag_deck_top_card']
if deck.size == 0:
global_done = False
print('------------------ Restart Game ------------------' + ' Game Count: ' + str(game_number))
game_number += 1
deck = pydealer.Deck()
deck.shuffle()
table_top_card = deck.deal(1)[0]
deck_top_card = None
hands = {'client_A': None, 'client_B': None, 'client_C': None}
client_statuses = {'client_A': 'standby', 'client_B': 'standby', 'client_C': 'standby'}
current_player = 'client_A'
has_played = False
server_status = 'standby'
if FTP_check_for_uploaded_readiness(ftp_client_statuses) and ftp_server_status != 'aggregated':
aggregate_models()
ftp_server_status = 'aggregated'
if global_done:
server_status = 'GAME OVER'
print('\n\n@@@@@@@@@@@@@@@@@@@@@@@ WINNER @@@@@@@@@@@@@@@@@@@@@')
print(data['client_name'],' has won the GAME!')
print('@@@@@@@@@@@@@@@@@@@@@@@ WINNER @@@@@@@@@@@@@@@@@@@@@\n\n')
sys.exit(0)
if hands[data['client_name']] is None and (FTP_check_for_downloaded_readiness(ftp_client_statuses) or game_number==1):
cards = deck.deal(10)
hand = pydealer.Stack()
hand.add(cards)
hands[data['client_name']] = hand
ftp_server_status = 'standby'
await sio.emit('set_hand', {'hand':pickle.dumps(hands[data['client_name']])}, room=sid)
if check_for_readiness(client_statuses) and server_status != 'GAME OVER':
server_status = 'ready'
deck_top_card = deck[deck.size-1]
if server_status == 'ready' and has_played:
table_top_card = pickle.loads(data['table_top_card'])
if flag_deck_top_card:
temp = deck.deal(1)
if deck.size > 0:
deck_top_card = deck[deck.size-1]
else:
deck_top_card = None
print(str(data['client_name']) + ' has played ' + str(table_top_card) + ' Deck Size: ' + str(deck.size))
if current_player == 'client_A':
current_player = 'client_B'
elif current_player == 'client_B':
current_player = 'client_C'
elif current_player == 'client_C':
current_player = 'client_A'
return {'server_status': server_status,
'current_player' : current_player,
'table_top_card': pickle.dumps(table_top_card),
'deck_top_card': pickle.dumps(deck_top_card),
'global_done':global_done,
'game_number':game_number,
'ftp_server_status':ftp_server_status}
def FTP_check_for_downloaded_readiness(ftp_client_statuses):
for ftp_client_status in ftp_client_statuses.values():
if ftp_client_status != 'downloaded':
return False
return True
def FTP_check_for_uploaded_readiness(ftp_client_statuses):
for ftp_client_status in ftp_client_statuses.values():
if ftp_client_status != 'uploaded':
return False
return True
def check_for_readiness(client_statuses):
for client_status in client_statuses.values():
if client_status != 'ready':
return False
return True
``` |
{
"source": "7eRoM/Ftring",
"score": 2
} |
#### File: Ftring/pckg/AFlag.py
```python
import os
import sys
import binascii
import string
import random
from random import randint
from random import shuffle
from random import choice
import argparse
REGISTERS_TYPE = ["REGISTERS_32", "REGISTERS_16", "REGISTERS_8h", "REGISTERS_8l"]
REGISTERS_32 = ["ebx", "ecx", "edx", "esi", "edi"]
REGISTERS_16 = ["bx", "cx", "dx", "si", "di"]
REGISTERS_8h = ["bh", "ch", "dh"]
REGISTERS_8l = ["bl", "cl", "dl"]
def set_flag(method=-1):
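    # Each template below sets the x86 Auxiliary Carry Flag (AF): the operands are
    # generated so that their low nibbles produce a carry out of bit 3 (ADD/INC)
    # or a borrow into bit 3 (SUB/DEC).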
set_flag_a = [
"""mov $TMP_REG$, $VAL1$
add $TMP_REG$, $VAL2$""",
"""mov $TMP_REG$, $VAL1$
sub $TMP_REG$, $VAL2$""",
"""mov $TMP_REG$, $VAL1$
inc $TMP_REG$""",
"""mov $TMP_REG$, $VAL1$
dec $TMP_REG$"""]
if method == -1:
method = randint(0, len(set_flag_a)-1)
tmp_set_flag = set_flag_a[method]
if method == 0:
sel_reg_type = REGISTERS_TYPE[randint(0, len(REGISTERS_TYPE)-1)]
sel_reg = globals()[sel_reg_type][randint(0, len(globals()[sel_reg_type])-1)]
tmp_set_flag = tmp_set_flag.replace("$TMP_REG$", sel_reg)
low_nibble_val1 = bin(randint(1, 15))[2:].zfill(4)
low_nibble_val2 = bin(randint(16-int(low_nibble_val1, 2), 15))[2:].zfill(4)
if sel_reg_type == "REGISTERS_32":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
elif sel_reg_type == "REGISTERS_16":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
elif sel_reg_type == "REGISTERS_8h":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
else:
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
VAL1 = int(rest_nibble_val1 + low_nibble_val1, 2)
VAL2 = int(rest_nibble_val2 + low_nibble_val2, 2)
tmp_set_flag = tmp_set_flag.replace("$VAL1$", str(VAL1))
tmp_set_flag = tmp_set_flag.replace("$VAL2$", str(VAL2))
if method == 1:
sel_reg_type = REGISTERS_TYPE[randint(0, len(REGISTERS_TYPE)-1)]
sel_reg = globals()[sel_reg_type][randint(0, len(globals()[sel_reg_type])-1)]
tmp_set_flag = tmp_set_flag.replace("$TMP_REG$", sel_reg)
low_nibble_val1 = bin(randint(0, 14))[2:].zfill(4)
low_nibble_val2 = bin(randint(int(low_nibble_val1, 2)+1, 15))[2:].zfill(4)
if sel_reg_type == "REGISTERS_32":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
elif sel_reg_type == "REGISTERS_16":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
elif sel_reg_type == "REGISTERS_8h":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
else:
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
VAL1 = int(rest_nibble_val1 + low_nibble_val1, 2)
VAL2 = int(rest_nibble_val2 + low_nibble_val2, 2)
tmp_set_flag = tmp_set_flag.replace("$VAL1$", str(VAL1))
tmp_set_flag = tmp_set_flag.replace("$VAL2$", str(VAL2))
if method == 2:
sel_reg_type = REGISTERS_TYPE[randint(0, len(REGISTERS_TYPE)-1)]
sel_reg = globals()[sel_reg_type][randint(0, len(globals()[sel_reg_type])-1)]
tmp_set_flag = tmp_set_flag.replace("$TMP_REG$", sel_reg)
low_nibble_val1 = bin(15)[2:].zfill(4)
if sel_reg_type == "REGISTERS_32":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
elif sel_reg_type == "REGISTERS_16":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
elif sel_reg_type == "REGISTERS_8h":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
else:
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
VAL1 = int(rest_nibble_val1 + low_nibble_val1, 2)
tmp_set_flag = tmp_set_flag.replace("$VAL1$", str(VAL1))
if method == 3:
sel_reg_type = REGISTERS_TYPE[randint(0, len(REGISTERS_TYPE)-1)]
sel_reg = globals()[sel_reg_type][randint(0, len(globals()[sel_reg_type])-1)]
tmp_set_flag = tmp_set_flag.replace("$TMP_REG$", sel_reg)
low_nibble_val1 = bin(0)[2:].zfill(4)
if sel_reg_type == "REGISTERS_32":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
elif sel_reg_type == "REGISTERS_16":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
elif sel_reg_type == "REGISTERS_8h":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
else:
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
VAL1 = int(rest_nibble_val1 + low_nibble_val1, 2)
tmp_set_flag = tmp_set_flag.replace("$VAL1$", str(VAL1))
return tmp_set_flag
def clear_flag(method=-1):
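    # Mirror of set_flag: operands are generated so that no carry or borrow
    # crosses bit 3 of the low nibble, leaving the Auxiliary Carry Flag cleared.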
clear_flag_a = [
"""mov $TMP_REG$, $VAL1$
add $TMP_REG$, $VAL2$""",
"""mov $TMP_REG$, $VAL1$
sub $TMP_REG$, $VAL2$""",
"""mov $TMP_REG$, $VAL1$
inc $TMP_REG$""",
"""mov $TMP_REG$, $VAL1$
dec $TMP_REG$"""]
if method == -1:
method = randint(0, len(clear_flag_a)-1)
tmp_clear_flag = clear_flag_a[method]
if method == 0:
sel_reg_type = REGISTERS_TYPE[randint(0, len(REGISTERS_TYPE)-1)]
sel_reg = globals()[sel_reg_type][randint(0, len(globals()[sel_reg_type])-1)]
tmp_clear_flag = tmp_clear_flag.replace("$TMP_REG$", sel_reg)
low_nibble_val1 = bin(randint(0, 15))[2:].zfill(4)
low_nibble_val2 = bin(randint(0, 15-int(low_nibble_val1, 2)))[2:].zfill(4)
if sel_reg_type == "REGISTERS_32":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
elif sel_reg_type == "REGISTERS_16":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
elif sel_reg_type == "REGISTERS_8h":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
else:
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
VAL1 = int(rest_nibble_val1 + low_nibble_val1, 2)
VAL2 = int(rest_nibble_val2 + low_nibble_val2, 2)
tmp_clear_flag = tmp_clear_flag.replace("$VAL1$", str(VAL1))
tmp_clear_flag = tmp_clear_flag.replace("$VAL2$", str(VAL2))
if method == 1:
sel_reg_type = REGISTERS_TYPE[randint(0, len(REGISTERS_TYPE)-1)]
sel_reg = globals()[sel_reg_type][randint(0, len(globals()[sel_reg_type])-1)]
tmp_clear_flag = tmp_clear_flag.replace("$TMP_REG$", sel_reg)
low_nibble_val1 = bin(randint(0, 15))[2:].zfill(4)
low_nibble_val2 = bin(randint(0, int(low_nibble_val1, 2)))[2:].zfill(4)
if sel_reg_type == "REGISTERS_32":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
elif sel_reg_type == "REGISTERS_16":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
elif sel_reg_type == "REGISTERS_8h":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
else:
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
rest_nibble_val2 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
VAL1 = int(rest_nibble_val1 + low_nibble_val1, 2)
VAL2 = int(rest_nibble_val2 + low_nibble_val2, 2)
tmp_clear_flag = tmp_clear_flag.replace("$VAL1$", str(VAL1))
tmp_clear_flag = tmp_clear_flag.replace("$VAL2$", str(VAL2))
if method == 2:
sel_reg_type = REGISTERS_TYPE[randint(0, len(REGISTERS_TYPE)-1)]
sel_reg = globals()[sel_reg_type][randint(0, len(globals()[sel_reg_type])-1)]
tmp_clear_flag = tmp_clear_flag.replace("$TMP_REG$", sel_reg)
low_nibble_val1 = bin(randint(0, 14))[2:].zfill(4)
if sel_reg_type == "REGISTERS_32":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
elif sel_reg_type == "REGISTERS_16":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
elif sel_reg_type == "REGISTERS_8h":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
else:
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
VAL1 = int(rest_nibble_val1 + low_nibble_val1, 2)
tmp_clear_flag = tmp_clear_flag.replace("$VAL1$", str(VAL1))
if method == 3:
sel_reg_type = REGISTERS_TYPE[randint(0, len(REGISTERS_TYPE)-1)]
sel_reg = globals()[sel_reg_type][randint(0, len(globals()[sel_reg_type])-1)]
tmp_clear_flag = tmp_clear_flag.replace("$TMP_REG$", sel_reg)
low_nibble_val1 = bin(randint(1, 15))[2:].zfill(4)
if sel_reg_type == "REGISTERS_32":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 29, 1)])
elif sel_reg_type == "REGISTERS_16":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 13, 1)])
elif sel_reg_type == "REGISTERS_8h":
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
else:
rest_nibble_val1 = ''.join([str(randint(0, 1)) for i in range(1, 5, 1)])
VAL1 = int(rest_nibble_val1 + low_nibble_val1, 2)
tmp_clear_flag = tmp_clear_flag.replace("$VAL1$", str(VAL1))
return tmp_clear_flag
def mov_flag_to_reg(reg="-1", method=-1):
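    # LAHF copies the low byte of FLAGS into AH; bit 4 (0x10) of that byte is AF.
    # The generated snippet flips the low bit of AL only when AF is set, so AL's
    # bit 0 ends up mirroring AF whenever AL starts out at 0.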
mov_flag_to_reg_a = [
"""lahf
test ah, 10h
jz $LABEL$
xor al, 1
$LABEL$:"""]
if method == -1:
method = randint(0, len(mov_flag_to_reg_a)-1)
tmp_mov_flag_to_reg = mov_flag_to_reg_a[method].replace("$LABEL$", ''.join(random.choices(string.ascii_uppercase + string.ascii_lowercase, k=8)))
return tmp_mov_flag_to_reg
``` |
{
"source": "7erra/infEInf-assembler",
"score": 4
} |
#### File: infEInf-assembler/src/assembler.py
```python
import re
class TargetError(Exception):
""" Exception raised when the target of a command is not an integer.
Attributes:
    expression -- input expression in which the error occurred
message -- explanation of the error
"""
def __init__(self, expression, message):
super().__init__(message, expression)
class InfiniteLoopError(Exception):
""" Exception raised when the program takes too long."""
def __init__(self, maxIter):
super().__init__(f"Infinite loop, stopped program after {maxIter} steps!")
class Statement:
"""An executable line from the program.
Keyword arguments:
program -- The program that this Statement is part of
command -- The command. One of:
"TST" - Increase counter by two instead of one when the value of target is 0
"INC" - Increase the value of target by one
"DEC" - Decrease the value of target by one
"JMP" - Set the value of the counter to the target value
"HLT" - Stops the program
target: The target can be a register or the counter, depending on the command
"""
regex_command = re.compile(r"(?P<cmd>HLT|TST|INC|DEC|JMP)(?:\s+(?P<target>\d+))?")
def __init__(self, program, string, position):
self.program = program
self.position = position
self.compile(string)
def compile(self, string):
"""Converts a line into an assembler statement"""
match = self.regex_command.match(string)
self.command = match["cmd"]
target = match["target"]
try:
target = int(target)
except TypeError as exception:
if self.command != "HLT":
raise TargetError(
f"[LINE {self.position}] {string.strip()}", "Target must be an integer!"
) from exception
self.target = target
def execute(self):
"""Execute this statement"""
if self.command == "INC":
self.program.register[self.target] += 1
elif self.command == "DEC":
self.program.register[self.target] -= 1
elif self.command == "TST":
if self.program.register[self.target] == 0:
self.program.counter += 1
elif self.command == "JMP":
self.program.counter = self.target - 1
return
elif self.command == "HLT":
pass
self.program.counter += 1
class Program:
"""Initializes an Assembler program.
Arguments:
file -- The file that contains the program
"""
regex_command = re.compile(r"(?P<cmd>HLT|TST|INC|DEC|JMP)(?:\s+(?P<target>\d+))?")
counter = 0
register = {}
def __init__(self, file):
self.compile(file)
def compile(self, file):
"""Converts the file into a list of statements"""
self.statements = []
with open(file, encoding="utf-8") as code_file:
self.statements = tuple(Statement(self, s, i)
for i, s
in enumerate(code_file.readlines()))
def run(self, register=None, verbose=False, step_by_step=False):
"""Run the program.
The register is a dict() containing all values for the register, eg:
{100: 0, 101: 5, 102: 0}
Enabling verbose will print information each step in the following
format:
Command: DEC , Target: 100 , Step: 2
Counter Register
3 {100: 5}
4 {100: 4}
The first line contains the command that is executed, what the command
is targeting (can be a register or the command counter) and how many
times any command was executed. After this information follows a table
with the first column "Counter" which contains the command counter and a
a second column with the state of the register. The first line of the
table represents the state before the execution of the command while the
second line is the state after the execution.
Arguments:
register -- The starting register. If no register is given it will be asked from the user.
verbose -- Print more information each step (default False)
step_by_step -- Ask for input after each step, also enables verbose (default False)
"""
if not register:
register = {}
verbose = verbose or step_by_step
self.counter = 0
for statement in self.statements:
# Analyze the commands and ask for missing values
if statement.command in {"TST", "INC", "DEC"}:
while not statement.target in register:
value = input(f"Value for register {statement.target}: ")
if not value.isdigit():
print("INVALID: Only integers are allowed!")
continue
register[statement.target] = int(value)
self.register = register
run = 0
max_runs = 1000
while True:
current = self.statements[self.counter]
if verbose:
print("Command:", current.command, ", Target:", current.target, ", Step:", run)
print("Counter", "Register")
print(f"{self.counter:7} {self.register}")
current.execute()
if current.command == "HLT":
break
if verbose:
print(f"{self.counter:7} {self.register}\n")
run += 1
if run >= max_runs:
raise InfiniteLoopError(run)
if step_by_step:
input()
print("Final register:", self.register)
return self.register
```
#### File: infEInf-assembler/test/test_all.py
```python
import unittest
from src import assembler
class TestAssembler(unittest.TestCase):
"""Collection of test cases"""
def test_0(self):
"""Test a program that always ends in 0 for a register"""
self.assertEqual(assembler.Program("test/test_0.txt").run({100: 5}), {100: 0})
def test_move(self):
"""Move a value from one register to another"""
prog = assembler.Program("test/test_move.txt")
self.assertEqual(prog.run({100: 0, 101: 5}), {100: 5, 101: 0})
def test_copy(self):
"""Copy a value from one register to another"""
prog = assembler.Program("test/test_copy.txt")
self.assertEqual(prog.run({100: 0, 101: 5, 102: 0}), {100: 5, 101: 5, 102: 0})
def test_multiply(self):
"""Multiply two values (registeres 100, 101) and write the result in register 102"""
prog = assembler.Program("test/test_multiply.txt")
self.assertEqual(prog.run({100: 2, 101: 3, 102: 0, 103: 0})[102], 6)
self.assertEqual(prog.run({100: 5, 101: 6, 102: 0, 103: 0})[102], 30)
def test_error_index(self):
"""Program that tires to access a command counter beyond the last line"""
with self.assertRaises(IndexError):
assembler.Program("test/test_error_index.txt").run()
def test_error_target(self):
"""Argument (target) is not an integer"""
with self.assertRaises(assembler.TargetError):
assembler.Program("test/test_error_target.txt").run()
def test_error_infinite(self):
"""Loop infinitely"""
with self.assertRaises(assembler.InfiniteLoopError):
assembler.Program("test/test_error_infinite.txt").run()
if __name__ == "__main__":
unittest.main()
``` |
{
"source": "7essa/Image-Classifier-with-Deep-Learning",
"score": 3
} |
#### File: 7essa/Image-Classifier-with-Deep-Learning/predict (1).py
```python
import argparse
import tensorflow as tf
import tensorflow_hub as hub
import numpy as np
from PIL import Image
import json
# defining arguments for parser object:
pars_app = argparse.ArgumentParser()
pars_app.add_argument('--user_img', default='./test_images/cautleya_spicata.jpg')
pars_app.add_argument('--my_model',default='bestt_model.h5')
pars_app.add_argument('--topk',default=5)
pars_app.add_argument('--classes', default='label_map.json')
# values of the arguments
arguments = pars_app.parse_args()
image = arguments.user_img
my_model = arguments.my_model
TK = arguments.topk
label = arguments.classes
#reading json file and loading the model:
with open(label, 'r')as f:
class_names = json.load(f)
model = tf.keras.models.load_model(my_model,
custom_objects={'KerasLayer':hub.KerasLayer})
# defining functions for predict :
# 1- reshape the image to the input shape the model expects:
def process_image(image):
user_image = tf.convert_to_tensor(image)
user_image = tf.image.resize(user_image,(224,224))
user_image /= 255
user_image = user_image.numpy()#.squeeze()
return user_image
# 2- make the prediction :
def predict(image ,model,TK ):
im = Image.open(image)
to_pred_image = np.asarray(im)
to_pred_image = process_image(to_pred_image)
to_pred_image = np.expand_dims(to_pred_image,axis=0)
ps = model.predict(to_pred_image)
ps = ps.tolist()
# to get the topK values and classes:
values , indices = tf.math.top_k(ps,k=TK)
probs = values.numpy().tolist()[0]
classes = indices.numpy().tolist()[0]
    # convert integer class labels to actual flower names:
    labels = [class_names[str(clas + 1)] for clas in classes]
    print('The probabilities for this image are:', probs)
    print('\nThe classes for each probability are:', labels)
#return probs , classes
if __name__ == '__main__':
predict(image ,model,TK)
``` |
{
"source": "7eta/udk_labeler",
"score": 2
} |
#### File: engine/trainer/train.py
```python
import os
import argparse
import numpy as np
import torch
import torch.optim as optim
from engine.dataloader import get_dataloader
from engine.retinanet import model
from engine.retinanet import coco_eval
from engine.log.saver import Saver
from tqdm import tqdm
from collections import deque
from engine.log import logger, summarise
assert torch.__version__.split('.')[0] == '1'
print('CUDA available: {}'.format(torch.cuda.is_available()))
class Trainer(object):
def __init__(self, config, img_dir, coco_json):
self.config = config
# Define Saver
self.saver = Saver(self.config)
# Define Tensorboard
if self.config.tensorboard:
self.summary = summarise.TensorboardSummary(self.saver.directory)
self.writer = self.summary.create_summary()
# Define Logger
self.getlogger = logger.get_logger(self.saver.directory)
self.logger = self.getlogger
# Define DataLoader
self.train_loader, self.n_train_img,\
self.val_set, self.val_loader, self.n_val_img, self.n_classes = get_dataloader(self.config, img_dir, coco_json)
# Define Network
if self.config.depth == 18:
self.retinanet = model.resnet18(num_classes=self.n_classes, pretrained=True)
elif self.config.depth == 34:
self.retinanet = model.resnet34(num_classes=self.n_classes, pretrained=True)
elif self.config.depth == 50:
self.retinanet = model.resnet50(num_classes=self.n_classes, pretrained=True)
elif self.config.depth == 101:
self.retinanet = model.resnet101(num_classes=self.n_classes, pretrained=True)
elif self.config.depth == 152:
self.retinanet = model.resnet152(num_classes=self.n_classes, pretrained=True)
else:
raise ValueError('Unsupported model depth, must be one of 18, 34, 50, 101, 152')
# Define Optimizer
self.optimizer = optim.Adam(self.retinanet.parameters(), lr=self.config.lr)
# Define lr_schduler
self.scheduler = optim.lr_scheduler.ReduceLROnPlateau(self.optimizer, patience=3, verbose=True)
# Define loss
self.loss_hist = deque(maxlen=500)
# Define cuda
if torch.cuda.is_available():
self.retinanet = torch.nn.DataParallel(self.retinanet).cuda()
else:
raise ValueError('=> Cuda is not available. Check cuda')
# Define resume
self.best_f1_score = .0
if self.config.resume is not None:
self.retinanet = torch.load(self.config.resume)
self.retinanet.cuda()
# check model summary
# summary(self.retinanet, (3, 512, 512))
def train(self, epoch):
self.retinanet.train()
self.retinanet.module.freeze_bn()
epoch_loss = []
print(f'Num training images: {self.n_train_img}')
with tqdm(self.train_loader) as tbar:
for iter_num, data in enumerate(tbar):
self.optimizer.zero_grad()
img = data['img'].cuda().float()
annot = data['annot']
cls_loss, reg_loss = self.retinanet([img, annot])
cls_loss = cls_loss.mean()
reg_loss = reg_loss.mean()
loss = cls_loss + reg_loss
epoch_loss.append(float(loss))
self.loss_hist.append(float(loss))
if bool(loss == 0):
continue
loss.backward()
torch.nn.utils.clip_grad_norm_(self.retinanet.parameters(), 0.1)
self.optimizer.step()
if self.config.tensorboard:
self.writer.add_scalar('Train_Loss/classification_loss',
cls_loss,
iter_num + epoch*(len(self.train_loader)))
self.writer.add_scalar('Train_Loss/regression_loss',
reg_loss,
iter_num + epoch*(len(self.train_loader)))
self.writer.add_scalar('Train_Loss/total_loss',
np.mean(self.loss_hist),
iter_num + epoch*(len(self.train_loader)))
tbar.set_description(f'Epoch: {epoch} | '
f'Cls loss: {cls_loss:1.5f} | '
f'Reg loss: {reg_loss:1.5f} | '
f'Running loss: {np.mean(self.loss_hist):1.5f}')
del cls_loss, reg_loss
self.scheduler.step(np.mean(epoch_loss))
def validation(self, epoch):
print('Evaluating dataset')
stats = coco_eval.evaluate_coco(self.val_set, self.retinanet, self.saver.directory)
if stats is None:
return
        # stats holds 12 values (indices 0-11):
        # 0: mAP / 1: mAP@.5 / 2: mAP@.75 / 3: AP small / 4: AP medium / 5: AP large /
        # 6: AR maxDet=1 / 7: AR maxDet=10 / 8: AR maxDet=100 / 9: AR small / 10: AR medium / 11: AR large
if self.config.tensorboard:
self.writer.add_scalar('Precision/mAP', stats[0], epoch)
self.writer.add_scalar('Precision/mAP@50IOU', stats[1], epoch)
self.writer.add_scalar('Precision/mAP@75IOU', stats[2], epoch)
self.writer.add_scalar('Precision/mAP(samll)', stats[3], epoch)
self.writer.add_scalar('Precision/mAP(medium)', stats[4], epoch)
self.writer.add_scalar('Precision/mAP(large)', stats[5], epoch)
self.writer.add_scalar('Recall/AR@1', stats[6], epoch)
self.writer.add_scalar('Recall/AR@10', stats[7], epoch)
self.writer.add_scalar('Recall/AR@100', stats[8], epoch)
self.writer.add_scalar('Recall/AR@100(small)', stats[9], epoch)
self.writer.add_scalar('Recall/AR@100(medium)', stats[10], epoch)
self.writer.add_scalar('Recall/AR@100(large)', stats[11], epoch)
mAP, AR = stats[0], stats[8]
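        # Checkpoints are ranked by the harmonic mean (F1) of COCO mAP and AR@100.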
f1_score = 2 * (mAP * AR) / (mAP + AR)
if f1_score > self.best_f1_score:
self.best_f1_score = f1_score
self.saver.save_checkpoint(self.retinanet.module, f1_score)
```
#### File: engine/utils/confusion_metric.py
```python
import pandas as pd
import os
import numpy as np
import xml.etree.ElementTree as ET
from collections import defaultdict
def iou(pred_box, true_box):
p_xmin, p_ymin, p_xmax, p_ymax = pred_box[0], pred_box[1], pred_box[2], pred_box[3]
t_xmin, t_ymin, t_xmax, t_ymax = true_box[0], true_box[1], true_box[2], true_box[3]
inter_xmin, inter_ymin = max(p_xmin, t_xmin), max(p_ymin, t_ymin)
inter_xmax, inter_ymax = min(p_xmax, t_xmax), min(p_ymax, t_ymax)
inter_area = np.maximum(inter_xmax - inter_xmin + 1, 0) * np.maximum(inter_ymax - inter_ymin + 1, 0)
pred_area = (p_xmax - p_xmin + 1) * (p_ymax - p_ymin + 1)
true_area = (t_xmax - t_xmin + 1) * (t_ymax - t_ymin + 1)
union_area = true_area + pred_area - inter_area
iou = inter_area / union_area
return iou
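# Quick sanity check with hypothetical boxes: iou([0, 0, 10, 10], [5, 5, 15, 15])
# -> intersection = 6 * 6 = 36, union = 121 + 121 - 36 = 206, IoU ≈ 0.17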
def load_xml(anno_path):
tree = ET.parse(anno_path)
root = tree.getroot()
target = defaultdict(list)
for obj in root.findall('object'):
name = obj.find('name').text
xmin = int(obj.find('bndbox').find('xmin').text)
ymin = int(obj.find('bndbox').find('ymin').text)
xmax = int(obj.find('bndbox').find('xmax').text)
ymax = int(obj.find('bndbox').find('ymax').text)
target[name].append([xmin, ymin, xmax, ymax])
return target
def confusion_metric(true_xml_dir, pred_xml_path, classes=['NG', 'OK']):
_metric = pd.DataFrame(0, index=classes, columns=classes)
xml_name = os.path.basename(pred_xml_path)
true_xml_path = os.path.join(true_xml_dir, xml_name)
p = load_xml(pred_xml_path)
print(pred_xml_path)
if p.get('SOOT') is None:
if not os.path.exists(true_xml_path):
            _metric.iloc[1, 1] += 1  # correctly judged OK
else:
t = load_xml(true_xml_path)
if t.get('SOOT') is None:
                _metric.iloc[1, 1] += 1  # correctly judged OK
            else:
                _metric.iloc[0, 1] += 1  # missed defect (type II error)
else:
if not os.path.exists(true_xml_path):
            _metric.iloc[1, 0] += 1  # false alarm (type I error)
else:
t = load_xml(true_xml_path)
if t.get('SOOT') is None:
                _metric.iloc[1, 0] += 1  # false alarm (Type I error)
else:
p_bboxes = p.get('SOOT')
t_bboxes = t.get('SOOT')
ious = []
for t_bbox in t_bboxes:
for p_bbox in p_bboxes:
ious.append(iou(p_bbox, t_bbox))
cnt = 0
for i in ious:
if i >= 0.5:
                        _metric.iloc[0, 0] += 1  # defect correctly detected (NG)
break
else:
cnt += 1
if cnt == len(ious):
                    _metric.iloc[0, 1] += 1  # missed defect (Type II error)
return _metric
class ConfusionMetric:
def __init__(self, classes=['NG', 'OK']):
self._classes = classes
self._metric = pd.DataFrame(0, index=self._classes, columns=self._classes)
def reset(self):
        for col in self._metric.columns:
self._metric[col].values[:] = 0
def update(self, value):
self._metric += value
def result(self):
return self._metric
if __name__ == '__main__':
import argparse
    parser = argparse.ArgumentParser(description='Compute a confusion matrix from ground-truth and predicted XML annotations.')
    parser.add_argument('--true_xml_dir', help='Path to directory containing ground-truth XML annotations')
    parser.add_argument('--pred_xml_dir', help='Path to directory containing predicted XML annotations')
args = parser.parse_args()
pred_xml_paths = [os.path.join(args.pred_xml_dir, x.name) for x in os.scandir(args.pred_xml_dir)]
c_metric = ConfusionMetric()
for pred_xml_path in pred_xml_paths:
value = confusion_metric(args.true_xml_dir, pred_xml_path)
c_metric.update(value)
print(c_metric.result())
```
#### File: engine/utils/machine_vision.py
```python
import numpy as np
import cv2
import os
def process(path):
origin_im = cv2.imread(path)
# crop_im = origin_im[730:900, :]
draw_im = origin_im.copy()
gray_im = cv2.cvtColor(origin_im, cv2.COLOR_BGR2GRAY)
_, thr = cv2.threshold(gray_im, 180, 255, cv2.THRESH_BINARY_INV)
kernel = np.ones((3, 3), np.uint8)
thr = cv2.morphologyEx(thr, cv2.MORPH_OPEN, kernel, iterations=3)
    contours, _ = cv2.findContours(thr, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)  # extract contours of the thresholded blobs
draw_cnt = 0
for i, cnt in enumerate(contours):
area = cv2.contourArea(cnt)
perimeter = 1.0 * cv2.arcLength(cnt, True)
if area < 700:
continue
cv2.drawContours(draw_im, [cnt], -1, (0, 0, 255), -1)
draw_cnt += 1
cv2.namedWindow('thr', cv2.WINDOW_NORMAL)
cv2.resizeWindow('thr', 1500, 300)
cv2.imshow('thr', thr)
cv2.namedWindow('draw_im', cv2.WINDOW_NORMAL)
cv2.resizeWindow('draw_im', 1500, 300)
cv2.imshow('draw_im', draw_im)
cv2.namedWindow('origin_im', cv2.WINDOW_NORMAL)
cv2.resizeWindow('origin_im', 1500, 300)
cv2.imshow('origin_im', gray_im)
cv2.waitKey(0)
cv2.destroyAllWindows()
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser('machine_vision')
parser.add_argument('--input', '-i', default='/media/jsk/data/namwon/sealing/one_reclassification/TMP_OK/img')
args = parser.parse_args()
input_paths = [os.path.join(args.input, f.name) for f in os.scandir(args.input)]
# random.shuffle(input_paths)
for input_path in input_paths:
print(input_path)
process(input_path)
``` |
{
"source": "7eventeen/Kinematic-model-with-air-resistance",
"score": 2
} |
#### File: 7eventeen/Kinematic-model-with-air-resistance/pythongui4.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(969, 925)
MainWindow.setStyleSheet("background-color: rgb(99, 255, 99);")
MainWindow.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.label_1 = QtWidgets.QLabel(self.centralwidget)
self.label_1.setGeometry(QtCore.QRect(40, 350, 271, 51))
self.label_1.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_1.setObjectName("label_1")
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(310, 350, 351, 51))
self.label_2.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.centralwidget)
self.label_3.setGeometry(QtCore.QRect(330, 480, 291, 41))
self.label_3.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_3.setObjectName("label_3")
self.label_4 = QtWidgets.QLabel(self.centralwidget)
self.label_4.setGeometry(QtCore.QRect(30, 600, 301, 51))
self.label_4.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_4.setObjectName("label_4")
self.label_6 = QtWidgets.QLabel(self.centralwidget)
self.label_6.setGeometry(QtCore.QRect(40, 670, 41, 41))
font = QtGui.QFont()
font.setFamily("MathJax_Math")
self.label_6.setFont(font)
self.label_6.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_6.setObjectName("label_6")
self.pushButton_1 = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_1.setGeometry(QtCore.QRect(30, 760, 281, 121))
font = QtGui.QFont()
font.setFamily("Ubuntu")
font.setPointSize(52)
font.setBold(True)
font.setWeight(75)
self.pushButton_1.setFont(font)
self.pushButton_1.setStyleSheet("QPushButton\n"
" {\n"
" background-color: rgb(61, 61, 61);\n"
" border: none;\n"
" color: rgb(255, 255, 255);\n"
"}\n"
"\n"
"QPushButton:hover\n"
" {\n"
" background-color: rgb(134, 134, 134);\n"
"}")
self.pushButton_1.setObjectName("pushButton_1")
self.textEdit_1 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_1.setGeometry(QtCore.QRect(90, 410, 151, 61))
font = QtGui.QFont()
font.setPointSize(26)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.textEdit_1.setFont(font)
self.textEdit_1.setLayoutDirection(QtCore.Qt.LeftToRight)
self.textEdit_1.setStyleSheet("background-color: rgb(61, 61, 61);\n"
"color: rgb(255, 255, 255);")
self.textEdit_1.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.textEdit_1.setInputMethodHints(QtCore.Qt.ImhNone)
self.textEdit_1.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_1.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_1.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
self.textEdit_1.setObjectName("textEdit_1")
self.label_7 = QtWidgets.QLabel(self.centralwidget)
self.label_7.setGeometry(QtCore.QRect(40, 470, 271, 61))
self.label_7.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_7.setObjectName("label_7")
self.pushButton_2 = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_2.setGeometry(QtCore.QRect(660, 760, 281, 121))
font = QtGui.QFont()
font.setFamily("Ubuntu")
font.setPointSize(52)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.pushButton_2.setFont(font)
self.pushButton_2.setStyleSheet("QPushButton\n"
" {\n"
" background-color: rgb(61, 61, 61);\n"
" border: none;\n"
" color: rgb(255, 255, 255);\n"
"}\n"
"\n"
"QPushButton:hover\n"
" {\n"
" background-color: rgb(134, 134, 134);\n"
"}")
self.pushButton_2.setObjectName("pushButton_2")
self.pushButton_3 = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_3.setGeometry(QtCore.QRect(340, 760, 281, 121))
font = QtGui.QFont()
font.setFamily("MathJax_Main")
font.setPointSize(52)
font.setBold(True)
font.setWeight(75)
self.pushButton_3.setFont(font)
self.pushButton_3.setStyleSheet("QPushButton\n"
" {\n"
" background-color: rgb(61, 61, 61);\n"
" border: none;\n"
" color: rgb(255, 255, 255);\n"
"}\n"
"\n"
"QPushButton:hover\n"
" {\n"
" background-color: rgb(134, 134, 134);\n"
"}")
self.pushButton_3.setObjectName("pushButton_3")
self.label_8 = QtWidgets.QLabel(self.centralwidget)
self.label_8.setGeometry(QtCore.QRect(320, -25, 211, 31))
self.label_8.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_8.setObjectName("label_8")
self.textEdit_3 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_3.setGeometry(QtCore.QRect(390, 410, 151, 61))
font = QtGui.QFont()
font.setPointSize(36)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.textEdit_3.setFont(font)
self.textEdit_3.setLayoutDirection(QtCore.Qt.LeftToRight)
self.textEdit_3.setStyleSheet("background-color: rgb(61, 61, 61);\n"
"color: rgb(255, 255, 255);")
self.textEdit_3.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.textEdit_3.setInputMethodHints(QtCore.Qt.ImhNone)
self.textEdit_3.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_3.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_3.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
self.textEdit_3.setObjectName("textEdit_3")
self.textEdit_2 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_2.setGeometry(QtCore.QRect(90, 530, 151, 61))
font = QtGui.QFont()
font.setPointSize(36)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.textEdit_2.setFont(font)
self.textEdit_2.setLayoutDirection(QtCore.Qt.LeftToRight)
self.textEdit_2.setStyleSheet("background-color: rgb(61, 61, 61);\n"
"color: rgb(255, 255, 255);")
self.textEdit_2.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.textEdit_2.setInputMethodHints(QtCore.Qt.ImhNone)
self.textEdit_2.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_2.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_2.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
self.textEdit_2.setObjectName("textEdit_2")
self.textEdit_4 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_4.setGeometry(QtCore.QRect(390, 530, 151, 61))
font = QtGui.QFont()
font.setPointSize(36)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.textEdit_4.setFont(font)
self.textEdit_4.setLayoutDirection(QtCore.Qt.LeftToRight)
self.textEdit_4.setStyleSheet("background-color: rgb(61, 61, 61);\n"
"color: rgb(255, 255, 255);")
self.textEdit_4.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.textEdit_4.setInputMethodHints(QtCore.Qt.ImhNone)
self.textEdit_4.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_4.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_4.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
self.textEdit_4.setObjectName("textEdit_4")
self.textEdit_5 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_5.setGeometry(QtCore.QRect(90, 660, 151, 61))
font = QtGui.QFont()
font.setPointSize(36)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.textEdit_5.setFont(font)
self.textEdit_5.setLayoutDirection(QtCore.Qt.LeftToRight)
self.textEdit_5.setStyleSheet("background-color: rgb(61, 61, 61);\n"
"color: rgb(255, 255, 255);")
self.textEdit_5.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.textEdit_5.setInputMethodHints(QtCore.Qt.ImhNone)
self.textEdit_5.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_5.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_5.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
self.textEdit_5.setObjectName("textEdit_5")
self.textEdit_6 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_6.setGeometry(QtCore.QRect(390, 660, 151, 61))
font = QtGui.QFont()
font.setPointSize(36)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.textEdit_6.setFont(font)
self.textEdit_6.setLayoutDirection(QtCore.Qt.LeftToRight)
self.textEdit_6.setStyleSheet("background-color: rgb(61, 61, 61);\n"
"color: rgb(255, 255, 255);")
self.textEdit_6.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.textEdit_6.setInputMethodHints(QtCore.Qt.ImhNone)
self.textEdit_6.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_6.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_6.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.textEdit_6.setObjectName("textEdit_6")
self.label_5 = QtWidgets.QLabel(self.centralwidget)
self.label_5.setGeometry(QtCore.QRect(340, 600, 291, 51))
self.label_5.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_5.setObjectName("label_5")
self.label_9 = QtWidgets.QLabel(self.centralwidget)
self.label_9.setGeometry(QtCore.QRect(350, 530, 41, 61))
font = QtGui.QFont()
font.setFamily("MathJax_Math")
self.label_9.setFont(font)
self.label_9.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_9.setObjectName("label_9")
self.label_10 = QtWidgets.QLabel(self.centralwidget)
self.label_10.setGeometry(QtCore.QRect(40, 540, 31, 41))
font = QtGui.QFont()
font.setFamily("MathJax_Math")
self.label_10.setFont(font)
self.label_10.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_10.setObjectName("label_10")
self.label_11 = QtWidgets.QLabel(self.centralwidget)
self.label_11.setGeometry(QtCore.QRect(50, 420, 31, 41))
font = QtGui.QFont()
font.setFamily("MathJax_Math")
self.label_11.setFont(font)
self.label_11.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_11.setObjectName("label_11")
self.label_12 = QtWidgets.QLabel(self.centralwidget)
self.label_12.setGeometry(QtCore.QRect(350, 420, 41, 51))
font = QtGui.QFont()
font.setFamily("MathJax_Math")
self.label_12.setFont(font)
self.label_12.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_12.setObjectName("label_12")
self.textEdit_7 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_7.setGeometry(QtCore.QRect(710, 410, 151, 61))
font = QtGui.QFont()
font.setPointSize(36)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.textEdit_7.setFont(font)
self.textEdit_7.setLayoutDirection(QtCore.Qt.LeftToRight)
self.textEdit_7.setStyleSheet("background-color: rgb(61, 61, 61);\n"
"color: rgb(255, 255, 255);")
self.textEdit_7.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.textEdit_7.setInputMethodHints(QtCore.Qt.ImhNone)
self.textEdit_7.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_7.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_7.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
self.textEdit_7.setObjectName("textEdit_7")
self.textEdit_8 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_8.setGeometry(QtCore.QRect(710, 530, 151, 61))
font = QtGui.QFont()
font.setPointSize(36)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.textEdit_8.setFont(font)
self.textEdit_8.setLayoutDirection(QtCore.Qt.LeftToRight)
self.textEdit_8.setStyleSheet("background-color: rgb(61, 61, 61);\n"
"color: rgb(255, 255, 255);")
self.textEdit_8.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.textEdit_8.setInputMethodHints(QtCore.Qt.ImhNone)
self.textEdit_8.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_8.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_8.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
self.textEdit_8.setObjectName("textEdit_8")
self.label_13 = QtWidgets.QLabel(self.centralwidget)
self.label_13.setGeometry(QtCore.QRect(650, 350, 291, 51))
self.label_13.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_13.setObjectName("label_13")
self.label_14 = QtWidgets.QLabel(self.centralwidget)
self.label_14.setGeometry(QtCore.QRect(650, 480, 291, 41))
self.label_14.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_14.setObjectName("label_14")
self.label_15 = QtWidgets.QLabel(self.centralwidget)
self.label_15.setGeometry(QtCore.QRect(350, 660, 41, 61))
font = QtGui.QFont()
font.setFamily("MathJax_Math")
self.label_15.setFont(font)
self.label_15.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_15.setObjectName("label_15")
self.textEdit_9 = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_9.setGeometry(QtCore.QRect(710, 660, 151, 61))
font = QtGui.QFont()
font.setPointSize(36)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.textEdit_9.setFont(font)
self.textEdit_9.setLayoutDirection(QtCore.Qt.LeftToRight)
self.textEdit_9.setStyleSheet("background-color: rgb(61, 61, 61);\n"
"color: rgb(255, 255, 255);")
self.textEdit_9.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.textEdit_9.setInputMethodHints(QtCore.Qt.ImhNone)
self.textEdit_9.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_9.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.textEdit_9.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
self.textEdit_9.setObjectName("textEdit_9")
self.label_16 = QtWidgets.QLabel(self.centralwidget)
self.label_16.setGeometry(QtCore.QRect(650, 610, 291, 41))
self.label_16.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.label_16.setObjectName("label_16")
self.main_widget = QtWidgets.QWidget(self.centralwidget)
self.main_widget.setGeometry(QtCore.QRect(30, 0, 911, 331))
self.main_widget.setObjectName("main_widget")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 969, 22))
self.menubar.setObjectName("menubar")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Kinematic Model"))
self.label_1.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:26pt; font-weight:600;\">wind speed, m/s</span></p></body></html>"))
self.label_2.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\"><span style=\" font-size:26pt; font-weight:600;\">resistance, kg/m</span></p></body></html>"))
self.label_3.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\"><span style=\" font-size:26pt; font-weight:600;\">angle, degrees</span></p></body></html>"))
self.label_4.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:26pt; font-weight:600;\">start velocity, m/s</span></p></body></html>"))
self.label_6.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:28pt; font-weight:600;\">v</span><span style=\" font-size:16pt; font-weight:600; vertical-align:sub;\">0</span></p></body></html>"))
self.pushButton_1.setWhatsThis(_translate("MainWindow", "<html><head/><body><p><br/></p></body></html>"))
self.pushButton_1.setText(_translate("MainWindow", "Exit"))
self.textEdit_1.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:26pt; font-weight:600; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:36pt;\">0.0</span></p></body></html>"))
self.label_7.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\"><span style=\" font-size:26pt; font-weight:600;\">mass, kg</span><span style=\" font-size:18pt; font-style:italic;\">(if k>0)</span></p></body></html>"))
self.pushButton_2.setWhatsThis(_translate("MainWindow", "<html><head/><body><p><br/></p></body></html>"))
self.pushButton_2.setText(_translate("MainWindow", "Help"))
self.pushButton_3.setWhatsThis(_translate("MainWindow", "<html><head/><body><p><br/></p></body></html>"))
self.pushButton_3.setText(_translate("MainWindow", "START"))
        self.label_8.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:12pt; font-style:italic;\">of course only less than or equal to 1</span></p><p><span style=\" font-style:italic;\"><br/></span></p></body></html>"))
self.textEdit_3.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:36pt; font-weight:600; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">0.000</p></body></html>"))
self.textEdit_2.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:36pt; font-weight:600; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">1.0</p></body></html>"))
self.textEdit_4.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:36pt; font-weight:600; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">0.0</p></body></html>"))
self.textEdit_5.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:36pt; font-weight:600; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">0.0</p></body></html>"))
self.textEdit_6.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:36pt; font-weight:600; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">0.0</p></body></html>"))
self.label_5.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\"><span style=\" font-size:26pt; font-weight:600;\">flight time, sec</span></p></body></html>"))
self.label_9.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:28pt; font-weight:600;\">α</span></p></body></html>"))
self.label_10.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:28pt; font-weight:600;\">m</span></p></body></html>"))
self.label_11.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:28pt; font-weight:600;\">v</span></p></body></html>"))
self.label_12.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:28pt; font-weight:600;\">k</span></p></body></html>"))
self.textEdit_7.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:36pt; font-weight:600; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">6.0</p></body></html>"))
self.textEdit_8.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:36pt; font-weight:600; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">4.0</p></body></html>"))
self.label_13.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\"><span style=\" font-size:26pt; font-weight:600;\">x limit on a graph</span></p></body></html>"))
self.label_14.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\"><span style=\" font-size:26pt; font-weight:600;\">y limit on a graph</span></p></body></html>"))
self.label_15.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:28pt; font-weight:600;\">τ</span></p></body></html>"))
self.textEdit_9.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:36pt; font-weight:600; font-style:normal;\">\n"
"<p align=\"right\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">50.0</p></body></html>"))
self.label_16.setText(_translate("MainWindow", "<html><head/><body><p align=\"center\"><span style=\" font-size:26pt; font-weight:600;\">animation speed</span></p></body></html>"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
``` |
{
"source": "7exe/lostia-professional-dev",
"score": 3
} |
#### File: lostia-professional-dev/clib/cat.py
```python
import sys
import os
from command import end
loggedInUser = open("LostiaFiles/user.data").read()
if(loggedInUser == "guest"):
print("cat: Permission Denied")
end(sys.argv)
def check_if_has_arguments():
if(len(sys.argv)> 1):
return True
else:
print("cat: file")
end(sys.argv)
currentDir = open("LostiaFiles/current.directory").read()
def cat():
if(os.path.exists(currentDir+sys.argv[1])):
catFile = open(currentDir+sys.argv[1]).read()
print(catFile)
end(sys.argv)
else:
print("Directory does not exist")
end(sys.argv)
check_if_has_arguments()
cat()
```
#### File: lostia-professional-dev/clib/cd.py
```python
import sys
from command import end
import os
loggedInUser = open("LostiaFiles/user.data").read()
currentDir = open("LostiaFiles/current.directory").read()
def check_if_has_arguments():
if(len(sys.argv)> 1):
return True
else:
print("cd: .. or directory")
end(sys.argv)
if(loggedInUser == "guest"):
print("cd: Permission Denied")
end(sys.argv)
check_if_has_arguments()
if(loggedInUser != "systemadmin"):
homeDir = "LostiaFiles/root/home/"+loggedInUser+"/"
else:
homeDir = "LostiaFiles/root/"
if(".." not in sys.argv):
if("-" not in sys.argv):
if(os.path.isdir(currentDir+sys.argv[1])):
if(sys.argv[1].startswith("./") == False and sys.argv[1] != "/"):
open("LostiaFiles/current.directory","w").close()
with open("LostiaFiles/current.directory","r+") as changeFile:
changeFile.truncate(0)
if("/" in sys.argv[1]):
changeFile.write(currentDir+sys.argv[1])
else:
changeFile.write(currentDir+sys.argv[1]+"/")
changeFile.close()
else:
print("Directory not found")
else:
with open("LostiaFiles/current.directory","r+") as changeFile:
changeFile.truncate(0)
changeFile.write(homeDir)
changeFile.close()
else:
if(currentDir != "LostiaFiles/root/home/"+loggedInUser+"/" and currentDir != "LostiaFiles/root/"):
a = currentDir.split("/")
for i in range(a.count("")):
a.remove("")
        # this should never happen, but guard against an empty path list
if (len(a) != 0):
a.pop()
Dir = ""
count = 0
for I in a:
if(count != 0):
Dir +="/"+I
else:
Dir += I
count += 1
Dir+="/"
with open("LostiaFiles/current.directory","r+") as changeFile:
changeFile.truncate(0)
changeFile.write(Dir)
changeFile.close()
end(sys.argv)
```
#### File: lostia-professional-dev/clib/cp.py
```python
import command
from shutil import copyfile
import sys
from sys import exit
import os
def has_arguments():
if(len(sys.argv)>2):
pass
else:
print("cp: source, target")
command.end(sys.argv)
has_arguments()
source = sys.argv[1]
target = sys.argv[2]
if(os.path.exists(command.CURDIR+sys.argv[1])):
pass
else:
print("Source file does not exist!")
if(os.path.exists(command.CURDIR+sys.argv[2])):
print("Target already exists!")
def read_and_write_from_source():
sourceData = open(command.CURDIR+source).read()
with open(command.CURDIR+target,"w") as newFile:
newFile.write(sourceData)
newFile.close()
command.end(sys.argv)
try:
read_and_write_from_source()
except IOError as e:
#print("Unable to copy file. %s" % e)
command.end(sys.argv)
except:
#print("Unexpected error:", sys.exc_info())
command.end(sys.argv)
```
#### File: lostia-professional-dev/clib/install.py
```python
import os
import sys
import zipfile
from command import end
cfgfixed = []
done = []
def reset():
print("[Lostia Installer] Reverting")
for I in done:
try:
os.remove(I)
print("[Lostia Installer] Removed "+I)
except:
print("[Lostia Installer] Failed to remove "+I)
if(len(sys.argv) > 1):
try:
print("[Lostia Installer] Reading .cfg file of "+sys.argv[1])
archive = zipfile.ZipFile(sys.argv[1], 'r')
cfg = archive.read('cmd.cfg')
cfgfixed = cfg.decode("utf-8").split(" ")
except:
print("[Lostia Installer] Failed to read .cfg file")
end(sys.argv)
for I in cfgfixed:
try:
print("[Lostia Installer] Installing command "+I)
with zipfile.ZipFile(sys.argv[1]) as z:
z.extract('clib/'+I+".py")
done.append('clib/'+I+".py")
except:
print("[Lostia Installer] Failed to install command")
reset()
end(sys.argv)
try:
print("[Lostia Installer] Updating help")
with zipfile.ZipFile(sys.argv[1]) as z:
z.extract('LostiaHelp/'+I +".help")
done.append('LostiaHelp/'+I +".help")
except:
print("[Lostia Installer] Failed to update help")
print("[Lostia Installer] Done!")
end(sys.argv)
else:
print("install: must provide target")
end(sys.argv)
```
#### File: lostia-professional-dev/clib/tree.py
```python
import os
from os import listdir, sep
from os.path import abspath, basename, isdir
from command import end
import sys
totalfiles = -1
def tree(dir, padding, print_files=True):
global totalfiles
print(""+padding[:-1] + '├─── \033[34m' + basename(abspath(dir)) + '\033[39m')
padding = padding + ' '
files = []
if print_files:
files = listdir(dir)
else:
files = [x for x in os.listdir(dir) if os.path.isdir(dir + sep + x)]
count = 0
for file in files:
count += 1
totalfiles = totalfiles + 1
path = dir + sep + file
if isdir(path):
if count == len(files):
tree(path, padding + ' ', print_files)
else:
tree(path, padding + '│', print_files)
else:
if(count==len(files)):
print(padding + '└─── ' + file)
else:
print(padding + '├─── ' + file)
try:
if(len(sys.argv) > 1):
if(os.path.isdir(sys.argv[1])):
tree(sys.argv[1]," ")
dirs = 0
files = 0
for _, dirnames, filenames in os.walk(sys.argv[1]):
files += len(filenames)
dirs += len(dirnames)
print("\n"+str(dirs+1)+" directories, "+str(files)+" files")
end(sys.argv)
else:
print("Directory not found")
end(sys.argv)
else:
tree("./"," ")
dirs = 0
files = 0
for _, dirnames, filenames in os.walk("./"):
files += len(filenames)
dirs += len(dirnames)
print("\n"+str(dirs+1)+" directories, "+str(files)+" files")
end(sys.argv)
except:
print("Failed to display Directory")
end(sys.argv)
``` |
{
"source": "7FeiW/uns",
"score": 3
} |
#### File: 7FeiW/uns/loss_functions.py
```python
from keras import backend as K
import tensorflow as tf
def mean_iou(y_true, y_pred):
score, up_opt = tf.metrics.mean_iou(y_true, y_pred, 2)
K.get_session().run(tf.local_variables_initializer())
with tf.control_dependencies([up_opt]):
score = tf.identity(score)
return score
def dice_coef(y_true, y_pred):
'''
    Function to compute the soft (smoothed) Dice coefficient over the entire batch
'''
smooth = 1.0
y_true_flat = K.flatten(y_true)
y_pred_flat = K.flatten(y_pred)
intersection = K.sum(y_true_flat * y_pred_flat)
l = K.sum(y_true_flat)
r = K.sum(y_pred_flat)
return (2. * intersection + smooth) / (l + r + smooth)
def hard_dice_coef(y_true, y_pred):
'''
    Function to compute the hard Dice coefficient over the entire batch
'''
y_true_flat = K.flatten(y_true)
y_pred_flat = K.flatten(y_pred)
intersection = K.sum(y_true_flat * y_pred_flat)
l = K.sum(y_true_flat)
r = K.sum(y_pred_flat)
if l + r == 0:
return 1
else:
return (2. * intersection) / (l + r)
def iou_score(y_true, y_pred):
'''
    Function to compute the IoU (Jaccard) score over the entire batch
'''
y_true_flat = K.flatten(y_true)
y_pred_flat = K.flatten(y_pred)
intersection = K.sum(y_true_flat * y_pred_flat)
l = K.sum(y_true_flat)
r = K.sum(y_pred_flat)
if l + r == 0:
return 1
else:
        return intersection / (l + r - intersection)
def dice_loss(y_true, y_pred):
return 1.0 - dice_coef(y_true, y_pred)
```
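A quick way to sanity-check the smoothed Dice coefficient above is to mirror it in plain NumPy on toy masks; this sketch only illustrates the arithmetic, not the Keras graph:
```python
import numpy as np
def dice_np(y_true, y_pred, smooth=1.0):
    # mirrors dice_coef: (2*|A∩B| + smooth) / (|A| + |B| + smooth)
    y_true, y_pred = y_true.ravel(), y_pred.ravel()
    intersection = np.sum(y_true * y_pred)
    return (2.0 * intersection + smooth) / (np.sum(y_true) + np.sum(y_pred) + smooth)
a = np.array([1, 1, 1, 0, 0], dtype=np.float32)
b = np.array([1, 1, 0, 0, 0], dtype=np.float32)
print(dice_np(a, b))  # (2*2 + 1) / (3 + 2 + 1) = 0.8333...
```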
#### File: 7FeiW/uns/models.py
```python
from keras.constraints import maxnorm
from keras.layers import Input, concatenate, Conv2D, MaxPooling2D, Conv2DTranspose, BatchNormalization, Dropout, Add
from keras.models import Model
def conv_block(inputs, filters, kernel_size, drop_rate = 0):
conv = Conv2D(filters, kernel_size, activation='relu', kernel_constraint=maxnorm(3),
kernel_initializer="he_uniform", padding='same')(inputs)
if drop_rate > 0:
conv = Dropout(drop_rate)(conv)
conv = Conv2D(filters, kernel_size, activation='relu', kernel_constraint=maxnorm(3),
kernel_initializer="he_uniform", padding='same')(conv)
if drop_rate > 0:
conv = Dropout(drop_rate)(conv)
return conv
def conv_bn_block(inputs, filters, kernel_size):
conv = Conv2D(filters, kernel_size, activation='relu', kernel_constraint=maxnorm(3),
kernel_initializer="he_uniform", padding='same')(inputs)
bn = BatchNormalization()(conv)
conv = Conv2D(filters, kernel_size, activation='relu', kernel_constraint=maxnorm(3),
kernel_initializer="he_uniform", padding='same')(bn)
bn = BatchNormalization()(conv)
return bn
def conv_bn_res_block(inputs, filters, kernel_size):
conv1 = Conv2D(filters, kernel_size, activation='relu', kernel_constraint=maxnorm(3),
kernel_initializer="he_uniform", padding='same')(inputs)
bn1 = BatchNormalization()(conv1)
conv2 = Conv2D(filters, kernel_size, activation='relu', kernel_constraint=maxnorm(3),
kernel_initializer="he_uniform", padding='same')(bn1)
bn2 = BatchNormalization()(conv2)
added = Add()([conv1, bn2])
return added
def wide_res_block(inputs, filters, kernel_size, drop_rate = 0.5):
conv1 = Conv2D(filters, kernel_size, activation='relu', kernel_constraint=maxnorm(3),
kernel_initializer="he_uniform", padding='same')(inputs)
bn1 = BatchNormalization()(conv1)
dp1 = Dropout(drop_rate)(bn1)
conv2 = Conv2D(filters, kernel_size, activation='relu', kernel_constraint=maxnorm(3),
kernel_initializer="he_uniform", padding='same')(dp1)
bn2 = BatchNormalization()(conv2)
added = Add()([conv1, bn2])
return added
def unet(img_rows=64, img_cols=80, base_filter_num=32):
'''
simple unet
'''
inputs = Input((img_rows, img_cols, 1))
conv1 = conv_block(inputs, 32, (3, 3))
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = conv_block(pool1, 64, (3, 3))
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = conv_block(pool2, 128, (3, 3))
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = conv_block(pool3, 256, (3, 3))
pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)
conv5 = conv_block(pool4, 512, (3, 3))
up6 = concatenate([Conv2DTranspose(256, (2, 2), strides=(2, 2), padding='same')(conv5), conv4], axis=3)
conv6 = conv_block(up6, 256, (3, 3))
up7 = concatenate([Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(conv6), conv3], axis=3)
conv7 = conv_block(up7, 128, (3, 3))
up8 = concatenate([Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv7), conv2], axis=3)
conv8 = conv_block(up8, 64, (3, 3))
up9 = concatenate([Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv8), conv1], axis=3)
conv9 = conv_block(up9, 32, (3, 3))
conv10 = Conv2D(1, (1, 1), activation='sigmoid')(conv9)
model = Model(inputs=[inputs], outputs=[conv10])
return model
def unet_deeper(img_rows=64, img_cols=80, base_filter_num=32):
'''
A deeper unet
'''
inputs = Input((img_rows, img_cols, 1))
conv1 = conv_block(inputs, 32, (3, 3))
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = conv_block(pool1, 64, (3, 3))
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = conv_block(pool2, 128, (3, 3))
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = conv_block(pool3, 256, (3, 3))
pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)
conv5 = conv_block(pool4, 512, (3, 3))
pool5 = MaxPooling2D(pool_size=(2, 2))(conv5)
conv6 = conv_block(pool5, 1024, (3, 3))
up7 = concatenate([Conv2DTranspose(256, (2, 2), strides=(2, 2), padding='same')(conv6), conv5], axis=3)
conv7 = conv_block(up7, 256, (3, 3))
up8 = concatenate([Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(conv7), conv4], axis=3)
conv8 = conv_block(up8, 128, (3, 3))
up9 = concatenate([Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv8), conv3], axis=3)
conv9 = conv_block(up9, 64, (3, 3))
up10 = concatenate([Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv9), conv2], axis=3)
conv10 = conv_block(up10, 32, (3, 3))
up11 = concatenate([Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv10), conv1], axis=3)
conv11 = conv_block(up11, 32, (3, 3))
conv12 = Conv2D(1, (1, 1), activation='sigmoid')(conv11)
model = Model(inputs=[inputs], outputs=[conv12])
return model
def unet_invert(img_rows=64, img_cols=80, base_filter_num=32):
'''
    unet variant with an inverted filter schedule (wide at the input resolution, narrow at the bottleneck) and dropout
'''
dp_rate = 0.5
inputs = Input((img_rows, img_cols, 1))
conv1 = conv_block(inputs, 512, (3, 3), dp_rate)
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = conv_block(pool1, 256, (3, 3), dp_rate)
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = conv_block(pool2, 128, (3, 3), dp_rate)
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = conv_block(pool3, 64, (3, 3), dp_rate)
pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)
conv5 = conv_block(pool4, 32, (3, 3), dp_rate)
up6 = concatenate([Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv5), conv4], axis=3)
conv6 = conv_block(up6, 64, (3, 3), dp_rate)
up7 = concatenate([Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(conv6), conv3], axis=3)
conv7 = conv_block(up7, 128, (3, 3), dp_rate)
up8 = concatenate([Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv7), conv2], axis=3)
conv8 = conv_block(up8, 256, (3, 3), dp_rate)
up9 = concatenate([Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv8), conv1], axis=3)
conv9 = conv_block(up9, 512, (3, 3), dp_rate)
conv10 = Conv2D(1, (1, 1), activation='sigmoid')(conv9)
model = Model(inputs=[inputs], outputs=[conv10])
return model
def unet_invert_small(img_rows=64, img_cols=80, base_filter_num=32):
'''
    smaller, one-level-shallower variant of unet_invert without dropout
'''
inputs = Input((img_rows, img_cols, 1))
conv2 = conv_block(inputs, 256, (3, 3))
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = conv_block(pool2, 128, (3, 3))
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = conv_block(pool3, 64, (3, 3))
pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)
conv5 = conv_block(pool4, 32, (3, 3))
up6 = concatenate([Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv5), conv4], axis=3)
conv6 = conv_block(up6, 64, (3, 3))
up7 = concatenate([Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(conv6), conv3], axis=3)
conv7 = conv_block(up7, 128, (3, 3))
up8 = concatenate([Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv7), conv2], axis=3)
conv8 = conv_block(up8, 256, (3, 3))
conv10 = Conv2D(1, (1, 1), activation='sigmoid')(conv8)
model = Model(inputs=[inputs], outputs=[conv10])
return model
def unet_res(img_rows=64, img_cols=80, base_filter_num=32):
'''
unet with res_block
'''
inputs = Input((img_rows, img_cols, 1))
conv1 = conv_bn_res_block(inputs, base_filter_num, (3, 3))
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
base_filter_num *= 2
conv2 = conv_bn_res_block(pool1, base_filter_num, (3, 3))
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
base_filter_num *= 2
conv3 = conv_bn_res_block(pool2, base_filter_num, (3, 3))
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
base_filter_num *= 2
conv4 = conv_bn_res_block(pool3, base_filter_num, (3, 3))
pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)
base_filter_num *= 2
conv5 = conv_bn_res_block(pool4, base_filter_num, (3, 3))
base_filter_num //= 2
up6 = concatenate([Conv2DTranspose(base_filter_num, (2, 2), strides=(2, 2), padding='same')(conv5), conv4], axis=3)
conv6 = conv_bn_res_block(up6, 256, (3, 3))
base_filter_num //= 2
up7 = concatenate([Conv2DTranspose(base_filter_num, (2, 2), strides=(2, 2), padding='same')(conv6), conv3], axis=3)
conv7 = conv_bn_res_block(up7, base_filter_num, (3, 3))
base_filter_num //= 2
up8 = concatenate([Conv2DTranspose(base_filter_num, (2, 2), strides=(2, 2), padding='same')(conv7), conv2], axis=3)
conv8 = conv_bn_res_block(up8, base_filter_num, (3, 3))
base_filter_num //= 2
up9 = concatenate([Conv2DTranspose(base_filter_num, (2, 2), strides=(2, 2), padding='same')(conv8), conv1], axis=3)
conv9 = conv_bn_res_block(up9, base_filter_num, (3, 3))
conv10 = Conv2D(1, (1, 1), activation='sigmoid')(conv9)
model = Model(inputs=[inputs], outputs=[conv10])
return model
def unet_res_wide_dp(img_rows=64, img_cols=80, base_filter_num=32):
'''
    unet built from wide residual blocks with dropout
'''
inputs = Input((img_rows, img_cols, 1))
conv1 = wide_res_block(inputs, base_filter_num, (3, 3))
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
base_filter_num *= 2
conv2 = wide_res_block(pool1, base_filter_num, (3, 3))
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
base_filter_num *= 2
conv3 = wide_res_block(pool2, base_filter_num, (3, 3))
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
base_filter_num *= 2
conv4 = wide_res_block(pool3, base_filter_num, (3, 3))
pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)
base_filter_num *= 2
conv5 = wide_res_block(pool4, base_filter_num, (3, 3))
base_filter_num //= 2
up6 = concatenate([Conv2DTranspose(base_filter_num, (2, 2), strides=(2, 2), padding='same')(conv5), conv4], axis=3)
conv6 = wide_res_block(up6, base_filter_num, (3, 3))
base_filter_num //= 2
up7 = concatenate([Conv2DTranspose(base_filter_num, (2, 2), strides=(2, 2), padding='same')(conv6), conv3], axis=3)
conv7 = wide_res_block(up7, base_filter_num, (3, 3))
base_filter_num //= 2
up8 = concatenate([Conv2DTranspose(base_filter_num, (2, 2), strides=(2, 2), padding='same')(conv7), conv2], axis=3)
conv8 = wide_res_block(up8, base_filter_num, (3, 3))
base_filter_num //= 2
up9 = concatenate([Conv2DTranspose(base_filter_num, (2, 2), strides=(2, 2), padding='same')(conv8), conv1], axis=3)
conv9 = wide_res_block(up9, base_filter_num, (3, 3))
conv10 = Conv2D(1, (1, 1), activation='sigmoid')(conv9)
model = Model(inputs=[inputs], outputs=[conv10])
return model
def unet_bn(img_rows=64, img_cols=80, base_filter_num=32):
'''
    unet with batch-normalized conv blocks
'''
inputs = Input((img_rows, img_cols, 1))
conv1 = conv_bn_block(inputs, 32, (3, 3))
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = conv_bn_block(pool1, 64, (3, 3))
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = conv_bn_block(pool2, 128, (3, 3))
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = conv_bn_block(pool3, 256, (3, 3))
pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)
conv5 = conv_bn_block(pool4, 512, (3, 3))
#conv5 = Dropout(0.5)(conv5)
up6 = concatenate([Conv2DTranspose(256, (2, 2), strides=(2, 2), padding='same')(conv5), conv4], axis=3)
conv6 = conv_bn_block(up6, 256, (3, 3))
up7 = concatenate([Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(conv6), conv3], axis=3)
conv7 = conv_bn_block(up7, 128, (3, 3))
up8 = concatenate([Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv7), conv2], axis=3)
conv8 = conv_bn_block(up8, 64, (3, 3))
up9 = concatenate([Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv8), conv1], axis=3)
conv9 = conv_bn_block(up9, 32, (3, 3))
conv10 = Conv2D(1, (1, 1), activation='sigmoid')(conv9)
model = Model(inputs=[inputs], outputs=[conv10])
return model
def fcn(img_rows=64, img_cols=80, base_filter_num=32):
'''
    simple FCN, i.e. an encoder-decoder style net without skip connections
'''
inputs = Input((img_rows, img_cols, 1))
conv1 = conv_block(inputs, 32, (3, 3))
pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)
conv2 = conv_block(pool1, 64, (3, 3))
pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)
conv3 = conv_block(pool2, 128, (3, 3))
pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)
conv4 = conv_block(pool3, 256, (3, 3))
pool4 = MaxPooling2D(pool_size=(2, 2))(conv4)
conv5 = conv_block(pool4, 512, (3, 3))
up6 = Conv2DTranspose(256, (2, 2), strides=(2, 2), padding='same')(conv5)
conv6 = conv_bn_block(up6, 256, (3, 3))
up7 = Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(conv6)
conv7 = conv_bn_block(up7, 128, (3, 3))
up8 = Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv7)
conv8 = conv_bn_block(up8, 64, (3, 3))
up9 = Conv2DTranspose(32, (2, 2), strides=(2, 2), padding='same')(conv8)
conv9 = conv_bn_block(up9, 32, (3, 3))
conv10 = Conv2D(1, (1, 1), activation='sigmoid')(conv9)
model = Model(inputs=[inputs], outputs=[conv10])
return model
```
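All of the builders above return an uncompiled Keras Model; a minimal sketch of inspecting one (assuming the default 64x80 single-channel input used throughout this repo):
```python
from models import unet
model = unet(img_rows=64, img_cols=80, base_filter_num=32)
model.summary()              # layer-by-layer view of the encoder-decoder
print(model.count_params())  # total number of parameters
```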
#### File: 7FeiW/uns/train.py
```python
import warnings
import json
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import ModelCheckpoint, EarlyStopping
from keras import backend as K
from loss_functions import *
from models import *
from submission import *
from math import isnan
from keras.callbacks import TensorBoard
K.set_image_data_format('channels_last') # TF dimension ordering in this code
def dice_score(im1, im2):
im1 = np.asarray(im1).astype(np.bool)
im2 = np.asarray(im2).astype(np.bool)
if im1.shape != im2.shape:
raise ValueError("Shape mismatch: im1 and im2 must have the same shape.")
# Compute Dice coefficient
intersection = np.logical_and(im1, im2)
if im1.sum() + im2.sum() == 0:
return 1.0
else:
return 2. * intersection.sum() / (im1.sum() + im2.sum())
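# e.g. two masks with 3 positive pixels each, 2 of which overlap: 2*2 / (3+3) ≈ 0.667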
def iou_score(im1, im2):
im1 = np.asarray(im1).astype(np.bool)
im2 = np.asarray(im2).astype(np.bool)
if im1.shape != im2.shape:
raise ValueError("Shape mismatch: im1 and im2 must have the same shape.")
    # Compute IoU (intersection over union)
intersection = np.logical_and(im1, im2)
if im1.sum() + im2.sum() == 0:
return 1.0
else:
        return intersection.sum() / (im1.sum() + im2.sum() - intersection.sum())
def train_and_predict(model_name="unet",
num_epoch=10, batch_size=32, verbose=0, filters=32, load_model = 0):
# folder name to save current run
if not os.path.exists('models/'):
os.mkdir('models')
if not os.path.exists('models/' + model_name):
os.mkdir('models/' + model_name)
folder_name = 'models/' + model_name + '/e' + str(num_epoch) + '_b' + str(batch_size) + '_f' + str(filters)
if not os.path.exists(folder_name):
os.mkdir(folder_name)
# load data
train_set, train_set_masks = get_data_set('train')
val_set, val_set_masks = get_data_set('dev')
test_set, test_set_masks = get_data_set('test')
# resize data
train_set = resize_all_images(train_set, img_rows, img_cols)
train_set_masks = resize_all_images(train_set_masks, img_rows, img_cols)
val_set = resize_all_images(val_set, img_rows, img_cols)
val_set_masks = resize_all_images(val_set_masks, img_rows, img_cols)
test_set = resize_all_images(test_set, img_rows, img_cols)
test_set_masks = resize_all_images(test_set_masks, img_rows, img_cols)
# pre proccessing
train_set = train_set.astype('float32')
val_set = val_set.astype('float32')
test_set = test_set.astype('float32')
mean = np.mean(train_set) # mean for data centering
std = np.std(train_set) # std for data normalization
# save mean and std
data = { 'mean': float(mean), 'std' : float(std)}
with open(folder_name + '/data.json', 'w+') as outfile:
json.dump(data, outfile)
# normalizaztion
train_set -= mean
train_set /= std
val_set -= mean
val_set /= std
test_set -= mean
test_set /= std
# pre process masks
train_set_masks = train_set_masks.astype('float32')
train_set_masks /= 255.0 # scale masks to [0, 1]
train_set_masks = np.round(train_set_masks)
val_set_masks = val_set_masks.astype('float32')
val_set_masks /= 255.0 # scale masks to [0, 1]
val_set_masks = np.round(val_set_masks)
test_set_masks = test_set_masks.astype('float32')
test_set_masks /= 255.0 # scale masks to [0, 1]
test_set_masks = np.round(test_set_masks)
# get model and compile
if model_name == 'unet':
model = unet(img_rows=img_rows, img_cols=img_cols, base_filter_num=filters)
elif model_name == 'unet_bn':
model = unet_bn(img_rows=img_rows, img_cols=img_cols, base_filter_num=filters)
elif model_name == 'unet_res':
model = unet_res(img_rows=img_rows, img_cols=img_cols, base_filter_num=filters)
elif model_name == 'unet_res_dp':
model = unet_res_wide_dp(img_rows=img_rows, img_cols=img_cols, base_filter_num=filters)
elif model_name == 'fcn':
model = fcn(img_rows=img_rows, img_cols=img_cols, base_filter_num=filters)
elif model_name == 'unet_deeper':
model = unet_deeper(img_rows=img_rows, img_cols=img_cols, base_filter_num=filters)
elif model_name == 'unet_invert':
model = unet_invert(img_rows=img_rows, img_cols=img_cols, base_filter_num=filters)
elif model_name == 'unet_invert_small':
model = unet_invert_small(img_rows=img_rows, img_cols=img_cols, base_filter_num=filters)
else:
        raise ValueError('unknown model name: ' + model_name)
# set tensorboard
tensorboard = TensorBoard(log_dir=folder_name + '/logs', histogram_freq=0, write_graph=True, write_images=False)
if not os.path.exists(folder_name + '/logs'):
os.mkdir(folder_name + '/logs')
# create data generator
data_gen = ImageDataGenerator(
rotation_range=30,
vertical_flip=True,
horizontal_flip=True)
val_gen = ImageDataGenerator()
#model.compile(optimizer=Adadelta(lr=0.1, rho=0.95, epsilon=1e-08), loss=dice_loss2, metrics=[dice_coef2,hard_dice_coef2])
model.compile(optimizer=Adam(lr=1e-5), loss=dice_loss, metrics=[hard_dice_coef, mean_iou])
# set model checkpoint
model_checkpoint = ModelCheckpoint(folder_name + '/models.h5', monitor='val_loss', save_best_only=True)
# early stop
EarlyStopping(monitor='val_loss', min_delta=0, patience=1, verbose=0, mode='auto')
if load_model == 1 and os.path.exists(folder_name + '/models.h5'):
print("Loaded model from disk")
model.load_weights(folder_name + '/models.h5')
if num_epoch > 0:
# fitting model
model.fit_generator(data_gen.flow(train_set, train_set_masks, batch_size=batch_size, shuffle=True),
samples_per_epoch=len(train_set), epochs=num_epoch, verbose=verbose,
callbacks=[model_checkpoint, tensorboard],
validation_data=val_gen.flow(val_set, val_set_masks, batch_size=batch_size, shuffle=True),
validation_steps=int(len(val_set) / 32))
#model.fit(train_set, train_set_masks, batch_size=batch_size,
# epochs=num_epoch, verbose=verbose,
# shuffle=True,validation_split=0.2,callbacks=[model_checkpoint,tensorboard])
# evalutation
evl_score = model.evaluate(test_set, test_set_masks, batch_size=batch_size, verbose=verbose)
#print("evaluate score:", "loss,metrics")
print("evaluate score:" , evl_score)
predicted_test_masks = model.predict(test_set, verbose=verbose)
predicted_test_masks = np.around(predicted_test_masks)
shape = predicted_test_masks.shape
predicted_test_masks_reshaped = np.reshape(predicted_test_masks,(shape[0], shape[1] * shape[2]))
dice = 0.
iou = 0.
for predicted, val_mask in zip(predicted_test_masks, test_set_masks):
dice += dice_score(predicted,val_mask)
iou += iou_score(predicted,val_mask)
print('hard dice: ', dice/shape[0], 'mean of iou:', iou/shape[0])
print('model summary:')
print(model.count_params())
print(model.summary())
# create testings
'''
imgs_test, imgs_id_test = get_testing_npy_data()
imgs_test = resize_all_images(imgs_test, img_rows, img_cols)
imgs_test = imgs_test.astype('float32')
model.load_weights(folder_name + '/models.h5')
imgs_test -= mean
imgs_test /= std
imgs_mask_test = model.predict(imgs_test, verbose=verbose)
np.save(folder_name + '/imgs_mask_test.npy', imgs_mask_test)
pred_dir = folder_name + '/preds'
if not os.path.exists(pred_dir):
os.mkdir(pred_dir)
for image, image_id in zip(imgs_mask_test, imgs_id_test):
image = (image[:, :, 0] * 255.).astype(np.uint8)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
imsave(os.path.join(pred_dir, str(image_id) + '_pred.png'), image)
create_submission(folder_name + '/imgs_mask_test.npy', folder_name + '/submission.csv')
'''
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--is_train', type=str, default='True')
parser.add_argument('--num_epoch', type=int, default=20, help='')
parser.add_argument('--batch_size', type=int, default=32, help='')
parser.add_argument('--model', type=str, default='unet', help='unet,fcn,unet_res,unet_deeper')
parser.add_argument('--verbose', type=int, default=2, help='0 for desiable, 1 for progress bar, 2 for log')
parser.add_argument('--filters', type=int, default=32, help='')
parser.add_argument('--load_model', type=int, default=0, help='')
args = parser.parse_args()
train_and_predict(model_name=args.model,
num_epoch=args.num_epoch,
batch_size=args.batch_size,
verbose=args.verbose,
filters=args.filters,
load_model= args.load_model)
``` |
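The same entry point can also be driven programmatically instead of through the CLI; a minimal sketch (model name, epoch count and batch size are arbitrary example values):
```python
from train import train_and_predict
# equivalent to: python train.py --model unet --num_epoch 20 --batch_size 32 --filters 32
train_and_predict(model_name='unet', num_epoch=20, batch_size=32,
                  verbose=2, filters=32, load_model=0)
```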
{
"source": "7FM/OpenRadar",
"score": 2
} |
#### File: mmwave/dsp/doppler_processing.py
```python
import numpy as np
from numba import njit, jit
from . import compensation
from . import utils
def doppler_resolution(band_width, start_freq_const=77, ramp_end_time=62, idle_time_const=100, num_loops_per_frame=128,
num_tx_antennas=3):
"""Calculate the doppler resolution for the given radar configuration.
Args:
start_freq_const (float): Frequency chirp starting point.
ramp_end_time (float): Frequency chirp end point.
idle_time_const (int): Idle time between chirps.
band_width (float): Radar config bandwidth.
num_loops_per_frame (int): The number of loops in each frame.
num_tx_antennas (int): The number of transmitting antennas (tx) on the radar.
Returns:
doppler_resolution (float): The doppler resolution for the given radar configuration.
"""
light_speed_meter_per_sec = 299792458
center_frequency = start_freq_const * 1e9 + band_width / 2
chirp_interval = (ramp_end_time + idle_time_const) * 1e-6
doppler_resolution = light_speed_meter_per_sec / (
2 * num_loops_per_frame * num_tx_antennas * center_frequency * chirp_interval)
return doppler_resolution
def separate_tx(signal, num_tx, vx_axis=1, axis=0):
"""Separate interleaved radar data from separate TX along a certain axis to account for TDM radars.
Args:
signal (ndarray): Received signal.
num_tx (int): Number of transmit antennas.
vx_axis (int): Axis in which to accumulate the separated data.
axis (int): Axis in which the data is interleaved.
Returns:
        ndarray: Separated received data, with the chirps of each TX de-interleaved and the RX channels of all TX stacked along vx_axis as virtual antennas.
"""
# Reorder the axes
reordering = np.arange(len(signal.shape))
reordering[0] = axis
reordering[axis] = 0
if not (reordering == np.arange(len(reordering))).all(): # check if has to reorder
signal = signal.transpose(reordering)
# if signal.shape[1] != num_tx * signal.shape[1]:
# pass
out = np.concatenate([signal[i::num_tx, ...] for i in range(num_tx)], axis=vx_axis)
return out.transpose(reordering)
def doppler_processing(radar_cube,
num_tx_antennas=2,
clutter_removal_enabled=False,
interleaved=True,
window_type_2d=None,
accumulate=True):
"""Perform 2D FFT on the radar_cube.
    Interleave the radar_cube, perform optional windowing and 2D FFT on the radar_cube. Optional antenna coupling
    signature removal can also be performed right before the 2D FFT. In contrast to the original TI code, CFAR and peak
    grouping are intentionally kept separate from the 2D FFT for ease of debugging.
Args:
radar_cube (ndarray): Output of the 1D FFT. If not interleaved beforehand, it has the shape of
(numChirpsPerFrame, numRxAntennas, numRangeBins). Otherwise, it has the shape of
(numRangeBins, numVirtualAntennas, num_doppler_bins). It is assumed that after interleaving the doppler
dimension is located at the last axis.
num_tx_antennas (int): Number of transmitter antennas. This affects how interleaving is performed.
clutter_removal_enabled (boolean): Flag to enable naive clutter removal.
interleaved (boolean): If the input radar_cube is interleaved before passing in. The default radar_cube is not
interleaved, i.e. has the shape of (numChirpsPerFrame, numRxAntennas, numRangeBins). The interleaving
process will transform it such that it becomes (numRangeBins, numVirtualAntennas, num_doppler_bins). Note
that this interleaving is only applicable to TDM radar, i.e. each tx emits the chirp sequentially.
window_type_2d (mmwave.dsp.utils.Window): Optional windowing type before doppler FFT.
accumulate (boolean): Flag to reduce the numVirtualAntennas dimension.
Returns:
        detMatrix (ndarray): (numRangeBins, num_doppler_bins) complete range-doppler information. Original datatype is
uint16_t. Note that azimuthStaticHeatMap can be extracted from zero-doppler index for
visualization.
aoa_input (ndarray): (numRangeBins, numVirtualAntennas, num_doppler_bins) ADC data reorganized by vrx instead of
physical rx.
"""
if interleaved:
# radar_cube is interleaved in the first dimension (for 2 tx and 0-based indexing, odd are the chirps from tx1,
        # and even are from tx2) so it becomes (num_doppler_bins, numVirtualAntennas, numADCSamples), where
# numChirpsPerFrame = num_doppler_bins * num_tx_antennas as designed.
# Antennas associated to tx1 (Ping) are 0:4 and to tx2 (Pong) are 5:8.
fft2d_in = separate_tx(radar_cube, num_tx_antennas, vx_axis=1, axis=0)
else:
fft2d_in = radar_cube
# (Optional) Static Clutter Removal
if clutter_removal_enabled:
# fft2d_in = compensation.clutter_removal(fft2d_in, axis=0)
fft2d_in[1:] = compensation.clutter_removal(fft2d_in[1:], axis=0) # TODO this or above with static detection removal
# transpose to (numRangeBins, numVirtualAntennas, num_doppler_bins)
fft2d_in = np.transpose(fft2d_in, axes=(2, 1, 0))
# Windowing 16x32
if window_type_2d:
fft2d_in = utils.windowing(fft2d_in, window_type_2d, axis=2)
# It is assumed that doppler is at the last axis.
# FFT 32x32
fft2d_out = np.fft.fft(fft2d_in)
aoa_input = fft2d_out
# Save zero-Doppler as azimuthStaticHeatMap, watch out for the bit shift in
# original code.
# Log_2 Absolute Value
fft2d_log_abs = np.log2(np.abs(fft2d_out))
# Accumulate
if accumulate:
return np.sum(fft2d_log_abs, axis=1), aoa_input # TODO divide by num_rx?
else:
return fft2d_log_abs, aoa_input
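# Hedged end-to-end sketch (not part of the original module): feed a synthetic 1D-FFT cube
# with an assumed layout of (numChirpsPerFrame=256, numRxAntennas=4, numRangeBins=128)
# through the 2D FFT for a 2-TX TDM radar.
def _example_doppler_processing():
    """Illustrative only: returns a (128, 128) detection matrix and a (128, 8, 128) aoa input."""
    rng = np.random.default_rng(0)
    radar_cube = rng.standard_normal((256, 4, 128)) + 1j * rng.standard_normal((256, 4, 128))
    det_matrix, aoa_input = doppler_processing(radar_cube, num_tx_antennas=2, accumulate=True)
    assert det_matrix.shape == (128, 128) and aoa_input.shape == (128, 8, 128)
    return det_matrix, aoa_input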
def doppler_estimation(radar_cube,
beam_weights,
num_tx_antennas=2,
clutter_removal_enabled=False,
interleaved=False,
window_type_2d=None):
"""Perform doppler estimation on the weighted sum of range FFT output across all virtual antennas.
In contrast to directly computing doppler FFT from the output of range FFT, this function combines it across all
the virtual receivers first using the weights generated from beamforming. Then FFT is performed and argmax is taken
across each doppler axis to return the indices of max doppler values.
Args:
radar_cube (ndarray): Output of the 1D FFT with only ranges on detected objects. If not interleaved beforehand,
it has the shape of (numChirpsPerFrame, numRxAntennas, numDetObjs). Otherwise, it has the shape of
(numDetObjs, numVirtualAntennas, num_doppler_bins). It is assumed that after interleaving the doppler
dimension is located at the last axis.
beam_weights (ndarray): Weights to sum up the radar_cube across the virtual receivers. It is from the
beam-forming and has the shape of (numVirtualAntennas, numDetObjs)
num_tx_antennas (int): Number of transmitter antennas. This affects how interleaving is performed.
clutter_removal_enabled (boolean): Flag to enable naive clutter removal.
        interleaved (boolean): Whether the input radar_cube has already been interleaved. The default radar_cube is not
            interleaved, i.e. has the shape of (numChirpsPerFrame, numRxAntennas, numDetObjs). The interleaving process
            will transform it such that it becomes (numDetObjs, numVirtualAntennas, num_doppler_bins). Note that this
            interleaving is only applicable to TDM radar, i.e. each tx emits the chirp sequentially.
window_type_2d (string): Optional windowing type before doppler FFT.
Returns:
        doppler_est (ndarray): (numDetObjs) Doppler index for each detected object. Positive index means moving away
from radar while negative index means moving towards the radar.
"""
fft2d_in = None
if not interleaved:
        num_doppler_bins = radar_cube.shape[0] // num_tx_antennas  # integer division keeps the bin count an int
# radar_cube is interleaved in the first dimension (for 2 tx and 0-based indexing, odd are the chirps from tx1,
# and even are from tx2) so it becomes (num_doppler_bins, numVirtualAntennas, numADCSamples), where
# numChirpsPerFrame = num_doppler_bins * num_tx_antennas as designed.
# Antennas associated to tx1 (Ping) are 0:4 and to tx2 (Pong) are 5:8.
if num_tx_antennas == 2:
fft2d_in = np.concatenate((radar_cube[0::2, ...], radar_cube[1::2, ...]), axis=1)
elif num_tx_antennas == 3:
fft2d_in = np.concatenate((radar_cube[0::3, ...], radar_cube[1::3, ...], radar_cube[2::3, ...]), axis=1)
# transpose to (numRangeBins, numVirtualAntennas, num_doppler_bins)
fft2d_in = np.transpose(fft2d_in, axes=(2, 1, 0))
else:
num_doppler_bins = radar_cube.shape[2]
# (Optional) Static Clutter Removal
if clutter_removal_enabled:
fft2d_in = compensation.clutter_removal(fft2d_in)
# Weighted sum across all virtual receivers.
fft2d_in = np.einsum('ijk,jk->ik', fft2d_in, beam_weights)
# Windowing 16x32
if window_type_2d:
fft2d_in = utils.windowing(fft2d_in, window_type_2d, axis=1)
# It is assumed that doppler is at the last axis.
# FFT 32x32
doppler_est = np.fft.fft(fft2d_in)
doppler_est = np.argmax(doppler_est, axis=1)
doppler_est[doppler_est[:] >= num_doppler_bins] -= num_doppler_bins * 2
return doppler_est
``` |
{
"source": "7FroSty/EENLP",
"score": 3
} |
#### File: data/paraphrase_detection/make_dataset_arpa.py
```python
import pandas as pd
from pyprojroot import here
def make_dataset():
dataset_name = "arpa"
commit_hash = "02fcbc70a36fdd3d8e8eefb226d41e2cb9519aa7"
df = pd.read_csv(
f"https://raw.githubusercontent.com/ivannikov-lab/arpa-paraphrase-corpus/{commit_hash}/train.tsv",
sep="\t",
)
result = pd.DataFrame()
result["sentence1"] = df["Sentence1"]
result["sentence2"] = df["Sentence2"]
# "labelled as either paraphrase, near paraphrase or non-paraphrase (with 1, 0, -1 labels respectively)"
# https://github.com/ivannikov-lab/arpa-paraphrase-corpus/blob/02fcbc70a36fdd3d8e8eefb226d41e2cb9519aa7/README.md
result["label"] = (df["Paraphrase"] == 1).astype(int)
result["lang"] = "Armenian"
result["source"] = dataset_name
result["split"] = "train"
output_path = here(
f"data/processed/paraphrase_detection/{dataset_name}/armenian.train.jsonl",
warn=False,
)
output_path.parent.mkdir(parents=True, exist_ok=True)
result.to_json(output_path, orient="records", lines=True)
```
#### File: data/paraphrase_detection/make_dataset_tapaco.py
```python
import numpy as np
import pandas as pd
import pycountry
from datasets import load_dataset
from fuzzywuzzy import process
from pyprojroot import here
from tqdm.auto import tqdm
def make_dataset():
dataset_name = "tapaco"
dataset = load_dataset("tapaco", "all_languages")
df = dataset["train"].to_pandas()
df["paraphrase_set_id"] = df["paraphrase_set_id"].astype(int)
languages_to_keep = [
"be", # Belarusian
"bg", # Bulgarian
"cs", # Czech
"et", # Estonian
"hr", # Croatian
"hu", # Hungarian
"hy", # Armenian
"lt", # Lithuanian
"mk", # Macedonian
"pl", # Polish
"ro", # Romanian
"ru", # Russian
"sl", # Slovenian
"sr", # Serbian
"uk", # Ukrainian
]
df = df[df["language"].isin(languages_to_keep)]
assert len(languages_to_keep) == len(
df["language"].unique()
), "Count of filtered languages doesn't match, probably a typo in 'languages_to_keep'?"
# paraphrase_set_id = 0 seems to mean "unassigned".
df = df[df["paraphrase_set_id"] != 0].copy()
# We need to generate example pairs somehow.
# As of the current version of the code, 50% of the generated examples are true paraphrases (label = 1),
# 25% are chosen from the most similar non-paraphrases (by fuzzywuzzy library Levenshtein distance) (label = 0),
# and 25% is random negative examples (label = 0).
result = []
np.random.seed(0)
for language, df_language in tqdm(df.groupby("language"), "language", position=0):
language_name = pycountry.languages.get(alpha_2=language).name
for paraphrase_set_id, df_set in tqdm(
df_language.groupby("paraphrase_set_id"), "paraphrase_set_id", position=1
):
df_negatives = df_language[
df_language["paraphrase_set_id"] != paraphrase_set_id
]
for row in df_set.itertuples():
result.append(
{
"sentence1": row.paraphrase,
"sentence2": np.random.choice(
df_set[df_set.index != row.Index]["paraphrase"]
),
"label": 1,
"lang": language_name,
}
)
# TODO this can sample the same pair twice, consider fixing it
result.append(
{
"sentence1": row.paraphrase,
"sentence2": np.random.choice(
df_set[df_set.index != row.Index]["paraphrase"]
),
"label": 1,
"lang": language_name,
}
)
# similar negative
result.append(
{
"sentence1": row.paraphrase,
# Fuzzywuzzy run time seems to grow exponentially, so we take a random sample, and look for
# similar sentences only in the smaller sample.
# Probably it would be worth investigating a faster alternative method.
"sentence2": process.extractOne(
row.paraphrase,
df_negatives["paraphrase"].sample(
np.min([256, len(df_negatives)])
),
)[0],
"label": 0,
"lang": language_name,
}
)
# random negative
result.append(
{
"sentence1": row.paraphrase,
"sentence2": np.random.choice(df_negatives["paraphrase"]),
"label": 0,
"lang": language_name,
}
)
result = pd.DataFrame(result)
result["source"] = dataset_name
result["split"] = "train"
output_dir = here(f"data/processed/paraphrase_detection/{dataset_name}", warn=False)
output_dir.mkdir(parents=True, exist_ok=True)
for lang in sorted(result["lang"].unique()):
result[result["lang"] == lang].to_json(
output_dir / f"{lang.lower()}.jsonl",
orient="records",
lines=True,
)
if __name__ == "__main__":
make_dataset()
```
#### File: eenlp/docs/generate_docs_models.py
```python
import mdformat
import pandas as pd
import yaml
from pyprojroot import here
from eenlp.docs.languages import languages
from eenlp.docs.model_types import cases, corpora, types
columns = [
"name",
"description",
"type",
"cased",
"languages",
"pre-trained on",
"links",
]
# Columns for the index table on the top.
# There is filtering for *both* the number of languages and these categories, so that e.g. static word embeddings won't
# show up in the index table.
# currently:
# - single == 1
# - 1 < few < 10
# - 10 <= multi
multilang_models = [
"BERT",
"RoBERTa",
"DistilBERT",
"Electra",
"ALBERT",
"mBERT",
"XLM-RoBERTa",
]
few_language_models = [
"BERT",
"RoBERTa",
"DistilBERT",
"Electra",
"ALBERT",
"XLM-RoBERTa",
"BERT/Electra(?)",
]
single_language_models = [
"BERT",
"RoBERTa",
"DistilBERT",
"Electra",
"ALBERT",
]
def generate():
df = []
for dataset in here("docs/data/models/").glob("*.yml"):
df.append(yaml.safe_load(dataset.read_text()))
df = pd.DataFrame(df)
df_o = df
df = df.explode("languages")
few_langs_models = [
x[0]
for x in df.sort_values("name").groupby(df.index)["languages"].count().items()
if 1 < x[1] < 10
]
few_langs_models = [
x for x in few_langs_models if df.loc[x].iloc[0]["type"] in few_language_models
]
with open(here("docs/models.md"), "w") as f:
f.write("# Models\n\n")
f.write("<table><thead>")
f.write(
f'<tr><td rowspan="2"></td><td align="center" width="100"> multilingual (transformers) </td><td align="center" colspan="{len(few_langs_models)}">several languages (transformers)</td><td align="center" colspan="5">single-language models</td></tr>'
)
f.write("<tr><td></td>")
f.write(
"".join(
f'<td align="center">{x}</td>'
for x in df.loc[few_langs_models]["name"].unique()
)
)
f.write(
'<td align="center">BERT</td><td align="center">RoBERTa</td><td align="center">DistilBERT</td><td align="center">Electra</td><td align="center">ALBERT</td>'
)
f.write("</tr></thead><tbody>\n")
for i, language in enumerate(languages):
emoji_name = language["emoji_name"]
language = language["name"]
f.write(
f'<tr><td><a href="#{emoji_name}-{language.lower().replace("/", "")}"><b>:{emoji_name}: {language}</b></a></td>'
)
# multilang
f.write("<td>")
dff = df[
(
df.apply(
lambda x: x["languages"] == language
and 10 <= len(df[df.index == x.name])
and x["type"] in multilang_models,
axis=1,
)
)
]
f.write(
" / ".join(
sorted(
f'<a href="#{x.name.lower().replace(" ", "-")}-multilingual">{x.name}</a>'
for x in dff.itertuples()
)
)
)
f.write("</td>")
# few langs
for i in few_langs_models:
f.write('<td align="center">')
dff = df[(df.index == i) & (df["languages"] == language)]
if len(dff):
f.write(
f'<a href="#{dff["name"].item().lower().replace(" ", "-")}-{language}">{dff["name"].item()}</a>'
)
f.write("</td>")
for model_name in single_language_models:
f.write('<td align="center">')
dff = df[
df.apply(
lambda x: x["languages"] == language
and x["type"] == model_name
and len(df[df.index == x.name]) == 1,
axis=1,
)
]
if len(dff):
f.write(
" / ".join(
sorted(
f'<a href="#{x.lower().replace(" ", "-")}-{language}">{x}</a>'
for x in dff["name"]
)
)
)
f.write("</td>")
f.write("\n")
f.write("</tbody></table>\n\n")
f.write("// TODO add legend\n\n")
for language in ["Multilingual"] + languages:
if language == "Multilingual":
emoji_name = "earth_africa"
else:
emoji_name = language["emoji_name"]
language = language["name"]
f.write(f"## :{emoji_name}: {language}\n\n")
if language == "Multilingual":
dff = df_o[df_o["languages"].str.len() >= 10]
else:
dff = df_o[
(df_o["languages"].apply(lambda x: language in x))
& (df_o["languages"].str.len() < 10)
]
f.write(
'<table width="100%"><thead><tr>'
'<th width="66%">name</th>'
'<th width="33%">description</th>'
"<th>type</th>"
"<th>cased</th>"
"<th>languages</th>"
"<th>corpora</th>"
"<th>links</th>"
"</tr></thead><tbody>"
)
for _, row in sorted(dff.iterrows(), key=lambda x: x[1]["name"]):
for column in columns:
if column == "name":
f.write(f"<td")
f.write(
f' id="{row["name"].lower().replace(" ", "-")}-{language}"'
)
f.write(f">{row[column]}</td>")
elif column == "languages":
f.write("<td>")
for x in sorted(
df[(df["name"] == row["name"])]["languages"].unique()
):
f.write(
f'<span title="{x}">:{next(y["emoji_name"] for y in languages if y["name"] == x)}:</span> '
)
f.write("</td>")
elif column == "links":
f.write("<td>")
if row["paper"] and row["paper"] != "?":
f.write(
f'<div title="paper"><a href="{row["paper"]}">📄</a></div>'
)
if row["citation"] and row["citation"] != "?":
f.write(
f'<div title="citation"><a href="{row["citation"]}">❞</a></div>'
)
if not pd.isna(row["huggingface"]):
f.write(
f"<div>"
f'<a title="huggingface model card" href="{row["huggingface"]}">🤗️</a> '
f"</div>"
)
f.write("</td>")
elif column == "pre-trained on":
f.write("<td><ul>")
for x in sorted(row["pre-trained on"]):
if x != "" and x != "?":
if x in corpora:
f.write(
f'<li title="{x}">{corpora[x]["emoji"]}</li>'
)
else:
f.write(f'<li title="{x}">{x}</li>')
f.write("</ul></td>")
elif column == "type":
if row["type"] in types:
if "image" in types[row["type"]]:
f.write(
f'<td align="center"><img width="21px" height="21px" title="{row["type"]}" src="{types[row["type"]]["image"]}"></td>'
)
else:
f.write(
f'<td align="center"><div title="{row["type"]}">{types[row["type"]]["emoji"]}</div></td>'
)
else:
f.write(f"<td>{row['type']}</td>")
elif column == "cased":
if row["cased"] in cases:
f.write(
f'<td><div title="{row["cased"]}">{cases[row["cased"]]["emoji"]}</div></td>'
)
else:
f.write(f"<td>{row['cased']}</td>")
else:
f.write(f"<td>{row[column]}</td>")
f.write("</tr>\n")
f.write("</tbody></table>\n\n")
if language != "Multilingual":
f.write(
" [+ multilingual](#earth_africa-multilingual)"
)
f.write("\n\n")
mdformat.file(here("docs/models.md"), extensions={"gfm"})
if __name__ == "__main__":
generate()
``` |
{
"source": "7gang/7synth",
"score": 3
} |
#### File: 7gang/7synth/waves.py
```python
import numpy as np
from scipy import signal
notes = { # maps keyboard keys to musical notes
"a": 440, # A4
"s": 494, # B4
"d": 523, # C4
"f": 587, # D4
"g": 660, # E4
"h": 698, # F4
"j": 784, # G4
"k": 880 # A5
}
def wave(note, duration=1):
""" Base method for generating basic wave data that must then be transformed into specialized waveforms """
frequency = notes[note]
samp_rate = 44100
n_data = duration * samp_rate
time = np.arange(0, n_data).T / samp_rate
init_phase = np.pi / 2
return frequency, time, init_phase
def sine(note, amplitude):
""" Generates a sine wave at given note and with given amplitude """
frequency, time, init_phase = wave(note)
return amplitude * np.cos(2 * np.pi * frequency * time + init_phase)
def square(note, amplitude):
""" Generates a square wave at given note and with given amplitude """
frequency, time, init_phase = wave(note)
return amplitude * signal.square(2 * np.pi * frequency * time + init_phase, duty=0.5)
def saw(note, amplitude):
""" Generates a saw wave at given note and with given amplitude """
frequency, time, init_phase = wave(note)
return amplitude * signal.sawtooth(2 * np.pi * frequency * time + init_phase)
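# Hedged usage sketch (not part of the original module): generate one second of each
# waveform for the 'a' key (A4, 440 Hz); amplitude 0.5 is an assumed value, not a project default.
def _example_waveforms():
    """Illustrative only: each call returns 44100 samples (one second at 44.1 kHz)."""
    return sine("a", 0.5), square("a", 0.5), saw("a", 0.5)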
``` |
{
"source": "7Geese/gutter-django",
"score": 2
} |
#### File: gutter-django/tests/mixins.py
```python
import django
from django.conf import settings
from exam import before, after
class SetupDjangoTest(object):
@before
def setup_django(self):
# For Django 1.7+, we need to run `django.setup()` first.
if hasattr(django, 'setup'):
self.__original_installed_apps = settings.INSTALLED_APPS
settings.INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sessions',
) + settings.INSTALLED_APPS
django.setup()
@after
def undo_setup_django(self):
if hasattr(django, 'setup'):
settings.INSTALLED_APPS = self.__original_installed_apps
del self.__original_installed_apps
``` |
{
"source": "7Geese/gutter",
"score": 2
} |
#### File: gutter/client/encoding.py
```python
from __future__ import absolute_import
# External Libraries
from durabledict.encoding import PickleEncoding
import jsonpickle as pickle
class JsonPickleEncoding(PickleEncoding):
@staticmethod
def encode(data):
return pickle.dumps(data)
@staticmethod
def decode(data):
try:
return pickle.loads(data)
except Exception:
return PickleEncoding.decode(data)
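# Hedged round-trip sketch (not part of the original module): jsonpickle serialises plain
# Python containers to a JSON string, and decode() falls back to PickleEncoding when
# jsonpickle cannot parse the payload (e.g. data written by the plain pickle encoding).
def _example_round_trip():
    """Illustrative only: encode/decode a small switch-like payload."""
    payload = {"name": "my_switch", "enabled": True}
    return JsonPickleEncoding.decode(JsonPickleEncoding.encode(payload)) == payload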
```
#### File: gutter/client/testutils.py
```python
from functools import wraps
from gutter.client import get_gutter_client
class SwitchContextManager(object):
"""
Allows temporarily enabling or disabling a switch.
Ideal for testing.
>>> @switches(my_switch_name=True)
>>> def foo():
>>> print gutter.active('my_switch_name')
>>> def foo():
>>> with switches(my_switch_name=True):
>>> print gutter.active('my_switch_name')
You may also optionally pass an instance of ``SwitchManager``
as the first argument.
>>> def foo():
>>> with switches(gutter, my_switch_name=True):
>>> print gutter.active('my_switch_name')
"""
def __init__(self, gutter=None, **keys):
self._gutter = gutter
self.keys = keys
@property
def gutter(self):
if self._gutter is None:
self._gutter = get_gutter_client()
elif isinstance(self._gutter, basestring):
self._gutter = get_gutter_client(alias=self._gutter)
return self._gutter
def __call__(self, func):
@wraps(func)
def inner(*args, **kwargs):
with self:
return func(*args, **kwargs)
return inner
def __enter__(self):
self.previous_active_func = self.gutter.active
def patched_active(gutter):
real_active = gutter.active
def wrapped(key, *args, **kwargs):
if key in self.keys:
return self.keys[key]
return real_active(key, *args, **kwargs)
return wrapped
self.gutter.active = patched_active(self.gutter)
def __exit__(self, exc_type, exc_val, exc_tb):
self.gutter.active = self.previous_active_func
switches = SwitchContextManager
``` |
{
"source": "7Geese/kombu",
"score": 3
} |
#### File: kombu/utils/uuid.py
```python
from __future__ import absolute_import, unicode_literals
from uuid import uuid4
def uuid(_uuid=uuid4):
"""Generate unique id in UUID4 format.
See Also:
For now this is provided by :func:`uuid.uuid4`.
"""
return str(_uuid())
```
#### File: integration/tests/test_redis.py
```python
from __future__ import absolute_import, unicode_literals
from funtests import transport
from kombu.tests.case import skip
@skip.unless_module('redis')
class test_redis(transport.TransportCase):
transport = 'redis'
prefix = 'redis'
def after_connect(self, connection):
client = connection.channel().client
client.info()
def test_cannot_connect_raises_connection_error(self):
conn = self.get_connection(port=65534)
with self.assertRaises(conn.connection_errors):
conn.connect()
```
#### File: t/unit/test_messaging.py
```python
from __future__ import absolute_import, unicode_literals
import pickle
import pytest
import sys
from collections import defaultdict
from case import Mock, patch
from kombu import Connection, Consumer, Producer, Exchange, Queue
from kombu.exceptions import MessageStateError
from kombu.utils import json
from kombu.utils.functional import ChannelPromise
from t.mocks import Transport
class test_Producer:
def setup(self):
self.exchange = Exchange('foo', 'direct')
self.connection = Connection(transport=Transport)
self.connection.connect()
assert self.connection.connection.connected
assert not self.exchange.is_bound
def test_repr(self):
p = Producer(self.connection)
assert repr(p)
def test_pickle(self):
chan = Mock()
producer = Producer(chan, serializer='pickle')
p2 = pickle.loads(pickle.dumps(producer))
assert p2.serializer == producer.serializer
def test_no_channel(self):
p = Producer(None)
assert not p._channel
@patch('kombu.messaging.maybe_declare')
def test_maybe_declare(self, maybe_declare):
p = self.connection.Producer()
q = Queue('foo')
p.maybe_declare(q)
maybe_declare.assert_called_with(q, p.channel, False)
@patch('kombu.common.maybe_declare')
def test_maybe_declare_when_entity_false(self, maybe_declare):
p = self.connection.Producer()
p.maybe_declare(None)
maybe_declare.assert_not_called()
def test_auto_declare(self):
channel = self.connection.channel()
p = Producer(channel, self.exchange, auto_declare=True)
# creates Exchange clone at bind
assert p.exchange is not self.exchange
assert p.exchange.is_bound
        # auto_declare declares exchange
assert 'exchange_declare' not in channel
p.publish('foo')
assert 'exchange_declare' in channel
def test_manual_declare(self):
channel = self.connection.channel()
p = Producer(channel, self.exchange, auto_declare=False)
assert p.exchange.is_bound
# auto_declare=False does not declare exchange
assert 'exchange_declare' not in channel
        # p.declare() declares exchange
p.declare()
assert 'exchange_declare' in channel
def test_prepare(self):
message = {'the quick brown fox': 'jumps over the lazy dog'}
channel = self.connection.channel()
p = Producer(channel, self.exchange, serializer='json')
m, ctype, cencoding = p._prepare(message, headers={})
assert json.loads(m) == message
assert ctype == 'application/json'
assert cencoding == 'utf-8'
def test_prepare_compression(self):
message = {'the quick brown fox': 'jumps over the lazy dog'}
channel = self.connection.channel()
p = Producer(channel, self.exchange, serializer='json')
headers = {}
m, ctype, cencoding = p._prepare(message, compression='zlib',
headers=headers)
assert ctype == 'application/json'
assert cencoding == 'utf-8'
assert headers['compression'] == 'application/x-gzip'
import zlib
assert json.loads(zlib.decompress(m).decode('utf-8')) == message
def test_prepare_custom_content_type(self):
message = 'the quick brown fox'.encode('utf-8')
channel = self.connection.channel()
p = Producer(channel, self.exchange, serializer='json')
m, ctype, cencoding = p._prepare(message, content_type='custom')
assert m == message
assert ctype == 'custom'
assert cencoding == 'binary'
m, ctype, cencoding = p._prepare(message, content_type='custom',
content_encoding='alien')
assert m == message
assert ctype == 'custom'
assert cencoding == 'alien'
def test_prepare_is_already_unicode(self):
message = 'the quick brown fox'
channel = self.connection.channel()
p = Producer(channel, self.exchange, serializer='json')
m, ctype, cencoding = p._prepare(message, content_type='text/plain')
assert m == message.encode('utf-8')
assert ctype == 'text/plain'
assert cencoding == 'utf-8'
m, ctype, cencoding = p._prepare(message, content_type='text/plain',
content_encoding='utf-8')
assert m == message.encode('utf-8')
assert ctype == 'text/plain'
assert cencoding == 'utf-8'
def test_publish_with_Exchange_instance(self):
p = self.connection.Producer()
p.channel = Mock()
p.channel.connection.client.declared_entities = set()
p.publish('hello', exchange=Exchange('foo'), delivery_mode='transient')
assert p._channel.basic_publish.call_args[1]['exchange'] == 'foo'
def test_publish_with_expiration(self):
p = self.connection.Producer()
p.channel = Mock()
p.channel.connection.client.declared_entities = set()
p.publish('hello', exchange=Exchange('foo'), expiration=10)
properties = p._channel.prepare_message.call_args[0][5]
assert properties['expiration'] == '10000'
def test_publish_with_reply_to(self):
p = self.connection.Producer()
p.channel = Mock()
p.channel.connection.client.declared_entities = set()
assert not p.exchange.name
p.publish('hello', exchange=Exchange('foo'), reply_to=Queue('foo'))
properties = p._channel.prepare_message.call_args[0][5]
assert properties['reply_to'] == 'foo'
def test_set_on_return(self):
chan = Mock()
chan.events = defaultdict(Mock)
p = Producer(ChannelPromise(lambda: chan), on_return='on_return')
p.channel
chan.events['basic_return'].add.assert_called_with('on_return')
def test_publish_retry_calls_ensure(self):
p = Producer(Mock())
p._connection = Mock()
p._connection.declared_entities = set()
ensure = p.connection.ensure = Mock()
p.publish('foo', exchange='foo', retry=True)
ensure.assert_called()
def test_publish_retry_with_declare(self):
p = self.connection.Producer()
p.maybe_declare = Mock()
p.connection.ensure = Mock()
ex = Exchange('foo')
p._publish('hello', 0, '', '', {}, {}, 'rk', 0, 0, ex, declare=[ex])
p.maybe_declare.assert_called_with(ex)
def test_revive_when_channel_is_connection(self):
p = self.connection.Producer()
p.exchange = Mock()
new_conn = Connection('memory://')
defchan = new_conn.default_channel
p.revive(new_conn)
assert p.channel is defchan
p.exchange.revive.assert_called_with(defchan)
def test_enter_exit(self):
p = self.connection.Producer()
p.release = Mock()
assert p.__enter__() is p
p.__exit__()
p.release.assert_called_with()
def test_connection_property_handles_AttributeError(self):
p = self.connection.Producer()
p.channel = object()
p.__connection__ = None
assert p.connection is None
def test_publish(self):
channel = self.connection.channel()
p = Producer(channel, self.exchange, serializer='json')
message = {'the quick brown fox': 'jumps over the lazy dog'}
ret = p.publish(message, routing_key='process')
assert 'prepare_message' in channel
assert 'basic_publish' in channel
m, exc, rkey = ret
assert json.loads(m['body']) == message
assert m['content_type'] == 'application/json'
assert m['content_encoding'] == 'utf-8'
assert m['priority'] == 0
assert m['properties']['delivery_mode'] == 2
assert exc == p.exchange.name
assert rkey == 'process'
def test_no_exchange(self):
chan = self.connection.channel()
p = Producer(chan)
assert not p.exchange.name
def test_revive(self):
chan = self.connection.channel()
p = Producer(chan)
chan2 = self.connection.channel()
p.revive(chan2)
assert p.channel is chan2
assert p.exchange.channel is chan2
def test_on_return(self):
chan = self.connection.channel()
def on_return(exception, exchange, routing_key, message):
pass
p = Producer(chan, on_return=on_return)
assert on_return in chan.events['basic_return']
assert p.on_return
class test_Consumer:
def setup(self):
self.connection = Connection(transport=Transport)
self.connection.connect()
assert self.connection.connection.connected
self.exchange = Exchange('foo', 'direct')
def test_accept(self):
a = Consumer(self.connection)
assert a.accept is None
b = Consumer(self.connection, accept=['json', 'pickle'])
assert b.accept == {
'application/json', 'application/x-python-serialize',
}
c = Consumer(self.connection, accept=b.accept)
assert b.accept == c.accept
def test_enter_exit_cancel_raises(self):
c = Consumer(self.connection)
c.cancel = Mock(name='Consumer.cancel')
c.cancel.side_effect = KeyError('foo')
with c:
pass
c.cancel.assert_called_with()
def test_enter_exit_cancel_not_called_on_connection_error(self):
c = Consumer(self.connection)
c.cancel = Mock(name='Consumer.cancel')
assert self.connection.connection_errors
with pytest.raises(self.connection.connection_errors[0]):
with c:
raise self.connection.connection_errors[0]()
c.cancel.assert_not_called()
def test_receive_callback_accept(self):
message = Mock(name='Message')
message.errors = []
callback = Mock(name='on_message')
c = Consumer(self.connection, accept=['json'], on_message=callback)
c.on_decode_error = None
c.channel = Mock(name='channel')
c.channel.message_to_python = None
c._receive_callback(message)
callback.assert_called_with(message)
assert message.accept == c.accept
def test_accept__content_disallowed(self):
conn = Connection('memory://')
q = Queue('foo', exchange=self.exchange)
p = conn.Producer()
p.publish(
{'complex': object()},
declare=[q], exchange=self.exchange, serializer='pickle',
)
callback = Mock(name='callback')
with conn.Consumer(queues=[q], callbacks=[callback]) as consumer:
with pytest.raises(consumer.ContentDisallowed):
conn.drain_events(timeout=1)
callback.assert_not_called()
def test_accept__content_allowed(self):
conn = Connection('memory://')
q = Queue('foo', exchange=self.exchange)
p = conn.Producer()
p.publish(
{'complex': object()},
declare=[q], exchange=self.exchange, serializer='pickle',
)
callback = Mock(name='callback')
with conn.Consumer(queues=[q], accept=['pickle'],
callbacks=[callback]):
conn.drain_events(timeout=1)
callback.assert_called()
body, message = callback.call_args[0]
assert body['complex']
def test_set_no_channel(self):
c = Consumer(None)
assert c.channel is None
c.revive(Mock())
assert c.channel
def test_set_no_ack(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True, no_ack=True)
assert consumer.no_ack
def test_add_queue_when_auto_declare(self):
consumer = self.connection.Consumer(auto_declare=True)
q = Mock()
q.return_value = q
consumer.add_queue(q)
assert q in consumer.queues
q.declare.assert_called_with()
def test_add_queue_when_not_auto_declare(self):
consumer = self.connection.Consumer(auto_declare=False)
q = Mock()
q.return_value = q
consumer.add_queue(q)
assert q in consumer.queues
assert not q.declare.call_count
def test_consume_without_queues_returns(self):
consumer = self.connection.Consumer()
consumer.queues[:] = []
assert consumer.consume() is None
def test_consuming_from(self):
consumer = self.connection.Consumer()
consumer.queues[:] = [Queue('a'), Queue('b'), Queue('d')]
consumer._active_tags = {'a': 1, 'b': 2}
assert not consumer.consuming_from(Queue('c'))
assert not consumer.consuming_from('c')
assert not consumer.consuming_from(Queue('d'))
assert not consumer.consuming_from('d')
assert consumer.consuming_from(Queue('a'))
assert consumer.consuming_from(Queue('b'))
assert consumer.consuming_from('b')
def test_receive_callback_without_m2p(self):
channel = self.connection.channel()
c = channel.Consumer()
m2p = getattr(channel, 'message_to_python')
channel.message_to_python = None
try:
message = Mock()
message.errors = []
message.decode.return_value = 'Hello'
recv = c.receive = Mock()
c._receive_callback(message)
recv.assert_called_with('Hello', message)
finally:
channel.message_to_python = m2p
def test_receive_callback__message_errors(self):
channel = self.connection.channel()
channel.message_to_python = None
c = channel.Consumer()
message = Mock()
try:
raise KeyError('foo')
except KeyError:
message.errors = [sys.exc_info()]
message._reraise_error.side_effect = KeyError()
with pytest.raises(KeyError):
c._receive_callback(message)
def test_set_callbacks(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
callbacks = [lambda x, y: x,
lambda x, y: x]
consumer = Consumer(channel, queue, auto_declare=True,
callbacks=callbacks)
assert consumer.callbacks == callbacks
def test_auto_declare(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True)
consumer.consume()
consumer.consume() # twice is a noop
assert consumer.queues[0] is not queue
assert consumer.queues[0].is_bound
assert consumer.queues[0].exchange.is_bound
assert consumer.queues[0].exchange is not self.exchange
for meth in ('exchange_declare',
'queue_declare',
'queue_bind',
'basic_consume'):
assert meth in channel
assert channel.called.count('basic_consume') == 1
assert consumer._active_tags
consumer.cancel_by_queue(queue.name)
consumer.cancel_by_queue(queue.name)
assert not consumer._active_tags
def test_consumer_tag_prefix(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, tag_prefix='consumer_')
consumer.consume()
assert consumer._active_tags[queue.name].startswith('consumer_')
def test_manual_declare(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=False)
assert consumer.queues[0] is not queue
assert consumer.queues[0].is_bound
assert consumer.queues[0].exchange.is_bound
assert consumer.queues[0].exchange is not self.exchange
for meth in ('exchange_declare',
'queue_declare',
'basic_consume'):
assert meth not in channel
consumer.declare()
for meth in ('exchange_declare',
'queue_declare',
'queue_bind'):
assert meth in channel
assert 'basic_consume' not in channel
consumer.consume()
assert 'basic_consume' in channel
def test_consume__cancel(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True)
consumer.consume()
consumer.cancel()
assert 'basic_cancel' in channel
assert not consumer._active_tags
def test___enter____exit__(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True)
context = consumer.__enter__()
assert context is consumer
assert consumer._active_tags
res = consumer.__exit__(None, None, None)
assert not res
assert 'basic_cancel' in channel
assert not consumer._active_tags
def test_flow(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True)
consumer.flow(False)
assert 'flow' in channel
def test_qos(self):
channel = self.connection.channel()
queue = Queue('qname', self.exchange, 'rkey')
consumer = Consumer(channel, queue, auto_declare=True)
consumer.qos(30, 10, False)
assert 'basic_qos' in channel
def test_purge(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
b2 = Queue('qname2', self.exchange, 'rkey')
b3 = Queue('qname3', self.exchange, 'rkey')
b4 = Queue('qname4', self.exchange, 'rkey')
consumer = Consumer(channel, [b1, b2, b3, b4], auto_declare=True)
consumer.purge()
assert channel.called.count('queue_purge') == 4
def test_multiple_queues(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
b2 = Queue('qname2', self.exchange, 'rkey')
b3 = Queue('qname3', self.exchange, 'rkey')
b4 = Queue('qname4', self.exchange, 'rkey')
consumer = Consumer(channel, [b1, b2, b3, b4])
consumer.consume()
assert channel.called.count('exchange_declare') == 4
assert channel.called.count('queue_declare') == 4
assert channel.called.count('queue_bind') == 4
assert channel.called.count('basic_consume') == 4
assert len(consumer._active_tags) == 4
consumer.cancel()
assert channel.called.count('basic_cancel') == 4
assert not len(consumer._active_tags)
def test_receive_callback(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
received = []
def callback(message_data, message):
received.append(message_data)
message.ack()
message.payload # trigger cache
consumer.register_callback(callback)
consumer._receive_callback({'foo': 'bar'})
assert 'basic_ack' in channel
assert 'message_to_python' in channel
assert received[0] == {'foo': 'bar'}
def test_basic_ack_twice(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
def callback(message_data, message):
message.ack()
message.ack()
consumer.register_callback(callback)
with pytest.raises(MessageStateError):
consumer._receive_callback({'foo': 'bar'})
def test_basic_reject(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
def callback(message_data, message):
message.reject()
consumer.register_callback(callback)
consumer._receive_callback({'foo': 'bar'})
assert 'basic_reject' in channel
def test_basic_reject_twice(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
def callback(message_data, message):
message.reject()
message.reject()
consumer.register_callback(callback)
with pytest.raises(MessageStateError):
consumer._receive_callback({'foo': 'bar'})
assert 'basic_reject' in channel
def test_basic_reject__requeue(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
def callback(message_data, message):
message.requeue()
consumer.register_callback(callback)
consumer._receive_callback({'foo': 'bar'})
assert 'basic_reject:requeue' in channel
def test_basic_reject__requeue_twice(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
def callback(message_data, message):
message.requeue()
message.requeue()
consumer.register_callback(callback)
with pytest.raises(MessageStateError):
consumer._receive_callback({'foo': 'bar'})
assert 'basic_reject:requeue' in channel
def test_receive_without_callbacks_raises(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
with pytest.raises(NotImplementedError):
consumer.receive(1, 2)
def test_decode_error(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
consumer.channel.throw_decode_error = True
with pytest.raises(ValueError):
consumer._receive_callback({'foo': 'bar'})
def test_on_decode_error_callback(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
thrown = []
def on_decode_error(msg, exc):
thrown.append((msg.body, exc))
consumer = Consumer(channel, [b1], on_decode_error=on_decode_error)
consumer.channel.throw_decode_error = True
consumer._receive_callback({'foo': 'bar'})
assert thrown
m, exc = thrown[0]
assert json.loads(m) == {'foo': 'bar'}
assert isinstance(exc, ValueError)
def test_recover(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
consumer.recover()
assert 'basic_recover' in channel
def test_revive(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
consumer = Consumer(channel, [b1])
channel2 = self.connection.channel()
consumer.revive(channel2)
assert consumer.channel is channel2
assert consumer.queues[0].channel is channel2
assert consumer.queues[0].exchange.channel is channel2
def test_revive__with_prefetch_count(self):
channel = Mock(name='channel')
b1 = Queue('qname1', self.exchange, 'rkey')
Consumer(channel, [b1], prefetch_count=14)
channel.basic_qos.assert_called_with(0, 14, False)
def test__repr__(self):
channel = self.connection.channel()
b1 = Queue('qname1', self.exchange, 'rkey')
assert repr(Consumer(channel, [b1]))
def test_connection_property_handles_AttributeError(self):
p = self.connection.Consumer()
p.channel = object()
assert p.connection is None
```
#### File: unit/transport/test_mongodb.py
```python
from __future__ import absolute_import, unicode_literals
import datetime
import pytest
from case import MagicMock, call, patch, skip
from kombu import Connection
from kombu.five import Empty
def _create_mock_connection(url='', **kwargs):
from kombu.transport import mongodb # noqa
class _Channel(mongodb.Channel):
# reset _fanout_queues for each instance
_fanout_queues = {}
collections = {}
now = datetime.datetime.utcnow()
def _create_client(self):
mock = MagicMock(name='client')
# we need new mock object for every collection
def get_collection(name):
try:
return self.collections[name]
except KeyError:
mock = self.collections[name] = MagicMock(
name='collection:%s' % name)
return mock
mock.__getitem__.side_effect = get_collection
return mock
def get_now(self):
return self.now
class Transport(mongodb.Transport):
Channel = _Channel
return Connection(url, transport=Transport, **kwargs)
@skip.unless_module('pymongo')
class test_mongodb_uri_parsing:
def test_defaults(self):
url = 'mongodb://'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert dbname == 'kombu_default'
assert hostname == 'mongodb://127.0.0.1'
def test_custom_host(self):
url = 'mongodb://localhost'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert dbname == 'kombu_default'
def test_custom_database(self):
url = 'mongodb://localhost/dbname'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert dbname == 'dbname'
def test_custom_credentials(self):
url = 'mongodb://localhost/dbname'
channel = _create_mock_connection(
url, userid='foo', password='<PASSWORD>').default_channel
hostname, dbname, options = channel._parse_uri()
assert hostname == 'mongodb://foo:bar@localhost/dbname'
assert dbname == 'dbname'
def test_correct_readpreference(self):
url = 'mongodb://localhost/dbname?readpreference=nearest'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert options['readpreference'] == 'nearest'
class BaseMongoDBChannelCase:
def _get_method(self, cname, mname):
collection = getattr(self.channel, cname)
method = getattr(collection, mname.split('.', 1)[0])
for bit in mname.split('.')[1:]:
method = getattr(method.return_value, bit)
return method
def set_operation_return_value(self, cname, mname, *values):
method = self._get_method(cname, mname)
if len(values) == 1:
method.return_value = values[0]
else:
method.side_effect = values
    def declare_broadcast_queue(self, queue):
self.channel.exchange_declare('fanout_exchange', type='fanout')
self.channel._queue_bind('fanout_exchange', 'foo', '*', queue)
assert queue in self.channel._broadcast_cursors
def get_broadcast(self, queue):
return self.channel._broadcast_cursors[queue]
def set_broadcast_return_value(self, queue, *values):
        self.declare_broadcast_queue(queue)
cursor = MagicMock(name='cursor')
cursor.__iter__.return_value = iter(values)
self.channel._broadcast_cursors[queue]._cursor = iter(cursor)
def assert_collection_accessed(self, *collections):
self.channel.client.__getitem__.assert_has_calls(
[call(c) for c in collections], any_order=True)
def assert_operation_has_calls(self, cname, mname, calls, any_order=False):
method = self._get_method(cname, mname)
method.assert_has_calls(calls, any_order=any_order)
def assert_operation_called_with(self, cname, mname, *args, **kwargs):
self.assert_operation_has_calls(cname, mname, [call(*args, **kwargs)])
@skip.unless_module('pymongo')
class test_mongodb_channel(BaseMongoDBChannelCase):
def setup(self):
self.connection = _create_mock_connection()
self.channel = self.connection.default_channel
# Tests for "public" channel interface
def test_new_queue(self):
self.channel._new_queue('foobar')
self.channel.client.assert_not_called()
def test_get(self):
import pymongo
self.set_operation_return_value('messages', 'find_and_modify', {
'_id': 'docId', 'payload': '{"some": "data"}',
})
event = self.channel._get('foobar')
self.assert_collection_accessed('messages')
self.assert_operation_called_with(
'messages', 'find_and_modify',
query={'queue': 'foobar'},
remove=True,
sort=[
('priority', pymongo.ASCENDING),
],
)
assert event == {'some': 'data'}
self.set_operation_return_value('messages', 'find_and_modify', None)
with pytest.raises(Empty):
self.channel._get('foobar')
def test_get_fanout(self):
self.set_broadcast_return_value('foobar', {
'_id': 'docId1', 'payload': '{"some": "data"}',
})
event = self.channel._get('foobar')
self.assert_collection_accessed('messages.broadcast')
assert event == {'some': 'data'}
with pytest.raises(Empty):
self.channel._get('foobar')
def test_put(self):
self.channel._put('foobar', {'some': 'data'})
self.assert_collection_accessed('messages')
self.assert_operation_called_with('messages', 'insert', {
'queue': 'foobar',
'priority': 9,
'payload': '{"some": "data"}',
})
def test_put_fanout(self):
        self.declare_broadcast_queue('foobar')
self.channel._put_fanout('foobar', {'some': 'data'}, 'foo')
self.assert_collection_accessed('messages.broadcast')
self.assert_operation_called_with('broadcast', 'insert', {
'queue': 'foobar', 'payload': '{"some": "data"}',
})
def test_size(self):
self.set_operation_return_value('messages', 'find.count', 77)
result = self.channel._size('foobar')
self.assert_collection_accessed('messages')
self.assert_operation_called_with(
'messages', 'find', {'queue': 'foobar'},
)
assert result == 77
def test_size_fanout(self):
        self.declare_broadcast_queue('foobar')
cursor = MagicMock(name='cursor')
cursor.get_size.return_value = 77
self.channel._broadcast_cursors['foobar'] = cursor
result = self.channel._size('foobar')
assert result == 77
def test_purge(self):
self.set_operation_return_value('messages', 'find.count', 77)
result = self.channel._purge('foobar')
self.assert_collection_accessed('messages')
self.assert_operation_called_with(
'messages', 'remove', {'queue': 'foobar'},
)
assert result == 77
def test_purge_fanout(self):
        self.declare_broadcast_queue('foobar')
cursor = MagicMock(name='cursor')
cursor.get_size.return_value = 77
self.channel._broadcast_cursors['foobar'] = cursor
result = self.channel._purge('foobar')
cursor.purge.assert_any_call()
assert result == 77
def test_get_table(self):
state_table = [('foo', '*', 'foo')]
stored_table = [('bar', '*', 'bar')]
self.channel.exchange_declare('test_exchange')
self.channel.state.exchanges['test_exchange']['table'] = state_table
self.set_operation_return_value('routing', 'find', [{
'_id': 'docId',
'routing_key': stored_table[0][0],
'pattern': stored_table[0][1],
'queue': stored_table[0][2],
}])
result = self.channel.get_table('test_exchange')
self.assert_collection_accessed('messages.routing')
self.assert_operation_called_with(
'routing', 'find', {'exchange': 'test_exchange'},
)
assert set(result) == frozenset(state_table) | frozenset(stored_table)
def test_queue_bind(self):
self.channel._queue_bind('test_exchange', 'foo', '*', 'foo')
self.assert_collection_accessed('messages.routing')
self.assert_operation_called_with(
'routing', 'update',
{'queue': 'foo', 'pattern': '*',
'routing_key': 'foo', 'exchange': 'test_exchange'},
{'queue': 'foo', 'pattern': '*',
'routing_key': 'foo', 'exchange': 'test_exchange'},
upsert=True,
)
def test_queue_delete(self):
self.channel.queue_delete('foobar')
self.assert_collection_accessed('messages.routing')
self.assert_operation_called_with(
'routing', 'remove', {'queue': 'foobar'},
)
def test_queue_delete_fanout(self):
        self.declare_broadcast_queue('foobar')
cursor = MagicMock(name='cursor')
self.channel._broadcast_cursors['foobar'] = cursor
self.channel.queue_delete('foobar')
cursor.close.assert_any_call()
assert 'foobar' not in self.channel._broadcast_cursors
assert 'foobar' not in self.channel._fanout_queues
# Tests for channel internals
def test_create_broadcast(self):
self.channel._create_broadcast(self.channel.client)
self.channel.client.create_collection.assert_called_with(
'messages.broadcast', capped=True, size=100000,
)
def test_ensure_indexes(self):
self.channel._ensure_indexes(self.channel.client)
self.assert_operation_called_with(
'messages', 'ensure_index',
[('queue', 1), ('priority', 1), ('_id', 1)],
background=True,
)
self.assert_operation_called_with(
'broadcast', 'ensure_index',
[('queue', 1)],
)
self.assert_operation_called_with(
'routing', 'ensure_index', [('queue', 1), ('exchange', 1)],
)
def test_create_broadcast_cursor(self):
import pymongo
with patch.object(pymongo, 'version_tuple', (2, )):
self.channel._create_broadcast_cursor(
'fanout_exchange', 'foo', '*', 'foobar',
)
self.assert_collection_accessed('messages.broadcast')
self.assert_operation_called_with(
'broadcast', 'find',
tailable=True,
query={'queue': 'fanout_exchange'},
)
if pymongo.version_tuple >= (3, ):
self.channel._create_broadcast_cursor(
'fanout_exchange1', 'foo', '*', 'foobar',
)
self.assert_collection_accessed('messages.broadcast')
self.assert_operation_called_with(
'broadcast', 'find',
cursor_type=pymongo.CursorType.TAILABLE,
filter={'queue': 'fanout_exchange1'},
)
def test_open_rc_version(self):
import pymongo
def server_info(self):
return {'version': '3.6.0-rc'}
with patch.object(pymongo.MongoClient, 'server_info', server_info):
self.channel._open()
@skip.unless_module('pymongo')
class test_mongodb_channel_ttl(BaseMongoDBChannelCase):
def setup(self):
self.connection = _create_mock_connection(
transport_options={'ttl': True},
)
self.channel = self.connection.default_channel
self.expire_at = (
self.channel.get_now() + datetime.timedelta(milliseconds=777))
# Tests
def test_new_queue(self):
self.channel._new_queue('foobar')
self.assert_operation_called_with(
'queues', 'update',
{'_id': 'foobar'},
{'_id': 'foobar', 'options': {}, 'expire_at': None},
upsert=True,
)
def test_get(self):
import pymongo
self.set_operation_return_value('queues', 'find_one', {
'_id': 'docId', 'options': {'arguments': {'x-expires': 777}},
})
self.set_operation_return_value('messages', 'find_and_modify', {
'_id': 'docId', 'payload': '{"some": "data"}',
})
self.channel._get('foobar')
self.assert_collection_accessed('messages', 'messages.queues')
self.assert_operation_called_with(
'messages', 'find_and_modify',
query={'queue': 'foobar'},
remove=True,
sort=[
('priority', pymongo.ASCENDING),
],
)
self.assert_operation_called_with(
'routing', 'update',
{'queue': 'foobar'},
{'$set': {'expire_at': self.expire_at}},
multi=True,
)
def test_put(self):
self.set_operation_return_value('queues', 'find_one', {
'_id': 'docId', 'options': {'arguments': {'x-message-ttl': 777}},
})
self.channel._put('foobar', {'some': 'data'})
self.assert_collection_accessed('messages')
self.assert_operation_called_with('messages', 'insert', {
'queue': 'foobar',
'priority': 9,
'payload': '{"some": "data"}',
'expire_at': self.expire_at,
})
def test_queue_bind(self):
self.set_operation_return_value('queues', 'find_one', {
'_id': 'docId', 'options': {'arguments': {'x-expires': 777}},
})
self.channel._queue_bind('test_exchange', 'foo', '*', 'foo')
self.assert_collection_accessed('messages.routing')
self.assert_operation_called_with(
'routing', 'update',
{'queue': 'foo', 'pattern': '*',
'routing_key': 'foo', 'exchange': 'test_exchange'},
{'queue': 'foo', 'pattern': '*',
'routing_key': 'foo', 'exchange': 'test_exchange',
'expire_at': self.expire_at},
upsert=True,
)
def test_queue_delete(self):
self.channel.queue_delete('foobar')
self.assert_collection_accessed('messages.queues')
self.assert_operation_called_with(
'queues', 'remove', {'_id': 'foobar'})
def test_ensure_indexes(self):
self.channel._ensure_indexes(self.channel.client)
self.assert_operation_called_with(
'messages', 'ensure_index', [('expire_at', 1)],
expireAfterSeconds=0)
self.assert_operation_called_with(
'routing', 'ensure_index', [('expire_at', 1)],
expireAfterSeconds=0)
self.assert_operation_called_with(
'queues', 'ensure_index', [('expire_at', 1)], expireAfterSeconds=0)
def test_get_expire(self):
result = self.channel._get_expire(
{'arguments': {'x-expires': 777}}, 'x-expires')
self.channel.client.assert_not_called()
assert result == self.expire_at
self.set_operation_return_value('queues', 'find_one', {
'_id': 'docId', 'options': {'arguments': {'x-expires': 777}},
})
result = self.channel._get_expire('foobar', 'x-expires')
assert result == self.expire_at
def test_update_queues_expire(self):
self.set_operation_return_value('queues', 'find_one', {
'_id': 'docId', 'options': {'arguments': {'x-expires': 777}},
})
self.channel._update_queues_expire('foobar')
self.assert_collection_accessed('messages.routing', 'messages.queues')
self.assert_operation_called_with(
'routing', 'update',
{'queue': 'foobar'},
{'$set': {'expire_at': self.expire_at}},
multi=True,
)
self.assert_operation_called_with(
'queues', 'update',
{'_id': 'foobar'},
{'$set': {'expire_at': self.expire_at}},
multi=True,
)
@skip.unless_module('pymongo')
class test_mongodb_channel_calc_queue_size(BaseMongoDBChannelCase):
def setup(self):
self.connection = _create_mock_connection(
transport_options={'calc_queue_size': False})
self.channel = self.connection.default_channel
self.expire_at = (
self.channel.get_now() + datetime.timedelta(milliseconds=777))
# Tests
def test_size(self):
self.set_operation_return_value('messages', 'find.count', 77)
result = self.channel._size('foobar')
self.assert_operation_has_calls('messages', 'find', [])
assert result == 0
```
#### File: unit/transport/test_redis.py
```python
from __future__ import absolute_import, unicode_literals
import fakeredis
import pytest
import redis
import unittest
import socket
from array import array
from case import ANY, ContextMock, Mock, call, mock, skip, patch
from collections import defaultdict
from contextlib import contextmanager
from itertools import count
from kombu import Connection, Exchange, Queue, Consumer, Producer
from kombu.exceptions import InconsistencyError, VersionMismatch
from kombu.five import Empty, Queue as _Queue
from kombu.transport import virtual
from kombu.utils import eventio # patch poll
from kombu.utils.json import dumps
_fake_redis_client = None
def _get_fake_redis_client():
global _fake_redis_client
if _fake_redis_client is None:
_fake_redis_client = FakeRedisClient()
return _fake_redis_client
class _poll(eventio._select):
def register(self, fd, flags):
if flags & eventio.READ:
self._rfd.add(fd)
def poll(self, timeout):
events = []
for fd in self._rfd:
if fd.data:
events.append((fd.fileno(), eventio.READ))
return events
eventio.poll = _poll
# must import after poller patch, pep8 complains
from kombu.transport import redis as kombu_redis # noqa
class ResponseError(Exception):
pass
class DummyParser(object):
def __init__(self, *args, **kwargs):
self.socket_read_size = 1
self.encoder = None
self._sock = None
self._buffer = None
def on_disconnect(self):
self.socket_read_size = 1
self.encoder = None
self._sock = None
self._buffer = None
def on_connect(self, connection):
pass
class FakeRedisSocket(fakeredis._server.FakeSocket):
blocking = True
filenos = count(30)
def __init__(self, server):
super(FakeRedisSocket, self).__init__(server)
self._server = server
self._fileno = next(self.filenos)
self.data = []
self.connection = None
self.channel = None
self.transport_options = {}
self.hostname = None
self.port = None
self.password = <PASSWORD>
self.virtual_host = '/'
self.max_connections = 10
self.ssl = None
class FakeRedisConnection(fakeredis.FakeConnection):
disconnected = False
default_port = 6379
channel_max = 65535
def __init__(self, client, server, **kwargs):
kwargs['parser_class'] = DummyParser
super(fakeredis.FakeConnection, self).__init__(**kwargs)
if client is None:
client = _get_fake_redis_client()
self.client = client
if server is None:
server = fakeredis.FakeServer()
self._server = server
self._sock = FakeRedisSocket(server=server)
try:
self.on_connect()
except redis.exceptions.RedisError:
# clean up after any error in on_connect
self.disconnect()
raise
self._parser = ()
self._avail_channel_ids = array(
virtual.base.ARRAY_TYPE_H, range(self.channel_max, 0, -1),
)
self.cycle = kombu_redis.MultiChannelPoller()
conn_errs, channel_errs = kombu_redis.get_redis_error_classes()
self.connection_errors, self.channel_errors = conn_errs, channel_errs
def disconnect(self):
self.disconnected = True
class FakeRedisConnectionPool(redis.ConnectionPool):
def __init__(self, connection_class, max_connections=None,
**connection_kwargs):
connection_class = FakeRedisConnection
connection_kwargs['client'] = None
connection_kwargs['server'] = None
self._connections = []
super(FakeRedisConnectionPool, self).__init__(
connection_class=connection_class,
max_connections=max_connections,
**connection_kwargs
)
def get_connection(self, *args, **kwargs):
connection = self.connection_class(**self.connection_kwargs)
self._connections.append(connection)
return connection
def release(self, connection):
pass
class FakeRedisClient(fakeredis.FakeStrictRedis):
queues = {}
shard_hint = None
def __init__(self, db=None, port=None, connection_pool=None, **kwargs):
self._called = []
self._connection = None
self.bgsave_raises_ResponseError = False
self.server = server = fakeredis.FakeServer()
connection_pool = FakeRedisConnectionPool(FakeRedisConnection)
self.connection_pool = connection_pool
super(FakeRedisClient, self).__init__(
db=db, port=port, connection_pool=connection_pool, server=server)
self.connection = FakeRedisConnection(self, server)
self.response_callbacks = dict()
def __del__(self, key=None):
if key:
self.delete(key)
def ping(self, *args, **kwargs):
return True
def pipeline(self):
return FakePipeline(self.server, self.connection_pool, [], '1234', '')
def set_response_callback(self, command, callback):
pass
def _new_queue(self, queue, auto_delete=False, **kwargs):
self.queues[queue] = _Queue()
if auto_delete:
self.auto_delete_queues.add(queue)
def rpop(self, key):
try:
return self.queues[key].get_nowait()
except (KeyError, Empty):
pass
def llen(self, key):
try:
return self.queues[key].qsize()
except KeyError:
return 0
def lpush(self, key, value):
self.queues[key].put_nowait(value)
def pubsub(self, *args, **kwargs):
self.connection_pool = FakeRedisConnectionPool(FakeRedisConnection)
return self
def delete(self, key):
self.queues.pop(key, None)
class FakeRedisClientLite(object):
"""The original FakeRedis client from Kombu to support the
Producer/Consumer TestCases, preferred to use FakeRedisClient."""
queues = {}
sets = defaultdict(set)
hashes = defaultdict(dict)
shard_hint = None
def __init__(self, db=None, port=None, connection_pool=None, **kwargs):
self._called = []
self._connection = None
self.bgsave_raises_ResponseError = False
self.connection = self._sconnection(self)
def exists(self, key):
return key in self.queues or key in self.sets
def hset(self, key, k, v):
self.hashes[key][k] = v
def hget(self, key, k):
return self.hashes[key].get(k)
def hdel(self, key, k):
self.hashes[key].pop(k, None)
def sadd(self, key, member, *args):
self.sets[key].add(member)
def zadd(self, key, *args):
(mapping,) = args
for item in mapping:
self.sets[key].add(item)
def smembers(self, key):
return self.sets.get(key, set())
def sismember(self, name, value):
return value in self.sets.get(name, set())
def scard(self, key):
return len(self.sets.get(key, set()))
def ping(self, *args, **kwargs):
return True
def srem(self, key, *args):
self.sets.pop(key, None)
zrem = srem
def llen(self, key):
try:
return self.queues[key].qsize()
except KeyError:
return 0
def lpush(self, key, value):
self.queues[key].put_nowait(value)
def parse_response(self, connection, type, **options):
cmd, queues = self.connection._sock.data.pop()
queues = list(queues)
assert cmd == type
self.connection._sock.data = []
if type == 'BRPOP':
timeout = queues.pop()
item = self.brpop(queues, timeout)
if item:
return item
raise Empty()
def brpop(self, keys, timeout=None):
for key in keys:
try:
item = self.queues[key].get_nowait()
except Empty:
pass
else:
return key, item
def rpop(self, key):
try:
return self.queues[key].get_nowait()
except (KeyError, Empty):
pass
def __contains__(self, k):
return k in self._called
def pipeline(self):
return FakePipelineLite(self)
def encode(self, value):
return str(value)
def _new_queue(self, key):
self.queues[key] = _Queue()
class _sconnection(object):
disconnected = False
class _socket(object):
blocking = True
filenos = count(30)
def __init__(self, *args):
self._fileno = next(self.filenos)
self.data = []
def fileno(self):
return self._fileno
def setblocking(self, blocking):
self.blocking = blocking
def __init__(self, client):
self.client = client
self._sock = self._socket()
def disconnect(self):
self.disconnected = True
def send_command(self, cmd, *args):
self._sock.data.append((cmd, args))
class FakePipelineLite(object):
def __init__(self, client):
self.client = client
self.stack = []
def __enter__(self):
return self
def __exit__(self, *exc_info):
pass
def __getattr__(self, key):
if key not in self.__dict__:
def _add(*args, **kwargs):
self.stack.append((getattr(self.client, key), args, kwargs))
return self
return _add
return self.__dict__[key]
def execute(self):
stack = list(self.stack)
self.stack[:] = []
return [fun(*args, **kwargs) for fun, args, kwargs in stack]
class FakePipeline(redis.client.Pipeline):
def __init__(self, server, connection_pool,
response_callbacks, transaction, shard_hint):
if not server:
server = fakeredis.FakeServer()
self._server = server
correct_pool_instance = isinstance(
connection_pool, FakeRedisConnectionPool)
if connection_pool is not None and not correct_pool_instance:
connection_pool = FakeRedisConnectionPool(FakeRedisConnection)
self.connection_pool = connection_pool
self.connection = FakeRedisConnection(self, server)
self.client = connection_pool.get_connection().client
self.response_callbacks = response_callbacks
self.transaction = transaction
self.shard_hint = shard_hint
self.watching = False
self.reset()
def __enter__(self):
return self
def __exit__(self, *exc_info):
pass
def __getattr__(self, key):
if key not in self.__dict__:
def _add(*args, **kwargs):
self.command_stack.append(
(getattr(self.connection.client, key), args, kwargs))
return self
return _add
return self.__dict__[key]
def reset(self):
# Do nothing with the real connection
self.command_stack = []
self.scripts = set()
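# execute() packs and "sends" the queued commands through the fake connection,
# then replays each recorded call against the fake client and returns the
# collected results; the Redis DEL command is mapped to the client's delete().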
def execute(self):
stack = list(self.command_stack)
all_cmds = self.connection.pack_commands(
[args for args, _ in self.command_stack])
self.connection.send_packed_command(all_cmds)
response = []
for cmd in all_cmds:
try:
response.append(
self.parse_response(self.connection, cmd))
except ResponseError:
import sys
response.append(sys.exc_info()[1])
self.raise_first_error(self.command_stack, response)
results = []
for t, kwargs in stack:
redis_func_name = t[0]
redis_func_name = redis_func_name.lower()
if redis_func_name == 'del':
redis_func_name = 'delete'
args = t[1:]
fun = getattr(self.client, redis_func_name)
r = fun(*args, **kwargs)
results.append(r)
self.command_stack[:] = []
self.reset()
return results
class FakeRedisKombuChannelLite(kombu_redis.Channel):
def _get_client(self):
return FakeRedisClientLite
def _get_pool(self, asynchronous=False):
return Mock()
def _get_response_error(self):
return ResponseError
def _new_queue(self, queue, **kwargs):
for pri in self.priority_steps:
self.client._new_queue(self._queue_for_priority(queue, pri))
def pipeline(self):
return FakePipelineLite(FakeRedisClientLite())
class FakeRedisKombuChannel(kombu_redis.Channel):
_fanout_queues = {}
def __init__(self, *args, **kwargs):
super(FakeRedisKombuChannel, self).__init__(*args, **kwargs)
def _get_client(self):
return FakeRedisClient
def _create_client(self, asynchronous=False):
global _fake_redis_client
if _fake_redis_client is None:
_fake_redis_client = self._get_client()()
return _fake_redis_client
@contextmanager
def conn_or_acquire(self, client=None):
if client:
yield client
else:
yield self._create_client()
def _get_pool(self, asynchronous=False):
params = self._connparams(asynchronous=asynchronous)
self.keyprefix_fanout = self.keyprefix_fanout.format(db=params['db'])
return FakeRedisConnectionPool(**params)
def _get_response_error(self):
return ResponseError
def _new_queue(self, queue, **kwargs):
for pri in self.priority_steps:
self.client._new_queue(self._queue_for_priority(queue, pri))
def pipeline(self):
yield _get_fake_redis_client().pipeline()
def basic_publish(self, message, exchange='', routing_key='', **kwargs):
self._inplace_augment_message(message, exchange, routing_key)
# anon exchange: routing_key is the destination queue
return self._put(routing_key, message, **kwargs)
class FakeRedisKombuTransportLite(kombu_redis.Transport):
Channel = FakeRedisKombuChannelLite
def __init__(self, *args, **kwargs):
super(FakeRedisKombuTransportLite, self).__init__(*args, **kwargs)
def _get_errors(self):
return ((KeyError,), (IndexError,))
class FakeRedisKombuTransport(FakeRedisKombuTransportLite):
Channel = FakeRedisKombuChannel
@skip.unless_module('redis')
class TestRedisChannel(unittest.TestCase):
def setUp(self):
self.connection = self.create_connection()
self.channel = self.connection.default_channel
def tearDown(self):
self.connection = None
self.channel = None
global _fake_redis_client
_fake_redis_client = None
def create_connection(self, **kwargs):
kwargs.setdefault('transport_options', {'fanout_patterns': True})
return Connection(transport=FakeRedisKombuTransport, **kwargs)
def _get_one_delivery_tag(self, n='test_uniq_tag'):
chan = self.connection.default_channel
chan.exchange_declare(n)
chan.queue_declare(n)
chan.queue_bind(n, n, n)
msg = chan.prepare_message('quick brown fox')
chan.basic_publish(msg, n, n)
payload = chan._get(n)
assert payload
pymsg = chan.message_to_python(payload)
return pymsg.delivery_tag
def test_delivery_tag_is_uuid(self):
seen = set()
for i in range(100):
tag = self._get_one_delivery_tag()
assert tag not in seen
seen.add(tag)
with pytest.raises(ValueError):
int(tag)
assert len(tag) == 36
def test_disable_ack_emulation(self):
conn = Connection(
transport=FakeRedisKombuTransport,
transport_options={'ack_emulation': False}
)
chan = conn.channel()
assert not chan.ack_emulation
assert chan.QoS == virtual.QoS
def test_redis_ping_raises(self):
pool = Mock(name='pool')
pool_at_init = [pool]
client = Mock(name='client')
class XChannel(FakeRedisKombuChannel):
def __init__(self, *args, **kwargs):
self._pool = pool_at_init[0]
super(XChannel, self).__init__(*args, **kwargs)
def _get_client(self):
return lambda *_, **__: client
def _create_client(self, asynchronous=False):
if asynchronous:
return self.Client(connection_pool=self.async_pool)
return self.Client(connection_pool=self.pool)
class XTransport(FakeRedisKombuTransport):
Channel = XChannel
conn = Connection(transport=XTransport)
client.ping.side_effect = RuntimeError()
with pytest.raises(RuntimeError):
conn.channel()
pool.disconnect.assert_called_with()
pool.disconnect.reset_mock()
pool_at_init = [None]
with pytest.raises(RuntimeError):
conn.channel()
pool.disconnect.assert_not_called()
def test_after_fork(self):
self.channel._pool = None
self.channel._after_fork()
pool = self.channel._pool = Mock(name='pool')
self.channel._after_fork()
pool.disconnect.assert_called_with()
def test_next_delivery_tag(self):
assert (self.channel._next_delivery_tag() !=
self.channel._next_delivery_tag())
def test_do_restore_message(self):
client = Mock(name='client')
pl1 = {'body': 'BODY'}
spl1 = dumps(pl1)
lookup = self.channel._lookup = Mock(name='_lookup')
lookup.return_value = {'george', 'elaine'}
self.channel._do_restore_message(
pl1, 'ex', 'rkey', client,
)
client.rpush.assert_has_calls([
call('george', spl1), call('elaine', spl1),
], any_order=True)
client = Mock(name='client')
pl2 = {'body': 'BODY2', 'headers': {'x-funny': 1}}
headers_after = dict(pl2['headers'], redelivered=True)
spl2 = dumps(dict(pl2, headers=headers_after))
self.channel._do_restore_message(
pl2, 'ex', 'rkey', client,
)
client.rpush.assert_any_call('george', spl2)
client.rpush.assert_any_call('elaine', spl2)
client.rpush.side_effect = KeyError()
with patch('kombu.transport.redis.crit') as crit:
self.channel._do_restore_message(
pl2, 'ex', 'rkey', client,
)
crit.assert_called()
def test_restore(self):
message = Mock(name='message')
with patch('kombu.transport.redis.loads') as loads:
loads.return_value = 'M', 'EX', 'RK'
client = self.channel._create_client = Mock(name='client')
client = client()
client.pipeline = ContextMock()
restore = self.channel._do_restore_message = Mock(
name='_do_restore_message',
)
pipe = client.pipeline.return_value
pipe_hget = Mock(name='pipe.hget')
pipe.hget.return_value = pipe_hget
pipe_hget_hdel = Mock(name='pipe.hget.hdel')
pipe_hget.hdel.return_value = pipe_hget_hdel
result = Mock(name='result')
pipe_hget_hdel.execute.return_value = None, None
self.channel._restore(message)
client.pipeline.assert_called_with()
unacked_key = self.channel.unacked_key
loads.assert_not_called()
tag = message.delivery_tag
pipe.hget.assert_called_with(unacked_key, tag)
pipe_hget.hdel.assert_called_with(unacked_key, tag)
pipe_hget_hdel.execute.assert_called_with()
pipe_hget_hdel.execute.return_value = result, None
self.channel._restore(message)
loads.assert_called_with(result)
restore.assert_called_with('M', 'EX', 'RK', client, False)
def test_qos_restore_visible(self):
client = self.channel._create_client = Mock(name='client')
client = client()
def pipe(*args, **kwargs):
return FakePipelineLite(client)
client.pipeline = pipe
client.zrevrangebyscore.return_value = [
(1, 10),
(2, 20),
(3, 30),
]
qos = kombu_redis.QoS(self.channel)
restore = qos.restore_by_tag = Mock(name='restore_by_tag')
qos._vrestore_count = 1
qos.restore_visible()
client.zrevrangebyscore.assert_not_called()
assert qos._vrestore_count == 2
qos._vrestore_count = 0
qos.restore_visible()
restore.assert_has_calls([
call(1, client), call(2, client), call(3, client),
])
assert qos._vrestore_count == 1
qos._vrestore_count = 0
restore.reset_mock()
client.zrevrangebyscore.return_value = []
qos.restore_visible()
restore.assert_not_called()
assert qos._vrestore_count == 1
qos._vrestore_count = 0
client.setnx.side_effect = kombu_redis.MutexHeld()
qos.restore_visible()
def test_basic_consume_when_fanout_queue(self):
self.channel.exchange_declare(exchange='txconfan', type='fanout')
self.channel.queue_declare(queue='txconfanq')
self.channel.queue_bind(queue='txconfanq', exchange='txconfan')
assert 'txconfanq' in self.channel._fanout_queues
self.channel.basic_consume('txconfanq', False, None, 1)
assert 'txconfanq' in self.channel.active_fanout_queues
assert self.channel._fanout_to_queue.get('txconfan') == 'txconfanq'
def test_basic_cancel_unknown_delivery_tag(self):
assert self.channel.basic_cancel('txaseqwewq') is None
def test_subscribe_no_queues(self):
self.channel.subclient = Mock()
self.channel.active_fanout_queues.clear()
self.channel._subscribe()
self.channel.subclient.subscribe.assert_not_called()
def test_subscribe(self):
self.channel.subclient = Mock()
self.channel.active_fanout_queues.add('a')
self.channel.active_fanout_queues.add('b')
self.channel._fanout_queues.update(a=('a', ''), b=('b', ''))
self.channel._subscribe()
self.channel.subclient.psubscribe.assert_called()
s_args, _ = self.channel.subclient.psubscribe.call_args
assert sorted(s_args[0]) == ['/{db}.a', '/{db}.b']
self.channel.subclient.connection._sock = None
self.channel._subscribe()
self.channel.subclient.connection.connect.assert_called_with()
def test_handle_unsubscribe_message(self):
s = self.channel.subclient
s.subscribed = True
self.channel._handle_message(s, ['unsubscribe', 'a', 0])
assert not s.subscribed
def test_handle_pmessage_message(self):
res = self.channel._handle_message(
self.channel.subclient,
['pmessage', 'pattern', 'channel', 'data'],
)
assert res == {
'type': 'pmessage',
'pattern': 'pattern',
'channel': 'channel',
'data': 'data',
}
def test_handle_message(self):
res = self.channel._handle_message(
self.channel.subclient,
['type', 'channel', 'data'],
)
assert res == {
'type': 'type',
'pattern': None,
'channel': 'channel',
'data': 'data',
}
def test_brpop_start_but_no_queues(self):
assert self.channel._brpop_start() is None
def test_receive(self):
s = self.channel.subclient = Mock()
self.channel._fanout_to_queue['a'] = 'b'
self.channel.connection._deliver = Mock(name='_deliver')
message = {
'body': 'hello',
'properties': {
'delivery_tag': 1,
'delivery_info': {'exchange': 'E', 'routing_key': 'R'},
},
}
s.parse_response.return_value = ['message', 'a', dumps(message)]
self.channel._receive_one(self.channel.subclient)
self.channel.connection._deliver.assert_called_once_with(
message, 'b',
)
def test_receive_raises_for_connection_error(self):
self.channel._in_listen = True
s = self.channel.subclient = Mock()
s.parse_response.side_effect = KeyError('foo')
with pytest.raises(KeyError):
self.channel._receive_one(self.channel.subclient)
assert not self.channel._in_listen
def test_receive_empty(self):
s = self.channel.subclient = Mock()
s.parse_response.return_value = None
assert self.channel._receive_one(self.channel.subclient) is None
def test_receive_different_message_type(self):
s = self.channel.subclient = Mock()
s.parse_response.return_value = ['message', '/foo/', 0, 'data']
assert self.channel._receive_one(self.channel.subclient) is None
def test_brpop_read_raises(self):
c = self.channel.client = Mock()
c.parse_response.side_effect = KeyError('foo')
with pytest.raises(KeyError):
self.channel._brpop_read()
c.connection.disconnect.assert_called_with()
def test_brpop_read_gives_None(self):
c = self.channel.client = Mock()
c.parse_response.return_value = None
with pytest.raises(kombu_redis.Empty):
self.channel._brpop_read()
def test_poll_error(self):
c = self.channel.client = Mock()
c.parse_response = Mock()
self.channel._poll_error('BRPOP')
c.parse_response.assert_called_with(c.connection, 'BRPOP')
c.parse_response.side_effect = KeyError('foo')
with pytest.raises(KeyError):
self.channel._poll_error('BRPOP')
def test_poll_error_on_type_LISTEN(self):
c = self.channel.subclient = Mock()
c.parse_response = Mock()
self.channel._poll_error('LISTEN')
c.parse_response.assert_called_with()
c.parse_response.side_effect = KeyError('foo')
with pytest.raises(KeyError):
self.channel._poll_error('LISTEN')
def test_put_fanout(self):
self.channel._in_poll = False
c = self.channel._create_client = Mock()
body = {'hello': 'world'}
self.channel._put_fanout('exchange', body, '')
c().publish.assert_called_with('/{db}.exchange', dumps(body))
def test_put_priority(self):
client = self.channel._create_client = Mock(name='client')
msg1 = {'properties': {'priority': 3}}
self.channel._put('george', msg1)
client().lpush.assert_called_with(
self.channel._queue_for_priority('george', 3), dumps(msg1),
)
msg2 = {'properties': {'priority': 313}}
self.channel._put('george', msg2)
client().lpush.assert_called_with(
self.channel._queue_for_priority('george', 9), dumps(msg2),
)
msg3 = {'properties': {}}
self.channel._put('george', msg3)
client().lpush.assert_called_with(
self.channel._queue_for_priority('george', 0), dumps(msg3),
)
def test_delete(self):
x = self.channel
x._create_client = Mock()
x._create_client.return_value = x.client
delete = x.client.delete = Mock()
srem = x.client.srem = Mock()
x._delete('queue', 'exchange', 'routing_key', None)
delete.assert_has_calls([
call(x._queue_for_priority('queue', pri))
for pri in kombu_redis.PRIORITY_STEPS
])
srem.assert_called_with(
x.keyprefix_queue % ('exchange',),
x.sep.join(['routing_key', '', 'queue']))
def test_has_queue(self):
self.channel._create_client = Mock()
self.channel._create_client.return_value = self.channel.client
exists = self.channel.client.exists = Mock()
exists.return_value = True
assert self.channel._has_queue('foo')
exists.assert_has_calls([
call(self.channel._queue_for_priority('foo', pri))
for pri in kombu_redis.PRIORITY_STEPS
])
exists.return_value = False
assert not self.channel._has_queue('foo')
def test_close_when_closed(self):
self.channel.closed = True
self.channel.close()
def test_close_deletes_autodelete_fanout_queues(self):
self.channel._fanout_queues = {'foo': ('foo', ''), 'bar': ('bar', '')}
self.channel.auto_delete_queues = ['foo']
self.channel.queue_delete = Mock(name='queue_delete')
client = self.channel.client
self.channel.close()
self.channel.queue_delete.assert_has_calls([
call('foo', client=client),
])
def test_close_client_close_raises(self):
c = self.channel.client = Mock()
connection = c.connection
connection.disconnect.side_effect = self.channel.ResponseError()
self.channel.close()
connection.disconnect.assert_called_with()
def test_invalid_database_raises_ValueError(self):
with pytest.raises(ValueError):
self.channel.connection.client.virtual_host = 'dwqeq'
self.channel._connparams()
def test_connparams_allows_slash_in_db(self):
self.channel.connection.client.virtual_host = '/123'
assert self.channel._connparams()['db'] == 123
def test_connparams_db_can_be_int(self):
self.channel.connection.client.virtual_host = 124
assert self.channel._connparams()['db'] == 124
def test_new_queue_with_auto_delete(self):
kombu_redis.Channel._new_queue(
self.channel, 'george', auto_delete=False)
assert 'george' not in self.channel.auto_delete_queues
kombu_redis.Channel._new_queue(
self.channel, 'elaine', auto_delete=True)
assert 'elaine' in self.channel.auto_delete_queues
def test_connparams_regular_hostname(self):
self.channel.connection.client.hostname = 'george.vandelay.com'
assert self.channel._connparams()['host'] == 'george.vandelay.com'
def test_connparams_password_for_unix_socket(self):
self.channel.connection.client.hostname = \
'socket://:foo@/var/run/redis.sock'
connection_parameters = self.channel._connparams()
password = connection_parameters['password']
path = connection_parameters['path']
assert (password, path) == ('foo', '/var/run/redis.sock')
self.channel.connection.client.hostname = \
'socket://@/var/run/redis.sock'
connection_parameters = self.channel._connparams()
password = connection_parameters['password']
path = connection_parameters['path']
assert (password, path) == (None, '/var/run/redis.sock')
def test_rotate_cycle_ValueError(self):
cycle = self.channel._queue_cycle
cycle.update(['kramer', 'jerry'])
cycle.rotate('kramer')
assert cycle.items == ['jerry', 'kramer']
cycle.rotate('elaine')
def test_get_client(self):
kombu_redis_client = kombu_redis.Channel._get_client(self.channel)
assert kombu_redis_client
redis_version = getattr(redis, 'VERSION', None)
try:
redis.VERSION = (2, 4, 0)
with pytest.raises(VersionMismatch):
kombu_redis.Channel._get_client(self.channel)
finally:
if redis_version is not None:
redis.VERSION = redis_version
def test_get_response_error(self):
kombu_error = kombu_redis.Channel._get_response_error(self.channel)
assert kombu_error is redis.exceptions.ResponseError
def test_avail_client(self):
self.channel._pool = Mock()
cc = self.channel._create_client = Mock()
with self.channel.conn_or_acquire():
pass
cc.assert_called_with()
def test_register_with_event_loop(self):
transport = self.connection.transport
transport.cycle = Mock(name='cycle')
transport.cycle.fds = {12: 'LISTEN', 13: 'BRPOP'}
conn = Mock(name='conn')
loop = Mock(name='loop')
kombu_redis.Transport.register_with_event_loop(transport, conn, loop)
transport.cycle.on_poll_init.assert_called_with(loop.poller)
loop.call_repeatedly.assert_called_with(
10, transport.cycle.maybe_restore_messages,
)
loop.on_tick.add.assert_called()
on_poll_start = loop.on_tick.add.call_args[0][0]
on_poll_start()
transport.cycle.on_poll_start.assert_called_with()
loop.add_reader.assert_has_calls([
call(12, transport.on_readable, 12),
call(13, transport.on_readable, 13),
])
def test_transport_on_readable(self):
transport = self.connection.transport
cycle = transport.cycle = Mock(name='cycle')
cycle.on_readable.return_value = None
kombu_redis.Transport.on_readable(transport, 13)
cycle.on_readable.assert_called_with(13)
def test_transport_get_errors(self):
assert kombu_redis.Transport._get_errors(self.connection.transport)
def test_transport_driver_version(self):
assert kombu_redis.Transport.driver_version(self.connection.transport)
def test_transport_get_errors_when_InvalidData_used(self):
errors = kombu_redis.Transport._get_errors(
self.connection.transport)
assert errors
assert redis.exceptions.DataError in errors[1]
def test_empty_queues_key(self):
channel = self.channel
channel._in_poll = False
key = channel.keyprefix_queue % 'celery'
# Everything is fine, there is a list of queues.
list_of_queues = 'celery\x06\x16\x06\x16celery'
channel.client.sadd(key, list_of_queues)
assert channel.get_table('celery') == [
('celery', '', 'celery'),
]
# ... then for some reason, the _kombu.binding.celery key gets lost
channel.client.srem(key, list_of_queues)
# which raises a channel error so that the consumer/publisher
# can recover by redeclaring the required entities.
with pytest.raises(InconsistencyError):
self.channel.get_table('celery')
def test_socket_connection(self):
with patch('kombu.transport.redis.Channel._create_client'):
with Connection('redis+socket:///tmp/redis.sock') as conn:
connparams = conn.default_channel._connparams()
assert issubclass(
connparams['connection_class'],
redis.UnixDomainSocketConnection,
)
assert connparams['path'] == '/tmp/redis.sock'
def test_ssl_argument__dict(self):
with patch('kombu.transport.redis.Channel._create_client'):
# Expected format for redis-py's SSLConnection class
ssl_params = {
'ssl_cert_reqs': 2,
'ssl_ca_certs': '/foo/ca.pem',
'ssl_certfile': '/foo/cert.crt',
'ssl_keyfile': '/foo/pkey.key'
}
with Connection('redis://', ssl=ssl_params) as conn:
params = conn.default_channel._connparams()
assert params['ssl_cert_reqs'] == ssl_params['ssl_cert_reqs']
assert params['ssl_ca_certs'] == ssl_params['ssl_ca_certs']
assert params['ssl_certfile'] == ssl_params['ssl_certfile']
assert params['ssl_keyfile'] == ssl_params['ssl_keyfile']
assert params.get('ssl') is None
def test_ssl_connection(self):
with patch('kombu.transport.redis.Channel._create_client'):
with Connection('redis://', ssl={'ssl_cert_reqs': 2}) as conn:
connparams = conn.default_channel._connparams()
assert issubclass(
connparams['connection_class'],
redis.SSLConnection,
)
def test_rediss_connection(self):
with patch('kombu.transport.redis.Channel._create_client'):
with Connection('rediss://') as conn:
connparams = conn.default_channel._connparams()
assert issubclass(
connparams['connection_class'],
redis.SSLConnection,
)
def test_sep_transport_option(self):
conn_kwargs = dict(
transport=FakeRedisKombuTransport,
transport_options={'sep': ':'})
with Connection(**conn_kwargs) as conn:
key = conn.default_channel.keyprefix_queue % 'celery'
conn.default_channel.client.sadd(key, 'celery::celery')
assert conn.default_channel.sep == ':'
assert conn.default_channel.get_table('celery') == [
('celery', '', 'celery'),
]
@skip.unless_module('redis')
@mock.patch('redis.Connection', FakeRedisConnection)
class TestRedisConnections(unittest.TestCase):
def setUp(self):
self.connection = self.create_connection()
self.exchange_name = exchange_name = 'test_Redis'
self.exchange = Exchange(exchange_name, type='direct')
self.queue = Queue('test_Redis', self.exchange, 'test_Redis')
self.queue(self.connection.default_channel).declare()
self.real_scard = FakeRedisClient.scard
def tearDown(self):
self.connection.close()
self.queue = None
self.exchange = None
global _fake_redis_client
_fake_redis_client = None
FakeRedisClient.scard = self.real_scard
def create_connection(self, **kwargs):
kwargs.setdefault('transport_options', {'fanout_patterns': True})
return Connection(transport=FakeRedisKombuTransport, **kwargs)
def test_purge(self):
channel = self.connection.default_channel
producer = Producer(channel, self.exchange, routing_key='test_Redis')
for i in range(10):
producer.publish({'hello': 'world-%s' % (i,)})
assert channel._size('test_Redis') == 10
assert self.queue(channel).purge() == 10
channel.close()
def test_db_values(self):
Connection(virtual_host=1,
transport=FakeRedisKombuTransport).channel()
Connection(virtual_host='1',
transport=FakeRedisKombuTransport).channel()
Connection(virtual_host='/1',
transport=FakeRedisKombuTransport).channel()
with pytest.raises(Exception):
Connection('redis:///foo').channel()
def test_db_port(self):
c1 = Connection(port=None, transport=FakeRedisKombuTransport).channel()
c1.close()
c2 = Connection(port=9999, transport=FakeRedisKombuTransport).channel()
c2.close()
def test_close_poller_not_active(self):
c = Connection(transport=FakeRedisKombuTransport).channel()
cycle = c.connection.cycle
c.client.connection
c.close()
assert c not in cycle._channels
def test_close_ResponseError(self):
c = Connection(transport=FakeRedisKombuTransport).channel()
c.client.bgsave_raises_ResponseError = True
c.close()
def test_close_disconnects(self):
c = Connection(transport=FakeRedisKombuTransport).channel()
conn1 = c.client.connection
conn2 = c.subclient.connection
c.close()
assert conn1.disconnected
assert conn2.disconnected
def test_get__Empty(self):
channel = self.connection.channel()
with pytest.raises(Empty):
channel._get('does-not-exist')
channel.close()
def test_get_client(self):
conn = Connection(transport=FakeRedisKombuTransport)
chan = conn.channel()
assert chan.Client
assert chan.ResponseError
assert conn.transport.connection_errors
assert conn.transport.channel_errors
def test_check_at_least_we_try_to_connect_and_fail(self):
connection = Connection('redis://localhost:65534/')
with pytest.raises(redis.exceptions.ConnectionError):
chan = connection.channel()
chan._size('some_queue')
def test_redis_queue_lookup_happy_path(self):
fake_redis_client = _get_fake_redis_client()
redis_channel = self.connection.default_channel
routing_key = redis_channel.keyprefix_queue % self.exchange_name
redis_channel.queue_bind(routing_key, self.exchange_name, routing_key)
fake_redis_client.sadd(routing_key, routing_key)
lookup_queue_result = redis_channel._lookup(
exchange=self.exchange_name,
routing_key=routing_key,
default='default_queue')
assert lookup_queue_result == [routing_key]
def test_redis_queue_lookup_gets_default_when_queue_doesnot_exist(self):
# Given: A test redis client and channel
fake_redis_client = _get_fake_redis_client()
redis_channel = self.connection.default_channel
# Given: The default queue is set:
default_queue = 'default_queue'
redis_channel.deadletter_queue = default_queue
# Determine the routing key
routing_key = redis_channel.keyprefix_queue % self.exchange
fake_redis_client.sadd(routing_key, "DoesNotExist")
lookup_queue_result = redis_channel._lookup(
exchange=self.exchange_name,
routing_key=routing_key,
default=default_queue)
assert lookup_queue_result == [redis_channel.deadletter_queue]
def test_redis_queue_lookup_gets_queue_when_exchange_doesnot_exist(self):
# Given: A test redis client and channel
fake_redis_client = _get_fake_redis_client()
redis_channel = self.connection.default_channel
# Given: The default queue is set:
default_queue = 'default_queue'
redis_channel.deadletter_queue = default_queue
# Determine the routing key
routing_key = redis_channel.keyprefix_queue % self.exchange
fake_redis_client.sadd(routing_key, routing_key)
lookup_queue_result = redis_channel._lookup(
exchange=None,
routing_key=routing_key,
default=default_queue)
assert lookup_queue_result == [routing_key]
def test_redis_queue_lookup_gets_default_when_route_doesnot_exist(self):
# Given: A test redis client and channel
fake_redis_client = _get_fake_redis_client()
redis_channel = self.connection.default_channel
# Given: The default queue is set:
default_queue = 'default_queue'
redis_channel.deadletter_queue = default_queue
# Determine the routing key
routing_key = redis_channel.keyprefix_queue % self.exchange
fake_redis_client.sadd(routing_key, "DoesNotExist")
lookup_queue_result = redis_channel._lookup(
exchange=None,
routing_key=None,
default=None)
assert lookup_queue_result == [default_queue]
def test_redis_queue_lookup_raises_inconsistency_error(self):
connection = Mock(client=Mock(
hostname='127.0.0.1',
virtual_host='/', port=6379,
transport_options={},
))
redis_channel = FakeRedisKombuChannel(connection)
exchange = Mock(name='Exchange')
try:
redis_channel._lookup(
exchange=exchange.name,
routing_key='routing_key',
default=None)
except kombu_redis.InconsistencyError:
pass # This is expected
else:
raise("Redis test did not raise expected InconsistencyError!")
def test_redis_queue_lookup_client_raises_key_error_gets_default(self):
fake_redis_client = _get_fake_redis_client()
fake_redis_client.scard = Mock(side_effect=KeyError)
redis_channel = self.connection.default_channel
routing_key = redis_channel.keyprefix_queue % self.exchange
redis_channel.queue_bind(routing_key, self.exchange_name, routing_key)
fake_redis_client.sadd(routing_key, routing_key)
default_queue_name = 'default_queue'
lookup_queue_result = redis_channel._lookup(
exchange=self.exchange_name,
routing_key=routing_key,
default=default_queue_name)
assert lookup_queue_result == [default_queue_name]
def test_redis_queue_lookup_client_raises_key_error_gets_deadletter(self):
fake_redis_client = _get_fake_redis_client()
fake_redis_client.scard = Mock(side_effect=KeyError)
redis_channel = self.connection.default_channel
routing_key = redis_channel.keyprefix_queue % self.exchange
redis_channel.queue_bind(routing_key, self.exchange_name, routing_key)
fake_redis_client.sadd(routing_key, routing_key)
default_queue_name = 'deadletter_queue'
redis_channel.deadletter_queue = default_queue_name
lookup_queue_result = redis_channel._lookup(
exchange=self.exchange_name,
routing_key=routing_key,
default=None)
assert lookup_queue_result == [default_queue_name]
@skip.unless_module('redis')
class TestKombuRedisMultiChannelPoller(unittest.TestCase):
def setUp(self):
self.Poller = kombu_redis.MultiChannelPoller
def test_on_poll_start(self):
p = self.Poller()
p._channels = []
p.on_poll_start()
p._register_BRPOP = Mock(name='_register_BRPOP')
p._register_LISTEN = Mock(name='_register_LISTEN')
chan1 = Mock(name='chan1')
p._channels = [chan1]
chan1.active_queues = []
chan1.active_fanout_queues = []
p.on_poll_start()
chan1.active_queues = ['q1']
chan1.active_fanout_queues = ['q2']
chan1.qos.can_consume.return_value = False
p.on_poll_start()
p._register_LISTEN.assert_called_with(chan1)
p._register_BRPOP.assert_not_called()
chan1.qos.can_consume.return_value = True
p._register_LISTEN.reset_mock()
p.on_poll_start()
p._register_BRPOP.assert_called_with(chan1)
p._register_LISTEN.assert_called_with(chan1)
def test_on_poll_init(self):
p = self.Poller()
chan1 = Mock(name='chan1')
p._channels = []
poller = Mock(name='poller')
p.on_poll_init(poller)
assert p.poller is poller
p._channels = [chan1]
p.on_poll_init(poller)
chan1.qos.restore_visible.assert_called_with(
num=chan1.unacked_restore_limit,
)
def test_restore_visible_with_gevent(self):
with patch('kombu.transport.redis.time') as time:
with patch('kombu.transport.redis._detect_environment') as env:
timeout = 3600
time.return_value = timeout
env.return_value = 'gevent'
chan1 = Mock(name='chan1')
redis_ctx_mock = Mock()
redis_client_mock = Mock(name='redis_client_mock')
redis_ctx_mock.__exit__ = Mock()
redis_ctx_mock.__enter__ = Mock(return_value=redis_client_mock)
chan1.conn_or_acquire.return_value = redis_ctx_mock
qos = kombu_redis.QoS(chan1)
qos.visibility_timeout = timeout
qos.restore_visible()
redis_client_mock.zrevrangebyscore\
.assert_called_with(chan1.unacked_index_key, timeout, 0,
start=0, num=10, withscores=True)
def test_handle_event(self):
p = self.Poller()
chan = Mock(name='chan')
p._fd_to_chan[13] = chan, 'BRPOP'
chan.handlers = {'BRPOP': Mock(name='BRPOP')}
chan.qos.can_consume.return_value = False
p.handle_event(13, kombu_redis.READ)
chan.handlers['BRPOP'].assert_not_called()
chan.qos.can_consume.return_value = True
p.handle_event(13, kombu_redis.READ)
chan.handlers['BRPOP'].assert_called_with()
p.handle_event(13, kombu_redis.ERR)
chan._poll_error.assert_called_with('BRPOP')
p.handle_event(13, ~(kombu_redis.READ | kombu_redis.ERR))
def test_fds(self):
p = self.Poller()
p._fd_to_chan = {1: 2}
assert p.fds == p._fd_to_chan
def test_close_unregisters_fds(self):
p = self.Poller()
poller = p.poller = Mock()
p._chan_to_sock.update({1: 1, 2: 2, 3: 3})
p.close()
assert poller.unregister.call_count == 3
u_args = poller.unregister.call_args_list
assert sorted(u_args) == [
((1,), {}),
((2,), {}),
((3,), {}),
]
def test_close_when_unregister_raises_KeyError(self):
p = self.Poller()
p.poller = Mock()
p._chan_to_sock.update({1: 1})
p.poller.unregister.side_effect = KeyError(1)
p.close()
def test_close_resets_state(self):
p = self.Poller()
p.poller = Mock()
p._channels = Mock()
p._fd_to_chan = Mock()
p._chan_to_sock = Mock()
p._chan_to_sock.itervalues.return_value = []
p._chan_to_sock.values.return_value = [] # py3k
p.close()
p._channels.clear.assert_called_with()
p._fd_to_chan.clear.assert_called_with()
p._chan_to_sock.clear.assert_called_with()
def test_register_when_registered_reregisters(self):
p = self.Poller()
p.poller = Mock()
channel, client, type = Mock(), Mock(), Mock()
sock = client.connection._sock = Mock()
sock.fileno.return_value = 10
p._chan_to_sock = {(channel, client, type): 6}
p._register(channel, client, type)
p.poller.unregister.assert_called_with(6)
assert p._fd_to_chan[10] == (channel, type)
assert p._chan_to_sock[(channel, client, type)] == sock
p.poller.register.assert_called_with(sock, p.eventflags)
# when client not connected yet
client.connection._sock = None
def after_connected():
client.connection._sock = Mock()
client.connection.connect.side_effect = after_connected
p._register(channel, client, type)
client.connection.connect.assert_called_with()
def test_register_BRPOP(self):
p = self.Poller()
channel = Mock()
channel.client.connection._sock = None
p._register = Mock()
channel._in_poll = False
p._register_BRPOP(channel)
assert channel._brpop_start.call_count == 1
assert p._register.call_count == 1
channel.client.connection._sock = Mock()
p._chan_to_sock[(channel, channel.client, 'BRPOP')] = True
channel._in_poll = True
p._register_BRPOP(channel)
assert channel._brpop_start.call_count == 1
assert p._register.call_count == 1
def test_register_LISTEN(self):
p = self.Poller()
channel = Mock()
channel.subclient.connection._sock = None
channel._in_listen = False
p._register = Mock()
p._register_LISTEN(channel)
p._register.assert_called_with(channel, channel.subclient, 'LISTEN')
assert p._register.call_count == 1
assert channel._subscribe.call_count == 1
channel._in_listen = True
p._chan_to_sock[(channel, channel.subclient, 'LISTEN')] = 3
channel.subclient.connection._sock = Mock()
p._register_LISTEN(channel)
assert p._register.call_count == 1
assert channel._subscribe.call_count == 1
def create_get(self, events=None, queues=None, fanouts=None):
_pr = [] if events is None else events
_aq = [] if queues is None else queues
_af = [] if fanouts is None else fanouts
p = self.Poller()
p.poller = Mock()
p.poller.poll.return_value = _pr
p._register_BRPOP = Mock()
p._register_LISTEN = Mock()
channel = Mock()
p._channels = [channel]
channel.active_queues = _aq
channel.active_fanout_queues = _af
return p, channel
def test_get_no_actions(self):
p, channel = self.create_get()
with pytest.raises(kombu_redis.Empty):
p.get(Mock())
def test_qos_reject(self):
p, channel = self.create_get()
qos = kombu_redis.QoS(channel)
qos.ack = Mock(name='Qos.ack')
qos.reject(1234)
qos.ack.assert_called_with(1234)
def test_get_brpop_qos_allow(self):
p, channel = self.create_get(queues=['a_queue'])
channel.qos.can_consume.return_value = True
with pytest.raises(kombu_redis.Empty):
p.get(Mock())
p._register_BRPOP.assert_called_with(channel)
def test_get_brpop_qos_disallow(self):
p, channel = self.create_get(queues=['a_queue'])
channel.qos.can_consume.return_value = False
with pytest.raises(kombu_redis.Empty):
p.get(Mock())
p._register_BRPOP.assert_not_called()
def test_get_listen(self):
p, channel = self.create_get(fanouts=['f_queue'])
with pytest.raises(kombu_redis.Empty):
p.get(Mock())
p._register_LISTEN.assert_called_with(channel)
def test_get_receives_ERR(self):
p, channel = self.create_get(events=[(1, eventio.ERR)])
p._fd_to_chan[1] = (channel, 'BRPOP')
with pytest.raises(kombu_redis.Empty):
p.get(Mock())
channel._poll_error.assert_called_with('BRPOP')
def test_get_receives_multiple(self):
p, channel = self.create_get(events=[(1, eventio.ERR),
(1, eventio.ERR)])
p._fd_to_chan[1] = (channel, 'BRPOP')
with pytest.raises(kombu_redis.Empty):
p.get(Mock())
channel._poll_error.assert_called_with('BRPOP')
@skip.unless_module('redis')
class TestKombuRedisMutex(unittest.TestCase):
def test_mutex(self, lock_id='xxx'):
client = Mock(name='client')
with patch('kombu.transport.redis.uuid') as uuid:
# Won
uuid.return_value = lock_id
client.setnx.return_value = True
client.pipeline = ContextMock()
pipe = client.pipeline.return_value
pipe.get.return_value = lock_id
held = False
with kombu_redis.Mutex(client, 'foo1', 100):
held = True
assert held
client.setnx.assert_called_with('foo1', lock_id)
pipe.get.return_value = 'yyy'
held = False
with kombu_redis.Mutex(client, 'foo1', 100):
held = True
assert held
# Did not win
client.expire.reset_mock()
pipe.get.return_value = lock_id
client.setnx.return_value = False
with pytest.raises(kombu_redis.MutexHeld):
held = False
with kombu_redis.Mutex(client, 'foo1', '100'):
held = True
assert not held
client.ttl.return_value = 0
with pytest.raises(kombu_redis.MutexHeld):
held = False
with kombu_redis.Mutex(client, 'foo1', '100'):
held = True
assert not held
client.expire.assert_called()
# Wins but raises WatchError (and that is ignored)
client.setnx.return_value = True
pipe.watch.side_effect = redis.WatchError()
held = False
with kombu_redis.Mutex(client, 'foo1', 100):
held = True
assert held
class TestRedisProducerConsumer(unittest.TestCase):
def setUp(self):
self.connection = self.create_connection()
self.channel = self.connection.default_channel
self.routing_key = routing_key = 'test_redis_producer'
self.exchange_name = exchange_name = 'test_redis_producer'
self.exchange = Exchange(exchange_name, type='direct')
self.queue = Queue(routing_key, self.exchange, routing_key)
self.queue(self.connection.default_channel).declare()
self.channel.queue_bind(routing_key, self.exchange_name, routing_key)
def create_connection(self, **kwargs):
kwargs.setdefault('transport_options', {'fanout_patterns': True})
return Connection(transport=FakeRedisKombuTransportLite, **kwargs)
def tearDown(self):
self.connection.close()
def test_publish__get(self):
channel = self.connection.channel()
producer = Producer(channel, self.exchange,
routing_key=self.routing_key)
self.queue(channel).declare()
producer.publish({'hello': 'world'})
assert self.queue(channel).get().payload == {'hello': 'world'}
assert self.queue(channel).get() is None
assert self.queue(channel).get() is None
assert self.queue(channel).get() is None
def test_publish__consume(self):
connection = self.create_connection()
channel = connection.default_channel
producer = Producer(channel, self.exchange,
routing_key=self.routing_key)
consumer = Consumer(channel, queues=[self.queue])
producer.publish({'hello2': 'world2'})
_received = []
def callback(message_data, message):
_received.append(message_data)
message.ack()
consumer.register_callback(callback)
consumer.consume()
assert channel in channel.connection.cycle._channels
try:
connection.drain_events(timeout=1)
assert _received
with pytest.raises(socket.timeout):
connection.drain_events(timeout=0.01)
finally:
channel.close()
@skip.unless_module('redis.sentinel')
class TestRedisSentinel(unittest.TestCase):
def test_method_called(self):
with patch.object(kombu_redis.SentinelChannel,
'_sentinel_managed_pool') as p:
connection = Connection(
'sentinel://localhost:65534/',
transport_options={
'master_name': 'not_important',
},
)
connection.channel()
p.assert_called()
def test_getting_master_from_sentinel(self):
with patch('redis.sentinel.Sentinel') as patched:
connection = Connection(
'sentinel://localhost:65532/;'
'sentinel://user@localhost:65533/;'
'sentinel://:password@localhost:65534/;'
'sentinel://user:password@localhost:65535/;',
transport_options={
'master_name': 'not_important',
},
)
connection.channel()
patched.assert_called_once_with(
[
(u'localhost', 65532),
(u'localhost', 65533),
(u'localhost', 65534),
(u'localhost', 65535),
],
connection_class=mock.ANY, db=0, max_connections=10,
min_other_sentinels=0, password=<PASSWORD>, sentinel_kwargs=None,
socket_connect_timeout=None, socket_keepalive=None,
socket_keepalive_options=None, socket_timeout=None)
master_for = patched.return_value.master_for
master_for.assert_called()
master_for.assert_called_with('not_important', ANY)
master_for().connection_pool.get_connection.assert_called()
def test_getting_master_from_sentinel_single_node(self):
with patch('redis.sentinel.Sentinel') as patched:
connection = Connection(
'sentinel://localhost:65532/',
transport_options={
'master_name': 'not_important',
},
)
connection.channel()
patched.assert_called_once_with(
[(u'localhost', 65532)],
connection_class=mock.ANY, db=0, max_connections=10,
min_other_sentinels=0, password=<PASSWORD>, sentinel_kwargs=None,
socket_connect_timeout=None, socket_keepalive=None,
socket_keepalive_options=None, socket_timeout=None)
master_for = patched.return_value.master_for
master_for.assert_called()
master_for.assert_called_with('not_important', ANY)
master_for().connection_pool.get_connection.assert_called()
def test_can_create_connection(self):
connection = Connection(
'sentinel://localhost:65534/',
transport_options={
'master_name': 'not_important',
},
)
with pytest.raises(redis.exceptions.ConnectionError):
connection.channel()
```
#### File: unit/utils/test_debug.py
```python
from __future__ import absolute_import, unicode_literals
import logging
from case import Mock, patch
from kombu.five import bytes_if_py2
from kombu.utils.debug import Logwrapped, setup_logging
class test_setup_logging:
def test_adds_handlers_sets_level(self):
with patch('kombu.utils.debug.get_logger') as get_logger:
logger = get_logger.return_value = Mock()
setup_logging(loggers=['kombu.test'])
get_logger.assert_called_with('kombu.test')
logger.addHandler.assert_called()
logger.setLevel.assert_called_with(logging.DEBUG)
class test_Logwrapped:
def test_wraps(self):
with patch('kombu.utils.debug.get_logger') as get_logger:
logger = get_logger.return_value = Mock()
W = Logwrapped(Mock(), 'kombu.test')
get_logger.assert_called_with('kombu.test')
assert W.instance is not None
assert W.logger is logger
W.instance.__repr__ = lambda s: bytes_if_py2('foo')
assert repr(W) == 'foo'
W.instance.some_attr = 303
assert W.some_attr == 303
W.instance.some_method.__name__ = bytes_if_py2('some_method')
W.some_method(1, 2, kw=1)
W.instance.some_method.assert_called_with(1, 2, kw=1)
W.some_method()
W.instance.some_method.assert_called_with()
W.some_method(kw=1)
W.instance.some_method.assert_called_with(kw=1)
W.ident = 'ident'
W.some_method(kw=1)
logger.debug.assert_called()
assert 'ident' in logger.debug.call_args[0][0]
assert dir(W) == dir(W.instance)
``` |
{
"source": "7german7/python_comands",
"score": 4
} |
#### File: python_comands/desafios/suma_recursiva2.py
```python
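# Recursive sum of the integers from num down to 1, carried in the
# accumulator argument; the recursion stops once num reaches zero.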
def suma(num, acum):
if(num<=0):
return acum
else:
acum = acum + num
num = num-1
#print('acum: {}, num: {}'.format(acum,num))
return suma(num, acum)
if __name__ == "__main__":
resultado = 0
print('=======SUMA RECURSIVA========')
num = int(raw_input('Ingresa un numero: '))
resultado = suma(num, resultado)
print("El resultado es: {}".format(resultado))
```
#### File: python_comands/practicas/num_aleatorio.py
```python
import random # the random library provides functions for working with random numbers
def run():
num_found = False # initialize a flag variable
random_num = random.randint(0, 20) # pick a random number between 0 and 20
print("***********ADIVINA EL NÚMERO*************")
while not num_found: # repeat until the user guesses the number
print("Ingresa un número del 0 al 20:")
num = int(raw_input()) # read keyboard input and convert it to an integer
if num == random_num:
print("Que suerte tienes, lo Adivinaste!")
print("Quieres seguir jugando(s/n)?")
resp = str(raw_input()) # read keyboard input and convert it to a string
if resp=='s':
run() # RECURSION: the run() function calls itself
elif resp=='n':
print("Espero que te hallas divertido, vuelve pronto! Adiós")
exit() # close the application
elif num > random_num:
print("Lo siento, fallaste =(, pero no te desanimes, te daré una pista: El número es más pequeño")
elif num < random_num:
print("Lo siento, fallaste =(, pero no te desanimes, te daré una pista: El número es más grande")
if __name__ == "__main__": # aquí arranca/inicia mi aplicación
run()
``` |
{
"source": "7h3f0x/Part-of-Speech-Tagging",
"score": 3
} |
#### File: 7h3f0x/Part-of-Speech-Tagging/HMM.py
```python
import glob
import xml.etree.ElementTree as ET
from decimal import Decimal
from typing import Iterator
import pickle
import os
import sys
WORD_TAG_DICT =dict()
WORD_DICT = dict()
TAG_DICT = dict()
TAG_TRANSITION = dict()
start_count = Decimal(0)
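# Global count tables built during training:
# WORD_DICT maps word -> occurrence count, TAG_DICT maps tag -> count,
# WORD_TAG_DICT maps "word_tag" -> emission count, and TAG_TRANSITION maps
# "tag_prevtag" -> transition count, where "^" marks the start of a sentence.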
def parse_sentence(sentence: list, train: bool) -> list:
global TAG_TRANSITION
prev_tags = ["^"]
word_list = list()
skip_cnt = 0
for word in sentence.findall('.//*[@c5]'):
if skip_cnt > 0:
skip_cnt -= 1
continue
if word.tag != "mw":
try:
text = word.text.strip()
tags = word.get("c5").split("-")
except:
continue
else:
text = ""
for w in word:
skip_cnt += 1
text += w.text
text = text.strip()
tags = word.get("c5").split("-")
word_list.append([text, tags])
if train:
for prev_tag in prev_tags:
for tag in tags:
TAG_TRANSITION[f"{tag}_{prev_tag}"] = TAG_TRANSITION.get(f"{tag}_{prev_tag}", 0) + 1
prev_tags = tags
return word_list
def parse_single_xml(xml_fname: str, train=False) -> Iterator:
tree = ET.parse(xml_fname)
sentences = tree.findall(".//s") # get all sentences
# sentence_list = list()
for sentence in sentences:
tmp = parse_sentence(sentence, train)
if not tmp:
print(xml_fname)
print(sentence.get('n'))
exit(1)
yield tmp
# return sentence_list
def train(train_files_list : list):
global WORD_TAG_DICT
global WORD_DICT
global TAG_DICT
global TAG_TRANSITION
global start_count
if os.path.exists("./cache"):
with open('./cache', 'rb') as f:
WORD_TAG_DICT, WORD_DICT, TAG_DICT, TAG_TRANSITION, start_count = pickle.load(f)
return
for fname in train_files_list:
sentence_list = parse_single_xml(fname, train=True)
for word_list in sentence_list:
for word, tags in word_list:
for tag in tags:
WORD_DICT[word] = WORD_DICT.get(word, 0) + 1
TAG_DICT[tag] = TAG_DICT.get(tag, 0) + 1
WORD_TAG_DICT[f"{word}_{tag}"] = WORD_TAG_DICT.get(f"{word}_{tag}", 0) + 1
for tag in TAG_DICT:
start_count += Decimal(TAG_TRANSITION.get(f"{tag}_^", 0))
with open('./cache', 'wb') as f:
pickle.dump([WORD_TAG_DICT, WORD_DICT, TAG_DICT, TAG_TRANSITION, start_count], f)
def probability_tag_tag(tag: str, prev_tag: str) -> Decimal:
# Add one smoothing
if prev_tag == "^":
# start probabilities
return (Decimal(TAG_TRANSITION.get(f"{tag}_{prev_tag}", 0)) + Decimal(1)) / (start_count + Decimal(len(TAG_TRANSITION)))
else:
# transition probabilities
return (Decimal(TAG_TRANSITION.get(f"{tag}_{prev_tag}", 0)) + Decimal(1)) / (Decimal(TAG_DICT.get(prev_tag, 0)) + Decimal(len(TAG_TRANSITION)))
def probability_word_tag(word: str, tag: str) -> Decimal:
# add 1 smoothing
return ( Decimal(WORD_TAG_DICT.get(f"{word}_{tag}", 0)) + Decimal(1) ) / ( Decimal(TAG_DICT.get(tag, 0)) + Decimal(len(TAG_DICT)) )
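# viterbi() below fills a |sentence| x |tags| probability matrix with the
# standard recurrence: the best score for tag t at position i is the maximum
# over previous tags of score(i-1, prev) * P(t | prev) * P(word_i | t), with
# back pointers recording the argmax so the best tag sequence can be recovered
# by walking backwards from the final position.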
def viterbi(sentence: list) -> list:
assert len(sentence) > 0
# probability_matrix = [{key: Decimal(0.0) for key in TAG_DICT} for _ in range(len(sentence))]
# back_ptr = [{key: None for key in TAG_DICT} for _ in range(len(sentence))]
probability_matrix = list()
back_ptr = list()
for _ in range(len(sentence)):
matrix_temp = dict()
back_tmp = dict()
for key in TAG_DICT:
matrix_temp[key] = Decimal(0)
back_tmp[key] = None
probability_matrix.append(matrix_temp)
back_ptr.append(back_tmp)
for tag in TAG_DICT:
probability_matrix[0][tag] = probability_tag_tag(tag, "^") * probability_word_tag(sentence[0], tag)
back_ptr[0][tag] = None
# start at index 1: index 0 already holds the start-probability column
for idx in range(1, len(sentence)):
for tag in TAG_DICT:
for prev_tag in TAG_DICT:
back_probability = probability_matrix[idx - 1][prev_tag] * probability_tag_tag(tag, prev_tag) * probability_word_tag(sentence[idx], tag)
if back_probability > probability_matrix[idx][tag]:
probability_matrix[idx][tag] = back_probability
back_ptr[idx][tag] = prev_tag
idx = len(sentence) - 1
best_probability = max(probability_matrix[idx], key=probability_matrix[idx].get)
sequence = list()
iterator = best_probability
while iterator is not None:
sequence.append(iterator)
iterator = back_ptr[idx][iterator]
idx -= 1
sequence.reverse()
assert len(sequence) == len(sentence)
return sequence
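# hmm() runs Viterbi over the test files and accumulates a confusion matrix
# indexed by (gold tag, predicted tag); the matrix and the word count are
# pickled so accuracy can be computed later from the saved artifacts.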
def hmm(test_files_list: list, f_start: int, f_end: int):
tag_dict = dict()
idx = 0
for key in TAG_DICT.keys():
tag_dict[key] = idx
idx += 1
confusion_matrix = list()
for i in range(len(tag_dict.keys()) + 1):
t = list()
for j in range(idx + 1):
t.append(0)
confusion_matrix.append(t)
word_count = 0
f_cnt = 0
for fname in test_files_list:
# skip the first `f_start` files
# FOR TESTING PURPOSES ONLY
# TODO : remove this at the end
if f_cnt < f_start:
f_cnt += 1
continue
print(fname)
for sentence in parse_single_xml(fname):
word_count += len(sentence)
words = [word[0] for word in sentence]
predicted_tags = viterbi(words)
for i in range(len(sentence)):
tag_predicted = predicted_tags[i]
for tag in sentence[i][1]:
confusion_matrix[tag_dict[tag]][tag_dict[tag_predicted]] = confusion_matrix[tag_dict[tag]][tag_dict[tag_predicted]] + 1
# break
# break
f_cnt += 1
if f_cnt >= f_end:
break
with open(f'./confusion_matrices/{f_start}_{f_end}', 'wb') as f:
pickle.dump(confusion_matrix, f)
with open(f'./word_count/{f_start}_{f_end}', 'wb') as f:
pickle.dump(word_count, f)
# for i in range(len(tag_dict.keys())):
# sum_row = 0
# for j in range(len(tag_dict.keys())):
# sum_row += confusion_matrix[i][j]
# confusion_matrix[i][idx] = sum_row
# for i in range(len(tag_dict.keys())):
# sum_col = 0
# for j in range(len(tag_dict.keys())):
# sum_col += confusion_matrix[j][i]
# correct_pred = 0
# for i in range(len(tag_dict.keys())):
# correct_pred += confusion_matrix[i][i]
# print(correct_pred, word_count)
def main():
train(glob.glob("Train-corups/A*/*.xml"))
hmm(sorted(glob.glob("Test-corpus/A*/*.xml")), int(sys.argv[1]), int(sys.argv[2]))
if __name__ == "__main__":
main()
``` |
{
"source": "7h3kk1d/picture_ranking",
"score": 2
} |
#### File: picture_ranking/photos/models.py
```python
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Photo(models.Model):
image = models.ImageField()
upload_date = models.DateTimeField('date uploaded')
rating = models.IntegerField()
description = models.CharField(max_length=140)
user = models.ForeignKey(User)
def __unicode__(self):
return self.description
``` |
{
"source": "7h3qu1rkyb1t/Xarena",
"score": 2
} |
#### File: Xarena/tourney/models.py
```python
from django.db import models
from django.contrib.auth.models import User
from accounts.models import Game, Membership
from django.utils import timezone
from django.contrib.postgres.fields import ArrayField
class TourneyObject(models.Model):
mode = models.CharField(max_length=50)
bonus = models.CharField(max_length=50)
def __str__(self):
return self.mode
class Tournament(models.Model):
game = models.ForeignKey(Game, on_delete=models.CASCADE)
players = models.ManyToManyField(User, blank=True, through="Subscription")
max_players = models.IntegerField(default=100)
tourney_type = models.ForeignKey(TourneyObject, on_delete=models.CASCADE)
entry_fee = models.IntegerField()
time = models.DateTimeField()
tourney_id = models.CharField(max_length=50)
tourney_pass = models.CharField(max_length=15)
first_prize = models.IntegerField()
second_prize = models.IntegerField()
third_prize = models.IntegerField()
bonus = models.IntegerField()
tourney_end = models.BooleanField(default=False)
def is_live(self):
return True if self.time <= timezone.now() and not self.tourney_end else False
def __str__(self):
return self.game.name+str(self.time)
def players_joined(self):
return len(self.players.all())
def elimination_count(self):
return len([i for i in self.subscription_set.all() if i.active()])
class Subscription(models.Model):
tourney = models.ForeignKey(Tournament, on_delete=models.CASCADE)
player = models.ForeignKey(User, on_delete=models.CASCADE)
subscription_id = models.AutoField(primary_key=True,)
eliminated_by = models.ForeignKey("self", on_delete=models.SET_NULL, blank=True, null=True)
elimination_number = models.PositiveIntegerField(default=None, null=True, blank=True)
class Meta:
unique_together = ('subscription_id', 'elimination_number')
def __str__(self):
return self.player.username + "_" + self.tourney.game.name
def active(self):
return True if self.elimination_number is None else False
def membership(self):
return self.tourney.game.membership_set.get(player=self.player)
# every time a game mode added it has to be updated
def bonus_count(self):
if self.tourney.tourney_type.mode == "Survival":
return len(self.subscription_set.all())
else:
return None
``` |
{
"source": "7h3rAm/rudra",
"score": 2
} |
#### File: plugins/generic/extractvisual.py
```python
from lib.external.PluginManager import PluginInterface, Manager
from prettytable import PrettyTable
from aayudh import utils, fileutils
import sys
import os
current_dir = os.path.abspath(os.path.dirname(__file__))
root_dir = os.path.normpath(os.path.join(current_dir, ".."))
sys.path.insert(0, root_dir)
class extractvisual(PluginInterface):
name = "extractvisual"
enabled = True
def __init__(self):
self.details = utils.objdict({})
self.details.name = self.name
self.details.description = "Extract all visual content into report directory"
self.details.mimetypes = None
self.details.author = "@7h3rAm"
self.details.version = "0.01"
self.details.date = "23/MAR/2015"
self.details.path = ("" if __file__ is None else os.path.abspath(__file__))
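# run() decodes the base64-encoded visuals from the report (grayscale and RGB
# PNGs, byte-frequency histogram, identicon) and writes them next to the
# report file, but only when file visualization is enabled in the configuration.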
def run(self, report):
if report.misc.config.enablefilevisualization and report.meta.visual:
fileutils.file_save(filename="%s/%s.pnggray" % (report.misc.config.currreportpath, report.misc.config.currreportfile), data=utils.from_base64(report.meta.visual.pnggray), mode="wb")
fileutils.file_save(filename="%s/%s.pngrgb" % (report.misc.config.currreportpath, report.misc.config.currreportfile), data=utils.from_base64(report.meta.visual.pngrgb), mode="wb")
fileutils.file_save(filename="%s/%s.bfh" % (report.misc.config.currreportpath, report.misc.config.currreportfile), data=report.meta.visual.bytefreqhistogram, mode="wb")
if report.meta.visual.identicon:
fileutils.file_save(filename="%s/%s.identicon" % (report.misc.config.currreportpath, report.misc.config.currreportfile), data=utils.from_base64(report.meta.visual.identicon), mode="wb")
return
Manager().register_plugin(extractvisual)
``` |
{
"source": "7haar/schnitte_kodi_addons",
"score": 2
} |
#### File: schnitte_kodi_addons/plugin.video.oho.newstv/default.py
```python
import xbmc, xbmcgui, xbmcaddon, sys
import random
#import urllib.request
import urllib2
import json
import datetime
import re
sendung = []
addon = xbmcaddon.Addon()
__setting__ = addon.getSetting
#
if __setting__('rbb') == 'true':
sendung.append('Brandenburgaktuell')
sendung.append('rbb')
if __setting__('ts') == 'true':
sendung.append('tagesschau')
sendung.append('ts')
if __setting__('ts24') == 'true':
sendung.append('tagesschauXX')
sendung.append('ts')
if __setting__('ht') == 'true':
sendung.append('heute')
sendung.append('ht')
if __setting__('hte') == 'true':
sendung.append('heute-inEuropa')
sendung.append('ht')
if __setting__('htd') == 'true':
sendung.append('heute-inDeutschland')
sendung.append('ht')
if __setting__('htp') == 'true':
sendung.append('heuteplus')
sendung.append('ht')
if __setting__('mdrh') == 'true':
sendung.append('MDRSACHSEN-ANHALTHEUTE')
sendung.append('mdrh')
if __setting__('htj') == 'true':
sendung.append('heutejournal')
sendung.append('ht')
if __setting__('sr') == 'true':
sendung.append('Aktuell(XXUhr)')
sendung.append('sr')
if __setting__('mdra') == 'true':
sendung.append('MDRaktuellXX:XXUhr')
sendung.append('mdrh')
if __setting__('ndra') == 'true':
sendung.append('NDRAktuell')
sendung.append('ndr')
if __setting__('srb') == 'true':
sendung.append('aktuellerbericht')
sendung.append('sr')
if __setting__('wdras') == 'true':
sendung.append('AktuelleStunde')
sendung.append('wdr')
if __setting__('swra') == 'true':
sendung.append('SWRAktuellRheinland-Pfalz')
sendung.append('swr')
if __setting__('hra') == 'true':
sendung.append('hessenschau')
sendung.append('hs')
if __setting__('brfa') == 'true':
sendung.append('Frankenschauaktuell')
sendung.append('brfa')
def datum(tim):
return datetime.datetime.fromtimestamp(int(tim)).strftime('%d.%m.%Y')
def prep(str):
prepstr = re.sub('\d', 'X', str)
prepstr = prepstr.replace(' ','')
prepstr = prepstr.replace('//','')
if (prepstr == 'Aktuell(XX:XXUhr)'):
prepstr = 'Aktuell(XXUhr)'
if (prepstr=='heuteXX:XXUhr'):
prepstr = 'heute'
return prepstr
def start():
size=200
payload={'queries':[{'fields':['topic'],'query':'tagesschau'},{'fields':['topic'],'query':'heute'},{'fields':['topic'],'query':'Rundschau'},{'fields':['topic'],'query':'hessenschau'},{'fields':['topic'],'query':'aktuell'},{'fields':['channel'],'query':'zdf'},{'fields':['channel'],'query':'ard'}],'sortBy':'timestamp','sortOrder':'desc','future':False,'offset':0,'size':size}
mvw = 'https://mediathekviewweb.de/api/query'
#mvw = 'http://123derf.org/'
params = json.dumps(payload).encode('utf8')
req = urllib2.Request(mvw, data=params,headers={'content-type': 'text/plain'})
try:
response = urllib2.urlopen(req)
except urllib2.HTTPError, e:
notice('HTTPError = ' + str(e.code))
xbmcgui.Dialog().notification('ERROR','mediathekviewweb.de nicht erreichbar',xbmcgui.NOTIFICATION_ERROR, 5000)
return
except urllib2.URLError, e:
notice('URLError = ' + str(e.reason))
xbmcgui.Dialog().notification('ERROR','mediathekviewweb.de nicht erreichbar',xbmcgui.NOTIFICATION_ERROR, 5000)
return
#sender = ['tagesschau','tagesschauXX','heute','heuteplus','RundschauNacht','heutejournal','NDR//Aktuell','SWRAktuellRheinland-Pfalz','Aktuell(XX:XXUhr)','SWRAktuellBaden-W\xc3rttemberg','MDRaktuellXX:XXUhr','Brandenburgaktuell','hessenschau','aktuellerbericht','MDRSACHSEN-ANHALTHEUTE','heuteXX:XXUhr','AktuelleStunde','Aktuell(XXUhr)','Leuteheute','Frankenschauaktuell','heute-inEuropa','heute-inDeutschland']
#sender = ['tagesschau','tagesschauXX','heute','heuteplus','RundschauNacht','heutejournal','NDR//Aktuell','SWRAktuellRheinland-Pfalz','Aktuell(XX:XXUhr)','MDRaktuellXX:XXUhr','Brandenburgaktuell','hessenschau','aktuellerbericht','MDRSACHSEN-ANHALTHEUTE','heuteXX:XXUhr','AktuelleStunde','Aktuell(XXUhr)','Frankenschauaktuell','heute-inEuropa','heute-inDeutschland']
j = json.loads(response.read())
arr = []
hash = {}
geb = u'Gebärden'.encode('utf-8')
pl=xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
pl.clear()
for i in j['result']['results']:
if(round((i['duration']/60))>9):
topic = prep(i['topic'])
if not topic in hash:
if (not geb in topic.encode('utf-8')) and (not geb in i['title'].encode('utf-8')) and (topic in sendung):
up = {topic:'x'}
hash.update(up)
date = datum(i['timestamp'])
url = i['url_video_hd']
if url == '':
url = i['url_video']
name = i['title']
img = image(sendung[sendung.index(topic)+1])
desc = i['description']
if topic == 'Frankenschauaktuell':
name = '<NAME>'
if topic == 'tagesschauXX':
name = 'Tageschau 24'
desc = i['title']
if re.search('(31|30|[012]\d|\d)\.(0\d|1[012]|\d)\.(\d{1,6})',name) is None:
name = name+' vom '+date
li = xbmcgui.ListItem(name,thumbnailImage=img)
li.setInfo('video', { 'plot': desc })
xbmc.PlayList(1).add(url,li)
notice(topic)
notice(url)
xbmc.Player().play(pl)
def debug(content):
log(content, xbmc.LOGDEBUG)
def notice(content):
log(content, xbmc.LOGNOTICE)
def log(msg, level=xbmc.LOGNOTICE):
addon = xbmcaddon.Addon()
addonID = addon.getAddonInfo('id')
xbmc.log('%s: %s' % (addonID, msg), level)
def mUnescape(s):
if '&' not in s:
return s
    s = s.replace('&amp;','&')
    s = s.replace('&quot;','"')
    s = s.replace('&gt;','>')
    s = s.replace('&lt;','<')
    s = s.replace('&#039;',"'")
    s = s.replace('&#064;','@')
    s = s.replace('&#037;','%')
return s
def image(sender):
image = xbmc.translatePath(xbmcaddon.Addon().getAddonInfo('path')+'/resources/media/'+sender+'.png').decode('utf-8')
return image
# START #
start()
``` |
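The `prep` helper above normalises show titles by masking digits and stripping spaces so that schedule-dependent names collapse to one lookup key. A minimal Python 3 sketch of that normalisation (the title string below is just an illustrative value):

```python
import re

def mask_digits(title):
    # Replace every digit with X and drop spaces and "//", as prep() does.
    masked = re.sub(r"\d", "X", title)
    return masked.replace(" ", "").replace("//", "")

print(mask_digits("MDR aktuell 19:30 Uhr"))  # -> MDRaktuellXX:XXUhr
```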
{
"source": "7haomeng/fira_gazebo-",
"score": 2
} |
#### File: src/strategy/nubot_communication.py
```python
import rospy
import math
from nubot_common.msg import OminiVisionInfo
from nubot_common.msg import VelCmd
from transfer.msg import PPoint
from nubot_common.srv import BallHandle
from nubot_common.srv import Shoot
class Nubot_communication(object):
def __init__(self, robot_number, robot_id):
self.init_flag1 = 0
self.init_flag2 = 0
self.robot_number = robot_number
self.subscriber(robot_id)
self.publisher(robot_id)
def subscriber(self, robot_id):
if robot_id == 1:
rospy.Subscriber('nubot{}/omnivision/OmniVisionInfo'.format(self.robot_number), OminiVisionInfo, self.getOmniVision)
rospy.Subscriber('nubot{}/omnivision/OmniVisionInfo/GoalInfo'.format(self.robot_number), PPoint, self.getGoalInfo)
else:
rospy.Subscriber('rival{}/omnivision/OmniVisionInfo'.format(self.robot_number), OminiVisionInfo, self.getOmniVision)
rospy.Subscriber('rival{}/omnivision/OmniVisionInfo/GoalInfo'.format(self.robot_number), PPoint, self.getGoalInfo)
def publisher(self, robot_id):
if robot_id == 1:
self.nubot_cmd_pub = rospy.Publisher('nubot{}/nubotcontrol/velcmd'.format(self.robot_number), VelCmd, queue_size=100)
else:
self.rival_cmd_pub = rospy.Publisher('rival{}/nubotcontrol/velcmd'.format(self.robot_number), VelCmd, queue_size=100)
def ballhandle_client(self, robot_id):
if robot_id == 1:
rospy.wait_for_service('nubot{}/BallHandle'.format(self.robot_number))
ballhandle_srv = rospy.ServiceProxy('nubot{}/BallHandle'.format(self.robot_number), BallHandle)
ballhandle_response = ballhandle_srv(1)
return ballhandle_response.BallIsHolding
else:
rospy.wait_for_service('rival{}/BallHandle'.format(self.robot_number))
ballhandle_srv = rospy.ServiceProxy('rival{}/BallHandle'.format(self.robot_number), BallHandle)
ballhandle_response = ballhandle_srv(1)
return ballhandle_response.BallIsHolding
def shoot_client(self, robot_id):
if robot_id == 1:
rospy.wait_for_service('nubot{}/Shoot'.format(self.robot_number))
nubot_goal_keeper_shoot_srv = rospy.ServiceProxy('nubot{}/Shoot'.format(self.robot_number), Shoot)
nubot_goal_keeper_shoot_ground_response = nubot_goal_keeper_shoot_srv(5, 1)
return nubot_goal_keeper_shoot_ground_response.ShootIsDone
else:
rospy.wait_for_service('rival{}/Shoot'.format(self.robot_number))
nubot_goal_keeper_shoot_srv = rospy.ServiceProxy('rival{}/Shoot'.format(self.robot_number), Shoot)
nubot_goal_keeper_shoot_ground_response = nubot_goal_keeper_shoot_srv(5, 1)
return nubot_goal_keeper_shoot_ground_response.ShootIsDone
def getOmniVision(self, vision):
self.init_flag1 = 1
self.ball_dis = vision.ballinfo.real_pos.radius
self.ball_ang = math.degrees(vision.ballinfo.real_pos.angle)
def getGoalInfo(self, goal_info):
self.init_flag2 = 1
self.left_goal_dis = goal_info.left_radius
self.left_goal_ang = goal_info.left_angle
self.right_goal_dis = goal_info.right_radius
self.right_goal_ang = goal_info.right_angle
def pubNubotCtrl(self, x, y, yaw, robot_id):
angle = yaw
velocity = math.hypot(x, y)
if x != 0:
alpha = math.degrees(math.atan2(y, x))
else:
alpha = 0
dis_max = 2
dis_min = 0.3
velocity_max = 70
velocity_min = 50
angular_velocity_max = 2
angular_velocity_min = 0.5
angle_max = 144
angle_min = 20
angle_out = angle
if velocity == 0:
pass
elif velocity > dis_max:
velocity = velocity_max
elif velocity < dis_min:
velocity = velocity_min
else:
velocity = (velocity_max - velocity_min) * (math.cos((((velocity - dis_min) / (dis_max-dis_min) - 1) * math.pi)) + 1 )/ 2 + velocity_min
if angle == 0:
pass
elif abs(angle) > angle_max:
angle_out = angular_velocity_max
elif abs(angle) < angle_min:
angle_out = angular_velocity_min
else:
angle_out = (angular_velocity_max - angular_velocity_min) * (math.cos((((angle - angle_min) / (angle_max-angle_min) - 1) * math.pi)) + 1 )/ 2 + angular_velocity_min
if angle < 0:
angle_out = -angle_out
x = velocity * math.cos(math.radians(alpha))
y = velocity * math.sin(math.radians(alpha))
yaw = angle_out
vel = VelCmd()
vel.Vx = x
vel.Vy = y
vel.w = yaw
if robot_id == 1:
self.nubot_cmd_pub.publish(vel)
else:
self.rival_cmd_pub.publish(vel)
print('\r vel.Vx: {}\n vel.Vy: {}\n vel.w: {}'.format(vel.Vx, vel.Vy, vel.w))
``` |
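pubNubotCtrl maps the distance to the target onto a bounded velocity with a cosine ramp between the min/max thresholds. A standalone sketch of that interpolation, reusing the numeric bounds from the code above (the sample input value is arbitrary):

```python
import math

def cosine_ramp(value, in_min, in_max, out_min, out_max):
    # Smoothly interpolate between out_min and out_max as value moves
    # from in_min to in_max, mirroring the velocity shaping above.
    t = (value - in_min) / (in_max - in_min)
    return (out_max - out_min) * (math.cos((t - 1) * math.pi) + 1) / 2 + out_min

print(cosine_ramp(1.0, 0.3, 2.0, 50, 70))  # about 57.3 for these bounds
```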
{
"source": "7homasSutter/BigMAC",
"score": 3
} |
#### File: BigMAC/android/capabilities.py
```python
class Capabilities(object):
def __init__(self):
# these are sets of strings, which make them easier to work with
self.inherited = set()
self.permitted = set()
self.effective = set()
self.bounding = set()
self.ambient = set()
# Not a real capset, just here as a guess on capabilities
# from the SELinux policy
self.selinux = set()
def grant_all(self):
self.inherited = set()
self.ambient = set()
self.permitted = set(ALL_CAPABILITIES)
self.effective = set(ALL_CAPABILITIES)
self.bounding = set(ALL_CAPABILITIES)
def bound_default(self):
self.bounding = set(ALL_CAPABILITIES)
def bound_none(self):
        self.bounding = set()
def drop_all(self):
self.inherited = set()
self.ambient = set()
self.permitted = set()
self.effective = set()
self.bounding = set(ALL_CAPABILITIES)
def add(self, set_name, cap):
name_mapping = {
"selinux": self.selinux,
"inherited": self.inherited,
"effective": self.effective,
"ambient": self.ambient,
"permitted": self.permitted,
"bounding": self.bounding
}
if set_name not in name_mapping:
raise ValueError("Capability set '%s' does not exist" % set_name)
else:
self._add_cap(cap, name_mapping[set_name])
def selinux(self, cap):
self._add_cap(cap, self.selinux)
def _add_cap(self, cap, capset):
assert isinstance(cap, str)
Capabilities.name_to_bit(cap)
capset |= set([Capabilities._cannonicalize_name(cap)])
@staticmethod
def _cannonicalize_name(name):
if name.lower().startswith("cap_"):
return name.upper()
else:
return ("CAP_"+name).upper()
@staticmethod
def name_to_bit(name):
return CAPABILITIES_INV[Capabilities._cannonicalize_name(name)]
@staticmethod
def bit_to_name(bit):
return CAPABILITIES[bit]
def __str__(self):
output = ""
names = ['CapInh', 'CapPrm', 'CapEff', 'CapBnd', 'CapAmb']
sets = [self.inherited, self.permitted, self.effective, self.bounding, self.ambient]
for name, s in zip(names, sets):
number = 0
for cap in s:
number |= 1 << Capabilities.name_to_bit(cap)
# Just like how Linux outputs
output += "%s:\t%016x\n" % (name, number)
return output
CAPABILITIES = {
0: "CAP_CHOWN",
1: "CAP_DAC_OVERRIDE",
2: "CAP_DAC_READ_SEARCH",
3: "CAP_FOWNER",
4: "CAP_FSETID",
5: "CAP_KILL",
6: "CAP_SETGID",
7: "CAP_SETUID",
8: "CAP_SETPCAP",
9: "CAP_LINUX_IMMUTABLE",
10: "CAP_NET_BIND_SERVICE",
11: "CAP_NET_BROADCAST",
12: "CAP_NET_ADMIN",
13: "CAP_NET_RAW",
14: "CAP_IPC_LOCK",
15: "CAP_IPC_OWNER",
16: "CAP_SYS_MODULE",
17: "CAP_SYS_RAWIO",
18: "CAP_SYS_CHROOT",
19: "CAP_SYS_PTRACE",
20: "CAP_SYS_PACCT",
21: "CAP_SYS_ADMIN",
22: "CAP_SYS_BOOT",
23: "CAP_SYS_NICE",
24: "CAP_SYS_RESOURCE",
25: "CAP_SYS_TIME",
26: "CAP_SYS_TTY_CONFIG",
27: "CAP_MKNOD",
28: "CAP_LEASE",
29: "CAP_AUDIT_WRITE",
30: "CAP_AUDIT_CONTROL",
31: "CAP_SETFCAP",
32: "CAP_MAC_OVERRIDE",
33: "CAP_MAC_ADMIN",
34: "CAP_SYSLOG",
35: "CAP_WAKE_ALARM",
36: "CAP_BLOCK_SUSPEND",
37: "CAP_AUDIT_READ",
}
CAPABILITIES_INV = dict([[v,k] for k,v in CAPABILITIES.items()])
ALL_CAPABILITIES = list(CAPABILITIES.values())
ALL_CAPABILITY_BITS = list(range(len(CAPABILITIES)))
```
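A short usage sketch for the Capabilities class above, assuming the BigMAC sources are importable as the `android` package:

```python
from android.capabilities import Capabilities

caps = Capabilities()
caps.add("effective", "net_admin")       # names are canonicalised to CAP_NET_ADMIN
caps.add("permitted", "CAP_SYS_ADMIN")
print(Capabilities.name_to_bit("net_admin"))  # 12
print(caps)  # Linux-style CapInh/CapPrm/CapEff/CapBnd/CapAmb hex lines
```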
#### File: BigMAC/android/dac.py
```python
import android
from android.capabilities import Capabilities
from android.sepolicy import SELinuxContext
class Cred(object):
def __init__(self):
self.uid = None
self.gid = None
self.groups = set()
self.sid = None
self.cap = Capabilities()
def clear_groups(self):
self.groups = set()
def __eq__(self, other):
return hash(self) == hash(other)
def __hash__(self):
return hash(str(self))
def execve(self, file_obj=None, new_sid=None):
import copy
new = Cred()
new.uid = self.uid
new.gid = self.gid
new.groups = copy.deepcopy(self.groups)
if new_sid:
assert isinstance(new_sid, SELinuxContext)
new.sid = copy.deepcopy(new_sid)
else:
new.sid = copy.deepcopy(self.sid)
# TODO: check file if set-user-id and for file capabilities
# TODO: handle capability(7) assignment semantics
# TODO: file system capabilities /vendor/bin/pm-service
# Drop by default, when transitioning to a non-privileged process
if new.uid != 0:
new.cap = Capabilities()
else:
new.cap = copy.deepcopy(self.cap)
return new
def add_group(self, gid):
if isinstance(gid, int):
self.groups |= set([gid])
elif isinstance(gid, str):
self.groups |= set([AID_MAP_INV[gid]])
else:
raise ValueError("Expected type int or str")
def __str__(self):
additional_info = [
"u=%s" % AID_MAP.get(self.uid, str(self.uid)),
"g=%s" % AID_MAP.get(self.gid, str(self.gid)),
]
if self.sid:
additional_info += ["sid=" + str(self.sid)]
if self.groups and len(self.groups):
additional_info += ["groups=" + ",".join(map(lambda x: AID_MAP.get(x, str(x)), sorted(self.groups)))]
if len(self.cap.effective):
if len(self.cap.effective) == len(android.capabilities.ALL_CAPABILITIES):
additional_info += ["cap=EVERYTHING"]
else:
additional_info += ["cap=" + ",".join(list(self.cap.effective))]
additional_info = " ".join(additional_info)
return "<Cred %s>" % additional_info
def _parse_aid_file():
import re
import os
# Parse android AID definitions (/system/core/libcutils/include_vndk/cutils/android_filesystem_config.h)
# TODO: this is not completely cross-vendor as some vendors fork this file and add custom UIDs to it
# The solution to this that is to extract the exported table symbol `android_id` from the libc.so ELF
try:
fp = open(os.path.join(os.path.dirname(__file__), 'android_fs.h'), 'r')
data = fp.read()
fp.close()
except IOError:
raise IOError("Unable to find android_fs.h file (uid/gid mapping)")
mapping = {}
got_range = False
range_start = 0
for line in data.split("\n"):
# Ignore comments and blank lines
if re.match(r'^\s*((/\*)|(//)|($))', line):
continue
tokens = list(filter(lambda x: x != "", line.split(" ")))
aid_name = tokens[1]
aid = int(tokens[2])
assert aid_name[:4] == "AID_"
if got_range:
assert aid_name.endswith("_END")
got_range = False
aid_name = aid_name[4:-4].lower()
for i in range(range_start, aid+1):
mapping[i] = "%s_%d" % (aid_name, i)
continue
if aid_name.endswith("_START"):
got_range = True
range_start = aid
continue
# XXX: there are some exceptions to this (mediacodec, etc.)
aid_name = aid_name[4:].lower()
# Some exceptions to the rule
if aid_name in ['media_codec', 'media_ex', 'media_drm']:
aid_name = aid_name.replace('_', '')
# XXX: not handling ranges
mapping[aid] = aid_name
return mapping
# number to name
AID_MAP = _parse_aid_file()
AID_MAP_INV = dict([[v,k] for k,v in AID_MAP.items()])
```
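Likewise, a brief sketch of how a Cred might be built and transitioned with the class above, again assuming the repository's `android` package is importable; the UID/GID and group values are illustrative:

```python
from android.dac import Cred

cred = Cred()
cred.uid = 0
cred.gid = 0
cred.add_group("system")   # string names resolve through AID_MAP_INV
cred.cap.grant_all()       # root starts with every capability
child = cred.execve()      # capabilities are kept because uid == 0
print(cred)
print(child)
```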
#### File: eval/tools/compare.py
```python
from collections import OrderedDict
import argparse
import os
def read_data(data):
files = OrderedDict()
for line in data.split("\n"):
if line == "":
continue
if "Permission denied" in line:
continue
components = line.split(" ")
if len(components) < 5:
raise ValueError("Invalid line %s" % components)
dac = components[0]
user = components[1]
group = components[2]
sid = components[3]
path = components[4]
if path in files:
print("WARNDUP: " + path)
continue
if path.endswith("/") and path != "/":
raise ValueError(path)
        # weird crap: some symbolic links on Android do not have 777 perms!!!
# this is messed up and the Linux kernel doesn't seem to care anyways
if dac.startswith('l'):
dac = "lrwxrwxrwx"
# blacklist
if path.lower().find("magisk") != -1 or path.startswith("/sys/") or path.startswith("/acct/") or path.startswith("/sbin/.core") or \
path.startswith("/system_root") or path.startswith("/proc/") or path.startswith("/d/") or path.startswith("/dev/__properties__") or \
path.startswith("/dev/socket/"):
continue
files[path] = { "perms" : dac, "user" : user, "group" : group, "selinux" : sid }
return files
def fnum(n):
nn = ""
n = str(n)
for i in range(len(n)):
if i > 0:
if i % 3 == 0:
nn += ","
nn += n[len(n) - i - 1]
return nn[::-1]
def print_prefixes(fps, levels, total=None, detailed=False, flatten=False):
prefixes = [{}, {}]
cdfs = [0, 0]
print("Total: %s" % fnum(len(fps)))
for level in range(levels):
fc = prefixes[level]
for fn in fps:
parts = fn.split(os.path.sep)
prefix = parts[1]
for i in range(level):
idx = i+2
if len(parts) > idx:
prefix += "/" + parts[idx]
if prefix not in fc:
fc[prefix] = 1
else:
fc[prefix] += 1
if flatten:
cdf = 0
for f, freq in sorted(prefixes[1].items(), key=lambda x: (x[1]), reverse=True):
missing = len(fps) if not total else total
cdf += freq
if freq > 1 or detailed:
print("/%-10s - %s (%.1f%%, %.1f%%)" % (f, fnum(freq), float(freq)/missing*100, float(cdf)/missing*100.0))
else:
cdf = 0
for f, freq in sorted(prefixes[0].items(), key=lambda x: (x[1]), reverse=True):
missing = len(fps) if not total else total
cdf += freq
if freq > 1 or detailed:
print("/%-10s - %s (%.1f%%, %.1f%%)" % (f, fnum(freq), float(freq)/missing*100, float(cdf)/missing*100.0))
missing = freq
cdf2 = 0
for f2, freq2 in filter(lambda x: x[0].startswith(f+"/"), sorted(prefixes[1].items(), key=lambda x: (x[1]), reverse=True)):
cdf2 += freq2
if freq2 > 1 or detailed:
print(" /%-10s - %s (%.1f%%, %.1f%%)" % (f2, fnum(freq2), float(freq2)/missing*100, float(cdf2)/missing*100.0))
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--detailed", action="store_true")
parser.add_argument("--flatten", action="store_true")
parser.add_argument("--levels", default=1, type=int)
parser.add_argument("recovered")
parser.add_argument("real")
args = parser.parse_args()
recovered_data = open(args.recovered).read()
real_data = open(args.real).read()
bad_files = OrderedDict()
good_files = OrderedDict()
extra_files = OrderedDict()
missing_files = OrderedDict()
wrong_dac = OrderedDict()
wrong_mac = OrderedDict()
recovered = read_data(recovered_data)
real = read_data(real_data)
for fn, f in recovered.items():
if fn not in real:
extra_files.update({ fn : f })
else:
of = real[fn]
bad = False
if f["user"] != of["user"] or f["group"] != of["group"] or f["perms"] != of["perms"]:
wrong_dac.update({ fn : { "recovered" : f, "real" : of }})
bad = True
if f["selinux"] != of["selinux"]:
wrong_mac.update({ fn : { "recovered" : f, "real" : of }})
bad = True
if not bad:
good_files.update({ fn : f })
else:
bad_files.update({ fn : f })
for fn, f in real.items():
if fn not in recovered:
missing_files.update({ fn : f })
print("----- DAC LISTING -----")
for fn, f in wrong_dac.items():
l = f["recovered"]
r = f["real"]
diff = []
if l["user"] != r["user"]:
diff += [[l["user"], r["user"]]]
if l["group"] != r["group"]:
diff += [[l["group"], r["group"]]]
if l["perms"] != r["perms"]:
diff += [[l["perms"], r["perms"]]]
nd = []
for d in diff:
nd += ["%s != %s" % (d[0], d[1])]
diff = ", ".join(nd)
print("%s [%s]" % (fn, diff))
recovered_secontexts = {}
real_secontexts = {}
for fn, f in real.items():
se = f["selinux"]
if se not in real_secontexts:
real_secontexts[se] = 0
real_secontexts[se] += 1
for fn, f in recovered.items():
se = str(f["selinux"])
if se not in recovered_secontexts:
recovered_secontexts[se] = 0
recovered_secontexts[se] += 1
print("")
print("----- MAC LISTING -----")
for fn, f in wrong_mac.items():
print("%s [%s != %s]" % (fn, f["recovered"]["selinux"], f["real"]["selinux"]))
print("")
print("----- BAD MAC REPORT -----")
print_prefixes(wrong_mac, args.levels, total=len(recovered), detailed=args.detailed, flatten=args.flatten)
print("")
print("----- BAD DAC REPORT -----")
print_prefixes(wrong_dac, args.levels, total=len(recovered), detailed=args.detailed, flatten=args.flatten)
print("")
print("----- BAD FILES REPORT -----")
print_prefixes(bad_files, args.levels, total=len(recovered), detailed=args.detailed, flatten=args.flatten)
print("")
print("----- GOOD FILES REPORT -----")
print_prefixes(good_files, args.levels, total=len(recovered), detailed=args.detailed, flatten=args.flatten)
print("")
print("----- EXTRA FILES REPORT -----")
print_prefixes(extra_files, args.levels, total=len(recovered), detailed=args.detailed, flatten=args.flatten)
print("")
print("----- MISSING FILES REPORT -----")
print_prefixes(missing_files, args.levels, total=(len(real)-len(bad_files)-len(good_files)), detailed=args.detailed, flatten=args.flatten)
print("")
print("----- MISSING SECONTEXTS -----")
secontext_diff = set(real_secontexts) - set(recovered_secontexts)
print(secontext_diff)
print_prefixes(missing_files, args.levels, total=(len(real)-len(bad_files)-len(good_files)), detailed=args.detailed, flatten=args.flatten)
print("")
print("----- STATS -----")
print("Recovered files %s, Real files %s" % (fnum(len(recovered)), fnum(len(real))))
print("Extra files %s (in recovered)" % fnum(len(extra_files)))
print("Missing files %s (from real)" % fnum(len(missing_files)))
print("Wrong DAC %s" % fnum(len(wrong_dac)))
print("Wrong MAC %s" % fnum(len(wrong_mac)))
print("Recovered %s SEContexts, Real SEContexts %s" % (fnum(len(recovered_secontexts)), fnum(len(real_secontexts))))
print("")
print("File Contexts Recovery %.2f%%" % (float(len(recovered_secontexts))/(len(real_secontexts))*100.0))
print("MAC/DAC Accuracy %.2f%%" % (float(len(good_files))/(len(recovered)-len(extra_files))*100.0))
```
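read_data() above expects one file per line with five space-separated fields: permissions, user, group, SELinux context and path. An illustrative line and how it splits (the values are made up):

```python
line = "-rw-r--r-- root root u:object_r:system_file:s0 /system/build.prop"
perms, user, group, sid, path = line.split(" ")[:5]
print(path, "->", user, group, perms, sid)
```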
#### File: 7homasSutter/BigMAC/graph2prolog.py
```python
from __future__ import print_function
import argparse
import sys
import os
import logging
import json
import networkx as nx
from android.sepolicy import SELinuxContext
from networkx.readwrite import json_graph
from config import *
logging.basicConfig(stream=sys.stdout, format="%(levelname)s: %(message)s", level=logging.INFO)
log = logging.getLogger(__name__)
def main():
print("Policy Graph to Prolog")
print("")
parser = argparse.ArgumentParser()
parser.add_argument('--debug', action='store_true')
parser.add_argument("graph_saved")
args = parser.parse_args()
if args.debug:
logging.getLogger().setLevel(logging.DEBUG)
if not os.access(args.graph_saved, os.R_OK):
log.error("Graph file does not exist or is not readable")
return 1
log.info("Loading policy graph %s", args.graph_saved)
with open(args.graph_saved, 'r') as fp:
graph_json = json.load(fp)
G = json_graph.node_link_graph(graph_json)
files = nx.get_node_attributes(G, 'files')
# Re-inflate SELinuxContext objects
for node, attr in files.items():
for fk, f in attr.items():
f["selinux"] = SELinuxContext.FromString(f["selinux"])
focus_types = ['init', 'mediaserver', 'untrusted_app', 'system_server']
G_subgraph = nx.MultiDiGraph(G.subgraph(focus_types))
for node in G_subgraph.nodes():
print(node, " ---- ", G_subgraph[node])
from IPython import embed
embed()
return 0
if __name__ == "__main__":
sys.exit(main())
```
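The script above round-trips the policy graph through networkx's node-link JSON format. A minimal sketch of that round trip, assuming networkx is installed (the node names are placeholders):

```python
import networkx as nx
from networkx.readwrite import json_graph

G = nx.MultiDiGraph()
G.add_edge("init", "system_server")
data = json_graph.node_link_data(G)    # serialise to a JSON-able dict
G2 = json_graph.node_link_graph(data)  # rebuild the graph from that dict
print(list(G2.edges()))                # [('init', 'system_server')]
```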
#### File: 7homasSutter/BigMAC/stats.py
```python
from __future__ import print_function
import argparse
import sys
import os
import logging
import shutil
import json
import networkx as nx
from prolog import Prolog
from config import *
from security_policy import ASPCodec, AndroidSecurityPolicy
from android.file_contexts import read_file_contexts
from android.initrc import AndroidInit
from segraph import SELinuxPolicyGraph
from sedump import SELinuxPolicyDump
from overlay import SEPolicyInst, ProcessState
from process import determine_hardware
from util.file import directories
logging.basicConfig(stream=sys.stdout, format="%(levelname)s: %(message)s", level=logging.INFO)
log = logging.getLogger(__name__)
def main():
print("Android Policy Inspector")
print("")
parser = argparse.ArgumentParser()
parser.add_argument('--vendor', required=True)
parser.add_argument('--debug', action='store_true')
args = parser.parse_args()
if args.debug:
logging.getLogger().setLevel(logging.DEBUG)
#if args.policy_name[-1] == "/":
# args.policy_name = args.policy_name[:-1]
#args.policy_name = os.path.basename(args.policy_name)
policy_results_dir = os.path.join(POLICY_RESULTS_DIR, args.vendor.lower())
if not os.access(policy_results_dir, os.R_OK):
log.error("Policy directory does not exist or is not readable")
return 1
policies = list(directories(policy_results_dir))
models = {}
log.info("Loading %d %s policies...", len(policies), args.vendor)
for pol in policies:
if pol[-1] == "/":
pol = pol[:-1]
pol = os.path.basename(pol)
asp = AndroidSecurityPolicy(args.vendor, pol)
aspc = ASPCodec(asp)
try:
asp = aspc.load(quick=True)
except ValueError as e:
log.debug("%s is corrupt: %s", pol, e)
continue
image_data = asp.get_properties()
android_version = asp.get_android_version()
major, minor, revision = android_version
model = image_data["properties"]["model"]
if args.vendor != "aosp":
model = model[:-1]
if model not in models:
models[model] = []
models[model] += [[asp, aspc, android_version]]
if args.vendor == "aosp":
pixel = sorted(models["Pixel"], key=lambda x: x[2][0])
for asp, aspc, version in pixel:
asp = aspc.load()
image_data = asp.get_properties()
log.info("STAT: Image summary: %s (%s)",
image_data["summary"], ".".join(map(str, version)))
process(args, asp, aspc)
elif args.vendor == "samsung":
device = sorted(models["SM-G955"], key=lambda x: x[2][0])
for asp, aspc, version in device:
asp = aspc.load()
image_data = asp.get_properties()
log.info("STAT: Image summary: %s (%s)",
image_data["summary"], ".".join(map(str, version)))
process(args, asp, aspc)
else:
for mn, model in models.items():
majors = set()
for _, _, version in model:
majors |= set([version[0]])
if len(majors) >= 3:
print(mn, model)
if args.debug:
from IPython import embed
oldlevel = logging.getLogger().getEffectiveLevel()
logging.getLogger().setLevel(logging.INFO)
embed()
logging.getLogger().setLevel(oldlevel)
return 0
def process(args, asp, aspc):
android_version = asp.get_android_version()
major, minor, revision = android_version
try:
# Treble handling
if major == 8:
file_contexts = read_file_contexts(asp.get_saved_file_path("plat_file_contexts"))
file_contexts += read_file_contexts(asp.get_saved_file_path("nonplat_file_contexts"))
elif major >= 9:
file_contexts = read_file_contexts(asp.get_saved_file_path("plat_file_contexts"))
file_contexts += read_file_contexts(asp.get_saved_file_path("vendor_file_contexts"))
else:
file_contexts = read_file_contexts(asp.get_saved_file_path("file_contexts"))
except KeyError:
return
log.info("STAT: %d file contexts", len(file_contexts))
primary_filesystem = asp.fs_policies[0]
init = AndroidInit(aspc.results_dir, asp.properties, primary_filesystem)
determine_hardware(asp, primary_filesystem, init)
init.read_configs("/init.rc")
init.boot_system()
log.info("STAT: %d services", len(init.services))
active_services = 0
root_services = 0
for sname, service in sorted(list(init.services.items())):
if not service.oneshot:
active_services += 1
if service.cred.uid is None or service.cred.uid == 0:
root_services +=1
log.info("STAT: %d active", active_services)
log.info("STAT: %d will start as root", root_services)
log.info("STAT: %d files", len(primary_filesystem.files))
try:
sepolicy = None
if "sepolicy" in asp.policy_files:
sepolicy = asp.get_saved_file_path("sepolicy")
elif "precompiled_sepolicy" in asp.policy_files:
sepolicy = asp.get_saved_file_path("precompiled_sepolicy")
if not sepolicy:
log.error("STAT: No compiled sepolicy found. Cannot continue")
return
policy_graph = SELinuxPolicyGraph(sepolicy)
except OSError:
log.error("STAT: Unable to load SEAndroid policy file. Use --debug for more details")
return
log.info("Building SEPolicy graph")
graph = policy_graph.build_graph()
log.info("STAT: SEPolicy %d nodes and %d allow edges",
len(graph["graphs"]["allow"].nodes()), len(graph["graphs"]["allow"].edges()))
stats = ["attributes", "types", "genfs", "fs_use"]
for s in stats:
log.info("STAT: SEPolicy %s %d", s, len(graph[s]))
log.info("STAT: SEPolicy domains %d", len(graph["attributes"]["domain"]))
inst = SEPolicyInst(primary_filesystem, graph, file_contexts, init, android_version)
inst.instantiate(draw_graph=False, expand_obj=True, skip_fileless=True)
running = sorted(filter(lambda x: x[1].state == ProcessState.RUNNING, inst.processes.items()))
log.info("STAT: Recovered processes %d", len(inst.processes))
log.info("STAT: Recovered running processes %d", len(running))
if __name__ == "__main__":
sys.exit(main())
```
#### File: BigMAC/util/file.py
```python
import os
def files(path):
for file in os.listdir(path):
if os.path.isfile(os.path.join(path, file)):
yield os.path.join(path, file)
def directories(path):
for file in os.listdir(path):
if os.path.isdir(os.path.join(path, file)):
yield os.path.join(path, file)
def mkdir(path):
"""Same as os.mkdir but ignores errors due to existing directories"""
try:
os.mkdir(path)
except FileExistsError:
pass
def mkdir_recursive(path):
total_path = ""
for component in os.path.normpath(path).split(os.sep):
total_path = os.path.join(total_path, component)
mkdir(total_path)
def chown_parents(path, uid, gid):
if os.path.isabs(path):
raise ValueError("Path must not be absolute (this is always an error)")
total_path = ""
for component in os.path.normpath(path).split(os.sep):
total_path = os.path.join(total_path, component)
os.chown(total_path, uid, gid)
def chown_recursive(path, uid, gid):
includeroot = True
for root, dirs, files in os.walk(path, followlinks=False):
if includeroot:
objects = ["."] + dirs + files
includeroot = False
else:
objects = dirs + files
for obj in objects:
path = os.path.join(root, obj)
path = os.path.normpath(path)
os.chown(path, uid, gid)
``` |
{
"source": "7igr3ss/python_projects",
"score": 3
} |
#### File: IPGeolocation/expert/ipGeolocation.py
```python
print("""
\033[1;36;40m###################################### IPGEOLOCATION SCRIPT ###################################### \033[0m
""")
# Imports
import requests
import sys
import ipaddress
from datetime import datetime
# Variables
current = datetime.now()
dt_str = current.strftime("%d/%m/%Y %H:%M:%S")
# Usage
def help():
print("Usage: ./ipGeolocation.py </path/file> <api_key>")
print("Example: ./ipGeolocation.py /path/ipaddress_list.txt f2fb2bd77bab4b27ac702b0e363277e2")
exit(0)
# CIDR IPv4 Query
def cidrIpQuery(path2File, api_Key):
try:
with open (path2File, mode='r') as file:
contents = file.readlines()
for entry in contents:
sp = entry.replace("\n","").replace("\r","")
cidr = ipaddress.ip_network(sp)
for ip_lists in cidr.hosts():
data = {'ip' : ip_lists, 'apiKey' : api_Key}
reqs = requests.get("https://api.ipgeolocation.io/ipgeo", params=data)
# Data in json
json_data = reqs.json()
# Printing out the data
ipaddr = json_data['ip']
organization = json_data['organization']
countryCode = json_data['country_code3']
country = json_data['country_name']
print(f"\n \033[1;33;40m[✓] [{dt_str}] ~ Quering IP in List ~ \033[0m")
print(f"\tIP Address: {ipaddr}\r\n\tOrganization: {organization}\r\n\tCountry: {country}\r\n\tCountry Code: {countryCode}")
except (IsADirectoryError, FileNotFoundError) as e:
print("[x] Please enter the correct path to file that contains your IP Addresses!")
try:
if len(sys.argv) != 3:
help()
sys.exit(0)
if len(sys.argv) == 3:
cidrIpQuery(sys.argv[1], sys.argv[2])
except KeyError:
print("[x] Please supply the right API Key!")
```
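cidrIpQuery() treats each line of the input file as a CIDR network and queries every host inside it. A small sketch of that expansion using the standard ipaddress module (the network below is a documentation range, not a real target):

```python
import ipaddress

for host in ipaddress.ip_network("192.0.2.0/30").hosts():
    print(host)  # 192.0.2.1 and 192.0.2.2
```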
#### File: web_header_reporting/beginner/web_header_scan.py
```python
import argparse
import sys
import requests
# Banner
BANNER_INFO = f"================== Retrieving Web Headers =================="
# Creating ArugumentParser Arguments
EPL_INFO = "Queries the supplied urls for vulnerable web headers"
DESC_INFO = "API Key is needed from the xxx.xxxxx.xxxxx to run this script\n"
# Creating Arugument Parser Help Arguments
URL_INFO = "provide the URL link you wish to scan"
OUTPUT_INFO = "output result to a file"
# Arguments construction
PS_ARGS = argparse.ArgumentParser(description=f"{DESC_INFO}",
epilog=f"{EPL_INFO}")
PS_ARGS.add_argument("-u", "--url", help=f"{URL_INFO}", dest="URL", required=True,
metavar="http(s)://xxxx.xxx", type=str)
PS_ARGS.add_argument("-o", "--output", help=f"{OUTPUT_INFO}", dest="OUTPUT",
metavar="/path/output.txt", type=str)
# Parse arguments
args = PS_ARGS.parse_args()
# Declaring Header
STS = "Strict-Transport-Security"
class WebHrScan:
def __init__(self, URL=args.URL):
self.url = URL
# Query headers for a single url link
def query_web_headers(self):
data_entry = ""
resp = requests.get(self.url)
rsp_sts = resp.headers.get(STS)
data_entry += BANNER_INFO
data_entry += f"\nUrl: {self.url}"
if rsp_sts is not None:
data_entry += f"\n\t[✓] Strict-Transport-Security has been set!"
data_entry += f"\n\t[!] Configured Setting(s): {rsp_sts}!"
else:
data_entry += f"\n\t[x] Strict-Transport-Security is missing!"
return data_entry
# Output to file
def output_file(data_to_write, output_file=args.OUTPUT):
with open(output_file, mode='w', encoding='utf-8') as output_results:
output_results.write(data_to_write)
print(f"[!] Results have been writen to - {output_file} file!")
if __name__ == '__main__':
if args.URL is not None:
if args.OUTPUT is not None:
output_file(WebHrScan().query_web_headers())
else:
print(WebHrScan().query_web_headers())
``` |
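The check above boils down to reading one response header. A standalone sketch of the same Strict-Transport-Security probe with requests (example.com is a placeholder URL):

```python
import requests

resp = requests.get("https://example.com")
sts = resp.headers.get("Strict-Transport-Security")
if sts is not None:
    print("[✓] Strict-Transport-Security has been set:", sts)
else:
    print("[x] Strict-Transport-Security is missing!")
```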
{
"source": "7imbrook/imperfect",
"score": 3
} |
#### File: imperfect/tests/editing.py
```python
import unittest
from .. import parse_string
class EditingTest(unittest.TestCase):
def test_existing_section_change_entry_preserves_space(self) -> None:
# Space around the '=' is not changed when you change the value
conf = parse_string("[a]\n#comment1\nb = 1\n#comment2\n")
conf.set_value("a", "b", "2")
self.assertEqual("[a]\n#comment1\nb = 2\n#comment2\n", conf.text)
def test_existing_section_change_entry(self) -> None:
conf = parse_string("[a]\n#comment1\nb=1\n#comment2\n")
conf.set_value("a", "b", "2")
self.assertEqual("[a]\n#comment1\nb=2\n#comment2\n", conf.text)
def test_existing_section_new_entry(self) -> None:
conf = parse_string("[a]\nb = 1\n")
conf.set_value("a", "c", "2")
self.assertEqual("[a]\nb = 1\nc = 2\n", conf.text)
def test_new_section_new_entry(self) -> None:
conf = parse_string("")
conf.set_value("a", "b", "1")
self.assertEqual("[a]\nb = 1\n", conf.text)
def test_empty_value(self) -> None:
conf = parse_string("")
conf.set_value("a", "b", "")
self.assertEqual("[a]\nb =\n", conf.text)
def test_multiline_value(self) -> None:
conf = parse_string("")
conf.set_value("a", "b", "1\n2")
self.assertEqual("[a]\nb = 1\n 2\n", conf.text)
def test_multiline_value_hanging(self) -> None:
conf = parse_string("")
conf.set_value("a", "b", "\n1\n2")
self.assertEqual("[a]\nb =\n 1\n 2\n", conf.text)
def test_multiline_value_hanging_edit(self) -> None:
conf = parse_string("")
conf.set_value("a", "b", "x")
conf.set_value("a", "c", "y")
self.assertEqual("[a]\nb = x\nc = y\n", conf.text)
conf.set_value("a", "b", "\n1\n2")
self.assertEqual("[a]\nb =\n 1\n 2\nc = y\n", conf.text)
def test_set_to_empty_string(self) -> None:
conf = parse_string("")
conf.set_value("a", "b", "")
self.assertEqual("[a]\nb =\n", conf.text)
def test_set_to_empty_string_edit(self) -> None:
conf = parse_string("")
conf.set_value("a", "b", "x")
conf.set_value("a", "c", "y")
self.assertEqual("[a]\nb = x\nc = y\n", conf.text)
conf.set_value("a", "b", "")
self.assertEqual("[a]\nb =\nc = y\n", conf.text)
conf.set_value("a", "b", "2")
self.assertEqual("[a]\nb = 2\nc = y\n", conf.text)
def test_del_section(self) -> None:
conf = parse_string("[a]\na=1\n[b]\nb=2\n")
del conf["a"]
self.assertEqual("[b]\nb=2\n", conf.text)
with self.assertRaises(KeyError):
del conf["c"]
def test_del_value(self) -> None:
conf = parse_string("[a]\na=1\naa=2\n[b]\nb=2\n")
del conf["a"]["aa"]
self.assertEqual("[a]\na=1\n[b]\nb=2\n", conf.text)
with self.assertRaises(KeyError):
del conf["a"]["z"]
```
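The tests above double as documentation for the editing API. A condensed usage sketch, assuming the imperfect package is installed:

```python
import imperfect

conf = imperfect.parse_string("[a]\nb = 1\n")
conf.set_value("a", "c", "2")   # add a key to an existing section
del conf["a"]["b"]              # remove a key
print(conf.text)                # "[a]\nc = 2\n"
```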
#### File: imperfect/tests/imperfect.py
```python
import configparser
import itertools
import unittest
from typing import List, Optional
from parameterized import parameterized
import imperfect
SAMPLES = [
"[s]\nb=1",
"[s]\nb=1\nc=2",
"[s]\nb=1\n\n",
]
# This is for future adaptation to hypothesis, perhaps
def variations(section: Optional[str], k: str, v: str) -> List[str]:
assert section
section_strs = [f"\n\n[{section}]\n", f"[{section}]\n"]
return [
"".join(c)
for c in itertools.product(
section_strs,
["", "\n"],
["", " ", " ", "\t", " \t "],
[k],
["", " "],
["=", ":"],
["", " "],
[v],
["", "\n", "\n\n"],
)
]
class ImperfectTests(unittest.TestCase):
# The goal is to get to 100% test coverage with these, but then also have
# the hypothesis-based tests that validate behavior on many invented
# examples.
@parameterized.expand( # type: ignore
[("[s]\na=1",), ("[s]\na = 1",), ("[s]\na = 1\n",), ("[s]\na=\n 1",),],
)
def test_simple_parse(self, example: str) -> None:
conf = imperfect.parse_string(example)
# Minimal mapping api
self.assertEqual(["s"], conf.keys())
self.assertTrue("s" in conf)
self.assertEqual(["a"], conf["s"].keys())
self.assertTrue("a" in conf["s"])
# KeyError coverage
self.assertFalse("b" in conf)
self.assertFalse("b" in conf["s"])
self.assertIn(conf["s"]["a"], ("1", "\n1"))
@parameterized.expand([("a=1",),],) # type: ignore
def test_fail_to_parse(self, example: str) -> None:
with self.assertRaises(imperfect.ParseError):
imperfect.parse_string(example)
def test_multiline_with_comment(self) -> None:
conf = imperfect.parse_string("[s]\na=\n #comment\n b\n")
self.assertEqual("\nb", conf["s"]["a"])
def test_allow_no_value(self) -> None:
conf = imperfect.parse_string("[s]\na=", allow_no_value=True)
self.assertEqual("", conf["s"]["a"])
def test_alternate_delimiters(self) -> None:
conf = imperfect.parse_string("[s]\naqq1", delimiters=("qq",))
self.assertEqual("1", conf["s"]["a"])
def test_alternate_delimiters_allow_no_value(self) -> None:
conf = imperfect.parse_string(
"[s]\naqq", delimiters=("qq",), allow_no_value=True
)
self.assertEqual("", conf["s"]["a"])
def test_comment_prefixes(self) -> None:
conf = imperfect.parse_string("[s]\n@comment\na=1", comment_prefixes=("@",))
self.assertEqual("1", conf["s"]["a"])
@parameterized.expand( # type: ignore
[
("[ ]=",),
(" [0]",),
("[s]\na=1",),
("[s]\n[s2]\na=1",),
("[s]\n a = 1 \n\n",),
("#comment\n[s]\na=1\n#comment2",),
],
name_func=(lambda a, b, c: f"{a.__name__}_{b}"),
)
def test_simple_roundtrips(self, example: str) -> None:
conf = imperfect.parse_string(example)
self.assertEqual(example, conf.text)
def test_exhaustive_roundtrip(self) -> None:
for example in variations("sect", "a", "1"):
oracle = configparser.RawConfigParser()
try:
oracle.read_string(example)
self.assertEqual(oracle["sect"]["a"], "1")
except Exception: # pragma: no cover
continue
conf = None
try:
conf = imperfect.parse_string(example)
# Did we parse the same value as configparser
self.assertEqual(len(conf["sect"].entries), 1)
self.assertEqual(conf["sect"].entries[0].key, "a")
self.assertEqual(conf["sect"].entries[0].interpret_value(), "1")
except Exception: # pragma: no cover
print("Input: ", repr(example))
if conf:
# if conf.default:
# print("default:")
# for v in conf.default.entries:
# print(" ", v)
for s in conf.sections:
print(f"{s.name}:")
for v in s.entries:
print(" ", v)
raise
self.assertEqual(example, conf.text)
def test_example_from_readme(self) -> None:
INPUT = """
[metadata]
# the package name
name = imperfect
# slurp the readme
long_description = file: README.md
[options]
packages = imperfect
"""
EXPECTED = """
[metadata]
# the package name
name = imperfect
# slurp the readme
long_description = file: README.md
long_description_content_type = text/markdown
[options]
packages = imperfect
"""
import imperfect
import moreorless
data = INPUT
conf: imperfect.ConfigFile = imperfect.parse_string(data)
metadata_section = conf["metadata"]
# Ignoring some whitespace for now, this looks like
# long_description_content_type = text/markdown\n
# [ entry ]
# [ key ][eq][ value ]
value = imperfect.ValueLine(
whitespace_before_text="",
text="text/markdown",
whitespace_after_text="",
newline="\n",
)
new_entry = imperfect.ConfigEntry(
key="long_description_content_type",
equals="=",
value=[value],
whitespace_before_equals=" ",
whitespace_before_value=" ",
)
for i, entry in enumerate(metadata_section.entries):
if entry.key == "long_description":
metadata_section.entries.insert(i + 1, new_entry)
break
self.assertEqual(EXPECTED, conf.text)
temp = moreorless.unified_diff(data, conf.text, "setup.cfg")
# print(temp, end="")
self.assertTrue(temp)
```
#### File: imperfect/imperfect/verify.py
```python
import configparser
import io
import sys
from imperfect import parse_string
def verify(name: str) -> None:
with open(name) as f:
data = f.read()
conf = parse_string(data)
buf = io.StringIO()
conf.build(buf)
if data != buf.getvalue():
print(name, "FAIL ROUND TRIP")
return
try:
co = configparser.RawConfigParser()
co.read_string(data)
except Exception as e:
print(name, "FAIL TO READ IN CONFIGPARSER", e)
return
compares = 0
for section_name in co:
for key in co[section_name]:
compares += 1
expected_value = co[section_name][key]
try:
if conf[section_name][key] != expected_value:
print(name, section_name, key, "FAIL COMPARE")
print("configpar:", repr(expected_value))
print("imperfect:", repr(conf[section_name][key]))
return
except Exception as e:
print(name, section_name, key, "FAIL", str(e))
return
if compares == 0:
print(name, "FAIL EMPTY")
return
print(name, "OK")
if __name__ == "__main__": # pragma: no cover
for f in sys.argv[1:]:
verify(f)
``` |
{
"source": "7imon7ays/protein-matcher",
"score": 2
} |
#### File: protein-matcher/matcher/background_tasks.py
```python
from background_task import background
from matcher.clients import EntrezClient
from matcher.models import Search
@background(schedule=0)
def match_to_protein(search_id):
"""Execute calls to NCBI servers without blocking the client."""
search_instance = Search.objects.get(pk=search_id)
    # TODO: Add method on search to print first ten chars of DNA sequence.
print('Starting search #%s for a protein that matches DNA sequence "%s".' % ( # noqa
search_instance.id, search_instance.dna_sequence,
))
search_instance.mark_running()
entrez_client = EntrezClient()
try:
protein_id, accession_string, match_from, match_to =\
entrez_client.blast(search_instance.dna_sequence)
# Record in the searches table that this search encountered an error.
except Exception as exception: # noqa TODO: Find out what errors exactly the API call can throw.
if 'Query contains no sequence data' in str(exception):
print(
'Entrez reports no sequence data in "%s".' % (
search_instance.dna_sequence,
)
)
print('Marking search as not found.')
search_instance.mark_not_found()
return
# TODO: Make it easier to tell if the job has no more retries left.
search_instance.mark_error()
raise
if protein_id == '':
print('Search #%s did not match DNA sequence "%s" to a known protein ID.' % ( # noqa
search_instance.id, search_instance.dna_sequence,
))
search_instance.mark_not_found()
return
search_instance.protein_id = protein_id
search_instance.accession_string = accession_string
search_instance.match_from = match_from
search_instance.match_to = match_to
search_instance.mark_found()
print(
'Completed search with attributes %s.' % str(search_instance.as_dict())
)
```
#### File: protein-matcher/matcher/clients.py
```python
import io
import os
import xml.etree.ElementTree as ET
from time import sleep
from Bio.Blast.NCBIWWW import qblast
from django.conf import settings
def _mock_qblast(program, db, dna_sequence, entrez_query):
"""Mock out call to NCBI for development and testing."""
    info_string = """Running mock qblast.
    Program: {0}
    Database: {1}
    DNA sequence: {2}
    Entrez query: {3}
"""
print(info_string.format(program, db, dna_sequence, entrez_query))
if dna_sequence == settings.BLAST.get('mock_unmatchable_sequence'):
file_name = 'qblast_response_empty.xml'
else:
file_name = 'qblast_response_found.xml'
mock_file_path = os.path.join(
settings.BASE_DIR, 'matcher', 'mocks', file_name
)
with open(mock_file_path, 'r') as mock_file:
mock_response = mock_file.read()
sleep(settings.BLAST.get('mock_sleep_time_seconds', 2))
return io.StringIO(mock_response)
class EntrezClient:
"""Interface to NCBI databases."""
def __init__(self):
if settings.BLAST['mock_backend']:
self._qblast = _mock_qblast
else:
self._qblast = qblast
self._program = settings.BLAST['program']
self._db = settings.BLAST['database']
self._protein_accession_strings_to_ids =\
settings.PROTEIN_ACCESSION_STRINGS_TO_IDS
def blast(self, dna_sequence):
"""Call out to NCBI servers."""
qblast_response = self._qblast(
self._program,
self._db,
dna_sequence,
entrez_query=self._build_entrez_query()
)
protein_id, accession_string, match_from, match_to =\
self._parse_blast_result(qblast_response)
return protein_id, accession_string, match_from, match_to
def _parse_blast_result(self, qblast_response):
"""Get the accession string and map it to a known protein ID.
Any hit in the response is relevant since it made it past the filter of
the Entrez query.
"""
blast_result_xml = ET.fromstring(qblast_response.read())
# Take whichever hit comes first.
hit_elements = blast_result_xml.iter(tag='Hit')
for hit_element in hit_elements:
hit_accession = hit_element.find('Hit_accession')
accession_string = hit_accession.text
protein_id =\
self._protein_accession_strings_to_ids[accession_string]
# TODO: What is an HSP?
hsp = hit_element.find('Hit_hsps').find('Hsp')
match_from = hsp.find('Hsp_hit-from').text
match_to = hsp.find('Hsp_hit-to').text
# Ignore subsequent matches.
# TODO: Handle value error in unlikely scenario that match from
# and match to don't come back as integers.
# TODO: Return a dict or named tuple.
return protein_id, accession_string, int(match_from), int(match_to)
return ('', '', '', '',) # No matches found.
def _build_entrez_query(self):
"""
Given ['NC_000852', 'NC_007346']
Return 'NC_000852[accession] OR NC_007346[accession]'.
"""
accession_strings = self._protein_accession_strings_to_ids.keys()
labeled_search_filters = [
protein_id + '[accession]' for protein_id in accession_strings
]
return ' OR '.join(labeled_search_filters)
```
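_build_entrez_query simply ORs the accession strings together with an [accession] field tag, as its docstring shows. A tiny sketch of that construction; the mapping below is an illustrative stand-in for the PROTEIN_ACCESSION_STRINGS_TO_IDS setting:

```python
accession_to_id = {"NC_000852": "protein_1", "NC_007346": "protein_2"}
entrez_query = " OR ".join(a + "[accession]" for a in accession_to_id)
print(entrez_query)  # NC_000852[accession] OR NC_007346[accession]
```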
#### File: management/commands/map_accession_strings.py
```python
import json
from Bio import Entrez
from django.conf import settings
from django.core.management import BaseCommand
class Command(BaseCommand):
help = 'For every protein ID we want to search against,\
query the NCBI database for its corresponding accession string.'
def handle(self, *args, **options):
protein_ids = settings.PROTEIN_ACCESSION_STRINGS_TO_IDS.values()
# TODO: replace with API key stored as environment variable.
Entrez.email = '<EMAIL>'
        searchHandle = Entrez.esearch(db='nucleotide', term=' OR '.join([
protein_id + '[accession]' for protein_id in protein_ids
]))
ids = Entrez.read(searchHandle)['IdList']
summary_response = Entrez.esummary(
db='nucleotide', id=','.join(ids), retmode='json'
)
summary_dict = json.loads(
summary_response.read().decode('UTF-8').strip()
)
for result_key in summary_dict['result']:
if result_key == 'uids':
continue
hit = summary_dict['result'][result_key]
print(hit['assemblyacc'], '=>', hit['caption'])
``` |
{
"source": "7irth/PyBot",
"score": 3
} |
#### File: 7irth/PyBot/pybot.py
```python
__author__ = '<NAME> <<EMAIL>>'
version = '0.0.5'
from time import time, sleep
import sudoku
import crossword
import messages
# import reddit
import rss
debug = True
timings = True
functions = ['sudoku', 'crossword', 'facebook', 'whatsapp', 'reddit',
'rss', 'kill all humans']
tom_delay = 1
def choose():
picked = input('Pick a function - ' + str(functions) + ': ')
while picked not in functions:
picked = input("Invalid choice, try again foo': ")
if picked == 'exit':
exit()
if picked == 'sudoku':
sudoku.stuff.go()
elif picked == 'crossword':
crossword.stuff.go()
elif picked == 'facebook':
messages.fb.go()
elif picked == 'whatsapp':
messages.whatsapp.go()
# elif picked == 'reddit':
# reddit.stuff.go()
elif picked == 'rss':
rss.stuff.go()
def chill_out_for_a_bit(extra_delay=0):
delay = tom_delay + extra_delay
if timings:
print('sleeping for', delay, 'seconds' if delay != 1 else 'second')
sleep(delay)
class Timer:
def __init__(self, label):
self.label = label
def __enter__(self):
if timings:
print(self.label, end=' ')
self.start = time()
return self
def __exit__(self, *args):
self.time = time() - self.start
if timings:
print('took', str(round(self.time, 5)), 'seconds')
if __name__ == '__main__':
# debug = timings = True if input('Debug? (y/n) ') == 'y' else False
print('PyBot! v' + version + ('-debug' if debug else ''))
# print("Enter time delay between steps -")
# tom_delay = float(input((
# "(1 second works for me, might need more for slower computers): ")))
choose()
input("\nIronically, press enter to exit")
```
#### File: PyBot/sudoku/stuff.py
```python
__author__ = '<NAME> <<EMAIL>>'
from collections import OrderedDict
import pybot
from sudoku import solver
from utils.imaging import *
from utils.windows import *
runs = 3
def open_sudoku_on_chrome():
press('winkey')
pybot.chill_out_for_a_bit()
enter_phrase('google chrome')
press('enter')
pybot.chill_out_for_a_bit()
press_hold_release('ctrl', 't')
pybot.chill_out_for_a_bit()
enter_phrase('websudoku.com')
press('enter')
# press_hold_release("winkey", "up_arrow")
def switch_to_evil():
pass
# size of ↴ shaped box with a top left corner at (x, y)
def get_box_size(puzzle, x, y):
first_x, first_y = int(x), int(y)
while close_enough(puzzle[x, y], puzzle[x + 1, y]): # check row
x += 1
while close_enough(puzzle[x, y], puzzle[x, y + 1]): # check col
y += 1
return x - first_x + 1, y - first_y + 1 # x_size, y_size ↴
# returns coordinates and size of puzzle
def find_puzzle(sample_in=screen_grab()):
columns, rows = sample_in.size
min_x_size, min_y_size = 250, 250
init_pixel = (102, 102, 153) # from target image
sample = sample_in.load()
x, y = -1, -1
while y + 1 < rows:
y += 1 # next row
x = -1 # reset column
while x + 1 < columns:
x += 1 # next column
if close_enough(sample[x, y], init_pixel, 3): # first pixel match
size = get_box_size(sample, x, y)
if size > (min_x_size, min_y_size):
return x, y, size[0], size[1]
return None, None, None, None
# returns array of given values in puzzle
def get_puzzle(sudoku_img):
puzzle = sudoku_img.load()
puzzle_size = 9
# initialize new puzzle
sudoku = [0 for _ in range(81)]
border = puzzle[0, 0]
# find top left cell
for i in range(puzzle_size):
if puzzle[i, i] != border:
x, y = i, i
x_size, y_size = get_box_size(puzzle, x, y)
for row in range(puzzle_size):
for col in range(puzzle_size):
# correct box size
x_size, y_size = get_box_size(puzzle, x, y)
cell = sudoku_img.crop((x, y, x + x_size, y + y_size))
if pybot.debug:
print_img(cell, 'all/' + str(row * puzzle_size + col))
# retrieve value from OCR and fill in puzzle
try:
sudoku[row * puzzle_size + col] = \
int(tesser(cell, '10', 'digits'))
except ValueError:
pass
# compensate for thicker column breaks
x += x_size + (2 if col in [2, 5] else 1)
# compensate for row breaks
y += y_size + (2 if row in [2, 5] else 1)
x -= x_size * puzzle_size + 11 # reset to start
# ↳ compensate for compensations
return sudoku
return sudoku
# returns array of given values in puzzle using magic numbers
def get_puzzle_hack(sudoku):
loaded = sudoku.load()
# magic numbers for character recognition
numbers = OrderedDict()
numbers[(9, 22, 15)] = []
numbers[(6, 12, 15)] = []
numbers[(4, 12, 19)] = []
numbers[(8, 13, 20)] = []
numbers[(5, 14, 14)] = []
numbers[(7, 22, 9)] = []
numbers[(2, 21, 24)] = []
numbers[(3, 13, 11)] = []
numbers[(1, 15, 10)] = []
got = []
for row in range(9):
for col in range(9):
for number in numbers:
x = number[1] + col * 33 # width of the box
y = number[2] + row * 33 # height of the box
if col > 2:
x += 1
if col > 5:
x += 1
if (row, col) not in got and (loaded[x, y] != (255, 255, 255)):
numbers[number].append((row, col))
got.append((row, col))
# initialize new puzzle
puzzle = [0 for _ in range(81)]
# fill with given values
for value in numbers:
for coords in numbers[value]:
puzzle[coords[0] * 9 + coords[1]] = value[0]
return puzzle
# takes in picture of puzzle, then solves and submits it
def solve_puzzle(sudoku_img):
sudoku = solver.Sudoku()
found = False
with pybot.Timer('getting numbers the easy way'):
if sudoku.get_input(get_puzzle_hack(sudoku_img)):
found = True
if not found:
with pybot.Timer('getting numbers the hard way'):
if sudoku.get_input(get_puzzle(sudoku_img)):
found = True
if found:
with pybot.Timer('solving the puzzle'):
sudoku.solve()
submit_puzzle([number for row in sudoku.sudoku for number in row])
else:
# TODO: refresh and try again
raise SudokuException
class SudokuException(Exception):
pass
def submit_puzzle(solved):
for number in solved:
press(str(number), 'tab', delay=0.005)
press_hold_release('shift', 'tab')
press('enter')
def next_puzzle(old_x, old_y, old_x_size, old_y_size):
global runs
if runs > 0:
pybot.chill_out_for_a_bit()
bring_search = screen_grab(old_x, old_y, old_x_size, old_y_size)
with pybot.Timer('finding the button for next puzzle'):
buttons = find_buttons(bring_search)
try:
bring_x, bring_y, bring_x_size, bring_y_size = \
buttons['Bring on a new puzzle!']
bring_x += bring_x_size // 2 + old_x
bring_y += bring_y_size // 2 + old_y
move(bring_x, bring_y)
left_click()
pybot.chill_out_for_a_bit(1)
# increased search area for next puzzle
old_x -= 50
old_y -= 50
next_search = screen_grab(old_x, old_y,
old_x_size + 100, old_y_size + 100)
with pybot.Timer('finding the next puzzle'):
next_x, next_y, x_size, y_size = find_puzzle(next_search)
if next_x is None:
if pybot.debug:
print_img(next_search, 'send_to_tirth/next_sudoku')
print("Couldn't find puzzle this time :(")
else:
next_x += old_x
next_y += old_y
move(next_x + 5, next_y + 5)
left_click()
solve_puzzle((screen_grab(next_x, next_y, x_size, y_size)))
runs -= 1
print(runs, 'runs' if runs != 1 else 'run', 'left')
next_puzzle(next_x, next_y, x_size, y_size)
except KeyError:
if pybot.debug:
print_img(bring_search, 'send_to_tirth/bring_search')
print("Couldn't find button for next puzzle")
else:
print("You get the point")
def go():
global runs
runs = int(float(input('Runs through the puzzle (try at least 2): ')))
print("\nPlease don't move and/or breathe while the bot is working,\n"
"and keep your arms and legs inside the ride at all times\n")
open_sudoku_on_chrome()
pybot.chill_out_for_a_bit(2)
initial_search = screen_grab()
with pybot.Timer('finding the puzzle'):
x, y, x_size, y_size = find_puzzle(initial_search)
if x is None:
if pybot.debug:
print_img(initial_search, "send_to_tirth/no_joy")
input("Couldn't find puzzle!")
else:
print("Found puzzle! Going to try", runs, "runs")
puzzle = screen_grab(x, y, x_size, y_size)
move(x + 5, y + 5)
left_click()
if pybot.debug:
print_img(puzzle, "send_to_tirth/initial_found_puzzle")
try:
solve_puzzle(puzzle)
next_puzzle(x, y, x_size, y_size)
except SudokuException:
print("Couldn't solve puzzle :(")
```
#### File: PyBot/tests/test_crossword.py
```python
__author__ = '<NAME> <<EMAIL>>'
import unittest
from os import path
import crossword.solver as sol
class TestCrossword(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
cells = 13
sample_puzzle = path.realpath('..') + '\\crossword.txt'
sample_answers = path.realpath('..') + '\\answers.json'
across, down = sol.read_guardian_puzzle(sample_puzzle)
crossword = sol.Crossword(cells, across, down, sample_answers)
crossword.fill_answers()
self.assertNotEqual((sum(line.count(sol.delimiter)
for line in crossword.puzzle)), 63)
while not crossword.fill_answers():
crossword.fill_clues(None, None, True) # reset and shuffle
self.assertEqual((sum(line.count(sol.delimiter)
for line in crossword.puzzle)), 63)
self.assertEqual(sum(len(a) for a in crossword.answers),
sum(c.length for c in crossword.clues))
if __name__ == '__main__':
unittest.main(exit=False)
```
#### File: PyBot/tests/test_sudoku.py
```python
__author__ = '<NAME> <<EMAIL>>'
import unittest
from sudoku import solver
import time
class TestSudoku(unittest.TestCase):
def setUp(self):
self.sudoku = solver.Sudoku(max_iterations=1000, debug_print=False)
easy = ('830076042'
'600300097'
'000082100'
'090030005'
'026000730'
'300020010'
'003460000'
'170003006'
'260790054')
medium = ('050000006'
'480090003'
'903800000'
'017004000'
'600172005'
'000500810'
'000003508'
'700050041'
'800000090')
hard = ('003105060'
'000004008'
'060000507'
'056000000'
'094602150'
'000000620'
'509000040'
'600400000'
'040203900')
evil = ('005090400'
'700046000'
'000300090'
'600000020'
'090158030'
'080000009'
'060005000'
'000920001'
'008010900')
# complete
done = ('391286574'
'487359126'
'652714839'
'875431692'
'213967485'
'964528713'
'149673258'
'538142967'
'726895341')
# supposedly the world's hardest?
everest = ('800000000'
'003600000'
'070090200'
'050007000'
'000045700'
'000100030'
'001000068'
'008500010'
'090000400')
self.puzzle = evil # choose puzzle
def test_solve(self):
self.assertTrue(self.sudoku.get_input(self.puzzle))
self.assertTrue(self.sudoku.solve())
print('done in', self.sudoku.current)
def test_efficiency(self):
iterations, times = [], []
validity = 100
for i in range(validity):
if self.sudoku.get_input(self.puzzle):
start = time.clock()
self.assertTrue(self.sudoku.solve())
end = time.clock()
times.append(end - start)
progress = i / (validity / 100)
if progress % 10.0 == 0.0:
print(str(progress) + "%")
iterations.append(self.sudoku.current)
self.sudoku = solver.Sudoku(max_iterations=1000)
self.sudoku.get_input(self.puzzle)
print('--')
print('after', len(iterations), 'runs')
print('min iters:', str(min(iterations)) + ',',
'max iters:', max(iterations))
print('min time: ', str(round(min(times) * 1000, 2)) + ',',
'max time:', round(max(times) * 1000, 2))
print('average iters:', sum(iterations) / len(iterations))
print('average time: ', round((sum(times) / len(times) * 1000), 2))
if __name__ == '__main__':
unittest.main(exit=False)
``` |
{
"source": "7iW/YellowFin_Pytorch",
"score": 2
} |
#### File: YellowFin_Pytorch/word_language_model/main.py
```python
import argparse
import time
import math
import torch
import torch.nn as nn
from torch.autograd import Variable
import numpy as np
import data
import model
import sys
import os
sys.path.append("../tuner_utils")
from yellowfin import YFOptimizer
from debug_plot import plot_func
import logging
parser = argparse.ArgumentParser(description='PyTorch PennTreeBank RNN/LSTM Language Model')
parser.add_argument('--data', type=str, default='./data/penn',
help='location of the data corpus')
parser.add_argument('--model', type=str, default='LSTM',
help='type of recurrent net (RNN_TANH, RNN_RELU, LSTM, GRU)')
parser.add_argument('--emsize', type=int, default=200,
help='size of word embeddings')
parser.add_argument('--nhid', type=int, default=200,
help='number of hidden units per layer')
parser.add_argument('--nlayers', type=int, default=2,
help='number of layers')
parser.add_argument('--lr', type=float, default=20,
help='initial learning rate')
parser.add_argument('--clip', type=float, default=1.0,
help='gradient clipping')
parser.add_argument('--epochs', type=int, default=40,
help='upper epoch limit')
parser.add_argument('--batch_size', type=int, default=20, metavar='N',
help='batch size')
parser.add_argument('--bptt', type=int, default=35,
help='sequence length')
parser.add_argument('--dropout', type=float, default=0.2,
help='dropout applied to layers (0 = no dropout)')
parser.add_argument('--tied', action='store_true',
help='tie the word embedding and softmax weights')
parser.add_argument('--seed', type=int, default=1111,
help='random seed')
parser.add_argument('--cuda', action='store_true',
help='use CUDA')
parser.add_argument('--log-interval', type=int, default=250, metavar='N',
help='report interval')
parser.add_argument('--save', type=str, default='model.pt',
help='path to save the final model')
parser.add_argument('--logdir', type=str, default='.',
help='folder for the logs')
parser.add_argument('--opt_method', type=str, default='YF',
help='select the optimizer you are using')
parser.add_argument('--lr_thresh', type=float, default=1.0)
args = parser.parse_args()
if not os.path.isdir(args.logdir):
os.makedirs(args.logdir)
logging.basicConfig(
level=logging.DEBUG,
handlers=[
logging.FileHandler(args.logdir + "/num.log", mode='w'),
logging.StreamHandler(),
],
)
# Set the random seed manually for reproducibility.
torch.manual_seed(args.seed)
if torch.cuda.is_available():
if not args.cuda:
logging.info("WARNING: You have a CUDA device, so you should probably run with --cuda")
else:
torch.cuda.manual_seed(args.seed)
###############################################################################
# Load data
###############################################################################
corpus = data.Corpus(args.data)
def batchify(data, bsz):
# Work out how cleanly we can divide the dataset into bsz parts.
nbatch = data.size(0) // bsz
# Trim off any extra elements that wouldn't cleanly fit (remainders).
data = data.narrow(0, 0, nbatch * bsz)
# Evenly divide the data across the bsz batches.
data = data.view(bsz, -1).t().contiguous()
if args.cuda:
data = data.cuda()
return data
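# Illustration (hypothetical sizes): with a 1,000,007-token corpus and bsz=20,
# nbatch = 50,000 and the 7 leftover tokens are trimmed; the result is a
# 50,000 x 20 tensor whose column j is one contiguous slice of the corpus,
# and get_batch() then reads bptt-long windows down the rows of that tensor.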
eval_batch_size = 10
train_data = batchify(corpus.train, args.batch_size)
val_data = batchify(corpus.valid, eval_batch_size)
test_data = batchify(corpus.test, eval_batch_size)
###############################################################################
# Build the model
###############################################################################
ntokens = len(corpus.dictionary)
model = model.RNNModel(args.model, ntokens, args.emsize, args.nhid, args.nlayers, args.dropout, args.tied)
if args.cuda:
model.cuda()
criterion = nn.CrossEntropyLoss()
###############################################################################
# Training code
###############################################################################
def repackage_hidden(h):
"""Wraps hidden states in new Variables, to detach them from their history."""
if isinstance(h, torch.Tensor):
return h.detach()
else:
return tuple(repackage_hidden(v) for v in h)
def get_batch(source, i, evaluation=False):
seq_len = min(args.bptt, len(source) - 1 - i)
data = Variable(source[i:i+seq_len], volatile=evaluation)
target = Variable(source[i+1:i+1+seq_len].view(-1))
return data, target
def evaluate(data_source):
# Turn on evaluation mode which disables dropout.
model.eval()
total_loss = 0
ntokens = len(corpus.dictionary)
hidden = model.init_hidden(eval_batch_size)
for i in range(0, data_source.size(0) - 1, args.bptt):
data, targets = get_batch(data_source, i, evaluation=True)
output, hidden = model(data, hidden)
output_flat = output.view(-1, ntokens)
total_loss += len(data) * criterion(output_flat, targets).data
hidden = repackage_hidden(hidden)
return total_loss.item() / float(len(data_source))
def train(opt, loss_list,\
local_curv_list,\
max_curv_list,\
min_curv_list,\
lr_list,\
lr_t_list,\
mu_t_list,\
dr_list,\
mu_list,\
dist_list,\
grad_var_list,\
lr_g_norm_list,\
lr_g_norm_squared_list,\
move_lr_g_norm_list,\
move_lr_g_norm_squared_list,\
lr_grad_norm_clamp_act_list,\
fast_view_act_list):
# Turn on training mode which enables dropout.
model.train()
total_loss = 0
start_time = time.time()
ntokens = len(corpus.dictionary)
hidden = model.init_hidden(args.batch_size)
train_loss_list = []
for batch, i in enumerate(range(0, train_data.size(0) - 1, args.bptt)):
data, targets = get_batch(train_data, i)
# Starting each batch, we detach the hidden state from how it was previously produced.
# If we didn't, the model would try backpropagating all the way to start of the dataset.
hidden = repackage_hidden(hidden)
model.zero_grad()
output, hidden = model(data, hidden)
loss = criterion(output.view(-1, ntokens), targets)
loss.backward()
# `clip_grad_norm` helps prevent the exploding gradient problem in RNNs / LSTMs.
torch.nn.utils.clip_grad_norm(model.parameters(), args.clip)
optimizer.step()
# for p in model.parameters():
# p.data.add_(-lr, p.grad.data)
# for group in optimizer._optimizer.param_groups:
# print group['lr'], group['momentum']
loss_list.append(loss.data.item())
if args.opt_method == 'YF':
local_curv_list.append(opt._global_state['grad_norm_squared'] )
max_curv_list.append(opt._h_max)
min_curv_list.append(opt._h_min)
lr_list.append(opt._lr)
mu_list.append(opt._mu)
dr_list.append((opt._h_max + 1e-6) / (opt._h_min + 1e-6))
dist_list.append(opt._dist_to_opt)
grad_var_list.append(opt._grad_var)
lr_g_norm_list.append(opt._lr * np.sqrt(opt._global_state['grad_norm_squared'].cpu() ) )
lr_g_norm_squared_list.append(opt._lr * opt._global_state['grad_norm_squared'] )
move_lr_g_norm_list.append(opt._optimizer.param_groups[0]["lr"] * np.sqrt(opt._global_state['grad_norm_squared'].cpu() ) )
move_lr_g_norm_squared_list.append(opt._optimizer.param_groups[0]["lr"] * opt._global_state['grad_norm_squared'] )
lr_t_list.append(opt._lr_t)
mu_t_list.append(opt._mu_t)
total_loss += loss.data
train_loss_list.append(loss.data.item() )
if batch % args.log_interval == 0 and batch > 0:
cur_loss = total_loss.item() / args.log_interval
elapsed = time.time() - start_time
logging.info('| epoch {:3d} | {:5d}/{:5d} batches | lr {:02.2f} | ms/batch {:5.2f} | '
'loss {:5.2f} | ppl {:8.2f}'.format(
epoch, batch, len(train_data) // args.bptt, lr,
elapsed * 1000 / args.log_interval, cur_loss, math.exp(cur_loss)))
total_loss = 0
start_time = time.time()
return train_loss_list,\
loss_list,\
local_curv_list,\
max_curv_list,\
min_curv_list,\
lr_list,\
lr_t_list,\
mu_t_list,\
dr_list,\
mu_list,\
dist_list,\
grad_var_list,\
lr_g_norm_list,\
lr_g_norm_squared_list,\
move_lr_g_norm_list,\
move_lr_g_norm_squared_list,\
lr_grad_norm_clamp_act_list,\
fast_view_act_list
# Loop over epochs.
lr = args.lr
best_val_loss = None
# At any point you can hit Ctrl + C to break out of training early.
try:
if not os.path.isdir(args.logdir):
os.mkdir(args.logdir)
train_loss_list = []
val_loss_list = []
lr_list = []
mu_list = []
loss_list = []
local_curv_list = []
max_curv_list = []
min_curv_list = []
lr_g_norm_list = []
lr_list = []
lr_t_list = []
mu_t_list = []
dr_list = []
mu_list = []
dist_list = []
grad_var_list = []
lr_g_norm_list = []
lr_g_norm_squared_list = []
move_lr_g_norm_list = []
move_lr_g_norm_squared_list = []
lr_grad_norm_clamp_act_list = []
fast_view_act_list = []
if args.opt_method == "SGD":
logging.info("using SGD")
optimizer = torch.optim.SGD(model.parameters(), lr, momentum=0.0)
elif args.opt_method == "momSGD":
logging.info("using mom SGD")
optimizer = torch.optim.SGD(model.parameters(), lr, momentum=0.9)
elif args.opt_method == "YF":
logging.info("using YF")
optimizer = YFOptimizer(model.parameters() )
elif args.opt_method == "Adagrad":
logging.info("using Adagrad")
optimizer = torch.optim.Adagrad(model.parameters(), lr)
elif args.opt_method == "Adam":
logging.info("using Adam")
optimizer = torch.optim.Adam(model.parameters(), lr)
elif args.opt_method == "AMSG":
logging.info("using AMSG")
optimizer = torch.optim.Adam(model.parameters(), lr, amsgrad=True)
for epoch in range(1, args.epochs+1):
epoch_start_time = time.time()
#train_loss = train()
train_loss, \
loss_list, \
local_curv_list,\
max_curv_list,\
min_curv_list,\
lr_list,\
lr_t_list,\
mu_t_list,\
dr_list,\
mu_list,\
dist_list,\
grad_var_list,\
lr_g_norm_list,\
lr_g_norm_squared_list,\
move_lr_g_norm_list,\
move_lr_g_norm_squared_list,\
lr_grad_norm_clamp_act_list,\
fast_view_act_list = \
train(optimizer,
loss_list, \
local_curv_list,\
max_curv_list,\
min_curv_list,\
lr_list,\
lr_t_list,\
mu_t_list,\
dr_list,\
mu_list,\
dist_list,\
grad_var_list,\
lr_g_norm_list,\
lr_g_norm_squared_list,\
move_lr_g_norm_list,\
move_lr_g_norm_squared_list,\
lr_grad_norm_clamp_act_list,\
fast_view_act_list)
train_loss_list += train_loss
val_loss = evaluate(val_data)
val_loss_list.append(val_loss)
logging.info('-' * 89)
logging.info('| end of epoch {:3d} | time: {:5.2f}s | valid loss {:5.2f} | '
'valid ppl {:8.2f}'.format(epoch,
(time.time() - epoch_start_time),
val_loss, math.exp(val_loss)))
logging.info('-' * 89)
# Save the model if the validation loss is the best we've seen so far.
if not best_val_loss or val_loss < best_val_loss:
with open(args.logdir + "/" + args.save, 'wb') as f:
torch.save(model, f)
best_val_loss = val_loss
else:
# Anneal the learning rate if no improvement has been seen in the validation dataset.
lr /= 4.0
if args.opt_method == "YF":
optimizer.set_lr_factor(optimizer.get_lr_factor() / 4.0)
else:
for group in optimizer.param_groups:
group['lr'] /= 4.0
#if args.opt_method == "YF":
# mu_list.append(optimizer._mu)
# lr_list.append(optimizer._lr)
with open(args.logdir+"/loss.txt", "wb") as f:
np.savetxt(f, np.array(train_loss_list) )
with open(args.logdir+"/val_loss.txt", "wb") as f:
np.savetxt(f, np.array(val_loss_list) )
except KeyboardInterrupt:
logging.info('-' * 89)
logging.info('Exiting from training early')
# Load the best saved model.
# with open(args.save, 'rb') as f:
# model = torch.load(f)
# Run on test data.
test_loss = evaluate(test_data)
logging.info('=' * 89)
logging.info('| End of training | test loss {:5.2f} | test ppl {:8.2f}'.format(
test_loss, math.exp(test_loss)))
logging.info('=' * 89)
``` |
{
"source": "7Johnsz/PassAPI",
"score": 2
} |
#### File: src/schemas/models.py
```python
from fastapi import FastAPI
from fastapi.responses import JSONResponse
from typing import Optional
from pydantic import BaseModel
# Schemas
class Password(BaseModel):
    # Optional so the schema can be instantiated before a length is chosen.
    length: Optional[int] = None

    def set_length(self, length):
        self.length = length
        return self.length

password = Password()
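# Illustrative sketch only (not part of the original module): the schema is
# meant to be declared as a request body in a FastAPI route, for example:
#
#   app = FastAPI()
#
#   @app.post("/password")
#   async def make_password(body: Password):
#       return JSONResponse({"length": body.length})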
``` |
{
"source": "7Johnsz/RichPresence",
"score": 3
} |
#### File: 7Johnsz/RichPresence/main.py
```python
import time
import os
from pypresence import Presence
from colorama import Fore, Style
# Application
client_id = "Your Application ID"
# Create the Rich Presence client bound to your Application ID
RPC = Presence(client_id)
# Print a banner and a startup notice
def start():
os.system('cls')
banner = (f"""
{Fore.BLUE}{Style.BRIGHT}
╦┌─┐┬ ┬┌┐┌┌─┐┌─┐ │
║│ │├─┤│││└─┐┌─┘ │ https://github.com/7Johnsz
╚╝└─┘┴ ┴┘└┘└─┘└─┘ │
{Fore.RESET}{Style.RESET_ALL}
""")
print(banner)
print(Fore.GREEN + "[-] " + Fore.RESET + Style.BRIGHT + "Starting RichPresence" + Style.RESET_ALL)
# Try to connect the RPC client to your Discord application
def connectRpc():
RPC.connect()
# Attempt to establish connection
def connectLoop(conn=0):
if conn > 10:
return
try:
connectRpc()
except Exception as e:
        print(Fore.RED + "[!]" + Fore.RESET + Style.BRIGHT, e, Style.RESET_ALL)
        time.sleep(2)
        connectLoop(conn + 1)  # retry until the attempt limit above is reached
else:
update()
# Here you can customize your rich presence
def update():
print(Fore.GREEN + "[-] " + Fore.RESET + Style.BRIGHT + "Rich Presence is working" + Style.RESET_ALL)
try:
while True:
RPC.update(buttons=[{"label":"example", "url":"https://bit.ly/3nzNusS"}])
time.sleep(15)
except KeyboardInterrupt:
print("\n" + Fore.RED + "[!] " + Fore.RESET + Style.BRIGHT + "Keyboard Interrupt" + Style.RESET_ALL)
time.sleep(2)
RPC.clear()
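# Note (illustrative, not part of the original script): besides buttons,
# RPC.update() also accepts fields such as state=, details=, start= and
# large_image= in current pypresence releases, so the presence text and
# artwork can be set in the same call.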
# Calling the start function and starting the script
start()
connectLoop()
``` |
{
"source": "7kbird/chrome",
"score": 2
} |
#### File: platform/power_monitor/android_dumpsys_power_monitor.py
```python
import csv
import logging
from collections import defaultdict
from telemetry.core.platform import android_sysfs_platform
from telemetry.core.platform.power_monitor import sysfs_power_monitor
class DumpsysPowerMonitor(sysfs_power_monitor.SysfsPowerMonitor):
"""PowerMonitor that relies on the dumpsys batterystats to monitor the power
consumption of a single android application. This measure uses a heuristic
and is the same information end-users see with the battery application.
"""
def __init__(self, device):
"""Constructor.
Args:
device: DeviceUtils instance.
"""
super(DumpsysPowerMonitor, self).__init__(
android_sysfs_platform.AndroidSysfsPlatform(device))
self._device = device
def CanMonitorPower(self):
return self._device.old_interface.CanControlUsbCharging()
def StartMonitoringPower(self, browser):
super(DumpsysPowerMonitor, self).StartMonitoringPower(browser)
# Disable the charging of the device over USB. This is necessary because the
# device only collects information about power usage when the device is not
# charging.
self._device.old_interface.DisableUsbCharging()
def StopMonitoringPower(self):
if self._browser:
# pylint: disable=W0212
package = self._browser._browser_backend.package
cpu_stats = super(DumpsysPowerMonitor, self).StopMonitoringPower()
self._device.old_interface.EnableUsbCharging()
# By default, 'dumpsys batterystats' measures power consumption during the
# last unplugged period.
result = self._device.RunShellCommand(
'dumpsys batterystats -c %s' % package)
assert result, 'Dumpsys produced no output'
return super(DumpsysPowerMonitor, self).CombineResults(
cpu_stats, DumpsysPowerMonitor.ParseSamplingOutput(package, result))
@staticmethod
def ParseSamplingOutput(package, dumpsys_output):
"""Parse output of 'dumpsys batterystats -c'
See:
https://android.googlesource.com/platform/frameworks/base/+/master/core/java/android/os/BatteryStats.java
Returns:
Dictionary in the format returned by StopMonitoringPower().
"""
# Raw power usage samples.
out_dict = {}
out_dict['identifier'] = 'dumpsys'
out_dict['power_samples_mw'] = []
# The list of useful CSV columns.
# Index of the column containing the format version.
DUMP_VERSION_INDEX = 0
# Index of the column containing the type of the row.
ROW_TYPE_INDEX = 3
# Index for columns of type unique identifier ('uid')
# Index of the column containing the uid.
PACKAGE_UID_INDEX = 4
# Index of the column containing the application package.
PACKAGE_NAME_INDEX = 5
# Index for columns of type power use ('pwi')
# The column containing the uid of the item.
PWI_UID_INDEX = 1
    # The column containing the type of consumption. Only consumption since
    # the last charge is of interest here.
    PWI_AGGREGATION_INDEX = 2
    # The column containing the amount of power used, in mAh.
PWI_POWER_COMSUMPTION_INDEX = 5
csvreader = csv.reader(dumpsys_output)
uid_entries = {}
pwi_entries = defaultdict(list)
for entry in csvreader:
if entry[DUMP_VERSION_INDEX] not in ['8', '9']:
# Wrong file version.
break
if ROW_TYPE_INDEX >= len(entry):
continue
if entry[ROW_TYPE_INDEX] == 'uid':
current_package = entry[PACKAGE_NAME_INDEX]
assert current_package not in uid_entries
uid_entries[current_package] = entry[PACKAGE_UID_INDEX]
elif (PWI_POWER_COMSUMPTION_INDEX < len(entry) and
entry[ROW_TYPE_INDEX] == 'pwi' and
entry[PWI_AGGREGATION_INDEX] == 'l'):
pwi_entries[entry[PWI_UID_INDEX]].append(
float(entry[PWI_POWER_COMSUMPTION_INDEX]))
    # Find the uid for the given package.
if not package in uid_entries:
logging.warning('Unable to parse dumpsys output. ' +
'Please upgrade the OS version of the device.')
out_dict['energy_consumption_mwh'] = 0
return out_dict
uid = uid_entries[package]
consumptions_mah = pwi_entries[uid]
consumption_mah = sum(consumptions_mah)
# Converting at a nominal voltage of 4.0V, as those values are obtained by a
# heuristic, and 4.0V is the voltage we set when using a monsoon device.
consumption_mwh = consumption_mah * 4.0
out_dict['energy_consumption_mwh'] = consumption_mwh
return out_dict
```
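For reference, a minimal sketch of the arithmetic `ParseSamplingOutput` performs. The CSV rows below are hypothetical and only shaped to match the column indices defined above; the 4.0 V conversion mirrors the heuristic in the code:

```python
# Hypothetical rows shaped like the indices used above; not real device output.
rows = [
    ['9', '0', 'i', 'uid', '10071', 'com.example.app'],  # maps package -> uid
    ['9', '10071', 'l', 'pwi', 'uid', '3.5'],            # 3.5 mAh since last charge
    ['9', '10071', 'l', 'pwi', 'wifi', '1.5'],           # 1.5 mAh since last charge
]

uid = next(r[4] for r in rows if r[3] == 'uid' and r[5] == 'com.example.app')
mah = sum(float(r[5]) for r in rows if r[3] == 'pwi' and r[2] == 'l' and r[1] == uid)
print(mah * 4.0)  # 20.0 mWh at the nominal 4.0 V used by the monitor
```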
#### File: telemetry/page/page_runner.py
```python
import collections
import logging
import optparse
import os
import random
import sys
import tempfile
import time
from telemetry import decorators
from telemetry.core import browser_finder
from telemetry.core import browser_info
from telemetry.core import exceptions
from telemetry.core import util
from telemetry.core import wpr_modes
from telemetry.core.platform.profiler import profiler_finder
from telemetry.page import page_filter
from telemetry.page import page_test
from telemetry.page.actions import navigate
from telemetry.page.actions import page_action
from telemetry.results import results_options
from telemetry.util import cloud_storage
from telemetry.util import exception_formatter
from telemetry.value import failure
from telemetry.value import skip
class _RunState(object):
def __init__(self):
self.browser = None
self._append_to_existing_wpr = False
self._last_archive_path = None
self._first_browser = True
self.first_page = collections.defaultdict(lambda: True)
self.profiler_dir = None
def StartBrowserIfNeeded(self, test, page_set, page, possible_browser,
credentials_path, archive_path, finder_options):
started_browser = not self.browser
# Create a browser.
if not self.browser:
test.CustomizeBrowserOptionsForSinglePage(page, finder_options)
self.browser = possible_browser.Create()
self.browser.credentials.credentials_path = credentials_path
# Set up WPR path on the new browser.
self.browser.SetReplayArchivePath(archive_path,
self._append_to_existing_wpr,
page_set.make_javascript_deterministic)
self._last_archive_path = page.archive_path
test.WillStartBrowser(self.browser)
self.browser.Start()
test.DidStartBrowser(self.browser)
if self._first_browser:
self._first_browser = False
self.browser.credentials.WarnIfMissingCredentials(page_set)
logging.info('OS: %s %s',
self.browser.platform.GetOSName(),
self.browser.platform.GetOSVersionName())
if self.browser.supports_system_info:
system_info = self.browser.GetSystemInfo()
if system_info.model_name:
logging.info('Model: %s', system_info.model_name)
if system_info.gpu:
for i, device in enumerate(system_info.gpu.devices):
logging.info('GPU device %d: %s', i, device)
if system_info.gpu.aux_attributes:
logging.info('GPU Attributes:')
for k, v in sorted(system_info.gpu.aux_attributes.iteritems()):
logging.info(' %-20s: %s', k, v)
if system_info.gpu.feature_status:
logging.info('Feature Status:')
for k, v in sorted(system_info.gpu.feature_status.iteritems()):
logging.info(' %-20s: %s', k, v)
if system_info.gpu.driver_bug_workarounds:
logging.info('Driver Bug Workarounds:')
for workaround in system_info.gpu.driver_bug_workarounds:
logging.info(' %s', workaround)
else:
logging.info('No GPU devices')
else:
logging.warning('System info not supported')
else:
# Set up WPR path if it changed.
if page.archive_path and self._last_archive_path != page.archive_path:
self.browser.SetReplayArchivePath(
page.archive_path,
self._append_to_existing_wpr,
page_set.make_javascript_deterministic)
self._last_archive_path = page.archive_path
if self.browser.supports_tab_control and test.close_tabs_before_run:
# Create a tab if there's none.
if len(self.browser.tabs) == 0:
self.browser.tabs.New()
# Ensure only one tab is open, unless the test is a multi-tab test.
if not test.is_multi_tab_test:
while len(self.browser.tabs) > 1:
self.browser.tabs[-1].Close()
# Must wait for tab to commit otherwise it can commit after the next
# navigation has begun and RenderFrameHostManager::DidNavigateMainFrame()
# will cancel the next navigation because it's pending. This manifests as
      # the first navigation in a PageSet freezing indefinitely because the
# navigation was silently cancelled when |self.browser.tabs[0]| was
# committed. Only do this when we just started the browser, otherwise
# there are cases where previous pages in a PageSet never complete
# loading so we'll wait forever.
if started_browser:
self.browser.tabs[0].WaitForDocumentReadyStateToBeComplete()
def StopBrowser(self):
if self.browser:
self.browser.Close()
self.browser = None
# Restarting the state will also restart the wpr server. If we're
# recording, we need to continue adding into the same wpr archive,
# not overwrite it.
self._append_to_existing_wpr = True
def StartProfiling(self, page, finder_options):
if not self.profiler_dir:
self.profiler_dir = tempfile.mkdtemp()
output_file = os.path.join(self.profiler_dir, page.file_safe_name)
is_repeating = (finder_options.page_repeat != 1 or
finder_options.pageset_repeat != 1)
if is_repeating:
output_file = util.GetSequentialFileName(output_file)
self.browser.StartProfiling(finder_options.profiler, output_file)
def StopProfiling(self):
if self.browser:
self.browser.StopProfiling()
class PageState(object):
def __init__(self, page, tab):
self.page = page
self.tab = tab
self._did_login = False
def PreparePage(self, test=None):
if self.page.is_file:
server_started = self.tab.browser.SetHTTPServerDirectories(
self.page.page_set.serving_dirs | set([self.page.serving_dir]))
if server_started and test:
test.DidStartHTTPServer(self.tab)
if self.page.credentials:
if not self.tab.browser.credentials.LoginNeeded(
self.tab, self.page.credentials):
raise page_test.Failure('Login as ' + self.page.credentials + ' failed')
self._did_login = True
if test:
if test.clear_cache_before_each_run:
self.tab.ClearCache(force=True)
def ImplicitPageNavigation(self, test=None):
"""Executes the implicit navigation that occurs for every page iteration.
This function will be called once per page before any actions are executed.
"""
if test:
test.WillNavigateToPage(self.page, self.tab)
test.RunNavigateSteps(self.page, self.tab)
test.DidNavigateToPage(self.page, self.tab)
else:
i = navigate.NavigateAction()
i.RunAction(self.page, self.tab, None)
def CleanUpPage(self, test):
test.CleanUpAfterPage(self.page, self.tab)
if self.page.credentials and self._did_login:
self.tab.browser.credentials.LoginNoLongerNeeded(
self.tab, self.page.credentials)
def AddCommandLineArgs(parser):
page_filter.PageFilter.AddCommandLineArgs(parser)
results_options.AddResultsOptions(parser)
# Page set options
group = optparse.OptionGroup(parser, 'Page set ordering and repeat options')
group.add_option('--pageset-shuffle', action='store_true',
dest='pageset_shuffle',
help='Shuffle the order of pages within a pageset.')
group.add_option('--pageset-shuffle-order-file',
dest='pageset_shuffle_order_file', default=None,
help='Filename of an output of a previously run test on the current '
'pageset. The tests will run in the same order again, overriding '
'what is specified by --page-repeat and --pageset-repeat.')
group.add_option('--page-repeat', default=1, type='int',
help='Number of times to repeat each individual page '
'before proceeding with the next page in the pageset.')
group.add_option('--pageset-repeat', default=1, type='int',
help='Number of times to repeat the entire pageset.')
parser.add_option_group(group)
# WPR options
group = optparse.OptionGroup(parser, 'Web Page Replay options')
group.add_option('--use-live-sites',
dest='use_live_sites', action='store_true',
help='Run against live sites and ignore the Web Page Replay archives.')
parser.add_option_group(group)
parser.add_option('-d', '--also-run-disabled-tests',
dest='run_disabled_tests',
action='store_true', default=False,
help='Ignore @Disabled and @Enabled restrictions.')
def ProcessCommandLineArgs(parser, args):
page_filter.PageFilter.ProcessCommandLineArgs(parser, args)
# Page set options
if args.pageset_shuffle_order_file and not args.pageset_shuffle:
parser.error('--pageset-shuffle-order-file requires --pageset-shuffle.')
if args.page_repeat < 1:
parser.error('--page-repeat must be a positive integer.')
if args.pageset_repeat < 1:
parser.error('--pageset-repeat must be a positive integer.')
def _PrepareAndRunPage(test, page_set, expectations, finder_options,
browser_options, page, credentials_path,
possible_browser, results, state):
if finder_options.use_live_sites:
browser_options.wpr_mode = wpr_modes.WPR_OFF
elif browser_options.wpr_mode != wpr_modes.WPR_RECORD:
browser_options.wpr_mode = (
wpr_modes.WPR_REPLAY
if page.archive_path and os.path.isfile(page.archive_path)
else wpr_modes.WPR_OFF)
max_attempts = test.attempts
attempt_num = 0
while attempt_num < max_attempts:
attempt_num = attempt_num + 1
try:
results.WillAttemptPageRun(attempt_num, max_attempts)
if test.RestartBrowserBeforeEachPage() or page.startup_url:
state.StopBrowser()
# If we are restarting the browser for each page customize the per page
# options for just the current page before starting the browser.
state.StartBrowserIfNeeded(test, page_set, page, possible_browser,
credentials_path, page.archive_path,
finder_options)
if not page.CanRunOnBrowser(browser_info.BrowserInfo(state.browser)):
logging.info('Skip test for page %s because browser is not supported.'
% page.url)
return
expectation = expectations.GetExpectationForPage(state.browser, page)
_WaitForThermalThrottlingIfNeeded(state.browser.platform)
if finder_options.profiler:
state.StartProfiling(page, finder_options)
try:
_RunPage(test, page, state, expectation, results)
_CheckThermalThrottling(state.browser.platform)
except exceptions.TabCrashException as e:
if test.is_multi_tab_test:
logging.error('Aborting multi-tab test after tab %s crashed',
page.url)
raise
logging.warning(str(e))
state.StopBrowser()
if finder_options.profiler:
state.StopProfiling()
if (test.StopBrowserAfterPage(state.browser, page)):
state.StopBrowser()
return
except exceptions.BrowserGoneException as e:
state.StopBrowser()
if attempt_num == max_attempts:
logging.error('Aborting after too many retries')
raise
if test.is_multi_tab_test:
logging.error('Aborting multi-tab test after browser crashed')
raise
logging.warning(str(e))
@decorators.Cache
def _UpdatePageSetArchivesIfChanged(page_set):
# Attempt to download the credentials file.
if page_set.credentials_path:
try:
cloud_storage.GetIfChanged(
os.path.join(page_set.base_dir, page_set.credentials_path))
except (cloud_storage.CredentialsError, cloud_storage.PermissionError,
cloud_storage.CloudStorageError) as e:
logging.warning('Cannot retrieve credential file %s due to cloud storage '
'error %s', page_set.credentials_path, str(e))
# Scan every serving directory for .sha1 files
# and download them from Cloud Storage. Assume all data is public.
all_serving_dirs = page_set.serving_dirs.copy()
# Add individual page dirs to all serving dirs.
for page in page_set:
if page.is_file:
all_serving_dirs.add(page.serving_dir)
# Scan all serving dirs.
for serving_dir in all_serving_dirs:
if os.path.splitdrive(serving_dir)[1] == '/':
raise ValueError('Trying to serve root directory from HTTP server.')
for dirpath, _, filenames in os.walk(serving_dir):
for filename in filenames:
path, extension = os.path.splitext(
os.path.join(dirpath, filename))
if extension != '.sha1':
continue
cloud_storage.GetIfChanged(path, page_set.bucket)
def Run(test, page_set, expectations, finder_options, results):
"""Runs a given test against a given page_set with the given options."""
test.ValidatePageSet(page_set)
# Create a possible_browser with the given options.
try:
possible_browser = browser_finder.FindBrowser(finder_options)
except browser_finder.BrowserTypeRequiredException, e:
sys.stderr.write(str(e) + '\n')
sys.exit(-1)
if not possible_browser:
sys.stderr.write(
'No browser found. Available browsers:\n' +
'\n'.join(browser_finder.GetAllAvailableBrowserTypes(finder_options)) +
'\n')
sys.exit(-1)
browser_options = possible_browser.finder_options.browser_options
browser_options.browser_type = possible_browser.browser_type
test.CustomizeBrowserOptions(browser_options)
should_run = decorators.IsEnabled(test, possible_browser)
should_run = should_run or finder_options.run_disabled_tests
if not should_run:
logging.warning('You are trying to run a disabled test.')
logging.warning('Pass --also-run-disabled-tests to squelch this message.')
return
if possible_browser.IsRemote():
possible_browser.RunRemote()
sys.exit(0)
# Reorder page set based on options.
pages = _ShuffleAndFilterPageSet(page_set, finder_options)
if (not finder_options.use_live_sites and
browser_options.wpr_mode != wpr_modes.WPR_RECORD):
_UpdatePageSetArchivesIfChanged(page_set)
pages = _CheckArchives(page_set, pages, results)
# Verify credentials path.
credentials_path = None
if page_set.credentials_path:
credentials_path = os.path.join(os.path.dirname(page_set.file_path),
page_set.credentials_path)
if not os.path.exists(credentials_path):
credentials_path = None
# Set up user agent.
browser_options.browser_user_agent_type = page_set.user_agent_type or None
if finder_options.profiler:
profiler_class = profiler_finder.FindProfiler(finder_options.profiler)
profiler_class.CustomizeBrowserOptions(browser_options.browser_type,
finder_options)
for page in list(pages):
if not test.CanRunForPage(page):
results.WillRunPage(page)
logging.debug('Skipping test: it cannot run for %s', page.url)
results.AddValue(skip.SkipValue(page, 'Test cannot run'))
results.DidRunPage(page)
pages.remove(page)
if not pages:
return
state = _RunState()
# TODO(dtu): Move results creation and results_for_current_run into RunState.
try:
test.WillRunTest(finder_options)
for _ in xrange(0, finder_options.pageset_repeat):
for page in pages:
if test.IsExiting():
break
test.WillRunPageRepeats(page)
for _ in xrange(0, finder_options.page_repeat):
results.WillRunPage(page)
try:
_PrepareAndRunPage(
test, page_set, expectations, finder_options, browser_options,
page, credentials_path, possible_browser, results, state)
finally:
discard_run = False
if state.first_page[page]:
state.first_page[page] = False
if test.discard_first_result:
discard_run = True
results.DidRunPage(page, discard_run=discard_run)
test.DidRunPageRepeats(page)
if (not test.max_failures is None and
len(results.failures) > test.max_failures):
logging.error('Too many failures. Aborting.')
test.RequestExit()
finally:
test.DidRunTest(state.browser, results)
state.StopBrowser()
return
def _ShuffleAndFilterPageSet(page_set, finder_options):
if finder_options.pageset_shuffle_order_file:
return page_set.ReorderPageSet(finder_options.pageset_shuffle_order_file)
pages = [page for page in page_set.pages[:]
if not page.disabled and page_filter.PageFilter.IsSelected(page)]
if finder_options.pageset_shuffle:
random.Random().shuffle(pages)
return pages
def _CheckArchives(page_set, pages, results):
"""Returns a subset of pages that are local or have WPR archives.
Logs warnings if any are missing."""
page_set_has_live_sites = False
for page in pages:
if not page.is_local:
page_set_has_live_sites = True
break
# Potential problems with the entire page set.
if page_set_has_live_sites:
if not page_set.archive_data_file:
logging.warning('The page set is missing an "archive_data_file" '
'property. Skipping any live sites. To include them, '
'pass the flag --use-live-sites.')
if not page_set.wpr_archive_info:
logging.warning('The archive info file is missing. '
'To fix this, either add svn-internal to your '
'.gclient using http://goto/read-src-internal, '
'or create a new archive using record_wpr.')
# Potential problems with individual pages.
pages_missing_archive_path = []
pages_missing_archive_data = []
for page in pages:
if page.is_local:
continue
if not page.archive_path:
pages_missing_archive_path.append(page)
elif not os.path.isfile(page.archive_path):
pages_missing_archive_data.append(page)
if pages_missing_archive_path:
logging.warning('The page set archives for some pages do not exist. '
'Skipping those pages. To fix this, record those pages '
'using record_wpr. To ignore this warning and run '
'against live sites, pass the flag --use-live-sites.')
if pages_missing_archive_data:
logging.warning('The page set archives for some pages are missing. '
'Someone forgot to check them in, or they were deleted. '
'Skipping those pages. To fix this, record those pages '
'using record_wpr. To ignore this warning and run '
'against live sites, pass the flag --use-live-sites.')
for page in pages_missing_archive_path + pages_missing_archive_data:
results.WillRunPage(page)
results.AddValue(failure.FailureValue.FromMessage(
page, 'Page set archive doesn\'t exist.'))
results.DidRunPage(page)
return [page for page in pages if page not in
pages_missing_archive_path + pages_missing_archive_data]
def _RunPage(test, page, state, expectation, results):
if expectation == 'skip':
logging.debug('Skipping test: Skip expectation for %s', page.url)
results.AddValue(skip.SkipValue(page, 'Skipped by test expectations'))
return
logging.info('Running %s', page.url)
page_state = PageState(page, test.TabForPage(page, state.browser))
def ProcessError():
if expectation == 'fail':
msg = 'Expected exception while running %s' % page.url
else:
msg = 'Exception while running %s' % page.url
results.AddValue(failure.FailureValue(page, sys.exc_info()))
exception_formatter.PrintFormattedException(msg=msg)
try:
page_state.PreparePage(test)
page_state.ImplicitPageNavigation(test)
test.RunPage(page, page_state.tab, results)
util.CloseConnections(page_state.tab)
except page_test.TestNotSupportedOnPlatformFailure:
raise
except page_test.Failure:
if expectation == 'fail':
exception_formatter.PrintFormattedException(
msg='Expected failure while running %s' % page.url)
else:
exception_formatter.PrintFormattedException(
msg='Failure while running %s' % page.url)
results.AddValue(failure.FailureValue(page, sys.exc_info()))
except (util.TimeoutException, exceptions.LoginException,
exceptions.ProfilingException):
ProcessError()
except (exceptions.TabCrashException, exceptions.BrowserGoneException):
ProcessError()
# Run() catches these exceptions to relaunch the tab/browser, so re-raise.
raise
except page_action.PageActionNotSupported as e:
results.AddValue(skip.SkipValue(page, 'Unsupported page action: %s' % e))
except Exception:
exception_formatter.PrintFormattedException(
msg='Unhandled exception while running %s' % page.url)
results.AddValue(failure.FailureValue(page, sys.exc_info()))
else:
if expectation == 'fail':
logging.warning('%s was expected to fail, but passed.\n', page.url)
finally:
page_state.CleanUpPage(test)
def _WaitForThermalThrottlingIfNeeded(platform):
if not platform.CanMonitorThermalThrottling():
return
thermal_throttling_retry = 0
while (platform.IsThermallyThrottled() and
thermal_throttling_retry < 3):
logging.warning('Thermally throttled, waiting (%d)...',
thermal_throttling_retry)
thermal_throttling_retry += 1
time.sleep(thermal_throttling_retry * 2)
if thermal_throttling_retry and platform.IsThermallyThrottled():
logging.warning('Device is thermally throttled before running '
'performance tests, results will vary.')
def _CheckThermalThrottling(platform):
if not platform.CanMonitorThermalThrottling():
return
if platform.HasBeenThermallyThrottled():
logging.warning('Device has been thermally throttled during '
'performance tests, results will vary.')
```
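For reference, a toy sketch (not Telemetry API) of how `--pageset-repeat` and `--page-repeat` nest inside `Run()`: the whole page list is traversed pageset_repeat times, and each page is attempted page_repeat times per traversal:

```python
# Toy illustration of the repeat nesting in Run(); a real run goes through
# _PrepareAndRunPage() and records into the results object instead of printing.
pages = ['page_a', 'page_b']           # hypothetical page names
pageset_repeat, page_repeat = 2, 3     # as set by --pageset-repeat / --page-repeat

for _ in range(pageset_repeat):
    for page in pages:
        for _ in range(page_repeat):
            print('running', page)     # a, a, a, b, b, b, then the whole set again
```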
#### File: telemetry/results/results_options.py
```python
import optparse
import os
import sys
from telemetry.core import util
from telemetry.results import buildbot_output_formatter
from telemetry.results import csv_output_formatter
from telemetry.results import gtest_progress_reporter
from telemetry.results import html_output_formatter
from telemetry.results import json_output_formatter
from telemetry.results import page_test_results
from telemetry.results import progress_reporter
# Allowed output formats. The default is the first item in the list.
_OUTPUT_FORMAT_CHOICES = ('html', 'buildbot', 'block', 'csv', 'gtest', 'json',
'none')
def AddResultsOptions(parser):
group = optparse.OptionGroup(parser, 'Results options')
group.add_option('--output-format',
default=_OUTPUT_FORMAT_CHOICES[0],
choices=_OUTPUT_FORMAT_CHOICES,
help='Output format. Defaults to "%%default". '
'Can be %s.' % ', '.join(_OUTPUT_FORMAT_CHOICES))
group.add_option('-o', '--output',
dest='output_file',
help='Redirects output to a file. Defaults to stdout.')
group.add_option('--output-trace-tag',
default='',
help='Append a tag to the key of each result trace.')
group.add_option('--reset-results', action='store_true',
help='Delete all stored results.')
group.add_option('--upload-results', action='store_true',
help='Upload the results to cloud storage.')
group.add_option('--results-label',
default=None,
                   help='Optional label to use for the results of a run.')
group.add_option('--suppress_gtest_report',
default=False,
help='Whether to suppress GTest progress report.')
parser.add_option_group(group)
def CreateResults(metadata, options):
"""
Args:
options: Contains the options specified in AddResultsOptions.
"""
# TODO(chrishenry): This logic prevents us from having multiple
# OutputFormatters. We should have an output_file per OutputFormatter.
# Maybe we should have --output-dir instead of --output-file?
if options.output_format == 'html' and not options.output_file:
options.output_file = os.path.join(util.GetBaseDir(), 'results.html')
elif options.output_format == 'json' and not options.output_file:
options.output_file = os.path.join(util.GetBaseDir(), 'results.json')
if hasattr(options, 'output_file') and options.output_file:
output_file = os.path.expanduser(options.output_file)
open(output_file, 'a').close() # Create file if it doesn't exist.
output_stream = open(output_file, 'r+')
else:
output_stream = sys.stdout
if not hasattr(options, 'output_format'):
options.output_format = _OUTPUT_FORMAT_CHOICES[0]
if not hasattr(options, 'output_trace_tag'):
options.output_trace_tag = ''
output_formatters = []
output_skipped_tests_summary = True
reporter = None
if options.output_format == 'none':
pass
elif options.output_format == 'csv':
output_formatters.append(csv_output_formatter.CsvOutputFormatter(
output_stream))
elif options.output_format == 'buildbot':
output_formatters.append(buildbot_output_formatter.BuildbotOutputFormatter(
output_stream, trace_tag=options.output_trace_tag))
elif options.output_format == 'gtest':
# TODO(chrishenry): This is here to not change the output of
# gtest. Let's try enabling skipped tests summary for gtest test
# results too (in a separate patch), and see if we break anything.
output_skipped_tests_summary = False
elif options.output_format == 'html':
# TODO(chrishenry): We show buildbot output so that users can grep
# through the results easily without needing to open the html
# file. Another option for this is to output the results directly
# in gtest-style results (via some sort of progress reporter),
# as we plan to enable gtest-style output for all output formatters.
output_formatters.append(buildbot_output_formatter.BuildbotOutputFormatter(
sys.stdout, trace_tag=options.output_trace_tag))
output_formatters.append(html_output_formatter.HtmlOutputFormatter(
output_stream, metadata, options.reset_results,
options.upload_results, options.browser_type,
options.results_label, trace_tag=options.output_trace_tag))
elif options.output_format == 'json':
output_formatters.append(
json_output_formatter.JsonOutputFormatter(output_stream, metadata))
else:
# Should never be reached. The parser enforces the choices.
raise Exception('Invalid --output-format "%s". Valid choices are: %s'
% (options.output_format,
', '.join(_OUTPUT_FORMAT_CHOICES)))
if options.suppress_gtest_report:
reporter = progress_reporter.ProgressReporter()
else:
reporter = gtest_progress_reporter.GTestProgressReporter(
sys.stdout, output_skipped_tests_summary=output_skipped_tests_summary)
return page_test_results.PageTestResults(
output_formatters=output_formatters, progress_reporter=reporter)
```
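A minimal usage sketch, assuming the telemetry package is importable and using a hypothetical metadata dict, of wiring `AddResultsOptions` into a parser and building a results object with the 'gtest' format:

```python
# Sketch only: build the options added by AddResultsOptions and create results.
import optparse
from telemetry.results import results_options

parser = optparse.OptionParser()
results_options.AddResultsOptions(parser)
options, _ = parser.parse_args(['--output-format=gtest'])
results = results_options.CreateResults({'test_name': 'example'}, options)
```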
#### File: web_perf/metrics/smoothness.py
```python
from telemetry.perf_tests_helper import FlattenList
from telemetry.util import statistics
from telemetry.value import list_of_scalar_values
from telemetry.value import scalar
from telemetry.web_perf.metrics import rendering_stats
from telemetry.web_perf.metrics import timeline_based_metric
class SmoothnessMetric(timeline_based_metric.TimelineBasedMetric):
def __init__(self):
super(SmoothnessMetric, self).__init__()
def AddResults(self, model, renderer_thread, interaction_records, results):
self.VerifyNonOverlappedRecords(interaction_records)
renderer_process = renderer_thread.parent
stats = rendering_stats.RenderingStats(
renderer_process, model.browser_process,
[r.GetBounds() for r in interaction_records])
input_event_latency = FlattenList(stats.input_event_latency)
if input_event_latency:
mean_input_event_latency = statistics.ArithmeticMean(
input_event_latency)
input_event_latency_discrepancy = statistics.DurationsDiscrepancy(
input_event_latency)
results.AddValue(scalar.ScalarValue(
results.current_page, 'mean_input_event_latency', 'ms',
round(mean_input_event_latency, 3)))
results.AddValue(scalar.ScalarValue(
results.current_page, 'input_event_latency_discrepancy', 'ms',
round(input_event_latency_discrepancy, 4)))
scroll_update_latency = FlattenList(stats.scroll_update_latency)
if scroll_update_latency:
mean_scroll_update_latency = statistics.ArithmeticMean(
scroll_update_latency)
scroll_update_latency_discrepancy = statistics.DurationsDiscrepancy(
scroll_update_latency)
results.AddValue(scalar.ScalarValue(
results.current_page, 'mean_scroll_update_latency', 'ms',
round(mean_scroll_update_latency, 3)))
results.AddValue(scalar.ScalarValue(
results.current_page, 'scroll_update_latency_discrepancy', 'ms',
round(scroll_update_latency_discrepancy, 4)))
gesture_scroll_update_latency = FlattenList(
stats.gesture_scroll_update_latency)
if gesture_scroll_update_latency:
results.AddValue(scalar.ScalarValue(
results.current_page, 'first_gesture_scroll_update_latency', 'ms',
round(gesture_scroll_update_latency[0], 4)))
# List of queueing durations.
frame_queueing_durations = FlattenList(stats.frame_queueing_durations)
if frame_queueing_durations:
results.AddValue(list_of_scalar_values.ListOfScalarValues(
results.current_page, 'queueing_durations', 'ms',
frame_queueing_durations))
# List of raw frame times.
frame_times = FlattenList(stats.frame_times)
results.AddValue(list_of_scalar_values.ListOfScalarValues(
results.current_page, 'frame_times', 'ms', frame_times,
description='List of raw frame times, helpful to understand the other '
'metrics.'))
# Arithmetic mean of frame times.
mean_frame_time = statistics.ArithmeticMean(frame_times)
results.AddValue(scalar.ScalarValue(
results.current_page, 'mean_frame_time', 'ms',
round(mean_frame_time, 3),
description='Arithmetic mean of frame times.'))
# Absolute discrepancy of frame time stamps.
frame_discrepancy = statistics.TimestampsDiscrepancy(
stats.frame_timestamps)
results.AddValue(scalar.ScalarValue(
results.current_page, 'jank', 'ms', round(frame_discrepancy, 4),
description='Absolute discrepancy of frame time stamps, where '
'discrepancy is a measure of irregularity. It quantifies '
'the worst jank. For a single pause, discrepancy '
'corresponds to the length of this pause in milliseconds. '
'Consecutive pauses increase the discrepancy. This metric '
'is important because even if the mean and 95th '
'percentile are good, one long pause in the middle of an '
'interaction is still bad.'))
# Are we hitting 60 fps for 95 percent of all frames?
# We use 19ms as a somewhat looser threshold, instead of 1000.0/60.0.
percentile_95 = statistics.Percentile(frame_times, 95.0)
results.AddValue(scalar.ScalarValue(
results.current_page, 'mostly_smooth', 'score',
1.0 if percentile_95 < 19.0 else 0.0,
        description='Were 95 percent of the frames hitting 60 fps? '
'boolean value (1/0).'))
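    # Example: if 99 of 100 frames take ~16.7 ms and a single frame takes
    # 50 ms, the 95th percentile stays below 19 ms, so mostly_smooth is 1.0;
    # the 50 ms pause is instead captured by the jank metric above.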
# Mean percentage of pixels approximated (missing tiles, low resolution
# tiles, non-ideal resolution tiles).
results.AddValue(scalar.ScalarValue(
results.current_page, 'mean_pixels_approximated', 'percent',
round(statistics.ArithmeticMean(
FlattenList(stats.approximated_pixel_percentages)), 3),
description='Percentage of pixels that were approximated '
'(checkerboarding, low-resolution tiles, etc.).'))
``` |
{
"source": "7kfpun/django-simple-cache-admin",
"score": 2
} |
#### File: django-simple-cache-admin/simple_cache_admin/admin.py
```python
from django.contrib import admin
from django.conf.urls import patterns, url
from .models import SimpleCache
from . import views
class SimpleCacheAdmin(admin.ModelAdmin):
"""SimpleCacheAdmin."""
def has_add_permission(self, request):
"""has_add_permission."""
return False
def has_delete_permission(self, request):
"""has_delete_permission."""
return False
def get_urls(self):
"""get_urls."""
urls = super(SimpleCacheAdmin, self).get_urls()
urlpatterns = patterns(
'simple_cache_admin.views',
url(r'^$', self.admin_site.admin_view(views.simple_cache), name='simple_cache_view'),
)
return urlpatterns + urls
admin.site.register(SimpleCache, SimpleCacheAdmin)
``` |
{
"source": "7kfpun/hotkeys-api",
"score": 2
} |
#### File: 7kfpun/hotkeys-api/app.py
```python
import csv
import logging
import os
import uuid
from datetime import datetime
import requests
from flask import Flask, Response, jsonify, request
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy_utils import UUIDType, force_auto_coercion
force_auto_coercion()
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['DATABASE_URL']
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
DEBUG = True
class Hotkey(db.Model):
"""Hotket model."""
id = db.Column(UUIDType(binary=False), primary_key=True, default=uuid.uuid4)
order = db.Column(db.Integer)
name = db.Column(db.String)
platform = db.Column(db.String)
group = db.Column(db.String)
type = db.Column(db.String)
uri = db.Column(db.String)
shortcut = db.Column(db.String)
description = db.Column(db.String)
def __init__(self, order, name, platform, group, type_, uri, shortcut, description):
"""__init__."""
self.order = order
self.name = name
self.platform = platform
self.group = group
self.type = type_
self.uri = uri
self.shortcut = shortcut
self.description = description
def __str__(self):
"""__str__."""
return self.shortcut
class SearchUrl(db.Model):
"""ChatRecord model."""
id = db.Column(UUIDType(binary=False), primary_key=True, default=uuid.uuid4)
name = db.Column(db.String)
platform = db.Column(db.String)
group = db.Column(db.String)
type = db.Column(db.String)
url = db.Column(db.Text)
created_datetime = db.Column(db.DateTime, default=datetime.utcnow)
def __init__(self, name, platform, group, type_, url):
"""__init__."""
self.name = name
self.platform = platform
self.group = group
self.type = type_
self.url = url
def __str__(self):
"""__str__."""
return self.url
@app.route('/', methods=['GET'])
def hello():
"""Hello world."""
return 'Hello world'
def clean_uris(url):
"""Clean uris."""
url = url.replace('http://', '').replace('https://', '')
uris = url.split('/', 3)
if len(uris) <= 1:
return uris
elif len(uris) == 2:
return [uris[0], '/'.join([uris[0], uris[1]])]
else:
return [uris[0], '/'.join([uris[0], uris[1]]), '/'.join([uris[0], uris[1], uris[2]])]
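# Example: clean_uris("https://github.com/user/repo/issues/1") returns
# ["github.com", "github.com/user", "github.com/user/repo"] -- the host plus
# the first one and two path segments, which hotkey URIs are matched against.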
@app.route('/api/hotkeys/', methods=['GET'])
def get_hotkets():
"""GET hotkeys."""
name = request.args.get('name')
platform = request.args.get('platform')
group = request.args.get('group')
type_ = request.args.get('type')
url = request.args.get('url', '')
if name or platform or group or type_ or url:
search_url = SearchUrl(name, platform, group, type_, url)
db.session.add(search_url)
db.session.commit()
uris = clean_uris(url)
print(uris)
logging.info(uris)
query = Hotkey.query.filter(
Hotkey.uri.in_(uris)
).order_by(Hotkey.group).order_by(Hotkey.order)
# return jsonify(results=query.all())
return jsonify(results=[{
'id': h.id,
'order': h.order,
'name': h.name,
'platform': h.platform,
'group': h.group,
'type': h.type,
'uri': h.uri,
'shortcut': h.shortcut,
'description': h.description,
} for h in query.all()])
else:
return jsonify(results=[])
@app.route('/api/hotkeys/pull_update/', methods=['GET'])
def pull_update():
"""Pull_update."""
CSV_URL = 'http://docs.google.com/spreadsheets/d/1JH-eQdWAXx70T5XkGTfz4jgXTMvG9Fpm96ANnyRpnkQ/pub?gid=0&single=true&output=csv' # noqa
with requests.Session() as s:
download = s.get(CSV_URL)
decoded_content = download.content.decode('utf-8')
cr = csv.reader(decoded_content.splitlines(), delimiter=',')
my_list = list(cr)
Hotkey.query.delete()
for row in my_list[1:]:
logging.info(row)
print(row)
hotkey = Hotkey(*row)
db.session.add(hotkey)
db.session.commit()
return Response(status=200)
if __name__ == "__main__":
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port, debug=DEBUG)
``` |
{
"source": "7l2icj/kamo_clone",
"score": 2
} |
#### File: beamline/eiger/backup_gui.py
```python
import wx
import wx.lib.newevent
import wx.lib.agw.pybusyinfo
import os
from yamtbx import util
from yamtbx.dataproc import bl_logfiles
from libtbx import adopt_init_args
from libtbx.utils import multi_out
import threading
import time
import traceback
import shutil
import fnmatch
import sys
import re
import glob
re_data_h5 = re.compile("^(.+)_data_([0-9]+).h5$")
re_eiger_h5 = re.compile("^(.+)_(master|data_([0-9]+)).h5$")
logfile_name = None #"bl32xu_datasync.log"
EventSyncError, EVT_SYNC_ERROR = wx.lib.newevent.NewEvent()
EventSyncDone, EVT_SYNC_DONE = wx.lib.newevent.NewEvent()
def get_mounted_dirs():
# XXX Linux specific
ret, out, err = util.call("df")
dirs = map(lambda x: x.split()[-1], out.splitlines()[1:])
dirs = filter(lambda x: x.startswith("/media/"), dirs)
return dirs
# get_mounted_dirs()
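# Example: for "df" output containing a line like
#   /dev/sdb1  976762584 123456 976639128   1% /media/usb-drive
# only mount points under /media/ (here "/media/usb-drive") are returned.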
class SyncOptions:
def __init__(self, sdir=None, ddir=None, exclude_dirs=[], exclude_files=[],
copy_shika_results=True, copy_scan_data=True, copy_hits=True):
adopt_init_args(self, locals())
# __init__()
def show(self, out):
out.write("""\
# Options:
# source_dir = %(sdir)s
# target_dir = %(ddir)s
# copy_shika_results = %(copy_shika_results)s
# copy_scan_files = %(copy_scan_data)s
# copy_scan_hit_files = %(copy_hits)s
# exclude_dirs = %(exclude_dirs)s
# exclude_files = %(exclude_files)s
""" % self.__dict__)
# show()
# class SyncOptions
class DataSyncThread:
def __init__(self, parent):
self.parent = parent
self.interval = 10
self.thread = None
self.opts = SyncOptions()
self.log_out = sys.stdout
self._transferred = {}
self._bsslog_cache = {}
self.skipped = set()
# __init__()
def start(self, interval=None, opts=None):
self.stop()
if self.opts.sdir!=opts.sdir or self.opts.ddir!=opts.ddir:
self._transferred = {}
self._bsslog_cache = {}
self.skipped = set()
self.opts = opts
self.log_out = multi_out()
self.log_out.register("stdout", sys.stdout)
self.log_out.register("file", open(os.path.join(self.opts.sdir, logfile_name), "a"))
self.log_out.write(time.strftime("# DATA SYNC GUI - Starting at %Y-%m-%d %H:%M:%S\n"))
self.opts.show(self.log_out)
self.log_out.write("\n")
self.log_out.flush()
self.keep_going = True
self.stop_now = False # for emergency stop
self.running = True
if interval is not None:
self.interval = interval
self.thread = threading.Thread(None, self.run)
self.thread.daemon = True
self.thread.start()
# start()
def stop(self, stop_now=False):
if self.is_running():
self.stop_now = stop_now
self.keep_going = False
self.thread.join()
# stop()
def is_running(self): return self.thread is not None and self.thread.is_alive()
def run(self):
counter = 0
while self.keep_going:
counter += 1
if self.interval == 0: # Run only once
self.keep_going = False
continue
try:
self.log_out.write(time.strftime("# %Y-%m-%d %H:%M:%S syncing..\n"))
self.log_out.flush()
st = time.time()
bytes_total, files_copied, skipped = self.do_sync()
eltime = time.time() - st
self.log_out.write(time.strftime("# %Y-%m-%d %H:%M:%S sync done. "))
self.log_out.write("Total %.1f %s copied " % util.human_readable_bytes(bytes_total))
self.log_out.write("in %.1f sec. " % eltime)
self.log_out.write("(%.1f %s/sec.)\n\n" % util.human_readable_bytes(float(bytes_total)/eltime))
self.log_out.flush()
ev = EventSyncDone(bytes_total=bytes_total,
files_copied=files_copied,
skipped=skipped)
wx.PostEvent(self.parent, ev)
except:
self.log_out.write("Error occurred in syncing.\n")
self.log_out.write(traceback.format_exc()+"\n")
self.log_out.flush()
self.keep_going = False
ev = EventSyncError(msg=traceback.format_exc())
wx.PostEvent(self.parent, ev)
if self.interval < 1:
time.sleep(self.interval)
else:
for i in xrange(int(self.interval/.5)):
if self.keep_going:
time.sleep(.5)
self.running = False
self.log_out.write(time.strftime("# %Y-%m-%d %H:%M:%S Stopped"))
self.log_out.write(" urgently.\n" if self.stop_now else ".\n")
self.log_out.flush()
# run()
def get_total_bytes_sofar(self):
ret = 0.
for k in self._transferred: ret += self._transferred[k][1]
return ret
# get_total_bytes_sofar()
def analyze_bsslogs_in_dir(self, wdir, files):
"""
Analyze all BSS log files in `wdir', and return {prefix:"data"or"scan", ...}
"""
if not files:
return {}
#self.log_out.write("debug:: checking logs in %s\n" % wdir)
#self.log_out.flush()
logfiles = filter(lambda x: x.endswith(".log"), map(lambda x: os.path.join(wdir, x), files))
logfiles_id = map(lambda x: (os.path.basename(x), os.path.getmtime(x), os.path.getsize(x)),
sorted(logfiles))
if wdir in self._bsslog_cache and self._bsslog_cache[wdir][0] == logfiles_id:
#print "Using cached", self._bsslog_cache[wdir][1]
return self._bsslog_cache[wdir][1]
ret = {}
tmp = {} # {prefix:[("data"|"scan", datetime.datetime object)]
# Summarize valid data collections, and resolve overwritten issues
for logf in logfiles:
if os.path.basename(logf) == "diffscan.log":
try:
slog = bl_logfiles.BssDiffscanLog(logf)
slog.remove_overwritten_scans()
for scan in slog.scans:
prefix = scan.get_prefix()[:-1] # ends with "_"
tmp.setdefault(prefix, []).append(("scan", scan.date.toordinal()))
except:
print traceback.format_exc()
continue
else:
try:
jlog = bl_logfiles.BssJobLog(logf)
jlog.annotate_overwritten_images(remove=True)
jobs = filter(lambda x: x.job_mode.lower() not in ("xafs", "raster scan"), jlog.jobs)
for job in jobs: # iterate over the filtered jobs; XAFS and raster-scan jobs are excluded above
t = job.scans[0].datetime.toordinal() if job.scans else -1
# job.prefix does not include "_"
tmp.setdefault(job.prefix, []).append(("data", t))
except:
print traceback.format_exc()
continue
for p in tmp:
newest = max(tmp[p], key=lambda x:x[1]) # take latest one
ret[p] = newest[0]
self._bsslog_cache[wdir] = (logfiles_id, ret)
return ret
# analyze_bsslogs_in_dir()
def skip_directory(self, dname_rel): # XXX Not tested!
"""
`dname' must be a relative path from source directory.
assuming the path is already normalized.. true??
"""
if not self.opts.exclude_dirs and self.opts.copy_shika_results:
return False
dirs = dname_rel.split(os.sep) # what if there is directory named "hoge/fuga"??
if self.opts.exclude_dirs:
for dname in dirs:
if any(map(lambda x: fnmatch.fnmatch(dname, x), self.opts.exclude_dirs)):
return True
if not self.opts.copy_shika_results and "_spotfinder" in dirs:
return True
return False
# skip_directory()
def skip_file(self, f, prefix_dict): # XXX Not tested!
"""
`f' must be abspath.
"""
if self.opts.exclude_files:
fbase = os.path.basename(f)
if any(map(lambda x: fnmatch.fnmatch(fbase, x), self.opts.exclude_files)):
return True
# If non-h5 file, do not skip (unless matched with user-specified pattern)
if not f.endswith(".h5"): return False
if not self.opts.copy_scan_data and not f.endswith("_onlyhits.h5"):
#if not prefix_dict: return False
r_h5_prefix = re_eiger_h5.search(os.path.basename(f))
if r_h5_prefix:
prefix = r_h5_prefix.group(1)
#self.log_out.write("debug: %s %s\n" % (prefix, prefix_dict[prefix]))
if prefix_dict.get(prefix, "") == "scan": return True
"""
# If there is no diffscan.log in the same directory, the file can never be a scan file.
# ..but, OVERWRITTEN_* directory has no log files.
scanlog = os.path.join(os.path.dirname(f), "diffscan.log")
if not os.path.exists(scanlog): return False
fbase = os.path.basename(f)
dirname = os.path.dirname(f)
# If there is <sameprefix>_onlyhits.h5, the related files must be scan files.
r_h5_prefix = re_eiger_h5.search(fbase)
if r_h5_prefix:
prefix = r_h5_prefix.group(1)
if os.path.exists(os.path.join(dirname, prefix+"_onlyhits.h5")):
return False
# How can we check if this is scan file??
# What if no onlyhits.h5.. maybe users want data if only few frames?
if 0: return True # XXX check this!
"""
# What if hit-extract failed??
if not self.opts.copy_hits and f.endswith("_onlyhits.h5"):
return True
return False
# skip_file()
def need_copy(self, f_src, f_dst):
time_tol = 2. # for FAT
if not os.path.exists(f_dst):
return True
mtime_size = os.path.getmtime(f_src), os.path.getsize(f_src)
mtime_size_saved = os.path.getmtime(f_dst), os.path.getsize(f_dst)
print "debug::", f_dst, mtime_size[1] - mtime_size_saved[1], mtime_size[0]-mtime_size_saved[0]
if mtime_size[1] == mtime_size_saved[1] and abs(mtime_size[0]-mtime_size_saved[0]) < time_tol:
return False
return True
# need_copy()
def do_sync(self):
# TODO exit loop when requested
sdir = self.opts.sdir
ddir = self.opts.ddir
log_out = self.log_out
bytes_total, files_copied, skipped = 0, [], []
for root, dirnames, filenames in os.walk(sdir, followlinks=True):
root_rel = os.path.relpath(root, sdir)
root_d = os.path.join(ddir, root_rel)
if self.stop_now: return bytes_total, files_copied, skipped
if self.skip_directory(root_rel): # skip_directory() expects a path relative to the source directory
if not root_rel in self.skipped:
log_out.write("skipping %s/\n"%root_rel)
self.skipped.add(root_rel)
continue
if not os.path.exists(root_d):
log_out.write("mkdir %s\n"%root_d)
os.makedirs(root_d)
prefix_dict = self.analyze_bsslogs_in_dir(root, filenames)
for f in filenames:
if self.stop_now: return bytes_total, files_copied, skipped
f_abs = os.path.join(root, f)
f_rel = os.path.join(root_rel, f)
if not os.path.exists(f_abs): continue # seems sometimes to happen..
mtime, size = os.path.getmtime(f_abs), os.path.getsize(f_abs)
if self.skip_file(f_abs, prefix_dict):
if f_rel not in self.skipped:
log_out.write("skipping %s\n"%f_rel)
self.skipped.add(f_rel)
skipped.append(f_rel)
continue
if self.need_copy(f_abs, os.path.join(root_d, f)): #(mtime, size)):
log_out.write("copying %s "%f_rel)
st = time.time()
try:
util.safe_copy(f_abs, root_d)
except:
log_out.write(" failed.\n")
log_out.flush()
continue
self._transferred[f_abs] = (mtime, size)
files_copied.append(f_rel)
bytes_total += size
log_out.write(" %.1f %s/s\n" % util.human_readable_bytes(float(size)/(time.time()-st)))
try:
# we experienced OSError here once.. I think this can be ignored.
shutil.copystat(root, root_d)
except:
log_out.write("Error in copying stat of %s\n" % root)
log_out.write(traceback.format_exc()+"\n")
return bytes_total, files_copied, skipped
# do_sync()
# class DataSyncThread
class MainFrame(wx.Frame):
def __init__(self, opts, parent=None, id=wx.ID_ANY):
wx.Frame.__init__(self, parent=parent, id=id, title="User data backup for %s" % opts.bl,
size=(700,500))
self.data_sync_thread = DataSyncThread(self)
self.update_timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.on_update_timer, self.update_timer)
self.update_timer.Start(5000)
self.log_update_timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.on_log_update_timer, self.log_update_timer)
self.log_update_timer.Start(500)
self._logfile_pos = {}
panel = wx.Panel(self, wx.ID_ANY)
vbox = wx.BoxSizer(wx.VERTICAL)
self.txtSourceDir = wx.TextCtrl(panel, wx.ID_ANY, size=(300, 25))
self.txtSourceDir.SetValue(os.getcwd())
self.txtDestDir = wx.TextCtrl(panel, wx.ID_ANY, style=wx.TE_PROCESS_ENTER, size=(300, 25))
mounted_dirs = get_mounted_dirs()
if mounted_dirs: self.txtDestDir.SetValue(mounted_dirs[0])
self.txtSourceDir.Bind(wx.EVT_TEXT_ENTER, self.txtSource_enter)
self.txtDestDir.Bind(wx.EVT_TEXT_ENTER, self.txtDestDir_enter)
self.btnSourceDir = wx.Button(panel, wx.ID_ANY, "...", size=(25,25))
self.btnDestDir = wx.Button(panel, wx.ID_ANY, "...", size=(25,25))
self.btnUnmount = wx.Button(panel, wx.ID_ANY, "umount", size=(70,25))
self.btnSourceDir.Bind(wx.EVT_BUTTON, self.btnSourceDir_click)
self.btnDestDir.Bind(wx.EVT_BUTTON, self.btnDestDir_click)
self.btnUnmount.Bind(wx.EVT_BUTTON, self.btnUnmount_click)
grid1 = wx.GridBagSizer(3, 5)
grid1.Add(wx.StaticText(panel, wx.ID_ANY, "Data location: "),
pos=(0,0), flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=4)
grid1.Add(self.txtSourceDir, pos=(0,1), flag=wx.EXPAND)
grid1.Add(self.btnSourceDir, pos=(0,2), flag=wx.EXPAND)
self.lblCopiedSize = wx.StaticText(panel, wx.ID_ANY, " (Total transferred size: ??? GB)")
grid1.Add(self.lblCopiedSize, pos=(0,3), span=(1,2), flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=4)
grid1.Add(wx.StaticText(panel, wx.ID_ANY, "Destination: "),
pos=(1,0), flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=4)
grid1.Add(self.txtDestDir, pos=(1,1), flag=wx.EXPAND)
grid1.Add(self.btnDestDir, pos=(1,2))
grid1.Add(self.btnUnmount, pos=(1,3))
self.lblDestLeft = wx.StaticText(panel, wx.ID_ANY, " (Space left: ??? GB)")
grid1.Add(self.lblDestLeft, pos=(1,4), flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=4)
grid1.AddGrowableCol(1)
vbox.Add(grid1)
vbox.AddSpacer(5)
hbox1 = wx.BoxSizer(wx.HORIZONTAL)
vbox.Add(hbox1)
self.cbCopyShikaResults = wx.CheckBox(panel, label="Copy SHIKA results")
self.cbCopyShikaResults.SetValue(1)
self.cbCopyScanOrg = wx.CheckBox(panel, label="Copy scan data (all images)")
self.cbCopyScanHit = wx.CheckBox(panel, label="Copy scan data (hits only)")
self.cbCopyScanHit.SetValue(1)
sb1 = wx.StaticBox(panel, label="Copy options")
svb1 = wx.StaticBoxSizer(sb1, wx.VERTICAL)
svb1.Add(self.cbCopyShikaResults, flag=wx.LEFT|wx.TOP, border=5)
svb1.Add(self.cbCopyScanOrg, flag=wx.LEFT|wx.TOP, border=5)
svb1.Add(self.cbCopyScanHit, flag=wx.LEFT|wx.TOP, border=5)
self.txtExcDirs = wx.TextCtrl(panel, wx.ID_ANY, style= wx.TE_MULTILINE, size=(160,80))
self.txtExcDirs.SetValue("coot-*\n")
self.txtExcFiles = wx.TextCtrl(panel, wx.ID_ANY, style= wx.TE_MULTILINE, size=(160,80))
self.txtExcFiles.SetValue("0-coot-*\n.*\n")
sb2 = wx.StaticBox(panel, label="Exclude options")
svb2 = wx.StaticBoxSizer(sb2, wx.VERTICAL)
grid2 = wx.GridBagSizer(2, 2)
grid2.Add(wx.StaticText(panel, wx.ID_ANY, "directories: "),
pos=(0,0), flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=4)
grid2.Add(wx.StaticText(panel, wx.ID_ANY, "files: "),
pos=(0,1), flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=4)
grid2.Add(self.txtExcDirs, pos=(1,0), flag=wx.EXPAND, border=4)
grid2.Add(self.txtExcFiles, pos=(1,1), flag=wx.EXPAND, border=4)
svb2.Add(grid2)
hbox1.Add(svb1, 1, flag=wx.EXPAND, border=4)
hbox1.Add(svb2, 1, flag=wx.EXPAND, border=4)
hbox2 = wx.BoxSizer(wx.HORIZONTAL)
vbox.Add(hbox2)
self.btnStart = wx.Button(panel, wx.ID_ANY, "Start", size=(200,50))
self.btnStop = wx.Button(panel, wx.ID_ANY, "Stop after this round", size=(200,50))
self.btnStopNow = wx.Button(panel, wx.ID_ANY, "Stop immediately", size=(200,50))
self.btnStart.Bind(wx.EVT_BUTTON, self.btnStart_click)
self.btnStop.Bind(wx.EVT_BUTTON, lambda e: self.btnStop_click(e, False))
self.btnStopNow.Bind(wx.EVT_BUTTON, lambda e: self.btnStop_click(e, True))
hbox2.AddStretchSpacer()
#hbox2.AddSpacer(100)
hbox2.Add(self.btnStart)
#hbox2.AddSpacer(100)
hbox2.Add(self.btnStop)
hbox2.Add(self.btnStopNow)
#hbox2.AddSpacer(100)
#hbox2.AddStretchSpacer(prop=1)
vbox.AddSpacer(5)
notebook = wx.Notebook(panel, wx.ID_ANY)
page1 = wx.Panel(notebook, wx.ID_ANY)
page2 = wx.Panel(notebook, wx.ID_ANY)
vboxp1 = wx.BoxSizer(wx.VERTICAL)
page1.SetSizer(vboxp1)
self.txtLog = wx.TextCtrl(page1, wx.ID_ANY, style= wx.TE_MULTILINE)
self.txtLog.SetEditable(False)
vboxp1.Add(self.txtLog, 1, wx.EXPAND)
vboxp2 = wx.BoxSizer(wx.VERTICAL)
page2.SetSizer(vboxp2)
self.lstSummary = wx.ListCtrl(page2, wx.ID_ANY, style=wx.LC_REPORT)
#self.lstSummary.SetEditable(False)
for i,(v,s) in enumerate((("Date", 150), ("Transferred", 100), ("Files updated", 100), ("Skipped", 100))):
self.lstSummary.InsertColumn(i,v)
self.lstSummary.SetColumnWidth(i, s)
vboxp2.Add(self.lstSummary, 1, wx.EXPAND)
notebook.InsertPage(0, page1, "LogView")
notebook.InsertPage(1, page2, "Summary")
vbox.Add(notebook, 1, flag=wx.EXPAND)
panel.SetSizer(vbox)
self.Bind(wx.EVT_CLOSE, self.onClose)
self.Bind(EVT_SYNC_ERROR, self.on_sync_error)
self.Bind(EVT_SYNC_DONE, self.on_sync_done)
self.CreateStatusBar()
self.update_gui()
self.Show()
# __init__()
def onClose(self, ev):
if self.data_sync_thread.is_running():
if wx.MessageDialog(None, "Syncing. Are you sure to exit?",
"Warning", style=wx.OK|wx.CANCEL).ShowModal() != wx.ID_OK:
return
else:
self.btnStop_click()
self.Destroy()
# onClose()
def update_gui(self):
if self.data_sync_thread.is_running():
self.btnStop.Enable()
self.btnStopNow.Enable()
self.btnStart.Disable()
self.btnSourceDir.Disable()
self.btnDestDir.Disable()
self.btnUnmount.Disable()
self.cbCopyShikaResults.Disable()
self.cbCopyScanOrg.Disable()
self.cbCopyScanHit.Disable()
self.txtSourceDir.SetEditable(False)
self.txtDestDir.SetEditable(False)
self.txtExcDirs.SetEditable(False)
self.txtExcFiles.SetEditable(False)
#self.log_update_timer.Start(500)
self.SetStatusText("Syncing..")
else: # stopped
self.btnStop.Disable()
self.btnStopNow.Disable()
self.btnStart.Enable()
self.btnSourceDir.Enable()
self.btnDestDir.Enable()
self.btnUnmount.Enable()
self.cbCopyShikaResults.Enable()
self.cbCopyScanOrg.Enable()
self.cbCopyScanHit.Enable()
self.txtSourceDir.SetEditable(True)
self.txtDestDir.SetEditable(True)
self.txtExcDirs.SetEditable(True)
self.txtExcFiles.SetEditable(True)
#self.log_update_timer.Stop()
self.SetStatusText("Not syncing.")
# update_gui()
def on_update_timer(self, ev):
if not self.data_sync_thread.is_running():
self.update_gui()
return
ddir = self.data_sync_thread.opts.ddir
space_left = util.check_disk_free_bytes(ddir)
self.lblDestLeft.SetLabel(" (Space left: %.2f %s)" % util.human_readable_bytes(space_left))
bytes_sofar = self.data_sync_thread.get_total_bytes_sofar()
self.lblCopiedSize.SetLabel(" (Total transferred size: %.2f %s)" % util.human_readable_bytes(bytes_sofar))
# on_update_timer()
def on_log_update_timer(self, ev=None):
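# Tail the sync log file: remember the last byte offset read per file and append only the new
# part to the log view; the status bar shows the last complete line.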
#if not self.data_sync_thread.is_running(): return
if not self.data_sync_thread.opts.sdir: return
log_f = os.path.join(self.data_sync_thread.opts.sdir, logfile_name)
if not os.path.isfile(log_f): return
size = os.path.getsize(log_f)
append_str = ""
if log_f not in self._logfile_pos or self._logfile_pos[log_f] > size:
append_str = open(log_f).read(size)
elif self._logfile_pos[log_f] == size:
return
else:
f = open(log_f)
pos = self._logfile_pos[log_f]
f.seek(pos)
append_str = f.read(size - pos)
self._logfile_pos[log_f] = size
if append_str:
# Remove first lines if exceeded the limit
max_lines = 100000
curr_lines = self.txtLog.GetNumberOfLines()
if curr_lines > max_lines:
lls = map(lambda x: self.txtLog.GetLineLength(x), xrange(curr_lines-3*max_lines//2))
self.txtLog.Remove(0, sum(lls)+len(lls))
self.txtLog.SetInsertionPointEnd()
self.txtLog.AppendText(append_str)
tmp = filter(lambda x: x.endswith("\n") and x.strip(), append_str.splitlines(True))
if tmp: self.SetStatusText(tmp[-1].strip().replace("# ", ""))
# on_log_update_timer()
def btnSourceDir_click(self, ev):
current_dir = self.txtSourceDir.GetValue()
if not os.path.isdir(current_dir): current_dir = ""
dlg = wx.DirDialog(None, message="Choose a source directory", defaultPath=current_dir)
if dlg.ShowModal() == wx.ID_OK:
dirsel = dlg.GetPath()
self.txtSourceDir.SetValue(dirsel)
dlg.Destroy()
# btnSourceDir_click()
def btnDestDir_click(self, ev):
current_dir = self.txtDestDir.GetValue()
dlg = wx.DirDialog(None, message="Choose a destination directory", defaultPath=current_dir)
if dlg.ShowModal() == wx.ID_OK:
dirsel = dlg.GetPath()
self.txtDestDir.SetValue(dirsel)
self.txtDestDir_enter()
dlg.Destroy()
# btnDestDir_click()
def btnUnmount_click(self, ev):
def df(d):
ret, out, err = util.call("df", '"%s"'%d) # XXX Linux specific?
out_lines = filter(lambda x: x, map(lambda x: x.strip(), out.splitlines()))
if len(out_lines) < 2: return None
sp = out_lines[1].split()
if not sp: return None
return sp[0]
ddir = self.txtDestDir.GetValue()
if not ddir or not os.path.isabs(ddir) or not os.path.isdir(ddir):
wx.MessageDialog(None, "Invalid directory.", "Error", style=wx.OK).ShowModal()
return
mounted_dev = df(ddir)
if not mounted_dev:
wx.MessageDialog(None, "Invalid directory.", "Error", style=wx.OK).ShowModal()
return
if wx.MessageDialog(None, "Unmounting %s. Are you sure?" % mounted_dev,
"Confirmation", style=wx.OK|wx.CANCEL).ShowModal() != wx.ID_OK:
return
ret, out, err = util.call("udisks", "--unmount %s" % mounted_dev) # XXX Linux specific?
if out:
wx.MessageDialog(None, out, "Error", style=wx.OK).ShowModal()
elif mounted_dev == df(ddir):
wx.MessageDialog(None, "Unmount failed.", "Error", style=wx.OK).ShowModal()
else:
wx.MessageDialog(None, "Successfully unmounted.", "OK", style=wx.OK).ShowModal()
# btnUnmount_click()
def txtSource_enter(self, ev=None):
sdir = str(self.txtSourceDir.GetValue())
if not sdir:
wx.MessageDialog(None, "Please specify source directory.", "Error", style=wx.OK).ShowModal()
return False
if not os.path.isabs(sdir):
wx.MessageDialog(None, "Please specify absolute path for source directory.", "Error", style=wx.OK).ShowModal()
return False
if not os.path.isdir(sdir):
wx.MessageDialog(None, "Please specify source directory, not file", "Error", style=wx.OK).ShowModal()
return False
if not os.access(sdir, os.R_OK|os.X_OK):
wx.MessageDialog(None, "You can't read the source directory.", "Error", style=wx.OK).ShowModal()
return False
sdir_norm = os.path.normpath(sdir)
if sdir != sdir_norm: self.txtSourceDir.SetValue(sdir_norm)
return True
# txtSource_enter()
def txtDestDir_enter(self, ev=None):
ddir = str(self.txtDestDir.GetValue())
self.lblDestLeft.SetLabel(" (Space left: ??? GB)")
if not ddir:
wx.MessageDialog(None, "Please specify destination directory.", "Error", style=wx.OK).ShowModal()
return False
if not os.path.isabs(ddir):
wx.MessageDialog(None, "Please specify absolute path for destination directory.", "Error", style=wx.OK).ShowModal()
return False
if not os.path.exists(ddir):
try:
os.makedirs(ddir)
except:
wx.MessageDialog(None, "Creation of %s failed"%ddir, "Error", style=wx.OK).ShowModal()
return False
if not os.path.isdir(ddir):
wx.MessageDialog(None, "Please specify destination directory, not file", "Error", style=wx.OK).ShowModal()
return False
ddir_norm = os.path.normpath(ddir)
if ddir != ddir_norm: self.txtDestDir.SetValue(ddir_norm)
space_left = util.check_disk_free_bytes(ddir)
self.lblDestLeft.SetLabel(" (Space left: %.2f %s)" % util.human_readable_bytes(space_left))
if not os.access(ddir, os.W_OK):
wx.MessageDialog(None, "You don't have write permission in the destination directory.", "Error", style=wx.OK).ShowModal()
return False
return True
# txtDestDir_enter()
def prepare_opts(self):
sdir = os.path.normpath(str(self.txtSourceDir.GetValue()))
ddir = os.path.normpath(str(self.txtDestDir.GetValue()))
if os.path.basename(ddir) != os.path.basename(sdir):
# this would be user-friendly..
ddir = os.path.join(ddir, os.path.basename(sdir))
if not os.path.exists(ddir): os.mkdir(ddir)
return SyncOptions(sdir=sdir,
ddir=ddir,
exclude_dirs=filter(lambda x:x, self.txtExcDirs.GetValue().splitlines()),
exclude_files=filter(lambda x:x, self.txtExcFiles.GetValue().splitlines()),
copy_shika_results=self.cbCopyShikaResults.GetValue(),
copy_scan_data=self.cbCopyScanOrg.GetValue(),
copy_hits=self.cbCopyScanHit.GetValue())
# prepare_opts()
def btnStart_click(self, ev):
if not self.txtSource_enter(): return
if not self.txtDestDir_enter(): return
sdir = str(self.txtSourceDir.GetValue())
ddir = str(self.txtDestDir.GetValue())
if os.path.realpath(sdir) == os.path.realpath(ddir):
wx.MessageDialog(None, "You cannot specify the same directory", "Error", style=wx.OK).ShowModal()
return
if (os.path.realpath(ddir)+os.sep).startswith(os.path.realpath(sdir)+os.sep):
wx.MessageDialog(None, "Destination path is a subdirectory of source path!", "Error", style=wx.OK).ShowModal()
return
if os.stat(sdir).st_dev == os.stat(ddir).st_dev:
if wx.MessageDialog(None, "Two directories may be in the same device. Proceed?",
"Warning", style=wx.OK|wx.CANCEL).ShowModal() != wx.ID_OK:
return
opts = self.prepare_opts()
self.data_sync_thread.start(opts=opts)
self.update_gui()
# btnStart_click()
def btnStop_click(self, ev=None, stop_now=True):
if not self.data_sync_thread.is_running():
wx.MessageDialog(None, "Not running now", "Info", style=wx.OK).ShowModal()
return False
busyinfo = wx.lib.agw.pybusyinfo.PyBusyInfo("Stopping..", title="Wait")
try: wx.SafeYield()
except: pass
try:
self.data_sync_thread.stop(stop_now)
finally:
busyinfo = None
self.on_log_update_timer() # update log
self.update_gui()
# btnStop_click()
def on_sync_error(self, ev):
self.update_gui()
wx.MessageDialog(None, ev.msg, "Error", style=wx.OK).ShowModal()
# on_sync_error()
def on_sync_done(self, ev):
max_rows = 10000
if self.lstSummary.GetItemCount() > max_rows: # just in case
for i in xrange(self.lstSummary.GetItemCount()-max_rows):
self.lstSummary.DeleteItem(0)
n_copied = len(ev.files_copied)
if "./%s" % logfile_name in ev.files_copied: n_copied -= 1
pos = self.lstSummary.InsertStringItem(self.lstSummary.GetItemCount(),
time.strftime("%Y-%m-%d %H:%M:%S"))
self.lstSummary.SetStringItem(pos, 1, "%.2f %s"%(util.human_readable_bytes(ev.bytes_total)))
self.lstSummary.SetStringItem(pos, 2, "%d"%n_copied)
self.lstSummary.SetStringItem(pos, 3, "%d"%len(ev.skipped))
self.lstSummary.EnsureVisible(pos)
# on_sync_done()
# class MainFrame
def run(argv):
import optparse
parser = optparse.OptionParser(usage="usage: %prog [options]")
parser.add_option("--bl", action="store", dest="bl", default="BL32XU", help="For Window title and logfile name")
opts, args = parser.parse_args(argv)
global logfile_name
logfile_name = "%s_datasync.log" % opts.bl.lower()
app = wx.App()
app.TopWindow = MainFrame(opts)
app.MainLoop()
# run()
if __name__ == "__main__":
import sys
run(sys.argv[1:])
```
#### File: beamline/eiger/hit_extract_online.py
```python
from yamtbx.util import get_temp_local_dir
from yamtbx.util import safe_copy
from yamtbx.dataproc import eiger
from yamtbx.dataproc.XIO import XIO
import h5py
import os
import shutil
import time
import traceback
import re
import pysqlite2.dbapi2 as sqlite3
import glob
def get_nspots(dbfile, prefix): # prefix must include _
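# Query the SHIKA spot-finding results database (sqlite) and return [(image_filename, n_spots), ...]
# for images matching `prefix'. Reading is retried for up to about a minute because the database
# may be locked by the writer process.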
re_file = re.compile(re.escape(prefix)+"[0-9]+\..{1,3}$")
print prefix
con = sqlite3.connect(dbfile, timeout=10)
cur = con.cursor()
for itrial in xrange(60):
try:
c = con.execute("select imgf,nspot from stats")
file_spots = c.fetchall()
#print file_spots
return filter(lambda x: re_file.search(str(x[0])), file_spots)
except sqlite3.DatabaseError:
print traceback.format_exc()
print "DB failed. retrying (%d)" % itrial
time.sleep(1)
continue
return []
# get_nspots()
def create_hitsonly_h5(master_h5, dbfile, hits_h5, spots_min):
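# Build "<prefix>onlyhits.h5" containing only frames with at least spots_min spots: wait until
# spot-finding results cover all images, copy the master h5, drop all /entry/data links, then
# re-insert the raw data of the hit frames compressed with bslz4.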
start_time = time.time()
prefix = os.path.basename(master_h5)[:-len("master.h5")]
n_images = XIO.Image(master_h5).header["Nimages"]
print " %d images in %s" % (n_images, master_h5)
flag_ok = False
for i in xrange(100):
file_spots = get_nspots(dbfile, prefix)
print "%6d spot-finding results out of %6d images" % (len(file_spots), n_images)
if len(file_spots) >= n_images:
flag_ok = True
break
time.sleep(6)
if not flag_ok:
print "ERROR: Timeout!"
return
hits = filter(lambda x: x[1]>=spots_min, file_spots)
print "There are %6d hits (>= %d spots) out of %6d images" % (len(hits), spots_min, n_images)
shutil.copyfile(master_h5, hits_h5)
h = h5py.File(hits_h5, "a")
for k in h["/entry/data"].keys():
del h["/entry/data"][k]
for name, nsp in sorted(hits):
print " hit: %s %d" %(name, nsp)
frameno = int(os.path.splitext(name[len(prefix):])[0])
data = eiger.extract_data(master_h5, frameno, return_raw=True)
grpname = "%s%.6d"%(prefix, frameno)
rootgrp = h["/entry/data"]
rootgrp.create_group(grpname)
if data is not None:
eiger.compress_h5data(h, "/entry/data/%s/data"%grpname, data, chunks=None, compression="bslz4")
rootgrp["%s/data"%grpname].attrs["n_spots"] = nsp
else:
print " error: data not found (%s)" % name
h.close()
org_files = filter(lambda x: os.path.exists(x), eiger.get_masterh5_related_filenames(master_h5))
org_size = sum(map(lambda x: os.path.getsize(x), org_files))/1024.0**2
new_size = os.path.getsize(hits_h5)/1024.0**2
eltime = time.time() - start_time
print "SIZE: %.2f MB (%d) to %.2f MB (saved %.2f MB; %.1f%%) took %.2f sec" % (org_size, len(org_files),
new_size, org_size-new_size,
new_size/org_size*100., eltime)
# TODO if original file moved to OVERWRITTEN_, then discard this onlyhits.h5 file!!
# Because hit-finding results may be contaminated, and onlyhits.h5 may be no use!
# To check this consistency, need master h5 creation date or something.
# create_hitsonly_h5()
def run(master_h5, master_h5_ctime, dbfile, tmpdir=None, spots_min=3, remove_files=False):
"""
Download must be finished when this script started!!
Everything in tmpdir will be removed!
"""
os.stat_float_times(False)
if tmpdir: assert os.path.isdir(tmpdir)
try:
assert master_h5.endswith("_master.h5")
prefix = os.path.basename(master_h5)[:-len("master.h5")]
# If tmpdir given, copy of master.h5 and data h5 files should be there
if tmpdir: master_h5_in_tmp = os.path.join(tmpdir, os.path.basename(master_h5))
else: master_h5_in_tmp = master_h5 # If not, just use this
# If tmpdir given, just use there; otherwise create new one.
if not tmpdir: tmpdir = get_temp_local_dir("hitsonly", min_gb=1)
hits_h5 = os.path.join(tmpdir, prefix+"onlyhits.h5")
print "tmpdir is %s" %tmpdir
create_hitsonly_h5(master_h5_in_tmp, dbfile, hits_h5, spots_min)
if not os.path.isfile(hits_h5):
raise Exception("Generation of %s failed" % hits_h5)
print "ctime_master_h5 =", os.path.getctime(master_h5)
if os.path.getctime(master_h5) != master_h5_ctime:
raise Exception("Master h5 file (%s, %d) is changed! Discarding this hitsonly h5" % (master_h5, master_h5_ctime))
#shutil.move(hits_h5, os.path.join(os.path.dirname(master_h5), os.path.basename(hits_h5)))
safe_copy(hits_h5, os.path.normpath(os.path.dirname(master_h5)), move=True)
finally:
files_in_tmp = glob.glob(os.path.join(tmpdir, "*"))
if files_in_tmp:
print "Removing %d files in tmpdir:" % len(files_in_tmp)
for f in files_in_tmp: print " %s" % f
shutil.rmtree(tmpdir)
# run()
if __name__ == "__main__":
import sys
import optparse
parser = optparse.OptionParser(usage="usage: %prog [options] master.h5 master.h5-in-local dbfile",
description="")
parser.add_option("--min-spots", action="store", dest="spots_min", type=int, default=3)
parser.add_option("--ctime-master", action="store", dest="ctime_master", type=int)
parser.add_option("--tmpdir", action="store", dest="tmpdir", type=str)
opts, args = parser.parse_args(sys.argv[1:])
master_h5, dbfile = args
os.stat_float_times(False)
master_h5_ctime = opts.ctime_master
if master_h5_ctime is None or master_h5_ctime < 0: master_h5_ctime = os.path.getctime(master_h5)
if opts.tmpdir=="None": opts.tmpdir = None
print "Command args:"
print " %s %s --min-spots=%s --ctime-master=%s --tmpdir=%s" %(master_h5, dbfile, opts.spots_min, opts.ctime_master, opts.tmpdir)
run(master_h5, master_h5_ctime, dbfile, tmpdir=opts.tmpdir, spots_min=opts.spots_min)
```
#### File: kamo_clone/cctbx_progs/aniso_cutoff.py
```python
import os
import string
import re
import math
import iotbx.mtz
from cctbx.array_family import flex
from iotbx.reflection_file_editor import guess_array_output_labels
# Original is in iotbx.reflection_file_editor
def get_original_array_types (mtz_file, original_labels) :
array_types = ""
mtz_columns = mtz_file.column_labels()
mtz_types = mtz_file.column_types()
mtz_crossref = dict(zip(mtz_columns, mtz_types))
for label in original_labels :
array_types += mtz_crossref[label]
return array_types
# Original is in iotbx.reflection_file_editor
def add_array_to_mtz_dataset (mtz_dataset, output_array, fake_label, column_types) :
if mtz_dataset is None :
mtz_dataset = output_array.as_mtz_dataset(column_root_label=fake_label,
column_types=column_types)
else :
mtz_dataset.add_miller_array(miller_array=output_array,
column_root_label=fake_label,
column_types=column_types)
return mtz_dataset
def aniso_res_cut_selection(arr, astarlim, bstarlim, cstarlim):
"""
cctbx.miller.array.ellipsoidal_truncation_by_sigma()
"""
ehm = 1./astarlim
ekm = 1./bstarlim
elm = 1./cstarlim
selection = flex.bool(arr.indices().size(), False)
#selection = flex.bool(arr.indices().size(), True)
data = arr.data()
sigmas = arr.sigmas()
for i_mi, mi in enumerate(arr.indices()):
rsv = arr.unit_cell().reciprocal_space_vector(mi)
r = math.sqrt((rsv[0]/ehm)**2 + (rsv[1]/ekm)**2 + (rsv[2]/elm)**2)
if(r<=1): selection[i_mi] = True
#if(r>1 and data[i_mi]/sigmas[i_mi]<sigma_cutoff): selection[i_mi] = False
return selection
def run(mtzin, rescut, mtzout):
# Open mtz
mtz_file = iotbx.mtz.object(mtzin)
miller_arrays = mtz_file.as_miller_arrays()
print "Opening", mtzin
mtz_dataset = None
labels = ["H", "K", "L"]
for i, ar in enumerate(miller_arrays):
d_spacings = ar.d_spacings().data()
sel = aniso_res_cut_selection(ar, *rescut)
print "%d reflections removed from %s" % (sum(~sel), ar.info().label_string())
fake_label = 2 * string.uppercase[i]
for lab in guess_array_output_labels(ar):
labels.append(lab)
array_types = get_original_array_types(mtz_file, ar.info().labels)
default_types = iotbx.mtz.default_column_types(ar)
if len(default_types) == len(array_types):
column_types = array_types
else:
column_types = None
mtz_dataset = add_array_to_mtz_dataset(mtz_dataset, ar.select(sel), fake_label,
column_types)
# Decide labels and write mtz file
mtz_object = mtz_dataset.mtz_object()
invalid_chars = re.compile("[^A-Za-z0-9_\-+\(\)]")
used = dict([ (label, 0) for label in labels ])
for i, column in enumerate(mtz_object.columns()):
if column.label() != labels[i] :
label = labels[i]
original_label = label
assert used[label] == 0
try:
column.set_label(label)
except RuntimeError, e:
if ("new_label is used already" in str(e)) :
col_names = [ col.label() for col in mtz_object.columns() ]
raise RuntimeError(("Duplicate column label '%s': current labels "+
"are %s; user-specified output labels are %s.") %
(label, " ".join(col_names), " ".join(labels)))
else:
used[original_label] += 1
mtz_object.write(file_name=mtzout)
print
print "Writing:", mtzout
print
# run()
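# Example command line (file name and limits below are hypothetical):
#   python aniso_cutoff.py data.mtz 2.0,2.3,2.8
# keeps only reflections inside the ellipsoid defined by 2.0/2.3/2.8 A resolution limits along
# a*/b*/c* and writes data_anisocutoff.mtz.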
if __name__ == "__main__":
import sys
mtzin = sys.argv[1]
res_cut = map(float, sys.argv[2].split(",")) # resolution cutoff along a*,b*,c*
mtzout = os.path.splitext(os.path.basename(mtzin))[0] + "_anisocutoff.mtz"
run(mtzin= mtzin, rescut= res_cut, mtzout=mtzout)
```
#### File: kamo_clone/cctbx_progs/calc_DF_over_F.py
```python
import iotbx.file_reader
from cctbx.array_family import flex
def run(hklin):
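# For every anomalous array in the file, report two Bijvoet-difference statistics:
#   <d''/mean>   : per-reflection |F(+)-F(-)| divided by the Bijvoet-averaged amplitude, then averaged
#   <d''>/<mean> : mean |F(+)-F(-)| divided by the mean Bijvoet-averaged amplitude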
arrays = iotbx.file_reader.any_file(hklin).file_server.miller_arrays
for arr in arrays:
if not arr.anomalous_flag():
continue
print arr.info()
if arr.is_complex_array():
arr = arr.as_amplitude_array() # must be F
ano = arr.anomalous_differences()
ave = arr.average_bijvoet_mates()
ano, ave = ano.common_sets(ave)
print " <d''/mean>=", flex.mean(flex.abs(ano.data()) / ave.data())
print " <d''>/<mean>=", flex.mean(flex.abs(ano.data())) / flex.mean(ave.data())
print
if __name__ == "__main__":
import sys
run(sys.argv[1])
```
#### File: kamo_clone/cctbx_progs/compare_model_r_factors.py
```python
import math
import iotbx.mtz
import iotbx.phil
from cctbx.array_family import flex
from cctbx import miller
from iotbx.reflection_file_editor import is_rfree_array
from iotbx.reflection_file_utils import get_r_free_flags_scores
from get_ccano_mtz_mtz import remove_phase
from eval_Rfree_factors_with_common_reflections import get_flag
from pylab import *
from matplotlib.ticker import FuncFormatter
master_params_str="""\
dmin = None
.type = float
dmax = None
.type = float
nbins = 20
.type = int
flag_name = None
.type = str
flag_value = None
.type = int
plot = *rfree *rwork
.type = choice(multi=True)
"""
def calc_r(fo_sel, fm_sel):
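# Conventional crystallographic R factor over the selected reflections:
#   R = sum(|Fobs - Fmodel|) / sum(Fobs)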
return flex.sum(flex.abs(fo_sel.data()-fm_sel.data())) / flex.sum(fo_sel.data())
# calc_r()
def run(mtz_files, params):
fig, ax1 = plt.subplots()
for mtzfile in mtz_files:
arrays = iotbx.mtz.object(file_name=mtzfile).as_miller_arrays()
remove_phase(arrays)
f_obs = filter(lambda s: "F-obs-filtered" in s.info().label_string(), arrays)
f_model = filter(lambda s: "F-model" in s.info().label_string(), arrays)
if not (len(f_obs) == len(f_model) == 1):
print "File %s does not contain single F-obs-filtered and F-model" % mtzfile
continue
#flag = get_flag(arrays, params.flag_name, params.flag_value)
flag = get_flag(arrays, params.flag_name, params.flag_value)
if flag is None:
print "File %s does not contain test flag" % mtzfile
continue
assert f_obs[0].anomalous_flag() == f_model[0].anomalous_flag()
if f_obs[0].anomalous_flag():
flag = flag.generate_bijvoet_mates()
flag = flag.resolution_filter(d_max=params.dmax, d_min=params.dmin)
f_obs, f_model = f_obs[0], f_model[0]
#f_obs, f_model = map(lambda d: d.resolution_filter(d_max=params.dmax, d_min=params.dmin),
# (f_obs, f_model))
f_model, flag = f_model.common_sets(flag)
f_obs, flag = f_obs.common_sets(flag)
f_model, flag = f_model.common_sets(flag)
#f_model = f_model.common_set(flag)
#f_obs = f_obs.common_set(flag)
binner = f_obs.setup_binner(n_bins=params.nbins)
plot_data = [[], [], []]
for i_bin in binner.range_used():
dmax, dmin = binner.bin_d_range(i_bin)
flag_sel = flag.resolution_filter(d_max=dmax, d_min=dmin)
fo_sel = f_obs.common_set(flag_sel)
fm_sel = f_model.common_set(flag_sel)
print fo_sel.size(), fm_sel.size(), flag_sel.size()
r_free = calc_r(fo_sel.select(flag_sel.data()), fm_sel.select(flag_sel.data()))
r_work = calc_r(fo_sel.select(~flag_sel.data()), fm_sel.select(~flag_sel.data()))
plot_data[0].append(1./dmin**2)
plot_data[1].append(r_free)
plot_data[2].append(r_work)
if "rfree" in params.plot:
plot(plot_data[0], plot_data[1], label=mtzfile[-30:]+":free")
if "rwork" in params.plot:
plot(plot_data[0], plot_data[2], label=mtzfile[-30:]+":work")
legend(loc="upper left")
xlabel('d^-2')
ylabel("R-factors")
s2_formatter = lambda x,pos: "inf" if x == 0 else "%.2f" % (1./math.sqrt(x))
gca().xaxis.set_major_formatter(FuncFormatter(s2_formatter))
show()
# run()
if __name__ == "__main__":
import sys
cmdline = iotbx.phil.process_command_line(args=sys.argv[1:],
master_string=master_params_str)
params = cmdline.work.extract()
args = cmdline.remaining_args
mtz_files = filter(lambda s:s.endswith(".mtz"), args)
run(mtz_files, params)
```
#### File: kamo_clone/cctbx_progs/eval_Rfree_CCfree_from_refine_mtz.py
```python
import iotbx.mtz
from cctbx.array_family import flex
from cctbx import miller
from iotbx.reflection_file_editor import is_rfree_array, get_best_resolution, get_original_array_types
from iotbx.reflection_file_utils import get_r_free_flags_scores
def get_flag(arrays, flag_name=None, flag_value=None):
flag_array = None
# Get flag array
if flag_name is None:
flags = filter(lambda x: is_rfree_array(x, x.info()), arrays)
if len(flags) == 0:
print " No R free flags like column found."
return None
elif len(flags) > 1:
print " More than one column which looks like R free flag:"
for f in flags:
print " ", f.info().label_string()
return None
else:
flag_name = flags[0].info().label_string()
flag_array = flags[0]
print "# Guessing R free flag:", flag_name
else:
flags = filter(lambda x: flag_name==x.info().label_string(), arrays)
if len(flags) == 0:
print " Specified flag name not found:", flag
quit()
else:
print "# Use specified flag:", flag
flag_array = flags[0]
# Get flag number
if flag_value is None:
flag_scores = get_r_free_flags_scores(miller_arrays=[flag_array], test_flag_value=flag_value)
flag_value = flag_scores.test_flag_values[0]
print "# Guessing flag number:", flag_value
else:
print "# Specified flag number:", flag_value
return flag_array.customized_copy(data=flag_array.data() == flag_value)
# get_flag()
def get_arrays(f):
arrays = iotbx.mtz.object(f).as_miller_arrays()
f_obs = filter(lambda x:x.info().labels[0]=="F-obs-filtered", arrays)[0]
f_model = filter(lambda x:x.info().labels[0]=="F-model", arrays)[0].as_amplitude_array()
flag = get_flag(arrays)
print "#",f, "includes %d reflections. (d= %.2f - %.2f)" % ((f_obs.data().size(),)+ f_obs.d_max_min())
f_obs, flag = f_obs.common_sets(flag)
f_model, flag = f_model.common_sets(flag)
f_model, flag = f_model.common_sets(flag)
# Assuming f_obs and f_model is an already common set.
return f_obs, f_model, flag
# get_arrays()
def calc_r(f_obs, f_model):
return flex.sum(flex.abs(f_obs.data() - f_model.data())) / flex.sum(f_obs.data())
# calc_r()
def calc_cc(obs, model, as_intensity=True):
if as_intensity:
obs, model = map(lambda x:x.as_intensity_array(), (obs, model))
corr = flex.linear_correlation(obs.data(), model.data())
if corr.is_well_defined(): return corr.coefficient()
else: return float("nan")
# calc_cc()
def run(mtzfile):
f_obs, f_model, flag = get_arrays(mtzfile)
f_obs.setup_binner(auto_binning=True)
f_model.setup_binner(auto_binning=True)
e_obs, e_model = map(lambda x:x.quasi_normalize_structure_factors(), (f_obs, f_model))
binner = f_obs.setup_binner(reflections_per_bin=400)
for i_bin in binner.range_used():
dmax, dmin = binner.bin_d_range(i_bin)
sel = binner.selection(i_bin)
sel_work = sel & ~flag.data()
sel_free = sel & flag.data()
#f_obs_w, f_obs_t = f_obs.select(~flag.data()), f_obs.select(flag.data())
#f_model_w, f_model_t = f_model.select(~flag.data()), f_model.select(flag.data())
f_obs_w_b, f_model_w_b = f_obs.select(sel_work), f_model.select(sel_work)
f_obs_t_b, f_model_t_b = f_obs.select(sel_free), f_model.select(sel_free)
e_obs_w_b, e_model_w_b = e_obs.select(sel_work), e_model.select(sel_work)
e_obs_t_b, e_model_t_b = e_obs.select(sel_free), e_model.select(sel_free)
r_work, r_free = calc_r(f_obs_w_b, f_model_w_b), calc_r(f_obs_t_b, f_model_t_b)
cc_work_E, cc_free_E = calc_cc(e_obs_w_b, e_model_w_b, False), calc_cc(e_obs_t_b, e_model_t_b, False)
cc_work, cc_free = calc_cc(f_obs_w_b, f_model_w_b, True), calc_cc(f_obs_t_b, f_model_t_b, True)
tmp = dmax, dmin, r_free, r_work, cc_free, cc_work, cc_free_E, cc_work_E, mtzfile
print "%6.3f %6.3f %.4f %.4f % 7.4f % 7.4f % 7.4f % 7.4f %s" % tmp
# run()
if __name__ == "__main__":
import sys
print " dmax dmin rfree rwork ccfree ccwork ccfree.E ccwork.E file"
for f in sys.argv[1:]:
run(f)
```
#### File: kamo_clone/cctbx_progs/exclude_resolution_range.py
```python
import sys, os, optparse
import string
import re
import iotbx.mtz
from cctbx.array_family import flex
from iotbx.reflection_file_editor import guess_array_output_labels
# Original is in iotbx.reflection_file_editor
def get_original_array_types (mtz_file, original_labels) :
array_types = ""
mtz_columns = mtz_file.column_labels()
mtz_types = mtz_file.column_types()
mtz_crossref = dict(zip(mtz_columns, mtz_types))
for label in original_labels :
array_types += mtz_crossref[label]
return array_types
# Original is in iotbx.reflection_file_editor
def add_array_to_mtz_dataset (mtz_dataset, output_array, fake_label, column_types) :
if mtz_dataset is None :
mtz_dataset = output_array.as_mtz_dataset(column_root_label=fake_label,
column_types=column_types)
else :
mtz_dataset.add_miller_array(miller_array=output_array,
column_root_label=fake_label,
column_types=column_types)
return mtz_dataset
def run(mtz, mtz_out, ranges):
# Open mtz
mtz_file = iotbx.mtz.object(mtz)
miller_arrays = mtz_file.as_miller_arrays()
print "Opening", mtz
mtz_dataset = None
labels = ["H", "K", "L"]
for i, ar in enumerate(miller_arrays):
d_spacings = ar.d_spacings().data()
sel = flex.bool(d_spacings.size(), True)
for d1, d2 in ranges:
dmax, dmin = max(d1,d2), min(d1,d2)
tmp = d_spacings > dmax
tmp |= dmin > d_spacings
sel &= tmp
print "%d reflections removed from %s" % (sum(~sel), ar.info().label_string())
fake_label = 2 * string.uppercase[i]
for lab in guess_array_output_labels(ar):
labels.append(lab)
array_types = get_original_array_types(mtz_file, ar.info().labels)
default_types = iotbx.mtz.default_column_types(ar)
if len(default_types) == len(array_types):
column_types = array_types
else:
column_types = None
mtz_dataset = add_array_to_mtz_dataset(mtz_dataset, ar.select(sel), fake_label,
column_types)
# Decide labels and write mtz file
mtz_object = mtz_dataset.mtz_object()
invalid_chars = re.compile("[^A-Za-z0-9_\-+\(\)]")
used = dict([ (label, 0) for label in labels ])
for i, column in enumerate(mtz_object.columns()):
if column.label() != labels[i] :
label = labels[i]
original_label = label
assert used[label] == 0
try:
column.set_label(label)
except RuntimeError, e:
if ("new_label is used already" in str(e)) :
col_names = [ col.label() for col in mtz_object.columns() ]
raise RuntimeError(("Duplicate column label '%s': current labels "+
"are %s; user-specified output labels are %s.") %
(label, " ".join(col_names), " ".join(labels)))
else:
used[original_label] += 1
mtz_object.write(file_name=mtz_out)
print
print "Writing:", mtz_out
print
# run()
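# Example command line (file names below are hypothetical):
#   python exclude_resolution_range.py data.mtz -r 3.70,3.65 -r 2.26,2.24 -o data_cut.mtz
# removes all reflections whose d-spacing falls inside each given range (e.g. ice-ring shells)
# from every column of the input MTZ and writes the result to the output file.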
if __name__ == "__main__":
parser = optparse.OptionParser(usage="usage: %prog [options] mtzfile")
parser.add_option("--ranges","-r", action="append", dest="ranges", type=str, help="like 2.5,3")
parser.add_option("--mtz-out","-o", action="store", dest="mtz_out", type=str, help="output MTZ file")
(opts, args) = parser.parse_args(sys.argv)
if len(args) < 2 or opts.ranges is None:
print parser.print_help()
quit()
if not os.path.isfile(args[1]):
print "File not found:", args[1]
quit()
if opts.mtz_out is None:
opts.mtz_out = os.path.splitext(os.path.basename(args[1]))[0] + "_cut.mtz"
if os.path.isfile(opts.mtz_out):
print "File already exists:", opts.mtz_out
print "Please remove the file or change the output name with -o option"
quit()
ranges = []
for r in opts.ranges:
sp = map(lambda x:float(x), r.split(","))
assert len(sp) == 2
ranges.append(sp)
run(mtz=args[1], mtz_out=opts.mtz_out, ranges=ranges)
```
#### File: kamo_clone/cctbx_progs/get_mean_for_hkl.py
```python
import iotbx.file_reader
from cctbx.array_family import flex
import iotbx.phil
master_params_str = """
d_min = None
.type = float
d_max = None
.type = float
label = None
.type = str
"""
def run(mtzin, params):
arrays = iotbx.file_reader.any_file(mtzin).file_server.miller_arrays
if len(arrays) == 1:
array = arrays[0]
else:
sel = filter(lambda x: x.info().label_string()==params.label, arrays)
if len(sel) != 1:
print "Possible choices for label=:"
for a in arrays:
print " %s" % a.info().label_string()
return
else:
array = sel[0]
array = array.resolution_filter(d_min=params.d_min, d_max=params.d_max)
print flex.mean(array.data().as_double())
if __name__ == "__main__":
import sys
cmdline = iotbx.phil.process_command_line(args=sys.argv[1:],
master_string=master_params_str)
params = cmdline.work.extract()
args = cmdline.remaining_args
run(args[0], params)
```
#### File: yamtbx/dataproc/adxv.py
```python
import socket
import subprocess
import time
import os
import getpass
import tempfile
class Adxv:
def __init__(self, adxv_bin=None, no_adxv_beam_center=True):
self.adxv_bin = adxv_bin
self.no_adxv_beam_center = no_adxv_beam_center
if self.adxv_bin is None: self.adxv_bin = "adxv"
self.adxv_proc = None # subprocess object
self.adxv_port = 8100 # adxv's default port. overridden later.
self.sock = None
self.spot_type_counter = -1
# __init__()
def start(self, cwd=None):
adxv_comm = self.adxv_bin + " -socket %d"
if self.no_adxv_beam_center: adxv_comm += " -no_adxv_beam_center"
if not self.is_alive():
# find available port number
sock_test = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock_test.bind(("localhost", 0))
self.adxv_port = sock_test.getsockname()[1]
sock_test.close()
# start adxv
self.adxv_proc = subprocess.Popen(adxv_comm%self.adxv_port, shell=True, cwd=cwd)
for i in xrange(10): # try for 5 seconds.
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # On OSX(?), need to re-create object when failed
self.sock.connect(("localhost", self.adxv_port))
break
except socket.error:
time.sleep(.5)
continue
# start()
def is_alive(self):
return self.adxv_proc is not None and self.adxv_proc.poll() is None # None means still running.
# is_alive()
def open_image(self, imgfile, raise_window=True):
self.start(cwd=os.path.dirname(imgfile))
sent = self.sock.send("load_image %s\n"%imgfile)
if sent == 0:
raise RuntimeError("adxv load failed! Close adxv and double-click again.")
if raise_window:
sent = self.sock.send("raise_window Control\n") # raise_window is available from adxv 1.9.9
sent = self.sock.send("raise_window Image\n")
#sock.close()
# open_image()
def open_hdf5(self, h5file, frameno_or_path, tmpdir=None, raise_window=True, binning=1):
from yamtbx.dataproc import eiger
if tmpdir is None:
tmpdir = "/dev/shm" if os.path.isdir("/dev/shm") else tempfile.gettempdir()
imgfile = os.path.join(tmpdir, "adxvtmp-%s-%s.cbf"%(getpass.getuser(), os.getpid()))
eiger.extract_to_minicbf(h5file, frameno_or_path, imgfile, binning=binning)
self.open_image(imgfile, raise_window=raise_window)
# open_hdf5()
def define_spot(self, color, radius=0, box=0):
self.spot_type_counter += 1
sent = self.sock.send("box %d %d\n" % (box,box)) # seems ignored?
sent = self.sock.send("define_type %d color %s radius %d\n"%(self.spot_type_counter, color, radius))
print sent
if sent == 0:
print "define_spot failed!"
sent = self.sock.send("box 20 20\n")
return self.spot_type_counter
# define_spot()
def load_spots(self, spots):
if len(spots) == 0:
return
sent = self.sock.send("load_spots %d\n" % len(spots))
for x, y, t in spots:
sent = self.sock.send("%.2f %.2f %d\n" % (x, y, t))
sent = self.sock.send("end_of_pack\n")
# load_spots()
# class Adxv
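# A minimal usage sketch (file paths and pixel coordinates below are hypothetical):
#   adxv = Adxv()                                   # or Adxv(adxv_bin="/path/to/adxv")
#   adxv.open_image("/data/sample_000001.img")      # starts adxv on first use, then loads the image
#   spot_type = adxv.define_spot("red", radius=5)
#   adxv.load_spots([(1024.0, 980.5, spot_type)])   # (x, y, spot type id) in image pixels
#   adxv.open_hdf5("/data/sample_master.h5", 1)     # extracts a frame to a temporary CBF and shows it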
```
#### File: auto/command_line/merge_single_images_integrated.py
```python
from yamtbx.dataproc.xds import xds_ascii
from yamtbx.dataproc.crystfel import hkl as crystfel_hkl
from yamtbx.util import read_path_list
import yamtbx_utils_ext
from cctbx import miller
from cctbx import crystal
from cctbx import sgtbx
from cctbx.array_family import flex
from libtbx import easy_mp
import iotbx.scalepack.merge
import iotbx.phil
import libtbx.phil
import os
import sys
import numpy
import random
import multiprocessing
import threading
import cPickle as pickle
master_params_str = """\
lstin = None
.type = path
dials_phil = None
.type = path
dials_phil_selection_lst = None
.type = path
.help = can be used when only a subset of dials_phil= should be merged. Give a list of reflection files
start_after = None
.type = int
stop_after = None
.type = int
method = *mc xscale
.type = choice(multi=False)
.help = mc: Monte-Carlo (same as CrystFEL) or xscale
nproc = 1
.type = int(value_min=1)
usecell = *given mean median
.type = choice(multi=False)
unit_cell = None
.type = floats(size=6)
space_group = None
.type = str
anomalous_flag = true
.type = bool
sigma_calculation = *population experimental
.type = choice(multi=False)
min_peak = None
.type = float
min_peak_percentile = None
.type = float
dials_data = *prf sum
.type = choice(multi=False)
skip_rejected = true
.type = bool
.help = skip data with negative sigmas
correct_peak = false
.type = bool
.help = If true, intensity will be multiplied by PEAK
cancel_rlp = false
.type = bool
.help = divide intensities by RLP
calc_split = false
.type = bool
split_method = *alternate random
.type = choice(multi=False)
random_seed = 1234
.type = int
output = *mtz *sca *crystfel
.type = choice(multi=True)
prefix = merged
.type = str
.help = output prefix
dmin = None
.type = float
dmax = None
.type = float
scaling {
parameter = k kb
.type = choice(multi=False)
bscale_ref = min mean max *asis
.type = choice(multi=False)
.help = when min, all data will be scaled down.
reference = None
.type = path
.help = scaling reference. if None, two-pass scaling.
reference_label = None
.type = str
calc_cc = false
.type = bool
}
polarization {
correct = false
.type = bool
plane_normal = 0,1,0
.type = floats(size=3)
incident_beam_direction = 0,0,1
.type = floats(size=3)
fraction = 0.99
.type = float
}
"""
def read_list(lstin):
ret = []
for l in open(lstin):
if "#" in l: l = l[:l.index("#")]
l = l.strip()
if l == "": continue
if not os.path.isfile(l):
print "WARNING:: not exists: %s" % l
continue
ret.append(l)
return ret
# read_list()
def read_dials_phil(phil_in, dials_phil_selection_lst):
master_str = """\
input {
experiments = None
.type = path
.multiple = true
reflections = None
.type = path
.multiple = true
}
"""
params = iotbx.phil.process_command_line(args=[phil_in], master_string=master_str).work.extract()
assert len(params.input.experiments) == len(params.input.reflections)
assert all(map(lambda x: os.path.isfile(x), params.input.experiments))
assert all(map(lambda x: os.path.isfile(x), params.input.reflections))
ret = zip(params.input.experiments, params.input.reflections)
if dials_phil_selection_lst:
selected_files = set(read_path_list(dials_phil_selection_lst))
ret = filter(lambda x: x[1] in selected_files, ret)
return ret
# read_dials_phil()
#@profile
def get_data_from_xac(params, xac):
if xac.endswith(".pkl"):
tmp = pickle.load(open(xac))
else:
tmp = xds_ascii.XDS_ASCII(xac)
sel_remove = flex.bool(tmp.iobs.size(), False)
if params.min_peak is not None:
sel = tmp.peak < params.min_peak
sel_remove |= sel
elif params.min_peak_percentile is not None:
q = numpy.percentile(tmp.peak, params.min_peak_percentile)
print "percentile %.2f %s" % (q, xac)
sel = tmp.peak < q
sel_remove |= sel
if params.skip_rejected: sel_remove |= (tmp.sigma_iobs <= 0)
if params.dmin is not None:
sel_remove |= ~tmp.as_miller_set().resolution_filter_selection(d_min=params.dmin)
if params.correct_peak:
sel_remove |= (tmp.peak < 1) # remove PEAK==0
# Remove selected
print "DEBUG:: removing %d reflections" % sel_remove.count(True) #sum(sel_remove)#
tmp.remove_selection(sel_remove)
if not params.skip_rejected: tmp.sigma_iobs = flex.abs(tmp.sigma_iobs)
# Correct I,sigI if needed
if params.correct_peak:
tmp.iobs *= tmp.peak * .01
tmp.sigma_iobs *= tmp.peak * .01
if params.cancel_rlp:
tmp.iobs /= tmp.rlp
tmp.sigma_iobs /= tmp.rlp
if params.polarization.correct:
# Only works with single-panel detector!!
# Assumes detector fast = (1,0,0), slow = (0,1,0)
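# Polarization factor for a (partially) polarized incident beam: P = P0 - PP with
# P0 = (1 + cos^2 2theta)/2 and PP depending on the polarization fraction and on the azimuth of
# the diffracted beam relative to the polarization plane; intensities and sigmas are divided by P.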
sin_sq_2theta = tmp.symm.unit_cell().sin_sq_two_theta(tmp.indices, tmp.wavelength)
cos_sq_2theta = 1. - sin_sq_2theta
sin_theta = tmp.wavelength / tmp.symm.unit_cell().d(tmp.indices) / 2.
Eppi = numpy.cross(params.polarization.plane_normal, params.polarization.incident_beam_direction)
Eppi /= numpy.linalg.norm(Eppi)
S = flex.vec3_double(tmp.xd - tmp.orgx, tmp.yd - tmp.orgy,
flex.double(tmp.xd.size(), tmp.distance/tmp.qx))
S /= S.norms()
zp = S.dot(Eppi.tolist()) * 2. * sin_theta
cosrho = zp / flex.sqrt(sin_sq_2theta)
P0 = 0.5 * (1. + cos_sq_2theta)
PP = (params.polarization.fraction - 0.5) * (2.*cosrho**2 - 1.) * sin_sq_2theta
P = P0 - PP
# Apply correction
tmp.iobs /= P
tmp.sigma_iobs /= P
if 0: # debug
for x, y, p in zip(tmp.xd, tmp.yd, P):
print "pdebug:: %.2f %.2f %.4e" % (x, y, p)
return tmp.i_obs().customized_copy(anomalous_flag=params.anomalous_flag,
space_group_info=sgtbx.space_group_info(params.space_group))
# get_data_from_xac()
def get_data_from_dials(params, files):
from dials.util.options import Importer, flatten_reflections, flatten_experiments
importer = Importer(files, read_experiments=True, read_reflections=True)
reflections = flatten_reflections(importer.reflections)
experiments = flatten_experiments(importer.experiments)
assert len(reflections) == len(experiments) == 1
xs = crystal.symmetry(experiments[0].crystal.get_unit_cell(), space_group=experiments[0].crystal.get_space_group())
tmp = reflections[0].select(reflections[0]["id"] >= 0)
assert max(tmp["id"]) == 0
if params.dials_data == "sum":
assert "intensity.sum.value" in tmp
assert "intensity.sum.variance" in tmp
else:
assert "intensity.prf.value" in tmp
assert "intensity.prf.variance" in tmp
intensity_key = "intensity.sum.value" if params.dials_data == "sum" else "intensity.prf.value"
variance_key = "intensity.sum.variance" if params.dials_data == "sum" else "intensity.prf.variance"
sel_remove = flex.bool(tmp.size(), False)
if params.min_peak is not None:
sel = tmp["partiality"] < params.min_peak/100.
sel_remove |= sel
elif params.min_peak_percentile is not None:
q = numpy.percentile(tmp["partiality"], params.min_peak_percentile)
print "percentile %.2f %s" % (q*100., xac)
sel = tmp["partiality"] < q
sel_remove |= sel
if params.skip_rejected: sel_remove |= tmp[variance_key] <= 0
if params.dmin is not None:
sel_remove |= xs.unit_cell().d(tmp["miller_index"]) < params.dmin
if params.correct_peak:
sel_remove |= (tmp["partiality"] < .01) # remove PEAK==0
# Remove selected
print "DEBUG:: removing %d reflections" % sel_remove.count(True) #sum(sel_remove)#
tmp = tmp.select(~sel_remove)
ret = miller.array(miller.set(xs, tmp["miller_index"], params.anomalous_flag),
data=tmp[intensity_key],
sigmas=flex.sqrt(flex.abs(tmp[variance_key])))
scale = flex.double(ret.size(), 1.)
if not params.cancel_rlp and "lp" in tmp: scale *= tmp["lp"]
if "dqe" in tmp: scale /= tmp["dqe"]
if params.correct_peak: scale *= tmp["partiality"]
ret = ret.apply_scaling(factor=scale)
if params.cancel_rlp and params.polarization.correct:
raise "Not implemented"
return ret
# get_data_from_dials()
#@profile
def scale_data(indices, iobs, scale_ref, parameter, calc_cc):
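# Scale one image's intensities onto the reference: with parameter="k" the linear scale is the
# least-squares solution of min_k sum (I_ref - k*I_obs)^2 over the common reflections, i.e.
#   k = sum(I_ref * I_obs) / sum(I_obs^2);
# with parameter="kb" a resolution-dependent scale k*exp(-B*d*^2) is determined by kBdecider.
# Optionally the correlation coefficient over the common reflections is also reported.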
k, b, cc = 1, float("nan"), float("nan")
sortp = yamtbx_utils_ext.sort_permutation_fast_less(indices)
indices = indices.select(sortp)
iobs = iobs.select(sortp)
sel0, sel1 = yamtbx_utils_ext.my_common_indices(scale_ref.indices(), indices)
#indices = indices.select(sel1)
iobs_c = iobs.select(sel1)
ref_c = scale_ref.data().select(sel0)
if iobs_c.size() < 10 and ref_c.size() < 10:
return k, b, cc
if parameter == "k":
k = flex.sum(ref_c*iobs_c) / flex.sum(flex.pow2(iobs_c))
elif parameter == "kb":
from yamtbx.dataproc.scale_data import kBdecider
kbd = kBdecider(scale_ref,
miller.array(scale_ref.customized_copy(indices=indices),data=iobs))
k, b = kbd.run()
else:
raise "Never reaches here"
if calc_cc:
corr = flex.linear_correlation(ref_c, iobs_c)
if corr.is_well_defined(): cc = corr.coefficient()
return k, b, cc
# scale_data()
#@profile
def mc_integration(params, input_files, input_type, scale_ref=None, split_idxes=None):
"""
split_idxes: [i]->{0,1} if index given, returns 0 or 1, the group indicator.
"""
import yamtbx_dataproc_crystfel_ext
assert input_type in ("xds_ascii", "dials")
sgtype = sgtbx.space_group_info(params.space_group).type()
manager = multiprocessing.Manager()
scale_strs = manager.list()
def load_xds_or_dials(i, file_in, scout):
print "Reading %5d %s" %(i, file_in if input_type=="xds_ascii" else file_in[1])
if input_type=="xds_ascii":
tmp = get_data_from_xac(params, file_in)
else:
tmp = get_data_from_dials(params, file_in)
miller.map_to_asu(sgtype,
params.anomalous_flag,
tmp.indices())
k, b = None, None
if scale_ref is not None:
k, b, cc = scale_data(tmp.indices(), tmp.data(), scale_ref, params.scaling.parameter, params.scaling.calc_cc)
scout.append("%s %.4e %.4e %.4f\n" % (file_in if input_type=="xds_ascii" else file_in[1], k, b, cc))
#tmp.iobs *= k
#tmp.sigma_iobs *= k
#if b == b: # nan if not calculated
# d_star_sq = tmp.symm.unit_cell().d_star_sq(tmp.indices)
# tmp.iobs *= flex.exp(-b*d_star_sq)
# tmp.sigma_iobs *= flex.exp(-b*d_star_sq)
return i, tmp, k, b
# load_xds_or_dials()
if params.nproc > 1:
# It may cost much memory..
xds_data = easy_mp.pool_map(fixed_func=lambda x: load_xds_or_dials(x[0],x[1], scale_strs),
args=map(lambda x:x, enumerate(input_files)),
processes=params.nproc)
else:
# create generator
xds_data = (load_xds_or_dials(i, x, scale_strs) for i, x in enumerate(input_files))
merger = yamtbx_dataproc_crystfel_ext.merge_equivalents_crystfel()
merger_split = None
if split_idxes is not None:
merger_split= map(lambda x: yamtbx_dataproc_crystfel_ext.merge_equivalents_crystfel(), xrange(2))
print "Start merging"
cells = []
bs = [] # b-factor list
bs_split = [[], []] # b-factor list for split data
for i, x, k, b in xds_data:
sys.stdout.write("Merging %7d\r" % (i+1))
sys.stdout.flush()
data, sigmas = x.data(), x.sigmas()
if None not in (k,b):
data *= k
sigmas *= k
if b == b: # nan if not calculated
d_star_sq = x.unit_cell().d_star_sq(x.indices())
data *= flex.exp(-b*d_star_sq)
sigmas *= flex.exp(-b*d_star_sq)
bs.append(b)
if params.sigma_calculation == "population":
merger.add_observations(x.indices(), data)
else: # experimental
merger.add_observations(x.indices(), data, sigmas)
cells.append(x.unit_cell().parameters())
if split_idxes is not None:
if b is not None and b==b: bs_split[split_idxes[i]].append(b)
if params.sigma_calculation == "population":
merger_split[split_idxes[i]].add_observations(x.indices(), data)
else: # experimental
merger_split[split_idxes[i]].add_observations(x.indices(), data, sigmas)
print "\nDone."
if scale_ref is not None:
scales_out = open(params.prefix+"_scales.dat", "w")
print >>scales_out, "file k b cc"
for s in scale_strs: scales_out.write(s)
# Merge
if params.sigma_calculation == "population":
merger.merge()
else: # experimental
merger.merge(sigma="experimental sigma")
# Construct arrays
cells = numpy.array(cells)
if params.usecell=="mean":
cell = map(lambda i: cells[:,i].mean(), xrange(6))
elif params.usecell=="median":
cell = map(lambda i: numpy.median(cells[:,i]), xrange(6))
else:
cell = params.unit_cell
miller_set = miller.set(crystal_symmetry=crystal.symmetry(cell, params.space_group),
indices=merger.indices,
anomalous_flag=params.anomalous_flag)
iobs = miller.array(miller_set, data=merger.data, sigmas=merger.sigmas).set_observation_type_xray_intensity()
reds = miller.array(miller_set, data=merger.redundancies)
sel = iobs.resolution_filter_selection(d_min=params.dmin)
iobs = iobs.select(sel)
reds = reds.select(sel)
if len(bs) > 0 and params.scaling.bscale_ref != "asis":
if params.scaling.bscale_ref == "mean": bref = sum(bs)/float(len(bs))
elif params.scaling.bscale_ref == "min": bref = min(bs)
elif params.scaling.bscale_ref == "max": bref = max(bs)
else: raise RuntimeError("Never reaches here")
print "Changing B-factor reference to %s (apply %.3f to all)" % (params.scaling.bscale_ref, -bref)
scale = flex.exp(bref*iobs.unit_cell().d_star_sq(iobs.indices()))
iobs = iobs.customized_copy(data=iobs.data()*scale,
sigmas=iobs.sigmas()*scale)
extra = []
if split_idxes is not None:
for m, bs in zip(merger_split, bs_split):
if params.sigma_calculation == "population":
m.merge()
else: # experimental
m.merge(sigma="experimental sigma")
miller_set = miller.set(crystal_symmetry=crystal.symmetry(cell, params.space_group),
indices=m.indices,
anomalous_flag=params.anomalous_flag)
a = miller.array(miller_set, data=m.data, sigmas=m.sigmas).set_observation_type_xray_intensity()
r = miller.array(miller_set, data=m.redundancies)
if len(bs) > 0 and params.scaling.bscale_ref != "asis":
if params.scaling.bscale_ref == "mean": bref = sum(bs)/float(len(bs))
elif params.scaling.bscale_ref == "min": bref = min(bs)
elif params.scaling.bscale_ref == "max": bref = max(bs)
else: raise RuntimeError("Never reaches here")
print "Changing B-factor reference to %s (apply %.3f to all) for split data" % (params.scaling.bscale_ref, -bref)
scale = flex.exp(bref*a.unit_cell().d_star_sq(a.indices()))
a = a.customized_copy(data=a.data()*scale,
sigmas=a.sigmas()*scale)
extra.append((a,r))
return iobs, reds, extra
# mc_integration()
def write_out(params, array, redundancies, suffix=None):
prefix = params.prefix
if suffix is not None: prefix += suffix
if "sca" in params.output:
iotbx.scalepack.merge.write(file_name="%s.sca" % prefix,
miller_array=array,
scale_intensities_for_scalepack_merge=True)
if "mtz" in params.output and array.size() > 0:
mtz_dataset = array.as_mtz_dataset(column_root_label="I")
mtz_dataset.add_miller_array(miller_array=redundancies, column_root_label="MULT")
mtz_object = mtz_dataset.mtz_object()
mtz_object.write(file_name="%s.mtz" % prefix)
if "crystfel" in params.output:
crystfel_hkl.write_file(filename="%s.hkl" % prefix,
i_obs=array, nmeas=redundancies)
# write_out()
#@profile
def run(params):
libtbx.phil.parse(master_params_str).format(params).show(prefix=" ")
print
if params.space_group is None:
print "Give space_group!"
quit()
if params.usecell == "given" and params.unit_cell is None:
print "Give unit_cell if usecell=given! Otherwise give usecell=mean"
quit()
if params.lstin:
input_files = read_list(params.lstin)
input_type = "xds_ascii"
elif params.dials_phil:
input_files = read_dials_phil(params.dials_phil, params.dials_phil_selection_lst)
input_type = "dials"
else:
print "Give lstin= or dials_phil="
quit()
if params.start_after is not None:
input_files = input_files[params.start_after:]
if params.stop_after is not None:
input_files = input_files[:params.stop_after]
print "%3d %s files will be merged" % (len(input_files), input_type)
scale_ref = None
# Limitations
if params.scaling.reference is not None:
scale_ref = None
raise "Sorry, not supported."
random.seed(params.random_seed)
# For split stats
split_idxes = None
if params.calc_split:
split_idxes = ([0,1]*(len(input_files)//2+1))[:len(input_files)]
if params.split_method == "alternate":
pass
elif params.split_method == "random":
random.shuffle(split_idxes)
else:
raise "Never reaches here"
ofs = open(params.prefix+"_split_idxes.dat", "w")
for i, f in zip(split_idxes, input_files): ofs.write("%d %s\n"%(i,f if input_type=="xds_ascii" else f[1]))
ofs.close()
if params.method == "mc":
iobs, reds, sp = mc_integration(params, input_files, input_type, scale_ref, split_idxes)
if params.scaling.parameter is not None and params.scaling.reference is None:
write_out(params, iobs, reds, suffix="_beforescale")
if params.calc_split:
for i, s in enumerate(sp):
write_out(params, s[0], s[1], suffix="_beforescale_sp%d"%(i+1))
print "Second pass with scaling"
sortp = yamtbx_utils_ext.sort_permutation_fast_less(iobs.indices())
iobs = iobs.select(sortp)
iobs, reds, sp = mc_integration(params, input_files, input_type, iobs, split_idxes)
elif params.method == "xscale":
raise "Not supported yet"
else:
raise "Never reaches here"
write_out(params, iobs, reds)
if "crystfel" in params.output:
import iotbx.pdb
s = iotbx.pdb.format_cryst1_record(iobs)
open(params.prefix+"_cell.pdb", "w").write(s)
if params.calc_split:
for i, s in enumerate(sp):
write_out(params, s[0], s[1], suffix="_sp%d"%(i+1))
# run()
def run_from_args(argv):
cmdline = iotbx.phil.process_command_line(args=argv,
master_string=master_params_str)
params = cmdline.work.extract()
run(params)
if __name__ == "__main__":
import sys
run_from_args(sys.argv[1:])
```
#### File: auto/command_line/multi_check_cell_consistency.py
```python
import iotbx.phil
from cctbx import uctbx
from cctbx import sgtbx
from cctbx import crystal
from cctbx.crystal import reindex
from cctbx.uctbx.determine_unit_cell import NCDist
from cctbx.sgtbx import pointgroup_tools
from yamtbx.dataproc.xds.xparm import XPARM
from yamtbx.dataproc.xds.xds_ascii import XDS_ASCII
from yamtbx.dataproc import pointless
from yamtbx.dataproc.xds import correctlp
from yamtbx.dataproc.dials.command_line import run_dials_auto
from yamtbx import util
from yamtbx.util import xtal
import os
import sys
import networkx as nx
import numpy
master_params_str = """
topdir = None
.type = path
xdsdir = None
.type = path
.multiple = true
.help = Either topdir= or (multiple) xdsdir= should be specified.
tol_length = 0.1
.type = float
.help = relative_length_tolerance
tol_angle = 5
.type = float
.help = absolute_angle_tolerance in degree
do_pointless = False
.type = bool
.help = Run pointless for largest group data to determine symmetry
"""
class CellGraph:
def __init__(self, tol_length=None, tol_angle=None):
self.tol_length = tol_length if tol_length else 0.1
self.tol_angle = tol_angle if tol_angle else 5
self.G = nx.Graph()
self.p1cells = {} # key->p1cell
self.dirs = {} # key->xdsdir
self.symms = {} # key->symms
self.cbops = {} # (key1,key2) = cbop
# __init__()
def get_p1cell_and_symm(self, xdsdir):
dials_hkl = os.path.join(xdsdir, "DIALS.HKL")
xac_file = util.return_first_found_file(("XDS_ASCII.HKL", "XDS_ASCII.HKL.org",
"XDS_ASCII_fullres.HKL.org", "XDS_ASCII_fullres.HKL",
"XDS_ASCII.HKL_noscale.org", "XDS_ASCII.HKL_noscale"),
wd=xdsdir)
p1cell, xs = None, None
if xac_file:
correct_lp = util.return_first_found_file(("CORRECT.LP_noscale", "CORRECT.LP"), wd=xdsdir)
if not correct_lp:
print "CORRECT.LP not found in %s" % xdsdir
return None, None
p1cell = correctlp.get_P1_cell(correct_lp, force_obtuse_angle=True)
try:
xac = XDS_ASCII(xac_file, read_data=False)
except:
print "Invalid XDS_ASCII format:", xac_file
return None, None
xs = xac.symm
elif os.path.isfile(dials_hkl): # DIALS
xs = run_dials_auto.get_most_possible_symmetry(xdsdir)
if xs is None:
print "Cannot get crystal symmetry:", xdsdir
return None, None
p1cell = list(xs.niggli_cell().unit_cell().parameters())
# force obtuse angle
tmp = map(lambda x: (x[0]+3,abs(90.-x[1])), enumerate(p1cell[3:])) # Index and difference from 90 deg
tmp.sort(key=lambda x: x[1], reverse=True)
if p1cell[tmp[0][0]] < 90:
tmp = map(lambda x: (x[0]+3,90.-x[1]), enumerate(p1cell[3:])) # Index and 90-val.
tmp.sort(key=lambda x: x[1], reverse=True)
for i,v in tmp[:2]: p1cell[i] = 180.-p1cell[i]
p1cell = uctbx.unit_cell(p1cell)
return p1cell, xs
# get_p1cell_and_symm()
def add_proc_result(self, key, xdsdir):
if key in self.G: return #G.remove_node(key)
p1cell, symm = self.get_p1cell_and_symm(xdsdir)
if None in (p1cell, symm): return
self.p1cells[key] = p1cell
self.dirs[key] = xdsdir
self.symms[key] = symm
connected_nodes = []
for node in list(self.G.nodes()):
other_cell = self.p1cells[node]
if other_cell.is_similar_to(p1cell, self.tol_length, self.tol_angle):
connected_nodes.append(node)
else:
cosets = reindex.reindexing_operators(crystal.symmetry(other_cell, 1),
crystal.symmetry(p1cell, 1),
self.tol_length, self.tol_angle)
if cosets.double_cosets is not None:
self.cbops[(node,key)] = cosets.combined_cb_ops()[0]
print p1cell, other_cell, self.cbops[(node,key)], other_cell.change_basis(self.cbops[(node,key)])
connected_nodes.append(node)
# Add nodes and edges
self.G.add_node(key)
for node in connected_nodes:
self.G.add_edge(node, key)
# add_proc_result()
def _transformed_cells(self, keys):
cells = [self.p1cells[keys[0]].parameters()]
for key in keys[1:]:
cell = self.p1cells[key]
if (keys[0], key) in self.cbops:
cell = cell.change_basis(self.cbops[(keys[0], key)])
elif (key, keys[0]) in self.cbops:
cell = cell.change_basis(self.cbops[(key, keys[0])].inverse()) # correct??
cells.append(cell.parameters())
return cells
# _transformed_cells()
def _average_p1_cell(self, keys):
cells = numpy.array(self._transformed_cells(keys))
return map(lambda i: cells[:,i].mean(), xrange(6))
# _average_p1_cell()
def group_xds_results(self, out, show_details=True):
print >>out, "Making groups from %d results\n" % len(self.p1cells) # Show total and failed!!
self.groups = map(lambda g: list(g), nx.connected_components(self.G))
self.groups.sort(key=lambda x:-len(x))
self.grouped_dirs = []
self.reference_symmetries = []
#details_str = "group file a b c al be ga\n"
#ofs_debug = open("cell_debug.dat", "w")
#ofs_debug.write("group xdsdir a b c al be ga\n")
for i, keys in enumerate(self.groups):
self.reference_symmetries.append([])
avg_cell = uctbx.unit_cell(self._average_p1_cell(keys))
print >>out, "[%2d]"%(i+1), len(keys), "members:"
print >>out, " Averaged P1 Cell=", " ".join(map(lambda x:"%.2f"%x, avg_cell.parameters()))
#from yamtbx.util.xtal import format_unit_cell
#for xd, uc in zip(map(lambda k:self.dirs[k], keys), self._transformed_cells(keys)):
# ofs_debug.write("%3d %s %s\n" % (i, xd, format_unit_cell(uc)))
#print >>out, " Members=", keys
if show_details:
# by explore_metric_symmetry
sg_explorer = pointgroup_tools.space_group_graph_from_cell_and_sg(avg_cell, sgtbx.space_group_info("P1").group(), max_delta=10)
tmp = []
for obj in sg_explorer.pg_graph.graph.node_objects.values():
pg = obj.allowed_xtal_syms[0][0].space_group().build_derived_reflection_intensity_group(True).info()
cbop = obj.allowed_xtal_syms[0][1]
trans_cell = avg_cell.change_basis(cbop)
if pg.group() == sgtbx.space_group_info("I2").group():
print >>out, "Warning!! I2 cell was given." # this should not happen..
# Transform to best cell
fbc = crystal.find_best_cell(crystal.symmetry(trans_cell, space_group_info=pg,
assert_is_compatible_unit_cell=False),
best_monoclinic_beta=False) # If True, C2 may result in I2..
cbop = fbc.cb_op() * cbop
trans_cell = trans_cell.change_basis(fbc.cb_op())
#print "debug:: op-to-best-cell=", fbc.cb_op()
# If beta<90 in monoclinic system, force it to have beta>90
if pg.group().crystal_system() == "Monoclinic" and trans_cell.parameters()[4] < 90:
op = sgtbx.change_of_basis_op("-h,-k,l")
cbop = op * cbop
trans_cell = trans_cell.change_basis(op)
tmp.append([0, pg, trans_cell, cbop, pg.type().number()])
# Calculate frequency
for pgnum in set(map(lambda x: x[-1], tmp)):
sel = filter(lambda x: tmp[x][-1]==pgnum, xrange(len(tmp)))
pgg = tmp[sel[0]][1].group()
if len(sel) == 1:
freq = len(filter(lambda x: self.symms[x].space_group().build_derived_reflection_intensity_group(True) == pgg, keys))
tmp[sel[0]][0] = freq
else:
trans_cells = map(lambda x: numpy.array(tmp[x][2].parameters()), sel)
for key in keys:
if self.symms[key].space_group().build_derived_reflection_intensity_group(True) != pgg: continue
cell = numpy.array(self.symms[key].unit_cell().parameters())
celldiffs = map(lambda tc: sum(abs(tc-cell)), trans_cells)
min_key = celldiffs.index(min(celldiffs))
tmp[sel[min_key]][0] += 1
print >>out, " Possible symmetries:"
print >>out, " freq symmetry a b c alpha beta gamma reindex"
for freq, pg, trans_cell, cbop, pgnum in sorted(tmp, key=lambda x:x[-1]):
print >> out, " %4d %-10s %s %s" % (freq, pg, " ".join(map(lambda x:"%6.2f"%x, trans_cell.parameters())), cbop)
self.reference_symmetries[i].append((pg, trans_cell, freq))
print >>out, ""
dirs = map(lambda x: self.dirs[x], keys)
self.grouped_dirs.append(dirs)
# group_xds_results()
def get_reference_symm(self, group_idx, rs_idx):
# XXX should be able to specify space group with screws
if group_idx >= len(self.reference_symmetries):
return None
if rs_idx >= len(self.reference_symmetries[group_idx]):
return None
pg, cell, freq = self.reference_symmetries[group_idx][rs_idx]
return crystal.symmetry(cell,
space_group_info=pg,
assert_is_compatible_unit_cell=False)
# get_reference_symm()
def get_selectable_symms(self, group_idx):
if group_idx >= len(self.reference_symmetries):
return []
return self.reference_symmetries[group_idx]
# get_selectable_symms()
def get_most_frequent_symmetry(self, group_idx):
# Should call after self.group_xds_results()
symms = filter(lambda x: x[2]>0, self.reference_symmetries[group_idx])
symms.sort(key=lambda x: x[2], reverse=True)
if len(symms) == 0: return None
if len(symms) > 1 and symms[0][0].group() == sgtbx.space_group_info("P1").group():
return crystal.symmetry(symms[1][1], space_group_info=symms[1][0],
assert_is_compatible_unit_cell=False)
else:
return crystal.symmetry(symms[0][1], space_group_info=symms[0][0],
assert_is_compatible_unit_cell=False)
# get_most_frequent_symmetry()
def get_symmetry_reference_matched(self, group_idx, ref_cs):
ref_pg = ref_cs.space_group().build_derived_reflection_intensity_group(True)
ref_cell = ref_cs.unit_cell()
symms = filter(lambda x: x[0].group()==ref_pg, self.reference_symmetries[group_idx])
if len(symms) == 0: return None
if len(symms) > 1:
# TODO if different too much?
celldiffs = map(lambda s: s[1].bases_mean_square_difference(ref_cell), symms)
min_idx = celldiffs.index(min(celldiffs))
return crystal.symmetry(symms[min_idx][1], space_group_info=symms[min_idx][0],
assert_is_compatible_unit_cell=False)
else:
return crystal.symmetry(symms[0][1], space_group_info=symms[0][0],
assert_is_compatible_unit_cell=False)
# get_symmetry_reference_matched()
def get_group_symmetry_reference_matched(self, ref_cs):
ref_v6 = xtal.v6cell(ref_cs.niggli_cell().unit_cell())
ncdists = []
for i, keys in enumerate(self.groups):
v6 = xtal.v6cell(uctbx.unit_cell(self._average_p1_cell(keys)).niggli_cell())
ncdists.append(NCDist(v6, ref_v6))
print "Group %d: NCDist to reference: %f" % (i+1, ncdists[-1])
return ncdists.index(min(ncdists))+1
# get_group_symmetry_reference_matched()
def is_all_included(self, keys):
all_nodes = set(self.G.nodes())
return all_nodes.issuperset(keys)
# is_all_included()
def get_subgraph(self, keys):
copied_obj = CellGraph(self.tol_length, self.tol_angle)
copied_obj.G = self.G.subgraph(keys)
copied_obj.p1cells = dict((k, self.p1cells[k]) for k in keys)
copied_obj.dirs = dict((k, self.dirs[k]) for k in keys)
copied_obj.symms = dict((k, self.symms[k]) for k in keys)
copied_obj.cbops = dict((k, self.cbops[k]) for k in self.cbops if k[0] in keys or k[1] in keys) # XXX may be slow
return copied_obj
# get_subgraph()
# class CellGraph
def run(params, out=sys.stdout):
cm = CellGraph(tol_length=params.tol_length, tol_angle=params.tol_angle)
if not params.xdsdir and params.topdir:
params.xdsdir = map(lambda x: x[0], filter(lambda x: any(map(lambda y: y.startswith("XDS_ASCII.HKL"), x[2])) or "DIALS.HKL" in x[2],
os.walk(params.topdir)))
for i, xdsdir in enumerate(params.xdsdir):
cm.add_proc_result(i, xdsdir)
cm.group_xds_results(out)
ret = cm.grouped_dirs
if len(ret) == 0:
return cm
print >>out
print >>out, "About the largest group:"
for idx, wd in enumerate(ret[0]):
xac_hkl = os.path.join(wd, "XDS_ASCII.HKL")
correct_lp = os.path.join(wd, "CORRECT.LP")
print >>out, "%.3d %s" % (idx, os.path.relpath(wd, params.topdir) if params.topdir is not None else wd),
if not os.path.isfile(xac_hkl):
print >>out, "Unsuccessful"
continue
sg = XDS_ASCII(xac_hkl, read_data=False).symm.space_group_info()
clp = correctlp.CorrectLp(correct_lp)
if "all" in clp.table:
cmpl = clp.table["all"]["cmpl"][-1]
else:
cmpl = float("nan")
ISa = clp.a_b_ISa[-1]
print >>out, "%10s ISa=%5.2f Cmpl=%5.1f " % (sg, ISa, cmpl)
if params.do_pointless:
worker = pointless.Pointless()
files = map(lambda x: os.path.join(x, "INTEGRATE.HKL"), ret[0])
#print files
files = filter(lambda x: os.path.isfile(x), files)
print >>out, "\nRunning pointless for the largest member."
result = worker.run_for_symm(xdsin=files,
logout="pointless.log",
tolerance=10, d_min=5)
if "symm" in result:
print >>out, " pointless suggested", result["symm"].space_group_info()
if 0:
import pylab
pos = nx.spring_layout(G)
#pos = nx.spectral_layout(G)
#pos = nx.circular_layout(G)
#nx.draw_networkx_nodes(G, pos, node_size = 100, nodelist=others, node_color = 'w')
nx.draw_networkx_nodes(G, pos, node_size = 100, node_color = 'w')
nx.draw_networkx_edges(G, pos, width = 1)
nx.draw_networkx_labels(G, pos, font_size = 12, font_family = 'sans-serif', font_color = 'r')
pylab.xticks([])
pylab.yticks([])
pylab.savefig("network.png")
pylab.show()
return cm
# run()
def run_from_args(argv):
cmdline = iotbx.phil.process_command_line(args=argv,
master_string=master_params_str)
params = cmdline.work.extract()
args = cmdline.remaining_args
for arg in args:
if os.path.isdir(arg) and params.topdir is None:
params.topdir = arg
if not params.xdsdir and params.topdir is None:
params.topdir = os.getcwd()
run(params)
# run_from_args()
if __name__ == "__main__":
import sys
run_from_args(sys.argv[1:])
```
#### File: dataproc/auto/resolution_cutoff.py
```python
import sys
from libtbx import slots_getstate_setstate
from iotbx import merging_statistics
from libtbx.utils import null_out
from libtbx import adopt_init_args
import numpy
def fun_ed_aimless(s, d0, r):
# CC1/2 fitting equation used in Aimless, suggested by Ed Pozharski
return 0.5 * (1 - numpy.tanh((s-d0)/r))
# fun_ed_aimless()
def resolution_fitted(d0, r, cchalf_min=0.5):
return 1./numpy.sqrt((numpy.arctanh(1.-2.*cchalf_min)*r + d0))
# resolution_fitted()
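# Hedged note (added illustration, not part of the original module): fun_ed_aimless() and
# resolution_fitted() are two views of the same sigmoid model, CC1/2(s) = 0.5*(1 - tanh((s-d0)/r))
# with s = 1/d^2. For example, with d0=0.1 and r=0.05 the curve crosses CC1/2=0.5 exactly at s=d0,
# so fun_ed_aimless(0.1, 0.1, 0.05) -> 0.5 and resolution_fitted(0.1, 0.05, 0.5) -> 1/sqrt(0.1) ~ 3.16 A.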
def fit_curve_for_cchalf(s2_list, cc_list, log_out, verbose=True):
import scipy.optimize
def fun(x, s, cc):
d0, r = x
return fun_ed_aimless(s,d0,r) - cc
# fun()
if not isinstance(s2_list, numpy.ndarray): s2_list = numpy.array(s2_list)
if not isinstance(cc_list, numpy.ndarray): cc_list = numpy.array(cc_list)
# Remove NaN
sel_notnan = (cc_list==cc_list)&(s2_list==s2_list)
s2_list = s2_list[sel_notnan]
cc_list = cc_list[sel_notnan]
# Check size here!!
if len(s2_list) < 3:
log_out.write(" Error! number of elements too small (%d)\n" % len(s2_list))
return float("nan"), float("nan")
x0 = [0.5*min(s2_list), 1.] #Initial d0, r
log_out.write(" Initial d0, r = %s\n" % x0)
lsq = scipy.optimize.least_squares(fun, x0, args=(s2_list, cc_list), loss="soft_l1", f_scale=.3)
#lsq = fit_ed(s_list, cc_list)
if verbose:
log_out.write(""" Least-square result:
message: %s
nfev: %s
njev: %s
optimality: %s
status: %s
success: %s
""" % (lsq.message, lsq.nfev, lsq.njev, lsq.optimality, lsq.status, lsq.success))
d0, r = lsq.x
log_out.write(" Final d0, r = %s\n" % lsq.x)
return lsq.x
# fit_curve_for_cchalf()
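# Hedged usage sketch (illustration only; names below are from this module):
#   s2 = numpy.linspace(0.01, 0.5, 30)
#   cc = fun_ed_aimless(s2, 0.2, 0.05)             # synthetic, noise-free CC1/2 curve
#   d0, r = fit_curve_for_cchalf(s2, cc, sys.stdout)
# On such data the robust (soft_l1) fit should recover d0 ~ 0.2 and r ~ 0.05.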
def initial_estimate_byfit_cchalf(i_obs, cc_half_min, anomalous_flag, log_out):
# Up to 200 bins. If few reflections, 50 reflections per bin. At least 9 shells.
n_bins = max(min(int(i_obs.size()/50. + .5), 200), 9)
log_out.write("Using %d bins for initial estimate\n" % n_bins)
i_obs.setup_binner(n_bins=n_bins)
bins = []
s_list, cc_list = [], []
for bin in i_obs.binner().range_used():
unmerged = i_obs.select(i_obs.binner().selection(bin))
try:
bin_stats = merging_statistics.merging_stats(unmerged,
anomalous=anomalous_flag)
s_list.append(1./bin_stats.d_min**2)
cc_list.append(bin_stats.cc_one_half)
except RuntimeError: # complains that no reflections left after sigma-filtering.
continue
d0, r = fit_curve_for_cchalf(s_list, cc_list, log_out)
shells_and_fit = (s_list, cc_list, (d0, r))
d_min = resolution_fitted(d0, r, cc_half_min)
return d_min, shells_and_fit
# initial_estimate_byfit_cchalf()
class estimate_resolution_based_on_cc_half:
def __init__(self, i_obs, cc_half_min, cc_half_tol, n_bins, anomalous_flag=False, log_out=null_out()):
adopt_init_args(self, locals())
log_out.write("estimate_resolution_based_on_cc_half: cc_half_min=%.4f, cc_half_tol=%.4f n_bins=%d\n" % (cc_half_min, cc_half_tol, n_bins))
self.d_min_data = i_obs.d_min()
self.shells_and_fit = ()
self.d_min, self.cc_at_d_min = self.estimate_resolution()
# __init__()
def show_plot(self, show=True, filename=None):
if not self.shells_and_fit: return
import matplotlib
matplotlib.use('Agg') # Allow to work without X
import pylab
from matplotlib.ticker import FuncFormatter
s2_formatter = lambda x,pos: "inf" if x == 0 else "%.2f" % (1./numpy.sqrt(x))
s_list, cc_list, (d0, r) = self.shells_and_fit
fitted = fun_ed_aimless(s_list, d0, r)
fig, ax1 = pylab.plt.subplots()
pylab.plot(s_list, cc_list, label="CC1/2", marker="o", color="blue")
pylab.plot(s_list, fitted, label="(1-tanh((s^2%+.2e)/%.2e))/2"%(-d0,r), marker=None, color="red")
pylab.legend()
pylab.xlabel("resolution (d^-2)")
pylab.ylabel("CC1/2")
pylab.gca().xaxis.set_major_formatter(FuncFormatter(s2_formatter))
if filename: pylab.savefig(filename)
if show: pylab.show()
# show_plot()
def cc_outer_shell(self, d_min):
binner = self.i_obs.setup_binner(d_min=d_min, n_bins=self.n_bins)
bin_stats = merging_statistics.merging_stats(self.i_obs.select(binner.selection(binner.range_used()[-1])), anomalous=False)
return bin_stats.cc_one_half
# cc_outer_shell()
def estimate_resolution(self):
if self.i_obs.size() == 0:
self.log_out.write("No reflections.\n")
return None, None
d_min, self.shells_and_fit = initial_estimate_byfit_cchalf(self.i_obs, self.cc_half_min, self.anomalous_flag, self.log_out)
d_min = float("%.2f"%d_min)
if d_min < self.d_min_data:
self.log_out.write("Warning: Initial estimate is higher than the limit of data (%.4f A < %.4f A)\n" %(d_min, self.d_min_data))
d_min = self.d_min_data
cc = self.cc_outer_shell(d_min)
self.log_out.write("Initial estimate: %.4f A (CC1/2= %.4f)\n" %(d_min, cc))
if cc >= self.cc_half_min and abs(cc - self.cc_half_min) < self.cc_half_tol:
return d_min, cc
if cc > self.cc_half_min + self.cc_half_tol:
upper_bound = d_min
lower_bound = d_min
for i in xrange(1,300):
if d_min-.01*i <= self.d_min_data: break
lower_bound = d_min-.01*i
cc = self.cc_outer_shell(lower_bound)
self.log_out.write(" CC1/2= %.4f at %.4f A\n" %(cc, d_min-.01*i))
if cc < self.cc_half_min:
break
else:
lower_bound = d_min
for i in xrange(1,300):
upper_bound = d_min+.01*i
cc = self.cc_outer_shell(upper_bound)
self.log_out.write(" CC1/2= %.4f at %.4f A\n" %(cc, d_min+.1*i))
if cc >= self.cc_half_min:
break
self.log_out.write(" Lower, Upper bound: %.4f, %.4f\n" %(lower_bound, upper_bound))
lb, ub = lower_bound, upper_bound
tmp_last = None
while True:
tmp = float("%.2f"%((lb+ub)/2.))
if tmp_last is not None and tmp == tmp_last: return ub, cc
tmp_last = tmp
cc = self.cc_outer_shell(tmp)
self.log_out.write(" CC1/2= %.4f at %.4f A\n" %(cc, tmp))
if cc < self.cc_half_min:
lb = tmp
elif (cc - self.cc_half_min) > self.cc_half_tol:
ub = tmp
else:
return tmp, cc
# estimate_resolution_based_on_cc_half()
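# Hedged usage sketch (illustration only; i_obs is an unmerged intensity miller array):
#   est = estimate_resolution_based_on_cc_half(i_obs, cc_half_min=0.5, cc_half_tol=0.03,
#                                              n_bins=9, log_out=sys.stdout)
#   print est.d_min, est.cc_at_d_min
#   est.show_plot(show=False, filename="cchalf_fit.png")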
class estimate_crude_resolution_cutoffs (slots_getstate_setstate) :
# Original code from iotbx/merging_statistics.py
"""
Uses incredibly simplistic criteria to determine the approximate
resolution limit of the data based on the merging statistics (using much
smaller bins than normal). Not really appropriate for routine use, but
useful for the pedantic and just-curious.
"""
cutoffs_attr = ["i_over_sigma_cut", "r_merge_cut", "r_meas_cut",
"completeness_cut_conservative", "completeness_cut_permissive",
"cc_one_half_cut",]
__slots__ = ["n_bins", "i_over_sigma_min", "r_merge_max", "r_meas_max",
"completeness_min_conservative", "completeness_min_permissive",
"cc_one_half_min", "d_min_overall",] + cutoffs_attr
def __init__ (self,
i_obs,
crystal_symmetry=None,
n_bins=None,
i_over_sigma_min=2.0,
r_merge_max=0.5,
r_meas_max=0.5,
completeness_min_conservative=0.9,
completeness_min_permissive=0.5,
cc_one_half_min=0.5) :
self.n_bins = n_bins
self.i_over_sigma_min = i_over_sigma_min
self.r_merge_max = r_merge_max
self.r_meas_max = r_meas_max
self.completeness_min_conservative = completeness_min_conservative
self.completeness_min_permissive = completeness_min_permissive
self.cc_one_half_min = cc_one_half_min
for attr in self.cutoffs_attr :
setattr(self, attr, None)
# Decide n_bins
if n_bins is None:
n_bins = min(200, int(i_obs.complete_set().indices().size()/500.+.5)) # not well tested.
print "n_bins=",n_bins
i_obs.setup_binner(n_bins=n_bins)
bins = []
for bin in i_obs.binner().range_used():
unmerged = i_obs.select(i_obs.binner().selection(bin))
try:
bin_stats = merging_statistics.merging_stats(unmerged,
anomalous=False)
bins.append(bin_stats)
except RuntimeError: # complains that no reflections left after sigma-filtering.
continue
self.d_min_overall = bins[-1].d_min
d_min_last = float("inf")
for bin in bins :
if (self.i_over_sigma_cut is None) :
if (bin.i_over_sigma_mean < self.i_over_sigma_min) :
self.i_over_sigma_cut = d_min_last
if (self.cc_one_half_cut is None) :
if (bin.cc_one_half < self.cc_one_half_min) :
self.cc_one_half_cut = d_min_last
if (self.r_merge_cut is None) :
if (bin.r_merge > self.r_merge_max) :
self.r_merge_cut = d_min_last
if (self.r_meas_cut is None) :
if (bin.r_meas > self.r_meas_max) :
self.r_meas_cut = d_min_last
if (self.completeness_cut_conservative is None) :
if (bin.completeness < completeness_min_conservative) :
self.completeness_cut_conservative = d_min_last
if (self.completeness_cut_permissive is None) :
if (bin.completeness < completeness_min_permissive) :
self.completeness_cut_permissive = d_min_last
d_min_last = bin.d_min
def show (self, out=sys.stdout, prefix="") :
def format_d_min (value) :
if (value is None) :
return "(use all data)" #% self.d_min_overall
return "%7.3f" % value
print >> out, prefix + "Crude resolution cutoff estimates:"
print >> out, prefix + " resolution of all data : %7.3f" % \
self.d_min_overall
if (self.cc_one_half_min is not None) :
print >> out, prefix + " based on CC(1/2) > %-6g : %s" % \
(self.cc_one_half_min, format_d_min(self.cc_one_half_cut))
if (self.i_over_sigma_min is not None) :
print >> out, prefix + " based on mean(I/sigma) > %-6g : %s" % \
(self.i_over_sigma_min, format_d_min(self.i_over_sigma_cut))
if (self.r_merge_max is not None) :
print >> out, prefix + " based on R-merge < %-6g : %s" % \
(self.r_merge_max, format_d_min(self.r_merge_cut))
if (self.r_meas_max is not None) :
print >> out, prefix + " based on R-meas < %-6g : %s" % \
(self.r_meas_max, format_d_min(self.r_meas_cut))
if (self.completeness_min_conservative is not None) :
print >> out, prefix + " based on completeness > %-6g : %s" % \
(self.completeness_min_conservative,
format_d_min(self.completeness_cut_conservative))
if (self.completeness_min_permissive is not None) :
print >> out, prefix + " based on completeness > %-6g : %s" % \
(self.completeness_min_permissive,
format_d_min(self.completeness_cut_permissive))
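# Hedged usage sketch (illustration only; i_obs is an unmerged intensity miller array):
#   cut = estimate_crude_resolution_cutoffs(i_obs=i_obs)   # n_bins decided automatically if None
#   cut.show()
# prints one crude d_min estimate per criterion (CC1/2, I/sigma, Rmerge, Rmeas, completeness).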
```
#### File: dataproc/command_line/cheetahh5tocbf.py
```python
from libtbx import easy_mp
from yamtbx.dataproc import cbf
import h5py
import numpy
import os
import sys
nproc = 11
def save_cbf(wavelen, data, cbfout):
header = """\
# Detector: NOT PILATUS but MPCCD
# Wavelength %f A
""" % wavelen
height, width = data.shape
#print "Saving", cbfout, height, width, data
cbf.save_numpy_data_as_cbf(data.reshape(width*height), width, height, "hdf5_converted", str(cbfout),
pilatus_header=header)
# save_cbf()
def convert_single(h5in, root, cbfout):
f = h5py.File(h5in, "r")
wavelen = f["%s/photon_wavelength_A"%root].value
data = numpy.array(f["%s/data"%root])
save_cbf(wavelen, data, cbfout)
f.close()
print "Processed: %s %s" % (os.path.basename(h5in), root)
# convert_single()
def convert(h5in, par=False):
f = h5py.File(h5in, "r")
if "/LCLS/data" in f:
convert_single(h5in, root="/LCLS", cbfout=os.path.basename(h5in)+".cbf")
else:
for tag in f:
convert_single(h5in, root="/%s"%tag,
cbfout="%s_%s.cbf" % (os.path.basename(h5in), tag))
# convert()
def run(h5_files):
if len(h5_files) == 0: return
if len(h5_files) > 1:
easy_mp.pool_map(fixed_func=convert,
args=h5_files,
processes=nproc)
else:
h5in = h5_files[0]
tags = h5py.File(h5in, "r").keys()
fun = lambda x: convert_single(h5in, root="/%s"%x,
cbfout="%s_%s.cbf" % (os.path.basename(h5in), x))
for tag in tags: fun(tag)
return # parallel reading of single file seems buggy..
easy_mp.pool_map(fixed_func=fun,
args=tags,
processes=nproc)
# run()
if __name__ == "__main__":
import sys
h5_files = filter(lambda x: x.endswith(".h5"), sys.argv[1:])
if len(h5_files) == 0:
print "Usage: %s h5_files" % sys.argv[0]
quit()
run(h5_files)
```
#### File: dataproc/command_line/dump_shika_pickle.py
```python
import pickle
def run(pklin):
stats = pickle.load(open(pklin, "rb"))
print "Files in this pickle:"
for f in stats:
print " %s" % f
print
print
for f in stats:
print f
print "=" * len(f)
stat = stats[f]
print " Detector=", stat.detector
print " Gonio=", stat.gonio
print " Grid coord=", stat.grid_coord
print " Params=", "rawname:",stat.params.distl.image #dir(stat.params)
print " Scan info=", "wavelen:", stat.scan_info.wavelength
print " Spots=", stat.spots.get_n_spots("all")
print " Thumb pos mag=", stat.thumb_posmag
print
if __name__ == "__main__":
import sys
pklin = sys.argv[1]
run(pklin)
```
#### File: dataproc/command_line/easy_anode.py
```python
from yamtbx.util import call
from yamtbx.util.xtal import format_unit_cell
import iotbx.file_reader
import iotbx.scalepack.merge
import iotbx.shelx.hklf
from cctbx import miller
from cctbx.array_family import flex
from iotbx import crystal_symmetry_from_any
from mmtbx.scaling.matthews import p_vm_calculator
from mmtbx.scaling.absolute_scaling import ml_aniso_absolute_scaling
import os
master_phil = """
hklin = None
.type = path
pdbin = None
.type = path
anisotropy_correction = False
.type = bool
wdir = anode
.type = str
"""
def check_symm(xs_hkl, xs_pdb):
pdb_laue = xs_pdb.space_group().build_derived_reflection_intensity_group(False)
hkl_laue = xs_hkl.space_group().build_derived_reflection_intensity_group(False)
if pdb_laue != hkl_laue or not xs_pdb.unit_cell().is_similar_to(xs_hkl.unit_cell()):
print "WARNING! Incompatible symmetry (symmetry mismatch or too different unit cell)"
print " hklin:"
xs_hkl.show_summary(prefix=" ")
print " pdbin:"
xs_pdb.show_summary(prefix=" ")
# check_symm()
def pha2mtz(phain, xs, mtzout):
hkl, f, fom, phi, sigf = [], [], [], [], []
for l in open(phain):
sp = l.split()
if len(sp) != 7: break
hkl.append(tuple(map(int, sp[:3])))
f.append(float(sp[3]))
fom.append(float(sp[4]))
phi.append(float(sp[5]))
sigf.append(float(sp[6]))
if not hkl:
return
f_array = miller.array(miller.set(xs, flex.miller_index(hkl)),
data=flex.double(f),
sigmas=flex.double(sigf))
mtz_ds = f_array.as_mtz_dataset(column_root_label="ANOM", column_types="FQ") # To open with Coot, column type F is required (not D)
mtz_ds.add_miller_array(f_array.customized_copy(data=flex.double(phi),
sigmas=None),
column_root_label="PANOM",
column_types="P")
mtz_ds.add_miller_array(f_array.customized_copy(data=flex.double(fom),
sigmas=None),
column_root_label="FOM",
column_types="W")
mtz_ds.mtz_object().write(mtzout)
# pha2mtz()
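# Hedged note (added comment): each line of anode.pha is expected to carry 7 columns,
#   h k l F fom phi sigF
# which pha2mtz() writes out as ANOM/SIGANOM (column types F/Q), PANOM (type P) and FOM (type W).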
def run(hklin, pdbin, wdir, anisotropy_correction=False):
arrays = iotbx.file_reader.any_file(hklin).file_server.miller_arrays
i_arrays = filter(lambda x:x.is_xray_intensity_array() and x.anomalous_flag(),
arrays)
f_arrays = filter(lambda x:x.is_xray_amplitude_array() and x.anomalous_flag(),
arrays)
if not i_arrays and not f_arrays:
print "No anomalous observation data"
return
if os.path.exists(wdir):
print "%s already exists. quiting." % wdir
return
os.mkdir(wdir)
xs = crystal_symmetry_from_any.extract_from(pdbin)
sh_out = open(os.path.join(wdir, "run_anode.sh"), "w")
sh_out.write("#!/bin/sh\n\n")
sh_out.write("shelxc anode <<+ > shelxc.log 2>&1\n")
sh_out.write("cell %s\n" % format_unit_cell(xs.unit_cell()))
sh_out.write("spag %s\n" % str(xs.space_group_info()).replace(" ",""))
if i_arrays:
obs_array = i_arrays[0]
infile = "%s.hkl" % os.path.splitext(os.path.basename(hklin))[0]
in_opt = "%s" % infile
print "Using intensity array:", obs_array.info().label_string()
else:
obs_array = f_arrays[0]
infile = "%s_f.hkl" % os.path.splitext(os.path.basename(hklin))[0]
in_opt ="-f %s" % infile
print "No intensity arrays. Using amplitude arrays instead:", obs_array.info().label_string()
sh_out.write("! data from %s : %s\n" % (os.path.abspath(hklin), obs_array.info().label_string()))
obs_array.crystal_symmetry().show_summary(sh_out, prefix="! ")
check_symm(obs_array.crystal_symmetry(), xs)
n_org = obs_array.size()
obs_array = obs_array.eliminate_sys_absent()
n_sys_abs = n_org - obs_array.size()
if n_sys_abs > 0:
print " %d systematic absences removed." % n_sys_abs
if anisotropy_correction:
print "Correcting anisotropy.."
n_residues = p_vm_calculator(obs_array, 1, 0).best_guess
abss = ml_aniso_absolute_scaling(obs_array, n_residues=n_residues)
abss.show()
tmp = -2. if i_arrays else -1.
b_cart = map(lambda x: x*tmp, abss.b_cart)
obs_array = obs_array.apply_debye_waller_factors(b_cart=b_cart)
sh_out.write("sad %s\n" % in_opt)
iotbx.shelx.hklf.miller_array_export_as_shelx_hklf(obs_array, open(os.path.join(wdir, infile), "w"),
normalise_if_format_overflow=True)
sh_out.write("+\n\n")
sh_out.write('ln -s "%s" anode.pdb\n\n' % os.path.relpath(pdbin, wdir))
sh_out.write("anode anode\n")
sh_out.close()
call(cmd="sh", arg="./run_anode.sh", wdir=wdir)
pha_file = os.path.join(wdir, "anode.pha")
if os.path.isfile(pha_file):
pha2mtz(pha_file, xs, os.path.join(wdir, "anode.pha.mtz"))
print "Done. See %s/" % wdir
fa_file = os.path.join(wdir, "anode_fa.hkl")
if os.path.isfile(fa_file):
r = iotbx.shelx.hklf.reader(open(fa_file))
fa_array = r.as_miller_arrays(crystal_symmetry=xs)[0]
print "\nData stats:"
print " # Cmpl.o = Anomalous completeness in original data"
print " # Cmpl.c = Anomalous completeness in shelxc result (rejections)"
print " # SigAno = <d''/sigma> in shelxc result"
print " d_max d_min Cmpl.o Cmpl.c SigAno"
binner = obs_array.setup_binner(n_bins=12)
for i_bin in binner.range_used():
d_max_bin, d_min_bin = binner.bin_d_range(i_bin)
obs_sel = obs_array.resolution_filter(d_max_bin, d_min_bin)
obs_sel_ano = obs_sel.anomalous_differences()
fa_sel = fa_array.resolution_filter(d_max_bin, d_min_bin)
cmplset = obs_sel_ano.complete_set(d_max=d_max_bin, d_min=d_min_bin).select_acentric()
n_acentric = cmplset.size()
sigano = flex.mean(fa_sel.data()/fa_sel.sigmas()) if fa_sel.size() else float("nan")
print " %5.2f %5.2f %6.2f %6.2f %6.2f" % (d_max_bin, d_min_bin,
100.*obs_sel_ano.size()/n_acentric,
100.*fa_sel.size()/n_acentric,
sigano)
lsa_file = os.path.join(wdir, "anode.lsa")
if os.path.isfile(lsa_file):
print ""
flag = False
for l in open(lsa_file):
if "Strongest unique anomalous peaks" in l:
flag = True
elif "Reflections written to" in l:
flag = False
if flag:
print l.rstrip()
if os.path.isfile(("anode_fa.res")):
x = iotbx.shelx.cctbx_xray_structure_from(file=open("anode_fa.res"))
open("anode_fa.pdb", "w").write(x.as_pdb_file())
# run()
if __name__ == "__main__":
import sys
import iotbx.phil
cmdline = iotbx.phil.process_command_line_with_files(args=sys.argv[1:],
master_phil_string=master_phil,
reflection_file_def="hklin",
pdb_file_def="pdbin")
params = cmdline.work.extract()
run(params.hklin, params.pdbin, params.wdir, params.anisotropy_correction)
```
#### File: dataproc/command_line/eiger_check_deadmodules.py
```python
import h5py
import numpy
#from yamtbx.dataproc import cbf
def get_module_info(h):
ret = []
dsp = h["/entry/instrument/detector/detectorSpecific/"]
for k in sorted(filter(lambda x: x.startswith("detectorModule_"), dsp.keys())):
rot = dsp[k]["data_rotation"].value
org = dsp[k]["data_origin"][:]
siz = dsp[k]["data_size"][:]
assert rot in (0, 180)
print "#",k, org, siz, rot
if rot == 180:
org = org - siz
ret.append((k, org, siz))
return ret
def data_iterator(h):
for k in sorted(h["/entry/data"].keys()):
if not h["/entry/data"].get(k): continue
for i in xrange(h["/entry/data"][k].shape[0]):
yield h["/entry/data"][k][i]
def numbers_to_range(n):
ret = "%d" % n[0]
for i in xrange(1, len(n)):
if n[i]-n[i-1] <= 1:
if ret[-1] != "-": ret += "-"
if i==len(n)-1: ret += "%d"%n[i]
else:
if ret[-1] == "-":
ret += "%d"%n[i-1]
if i<len(n): ret += ",%d"%n[i]
else: ret += ",%d" % n[i]
return ret
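# Hedged example (illustration only):
#   numbers_to_range([1, 2, 3, 7, 9, 10]) -> "1-3,7,9-10"
# i.e. runs of consecutive frame numbers are compressed into "first-last" ranges.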
def run(masterf):
print "# %s" % masterf
h = h5py.File(masterf, "r")
modules = get_module_info(h)
print "frame module num.bad percent.bad"
ret = {}
for fn, data in enumerate(data_iterator(h)):
#if fn<10000: continue
#tmp = numpy.zeros(data.shape, dtype=numpy.int32)
for name, origin, size in modules:
#tmp[origin[1]:origin[1]+size[1],
# origin[0]:origin[0]+size[0]] += j+1
d = data[origin[1]:origin[1]+size[1],
origin[0]:origin[0]+size[0]]
bad_value = 2**(d.dtype.itemsize*8)-1
num_bad = numpy.sum(d == bad_value)
frac = num_bad/float(d.size)
if frac>0.5:
print "%6d %s %6d %5.1f" % (fn+1, name, num_bad, frac*100.)
ret.setdefault(name, []).append(fn+1)
#cbf.save_numpy_data_as_cbf(tmp.flatten(),
# size1=tmp.shape[1], size2=tmp.shape[0],
# title="test",
# cbfout="junk.cbf")
if ret:
print "# Problematic modules:"
for name in sorted(ret):
print "# %s %d frames (%s)" % (name, len(ret[name]), numbers_to_range(ret[name]))
if __name__ == "__main__":
import sys
run(sys.argv[1])
```
#### File: dataproc/command_line/nexush52cbf.py
```python
from yamtbx.dataproc import cbf
import h5py
import numpy
import os
def make_dummy_pilatus_header(h5):
safe_str = lambda x,y,d: x[y].value if y in x else d
safe_val = lambda x,y,d: x[y][0] if y in x else d
try:
det = h5["entry"]["instrument"]["detector"]
except KeyError:
return ""
return """\
# Detector: NOT PILATUS but %(detector)s
# %(date)s
# Pixel_size %(pixel_size_x)e m x %(pixel_size_y)e m
# %(sensor_material)s sensor, thickness %(sensor_thickness)e m
# Exposure_time %(exp_time)f s
# Wavelength %(wavelength)f A
# Detector_distance %(distance)f m
# Beam_xy (%(orgx).2f, %(orgy).2f) pixels
# Angle_increment %(osc_width)f deg.
""" % dict(detector= safe_str(det, "description", ""),
date= safe_str(det["detectorSpecific"], "data_collection_date", ""),
pixel_size_x= safe_val(det, "x_pixel_size", 0),
pixel_size_y= safe_val(det, "y_pixel_size", 0),
sensor_material= safe_str(det, "sensor_material", ""),
sensor_thickness= safe_val(det, "sensor_thickness", 0),
exp_time= safe_val(det, "count_time", 0),
wavelength= safe_val(h5["entry"]["instrument"]["beam"], "wavelength", 0) if "beam" in h5["entry"]["instrument"] else 0,
distance= safe_val(det, "detector_distance", 0),
orgx= safe_val(det, "beam_center_x", 0),
orgy= safe_val(det, "beam_center_y", 0),
osc_width=-1, # Should be found in f["entry"]["sample"]["rotation_angle_step"]
)
# make_dummy_pilatus_header()
def get_mask_info(val):
"""
https://www.dectris.com/nexus.html#main_head_navigation
Contains a bit field for each pixel to signal dead, blind or high or otherwise unwanted or undesirable pixels.
The bits have the following meaning:
0 gap (pixel with no sensor)
1 dead
2 under responding
3 over responding
4 noisy
5-31 -undefined-
"""
ret = []
if val&1:
ret.append("gap (0)")
if val&2:
ret.append("dead (1)")
if val&4:
ret.append("under responding (2)")
if val&8:
ret.append("over responding (3)")
if val&16:
ret.append("noisy (4)")
return ",".join(ret)
# get_mask_info()
def run(h5in, cbf_prefix):
f = h5py.File(h5in, "r")
if "instrument" not in f["entry"]:
print "Error: This is not master h5 file."
return
dname = os.path.dirname(cbf_prefix)
if dname != "" and not os.path.exists(dname):
os.makedirs(dname)
print "dir created:", dname
# Analyze pixel_mask
pixel_mask = numpy.array(f["entry"]["instrument"]["detector"]["detectorSpecific"]["pixel_mask"])
print "No. of unuseful pixels:"
for val in set(pixel_mask.flatten()):
if val==0: continue
print "", get_mask_info(val), (pixel_mask==val).sum()
print
# Extract and write data
data = filter(lambda x: x.startswith("data_"), f["entry"])
count = 0
for key in sorted(data):
print "Extracting", key
im = f["entry"][key]
print " shape=", im.shape
print " dtype=", im.dtype
nframes, height, width = im.shape
for i in xrange(nframes):
count += 1
cbfout = "%s_%.6d.cbf" % (cbf_prefix, count)
data = im[i,].astype(numpy.int32)
data[pixel_mask>0] = -1
cbf.save_numpy_data_as_cbf(data.reshape(width*height), width, height, "hdf5_converted", cbfout,
pilatus_header=make_dummy_pilatus_header(f))
print " ", cbfout
print
# run()
if __name__ == "__main__":
import sys
if len(sys.argv) < 2:
print "Usage: %s master-h5 prefix" % os.path.basename(sys.argv[0])
print
print "for example: %s series_10_master.h5 /tmp/series10" % os.path.basename(sys.argv[0])
print " then writes /tmp/series10_000001.cbf, ...."
quit()
h5in = sys.argv[1]
if len(sys.argv) > 2:
cbf_prefix = sys.argv[2]
else:
p = os.path.basename(h5in).replace("_master.h5","")
cbf_prefix = "cbf_%s/%s" % (p,p)
run(h5in, cbf_prefix)
```
#### File: crystfel/command_line/compare_hkl.py
```python
from yamtbx.dataproc import crystfel
import iotbx.phil
from cctbx.array_family import flex
import math
master_params_str = """\
pdb = None
.type = path
.help = Symmetry source
datout = shells_all.dat
.type = path
.help = Output dat file
dmin = None
.type = float
dmax = None
.type = float
nshells = 20
.type = int
fom = *cc *ccano *rsplit
.type = choice(multi=True)
"""
def calc_cc(a1, a2):
correlation = flex.linear_correlation(a1.data(), a2.data())
if correlation.is_well_defined(): return correlation.coefficient()
else: return float("nan")
# calc_cc()
def calc_ccano(a1, a2):
if not (a1.anomalous_flag() and a2.anomalous_flag()):
return float("nan")
a1, a2 = map(lambda x:x.anomalous_differences(), (a1,a2))
return calc_cc(a1, a2)
# calc_ccano()
def calc_rsplit(a1, a2):
num = flex.sum(flex.abs(a1.data() - a2.data()))
den = flex.sum(a1.data() + a2.data()) / 2.
return 1./math.sqrt(2.) * num / den
# calc_rsplit()
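# Hedged example (illustration only), Rsplit = (1/sqrt(2)) * sum|I1-I2| / (sum(I1+I2)/2):
# if a1.data() = flex.double([10, 20]) and a2.data() = flex.double([12, 18]) on a common set,
# then calc_rsplit(a1, a2) -> (1/sqrt(2)) * 4 / 30 ~ 0.094.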
def run(hklfiles, params):
arrays = map(lambda x: crystfel.hkl.HKLfile(symm_source=params.pdb, hklin=x), hklfiles)
for a in arrays: a.set_resolution(d_min=params.dmin, d_max=params.dmax)
ofs = open(params.datout, "w")
ofs.write(" dmax dmin nref cmpl red1 red2 %s\n" % " ".join(map(lambda x: "%7s"%x,params.fom)))
a1, a2 = arrays[0].array.common_sets(arrays[1].array)
r1, r2 = arrays[0].redundancies.common_sets(arrays[1].redundancies)
r1, r2 = map(lambda x: x.as_double(), (r1, r2))
binner = a1.setup_binner(n_bins=params.nshells)
for i_bin in binner.range_used():
sel = binner.selection(i_bin)
d_max, d_min = binner.bin_d_range(i_bin)
r1s, r2s = r1.select(sel), r2.select(sel)
r1sm = flex.mean(r1s.data()) if r1s.size()>0 else float("nan")
r2sm = flex.mean(r2s.data()) if r2s.size()>0 else float("nan")
a1s, a2s = a1.select(sel), a2.select(sel)
ofs.write("%6.2f %6.2f %5d %5.1f %6.1f %6.1f " % (d_max, d_min, a1s.size(),
a1s.completeness(d_max=d_max)*100.,
r1sm, r2sm))
if "cc" in params.fom: ofs.write("% 7.4f " % calc_cc(a1s, a2s))
if "ccano" in params.fom: ofs.write("% 7.4f " % calc_ccano(a1s, a2s))
if "rsplit" in params.fom: ofs.write("% 7.4f " % calc_rsplit(a1s, a2s))
ofs.write("\n")
ofs.write("# overall %5d %5.1f %6.1f %6.1f " % (a1.size(),
a1.completeness(d_max=params.dmax)*100.,
flex.mean(r1.data()), flex.mean(r2.data())))
if "cc" in params.fom: ofs.write("% 7.4f " % calc_cc(a1, a2))
if "ccano" in params.fom: ofs.write("% 7.4f " % calc_ccano(a1, a2))
if "rsplit" in params.fom: ofs.write("% 7.4f " % calc_rsplit(a1, a2))
ofs.write("\n")
# run()
if __name__ == "__main__":
import sys
import os
if "-h" in sys.argv[1:] or "--help" in sys.argv[1:]:
print "All parameters:\n"
iotbx.phil.parse(master_params_str).show(prefix=" ", attributes_level=1)
quit()
cmdline = iotbx.phil.process_command_line(args=sys.argv[1:],
master_string=master_params_str)
params = cmdline.work.extract()
args = cmdline.remaining_args
hklfiles = []
for arg in args:
if os.path.isfile(arg):
if ".hkl" in arg: hklfiles.append(arg)
elif params.pdb is None: params.pdb = arg
if params.pdb is None:
print "Give pdb file"
quit()
if len(hklfiles) != 2:
print "Give two hkl files."
quit()
run(hklfiles, params)
```
#### File: dataproc/crystfel/geom.py
```python
import re
import numpy
import sys
#float_or_1 = lambda x: 1 if x == "" else float(x)
float_or_1 = lambda x: (1,-1)[len(x)] if x == "" or x == "-" else float(x)
class Sensor:
re_x = re.compile("([-0-9\.]*) *x")
re_y = re.compile("([-+0-9\.]*) *y")
def __init__(self, clen=None, res=None):
for k in ("min_fs", "min_ss", "max_fs", "max_ss", "fs", "ss", "corner_x", "corner_y", "coffset"):
setattr(self, k, None)
self.clen = clen
self.res = res
# __init__()
def set_info(self, key, valstr):
if key == "min_fs":
self.min_fs = int(float(valstr))
elif key == "min_ss":
self.min_ss = int(float(valstr))
elif key == "max_fs":
self.max_fs = int(float(valstr))
elif key == "max_ss":
self.max_ss = int(float(valstr))
elif key == "fs":
self.fs = [0., 0.]
r_x = self.re_x.search(valstr)
r_y = self.re_y.search(valstr)
if r_x: self.fs[0] = float_or_1(r_x.group(1))
if r_y: self.fs[1] = float_or_1(r_y.group(1))
if not r_x and not r_y: print "Wrong parameter value for fs:", valstr
elif key == "ss":
self.ss = [0., 0.]
r_x = self.re_x.search(valstr)
r_y = self.re_y.search(valstr)
if r_x: self.ss[0] = float_or_1(r_x.group(1))
if r_y: self.ss[1] = float_or_1(r_y.group(1))
if not r_x and not r_y: print >>sys.stderr, "Wrong parameter value for ss:", valstr
elif key == "corner_x":
self.corner_x = float(valstr)
elif key == "corner_y":
self.corner_y = float(valstr)
elif key == "res":
self.res = float(valstr)
elif key == "clen":
self.clen = float(valstr)
elif key == "coffset":
self.coffset = float(valstr)
else:
print >>sys.stderr, "Warning - Unknown paramter:", key
# read_info()
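# Hedged example (illustration only): fs/ss directions are given as combinations of x and y, e.g.
#   set_info("fs", "1.0x -0.002y")  ->  self.fs == [1.0, -0.002]
#   set_info("ss", "-y")            ->  self.ss == [0.0, -1.0]   (a bare "-" is read as -1 by float_or_1)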
# class Sensor
class Geomfile:
def __init__(self, geom_in=None):
self.clen = None
self.res = None
if geom_in is not None:
self.read_file(geom_in)
# __init__()
def read_file(self, geom_in):
self.sensors = {}
for l in open(geom_in):
if ";" in l: l = l[:l.find(";")]
l = l.strip()
if "=" in l:
sp = map(lambda x:x.strip(), l.split("="))
if len(sp) > 2:
print "Warning: more than one = in this line. Ignored:", l
continue
lhs, rhs = sp
if lhs == "clen":
self.clen = rhs #float
elif lhs == "res":
self.res = float(rhs)
elif lhs == "adu_per_eV":
self.adu_per_eV = rhs #float
elif lhs == "max_adu":
self.max_adu = rhs # float
elif "/" in lhs:
sp = lhs.split("/")
if len(sp) > 2:
print >>sys.stderr, "Warning: more than one / in left hand. Ignored:", l
continue
sid, lhs2 = sp
if sid not in self.sensors:
self.sensors[sid] = Sensor(clen=self.clen, res=self.res) # XXX res and clen must be defined first (before the sensor defs; if sensor defs don't have clen nor res)
self.sensors[sid].set_info(lhs2, rhs)
# read_file()
def get_extent(self):
inf = float("inf")
fmin, fmax, smin, smax = inf, -inf, inf, -inf
for sid in self.sensors:
s = self.sensors[sid]
c = numpy.array((s.corner_x, s.corner_y))
fs, ss = numpy.array(s.fs), numpy.array(s.ss)
#for x in xrange(s.max_fs-s.min_fs+1):
# for y in xrange(s.max_ss-s.min_ss+1):
for x in (0, s.max_fs-s.min_fs):
for y in (0, s.max_ss-s.min_ss):
v = c + fs * x + ss * y
fmin = min(fmin, v[0])
fmax = max(fmax, v[0])
smin = min(smin, v[1])
smax = max(smax, v[1])
return fmin, fmax, smin, smax
# get_extent()
def get_image_extent(self):
max_ss, max_fs = 0, 0
for sid in self.sensors:
s = self.sensors[sid]
max_fs = max(max_fs, s.max_fs)
max_ss = max(max_ss, s.max_ss)
return max_fs, max_ss
# get_image_extent()
def calc_xy_from_fs_ss(self, fs, ss, panel=None, shifts=None):
if panel is None:
for sid in self.sensors:
s = self.sensors[sid]
if s.min_fs <= fs <= s.max_fs and s.min_ss <= ss <= s.max_ss:
panel = sid
break
if panel is None:
return float("nan"), float("nan")
s = self.sensors[panel]
fs, ss = fs - s.min_fs, ss - s.min_ss
x = s.corner_x + s.fs[0]*fs + s.ss[0]*ss
y = s.corner_y + s.fs[1]*fs + s.ss[1]*ss
if shifts is not None: # shifts in meter
x += shifts[0]*s.res
y += shifts[1]*s.res
return x,y
# calc_xy_from_fs_ss()
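# Hedged example (illustration only): for a panel with corner_x=-10, corner_y=20,
# fs=[1,0], ss=[0,1] and min_fs=min_ss=0, a detector pixel at (fs,ss)=(5,3) maps to
#   x = -10 + 1*5 + 0*3 = -5,  y = 20 + 0*5 + 1*3 = 23
# (in pixel units; the optional shifts argument is given in metres and scaled by res).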
# class Geomfile
```
#### File: hkl2000/command_line/calc_scales_for_xfiles.py
```python
from yamtbx.dataproc.hkl2000.xfile import DenzoXfile
from yamtbx.dataproc.scale_data import kBdecider, kBdecider3
import iotbx.phil
import iotbx.file_reader
from cctbx.array_family import flex
import traceback
master_params_str = """
lstin = None
.type = path
d_min = None
.type = float
d_max = None
.type = float
sigma_cutoff = None
.type = float
reference {
file = None
.type = path
label = None
.type = str
d_min = None
.type = float
d_max = None
.type = float
}
show_plot = False
.type = bool
help = False
.type = bool
"""
def run(params, xfiles):
# read reference
arrays = iotbx.file_reader.any_file(params.reference.file).file_server.miller_arrays
arrays = filter(lambda ar: ar.is_xray_data_array(), arrays)
if params.reference.label is not None:
arrays = filter(lambda ar: ar.info().label_string() == params.reference.label, arrays)
if len(arrays) != 1:
print "Can't decide data to use in reference file:", params.reference.file
print "Choose label"
for ar in arrays: print ar.info().label_string()
return
refdata = arrays[0].as_intensity_array()
refdata = refdata.resolution_filter(d_max=params.reference.d_max, d_min=params.reference.d_min)
print "file n.common k b cc.org cc.mean cc.scaled a b c al be ga"
for xf in xfiles:
print "# Reading", xf
try:
xfile = DenzoXfile(xf)
except:
traceback.print_exc()
continue
a = xfile.miller_array(anomalous_flag=refdata.anomalous_flag())
a = a.select(a.sigmas() > 0)
a = a.resolution_filter(d_min=params.d_min, d_max=params.d_max)
if params.sigma_cutoff is not None:
a = a.select(a.data()/a.sigmas() >= params.sigma_cutoff)
a = a.merge_equivalents(use_internal_variance=False).array()
tmp, a = refdata.common_sets(a, assert_is_similar_symmetry=False)
n_common = tmp.size()
if n_common == 0:
print "# No useful reflection in this file. skip."
continue
corr = flex.linear_correlation(tmp.data(), a.data())
cc_org = corr.coefficient() if corr.is_well_defined() else float("nan")
# Calc CC in resolution bin and average
tmp.setup_binner(auto_binning=True)
cc_bins = []
for i_bin in tmp.binner().range_used():
sel = tmp.binner().selection(i_bin)
corr = flex.linear_correlation(tmp.select(sel).data(), a.select(sel).data())
if not corr.is_well_defined(): continue
cc_bins.append(corr.coefficient())
cc_mean = sum(cc_bins) / float(len(cc_bins)) if len(cc_bins) > 0 else float("nan")
# Determine scale and B
k, b = kBdecider(tmp, a).run()
bfac = flex.exp(-b * a.d_star_sq().data()) if b != 0 else 1.
corr = flex.linear_correlation(tmp.data(), a.data() * k*bfac)
cc_scaled = corr.coefficient() if corr.is_well_defined() else float("nan")
print "%s %5d %.3e %.3e %.4f %.4f %.4f" % (xf, n_common, k, b, cc_org, cc_mean, cc_scaled),
print ("%.3f "*6)%a.unit_cell().parameters()
if params.show_plot:
import pylab
from matplotlib.ticker import FuncFormatter
s3_formatter = lambda x,pos: "inf" if x == 0 else "%.2f" % (x**(-1./3))
fig, ax1 = pylab.plt.subplots()
plot_x = map(lambda i: tmp.binner().bin_d_range(i)[1]**(-3), tmp.binner().range_used())
#for name, ar in (("reference", tmp), ("data", a)):
vals = map(lambda i: flex.mean(tmp.data().select(tmp.binner().selection(i))), tmp.binner().range_used())
pylab.plot(plot_x, vals, label="reference")
scale = flex.sum(tmp.data()*a.data()) / flex.sum(flex.pow2(a.data()))
print "Linear-scale=", scale
vals = map(lambda i: scale*flex.mean(a.data().select(tmp.binner().selection(i))), tmp.binner().range_used())
pylab.plot(plot_x, vals, label="data")
vals = map(lambda i: flex.mean((a.data()*k*bfac).select(tmp.binner().selection(i))), tmp.binner().range_used())
pylab.plot(plot_x, vals, label="data_scaled")
"""
from mmtbx.scaling import absolute_scaling, relative_scaling
ls_scaling = relative_scaling.ls_rel_scale_driver(tmp, tmp.customized_copy(data=a.data(),sigmas=a.sigmas()), use_intensities=True, scale_weight=True, use_weights=True)
ls_scaling.show()
vals = map(lambda i: flex.mean(ls_scaling.derivative.resolution_filter(*tmp.binner().bin_d_range(i)).data()), tmp.binner().range_used())
pylab.plot(plot_x, vals, label="data_scaled2")
"""
pylab.legend()
pylab.xlabel('resolution (d^-3)')
pylab.ylabel('<I>')
pylab.setp(pylab.gca().get_legend().get_texts(), fontsize="small")
pylab.title('Scaled with B-factors (%.2f)' % b)
pylab.gca().xaxis.set_major_formatter(FuncFormatter(s3_formatter))
ax2 = ax1.twinx()
ax2.plot(plot_x, cc_bins, "black")
ax2.set_ylabel('CC')
pylab.show()
if __name__ == "__main__":
import sys
cmdline = iotbx.phil.process_command_line(args=sys.argv[1:],
master_string=master_params_str)
params = cmdline.work.extract()
args = cmdline.remaining_args
if "-h" in args: params.help = True
if params.help:
print "Parameter syntax:\n"
iotbx.phil.parse(master_params_str).show(prefix=" ")
print
print "Usage: calc_scales_for_xfiles.py [.x files] [lstin=xfiles.lst] reference.sca"
quit()
reffile = filter(lambda x: x.endswith((".sca",".mtz")), args)
assert len(reffile) == 1
params.reference.file = reffile[0]
xfiles = filter(lambda x: x.endswith(".x"), args)
if params.lstin is not None:
for l in open(params.lstin):
if "#" in l: l = l[:l.index("#")]
l = l.strip()
if l != "": xfiles.append(l)
run(params, xfiles)
```
#### File: myspotfinder/command_line/spot_finder_backend.py
```python
import iotbx.phil
import libtbx.phil
import os
import stat
import time
import datetime
import getpass
import zmq
import re
import Queue
import collections
#import sqlite3
import pysqlite2.dbapi2 as sqlite3
import threading
import traceback
import numpy
import cPickle as pickle
import hashlib
from PIL import Image
from multiprocessing import Process
#import inotify.adapters # use yamtbx.python -mpip install inotify
from yamtbx.dataproc.myspotfinder import shikalog
from yamtbx.dataproc.myspotfinder import config_manager
from yamtbx.dataproc.myspotfinder import spot_finder_for_grid_scan
from yamtbx.dataproc import bl_logfiles
from yamtbx.dataproc import eiger
from yamtbx import util
#DEBUG for inotify
#import logging
#logger = logging.getLogger("inotify.adapters")
#logger.setLevel(logging.DEBUG)
#handlers = logging.StreamHandler()
#handlers.setLevel(logging.DEBUG)
#handlers.setFormatter(logging.Formatter("%(asctime)-15s %(levelname)s : %(message)s"))
#logger.addHandler(handlers)
master_params_str = """\
topdir = None
.type = path
.help = Root directory
bl = 32xu 41xu 26b2 44xu 45xu
.type = choice(multi=False)
.help = Choose beamline where you start SHIKA
date = "today"
.type = str
.help = Data collection date ("today" or %Y-%d-%m format)
blconfig = None
.type = path
.help = Override default blconfig path (/isilon/blconfig/bl$bl/)
nproc = 4
.type = int
.help = Number of processors used for spot finding
ports = 5557,5558,5559
.type = ints(size=3,value_min=1024,value_max=49151)
.help = Port numbers used by ZeroMQ.
dbdir = /isilon/cluster/log/shika/db
.type = path
.help = location to write sqlite3 db file.
logroot = /isilon/cluster/log/shika/
.type = path
mode = *eiger_streaming bsslog zoo watch_ramdisk
.type = choice(multi=False)
env = *oys ppu
.type = choice(multi=False)
.help = Execution environment
eiger_host = "192.168.163.204"
.type = str
.help = "EIGER hostname or ip-address"
#incomplete_file_workaround = 0
# .type = float
# .help = wait given seconds after detecting new image
force_ssh_from = None
.type = str
.help = Users must not change this parameter.
only_check_in_last_hours = 1
.type = float
.help = "Only check diffscan.log modified during the last specified hours"
ramdisk_walk_interval = 2
.type = float
"""
params = None
def retry_until_success(f, arg=None):
args = (arg,) if arg is not None else ()
return util.retry_until_noexc(f, args, ntry=30, outf=shikalog.warning)
class DiffScanManager:
def __init__(self):
self.clear()
# __init__()
def clear(self):
self.scanlog = {} # directory: BssDiffscanLog object
self.found_imgs = set()
self.finished = {} # {filename:timestamp}; list of filenames of which analysis was completed
# clear()
def add_scanlog(self, slog):
slog = os.path.abspath(slog)
self.scanlog[os.path.dirname(slog)] = bl_logfiles.BssDiffscanLog(slog)
# add_scanlog()
def add_dir(self, slogdir):
self.add_scanlog(os.path.join(slogdir, "diffscan.log"))
# add_dir()
def update_scanlogs(self):
for logdir, slog in self.scanlog.items():
if os.path.isfile(slog.scanlog):
slog.parse()
else:
shikalog.error("diffraction scan log is not found!: %s" %slog.scanlog)
continue
# if no update since all images processed
mtime = os.path.getmtime(slog.scanlog)
if mtime == self.finished.get(slog.scanlog, -1): continue
# Update 'processed files' using database
dbfile = os.path.join(logdir, "_spotfinder", "shika.db")
for _ in xrange(10):
try:
if not os.path.exists(os.path.dirname(dbfile)): os.mkdir(os.path.dirname(dbfile))
con = sqlite3.connect(dbfile, timeout=30)
break
except sqlite3.OperationalError:
shikalog.warning("Connecting to %s failed. Retrying" % dbfile)
cur = con.cursor()
# status TABLE
#c = cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='status';")
c = retry_until_success(cur.execute, "SELECT name FROM sqlite_master WHERE type='table' AND name='status';")
if c.fetchone() is not None:
#cur.execute("select filename from status")
retry_until_success(cur.execute, "select filename from status")
processed_files = map(lambda x:os.path.join(logdir, x[0]), cur.fetchall())
print "debug::",processed_files
self.found_imgs.update(processed_files)
# check if all images are processed
if len(processed_files) == sum(map(lambda x: len(x.filename_idxes), slog.scans)):
shikalog.info("All %d images in %s are already processed. Will not check unless diffscan.log updated"%(len(processed_files), slog.scanlog))
self.finished[slog.scanlog] = mtime
#con.commit()
retry_until_success(con.commit)
con.close()
# update_scanlogs()
def get_unprocessed_images(self, env=None):
ret = []
self.update_scanlogs()
for slogdir, slog in self.scanlog.items():
for scan in slog.scans:
fcs = map(lambda x: (os.path.join(slogdir, x[0]), x[1]), scan.filename_idxes)
#print "fix=", fcs
#if env == "ppu": fcs_proxy = map(lambda x: (re.sub("^/isilon/users/", "/ramdisk/", x[0]), x[1]), fcs)
if env == "ppu": f_mod = lambda x: re.sub("^/isilon/users/", "/ramdisk/", x)
else: f_mod = lambda x: x
unproc = filter(lambda x: x[0] not in self.found_imgs and os.path.isfile(f_mod(x[0])), fcs)
ret.extend(map(lambda x:x+(scan,), unproc))
self.found_imgs.update(map(lambda x: x[0], ret))
return ret # (filename, idx, scan object)
# get_unprocessed_images()
def remove_found(self, files): # when user wants to recalculate..
self.found_imgs.difference_update(files)
# remove_found()
def needs_to_be_processed(self, filename):
"""
Check if the given file needs to be processed.
No need to process if
- not included in diffscan.log
- first image in row (only if BSS creates such non-sense files)
"""
scaninfo = self.get_scan_info(filename)
if scaninfo is None:
return False
# return True here *if* BSS no longer creates such non-sense files.
# this should be an option.
if scaninfo.is_shutterless():
r = scaninfo.get_file_number_based_on_template(filename)
num = int(r.group(1))
if scaninfo.hpoints > 1:
return num%(scaninfo.hpoints+1) != 0 # if remainder is 0, discard the image.
else:
return num != 0 # discard 000.img
else:
return True
# needs_to_be_processed()
def get_grid_coord(self, filename):
dirname = os.path.dirname(filename)
if dirname not in self.scanlog:
shikalog.warning("get_grid_coord(): directory is not found: %s" % dirname)
return None
return self.scanlog[dirname].get_grid_coord(os.path.basename(filename))
# get_grid_coord()
def get_scan_info(self, filename):
dirname = os.path.dirname(filename)
basename = os.path.basename(filename)
if not dirname in self.scanlog:
shikalog.warning("get_scan_info(): directory is not found: %s" % dirname)
return None
for scan in reversed(self.scanlog[dirname].scans):
if scan.match_file_with_template(filename):
return scan
shikalog.warning("get_scan_info(): Not in scans: %s" % dirname)
return None
# get_scan_info()
def get_gonio_xyz_phi(self, filename):
dirname = os.path.dirname(filename)
basename = os.path.basename(filename)
if not dirname in self.scanlog:
return None
for scan in reversed(self.scanlog[dirname].scans):
for f, c in scan.filename_coords:
if basename == f:
if scan.is_shutterless():
return list(c[0]) + [scan.fixed_spindle]
else:
return list(c[0]) + [scan.osc_start]
return None
# get_gonio_xyz_phi()
# class DiffScanManager
class WatchScanlogThread:
def __init__(self, queue, topdir, beamline=None, expdate="today"):
self.queue = queue
self.topdir = topdir
self.interval = 5
self.thread = None
#self.latest_dir = None
self.keep_going = True
self.running = True
self.beamline = beamline
self.last_bsslog = None
self.last_bsslog_line = 0
self.expdate = None
if expdate != "today": self.expdate = datetime.datetime.strptime(expdate, "%Y-%m-%d")
self.thread = threading.Thread(None, self.run)
self.thread.daemon = True
self._cached_dirs = {}
#self.thread.start()
def start(self, interval=None):
# Thread should be already started.
# Just start to notify the latest directory.
self.notify_latest_dir = True
#wx.PostEvent(self.parent, EventLogWatcherStarted())
if interval is not None:
self.interval = interval
# If accidentally stopped
if not self.is_running():
self.keep_going = True
self.running = True
self.thread = threading.Thread(None, self.run)
self.thread.daemon = True
self.thread.start()
def stop(self):
pass
def is_running(self): return self.thread is not None and self.thread.is_alive()
def find_in_directory(self, topdir):
scanlogs = [] # (filename, date)
for root, dirnames, filenames in os.walk(topdir):
if "diffscan.log" in filenames:
scanlog = os.path.join(root, "diffscan.log")
scanlogs.append((scanlog, os.path.getmtime(scanlog)))
return scanlogs
# find_in_directory()
def find_in_bsslog(self, topdir):
def read_bsslog_line(l):
if self.beamline in ("41xu","45xu") and "/ramdisk/" in l: l = l.replace("/ramdisk/","/isilon/users/", 1) # XXX should check if Pilatus or not!!
if topdir not in l: return
l = l[l.index(topdir):]
if " " in l: l = l[:l.index(" ")]
if ",.img" in l: l = l[:l.index(",.img")]
return os.path.dirname(l)
basedate = datetime.datetime.today() if self.expdate is None else self.expdate
if self.last_bsslog is None:
shikalog.debug("checking yesterday's bss log")
self.last_bsslog = os.path.join(params.blconfig, "log",
(basedate - datetime.timedelta(days=1)).strftime("bss_%Y%m%d.log"))
if not os.path.isfile(self.last_bsslog):
shikalog.info("Yesterday's log not found: %s"%self.last_bsslog)
current_bsslog = os.path.join(params.blconfig, "log", basedate.strftime("bss_%Y%m%d.log"))
if self.last_bsslog is not None and self.last_bsslog != current_bsslog and os.path.isfile(self.last_bsslog):
shikalog.debug("reading last-log %s from %d" % (os.path.basename(self.last_bsslog), self.last_bsslog_line))
for i, l in enumerate(open(self.last_bsslog)):
if i <= self.last_bsslog_line: continue
# read last log!
found = read_bsslog_line(l)
if found is not None: self._cached_dirs[found] = time.time()
# reset for reading current log
self.last_bsslog_line = 0
if os.path.isfile(current_bsslog):
shikalog.debug("reading curr-log %s from %d" % (os.path.basename(current_bsslog), self.last_bsslog_line))
i = -1 # in case empty file
for i, l in enumerate(open(current_bsslog)):
if i <= self.last_bsslog_line: continue
# read current log!
found = read_bsslog_line(l)
if found is not None: self._cached_dirs[found] = time.time()
# set for next reading
self.last_bsslog_line = i
else:
shikalog.info("bsslog not found: %s"%current_bsslog)
self.last_bsslog = current_bsslog
scanlogs = map(lambda x: os.path.join(x, "diffscan.log"), self._cached_dirs)
uid = os.getuid()
scanlogs = filter(lambda x: os.path.isfile(x) and os.stat(x).st_uid==uid, scanlogs)
if params.only_check_in_last_hours is not None and params.only_check_in_last_hours > 0:
now = time.time()
last_seconds = params.only_check_in_last_hours*60*60
scanlogs = filter(lambda x: (now-os.path.getmtime(x))<last_seconds, scanlogs)
if scanlogs: shikalog.debug("found diffscan.log in bsslog: %s" % scanlogs)
for k in self._cached_dirs.keys():
# clear old cache
if time.time() - self._cached_dirs[k] > 60*5: del self._cached_dirs[k]
return map(lambda x: (x, os.path.getmtime(x)), scanlogs)
# find_in_bsslog()
def run_inner(self, method="bsslog"):
assert method in ("bsslog", "os.walk")
startt = time.time()
if method == "bsslog":
scanlogs = self.find_in_bsslog(self.topdir)
else:
scanlogs = self.find_in_directory(self.topdir)
shikalog.debug("WatchScanlogThread.run_inner(method=%s) took %.3f sec for finding" % (method,
time.time()-startt))
if len(scanlogs) > 0:
scanlogs.sort(key=lambda x:x[1], reverse=True)
for x in scanlogs: self.queue.put(x)
# run_inner()
def run(self):
def mysleep():
if self.interval < 1:
time.sleep(self.interval)
else:
for i in xrange(int(self.interval/.5)):
if self.keep_going:
time.sleep(.5)
# mysleep()
shikalog.info("WatchScanlogThread loop STARTED")
while self.keep_going:
#shikalog.debug("in WatchScanlogThread timer")
try:
self.run_inner()
except:
shikalog.error("Error in WatchScanlogThread\n%s" % (traceback.format_exc()))
mysleep()
shikalog.info("WatchScanlogThread loop FINISHED")
self.running = False
# run()
# class WatchScanlogThread
class WatchDirThread:
def __init__(self, queue, pushport):
self.interval = 5
self.thread = None
self.queue = queue
self.dirs = set()
self.diffscan_manager = DiffScanManager()
self.zmq_context = zmq.Context()
self.ventilator_send = self.zmq_context.socket(zmq.PUSH)
self.ventilator_send.bind("tcp://*:%d"%pushport)
def start(self, interval=None):
self.stop()
self.keep_going = True
self.running = True
if interval is not None:
self.interval = interval
self.thread = threading.Thread(None, self.run)
self.thread.daemon = True
self.thread.start()
def stop(self):
if self.is_running():
#shikalog.info("Stopping WatchDirThread.. Wait.")
self.keep_going = False
self.thread.join()
else:
pass
#shikalog.info("WatchDirThread already stopped.")
def is_running(self): return self.thread is not None and self.thread.is_alive()
def run_inner(self):
#shikalog.debug("in WatchDirThread timer")
startt = time.time()
while not self.queue.empty():
scanlog, scanlogmtime = self.queue.get()
self.diffscan_manager.add_scanlog(scanlog)
if params.mode == "eiger_streaming":
# Not sure this is needed, but clearly *not* needed when mode != "eiger_streaming"
# because get_unprocessed_images() calls update_scanlogs()
self.diffscan_manager.update_scanlogs()
else:
new_imgs = self.diffscan_manager.get_unprocessed_images(env=params.env)
#print "new_imgs=", new_imgs
for img, idx, scan in new_imgs:
img_actual = img
if params.env=="ppu": img_actual = re.sub("^/isilon/users/", "/ramdisk/", img)
header = dict(file_prefix=scan.get_prefix()[:-1],
frame=idx-1,
raster_horizontal_number=scan.hpoints, raster_vertical_number=scan.vpoints,
raster_horizontal_step=scan.hstep, raster_vertical_step=scan.vstep,
raster_scan_direction=scan.scan_direction, raster_scan_path=scan.scan_path,
)
shikalog.debug("Sending %s,%s" % (img, idx))
msg = dict(imgfile=img, header=header,
cbf_data=open(img_actual, "rb").read())
self.ventilator_send.send_pyobj(msg)
shikalog.debug("WatchDirThread.run_inner took %.3f sec" % (time.time()-startt))
def run(self):
#shikalog.info("WatchDirThread loop STARTED")
while self.keep_going:
try:
self.run_inner()
except:
shikalog.error("Error in WatchDirThread\n%s" % (traceback.format_exc()))
if self.interval < 1:
time.sleep(self.interval)
else:
for i in xrange(int(self.interval/.5)):
if self.keep_going:
time.sleep(.5)
shikalog.info("WatchDirThread loop FINISHED")
self.running = False
#wx.PostEvent(self.parent, EventDirWatcherStopped()) # Ensure the checkbox unchecked when accidentally exited.
# run()
# class WatchDirThread
def walk_nolink(top, topdown=True, onerror=None):
# Original /misc/oys/xtal/cctbx/snapshots/dials-v1-8-3/base/lib/python2.7/os.py
try:
names = os.listdir(top)
except os.error, err:
if onerror is not None:
onerror(err)
return
dirs, nondirs = [], []
for name in names:
try: st = os.lstat(os.path.join(top, name))
except: continue # ignore deleted file
if stat.S_ISDIR(st.st_mode):
dirs.append((name,st))
else:
nondirs.append((name,st))
if topdown:
yield top, dirs, nondirs
for name,_ in dirs:
new_path = os.path.join(top, name)
for x in walk_nolink(new_path, topdown, onerror):
yield x
if not topdown:
yield top, dirs, nondirs
# walk_nolink()
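# Usage sketch for walk_nolink() (hypothetical snippet, not part of the original file):
# it behaves like os.walk() except that directory/file entries come back as (name, lstat)
# tuples and symbolic links are never followed, e.g.:
#   for root, dirs, files in walk_nolink("/ramdisk"):
#       cbfs = [name for name, st in files if name.endswith(".cbf")]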
class WatchRamdiskThread:
def __init__(self, pushport, interval):
self.interval = interval
self.thread = None
self.zmq_context = zmq.Context()
self.ventilator_send = self.zmq_context.socket(zmq.PUSH)
self.ventilator_send.bind("tcp://*:%d"%pushport)
def start(self, interval=None):
self.stop()
self.keep_going = True
self.running = True
if interval is not None:
self.interval = interval
self.thread = threading.Thread(None, self.run)
self.thread.daemon = True
self.thread.start()
def stop(self):
if self.is_running():
#shikalog.info("Stopping WatchRamdiskThread.. Wait.")
self.keep_going = False
self.thread.join()
else:
pass
#shikalog.info("WatchRamdiskThread already stopped.")
def is_running(self): return self.thread is not None and self.thread.is_alive()
def run_inner_inotify(self):
#shikalog.debug("in WatchRamdiskThread timer")
startt = time.time()
def is_processed(path, touch):
if not os.path.exists(touch): return False
mtime_touch = os.path.getmtime(touch)
mtime_path = os.path.getmtime(path)
return mtime_touch > mtime_path
itree = inotify.adapters.InotifyTree('/ramdisk',
mask=inotify.constants.IN_MOVED_TO|inotify.constants.IN_CLOSE_WRITE)
for header, type_names, path, filename in itree.event_gen(yield_nones=False):
print type_names, path, filename
if "IN_ISDIR" in type_names: continue
#if "IN_MOVED_TO" not in type_names: continue
if not filename.endswith(".cbf"): continue
imgf = os.path.join(path, filename)
# Check if already processed
touch = imgf+".touch"
if is_processed(imgf, touch): continue
# Send it!
img_isilon = re.sub("^/ramdisk/", "/isilon/users/", imgf)
header = dict(file_prefix=os.path.basename(img_isilon[:img_isilon.rindex("_")]),
frame=int(imgf[imgf.rindex("_")+1:imgf.rindex(".cbf")])-1)
shikalog.debug("Sending %s,%s" % (header["file_prefix"], header["frame"]+1))
msg = dict(imgfile=img_isilon, header=header,
cbf_data=open(imgf, "rb").read())
self.ventilator_send.send_pyobj(msg)
util.touch_file(touch) # mark as processed
shikalog.debug("WatchRamdiskThread.run_inner stopped in %.3f sec" % (time.time()-startt))
# run_inner_inotify()
def run_inner_walk(self):
def need_process(root, path_lst, filename_dict):
# filenames is list of (filename, lstat); directory name not included
# path_lst is (path, lstat(path)); directory name not included
path = os.path.join(root, path_lst[0])
lst = path_lst[1]
# don't process if link
if stat.S_ISLNK(lst.st_mode): return False
touch = path_lst[0] + ".touch"
# process if .touch not exists
mtime_touch = filename_dict.get(touch, None)
if mtime_touch is None: return lst.st_size >= 1000 and "Comment" in open(path).read(1000)
mtime_path = lst.st_mtime
if mtime_touch > mtime_path:
return False
else:
shikalog.debug("Newer than touch (%s; %s <= %s)"%(path, mtime_touch, mtime_path))
return lst.st_size >= 1000 and "Comment" in open(path).read(1000)
# need_process()
uid = os.getuid()
start_time = time.time()
n_dir = 0
for root, dirnames, filenames in walk_nolink("/ramdisk"):
n_dir += 1
if os.stat(root).st_uid != uid: continue
cbf_files = filter(lambda x: x[0].endswith(".cbf"), filenames)
filename_dict = dict(filenames)
new_files = filter(lambda x: need_process(root, x, filename_dict), cbf_files)
new_files = map(lambda x: os.path.join(root, x[0]), new_files)
for imgf in new_files:
img_isilon = re.sub("^/ramdisk/", "/isilon/users/", imgf)
header = dict(file_prefix=os.path.basename(img_isilon[:img_isilon.rindex("_")]),
frame=int(imgf[imgf.rindex("_")+1:imgf.rindex(".cbf")])-1)
shikalog.debug("Sending %s" % img_isilon)
msg = dict(imgfile=img_isilon, header=header,
cbf_data=open(imgf, "rb").read())
self.ventilator_send.send_pyobj(msg)
util.touch_file(imgf+".touch") # mark as processed
shikalog.debug("WatchRamdiskThread.run_inner_walk finished in %.3f sec (%d dirs)" % (time.time()-start_time, n_dir))
# run_inner_walk()
def run(self):
shikalog.info("WatchRamdiskThread loop STARTED")
while self.keep_going:
try:
self.run_inner_walk()
except:
shikalog.error("Error in WatchRamdiskThread\n%s" % (traceback.format_exc()))
if self.interval < 1:
time.sleep(self.interval)
else:
for i in xrange(int(self.interval/.5)):
if self.keep_going:
time.sleep(.5)
shikalog.info("WatchRamdiskThread loop FINISHED")
self.running = False
# run()
# class WatchRamdiskThread
class ResultsManager:
def __init__(self, rqueue, dbdir):
self.thread = threading.Thread(None, self.run)
self.thread.daemon = True
self.interval = 3
self.dbdir = dbdir
self.rqueue = rqueue
self._diffscan_params = {}
# __init__()
def start(self):
if not self.is_running():
self.keep_going = True
self.running = True
self.thread.start()
# start()
def is_running(self): return self.thread is not None and self.thread.is_alive()
def update_diffscan_params(self, msg):
wdir = str(msg["data_directory"])
prefix = str(msg["file_prefix"])
key = os.path.join(wdir, "_spotfinder", prefix)
if key in self._diffscan_params: del self._diffscan_params[key]
hstep = float(msg["raster_horizontal_step"])
# raster_horizontal_number was raster_horizotal_number until bss_jul04_2017
hpoint = int(msg.get("raster_horizotal_number", msg.get("raster_horizontal_number")))
vstep = float(msg["raster_vertical_step"])
vpoint = int(msg["raster_vertical_number"])
if "raster_scan_direction" in msg and "raster_scan_path" in msg:
scan_direction = str(msg["raster_scan_direction"])
scan_path = str(msg["raster_scan_path"])
else:
scanlog = os.path.join(wdir, "diffscan.log")
if not os.path.isfile(scanlog): return
slog = bl_logfiles.BssDiffscanLog(scanlog)
slog.remove_overwritten_scans()
matched = filter(lambda x: x.get_prefix()==prefix+"_", slog.scans)
if matched:
scan_direction = matched[-1].scan_direction
scan_path = matched[-1].scan_path
else:
return
self._diffscan_params[key] = (vpoint, vstep, hpoint, hstep, scan_direction, scan_path)
shikalog.info("_diffscan_params %s = %s" % (key, self._diffscan_params[key]))
# update_diffscan_params()
def get_raster_grid_coordinate(self, msg):
try:
header = msg["header"]
hstep = float(header["raster_horizontal_step"])
# raster_horizontal_number was raster_horizotal_number until bss_jul04_2017
hpoint = int(header.get("raster_horizotal_number", header.get("raster_horizontal_number")))
vstep = float(header["raster_vertical_step"])
vpoint = int(header["raster_vertical_number"])
scan_direction = str(header["raster_scan_direction"])
scan_path = str(header["raster_scan_path"])
return bl_logfiles.BssDiffscanLog.get_grid_coord_internal(vpoint, vstep, hpoint, hstep, msg["idx"], False, scan_direction, scan_path)
except (KeyError, TypeError):
shikalog.warning("not bringing info for grid coord: %s %s" % (msg.get("file_prefix"), msg.get("idx")))
#pass
wdir = str(msg["work_dir"])
key = os.path.join(wdir, str(msg["file_prefix"]))
if key in self._diffscan_params:
vpoint, vstep, hpoint, hstep, scan_direction, scan_path = self._diffscan_params[key]
return bl_logfiles.BssDiffscanLog.get_grid_coord_internal(vpoint, vstep, hpoint, hstep, msg["idx"], False, scan_direction, scan_path)
else:
shikalog.warning("_diffscan_params not available for %s" % key)
scanlog = os.path.join(wdir, "..", "diffscan.log")
gcxy = None
if os.path.isfile(scanlog):
slog = bl_logfiles.BssDiffscanLog(scanlog)
slog.remove_overwritten_scans()
gcxy = slog.calc_grid_coord(prefix=str(msg["file_prefix"])+"_", num=msg["idx"]) # may return None
if gcxy is None: gcxy = [float("nan")]*2
return gcxy
# get_raster_grid_coordinate()
def run(self):
shikalog.info("ResultsManager loop STARTED")
dbfile, summarydat, con, cur = None, None, None, None
rcon = sqlite3.connect(os.path.join(self.dbdir, "%s.db"%getpass.getuser()), timeout=10)
rcur = rcon.cursor()
#c = rcur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='updates';")
c = retry_until_success(rcur.execute, "SELECT name FROM sqlite_master WHERE type='table' AND name='updates';")
if c.fetchone() is None:
#rcur.execute("""create table updates (dirname text primary key,
# time real);""")
retry_until_success(rcur.execute, """create table updates (dirname text primary key,
time real);""")
while self.keep_going:
try:
messages = collections.OrderedDict()
while not self.rqueue.empty():
msg = self.rqueue.get()
if "bss_job_mode" in msg:
shikalog.info("bssinfo= %s"%msg)
try:
self.update_diffscan_params(msg)
except:
shikalog.error("Error in update_diffscan_params%s" % (traceback.format_exc()))
else:
messages.setdefault(os.path.normpath(str(msg["work_dir"])), []).append(msg)
for wdir in messages:
tmp = os.path.join(wdir, "shika.db")
if dbfile != tmp:
dbfile = tmp
con, cur = None, None
for _ in xrange(10):
try:
con = sqlite3.connect(dbfile, timeout=30)
break
except sqlite3.OperationalError:
shikalog.warning("Connecting to %s failed. Retrying" % dbfile)
if con is None:
shikalog.error("Could not connect to %s." % dbfile)
else:
cur = con.cursor()
summarydat = os.path.join(wdir, "summary.dat")
if not os.path.isfile(summarydat) or not os.path.getsize(summarydat):
open(summarydat, "w").write("prefix x y kind data filename\n")
canvas_data = {}
for msg in messages[wdir]:
imgf = os.path.basename(str(msg["imgfile"]))
spots_is = map(lambda x: x[2], msg["spots"])
if cur is not None:
# 'status'
c = cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='status';")
if c.fetchone() is None:
cur.execute("""create table status (filename text primary key);""")
cur.execute("insert or replace into status values (?)", (imgf,)) # for backward
# 'stats'
c = cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='stats';")
if c.fetchone() is None:
cur.execute("""create table stats (imgf text primary key, nspot real, total real, mean real);""")
cur.execute("insert or replace into stats values (?, ?,?,?)",
(imgf, len(msg["spots"]),
sum(spots_is),
sum(spots_is) / len(msg["spots"]) if len(msg["spots"])>0 else 0
))
# 'spots'
c = cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='spots';")
if c.fetchone() is None:
cur.execute("create table spots (filename text primary key, spots blob);")
# save jpg
if "jpgdata" in msg and msg["jpgdata"]:
jpgdir = os.path.join(wdir,
"thumb_%s_%.3d" % (str(msg["file_prefix"]), msg["idx"]//1000))
try: os.mkdir(jpgdir)
except: pass
jpgout = os.path.join(jpgdir, imgf+".jpg")
open(jpgout, "wb").write(msg["jpgdata"])
del msg["jpgdata"]
elif "thumbdata" in msg and msg["thumbdata"]:
#assert len(msg["thumbdata"])==600*600*3
thumbw = int(numpy.sqrt(len(msg["thumbdata"])/3))
assert len(msg["thumbdata"]) == 3*thumbw*thumbw
prefix = str(msg["file_prefix"])
idx = (msg["idx"]-1)//100
tmpdir = os.path.join(os.path.expanduser("~"), ".shikatmp")
if not os.path.exists(tmpdir): os.mkdir(tmpdir)
tmpfile = os.path.join(tmpdir, "%s_%s_%.3d.pkl" % (hashlib.sha256(wdir).hexdigest(), prefix, idx))
if (prefix, idx) in canvas_data:
canvas, ninc, _ = canvas_data[(prefix, idx)]
elif os.path.isfile(tmpfile):
shikalog.debug("loading thumbnail data from %s" % tmpfile)
canvas, ninc, _ = pickle.load(open(tmpfile))
else:
canvas, ninc = Image.new("RGB", (thumbw*10, thumbw*10), (0, 0, 0)), 0
thumbimg = Image.frombytes("RGB", (thumbw, thumbw), msg["thumbdata"])
idx2 = (msg["idx"]-1)%100
x, y = idx2%10, idx2//10
canvas.paste(thumbimg, (x*thumbw, y*thumbw))
ninc += 1
#canvas.save(jpgout, "JPEG", quality=90, optimize=True) # IT MAY COST TOO MUCH TIME (SHOULD RUN THIS JUST ONCE?)
# calc max frame number
hpoint = int(msg["header"].get("raster_horizotal_number", msg["header"].get("raster_horizontal_number"))) # raster_horizontal_number was raster_horizotal_number until bss_jul04_2017
vpoint = int(msg["header"]["raster_vertical_number"])
n_max = hpoint * vpoint
idx_max = (n_max-1)//100
n_in_last = n_max - idx_max*100
canvas_data[(prefix, idx)] = (canvas, ninc,
ninc == 100 or (idx_max==idx and ninc == n_in_last) # jpeg-completed flag
)
del msg["thumbdata"]
if cur is not None:
cur.execute("insert or replace into spots values (?, ?)",
(imgf, sqlite3.Binary(pickle.dumps(msg, -1))))
# summary.dat
try:
#tmptmp = time.time()
gcxy = self.get_raster_grid_coordinate(msg)
with open(summarydat, "a") as ofs:
kinds = ("n_spots", "total_integrated_signal","median_integrated_signal")
data = (len(msg["spots"]), sum(spots_is), numpy.median(spots_is))
for k, d in zip(kinds, data):
ofs.write("%s_ % .4f % .4f %s %s %s\n" % (str(msg["file_prefix"]),
gcxy[0], gcxy[1], k, d, imgf))
#shikalog.info("##DEBUG time: %f" %( time.time()-tmptmp))
except:
shikalog.error("Error in summary.dat generation at %s\n%s" % (wdir, traceback.format_exc()))
for prefix, idx in canvas_data:
tmpdir = os.path.join(os.path.expanduser("~"), ".shikatmp")
jpgdir = os.path.join(wdir, "thumb_%s" % prefix)
if not os.path.exists(tmpdir): os.mkdir(tmpdir)
if not os.path.exists(jpgdir): os.mkdir(jpgdir)
tmpfile = os.path.join(tmpdir, "%s_%s_%.3d.pkl" % (hashlib.sha256(wdir).hexdigest(), prefix, idx))
jpgout = os.path.join(jpgdir, "%s_%.6d-%.6d.jpg" % (prefix, idx*100+1, (idx+1)*100))
jpgtmp = os.path.join(jpgdir, ".tmp-%s_%.6d-%.6d.jpg" % (prefix, idx*100+1, (idx+1)*100))
shikalog.info("saving thumbnail jpeg as %s" % jpgout)
canvas_data[(prefix, idx)][0].save(jpgtmp, "JPEG", quality=50, optimize=True)
os.rename(jpgtmp, jpgout) # as it may take time
if canvas_data[(prefix, idx)][2]:
if os.path.isfile(tmpfile): os.remove(tmpfile)
else:
shikalog.info("saving thumbnail data to %s" % tmpfile)
pickle.dump(canvas_data[(prefix, idx)], open(tmpfile, "w"), -1)
while True:
try: con.commit()
except sqlite3.OperationalError:
shikalog.warning("sqlite3.OperationalError. Retrying.")
time.sleep(1)
continue
break
rcur.execute("insert or replace into updates values (?,?)", (wdir, time.time()))
rcon.commit()
shikalog.info("%4d results updated in %s" % (len(messages[wdir]), wdir))
except:
shikalog.error("Exception: %s" % traceback.format_exc())
time.sleep(self.interval)
self.running = False
shikalog.info("ResultsManager loop FINISHED")
# run()
# ResultsManager
def results_receiver(rqueue, pullport, results_manager):
zmq_context = zmq.Context()
receiver = zmq_context.socket(zmq.PULL)
receiver.bind("tcp://*:%d"%pullport)
last_times = []
while True:
msg = receiver.recv_pyobj()
if "bss_job_mode" not in msg:
last_times.append(time.time())
if len(last_times) > 50: last_times = last_times[-50:]
last_times = filter(lambda x: last_times[-1]-x < 50, last_times)
hz = float(len(last_times))/(last_times[-1]-last_times[0]) if len(last_times)>1 else 0
shikalog.info("%s %6.2f Hz (last %2d)" % (msg["imgfile"], hz, len(last_times)))
rqueue.put(msg)
#results_manager.start()
# results_receiver()
def worker(wrk_num, params):
context = zmq.Context()
# Set up a channel to receive work from the ventilator
work_receiver = context.socket(zmq.PULL)
work_receiver.connect("tcp://127.0.0.1:%d"%params.ports[0])
eiger_receiver = context.socket(zmq.PULL)
eiger_receiver.connect("tcp://%s:9999"%params.eiger_host)
# Set up a channel to send result of work to the results reporter
results_sender = context.socket(zmq.PUSH)
results_sender.connect("tcp://127.0.0.1:%d"%params.ports[1])
# Set up a channel to receive control messages over
control_receiver = context.socket(zmq.SUB)
control_receiver.connect("tcp://127.0.0.1:%d"%params.ports[2])
control_receiver.setsockopt(zmq.SUBSCRIBE, "")
# Set up a poller to multiplex the work receiver and control receiver channels
poller = zmq.Poller()
poller.register(work_receiver, zmq.POLLIN)
poller.register(control_receiver, zmq.POLLIN)
if params.mode == "eiger_streaming":
poller.register(eiger_receiver, zmq.POLLIN)
params_dict = {}
for key in config_manager.sp_params_strs:
params_str = config_manager.sp_params_strs[key] + config_manager.get_common_params_str()
master_params = libtbx.phil.parse(spot_finder_for_grid_scan.master_params_str)
working_params = master_params.fetch(sources=[libtbx.phil.parse(params_str)])
params_dict[key] = working_params.extract()
shikalog.info("worker %d ready" % wrk_num)
# Loop and accept messages from both channels, acting accordingly
while True:
socks = dict(poller.poll())
# the message came from work_receiver channel
if socks.get(work_receiver) == zmq.POLLIN:
msg = work_receiver.recv_json()
imgfile = str(msg["imgfile"])
pkey = config_manager.get_key_by_img(imgfile)
#params_str = config_manager.sp_params_strs[pkey] + config_manager.get_common_params_str()
shikalog.info("wrker%.2d: %s"%(wrk_num, msg))
#master_params = libtbx.phil.parse(spot_finder_for_grid_scan.master_params_str)
#working_params = master_params.fetch(sources=[libtbx.phil.parse(params_str)])
#working_params.show()
#dparams = working_params.extract()
dparams = params_dict[pkey]
dparams.work_dir = os.path.join(os.path.dirname(imgfile), "_spotfinder") # PPU-case!!?
if os.path.exists(dparams.work_dir): assert os.path.isdir(dparams.work_dir)
else:
try: os.mkdir(dparams.work_dir)
except: pass
result = spot_finder_for_grid_scan.run(imgfile, dparams)
result.update(msg)
result["work_dir"] = dparams.work_dir
result["params"] = dparams
results_sender.send_pyobj(result)
# the message from EIGER
if socks.get(eiger_receiver) == zmq.POLLIN:
frames = eiger_receiver.recv_multipart(copy = False)
header, data = eiger.read_stream_data(frames)
if util.None_in(header, data): continue
#params_str = config_manager.sp_params_strs[("BL32XU", "EIGER9M", None, None)] + config_manager.get_common_params_str()
#master_params = libtbx.phil.parse(spot_finder_for_grid_scan.master_params_str)
#working_params = master_params.fetch(sources=[libtbx.phil.parse(params_str)])
#working_params.show()
dparams = params_dict[("BL32XU", "EIGER9M", None, None)] #working_params.extract()
dparams.work_dir = os.path.join(str(header["data_directory"]), "_spotfinder")
if os.path.exists(dparams.work_dir): assert os.path.isdir(dparams.work_dir)
else:
try: os.mkdir(dparams.work_dir)
except: pass
shikalog.info("Got data: %s"%header)
imgfile = os.path.join(header["data_directory"],
"%s_%.6d.img"%(str(header["file_prefix"]), header["frame"]+1))
result = spot_finder_for_grid_scan.run(imgfile, dparams, data_and_header=(data, header))
result["work_dir"] = dparams.work_dir
result["params"] = dparams
result["imgfile"] = imgfile
result["template"] = "%s_%s.img"%(str(header["file_prefix"]), "?"*6)
result["idx"] = header["frame"]+1
results_sender.send_pyobj(result)
#os.remove(imgfile)
# the message for control
if socks.get(control_receiver) == zmq.POLLIN:
msg = control_receiver.recv_pyobj()
if "params" in msg:
params_dict = msg["params"]
shikalog.info("worker %d: Parameters updated" % wrk_num)
shikalog.info("Worker %d finished." % wrk_num)
# worker()
def run_from_args(argv):
if "-h" in argv or "--help" in argv:
print "All parameters:\n"
iotbx.phil.parse(master_params_str).show(prefix=" ", attributes_level=1)
return
cmdline = iotbx.phil.process_command_line(args=argv,
master_string=master_params_str)
global params
params = cmdline.work.extract()
args = cmdline.remaining_args
if params.topdir is None: params.topdir = os.getcwd()
if params.mode == "zoo":
if params.bl == "32xu": params.blconfig = "/isilon/BL32XU/BLsoft/PPPP/10.Zoo/ZooConfig/"
elif params.bl == "26b2": params.blconfig = "/isilon/users/rikenbl/rikenbl/Zoo/ZooConfig/"
if params.blconfig is None:
params.blconfig = "/isilon/blconfig/bl%s" % params.bl
shikalog.config(params.bl, "backend", params.logroot)
if params.force_ssh_from is not None:
shikalog.info("SSH_CONNECTION= %s" % os.environ.get("SSH_CONNECTION", ""))
if "SSH_CONNECTION" not in os.environ:
print
print "ERROR!! Cannot get host information! SHIKA cannot start."
print "Please contact staff. Need to use ssh or change parameter."
print
return
ssh_from = os.environ["SSH_CONNECTION"].split()[0]
if ssh_from != params.force_ssh_from:
print
print "ERROR!! Your host is not allowed! SHIKA cannot start here."
if ssh_from[:ssh_from.rindex(".")] == "10.10.126" and 162 <= int(ssh_from[ssh_from.rindex(".")+1:]) <= 170:
shikalog.debug("Access from Oyster computer.")
print "You appear to be in Oyster. Please *LOGOUT* from Oyster, and try to start SHIKA from your local computer!!"
else:
print "Please contact staff. Need to access from the allowed host."
print
return
#import subprocess
#import sys
#pickle.dump(params, open("/tmp/params.pkl","w"),-1)
#pp = []
for i in xrange(params.nproc):
Process(target=worker, args=(i,params)).start()
#p = subprocess.Popen(["%s -"%sys.executable], shell=True, stdin=subprocess.PIPE)
#p.stdin.write("from yamtbx.dataproc.myspotfinder.command_line.spot_finder_backend import worker\nimport pickle\nworker(%d, pickle.load(open('/tmp/params.pkl')))"%i)
#p.stdin.close()
#pp.append(p)
rqueue = Queue.Queue()
results_manager = ResultsManager(rqueue=rqueue, dbdir=params.dbdir)
if params.mode == "watch_ramdisk":
ramdisk_watcher = WatchRamdiskThread(pushport=params.ports[0],
interval=params.ramdisk_walk_interval)
ramdisk_watcher.start()
elif params.mode != "eiger_streaming":
queue = Queue.Queue()
scanlog_watcher = WatchScanlogThread(queue, topdir=params.topdir,
beamline=params.bl, expdate=params.date)
dir_watcher = WatchDirThread(queue, pushport=params.ports[0])
scanlog_watcher.start()
dir_watcher.start()
results_manager.start() # this blocks!??!
results_receiver(rqueue=rqueue, pullport=params.ports[1], results_manager=results_manager)
#for p in pp: p.wait()
if params.nproc==0:
while True:
time.sleep(1)
# run_from_args()
if __name__ == "__main__":
import sys
run_from_args(sys.argv[1:])
```
#### File: yamtbx/dataproc/spotfinder_info.py
```python
import collections
from cctbx.array_family import flex
all_keys = ('all',
'spots_non-ice',
'hi_pass_resolution_spots',
'ice_free_resolution_spots',
'lo_pass_resolution_spots',
'spots_unimodal',
'spots_low_skew',
'spots_good_intensity',
'spots_separated')
def filter_spots_with_ex_resolution_range(spots, resolutions, exclude_resolution_ranges):
# TODO: more efficient way!
ret = []
for spot, res in zip(spots, resolutions):
found = False
for rng in exclude_resolution_ranges:
if min(rng) < res < max(rng):
found = True
break
if not found:
ret.append(spot)
return ret
# filter_spots_with_ex_resolution_range()
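# Usage sketch (hypothetical values): exclude_resolution_ranges is a list of resolution
# pairs in Angstrom; a spot is dropped when its resolution falls strictly between the
# min and max of a pair, e.g. to skip typical ice-ring shells:
#   kept = filter_spots_with_ex_resolution_range(spots, resolutions,
#                                                [(3.93, 3.87), (3.70, 3.64)])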
class SpotsBase:
def __init__(self):
self.all_spots = None # flex.distl_spot type
self.spots = collections.OrderedDict() # {descriptor: flex.int}
self.intensities = {}
self.resolutions = {}
self.total_integrated_signal = {}
self.mean_integrated_signal= {}
self.median_integrated_signal= {}
self.n_spots = {}
# __init__()
def keys(self):
return ("all",) + tuple(self.spots.keys())
# keys()
def get_spots(self, key, exclude_resolution_ranges=[]):
key = self.find_nearest_key(key)
spots = self.all_spots if key == "all" else [self.all_spots[i] for i in self.spots[key]]
if exclude_resolution_ranges == []:
return spots
else:
return filter_spots_with_ex_resolution_range(spots, self.resolutions[key], exclude_resolution_ranges)
# get_spots()
def find_nearest_key(self, key):
assert key in all_keys or key == "xds"
if key in self.spots.keys() or key == "all": # fast ver
return key
for key in all_keys[all_keys.index(key)::-1]:
if key in self.spots.keys():
return key
return "all"
# find_nearest_key()
def get_total_integrated_signal(self, key, exclude_resolution_ranges=[]):
key = self.find_nearest_key(key)
if exclude_resolution_ranges == []:
return self.total_integrated_signal[key]
else:
intensities = filter_spots_with_ex_resolution_range(self.intensities[key],
self.resolutions[key],
exclude_resolution_ranges)
return flex.sum(flex.double(intensities))
# get_total_integrated_signal()
def get_mean_integrated_signal(self, key, exclude_resolution_ranges=[]):
key = self.find_nearest_key(key)
if exclude_resolution_ranges == []:
return self.mean_integrated_signal[key]
else:
intensities = filter_spots_with_ex_resolution_range(self.intensities[key],
self.resolutions[key],
exclude_resolution_ranges)
if len(intensities) > 0:
return flex.sum(flex.double(intensities)) / len(intensities)
else:
return 0
# get_mean_integrated_signal()
def get_median_integrated_signal(self, key, exclude_resolution_ranges=[]):
key = self.find_nearest_key(key)
if exclude_resolution_ranges == []:
if hasattr(self, "median_integrated_signal"): # backward compatibility
return self.median_integrated_signal[key]
else:
print "NOTE: median is not available. using mean value."
return self.mean_integrated_signal[key]
else:
intensities = filter_spots_with_ex_resolution_range(self.intensities[key],
self.resolutions[key],
exclude_resolution_ranges)
if len(intensities) > 0:
return flex.median(flex.double(intensities))
else:
return 0
# get_median_integrated_signal()
def get_n_spots(self, key, exclude_resolution_ranges=[]):
if exclude_resolution_ranges == []:
return self.n_spots[self.find_nearest_key(key)]
else:
return len(self.get_spots(key, exclude_resolution_ranges))
# get_n_spots()
def get_resolutions(self, key):
return self.resolutions[self.find_nearest_key(key)]
# get_resolutions()
def get_summed_peaks(self, key):
pass
class DummySpot: # Dummy spot class for XDS result
def __init__(self, x, y, d, intensity):
self.x = x
self.y = y
self.resolution = d
self.intensity = intensity
# __init__()
def max_pxl_x(self): return self.y # Swapped!
def max_pxl_y(self): return self.x
# class DummySpot
class XDSSpots(SpotsBase):
def __init__(self, spots):
SpotsBase.__init__(self)
self.set_spots(spots)
def set_spots(self, spots):
self.all_spots = []
for x, y, d, intensity in spots:
self.all_spots.append(DummySpot(x, y, d, intensity))
self.spots["xds"] = range(len(self.all_spots))
for k in self.keys():
summed_wts = [spot.intensity for spot in self.get_spots(k)]
self.intensities[k] = summed_wts
self.resolutions[k] = [spot.resolution for spot in self.get_spots(k)] # XXX calculate resolution!!
total_summed = sum(summed_wts)
if len(summed_wts) > 0:
self.mean_integrated_signal[k] = total_summed / len(summed_wts)
self.median_integrated_signal[k] = flex.median(flex.double(summed_wts))
else:
self.mean_integrated_signal[k] = 0.
self.median_integrated_signal[k] = 0.
self.total_integrated_signal[k] = total_summed
self.n_spots[k] = len(summed_wts)
# set_spots()
class DistlSpots(SpotsBase):
def __init__(self, fstats):
SpotsBase.__init__(self)
"""
fstats is the result of practical_heuristics.heuristics_base.oneImage()
"""
self.set_fstats(fstats)
# __init__()
def set_fstats(self, fstats):
self.all_spots = None
self.spots = collections.OrderedDict()
self.total_integrated_signal = {}
self.mean_integrated_signal= {}
self.median_integrated_signal= {}
self.n_spots = {}
for k in sorted(fstats.nodes.keys()):
node = fstats.nodes[k] # XXX some data in node.data will be corrupted (e.g. resolution, wts) but x and y coordinates look ok (it works later). why?
if node.descriptor == "spots_total":
self.all_spots = node.data
else:
self.spots[node.descriptor] = node.data
# Pre-calculate stats
for k in self.keys():
summed_wts = [flex.sum(spot.wts) for spot in self.get_spots(k)]
self.intensities[k] = summed_wts
self.resolutions[k] = [spot.resolution for spot in self.get_spots(k)]
total_summed = flex.sum(flex.double(summed_wts))
if len(summed_wts) > 0:
self.mean_integrated_signal[k] = total_summed / len(summed_wts)
self.median_integrated_signal[k] = flex.median(flex.double(summed_wts))
else:
self.mean_integrated_signal[k] = 0.
self.median_integrated_signal[k] = 0.
self.total_integrated_signal[k] = total_summed
self.n_spots[k] = len(summed_wts)
# set_fstats()
def get_summed_peaks(self, key):
return flex.double([flex.sum(spot.wts) for spot in self.get_spots(key)])
# get_summed_peaks()
```
#### File: xds/command_line/get_cc_with_multiple_targets.py
```python
import iotbx.phil
import iotbx.file_reader
import iotbx.reflection_file_utils
from cctbx.array_family import flex
from mmtbx.scaling.absolute_scaling import kernel_normalisation
from yamtbx import util
from yamtbx.util.xtal import CellConstraints
from yamtbx.dataproc.xds import xds_ascii
import collections
master_params_str = """\
lstin = None
.type = path
.help = "List of XDS_ASCII.HKL"
dat_out = "cc_with_targets.dat"
.type = path
.help = "Output file"
normalization = *no E rel_B wilson_B
.type = choice(multi=False)
.help = "Normalization of each intensities"
d_min = None
.type = float
d_max = None
.type = float
"""
def calc_cc(ari, arj):
ari, arj = ari.common_sets(arj, assert_is_similar_symmetry=False)
corr = flex.linear_correlation(ari.data(), arj.data())
if corr.is_well_defined():
return corr.coefficient(), ari.size()
else:
return float("nan"), ari.size()
# calc_cc()
def read_target_files(target_files, d_min, d_max, normalization, log_out):
ret = collections.OrderedDict()
for i, f in enumerate(target_files):
f = iotbx.file_reader.any_file(f, force_type="hkl", raise_sorry_if_errors=True)
arrays = f.file_server.get_miller_arrays(None)
scores = iotbx.reflection_file_utils.get_xray_data_scores(arrays, ignore_all_zeros=True,
prefer_anomalous=False, prefer_amplitudes=False)
array = arrays[scores.index(max(scores))]
log_out.write("# target%.3d = %s %s\n" % (i, array.info(), array.d_max_min()))
if array.anomalous_flag(): array = array.average_bijvoet_mates()
array = array.as_intensity_array().resolution_filter(d_max=d_max, d_min=d_min)
if normalization == "E":
normaliser = kernel_normalisation(array, auto_kernel=True)
ret[f] = array.customized_copy(data=array.data()/normaliser.normalizer_for_miller_array,
sigmas=array.sigmas()/normaliser.normalizer_for_miller_array if array.sigmas() else None)
else:
ret[f] = array
return ret
# read_target_files()
def run(params, target_files):
assert params.normalization in ("no", "E")
ofs = open(params.dat_out, "w")
xac_files = util.read_path_list(params.lstin)
targets = read_target_files(target_files, params.d_min, params.d_max, params.normalization, ofs)
cellcon = CellConstraints(targets.values()[0].space_group())
#for i, t in enumerate(targets): ofs.write("# target%.3d = %s\n" % (i,t))
ofs.write("# normalization = %s\n" % params.normalization)
ofs.write("# d_min, d_max = %s, %s\n" % (params.d_min, params.d_max))
ofs.write("file %s " % cellcon.get_label_for_free_params())
ofs.write(" ".join(map(lambda x: "cc.%.3d nref.%.3d"%(x,x), xrange(len(targets)))))
ofs.write("\n")
for xac_file in xac_files:
print "reading", xac_file
xac = xds_ascii.XDS_ASCII(xac_file)
xac.remove_rejected()
iobs = xac.i_obs(anomalous_flag=False).merge_equivalents(use_internal_variance=False).array()
ofs.write("%s %s" % (xac_file, cellcon.format_free_params(iobs.unit_cell())))
fail_flag = False
if params.normalization == "E":
try:
normaliser = kernel_normalisation(iobs, auto_kernel=True)
iobs = iobs.customized_copy(data=iobs.data()/normaliser.normalizer_for_miller_array,
sigmas=iobs.sigmas()/normaliser.normalizer_for_miller_array)
except:
fail_flag = True
for i, ta in enumerate(targets.values()):
if fail_flag:
# normalization failed for this dataset; write NaN so the row stays aligned
ofs.write(" % .4f %4d" % (float("nan"), 0))
else:
cc_num = calc_cc(iobs, ta)
ofs.write(" % .4f %4d" % cc_num)
ofs.write("\n")
# run()
if __name__ == "__main__":
import sys
cmdline = iotbx.phil.process_command_line(args=sys.argv[1:],
master_string=master_params_str)
params = cmdline.work.extract()
targets = cmdline.remaining_args
run(params, targets)
```
#### File: xds/command_line/make_plot_to_compare_correctlp.py
```python
from yamtbx.dataproc.xds.correctlp import CorrectLp
import iotbx.phil
from collections import OrderedDict
import math
import matplotlib.pyplot as plt
from matplotlib.ticker import FuncFormatter
master_params_str="""\
plot = *ios *rmeas *ccano *cmpl sigano cchalf red
.type = choice(multi=True)
.help = What to plot
output = "plot.pdf"
.type = path
rdataout = "for_R.dat"
.type = path
"""
def run(params, args):
ofs = open(params.rdataout, "w")
ofs.write("name s2max variable value\n")
for_plot = OrderedDict()
for p in params.plot:
print "Preparing", p
for_plot[p] = OrderedDict()
trans_table = dict(ios="i_over_sigma",
rmeas="r_meas",
ccano="cc_ano",
cmpl="cmpl",
sigano="sig_ano",
cchalf="cc_half",
red="redundancy")
for lpfile, label in ((args[2*i],args[2*i+1]) for i in xrange((len(args))//2)):
lp = CorrectLp(lpfile)
print label, lpfile, lp.space_group.info(), "anomalous=%s"%lp.anomalous_flag
ofs.write("# %s %s %s anomalous=%s\n" % (label, lpfile, lp.space_group.info(), lp.anomalous_flag))
plot_x = map(lambda x:1/x**2, lp.table["all"]["dmin"][:-1])
for p in params.plot:
plot_y = lp.table["all"][trans_table[p]][:-1]
for_plot[p][label] = plot_x, plot_y
for px, py in zip(plot_x, plot_y):
ofs.write("%s %.5f %s %f\n" % (label, px, trans_table[p], py))
fig, ax = plt.subplots()
#plt.title("Comapring xds results")
s2_formatter = lambda x,pos: "inf" if x == 0 else "%.2f" % (1./math.sqrt(x))
for i, p in enumerate(params.plot):
ax = plt.subplot(len(params.plot),1,i+1)
for lab in for_plot[p]:
plot_x, plot_y = for_plot[p][lab]
ax.plot(plot_x, plot_y, label=lab, marker="o")
ax.set_ylabel(trans_table[p])
ax.xaxis.set_major_formatter(FuncFormatter(s2_formatter))
plt.xlabel("Resolution [A]")
#plt.legend()
leg = plt.legend(loc='center left', bbox_to_anchor=(1,0.5), numpoints=1)
fig.subplots_adjust(top=0.8)
fig.savefig(params.output, bbox_extra_artists=(leg,), bbox_inches='tight')
plt.show()
print """
Instruction for R:
R
library(ggplot2)
d <- read.table("%s", h=T)
number_ticks <- function(n) {function(limits) pretty(limits, n)}
ggplot(d, aes(x=s2max, y=value, colour=factor(name))) + geom_point() + geom_line() + facet_grid(variable~., scale="free") + scale_x_continuous(label=function(x)sprintf("%%.2f", 1/sqrt(x)), breaks=number_ticks(10))
""" % params.rdataout
# run()
if __name__ == "__main__":
import sys
cmdline = iotbx.phil.process_command_line(args=sys.argv[1:],
master_string=master_params_str)
params = cmdline.work.extract()
args = cmdline.remaining_args
run(params, args)
```
#### File: xds/command_line/xds2shelx.py
```python
import os
from yamtbx.util import call
from yamtbx.util import xtal
from yamtbx.dataproc.xds.xds_ascii import XDS_ASCII
from cctbx import sgtbx
def xds2shelx(xds_file, dir_name, prefix=None, dmin=None, dmax=None, force_anomalous=False, space_group=None, flag_source=None, add_flag=False):
if prefix is None:
prefix = os.path.splitext(os.path.basename(xds_file))[0]
hklout = prefix + ".hkl"
# if output file already exists, exit.
if os.path.isfile(os.path.join(dir_name, hklout)):
raise Exception(os.path.join(dir_name, hklout), "already exists.")
# read header
xac = XDS_ASCII(xds_file, read_data=False)
# use the header wavelength if present; otherwise take it from the first input file entry, else fall back to 1.0
wavelength = xac.wavelength
if wavelength is None and xac.input_files:
wavelength = float(xac.input_files.values()[0][1])
elif wavelength is None:
wavelength = 1.0
anom_flag = xac.anomalous
if force_anomalous: anom_flag = True
sginfo_org = xac.symm.space_group_info()
if space_group:
sginfo = sgtbx.space_group_info(space_group)
else:
sginfo = sginfo_org
sg = sginfo.group()
# make output directory
if not os.path.isdir(dir_name):
os.makedirs(dir_name)
logout = open(os.path.join(dir_name, "xds2shelx.log"), "w")
print >>logout, "xds2shelx.py running in %s" % os.getcwd()
print >>logout, "output directory: %s" % dir_name
print >>logout, "original file: %s" % xds_file
print >>logout, "flag_source: %s" % flag_source
print >>logout, "space group: %s (original=%s, requested space_group=%s)" % (sginfo, sginfo_org, space_group)
if sginfo_org.group().build_derived_reflection_intensity_group(False) != sg.build_derived_reflection_intensity_group(False):
print >>logout, " WARNING!! specified space group is incompatible with original file (%s)." % sginfo_org
print >>logout, "anomalous: %s (original=%s force_anomalous=%s)" % (anom_flag, xac.anomalous, force_anomalous)
print >>logout, ""
logout.flush()
if sg.is_centric() and not sg.is_origin_centric():
print >>logout, "Error: in shelx, the origin must lie on a center of symmetry."
logout.flush()
return
##
if not os.path.exists(os.path.join(dir_name, "original")):
os.symlink(xds_file, os.path.join(dir_name, "original"))
##
# prepare XDSCONV.INP and run
#
with open(os.path.join(dir_name, "XDSCONV.INP"), "w") as ofs:
ofs.write("OUTPUT_FILE=%s SHELX\n" % hklout)
ofs.write("INPUT_FILE=original\n")
ofs.write("MERGE= FALSE\n")
ofs.write("FRIEDEL'S_LAW= %s\n" % ("FALSE" if anom_flag else "TRUE"))
if None not in (dmin, dmax):
ofs.write("INCLUDE_RESOLUTION_RANGE= %s %s\n" % (dmax, dmin))
call(cmd="xdsconv",
wdir=dir_name,
expects_in=["original"],
expects_out=[hklout],
stdout=logout
)
cell_str = xtal.format_unit_cell(xac.symm.unit_cell(), lfmt="%8.4f", afmt="%7.3f")
with open(os.path.join(dir_name, "%s.ins"%prefix), "w") as ofs:
ofs.write("CELL %.4f %s\n" % (wavelength, cell_str))
ofs.write("ZERR 1 0 0 0 0 0 0\n")
ofs.write("LATT %s\n" % xtal.shelx_latt(sg))
for iop in range(1, sg.n_smx()):
ofs.write("SYMM %s\n" % sg(iop).as_xyz(decimal=True, t_first=True, symbol_letters="XYZ"))
ofs.write("SFAC C N O S\n")
ofs.write("UNIT 6 6 6 6\n")
ofs.write("FIND 10\n") # TODO more intelligent
ofs.write("NTRY 1000\n")
ofs.write("HKLF 4\n")
ofs.write("END\n")
#if flag_source is not None:
# copy_test_flag(os.path.join(dir_name, hklout), flag_source, log_out=logout)
#elif add_flag:
# add_test_flag(os.path.join(dir_name, hklout), log_out=logout)
if __name__ == "__main__":
import sys
import optparse
parser = optparse.OptionParser(usage="usage: %prog [options] [XDS_ASCII.HKL]")
parser.add_option("--dir","-d", action="store", type=str, dest="dir", default="shelx",
help="output directory")
parser.add_option("--anomalous","-a", action="store_true", dest="anomalous", help="force anomalous")
parser.add_option("--dmin", action="store", dest="dmin", help="high resolution cutoff") # as str
parser.add_option("--dmax", action="store", dest="dmax", help="low resolution cutoff") # as str
#parser.add_option("--copy-test-flag","-r", action="store", dest="flag_source", help="")
#parser.add_option("--add-test-flag", action="store_true", dest="add_flag", help="")
parser.add_option("--space-group", action="store", type=str, dest="sg", help="Space group number or name")
(opts, args) = parser.parse_args(sys.argv)
##
# the default input file name is "XDS_ASCII.HKL"
#
if len(args) < 2:
xds_file = os.path.abspath("XDS_ASCII.HKL")
else:
xds_file = os.path.abspath(args[1])
if not os.path.isfile(xds_file):
print "Cannot open", xds_file
sys.exit(1)
xds2shelx(xds_file, dir_name=opts.dir,
dmin=opts.dmin, dmax=opts.dmax, force_anomalous=opts.anomalous,
#flag_source=opts.flag_source, add_flag=opts.add_flag,
space_group=opts.sg)
```
#### File: xds/command_line/xds_check_spots_with_image.py
```python
from yamtbx.dataproc.xds import idxreflp
from yamtbx.dataproc.xds import get_xdsinp_keyword
from yamtbx.dataproc import dataset
from yamtbx.dataproc.adxv import Adxv
import iotbx.phil
import time
import os
import socket
import subprocess
master_params_str = """
adxv_bin = adxv
.type = str
image = 1
.type = int
xds_dir = "."
.type = path
interactive = True
.type = bool
"""
def run(params):
xds_inp = os.path.join(params.xds_dir, "XDS.INP")
spot_xds = os.path.join(params.xds_dir, "SPOT.XDS")
spots = idxreflp.SpotXds(spot_xds).indexed_and_unindexed_by_frame_on_detector()
inputs = get_xdsinp_keyword(xds_inp)
filename_template = dict(inputs).get("NAME_TEMPLATE_OF_DATA_FRAMES", "")
if filename_template == "":
print "Error! Can't find filename from XDS.INP"
return
# Start adxv
adxv = Adxv(params.adxv_bin)
adxv.start(params.xds_dir)
type_indexed = adxv.define_spot("blue")
type_unindexed = adxv.define_spot("red")
num = params.image
while True:
print "Showing image %d" % num
img_file = dataset.template_to_filenames(filename_template, num, num)[0]
adxv.open_image(img_file)
uninds = map(lambda x: [x[0], x[1], type_unindexed], spots["unindexed"].get(num, []))
inds = map(lambda x: [x[0], x[1], type_indexed], spots["indexed"].get(num, []))
print "Showing %d Indexed spots (blue)" % len(inds)
print "Showing %d Unindexed spots (red)" % len(uninds)
adxv.load_spots(inds+uninds)
time.sleep(1) # wait until adxv finishes process..
num = int(raw_input("Next image number?: "))
# run()
if __name__ == "__main__":
import sys
cmdline = iotbx.phil.process_command_line(args=sys.argv[1:],
master_string=master_params_str)
params = cmdline.work.extract()
args = cmdline.remaining_args
run(params)
```
#### File: xds/command_line/xds_dump_CORRECT.LP.py
```python
from yamtbx.dataproc.xds import correctlp
def run(f):
lp = correctlp.CorrectLp(f)
print lp.snippets["ISa"]
print lp.snippets["table1"]
return lp
# run()
if __name__ =="__main__":
import sys
for f in sys.argv[1:]:
print f
print "===================="
run(f)
print
```
#### File: xds/command_line/xds_inp_for_multisegment_pilatus.py
```python
from yamtbx.dataproc.command_line.pilatus_for_crystfel import make_geom_decomposed_for_raw
from yamtbx.dataproc.XIO import XIO
from yamtbx.dataproc import crystfel
from yamtbx.dataproc.crystfel.command_line.geom_for_xds import geom_to_xdsinp_str
def run(img_in):
#im = XIO.Image(img_in)
#make_geom_decomposed_for_raw(im.header, (2527, 2463), "junk.geom")
#geom = crystfel.geom.Geomfile("junk.geom")
#print geom_to_xdsinp_str(geom)
shape = (2527, 2463)
panel = 0
if shape == (2527, 2463):
for j in xrange(12):
for i in xrange(5):
panel += 1
print """\
SEGMENT= %(min_fs)d %(max_fs)d %(min_ss)d %(max_ss)d ! %(panel).2d
DIRECTION_OF_SEGMENT_X-AXIS= 1.000000 0.000000 0
DIRECTION_OF_SEGMENT_Y-AXIS= 0.000000 1.000000 0
SEGMENT_ORGX= 0
SEGMENT_ORGY= 0
SEGMENT_DISTANCE= 0
REFINE_SEGMENT=\
""" % dict(panel=panel,
min_ss=j*212+1, max_ss=j*212+195,
min_fs=i*494+1, max_fs=i*494+487
)
if __name__ == "__main__":
import sys
run(sys.argv[1])
```
#### File: xds/command_line/xds_integrate_lp_to_xparm.py
```python
import os
from yamtbx.dataproc.xds.xparm import get_xparm_from_integrate_lp
def run(integrate_lp, frame_num):
xparm_str = get_xparm_from_integrate_lp(integrate_lp, frame_num)
open("XPARM.XDS.%.4d"%frame_num, "w").write(xparm_str)
# run()
if __name__ == "__main__":
import sys
integrate_lp = sys.argv[1]
frame = int(sys.argv[2])
run(integrate_lp, frame)
```
#### File: xds/command_line/xds_merge_framecbf.py
```python
from yamtbx.dataproc import cbf
def run(files, cbfout):
merged = None
for i, f in enumerate(files):
repl = -10 * (i+1)
print "%s %d" % (f, repl)
data, ndimfast, ndimmid = cbf.load_minicbf_as_numpy(f)
if i == 0:
merged = data.copy()
continue
merged[data==-10] = 65540 # repl # for adxv visualization. only assuming two files.
cbf.save_numpy_data_as_cbf(merged, ndimfast, ndimmid, "merged_predictions", cbfout)
if __name__ == "__main__":
import sys
if len(sys.argv) < 3:
print "Usage: %s FRAME.1.cbf FRAME.2.cbf .." % sys.argv[0]
quit()
run(sys.argv[1:], "FRAME_merged.cbf")
```
#### File: xds/command_line/xds_plot_integrate.py
```python
"""
TODO: plot differences in direct beam and rotation axis
"""
import sys
from yamtbx.dataproc.xds import integratelp
from yamtbx.util.xtal import CellConstraints
def make_plot(lp, log_out):
ofs = open(log_out, "w")
ofs.write("$TABLE: Parameters estimated for each frame:\n")
ofs.write("$GRAPHS\n")
ofs.write(":scales")
ofs.write(":A:1,2:\n")
ofs.write(":number of overloaded/strong/unexpected reflections")
ofs.write(":A:1,3,4,5:\n")
ofs.write(":SIGMAB (beam divergence e.s.d.)")
ofs.write(":A:1,6:\n")
ofs.write(":SIGMAR (reflecting range e.s.d.)")
ofs.write(":A:1,7:\n")
ofs.write("$$\n")
ofs.write("Frame scale overlods nstrong nrej sigmaD sigmaM $$\n$$\n")
for f, scale, novl, nstr, nrej, sd, sm in zip(lp.frames, lp.scales, lp.overloads, lp.strongs, lp.rejecteds, lp.sigmads, lp.sigmars):
ofs.write("%5d %s %d %d %d %s %s\n" % (f, scale, novl, nstr, nrej, sd, sm))
ofs.write("$$\n")
ofs.write("\n\n\n")
ofs.write("$TABLE: Parameters estimated for each block:\n")
ofs.write("$GRAPHS\n")
ofs.write(":unit cell length a")
ofs.write(":A:1,2:\n")
cellconstr = CellConstraints(lp.space_group)
if not cellconstr.is_b_equal_a():
ofs.write(":unit cell length b")
ofs.write(":A:1,3:\n")
if not cellconstr.is_c_equal_a_b():
ofs.write(":unit cell length c")
ofs.write(":A:1,4:\n")
if not cellconstr.is_angle_constrained("alpha"):
ofs.write(":unit cell angle alpha")
ofs.write(":A:1,5:\n")
if not cellconstr.is_angle_constrained("beta"):
ofs.write(":unit cell angle beta")
ofs.write(":A:1,6:\n")
if not cellconstr.is_angle_constrained("gamma"):
ofs.write(":unit cell angle gamma")
ofs.write(":A:1,7:\n")
ofs.write(":rotations off from initial orientation")
ofs.write(":A:1,8,9,10:\n")
ofs.write(":distance")
ofs.write(":A:1,11:\n")
ofs.write(":deviations from predicted positions")
ofs.write(":A:1,12,13:\n")
ofs.write(":beam center")
ofs.write(":A:1,14,15:\n")
ofs.write(":missetting angles")
ofs.write(":A:1,16,17,18:\n")
ofs.write("$$\n")
ofs.write("#image a b c alpha beta gamma rotx roty rotz dist spot spindle orgx orgy phix phiy phiz$$\n$$\n")
for images, param in sorted(lp.blockparams.items()):
for i in images:
print >>ofs, "%4d " % i, " ".join(param.get("cell", ["D"]*6)), " ".join(param.get("rotation", ["D"]*3)), param.get("dist","D"), param.get("spot","D"), param.get("spindle","D"), " ".join(param.get("orig",["D"]*2)), " ".join(param.get("misset",["D"]*3))
ofs.write("$$\n")
ofs.write("\n\n\n")
ofs.write("$TABLE: sigmaB and sigmaR on 9 areas for each block:\n")
ofs.write("$GRAPHS\n")
ofs.write(":SIGMAB")
ofs.write(":A:1,2,3,4,5,6,7,8,9,10:\n")
ofs.write(":SIGMAR")
ofs.write(":A:1,11,12,13,14,15,16,17,18,19:\n")
ofs.write("$$\n")
ofs.write("#image %s %s$$\n$$\n" % (" ".join(["sigmab%d"%x for x in range(1,10)]), " ".join(["sigmar%d"%x for x in range(1,10)])))
for images, param in sorted(lp.blockparams.items()):
for i in images:
print >>ofs, "%4d " % i, " ".join(param["sigmab9"]), " ".join(param["sigmar9"])
ofs.write("$$\n")
ofs.write("\n\n\n")
# make_plot()
def run(int_lp, log_out="plot_integrate.log"):
lpobj = integratelp.IntegrateLp(int_lp)
make_plot(lpobj, log_out)
# run()
if __name__ == "__main__":
import sys
if len(sys.argv) > 1:
int_lp = sys.argv[1]
else:
int_lp = "INTEGRATE.LP"
log_out = "plot_integrate.log"
run(int_lp, log_out)
print
print "Run:"
print "loggraph", log_out
```
#### File: xds/command_line/xds_setup_scala_exp.py
```python
"""
Helper script for setting up mtz files (?) for comparing results with scala
Usage:
PHENIX_TRUST_OTHER_ENV=1 xds_setup_scala_exp.py
What we want to compare:
1. XDS CORRECT
a. all defaults
b. change WFAC1?
c.
2. Scala after INTEGRATE
3. Scala after CORRECT
a. all defaults
b. CORRECTIONS= NBATCH= 1 MINIMUM_I/SIGMA= 50 WFAC1= 2
c. CORRECTIONS= MODULATION NBATCH= 1
How about REFINE(CORRECT)=???
CORRECT related parameters
OVERLOAD=, SILICON=, SENSOR_THICKNESS=, MINIMUM_ZETA=, FRACTION_OF_POLARIZATION=, POLARIZATION_PLANE_NORMAL=
AIR=, REFINE(CORRECT)=, MINPK=, WFAC1=, STRICT_ABSORPTION_CORRECTION=, PATCH_SHUTTER_PROBLEM=, NBATCH=, REJECT_ALIEN=
This script MUST be used after geometric parameters are converged. Do recycling GXPARM->XPARM; INTEGRATE->CORRECT !
Scala's schemes:
Scheme.
1. Backup XDS.INP, CORRECT.LP, XDS_ASCII.HKL (and all other CORRECT outputs?)
2. Make INTEGRATE_forscale.mtz with pointless.
3. Run CORRECT with all default parameters and make xds_CORRECT.mtz (with xdsconv), CORRECT_a_forscale.mtz (with pointless)
4. Run CORRECT with setting b and make CORRECT_b_forscale.mtz (with pointless)
5. Run CORRECT with setting c and make CORRECT_c_forscale.mtz (with pointless)
6. Run Scala or Aimless for
- INTEGRATE_forscale.mtz => aimless_INTEGRATE.mtz
- CORRECT_a_forscale.mtz => aimless_CORRECT_a.mtz
- CORRECT_b_forscale.mtz => aimless_CORRECT_b.mtz
- CORRECT_c_forscale.mtz => aimless_CORRECT_c.mtz
"""
import sys, os, optparse, shutil, re, subprocess, random, glob, string
import iotbx.mtz
from cctbx import r_free_utils
from cctbx import miller
from iotbx.reflection_file_utils import get_r_free_flags_scores
from yamtbx.dataproc.xds import *
from yamtbx.util import call
from yamtbx.dataproc.xds.command_line import xds2mtz
backup_needed = ("XDS.INP", "XDS_ASCII.HKL", "CORRECT.LP")
needed_files = ("BKGPIX.cbf", "BLANK.cbf", "GAIN.cbf", "X-CORRECTIONS.cbf", "Y-CORRECTIONS.cbf",
"XDS.INP", "XPARM.XDS")
def check_needed_files():
not_exists = [ f for f in needed_files if not os.path.isfile(f) ]
if not_exists != []:
print "We need these files!"
print " " + ",".join(not_exists)
print
return len(not_exists) == 0
# check_needed_files()
def is_anomalous(xdsinp):
kwds = get_xdsinp_keyword(xdsinp)
for kwd, val in kwds:
if kwd == "FRIEDEL'S_LAW" and val.strip() == "FALSE":
return True
return False
# is_anomalous()
def copy_testflag(mtzfree, mtzin, mtzout):
call(cmd="copy_free_R_flag.py -r %s -o %s %s" % (mtzfree, mtzout, mtzin))
# copy_testflag()
def run_aimless(prefix, wdir):
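    # Note: this helper reads the module-level "anomalous" flag that is set in the
    # __main__ block below, so it is only intended to be called from this script.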
script_body = """\
pref=%s
mtzin=../${pref}_forscale.mtz
scaled=${pref}_aimless.mtz
truncated=${pref}_aimless_truncate.mtz
#lowres=$2
#highres=$3
aimless \
HKLIN $mtzin \
HKLOUT $scaled \
SCALES ${pref}.scala \
ROGUES ${pref}_scala_rogues.log \
NORMPLOT ${pref}_scala_normplot.xmgr \
ANOMPLOT ${pref}_scala_anomplot.xmgr \
PLOT ${pref}_scala_surface_plot.plt \
CORRELPLOT ${pref}_scala_correlplot.xmgr \
ROGUEPLOT ${pref}_scala_rogueplot.xmgr \
<<eoi
!resolution low $lowres high $highres
!exclude batch ** to **
bins 20
!run 1 batch 1 to 750
!scales rotation spacing 5.000000 bfactor on tails
!cycles 5
""" % (prefix)
if anomalous:
script_body += """\
anomalous on
eoi
ctruncate -hklin $scaled -hklout $truncated -colin "/*/*/[IMEAN,SIGIMEAN]" -colano "/*/*/[I(+),SIGI(+),I(-),SIGI(-)]"
"""
else:
script_body += """\
anomalous off
eoi
ctruncate -hklin $scaled -hklout $truncated -colin "/*/*/[IMEAN,SIGIMEAN]"
"""
open(os.path.join(wdir, "run_aimless.sh"),"w").write(script_body)
call("sh run_aimless.sh",
stdout=open(os.path.join(wdir, "aimless_truncate.log"), "w"),
wdir=wdir)
if __name__ == "__main__":
parser = optparse.OptionParser(usage="usage: %prog [options] pdbfile")
#parser.add_option("--wdir", action="store", dest="wdir", default="see_predict", type=str,
# help="Working directory")
parser.add_option("--mtz-free","-r", action="store", dest="mtz_free", type=str, help="MTZ file for test flag")
parser.add_option("--free-fraction","-f", action="store", dest="free_fraction", type=float, help="Test set fraction if newly set")
(opts, args) = parser.parse_args(sys.argv)
if opts.mtz_free is not None and opts.free_fraction is not None:
print "mtz-free and free-fraction cannot be specified at the same time."
quit()
if 0:
parser.print_help()
quit()
# Check all needed files exist
if not check_needed_files():
sys.exit(1)
xdsinp = "XDS.INP"
workdir = "scale_exp"
os.mkdir(workdir)
os.mkdir(os.path.join(workdir, "scaled"))
anomalous = is_anomalous(xdsinp)
# *. INTEGRATE_forscale.mtz
call("pointless -copy xdsin INTEGRATE.HKL hklout %s/INTEGRATE_forscale.mtz" % workdir)
# 1. Backup XDS.INP, etc. (Make copies; not renaming)
bk_prefix = make_backup(backup_needed)
try:
# CORRECT with a
modify_xdsinp(xdsinp, inp_params=[("JOB","CORRECT"),
("CORRECTIONS", None),
("NBATCH", None),
("MINIMUM_I/SIGMA", None),
("WFAC1", None)
])
call("xds", stdout=open("CORRECT_a_xds.log", "w"))
# make xds_CORRECT.mtz
xds2mtz.xds2mtz(os.path.abspath("XDS_ASCII.HKL"), dir_name=os.path.join(workdir, "xdsconv_CORRECT"),
hklout="xds_CORRECT.mtz")
# make CORRECT_a_forscale.mtz
call("pointless -copy xdsin XDS_ASCII.HKL hklout %s/CORRECT_a_forscale.mtz" % workdir)
make_backup(backup_needed, "CORRECT_a_")
# CORRECT with b
#modify_xdsinp(xdsinp, mode="b")
modify_xdsinp(xdsinp, inp_params=[("JOB","CORRECT"),
("CORRECTIONS", ""),
("NBATCH", "1"),
("MINIMUM_I/SIGMA", "50"),
("WFAC1", "2")
])
call("xds", stdout=open("CORRECT_b_xds.log", "w"))
call("pointless -copy xdsin XDS_ASCII.HKL hklout %s/CORRECT_b_forscale.mtz" % workdir)
make_backup(backup_needed, "CORRECT_b_")
modify_xdsinp(xdsinp, inp_params=[("JOB","CORRECT"),
("CORRECTIONS", "MODULATION"),
("NBATCH", "1"),
("MINIMUM_I/SIGMA", None),
("WFAC1", None)
])
# CORRECT with c
#modify_xdsinp(xdsinp, mode="c")
call("xds", stdout=open("CORRECT_c_xds.log", "w"))
call("pointless -copy xdsin XDS_ASCII.HKL hklout %s/CORRECT_c_forscale.mtz" % workdir)
make_backup(backup_needed, "CORRECT_c_")
finally:
# 6. Revert XDS.INP, etc.
revert_files(backup_needed, bk_prefix)
# Run aimless
print "Running aimless"
for prefix in ("INTEGRATE", "CORRECT_a", "CORRECT_b", "CORRECT_c"):
print "running aimless for", prefix
wdir = os.path.join(workdir, "aimless_%s"%prefix)
os.mkdir(wdir)
run_aimless(prefix, wdir)
truncate_mtz = os.path.join(wdir, prefix+"_aimless_truncate.mtz")
if not os.path.exists(truncate_mtz):
print "WARNING: mtz file was not created:", truncate_mtz
continue
if opts.mtz_free:
copy_testflag(mtzfree=opts.mtz_free,
mtzin=truncate_mtz,
mtzout=os.path.join(workdir, "scaled", prefix+"_aimless_truncate.mtz"))
else:
shutil.copy(truncate_mtz,
os.path.join(workdir, "scaled"))
if opts.mtz_free:
copy_testflag(mtzfree=opts.mtz_free,
mtzin=os.path.join(workdir, "xdsconv_CORRECT", "xds_CORRECT.mtz"),
mtzout=os.path.join(workdir, "scaled", "xds_CORRECT.mtz"))
else:
shutil.copy(os.path.join(os.path.join(workdir, "xdsconv_CORRECT", "xds_CORRECT.mtz")),
os.path.join(workdir, "scaled"))
```
#### File: xds/command_line/xds_try_integrate_params.py
```python
"""
Try several parameters in INTEGRATE step of XDS, and optionally run CORRECT and make mtz files which are ready for refinement.
Usage:
PHENIX_TRUST_OTHER_ENV=1 phenix.python xds_try_integrate_params.py delphi=5,10,15,...
"""
master_params_str = """\
delphi = []
.type = floats
all_possible_delphi = False
.type = bool
all_refine_combinations = False
.type = bool
run_correct = True
.type = bool
.help = Run CORRECT (and make mtz) or not.
mtz_free = None
.type = path
"""
import sys
import os
import shutil
import itertools
import traceback
import iotbx.phil
from yamtbx.dataproc.xds import *
from yamtbx.util import call
from yamtbx.dataproc.xds.command_line import xds2mtz
from yamtbx.dataproc.xds import files
from xds_try_scale_params import get_digit, copy_testflag
def make_all_refine_combinations():
all_keywords = ["DISTANCE","BEAM","AXIS","ORIENTATION","CELL"]
result = []
for i in xrange(len(all_keywords)+1):
for c in itertools.combinations(all_keywords, i):
result.append(c)
return result
# make_all_refine_combinations()
def get_delphi_defined(xdsinp):
org_kwd = filter(lambda x:x[0].strip().startswith("DELPHI"), get_xdsinp_keyword(xdsinp))
if len(org_kwd) == 0:
return 5.
else:
return float(org_kwd[-1][1])
# get_delphi_defined()
def get_refine_defined(xdsinp):
org_kwd = filter(lambda x:x[0].strip().startswith("REFINE(INTEGRATE)"), get_xdsinp_keyword(xdsinp))
if len(org_kwd) == 0:
return [("DISTANCE", "BEAM", "ORIENTATION", "CELL")]
else:
return [org_kwd[-1][1].strip().split()]
# get_refine_defined(xdsinp)
def run(params):
xdsinp = "XDS.INP"
workdir = os.getcwd()
# concerning DELPHI=
if len(params.delphi) == 0:
params.delphi = [get_delphi_defined(xdsinp)]
params.delphi.sort()
digit_delphi = str(get_digit(params.delphi))
# concerning REFINE(INTEGRATE)=
if params.all_refine_combinations:
refine_params = make_all_refine_combinations()
else:
refine_params = get_refine_defined(xdsinp)
backup_needed = files.generated_by_INTEGRATE + ("XDS.INP",)
if params.run_correct:
backup_needed += files.generated_by_CORRECT
bk_prefix = None
try:
for rp in refine_params:
rp_str = "refine_none" if len(rp) == 0 else "refine_" + "+".join(rp)
for delphi in params.delphi:
delphi_str = ("delphi_%."+digit_delphi+"f") % delphi
bk_prefix = make_backup(backup_needed) # Backup existing XDS.INP and others
work_name = "INTEGRATE_%s_%s" % (rp_str, delphi_str)
inp_params = [("JOB","INTEGRATE"),
("DELPHI", delphi),
("REFINE(INTEGRATE)", " ".join(rp))
]
if params.run_correct:
inp_params[0] = ("JOB","INTEGRATE CORRECT")
modify_xdsinp(xdsinp, inp_params=inp_params)
call("xds_par", stdout=sys.stdout)
# Move files into a new directory.
os.mkdir(work_name)
for f in backup_needed:
shutil.move(f, work_name)
if params.run_correct:
# make .mtz file
try:
call("xdsstat", stdin="\n", stdout=open(os.path.join(work_name, "XDSSTAT.LP"),"w"), wdir=work_name)
xds2mtz.xds2mtz(os.path.abspath(os.path.join(work_name, "XDS_ASCII.HKL")),
dir_name=os.path.join(workdir, work_name, "ccp4"),
run_ctruncate=False, run_xtriage=True
)
if params.mtz_free is not None:
copy_testflag(mtzfree=params.mtz_free,
mtzin=os.path.join(workdir, work_name, "ccp4", "XDS_ASCII.mtz"))
except:
print traceback.format_exc()
print "Ignoring xds2mtz error.."
print
revert_files(backup_needed, bk_prefix) # Revert XDS.INP and others
finally:
# 6. Revert XDS.INP, etc.
revert_files(backup_needed, bk_prefix)
# run()
if __name__ == "__main__":
import sys
cmdline = iotbx.phil.process_command_line(args=sys.argv,
master_string=master_params_str)
params = cmdline.work.extract()
if params.mtz_free is not None:
params.mtz_free = os.path.abspath(params.mtz_free)
print "Parameters:"
cmdline.work.format(python_object=params).show(out=sys.stdout, prefix=" ")
#if len(params.delphi) == 1:
# print "Specify params!"
# sys.exit(1)
run(params)
```
#### File: dataproc/xds/correctlp.py
```python
from cctbx import sgtbx
from cctbx import uctbx
from yamtbx.util import safe_float
def get_ISa(lp, check_valid=False):
read_flag = False
for l in open(lp):
if "a b ISa" in l:
read_flag = True
elif read_flag:
tmp = l.split()
if len(tmp) != 3:
return float("nan")
a, b, ISa = tmp
if not check_valid or not (a == "4.000E+00" and b == "1.000E-04"):
return float(ISa)
return float("nan")
# get_ISa()
def get_P1_cell(lp, force_obtuse_angle=False):
read_flag = False
for l in open(lp):
if l.startswith(" CHARACTER LATTICE OF FIT a b c alpha beta gamma"):
read_flag = True
elif read_flag and l[14:16] == "aP":
cell = map(float, (l[32:39], l[39:46], l[46:53], l[53:59], l[59:65], l[65:71]))
if force_obtuse_angle:
tmp = map(lambda x: (x[0]+3,abs(90.-x[1])), enumerate(cell[3:])) # Index and difference from 90 deg
tmp.sort(key=lambda x: x[1], reverse=True)
if cell[tmp[0][0]] < 90:
tmp = map(lambda x: (x[0]+3,90.-x[1]), enumerate(cell[3:])) # Index and 90-val.
tmp.sort(key=lambda x: x[1], reverse=True)
for i,v in tmp[:2]: cell[i] = 180.-cell[i]
return uctbx.unit_cell(cell)
return None
# get_P1_cell()
def table_split(l):
# TODO need to change behavior by version?
assert l[50] == l[61] == l[71] == l[98] == "%"
return map(lambda x:x.strip(),
(l[:9], # RESOLUTION LIMIT
l[9:21], l[21:29], l[29:39], # NUMBER OF REFLECTIONS: OBSERVED UNIQUE POSSIBLE
l[39:50], # COMPLETENESS
l[51:61], l[62:71], # R-FACTOR: observed expected
l[72:81], # COMPARED
l[81:89], # I/SIGMA
l[89:98], # R-meas
l[99:107], # CC(1/2)
l[108:114], # AnomalCorr
l[115:123], # SigAno
l[123:] # Nano
))
# table_split
def errortable_split(l):
# TODO need to change behavior by version?
return map(lambda x:x.strip(),
(l[:9], l[9:17], l[17:26], l[26:33], l[33:43], l[43:53], l[53:61], l[61:69], l[69:77]))
# errortable_split
class CorrectLp:
def __init__(self, lpin):
self.anomalous_flag = None
if lpin is not None:
self.parse(lpin)
# __init__()
def get_ISa(self): return self.a_b_ISa[2]
def space_group_str(self): return "?" if self.space_group is None else self.space_group.info()
def parse(self, lpin):
reading = ""
self.table = {}
self.error_table = {}
self.space_group = None
self.unit_cell = map(lambda x: float("nan"), xrange(6))
self.unit_cell_esd = map(lambda x: float("nan"), xrange(6))
self.a_b_ISa = map(lambda x: float("nan"), xrange(3))
self.snippets = {}
lines = open(lpin).readlines()
for i, l in enumerate(lines):
if l.startswith(" FRIEDEL'S_LAW="):
self.anomalous_flag = "FALSE" in l
# ************ SELECTED SPACE GROUP AND UNIT CELL FOR THIS DATA SET ************
elif "SELECTED SPACE GROUP AND UNIT CELL FOR THIS DATA SET" in l:
reading = "selected_symmetry"
elif reading == "selected_symmetry":
# Info before refinement
if "SPACE_GROUP_NUMBER=" in l:
self.space_group = sgtbx.space_group_info(int(l[l.index("=")+1:])).group()
elif "UNIT_CELL_CONSTANTS=" in l:
# Will be overridden if refined
tmp = l[l.index("=")+1:]
self.unit_cell = map(float, (tmp[0:9],tmp[9:18],tmp[18:27],tmp[27:35],tmp[35:43],tmp[43:51]))
if "********" in l:
reading = ""
# ******************************************************************************
# REFINEMENT OF DIFFRACTION PARAMETERS USING ALL IMAGES
# ******************************************************************************
elif l.startswith(" UNIT CELL PARAMETERS"):
cell_params = map(float, (l[21:32], l[32:42], l[42:52], l[52:60], l[60:68], l[68:76]))
self.unit_cell = cell_params
elif l.startswith(" E.S.D. OF CELL PARAMETERS"):
esd_params = map(float, (l[26:35], l[35:43], l[43:51], l[51:59], l[59:67], l[67:75]))
self.unit_cell_esd = esd_params
elif l.startswith(" SPACE GROUP NUMBER"):
self.space_group = sgtbx.space_group_info(int(l[20:])).group()
# ******************************************************************************
# CORRECTION PARAMETERS FOR THE STANDARD ERROR OF REFLECTION INTENSITIES
# ******************************************************************************
elif "a b ISa" in l:
reading = "ISa"
self.snippets["ISa"] = l
elif reading == "ISa":
a, b, ISa = map(float, l.split())
reading = ""
self.a_b_ISa = a, b, ISa
self.snippets["ISa"] += l
# ******************************************************************************
# STANDARD ERROR OF REFLECTION INTENSITIES AS FUNCTION OF RESOLUTION
# FOR DATA SET XDS_ASCII.HKL
# ******************************************************************************
elif "RESOLUTION RANGE I/Sigma Chi^2 R-FACTOR R-FACTOR NUMBER ACCEPTED REJECTED" in l:
reading = "error_table"
elif reading == "error_table":
if l.startswith(" ***"):
reading = ""
continue
if len(l.split()) < 3: continue
sp = errortable_split(l)
# Note that the last line is about overall data
self.error_table.setdefault("dmax", []).append(float(sp[0]))
self.error_table.setdefault("dmin", []).append(float(sp[1]))
self.error_table.setdefault("ios", []).append(safe_float(sp[2]))
self.error_table.setdefault("chisq", []).append(safe_float(sp[3]))
self.error_table.setdefault("r_merge", []).append(safe_float(sp[4]))
self.error_table.setdefault("number", []).append(int(sp[6]))
self.error_table.setdefault("nacc", []).append(int(sp[7]))
self.error_table.setdefault("nrej", []).append(int(sp[8]))
#if "SUMMARY OF DATA SET STATISTICS FOR IMAGE":
# ******************************************************************************
# STATISTICS OF SAVED DATA SET "XDS_ASCII.HKL" (DATA_RANGE= x x)
# FILE TYPE: XDS_ASCII MERGE=FALSE FRIEDEL'S_LAW=x
# ******************************************************************************
elif 'STATISTICS OF SAVED DATA SET "XDS_ASCII.HKL"' in l:
reading = "stats_all"
continue
elif "SUBSET OF INTENSITY DATA WITH SIGNAL/NOISE >= -3.0 AS FUNCTION OF RESOLUTION" in l:
self.snippets["table1"] = l
if reading == "stats_all":
key = "all"
self.table[key] = {}
for ll in lines[i+1:i+4]: self.snippets["table1"] += ll
for ll in lines[i+4:i+4+10]:
self.snippets["table1"] += ll
sp = table_split(ll)
assert len(sp) == 14
self.table[key].setdefault("dmin", []).append(float(sp[0]) if sp[0]!="total" else None)
self.table[key].setdefault("redundancy", []).append(float(sp[1])/float(sp[2]) if float(sp[2]) > 0 else 0)
self.table[key].setdefault("cmpl", []).append(float(sp[4][:-1]))
self.table[key].setdefault("r_merge", []).append(float(sp[5][:-1]))
self.table[key].setdefault("i_over_sigma", []).append(float(sp[8]))
self.table[key].setdefault("r_meas", []).append(safe_float(sp[9][:-1]))
self.table[key].setdefault("cc_half", []).append(float(sp[10].replace("*","")))
self.table[key].setdefault("cc_ano", []).append(float(sp[11].replace("*","")))
self.table[key].setdefault("sig_ano", []).append(float(sp[12]))
if sp[0]=="total": # in case less than 9 shells
break
#print "".join(lines[i+4:i+4+10])
# parse()
def resolution_based_on_ios_of_error_table(self, min_ios):
# If all ios is less than min_ios, can't decide resolution.
if "ios" not in self.error_table or max(self.error_table["ios"]) < min_ios:
return float("nan")
flag_last = False
for dmin, ios in zip(self.error_table["dmin"], self.error_table["ios"]):
if flag_last:
return dmin
if ios <= min_ios:
flag_last = True
continue
return self.error_table["dmin"][-1] # the last
# resolution_based_on_ios_of_error_table()
def is_ISa_valid(self):
if "ISa" not in self.snippets: return False
sp = self.snippets["ISa"].splitlines()[-1].split()
a, b, ISa = sp
return not (a == "4.000E+00" and b == "1.000E-04") # in this case error model adjustment is failed.
# is_ISa_valid()
if __name__ == "__main__":
import sys
lp = CorrectLp(sys.argv[1])
print lp.table
print lp.space_group.info(), lp.unit_cell
```
#### File: dataproc/xds/initlp.py
```python
import numpy
class InitLp:
def __init__(self, initlp):
self.initlp = initlp
self.parse()
def parse(self):
self.bkginit_table = []
flag = None
for l in open(self.initlp):
if l.startswith(" FRAME # GAIN MGAIN"):
flag = "gain"
elif l.startswith(" FRAME # SCALE COUNTS MBKG MSCALE MSPOT"):
flag = "bkginit"
elif flag == "gain":
try:
frame, gain, mgain = l.split()
except:
continue
elif flag == "bkginit":
try:
frame, scale, counts, mbkg, mscale, mspot = l.split()
frame, mbkg, mscale, mspot = map(int, (frame, mbkg, mscale, mspot))
scale, counts = map(float, (scale, counts))
self.bkginit_table.append([frame, scale, counts, mbkg, mscale, mspot])
except:
continue
elif "NUMBER OF GAIN VALUES IN TABLE" in l:
flag = None
elif "NUMBER OF PIXELS USED FOR DATA COLLECTION" in l:
flag = None
# parse()
def check_bad_first_frames(self, iqrc=1.5):
# /isilon/users/rikenbl/rikenbl/Staff_26B2/ito/HSA/20190730/_kamoproc/cps1998/w02/data/multi_1-100
counts = numpy.array(map(lambda x: x[2], self.bkginit_table))
frames = map(lambda x: x[0], self.bkginit_table)
q25, q50, q75 = numpy.percentile(counts, [25, 50, 75])
iqr = q75 - q25
lowlim, highlim = q25 - iqrc*iqr, q75 + iqrc*iqr
bad_sel = (counts < lowlim)
bad_first_frames = []
if bad_sel[0]:
print "Bad first frames for", self.initlp
last_i = -1
for i in numpy.where(counts < lowlim)[0]:
if i - last_i > 1: break
last_i = i
print "", frames[i], counts[i]
bad_first_frames.append(frames[i])
return bad_first_frames
# check_bad_first_frames()
if __name__ == "__main__":
import sys
lp = InitLp(sys.argv[1])
lp.check_bad_first_frames()
```
#### File: dataproc/xds/xdsstat.py
```python
from yamtbx.util import safe_float
class XdsstatLp:
def __init__(self, lpin):
if lpin is not None:
self.parse(lpin)
# __init__()
def parse(self, lpin):
self.by_frame = {}
self.by_framediff = {}
reading = ""
for l in open(lpin):
if "Frame #refs #misfits Iobs sigma Iobs/sigma Peak Corr Rmeas #rmeas #unique" in l:
reading = "by_frame"
elif reading == "by_frame":
if "For inline graphs use a Java browser" in l:
reading = ""
continue
if len(l) < 94: continue
# frame, nrefs, nmisfits, iobs, sigma, ios, peak, corr, rmeas, nrmeas, nuniq, (dummy)
sp = l[:6], l[6:13], l[13:18], l[18:28], l[28:38], l[38:48], l[48:58], l[58:68], l[68:78], l[78:85], l[85:92]
self.by_frame.setdefault("frame", []).append(int(sp[0]))
self.by_frame.setdefault("nrefs", []).append(int(sp[1]))
self.by_frame.setdefault("nmisfits", []).append(int(sp[2]))
self.by_frame.setdefault("iobs", []).append(safe_float(sp[3]))
self.by_frame.setdefault("sigma", []).append(safe_float(sp[4]))
self.by_frame.setdefault("ios", []).append(safe_float(sp[5]))
self.by_frame.setdefault("peak", []).append(safe_float(sp[6]))
self.by_frame.setdefault("corr", []).append(safe_float(sp[7]))
self.by_frame.setdefault("rmeas", []).append(safe_float(sp[8]))
self.by_frame.setdefault("nrmeas", []).append(int(sp[9]))
self.by_frame.setdefault("nuniq", []).append(int(sp[10]))
elif "Framediff #refs R_d n-notfriedel Rd-notfriedel n-friedel Rd-friedel" in l:
reading = "by_framediff"
elif reading == "by_framediff":
if "For inline graphs use a Java browser" in l:
reading = ""
continue
if len(l) < 71: continue
# Framediff #refs R_d n-notfriedel Rd-notfriedel n-friedel Rd-friedel, (dummy)
sp = l[:6], l[6:14], l[14:24], l[24:32], l[32:42], l[42:50], l[50:60]
self.by_framediff.setdefault("framediff", []).append(int(sp[0]))
self.by_framediff.setdefault("nrefs", []).append(int(sp[1]))
self.by_framediff.setdefault("rd", []).append(float(sp[2]))
# parse()
# class XdsstatLp
```
#### File: yamtbx/util/xtal.py
```python
"""
NOTE on unit cell constraints determination:
XDS doesn't handle "real" rhombohedral space group (right?).
So, No need to support R3 or R32. They are handled as H3 or H32, maybe.
"""
import math
import numpy
import scipy.constants
from cctbx import uctbx
class CellConstraints:
def __init__(self, space_group):
self.cs = space_group.crystal_system()
self.free_indices = [0] # index of free parameters
if not self.is_b_equal_a(): self.free_indices.append(1)
if not self.is_c_equal_a_b(): self.free_indices.append(2)
for i, an in enumerate(("alpha", "beta", "gamma")):
if not self.is_angle_constrained(an):
self.free_indices.append(i+3)
# __init__()
def is_b_equal_a(self): return self.cs in ("Tetragonal", "Hexagonal", "Trigonal", "Cubic")
def is_c_equal_a_b(self): return self.cs == "Cubic"
def is_angle_constrained(self, angle):
assert angle in ("alpha", "beta", "gamma")
if self.cs == "Triclinic": return False
if self.cs == "Monoclinic": return angle != "beta"
return True
# is_angle_constrained()
def get_label_for_free_params(self, short=True):
ret = ["a", "b", "c",
"al" if short else "alpha",
"be" if short else "beta",
"ga" if short else "gamma"]
ret = map(lambda x: ret[x], self.free_indices)
return " ".join(ret)
# get_label_for_free_params()
def format_free_params(self, uc, lfmt="%6.2f", afmt="%5.1f", sep=" "):
if hasattr(uc, "parameters"):
uc = uc.parameters()
return sep.join(map(lambda x: (lfmt if x<3 else afmt)%uc[x], self.free_indices))
# format_free_params()
# class CellConstraints
def v6cell(cell):
# taken from cctbx/uctbx/determine_unit_cell/target_uc.py
""" Take a reduced Niggli Cell, and turn it into the G6 representation """
if hasattr(cell, "parameters"):
uc = cell.parameters()
else:
uc = cell
a = uc[0] ** 2
b = uc[1] ** 2
c = uc[2] ** 2
d = 2 * uc[1] * uc[2] * math.cos(uc[3]/180.*math.pi)
e = 2 * uc[0] * uc[2] * math.cos(uc[4]/180.*math.pi)
f = 2 * uc[0] * uc[1] * math.cos(uc[5]/180.*math.pi)
return [a, b, c, d, e, f]
def is_same_laue_symmetry(sg1, sg2):
laue = lambda x: x.build_derived_reflection_intensity_group(anomalous_flag=False)
return laue(sg1) == laue(sg2) # == comparison of space_group object is possible.
# is_same_laue_symmetry()
def is_enantiomorphic_space_group_pair(sg1, sg2):
if not sg1.type().is_enantiomorphic(): return False
if not sg2.type().is_enantiomorphic(): return False
return sg1.info().change_hand().group() == sg2
# is_enantiomorphic_space_group_pair()
def is_same_space_group_ignoring_enantiomorph(sg1, sg2):
if sg1 == sg2: return True
if sg1.type().is_enantiomorphic() and sg2.type().is_enantiomorphic():
return sg1.info().change_hand().group() == sg2
return False
# is_same_space_group_ignoring_enantiomorph()
def abc_convert_real_reciprocal(a, b, c):
V = numpy.dot(a, numpy.cross(b, c))
a_ = numpy.cross(b, c) / V
b_ = numpy.cross(c, a) / V
c_ = numpy.cross(a, b) / V
return a_, b_, c_
# abc_convert_real_reciprocal()
def format_unit_cell(uc, lfmt="%6.3f", afmt="%5.2f", sep=" "):
if hasattr(uc, "parameters"):
uc = uc.parameters()
lstr = sep.join(map(lambda x: lfmt%x, uc[:3]))
astr = sep.join(map(lambda x: afmt%x, uc[3:6]))
return lstr + sep + astr
# format_unit_cell()
def electron_voltage_to_wavelength(voltage):
h, m0, e, c = scipy.constants.h, scipy.constants.m_e, scipy.constants.e, scipy.constants.c
wavelen = h/numpy.sqrt(2*m0*e*voltage*(1.+e*voltage/2./m0/c**2)) * 1.e10
return wavelen
# electron_voltage_to_wavelength()
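# Example (well-known reference value): electron_voltage_to_wavelength(300e3)
# returns roughly 0.0197 Angstrom for 300 kV electrons.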
def shelx_latt(space_group):
# http://xray.chem.ualberta.ca/xray/shelxl/LATT.htm
latt_type = str(space_group.info())[0]
latt = dict(P=1, I=2, R=3, F=4, A=5, B=6, C=7).get(latt_type, "") # XXX check :R or :H for R.
if not space_group.is_centric():
latt *= -1
return str(latt)
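if __name__ == "__main__":
    # Minimal usage sketch (not part of the original module): show which cell
    # parameters are free for a monoclinic space group and print a G6 vector.
    # The space-group symbol "P 1 21 1" and the cell numbers are only illustrative.
    from cctbx import sgtbx
    sg = sgtbx.space_group_info("P 1 21 1").group()
    cc = CellConstraints(sg)
    print(cc.get_label_for_free_params())  # expected: a b c be
    print(cc.format_free_params((10.0, 20.0, 30.0, 90.0, 95.0, 90.0)))
    print(v6cell((10.0, 20.0, 30.0, 90.0, 90.0, 90.0)))  # cos(90 deg) ~ 0, so d,e,f ~ 0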
``` |
{
"source": "7m4mon/gr-em4100_decoder",
"score": 2
} |
#### File: docs/doxygen/swig_doc.py
```python
from __future__ import unicode_literals
import sys, time
from doxyxml import DoxyIndex, DoxyClass, DoxyFriend, DoxyFunction, DoxyFile
from doxyxml import DoxyOther, base
def py_name(name):
bits = name.split('_')
return '_'.join(bits[1:])
def make_name(name):
bits = name.split('_')
return bits[0] + '_make_' + '_'.join(bits[1:])
class Block(object):
"""
Checks if doxyxml produced objects correspond to a gnuradio block.
"""
@classmethod
def includes(cls, item):
if not isinstance(item, DoxyClass):
return False
# Check for a parsing error.
if item.error():
return False
friendname = make_name(item.name())
is_a_block = item.has_member(friendname, DoxyFriend)
# But now sometimes the make function isn't a friend so check again.
if not is_a_block:
is_a_block = di.has_member(friendname, DoxyFunction)
return is_a_block
class Block2(object):
"""
Checks if doxyxml produced objects correspond to a new style
gnuradio block.
"""
@classmethod
def includes(cls, item):
if not isinstance(item, DoxyClass):
return False
# Check for a parsing error.
if item.error():
return False
is_a_block2 = item.has_member('make', DoxyFunction) and item.has_member('sptr', DoxyOther)
return is_a_block2
def utoascii(text):
"""
Convert unicode text into ascii and escape quotes and backslashes.
"""
if text is None:
return ''
out = text.encode('ascii', 'replace')
# swig will require us to replace blackslash with 4 backslashes
out = out.replace(b'\\', b'\\\\\\\\')
out = out.replace(b'"', b'\\"').decode('ascii')
return str(out)
def combine_descriptions(obj):
"""
Combines the brief and detailed descriptions of an object together.
"""
description = []
bd = obj.brief_description.strip()
dd = obj.detailed_description.strip()
if bd:
description.append(bd)
if dd:
description.append(dd)
return utoascii('\n\n'.join(description)).strip()
def format_params(parameteritems):
output = ['Args:']
template = ' {0} : {1}'
for pi in parameteritems:
output.append(template.format(pi.name, pi.description))
return '\n'.join(output)
entry_templ = '%feature("docstring") {name} "{docstring}"'
def make_entry(obj, name=None, templ="{description}", description=None, params=[]):
"""
Create a docstring entry for a swig interface file.
obj - a doxyxml object from which documentation will be extracted.
name - the name of the C object (defaults to obj.name())
templ - an optional template for the docstring containing only one
variable named 'description'.
description - if this optional variable is set then its value is
used as the description instead of extracting it from obj.
"""
if name is None:
name=obj.name()
if "operator " in name:
return ''
if description is None:
description = combine_descriptions(obj)
if params:
description += '\n\n'
description += utoascii(format_params(params))
docstring = templ.format(description=description)
if not docstring:
return ''
return entry_templ.format(
name=name,
docstring=docstring,
)
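# For example, make_entry(block, name="gr_head") yields a line of the form
#   %feature("docstring") gr_head "<combined description>"
# which SWIG consumes when generating the Python bindings ("gr_head" is only an
# illustrative name).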
def make_func_entry(func, name=None, description=None, params=None):
"""
Create a function docstring entry for a swig interface file.
func - a doxyxml object from which documentation will be extracted.
name - the name of the C object (defaults to func.name())
description - if this optional variable is set then its value is
used as the description instead of extracting it from func.
params - a parameter list that overrides using func.params.
"""
#if params is None:
# params = func.params
#params = [prm.declname for prm in params]
#if params:
# sig = "Params: (%s)" % ", ".join(params)
#else:
# sig = "Params: (NONE)"
#templ = "{description}\n\n" + sig
#return make_entry(func, name=name, templ=utoascii(templ),
# description=description)
return make_entry(func, name=name, description=description, params=params)
def make_class_entry(klass, description=None, ignored_methods=[], params=None):
"""
Create a class docstring for a swig interface file.
"""
if params is None:
params = klass.params
output = []
output.append(make_entry(klass, description=description, params=params))
for func in klass.in_category(DoxyFunction):
if func.name() not in ignored_methods:
name = klass.name() + '::' + func.name()
output.append(make_func_entry(func, name=name))
return "\n\n".join(output)
def make_block_entry(di, block):
"""
Create class and function docstrings of a gnuradio block for a
swig interface file.
"""
descriptions = []
# Get the documentation associated with the class.
class_desc = combine_descriptions(block)
if class_desc:
descriptions.append(class_desc)
# Get the documentation associated with the make function
make_func = di.get_member(make_name(block.name()), DoxyFunction)
make_func_desc = combine_descriptions(make_func)
if make_func_desc:
descriptions.append(make_func_desc)
# Get the documentation associated with the file
try:
block_file = di.get_member(block.name() + ".h", DoxyFile)
file_desc = combine_descriptions(block_file)
if file_desc:
descriptions.append(file_desc)
except base.Base.NoSuchMember:
# Don't worry if we can't find a matching file.
pass
# And join them all together to make a super duper description.
super_description = "\n\n".join(descriptions)
# Associate the combined description with the class and
# the make function.
output = []
output.append(make_class_entry(block, description=super_description))
output.append(make_func_entry(make_func, description=super_description,
params=block.params))
return "\n\n".join(output)
def make_block2_entry(di, block):
"""
Create class and function docstrings of a new style gnuradio block for a
swig interface file.
"""
descriptions = []
# For new style blocks all the relevant documentation should be
# associated with the 'make' method.
class_description = combine_descriptions(block)
make_func = block.get_member('make', DoxyFunction)
make_description = combine_descriptions(make_func)
description = class_description + "\n\nConstructor Specific Documentation:\n\n" + make_description
# Associate the combined description with the class and
# the make function.
output = []
output.append(make_class_entry(
block, description=description,
ignored_methods=['make'], params=make_func.params))
makename = block.name() + '::make'
output.append(make_func_entry(
make_func, name=makename, description=description,
params=make_func.params))
return "\n\n".join(output)
def make_swig_interface_file(di, swigdocfilename, custom_output=None):
output = ["""
/*
* This file was automatically generated using swig_doc.py.
*
* Any changes to it will be lost next time it is regenerated.
*/
"""]
if custom_output is not None:
output.append(custom_output)
# Create docstrings for the blocks.
blocks = di.in_category(Block)
blocks2 = di.in_category(Block2)
make_funcs = set([])
for block in blocks:
try:
make_func = di.get_member(make_name(block.name()), DoxyFunction)
# Don't want to risk writing to output twice.
if make_func.name() not in make_funcs:
make_funcs.add(make_func.name())
output.append(make_block_entry(di, block))
except block.ParsingError:
sys.stderr.write('Parsing error for block {0}\n'.format(block.name()))
raise
for block in blocks2:
try:
make_func = block.get_member('make', DoxyFunction)
make_func_name = block.name() +'::make'
# Don't want to risk writing to output twice.
if make_func_name not in make_funcs:
make_funcs.add(make_func_name)
output.append(make_block2_entry(di, block))
except block.ParsingError:
sys.stderr.write('Parsing error for block {0}\n'.format(block.name()))
raise
# Create docstrings for functions
# Don't include the make functions since they have already been dealt with.
funcs = [f for f in di.in_category(DoxyFunction)
if f.name() not in make_funcs and not f.name().startswith('std::')]
for f in funcs:
try:
output.append(make_func_entry(f))
except f.ParsingError:
sys.stderr.write('Parsing error for function {0}\n'.format(f.name()))
# Create docstrings for classes
block_names = [block.name() for block in blocks]
block_names += [block.name() for block in blocks2]
klasses = [k for k in di.in_category(DoxyClass)
if k.name() not in block_names and not k.name().startswith('std::')]
for k in klasses:
try:
output.append(make_class_entry(k))
except k.ParsingError:
sys.stderr.write('Parsing error for class {0}\n'.format(k.name()))
# Docstrings are not created for anything that is not a function or a class.
# If this excludes anything important please add it here.
output = "\n\n".join(output)
swig_doc = open(swigdocfilename, 'w')
swig_doc.write(output)
swig_doc.close()
if __name__ == "__main__":
# Parse command line options and set up doxyxml.
err_msg = "Execute using: python swig_doc.py xml_path outputfilename"
if len(sys.argv) != 3:
raise Exception(err_msg)
xml_path = sys.argv[1]
swigdocfilename = sys.argv[2]
di = DoxyIndex(xml_path)
# gnuradio.gr.msq_queue.insert_tail and delete_head create errors unless docstrings are defined!
# This is presumably a bug in SWIG.
#msg_q = di.get_member(u'gr_msg_queue', DoxyClass)
#insert_tail = msg_q.get_member(u'insert_tail', DoxyFunction)
#delete_head = msg_q.get_member(u'delete_head', DoxyFunction)
output = []
#output.append(make_func_entry(insert_tail, name='gr_py_msg_queue__insert_tail'))
#output.append(make_func_entry(delete_head, name='gr_py_msg_queue__delete_head'))
custom_output = "\n\n".join(output)
# Generate the docstrings interface file.
make_swig_interface_file(di, swigdocfilename, custom_output=custom_output)
``` |
{
"source": "7ma7X/scrapbox-python",
"score": 3
} |
#### File: scrapbox-python/scrapbox/client.py
```python
from scrapbox.resource import wrapped_resource
from scrapbox.request import make_request
from functools import partial
class Client(object):
base_url = "https://scrapbox.io/api"
def __getattr__(self, name, **kwargs):
if name not in ("get",):
raise AttributeError(name)
return partial(self._request, name, **kwargs)
def _request(self, method, resource, **kwargs):
url = self._resolve_resource_name(resource)
return wrapped_resource(make_request(method, url, kwargs))
def _resolve_resource_name(self, name):
name = name.rstrip("/").lstrip("/")
return '%s/%s' % (self.base_url, name)
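if __name__ == "__main__":
    # Minimal usage sketch (not part of the original module): fetch one page of a
    # public project via the Scrapbox REST API ("/pages/<project>/<title>").
    # The project/page path "help-jp/API" is only an illustrative example.
    client = Client()
    page = client.get("/pages/help-jp/API")
    print(page)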
```
#### File: scrapbox-python/scrapbox/resource.py
```python
import json
def wrapped_resource(response):
if "image" in response.headers['Content-Type']:
return response.content
response_content = response.content.decode(response.encoding or 'utf-8')
try:
content = json.loads(response_content)
except ValueError:
content = response_content
return content
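# wrapped_resource() therefore returns raw bytes for image responses, a parsed
# dict/list for JSON bodies, and the decoded text when the body is not valid JSON.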
``` |
{
"source": "7Magic7Mike7/Qrogue",
"score": 3
} |
#### File: actors/puzzles/enemy.py
```python
from qrogue.game.logic.actors import StateVector
from qrogue.game.logic.collectibles import Collectible
from qrogue.util import RandomManager
from .target import Target
class Enemy(Target):
"""
An Enemy is a Target with a certain chance to flee.
"""
def __init__(self, target: StateVector, reward: Collectible, flee_chance: float):
"""
Creates an Enemy-Target with a given target state vector, a reward and a certain chance to flee.
:param target: the StateVector to reach
:param reward: the Collectible to get as a reward
:param flee_chance: how likely it is for the player to flee from this Target
"""
super().__init__(target, reward)
self.__flee_chance = flee_chance
self.__rm = RandomManager.create_new()
def _on_reached(self):
"""
Nothing to do here.
:return: None
"""
pass
def flee_check(self) -> bool:
return self.__rm.get(msg="Enemy.flee_check()") < self.__flee_chance
def __str__(self):
return "Enemy " + super(Enemy, self).__str__()
class DummyEnemy(Enemy):
def __init__(self, target: StateVector, reward: Collectible, flee_chance: float):
super(DummyEnemy, self).__init__(target, reward, flee_chance)
def _on_reached(self):
pass
```
#### File: logic/collectibles/collectible.py
```python
import math
from abc import ABC, abstractmethod
from enum import Enum
from typing import Iterator
class CollectibleType(Enum):
Consumable = 1
Gate = 2
ActiveItem = 3
PassiveItem = 4
Pickup = 5
Qubit = 6
Multi = 0 # wraps multiple collectibles
def type_str(c_type: CollectibleType) -> str:
if c_type is CollectibleType.Gate:
return " Gate"
else:
return ""
class Collectible(ABC):
def __init__(self, c_type: CollectibleType):
self.__type = c_type
@property
def type(self):
return self.__type
@abstractmethod
def name(self) -> str:
pass
@abstractmethod
def description(self) -> str:
pass
@abstractmethod
def default_price(self) -> int:
pass
@abstractmethod
def to_string(self) -> str:
pass
class MultiCollectible(Collectible):
PRICE_MULT = 0.9
def __init__(self, content: [Collectible]):
super(MultiCollectible, self).__init__(CollectibleType.Multi)
self.__content = content
def name(self) -> str:
return "Collectible Pack"
def description(self) -> str:
desc = "Contains multiple Collectibles:"
for collectible in self.__content:
desc += "\n - " + collectible.name()
return desc
def default_price(self) -> int:
price = 0
for collectible in self.__content:
price += collectible.default_price()
return math.ceil(price * MultiCollectible.PRICE_MULT)
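        # Example: a pack wrapping two rewards priced 10 each costs ceil(20 * 0.9) = 18,
        # so the PRICE_MULT discount makes the pack slightly cheaper than buying the parts separately.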
def to_string(self) -> str:
text = "Multi ["
for collectible in self.__content:
text += collectible.to_string() + ", "
return text + "]"
def iterator(self) -> Iterator[Collectible]:
return iter(self.__content)
class ShopItem:
def __init__(self, collectible: Collectible, price: int = -1):
self.__collectible = collectible
if price < 0:
price = collectible.default_price()
self.__price = price
@property
def collectible(self) -> Collectible:
return self.__collectible
@property
def price(self) -> int:
return self.__price
def to_string(self) -> str:
return f"{self.collectible}, {self.price}$"
def __str__(self):
return self.to_string()
```
#### File: dungeon_generator/dungeon_parser/QrogueDungeonListener.py
```python
from antlr4 import *
# This class defines a complete listener for a parse tree produced by QrogueDungeonParser.
class QrogueDungeonListener(ParseTreeListener):
# Enter a parse tree produced by QrogueDungeonParser#start.
def enterStart(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#start.
def exitStart(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#integer.
def enterInteger(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#integer.
def exitInteger(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#complex_number.
def enterComplex_number(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#complex_number.
def exitComplex_number(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#robot.
def enterRobot(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#robot.
def exitRobot(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#layout.
def enterLayout(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#layout.
def exitLayout(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#l_room_row.
def enterL_room_row(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#l_room_row.
def exitL_room_row(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#l_hallway_row.
def enterL_hallway_row(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#l_hallway_row.
def exitL_hallway_row(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#rooms.
def enterRooms(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#rooms.
def exitRooms(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#room.
def enterRoom(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#room.
def exitRoom(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#r_attributes.
def enterR_attributes(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#r_attributes.
def exitR_attributes(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#r_visibility.
def enterR_visibility(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#r_visibility.
def exitR_visibility(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#r_type.
def enterR_type(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#r_type.
def exitR_type(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#r_row.
def enterR_row(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#r_row.
def exitR_row(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#tile.
def enterTile(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#tile.
def exitTile(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#tile_descriptor.
def enterTile_descriptor(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#tile_descriptor.
def exitTile_descriptor(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#trigger_descriptor.
def enterTrigger_descriptor(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#trigger_descriptor.
def exitTrigger_descriptor(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#message_descriptor.
def enterMessage_descriptor(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#message_descriptor.
def exitMessage_descriptor(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#enemy_descriptor.
def enterEnemy_descriptor(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#enemy_descriptor.
def exitEnemy_descriptor(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#collectible_descriptor.
def enterCollectible_descriptor(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#collectible_descriptor.
def exitCollectible_descriptor(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#energy_descriptor.
def enterEnergy_descriptor(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#energy_descriptor.
def exitEnergy_descriptor(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#riddle_descriptor.
def enterRiddle_descriptor(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#riddle_descriptor.
def exitRiddle_descriptor(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#shop_descriptor.
def enterShop_descriptor(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#shop_descriptor.
def exitShop_descriptor(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#hallways.
def enterHallways(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#hallways.
def exitHallways(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#hallway.
def enterHallway(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#hallway.
def exitHallway(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#h_attributes.
def enterH_attributes(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#h_attributes.
def exitH_attributes(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#draw_strategy.
def enterDraw_strategy(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#draw_strategy.
def exitDraw_strategy(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#stv_pools.
def enterStv_pools(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#stv_pools.
def exitStv_pools(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#default_stv_pool.
def enterDefault_stv_pool(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#default_stv_pool.
def exitDefault_stv_pool(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#stv_pool.
def enterStv_pool(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#stv_pool.
def exitStv_pool(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#stvs.
def enterStvs(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#stvs.
def exitStvs(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#stv.
def enterStv(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#stv.
def exitStv(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#reward_pools.
def enterReward_pools(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#reward_pools.
def exitReward_pools(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#default_reward_pool.
def enterDefault_reward_pool(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#default_reward_pool.
def exitDefault_reward_pool(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#reward_pool.
def enterReward_pool(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#reward_pool.
def exitReward_pool(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#collectibles.
def enterCollectibles(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#collectibles.
def exitCollectibles(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#collectible.
def enterCollectible(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#collectible.
def exitCollectible(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#messages.
def enterMessages(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#messages.
def exitMessages(self, ctx):
pass
# Enter a parse tree produced by QrogueDungeonParser#message.
def enterMessage(self, ctx):
pass
# Exit a parse tree produced by QrogueDungeonParser#message.
def exitMessage(self, ctx):
pass
```
#### File: world/dungeon_generator/generator.py
```python
from abc import ABC, abstractmethod
from typing import Tuple
from qrogue.game.world.map import Map
from qrogue.util import MapConfig
class DungeonGenerator(ABC):
WIDTH = MapConfig.max_width()
HEIGHT = MapConfig.max_height()
def __init__(self, seed: int, width: int = WIDTH, height: int = HEIGHT):
self.__seed = seed
self._width = width
self._height = height
@property
def seed(self) -> int:
return self.__seed
@property
def width(self) -> int:
return self._width
@property
def height(self) -> int:
return self._height
@abstractmethod
def generate(self, data) -> Tuple[Map, bool]:
pass
```
#### File: world/map/level_map.py
```python
from typing import List, Callable
from qrogue.game.logic.actors import Robot
from qrogue.game.world.navigation import Coordinate
from qrogue.game.world.map import Map, MapType, Room
class LevelMap(Map):
def __init__(self, name: str, file_name: str, seed: int, rooms: List[List[Room]], robot: Robot,
spawn_room: Coordinate, check_achievement_callback: Callable[[str], bool],
trigger_event_callback: Callable[[str], None]):
super().__init__(name, file_name, seed, rooms, robot, spawn_room, check_achievement_callback,
trigger_event_callback)
def get_type(self) -> MapType:
return MapType.Level
```
#### File: world/navigation/navigation.py
```python
from enum import Enum
from qrogue.util import Logger
class Direction(Enum):
Center = (0, 0)
North = (0, -1)
East = (1, 0)
South = (0, 1)
West = (-1, 0)
N = North
E = East
S = South
W = West
Up = North
Right = East
Down = South
Left = West
U = Up
R = Right
D = Down
L = Left
def __init__(self, x: int, y: int):
self.__x = x
self.__y = y
@staticmethod
def from_coordinates(c_from: "Coordinate", c_to: "Coordinate") -> "Direction":
return direction(c_from, c_to)
@staticmethod
def values() -> "[Direction]":
return [Direction.North, Direction.East, Direction.South, Direction.West]
@property
def x(self) -> int:
return self.__x
@property
def y(self) -> int:
return self.__y
def is_horizontal(self) -> bool:
"""
:return: True if direction is East or West, False otherwise
"""
return self.x != 0
def opposite(self) -> "Direction":
if self == Direction.North:
return Direction.South
elif self == Direction.East:
return Direction.West
elif self == Direction.South:
return Direction.North
elif self == Direction.West:
return Direction.East
else:
return Direction.Center
def __add__(self, other) -> "Coordinate":
if isinstance(other, Direction):
return Coordinate(self.x + other.x, self.y + other.y)
elif isinstance(other, Coordinate):
return other + self
else:
Logger.instance().throw(NotImplementedError(f"Adding \"{other}\" to a Coordinate is not supported!"))
class Coordinate:
@staticmethod
def distance(a: "Coordinate", b: "Coordinate") -> int:
return abs(a.x - b.x) + abs(a.y - b.y)
def __init__(self, x: int, y: int):
self.__x = x
self.__y = y
@property
def x(self) -> int:
return self.__x
@property
def y(self) -> int:
return self.__y
def resolve(self) -> (int, int):
return self.__x, self.__y
def linearize(self, row_width: int) -> int:
"""
Returns the index this Coordinate would have if it was stored in a row-wise fashion in a 1D array.
:param row_width: width of one row of the grid stored in a corresponding 1D array
:return:
"""
return self.x + self.y * row_width
def __add__(self, other) -> "Coordinate":
if isinstance(other, Direction):
return Coordinate(self.x + other.x, self.y + other.y)
elif isinstance(other, Coordinate):
return Coordinate(self.x + other.x, self.y + other.y)
else:
Logger.instance().throw(NotImplementedError(f"Adding \"{other}\" to a Coordinate is not supported!"))
def __sub__(self, other) -> "Coordinate":
if isinstance(other, Direction):
return Coordinate(self.x - other.x, self.y - other.y)
elif isinstance(other, Coordinate):
return Coordinate(self.x - other.x, self.y - other.y)
else:
Logger.instance().throw(NotImplementedError(f"Subtracting \"{other}\" from a Coordinate is not supported!"))
def __eq__(self, other) -> bool:
if isinstance(other, Coordinate):
return self.x == other.x and self.y == other.y
return False
def __hash__(self):
return 61 * self.x + 51 * self.y
def __str__(self):
return f"({self.__x}|{self.__y})"
def direction(c_from: Coordinate, c_to: Coordinate) -> Direction:
diff = c_to - c_from
if diff.x == 0 and diff.y == 0:
return Direction.Center
if abs(diff.x) > abs(diff.y):
if diff.x > 0:
return Direction.East
else:
return Direction.West
else:
if diff.y > 0:
return Direction.South
else:
return Direction.North
def distance(a: Coordinate, b: Coordinate) -> int:
diff_x = a.x - b.x
diff_y = a.y - b.y
if diff_x < 0:
diff_x = -diff_x
if diff_y < 0:
diff_y = -diff_y
return diff_x + diff_y
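# Illustrative examples: direction(Coordinate(0, 0), Coordinate(3, -1)) is Direction.East
# because |dx| > |dy| and dx > 0; distance(Coordinate(0, 0), Coordinate(3, -1)) == 4.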
```
#### File: graphics/rendering/color_rules.py
```python
import py_cui
from py_cui.widgets import Widget
from qrogue.game.logic.actors.controllables import ControllableType
from qrogue.game.world.tiles import TileCode
class TileColorer:
__color_manager = {
TileCode.Invalid: py_cui.RED_ON_BLUE,
TileCode.Void: py_cui.CYAN_ON_BLACK,
TileCode.FogOfWar: py_cui.CYAN_ON_BLACK,
TileCode.Floor: py_cui.CYAN_ON_BLACK,
TileCode.Wall: py_cui.BLACK_ON_WHITE,
TileCode.Obstacle: py_cui.BLACK_ON_WHITE,
TileCode.Door: py_cui.CYAN_ON_BLACK,
TileCode.Collectible: py_cui.CYAN_ON_BLACK,
TileCode.Teleport: py_cui.CYAN_ON_BLACK,
TileCode.Message: py_cui.CYAN_ON_BLACK,
TileCode.Controllable: py_cui.GREEN_ON_BLACK,
TileCode.Npc: py_cui.BLUE_ON_BLACK,
TileCode.Enemy: py_cui.RED_ON_BLACK,
TileCode.Boss: py_cui.BLACK_ON_RED,
TileCode.SpaceshipWalk: py_cui.BLACK_ON_WHITE,
}
@staticmethod
def get_color(tile_code: TileCode) -> int:
"""
:param tile_code: code of the Tile we want to get the default color of
:return: integer representing one of the possible foreground-background color comibnations, None for invalid
input
"""
if tile_code in TileColorer.__color_manager:
return TileColorer.__color_manager[tile_code]
class ColorRules:
@staticmethod
def apply_map_rules(widget: Widget) -> None:
for ct in ControllableType.values():
widget.add_text_color_rule(ct.name, TileColorer.get_color(TileCode.Controllable), 'contains',
match_type='regex')
widget.add_text_color_rule('B', TileColorer.get_color(TileCode.Boss), 'contains', match_type='regex')
widget.add_text_color_rule('\d', TileColorer.get_color(TileCode.Enemy), 'contains', match_type='regex')
widget.add_text_color_rule('#', TileColorer.get_color(TileCode.Wall), 'contains', match_type='regex')
widget.add_text_color_rule('o', TileColorer.get_color(TileCode.Obstacle), 'contains', match_type='regex')
widget.add_text_color_rule('c', TileColorer.get_color(TileCode.Collectible), 'contains', match_type='regex')
@staticmethod
def apply_spaceship_rules(widget: Widget):
widget.add_text_color_rule('\.', TileColorer.get_color(TileCode.SpaceshipWalk), 'contains', match_type='regex')
widget.add_text_color_rule('M', TileColorer.get_color(TileCode.Controllable), 'contains', match_type='regex')
widget.add_text_color_rule('R', TileColorer.get_color(TileCode.Npc), 'contains', match_type='regex')
widget.add_text_color_rule('(B|W|N|G)', TileColorer.get_color(TileCode.SpaceshipWalk), 'contains',
match_type='regex')
@staticmethod
def apply_navigation_rules(widget: Widget):
widget.add_text_color_rule('#', TileColorer.get_color(TileCode.Wall), 'contains', match_type='regex')
widget.add_text_color_rule('M', TileColorer.get_color(TileCode.Controllable), 'contains', match_type='regex')
widget.add_text_color_rule('t', TileColorer.get_color(TileCode.Teleport), 'contains', match_type='regex')
widget.add_text_color_rule('\.', TileColorer.get_color(TileCode.Message), 'contains', match_type='regex')
```
#### File: qrogue/management/save_data.py
```python
from qrogue.game.logic.actors import Player, Robot
from qrogue.game.logic.actors.controllables import TestBot, LukeBot
from qrogue.game.world.map import CallbackPack
from qrogue.util import Logger, PathConfig, AchievementManager, RandomManager, CommonPopups, CheatConfig
from qrogue.util.achievements import Achievement
class SaveData:
__ROBOT_SECTION = "[Robots]"
__COLLECTIBLE_SECTION = "[Collectibles]"
__ACHIEVEMENT_SECTION = "[Achievements]"
__instance = None
@staticmethod
def instance() -> "SaveData":
if SaveData.__instance is None:
Logger.instance().throw(Exception("This singleton has not been initialized yet!"))
return SaveData.__instance
@staticmethod
def __empty_save_file() -> str:
pass
def __init__(self):
if SaveData.__instance is not None:
Logger.instance().throw(Exception("This class is a singleton!"))
else:
self.__player = Player()
path = PathConfig.find_latest_save_file()
content = ""
try:
content = PathConfig.read(path, in_user_path=True).splitlines()
except FileNotFoundError:
Logger.instance().error(NotImplementedError("This line should not be reachable! Please send us the log "
"files so we can fix the issue as soon as possible. "
"Thank you!"))
index = content.index(SaveData.__ACHIEVEMENT_SECTION)
achievement_list = []
for i in range(index + 1, len(content)):
achievement = Achievement.from_string(content[i])
if achievement:
achievement_list.append(achievement)
self.__achievements = AchievementManager(achievement_list)
self.__available_robots = [
TestBot(CallbackPack.instance().game_over),
LukeBot(CallbackPack.instance().game_over),
]
SaveData.__instance = self
@property
def achievement_manager(self) -> AchievementManager:
return self.__achievements
@property
def player(self) -> Player:
return self.__player
def get_expedition_seed(self) -> int:
return RandomManager.instance().get_seed(msg="SaveData.get_expedition_seed()") #7 # todo implement
def available_robots(self) -> iter:
return iter(self.__available_robots)
def get_robot(self, index: int) -> Robot:
if 0 <= index < len(self.__available_robots):
return self.__available_robots[index]
return None
def save(self) -> CommonPopups:
if CheatConfig.did_cheat():
return CommonPopups.NoSavingWithCheats
try:
data = ""
data += f"{SaveData.__ROBOT_SECTION}\n"
data += f"{SaveData.__COLLECTIBLE_SECTION}\n"
data += f"{SaveData.__ACHIEVEMENT_SECTION}\n"
data += f"{self.achievement_manager.to_string()}\n"
PathConfig.new_save_file(data)
return CommonPopups.SavingSuccessful
        except Exception:
return CommonPopups.SavingFailed
```
#### File: qrogue/management/story.py
```python
import enum
from typing import List
from qrogue.graphics.popups import Popup
from qrogue.management import SaveData
from qrogue.util import ColorConfig, MapConfig, HudConfig
from qrogue.util.help_texts import StoryText, StoryTextType, TutorialText, TutorialTextType
class _StoryProgress(enum.Enum):
RobotShowcase = 0
MoonMission = 1
DarkSideOfMoon = 2
FirstExpedition = 3
NewHorizons = 10 # w1 done, now we can travel to new solar systems (other worlds)
@staticmethod
def progress_order() -> List["_StoryProgress"]:
return [
_StoryProgress.RobotShowcase, _StoryProgress.MoonMission,
_StoryProgress.DarkSideOfMoon,
_StoryProgress.FirstExpedition,
_StoryProgress.NewHorizons,
]
class StoryNarration:
@staticmethod
def __check_achievement(achievement: str) -> bool:
return SaveData.instance().achievement_manager.check_achievement(achievement)
@staticmethod
def __get_story_progress() -> _StoryProgress:
progress = _StoryProgress.RobotShowcase
HudConfig.ShowCoins = False # todo implement more elegant
if StoryNarration.__check_achievement("l1v2"):
progress = _StoryProgress.DarkSideOfMoon
HudConfig.ShowCoins = True # todo implement more elegant
elif StoryNarration.__check_achievement(MapConfig.intro_level()):
progress = _StoryProgress.MoonMission
return progress
@staticmethod
def unlocked_navigation() -> bool:
progress = StoryNarration.__get_story_progress()
ordered_progress = _StoryProgress.progress_order()
return ordered_progress.index(progress) >= ordered_progress.index(_StoryProgress.MoonMission)
@staticmethod
def completed_tutorial() -> bool:
progress = StoryNarration.__get_story_progress()
ordered_progress = _StoryProgress.progress_order()
return ordered_progress.index(progress) >= ordered_progress.index(_StoryProgress.DarkSideOfMoon)
@staticmethod
def unlocked_free_navigation() -> bool:
progress = StoryNarration.__get_story_progress()
ordered_progress = _StoryProgress.progress_order()
return ordered_progress.index(progress) >= ordered_progress.index(_StoryProgress.NewHorizons)
@staticmethod
def entered_navigation():
progress = StoryNarration.__get_story_progress()
if progress is _StoryProgress.MoonMission:
Popup.scientist_says(TutorialText.get(TutorialTextType.Navigation))
elif progress is _StoryProgress.FirstExpedition:
Popup.scientist_says("Move down to go on an expedition.")
@staticmethod
def returned_to_spaceship():
progress = StoryNarration.__get_story_progress()
if progress is _StoryProgress.RobotShowcase:
Popup.scientist_says(StoryText.get(StoryTextType.Intro))
elif progress is _StoryProgress.MoonMission:
# Popup.scientist_says("They were really impressed by the Robots. So...\n")
# Popup.scientist_says("They were really impressed by the Robots. So...\nWe will fly to the moon!")
Popup.scientist_says(StoryText.get(StoryTextType.MoonMission))
elif progress is _StoryProgress.FirstExpedition:
Popup.scientist_says(StoryText.get(StoryTextType.DarkSideOfMoon))
@staticmethod
def scientist_text() -> str:
progress = StoryNarration.__get_story_progress()
if progress is _StoryProgress.RobotShowcase:
return StoryText.get(StoryTextType.Exam)
elif progress is _StoryProgress.MoonMission:
navigation_panel = ColorConfig.highlight_word("Navigation Panel")
n_tile = ColorConfig.highlight_tile("N")
return f"The {navigation_panel} {n_tile} is on the left end of the Spaceship."
return "NO MORE TEXT"
```
#### File: Qrogue/qrogue/qrogue.py
```python
import random
import sys
from typing import Tuple, List
import py_cui.errors
from qrogue.game.logic.actors import Player
from qrogue.game.world.dungeon_generator import QrogueLevelGenerator, QrogueWorldGenerator
from qrogue.game.world.map import CallbackPack
from qrogue.game.world.navigation import Coordinate
from qrogue.management import SaveData, QrogueCUI
from qrogue.util import Config, Logger, RandomManager, PathConfig, GameplayConfig, CheatConfig, Controls
from qrogue.util.key_logger import OverWorldKeyLogger
def __init_singletons(seed: int):
Config.load()
Config.activate_debugging()
def log_print(title: str, text: str):
#print(f"\"{title}\": {text}")
pass
def log_print_error(title: str, text: str):
log_print(f"Error - {title}", text)
Logger(seed)
Logger.instance().set_popup(log_print, log_print_error)
RandomManager(seed)
def start_level(num, level):
pass
def start_fight(robot, enemy, direction):
pass
def start_boss_fight(robot, boss, direction):
pass
def open_riddle(robot, riddle):
pass
def visit_shop(robot, shop_item_list):
pass
def game_over():
pass
CallbackPack(start_level, start_fight, start_boss_fight, open_riddle, visit_shop, game_over)
def __parse_argument(argument: List[str], has_value: bool = False) -> Tuple[bool, str]:
for arg in argument:
if arg in sys.argv:
if has_value:
i = sys.argv.index(arg)
if i + 1 < len(sys.argv):
return True, sys.argv[i + 1]
else:
return True, None
return False, None
def start_qrogue() -> None:
__CONSOLE_ARGUMENT = ["--from-console", "-fc"]
__DEBUG_ARGUMENT = ["--debug", "-d"]
__GAME_DATA_PATH_ARGUMENT = ["--game-data", "-gd"] # path argument
__USER_DATA_PATH_ARGUMENT = ["--user-data", "-ud"] # path argument
#__CONTROLS_ARGUMENT = ["--controls", "-c"] # int argument
__SIMULATION_FILE_ARGUMENT = ["--simulation-path", "-sp"] # path argument
__VALIDATE_MAP_ARGUMENT = ["--validate-map", "-vm"] # path argument
from_console, _ = __parse_argument(__CONSOLE_ARGUMENT)
debugging, _ = __parse_argument(__DEBUG_ARGUMENT)
_, data_folder = __parse_argument(__GAME_DATA_PATH_ARGUMENT, has_value=True)
_, user_data_folder = __parse_argument(__USER_DATA_PATH_ARGUMENT, has_value=True)
_, simulation_path = __parse_argument(__SIMULATION_FILE_ARGUMENT, has_value=True)
_, map_path = __parse_argument(__VALIDATE_MAP_ARGUMENT, has_value=True)
if map_path:
validate_map(map_path)
else:
if simulation_path:
simulate_game(simulation_path, from_console, debugging, data_folder, user_data_folder)
else:
start_game(from_console, debugging, data_folder, user_data_folder)
def setup_game(game_data_path: str = "", user_data_path: str = "") -> None:
"""
Creates the needed folder structure and game config file qrogue_game.config if not already existent.
:param game_data_path: the path where we want to setup the game data
:param user_data_path: the path where we want to load and store the user data (e.g. logs, save data)
:return: None
"""
Config.setup_user_data(user_data_path)
path = PathConfig.launch_config_path()
data = "\n"
data += f"{game_data_path}\n"
data += f"{user_data_path}\n"
PathConfig.write(path, data, in_user_path=False, may_exist=True, append=False)
def start_game(from_console: bool = False, debugging: bool = False, data_folder: str = None,
user_data_folder: str = None):
if PathConfig.load_paths(data_folder, user_data_folder):
return_code = Config.load() # NEEDS TO BE THE FIRST THING WE DO!
else:
return_code = 1
if return_code == 0:
if debugging:
Config.activate_debugging()
seed = random.randint(0, Config.MAX_SEED)
print(f"[Qrogue] Starting game with seed = {seed}")
try:
QrogueCUI(seed).start()
except py_cui.errors.PyCUIOutOfBoundsError:
#print("[Qrogue] ERROR!")
#print("Your terminal window is too small. "
# "Please make it bigger (i.e. maximize it) or reduce the font size.")
print("---------------------------------------------------------")
exit(1)
# flush after the player stopped playing
if GameplayConfig.auto_save() and not CheatConfig.did_cheat():
if SaveData.instance().save():
print("[Qrogue] Successfully saved the game!")
else:
Logger.instance().error("Failed to save the game.", show=False)
Logger.instance().flush()
OverWorldKeyLogger.instance().flush_if_useful()
print("[Qrogue] Successfully flushed all logs and shut down the game without any problems. See you next time!")
else:
print(f"[Qrogue] Error #{return_code}:")
if return_code == 1:
print("qrogue.config is invalid. Please check if the second line describes a valid path (the path "
"to your save files). Using special characters in the path could also cause this error so if the "
"path is valid please consider using another one without special characters.")
if not from_console:
print()
input("[Qrogue] Press ENTER to close the application")
exit(return_code)
def simulate_game(simulation_path: str, from_console: bool = False, debugging: bool = False, data_folder: str = None,
user_data_folder: str = None):
if PathConfig.load_paths(data_folder, user_data_folder):
return_code = Config.load() # NEEDS TO BE THE FIRST THING WE DO!
else:
return_code = 1
if return_code == 0:
if debugging:
Config.activate_debugging()
print(f"[Qrogue] Simulating the game recorded at \"{simulation_path}\"")
try:
QrogueCUI.start_simulation(simulation_path)
except py_cui.errors.PyCUIOutOfBoundsError:
# print("[Qrogue] ERROR!")
# print("Your terminal window is too small. "
# "Please make it bigger (i.e. maximize it) or reduce the font size.")
print("---------------------------------------------------------")
exit(1)
# flush after the player stopped playing
if GameplayConfig.auto_save() and not CheatConfig.did_cheat():
if SaveData.instance().save():
print("[Qrogue] Successfully saved the game!")
else:
Logger.instance().error("Failed to save the game.", show=False)
Logger.instance().flush()
OverWorldKeyLogger.instance().flush_if_useful()
print("[Qrogue] Successfully flushed all logs and shut down the game without any problems. See you next time!")
else:
print(f"[Qrogue] Error #{return_code}:")
if return_code == 1:
print("qrogue.config is invalid. Please check if the second line describes a valid path (the path "
"to your save files). Using special characters in the path could also cause this error so if the path is "
"valid please consider using another one without special characters.")
if not from_console:
print()
input("[Qrogue] Press ENTER to close the application")
exit(return_code)
def validate_map(path: str, is_level: bool = True, in_base_path: bool = True) -> bool:
seed = 7
try:
__init_singletons(seed)
except Exception as ex:
print(f"Error: {ex}")
print("Most likely you used validate_map() while also running the game which is forbidden. Make sure to "
"validate maps separately!")
return False
def check_achievement(achievement: str) -> bool:
return False
def trigger_event(event: str):
pass
def load_map(map_name: str, spawn_pos: Coordinate):
pass
if is_level:
generator = QrogueLevelGenerator(seed, check_achievement, trigger_event, load_map)
else:
player = Player()
generator = QrogueWorldGenerator(seed, player, check_achievement, trigger_event, load_map)
try:
error_occurred = False
generator.generate(path, in_base_path)
except FileNotFoundError as fnf:
error_occurred = True
print(f"Could not find specified file! Error: {fnf}")
except SyntaxError as se:
error_occurred = True
print("Found syntax error!")
print(se)
return not error_occurred
```
#### File: qrogue/test/generation_tests.py
```python
import time
import test_util
from qrogue.game.world.dungeon_generator import DungeonGenerator
from qrogue.game.world.dungeon_generator.random_generator import RandomLayoutGenerator, ExpeditionGenerator
from qrogue.management.save_data import SaveData
def layout_test():
min_duration = (1, -1)
duration_sum = 0
max_duration = (0, -1)
start_seed = 50000
end_seed = 55000
failing_seeds = []
wrong_specials_seeds = []
# seeds that "look weird": [603]
# [47765, 58456, 65084, 74241, 85971]
seeds = list(range(start_seed, end_seed)) #[629, 774, 991, 3280, 5326, 6062, 7289, 8588, 8604, ]
num_of_seeds = len(seeds)
i = 0
for seed in seeds:
if i % 5000 == 0:
print(f"Run {i + 1}): seed = {seed}")
mapgen = RandomLayoutGenerator(seed, DungeonGenerator.WIDTH, DungeonGenerator.HEIGHT)
now_time = time.time()
if not mapgen.generate(debug=False):
failing_seeds.append(mapgen)
duration = time.time() - now_time
duration_sum += duration
if duration < min_duration[0]:
min_duration = (duration, seed)
elif duration > max_duration[0]:
max_duration = (duration, seed)
if not mapgen.check_special_rooms():
wrong_specials_seeds.append(seed)
i += 1
#print(mapgen)
print(f"Average time needed for generating a map: {duration_sum / num_of_seeds} seconds")
print(f"Fastest generation time = {min_duration[0]} for seed = {min_duration[1]}")
print(f"Longest generation time = {max_duration[0]} for seed = {max_duration[1]}")
print()
print("Failing Seeds:")
seeds = []
for mg in failing_seeds:
seeds.append(mg.seed)
print(seeds)
print("Wrong SpecialRooms in Seeds: ")
print(wrong_specials_seeds)
print()
for mg in failing_seeds:
mapgen = RandomLayoutGenerator(mg.seed, DungeonGenerator.WIDTH, DungeonGenerator.HEIGHT)
mapgen.generate(debug=True)
def dungeon_test():
def check_achievement(ach: str) -> bool:
print(f"Checking achievement: {ach}")
return True
def trigger_event(event: str):
print(f"Triggering event: {event}")
def load_map(map_name: str):
print(f"Loading map: {map_name}")
SaveData()
min_duration = (1, -1)
duration_sum = 0
max_duration = (0, -1)
start_seed = 0
end_seed = 5000
failing_seeds = []
seeds = list(range(start_seed, end_seed))
num_of_seeds = len(seeds)
i = -1
for seed in seeds:
if seed == 16:
debug = True
i += 1
if i % 1000 == 0:
print(f"Run {i + 1}): seed = {seed}")
generator = ExpeditionGenerator(seed, check_achievement, trigger_event, load_map)
start_time = time.time()
map, success = generator.generate(SaveData.instance().get_robot(0))
if not success:
failing_seeds.append((generator, seed))
print(f"Failed for seed = {seed}")
duration = time.time() - start_time
duration_sum += duration
if duration < min_duration[0]:
min_duration = (duration, seed)
elif duration > max_duration[0]:
max_duration = (duration, seed)
print(f"Average time needed for generating a map: {duration_sum / num_of_seeds} seconds")
print(f"Fastest generation time = {min_duration[0]} for seed = {min_duration[1]}")
print(f"Longest generation time = {max_duration[0]} for seed = {max_duration[1]}")
print()
print("Failing Seeds:")
seeds = []
for mg, seed in failing_seeds:
print(mg)
seeds.append(seed)
print(seeds)
print()
if test_util.init_singletons(include_config=True):
#layout_test()
dungeon_test()
```
#### File: qrogue/util/util_functions.py
```python
def is_power_of_2(n: int):
if n == 0:
return False
return (n & (n - 1)) == 0
def center_string(text: str, line_width: int, uneven_left: bool = True, character: str = " ") -> str:
"""
    Prepends and appends the given character to the text so that the text is centered as well as possible
    in a line with the given line width. If the number of characters to add is uneven (i.e. the number of prepended
    and appended characters cannot be the same), the flag uneven_left decides whether the prepended (= left, default)
    or appended (= right) side gets the extra character.
:param text: the text to center in the line
:param line_width: width of the line the text should be centered in
:param uneven_left: whether to prepend or append one character more in case of imbalance, defaults to True
:param character: the character to add, defaults to whitespace " "
:return: centered version of text
"""
if line_width <= len(text):
return text
diff = line_width - len(text)
half1 = int(diff / 2)
half2 = diff - half1
if uneven_left:
return half2 * character + text + half1 * character
else:
return half1 * character + text + half2 * character
``` |
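A quick usage sketch of the two helpers above; the import path is assumed from the file header:
```python
# a minimal usage sketch; the import path is assumed from the file header above
from qrogue.util.util_functions import is_power_of_2, center_string

print(is_power_of_2(8))                                   # True
print(is_power_of_2(12))                                  # False
print(repr(center_string("abc", 8)))                      # '   abc  ' (extra character goes left by default)
print(repr(center_string("abc", 8, uneven_left=False)))   # '  abc   '
```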
{
"source": "7minus2/Python",
"score": 4
} |
#### File: Python/Excercises/error_catch.py
```python
from time import time
def func_timer(fn):
def wrapper(*args, **kwargs):
timer1 = time()
result = fn(*args, **kwargs)
timer2 = time()
        print(f'It took {timer2-timer1} seconds to run')
return result
return wrapper
@func_timer
def func_error():
while True:
try:
age = int(input('What is your Age Sir/Madam?'))
10/age
print(f'You\'re age is {age}')
except (ValueError, ZeroDivisionError) as err:
print(f'Please enter an integer greater than 0!\nError is: {err}')
continue
else:
print('Thank you for your input, Program Exiting')
break
finally:
print('Function is done running')
if __name__ == '__main__':
func_error()
```
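A small usage sketch of the func_timer decorator above; the Excercises package path follows the imports used in this repository's test.py, and slow_add is a made-up function:
```python
# a minimal sketch, assuming error_catch.py is importable as a module
from time import sleep
from Excercises.error_catch import func_timer   # package path as used elsewhere in this repo

@func_timer
def slow_add(a, b):          # slow_add is a hypothetical example function
    sleep(0.1)               # simulate some work so the timer has something to measure
    return a + b

print(slow_add(1, 2))        # prints the elapsed-time line from the wrapper, then 3
```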
#### File: Python/Excercises/password_check.py
```python
def pass_length_checker():
    print('Welcome to password length checker\nPlease enter your username and password!')
username = input('Username: ')
password = input('Password: ')
    encrypt_pass = len(password) * 'x'
    pass_length = len(password)
    print(f'{username}, your password {encrypt_pass} is {pass_length} characters long')
if __name__ == '__main__':
pass_length_checker()
```
#### File: Python/Excercises/Pet_excercise.py
```python
class Pets():
animals = []
def __init__(self, animals):
self.animals = animals
def walk(self):
for animal in self.animals:
print(animal.walk())
class Cat():
is_lazy = True
def __init__(self, name, age):
self.name = name
self.age = age
def walk(self):
return f'{self.name} is just walking around'
class Simon(Cat):
def sing(self, sounds):
return f'{sounds}'
class Sally(Cat):
def sing(self, sounds):
return f'{sounds}'
class Simba(Cat):
def sing(self, sounds):
return f'{sounds}'
if __name__ == '__main__':
my_cats = [Simon('Simon', 9), Sally('Sally', 10), Simba('Simba', 7)]
my_pets = Pets(my_cats)
my_pets.walk()
```
#### File: Python/Excercises/random_guessing_game.py
```python
import sys
import random
def rand_game(first_number, second_number, seed):
'''
INFO: Guess a number based on 2 different integers
Example: random_game.py <number1> <number2> <seed>
'''
random.seed(seed)
rand_number = random.randint(first_number, second_number)
while True:
try:
input_number = int(
input(f'Please guess a number between {first_number} and {second_number}:\n'))
if input_number == rand_number:
                print('You win Sir!!\n')
                return rand_number
elif input_number > second_number:
print('Your number is out of range\n')
continue
elif input_number < rand_number:
print(f'Hint: Guess a number higher than {input_number}\n')
continue
else:
print(f'Hint: Guess a number lower than {input_number}\n')
continue
except Exception as err:
print(f'Error has occured: {err}\n')
continue
if __name__ == '__main__':
rand_game(1, 10, 2)
# else:
# first_number = sys.argv[1]
# second_number = sys.argv[2]
# seed = sys.argv[3]
# rand_game(first_number, second_number, seed)
```
#### File: Python/Excercises/tesla_input_excercise.py
```python
def checkDriverAge(age=0):
# if not age:
# age = int(input('What is your age?: '))
if int(age) < 18:
        print('Sorry, you are too young to drive this car. '
'Powering off!')
elif int(age) > 18:
print('Powering On. Enjoy the ride!')
elif int(age) == 18:
        print('Congratulations on your first year of '
'driving. Enjoy the ride')
return age
if __name__ == "__main__":
age = checkDriverAge(19)
    print(f'Your age is {age}')
```
#### File: 7minus2/Python/test.py
```python
import unittest
from Excercises.highest_even import highest_even
from Excercises.couter_excercise import counter_checker1, counter_checker2
from Excercises.random_guessing_game import rand_game
import sys
from mock import patch
class TestMain(unittest.TestCase):
int_num = 12
def setUp(self):
print('Running Test')
def test_highest_even(self):
my_list = [10, 2, 3, 4, 8, 11]
result = highest_even(my_list)
self.assertEqual(result, 10)
def test_counter(self):
num = 11
result = counter_checker1(num)
self.assertEqual(result, 55)
def test_counter1(self):
num = ''
result = counter_checker1(num)
self.assertIsInstance(result, ValueError)
def test_counter1b(self):
num = 'straasdf'
result = counter_checker1(num)
self.assertIsInstance(result, ValueError)
def test_counter2(self):
result = counter_checker2(self.int_num)
self.assertEqual(result, 66)
@patch('builtins.input', lambda * args: 1)
def test_guessing_game(self):
sys.argv = [None, 1, 12, 2]
result = rand_game(sys.argv[1], sys.argv[2], sys.argv[3])
self.assertEqual(result, 1)
def tearDown(self):
print('Cleaning Up')
if __name__ == '__main__':
unittest.main(verbosity=2)
``` |
{
"source": "7mp/django-dynamic-fixture",
"score": 2
} |
#### File: fixture_algorithms/tests/test_sequential_fixture.py
```python
from django.db import models
from django.test import TestCase
from django_dynamic_fixture.fixture_algorithms.tests.abstract_test_generic_fixture import DataFixtureTestCase
from django_dynamic_fixture.fixture_algorithms.sequential_fixture import SequentialDataFixture, StaticSequentialDataFixture
class SequentialDataFixtureTestCase(TestCase, DataFixtureTestCase):
def setUp(self):
self.fixture = SequentialDataFixture()
def test_it_must_fill_integer_fields_sequencially_by_attribute(self):
self.assertEquals(1, self.fixture.generate_data(models.IntegerField()))
field = models.IntegerField()
field.name = 'x'
self.assertEquals(1, self.fixture.generate_data(field))
self.assertEquals(2, self.fixture.generate_data(field))
def test_it_must_fill_string_with_sequences_of_numbers_by_attribute(self):
self.assertEquals('1', self.fixture.generate_data(models.CharField(max_length=1)))
field = models.CharField(max_length=1)
field.name = 'x'
self.assertEquals('1', self.fixture.generate_data(field))
self.assertEquals('2', self.fixture.generate_data(field))
class StaticSequentialDataFixtureTestCase(TestCase, DataFixtureTestCase):
def setUp(self):
self.fixture = StaticSequentialDataFixture()
def test_it_must_fill_fields_sequencially_by_attribute_if_field_is_unique(self):
field = models.IntegerField(unique=True)
field.name = 'x'
self.assertEquals(1, self.fixture.generate_data(field))
self.assertEquals(2, self.fixture.generate_data(field))
def test_it_must_fill_fields_with_static_value_by_attribute_if_field_is_not_unique(self):
field = models.IntegerField(unique=False)
field.name = 'x'
self.assertEquals(1, self.fixture.generate_data(field))
self.assertEquals(1, self.fixture.generate_data(field))
```
#### File: django-dynamic-fixture/queries/count_queries_on_save.py
```python
from django_dynamic_fixture import new
from django.conf import settings
from django.db import connection
from django import db
from django_dynamic_fixture.django_helper import get_models_of_an_app, is_model_managed, get_unique_model_name, get_apps
class Report(object):
def __init__(self):
self.data = []
self.errors = []
def add_record(self, app, model, queries_insert, queries_update):
self.data.append((app, model, queries_insert, queries_update))
def add_error(self, msg):
self.errors.append(msg)
def export_csv(self, order_by_quantity_queries=False):
if order_by_quantity_queries:
self.data.sort(key=lambda t: t[2], reverse=True)
        print('APP.MODEL;QUERIES ON INSERT;QUERIES ON UPDATE')
        for app, model, queries_insert, queries_update in self.data:
            print('%s;%s;%s' % (get_unique_model_name(model), queries_insert, queries_update))
        for err in self.errors:
            print(err)
class CountQueriesOnSave(object):
def __init__(self):
self.report = Report()
def count_queries_for_model(self, app, model):
try:
model_instance = new(model, print_errors=False)
except Exception as e:
self.report.add_error('- Could not prepare %s: %s' % (get_unique_model_name(model), str(e)))
return
db.reset_queries()
try:
model_instance.save()
except Exception as e:
self.report.add_error('- Could not insert %s: %s' % (get_unique_model_name(model), str(e)))
return
queries_insert = len(connection.queries)
db.reset_queries()
try:
model_instance.save()
except Exception as e:
self.report.add_error('- Could not update %s: %s' % (get_unique_model_name(model), str(e)))
return
queries_update = len(connection.queries)
self.report.add_record(app, model, queries_insert, queries_update)
def execute(self, app_labels=[], exclude_app_labels=[]):
settings.DEBUG = True
apps = get_apps(application_labels=app_labels, exclude_application_labels=exclude_app_labels)
for app in apps:
models = get_models_of_an_app(app)
for model in models:
if not is_model_managed(model):
continue
self.count_queries_for_model(app, model)
return self.report
```
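A sketch of how the counter above might be driven, e.g. from `python manage.py shell`; the app label is hypothetical and Django settings must already be configured:
```python
# a sketch, assuming the classes above are importable and Django is configured
counter = CountQueriesOnSave()
report = counter.execute(app_labels=['myapp'])       # 'myapp' is a hypothetical app label
report.export_csv(order_by_quantity_queries=True)    # CSV to stdout, most expensive inserts first
```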
#### File: django-dynamic-fixture/queries/nose_plugin.py
```python
from django.db import connection
from nose.plugins import Plugin
# http://readthedocs.org/docs/nose/en/latest/plugins/interface.html
class Queries(Plugin):
"python manage.py test --with-queries"
name = 'queries'
enabled = True
_queries_by_test_methods = []
def configure(self, options, conf):
"""
Called after the command line has been parsed, with the parsed options and the config container.
Here, implement any config storage or changes to state or operation that are set by command line options.
DO NOT return a value from this method unless you want to stop all other plugins from being configured.
"""
Plugin.configure(self, options, conf)
connection.use_debug_cursor = True
def beforeTest(self, test):
"Called before the test is run (before startTest)."
self.initial_amount_of_queries = len(connection.queries)
def afterTest(self, test):
"Called after the test has been run and the result recorded (after stopTest)."
self.final_amount_of_queries = len(connection.queries)
self._queries_by_test_methods.append((test, self.final_amount_of_queries - self.initial_amount_of_queries))
def report(self, stream):
"""Called after all error output has been printed. Print your
plugin's report to the provided stream. Return None to allow
other plugins to print reports, any other value to stop them.
:param stream: stream object; send your output here
:type stream: file-like object
"""
stream.write('\nREPORT OF AMOUNT OF QUERIES BY TEST:\n')
for x in self._queries_by_test_methods:
testcase = x[0]
queries = x[1]
stream.write('\n%s: %s' % (testcase, queries))
stream.write('\n')
``` |
{
"source": "7mp/nose-json-output",
"score": 3
} |
#### File: nose-json-output/json_logger/jsonformatter.py
```python
import logging
import json
import re
import datetime
import traceback
from inspect import istraceback
#Support order in python 2.7 and 3
try:
from collections import OrderedDict
except ImportError:
pass
# skip natural LogRecord attributes
# http://docs.python.org/library/logging.html#logrecord-attributes
RESERVED_ATTRS = (
'args',
'asctime',
'created',
'exc_info',
'exc_text',
'filename',
'funcName',
#'levelname',
'levelno',
'lineno',
'module',
'msecs',
'message',
'msg',
# 'name',
'pathname',
'process',
'processName',
'relativeCreated',
'stack_info',
'thread',
'threadName')
RESERVED_ATTR_HASH = dict(zip(RESERVED_ATTRS, RESERVED_ATTRS))
def merge_record_extra(record, target, reserved=RESERVED_ATTR_HASH):
"""
Merges extra attributes from LogRecord object into target dictionary
:param record: logging.LogRecord
:param target: dict to update
:param reserved: dict or list with reserved keys to skip
"""
for key, value in record.__dict__.items():
#this allows to have numeric keys
if (key not in reserved
and not (hasattr(key, "startswith")
and key.startswith('_'))):
target[key] = value
return target
class JsonFormatter(logging.Formatter):
"""
A custom formatter to format logging records as json strings.
    extra values will be formatted as str() if not supported by
json default encoder
"""
def __init__(self, *args, **kwargs):
"""
:param json_default: a function for encoding non-standard objects
as outlined in http://docs.python.org/2/library/json.html
:param json_encoder: optional custom encoder
:param prefix: an optional string prefix added at the beginning of
the formatted string
"""
self.json_default = kwargs.pop("json_default", None)
self.json_encoder = kwargs.pop("json_encoder", None)
self.prefix = kwargs.pop("prefix", "")
#super(JsonFormatter, self).__init__(*args, **kwargs)
logging.Formatter.__init__(self, *args, **kwargs)
if not self.json_encoder and not self.json_default:
def _default_json_handler(obj):
'''Prints dates in ISO format'''
if isinstance(obj, datetime.datetime):
if obj.year < 1900:
# strftime do not work with date < 1900
return obj.isoformat()
return obj.strftime(self.datefmt or '%Y-%m-%dT%H:%M')
elif isinstance(obj, datetime.date):
return obj.isoformat()
elif isinstance(obj, datetime.time):
return obj.strftime('%H:%M')
elif istraceback(obj):
tb = ''.join(traceback.format_tb(obj))
return tb.strip()
elif isinstance(obj, Exception):
return "Exception: %s" % str(obj)
return str(obj)
self.json_default = _default_json_handler
self._required_fields = self.parse()
self._skip_fields = dict(zip(self._required_fields,
self._required_fields))
self._skip_fields.update(RESERVED_ATTR_HASH)
def parse(self):
"""Parses format string looking for substitutions"""
standard_formatters = re.compile(r'\((.+?)\)', re.IGNORECASE)
return standard_formatters.findall(self._fmt)
def add_fields(self, log_record, record, message_dict):
"""
Override this method to implement custom logic for adding fields.
"""
for field in self._required_fields:
log_record[field] = record.__dict__.get(field)
log_record.update(message_dict)
merge_record_extra(record, log_record, reserved=self._skip_fields)
def process_log_record(self, log_record):
"""
Override this method to implement custom logic
on the possibly ordered dictionary.
"""
return log_record
def format(self, record):
"""Formats a log record and serializes to json"""
message_dict = {}
if isinstance(record.msg, dict):
message_dict = record.msg
record.message = None
else:
record.message = record.getMessage()
# only format time if needed
if "asctime" in self._required_fields:
record.asctime = self.formatTime(record, self.datefmt)
# Display formatted exception, but allow overriding it in the
# user-supplied dict.
if record.exc_info and not message_dict.get('exc_info'):
message_dict['exc_info'] = self.formatException(record.exc_info)
try:
log_record = OrderedDict()
except NameError:
log_record = {}
self.add_fields(log_record, record, message_dict)
log_record = self.process_log_record(log_record)
return "%s%s" % (self.prefix,
json.dumps(log_record,
default=self.json_default,
cls=self.json_encoder))
``` |
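A minimal usage sketch of the formatter above, assuming the module is importable as `json_logger.jsonformatter` (the path in the file header); the format string just names which LogRecord fields to emit:
```python
import logging
import sys

# a minimal sketch; the import path is assumed from the file header above
from json_logger.jsonformatter import JsonFormatter

handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(JsonFormatter('%(asctime)s %(levelname)s %(name)s %(message)s'))

logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# extra keys are merged into the JSON record by merge_record_extra()
logger.info("user logged in", extra={"user_id": 42})
```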
{
"source": "7nocturnal/Gchart",
"score": 3
} |
#### File: Gchart/code/test.py
```python
# def request2size(size):
# SIZE_SZ = 8
# MALLOC_ALIGN_MASK = 2 * SIZE_SZ - 1
# MINSIZE = 32
# if (size + SIZE_SZ + MALLOC_ALIGN_MASK < MINSIZE):
# new_size = MINSIZE
# else:
# new_size = (size + SIZE_SZ + MALLOC_ALIGN_MASK) & (~MALLOC_ALIGN_MASK)
# return new_size
``` |
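The commented-out request2size above mirrors glibc malloc's request-to-chunk-size rounding (8-byte size field, 16-byte alignment, 32-byte minimum chunk on 64-bit). A runnable sketch with a couple of worked values:
```python
# runnable version of the commented-out helper above (glibc-style 64-bit constants)
SIZE_SZ = 8
MALLOC_ALIGN_MASK = 2 * SIZE_SZ - 1   # 15 -> chunk sizes are rounded to 16-byte alignment
MINSIZE = 32                          # smallest chunk malloc will hand out

def request2size(size):
    if size + SIZE_SZ + MALLOC_ALIGN_MASK < MINSIZE:
        return MINSIZE
    return (size + SIZE_SZ + MALLOC_ALIGN_MASK) & ~MALLOC_ALIGN_MASK

print(request2size(0))    # 32  (bumped up to MINSIZE)
print(request2size(24))   # 32  (24 + 8-byte size field, rounded to 16-byte alignment)
print(request2size(25))   # 48
```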
{
"source": "7NoteDancing/Note",
"score": 3
} |
#### File: Note/nn/ART2.py
```python
import tensorflow as tf
import numpy as np
import pickle
class ART2:
def __init__(self,data=None,r_neure=None,p=None,epislon=None,a=10,b=10,c=0.1,d=0.9,e=0,theta=0.2):
self.data=data
self.c_neure=data.shape[-1]
self.r_neure=r_neure
self.p=p
self.epislon=epislon
self.a=a
self.b=b
self.c=c
self.d=d
self.e=e
self.theta=theta
def init(self):
self.W=tf.ones(shape=[self.c_neure,self.r_neure],dtype=tf.float16)/(1+self.d)*np.sqrt(self.c_neure)
self.T=tf.zeros(shape=[self.r_neure,self.c_neure],dtype=tf.int8)
return
def f(self,x):
        # ART2 noise-suppression nonlinearity: values at or above theta pass through,
        # smaller values are squashed toward zero
        return np.where(x>=self.theta, x, (2*self.theta*x**2)/(x**2+self.theta**2))
def r_layer(self,p):
h=tf.matmul(p,self.W)
return h,self.T[np.argmax(h)]
def c_layer(self,data):
w=0
x=0
v=0
u=0
p=0
q=0
data=data.reshape([1,-1])
while True:
u_=u
w=data+self.a*u
x=w/(self.e+np.sqrt(np.sum(w**2)))
f1=self.f(x)
f2=self.f(q)
v=f1+self.b*f2
u=v/(self.e+np.sqrt(np.sum(v**2)))
p=u
q=p/(self.e+np.sqrt(np.sum(p**2)))
if np.sum(np.abs(u-u_)<=self.epislon)==len(u):
break
return u,p
def reset(self,u,h,t):
vector=self.r_vector
vector[np.argmax(h)]=self.d
p=u+vector*t
r=(u+self.c*p)/(self.e+np.sqrt(np.sum(u**2))+np.sqrt(np.sum(p**2)))
return np.sqrt(np.sum(r**2))
def _search(self,u,h,t,vector):
a=1
resonance=False
while True:
h=h*vector
t=self.T[np.argmax(h)]
r=self.reset(u,h,t)
if r>=self.p:
resonance=True
if resonance==True:
self.W[:,np.argmax(h)]=self.W[:,np.argmax(h)]+self.d*(1-self.d)*(u/(1-self.d)-self.W[:,np.argmax(h)])
self.T[np.argmax(h)]=self.T[np.argmax(h)]+self.d*(1-self.d)*(u/(1-self.d)-self.T[np.argmax(h)])
resonance=False
self.accumulator+=1
break
elif a==self.r_neure:
tf.concat([self.W,tf.ones(shape=[self.c_neure,1],dtype=tf.float16)/(1+self.d)*np.sqrt(self.c_neure)],axis=1)
tf.concat([self.T,tf.ones(shape=[1,self.c_neure],dtype=tf.int8)],axis=0)
self.W[:,np.argmax(h)]=self.W[:,np.argmax(h)]+self.d*(1-self.d)*(u/(1-self.d)-self.W[:,np.argmax(h)])
self.T[np.argmax(h)]=self.T[np.argmax(h)]+self.d*(1-self.d)*(u/(1-self.d)-self.T[np.argmax(h)])
self.r_neure+=1
self._vector=tf.concat([self._vector,tf.ones([1],dtype=tf.int8)],axis=0)
self.accumulator+=1
break
else:
vector[np.argmax(h)]=0
a+=1
return
def recognition_layer(self,p,W,T):
h=tf.matmul(p,W)
return h,T[np.argmax(h)]
def compare_layer(self,data,a,b,e,theta,epislon):
w=0
x=0
v=0
u=0
p=0
q=0
data=data.reshape([1,-1])
while True:
u_=u
w=data+a*u
x=w/(e+np.sqrt(np.sum(w**2)))
            # same noise-suppression nonlinearity as f(), applied element-wise
            f1=np.where(x>=theta, x, (2*theta*x**2)/(x**2+theta**2))
            f2=np.where(q>=theta, q, (2*theta*q**2)/(q**2+theta**2))
v=f1+b*f2
u=v/(e+np.sqrt(np.sum(v**2)))
p=u
q=p/(e+np.sqrt(np.sum(p**2)))
if np.sum(np.abs(u-u_)<=epislon)==len(u):
break
return u,p
def search(self,u,h,t,W,T,p,d,vector):
a=1
resonance=False
while True:
h=h*vector
t=T[np.argmax(h)]
r=self.reset(u,h,t)
if r>=p:
resonance=True
if resonance==True:
W[:,np.argmax(h)]=W[:,np.argmax(h)]+d*(1-d)*(u/(1-d)-W[:,np.argmax(h)])
T[np.argmax(h)]=T[np.argmax(h)]+d*(1-d)*(u/(1-d)-T[np.argmax(h)])
break
elif a==len(h):
tf.concat([W,tf.ones(shape=[len(h),1],dtype=tf.float16)/(1+d)*np.sqrt(len(h))],axis=1)
tf.concat([T,tf.ones(shape=[1,len(t)],dtype=tf.int8)],axis=0)
W[:,np.argmax(h)]=W[:,np.argmax(h)]+d*(1-d)*(u/(1-d)-W[:,np.argmax(h)])
T[np.argmax(h)]=T[np.argmax(h)]+d*(1-d)*(u/(1-d)-T[np.argmax(h)])
break
else:
vector[np.argmax(h)]=0
a+=1
return
def learn(self):
resonance=False
self._vector=tf.ones(shape=[self.r_neure],dtype=tf.int8)
self.r_vector=tf.zeros(shape=[self.r_neure],dtype=tf.int8)
while True:
            self.accumulator=0
for i in range(len(self.data)):
u,p=self.c_layer(self.data[i])
h,t=self.r_layer(p)
r=self.reset(u,h,t)
if r>=self.p:
resonance=True
if resonance==True:
self.W[:,np.argmax(h)]=self.W[:,np.argmax(h)]+self.d*(1-self.d)*(u/(1-self.d)-self.W[:,np.argmax(h)])
self.T[np.argmax(h)]=self.T[np.argmax(h)]+self.d*(1-self.d)*(u/(1-self.d)-self.T[np.argmax(h)])
resonance=False
self.accumulator+=1
else:
vector=self._vector
vector[np.argmax(h)]=0
self._search(u,h,t,vector)
            # stop once every pattern resonated in a single pass
            if self.accumulator==len(self.data):
                break
        return
def save_p(self,path):
parameter_file=open(path+'.dat','wb')
pickle.dump([self.W,self.T],parameter_file)
parameter_file.close()
return
def save(self,path):
output_file=open(path+'\save.dat','wb')
path=path+'\save.dat'
index=path.rfind('\\')
parameter_file=open(path.replace(path[index+1:],'parameter.dat'),'wb')
pickle.dump([self.W,self.T],parameter_file)
pickle.dump(self.c_neure,output_file)
pickle.dump(self.r_neure,output_file)
pickle.dump(self.p,output_file)
pickle.dump(self.epislon,output_file)
pickle.dump(self.a,output_file)
pickle.dump(self.b,output_file)
pickle.dump(self.c,output_file)
pickle.dump(self.d,output_file)
pickle.dump(self.e,output_file)
pickle.dump(self.theta,output_file)
output_file.close()
parameter_file.close()
return
def restore(self,s_path,p_path):
input_file=open(s_path,'rb')
parameter_file=open(p_path,'rb')
parameter=pickle.load(parameter_file)
self.W=parameter[0]
self.T=parameter[1]
self.c_neure=pickle.load(input_file)
self.r_neure=pickle.load(input_file)
self.p=pickle.load(input_file)
self.epislon=pickle.load(input_file)
self.a=pickle.load(input_file)
self.b=pickle.load(input_file)
self.c=pickle.load(input_file)
self.d=pickle.load(input_file)
self.e=pickle.load(input_file)
self.theta=pickle.load(input_file)
input_file.close()
parameter_file.close()
return
```
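The f() method above is ART2's noise-suppression nonlinearity: inputs at or above the threshold theta pass through unchanged, smaller ones are squashed toward zero. A standalone NumPy sketch of just that piece:
```python
import numpy as np

# standalone sketch of the ART2 noise-suppression nonlinearity used by f() above
def art2_activation(x, theta=0.2):
    x = np.asarray(x, dtype=float)
    return np.where(x >= theta, x, (2 * theta * x**2) / (x**2 + theta**2))

print(art2_activation([0.05, 0.1, 0.2, 0.7]))
# values below theta are suppressed toward zero, values >= theta pass through unchanged
```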
#### File: RL/VFA/Q_learning.py
```python
import tensorflow as tf
import numpy as np
import pickle
import time
class Q_learning:
def __init__(self,net,state,state_name,action_name,exploration_space,save_episode=True):
self.net=net
self.net_p=None
self.episode=[]
self.state=state
self.state_name=state_name
self.action_name=action_name
self.exploration_space=exploration_space
self.action_len=len(self.action_name)
self.epsilon=None
self.discount=None
self.episode_step=None
self.optimizer=None
self.lr=None
self.save_episode=save_episode
self.loss=0
self.opt_flag=False
self.epi_num=0
self.episode_num=0
self.total_episode=0
self.time=0
self.total_time=0
def init(self,dtype=np.int32):
t3=time.time()
if len(self.action_name)>self.action_len:
self.action=np.concatenate((self.action,np.arange(len(self.action_name)-self.action_len,dtype=dtype)+self.action_len))
            self.action_one=np.concatenate((self.action_one,np.ones(len(self.action_name)-self.action_len,dtype=dtype)))
else:
self.action=np.arange(len(self.action_name),dtype=dtype)
self.action_one=np.ones(len(self.action_name),dtype=dtype)
t4=time.time()
self.time+=t4-t3
return
def set_up(self,net_p=None,epsilon=None,discount=None,episode_step=None,optimizer=None,lr=None,init=False):
if net_p!=None:
self.net_p=net_p
if epsilon!=None:
self.epsilon=epsilon
if discount!=None:
self.discount=discount
if episode_step!=None:
self.episode_step=episode_step
if optimizer!=None:
self.optimizer=optimizer
if lr!=None:
self.lr=lr
self.optimizer.lr=lr
elif self.lr!=None:
self.optimizer.lr=self.lr
if init==True:
self.episode=[]
self.epi_num=0
self.episode_num=0
self.total_episode=0
self.time=0
self.total_time=0
return
def epsilon_greedy_policy(self,s,action_one):
action_prob=action_one
action_prob=action_prob*self.epsilon/len(action_one)
best_action=np.argmax(self.net(self.state[self.state_name[s]]))
action_prob[best_action]+=1-self.epsilon
return action_prob
def _loss(self,s,a,next_s,r):
return (r+self.discount*tf.reduce_max(self.net(self.state[next_s]))-self.net(self.state[s])[a])**2
def explore(self,episode_num):
episode=[]
s=int(np.random.uniform(0,len(self.state_name)))
for _ in range(episode_num):
if self.episode_step==None:
while True:
action_prob=self.epsilon_greedy_policy(s,self.action_one)
a=np.random.choice(self.action,p=action_prob)
next_s,r,end=self.exploration_space[self.state_name[s]][self.action_name[a]]
self.loss+=self._loss(s,a,next_s,r)
if end:
if self.save_episode==True:
episode.append([self.state_name[s],self.action_name[a],r,end])
break
if self.save_episode==True:
                        episode.append([self.state_name[s],self.action_name[a],r])
s=next_s
else:
for _ in range(self.episode_step):
action_prob=self.epsilon_greedy_policy(s,self.action_one)
a=np.random.choice(self.action,p=action_prob)
next_s,r,end=self.exploration_space[self.state_name[s]][self.action_name[a]]
self.loss+=self._loss(s,a,next_s,r)
if end:
if self.save_episode==True:
episode.append([self.state_name[s],self.action_name[a],r,end])
break
if self.save_episode==True:
                        episode.append([self.state_name[s],self.action_name[a],r])
s=next_s
if self.save_episode==True:
self.episode.append(episode)
self.epi_num+=1
return
def learn(self):
with tf.GradientTape() as tape:
gradient=tape.gradient(1/2*self.loss,self.net_p)
if self.opt_flag==True:
self.optimizer(gradient,self.net_p)
else:
self.optimizer.apply_gradients(zip(gradient,self.net_p))
self.loss=self.loss.numpy()
return
def save_p(self,path):
parameter_file=open(path+'.dat','wb')
pickle.dump(self.net_p,parameter_file)
parameter_file.close()
return
def save_e(self,path):
episode_file=open(path+'.dat','wb')
pickle.dump(self.episode,episode_file)
episode_file.close()
return
def save(self,path,i=None,one=True):
if one==True:
output_file=open(path+'\save.dat','wb')
path=path+'\save.dat'
index=path.rfind('\\')
if self.save_episode==True:
episode_file=open(path.replace(path[index+1:],'episode.dat'),'wb')
pickle.dump(self.episode,episode_file)
episode_file.close()
else:
output_file=open(path+'\save-{0}.dat'.format(i+1),'wb')
path=path+'\save-{0}.dat'.format(i+1)
index=path.rfind('\\')
if self.save_episode==True:
episode_file=open(path.replace(path[index+1:],'episode-{0}.dat'.format(i+1)),'wb')
pickle.dump(self.episode,episode_file)
episode_file.close()
self.episode_num=self.epi_num
pickle.dump(self.action_len,output_file)
pickle.dump(self.action,output_file)
pickle.dump(self.action_one,output_file)
pickle.dump(self.epsilon,output_file)
pickle.dump(self.discount,output_file)
pickle.dump(self.episode_step,output_file)
pickle.dump(self.optimizer,output_file)
pickle.dump(self.lr,output_file)
pickle.dump(self.save_episode,output_file)
pickle.dump(self.opt_flag,output_file)
pickle.dump(self.state_one,output_file)
pickle.dump(self.episode_num,output_file)
pickle.dump(self.total_episode,output_file)
pickle.dump(self.total_time,output_file)
output_file.close()
return
def restore(self,s_path,e_path):
input_file=open(s_path,'rb')
if self.save_episode==True:
episode_file=open(e_path,'rb')
self.episode=pickle.load(episode_file)
episode_file.close()
self.action_len=pickle.load(input_file)
self.action=pickle.load(input_file)
self.action_one=pickle.load(input_file)
self.epsilon=pickle.load(input_file)
self.discount=pickle.load(input_file)
self.episode_step=pickle.load(input_file)
self.optimizer=pickle.load(input_file)
self.lr=pickle.load(input_file)
self.save_episode=pickle.load(input_file)
self.opt_flag=pickle.load(input_file)
self.state_one=pickle.load(input_file)
self.episode_num=pickle.load(input_file)
self.total_episode=pickle.load(input_file)
self.total_time=pickle.load(input_file)
input_file.close()
return
``` |
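epsilon_greedy_policy() above spreads epsilon of the probability mass uniformly over the actions and gives the remaining 1 - epsilon to the greedy action. A standalone sketch of that distribution:
```python
import numpy as np

# standalone sketch of the distribution built in epsilon_greedy_policy() above
def epsilon_greedy_probs(q_values, epsilon=0.1):
    q_values = np.asarray(q_values, dtype=float)
    probs = np.ones(len(q_values)) * epsilon / len(q_values)   # explore: spread epsilon uniformly
    probs[np.argmax(q_values)] += 1.0 - epsilon                 # exploit: rest goes to the best action
    return probs

probs = epsilon_greedy_probs([0.1, 0.5, 0.2], epsilon=0.1)
print(probs)                                    # [0.0333... 0.9333... 0.0333...]
action = np.random.choice(len(probs), p=probs)  # sample an action index
```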
{
"source": "7omatoChan/yys",
"score": 2
} |
#### File: 7omatoChan/yys/main.py
```python
import argparse
import logging
import sys
logging.disable(logging.ERROR)
from airtest.core.helper import G
from common.utils import connect_windows, connect_android
from script import auto_team
from script import chi
from script import tupo
from script import yuling
def _get_parser():
ap = argparse.ArgumentParser(description="yys auto tool")
ap.add_argument('--android', help='connect to android device')
subparsers = ap.add_subparsers(dest="action", help="auto/")
    ap_run = subparsers.add_parser("auto", help="auto farming")
    ap_run.add_argument("script", help="farming script to run")
    ap_run = subparsers.add_parser("team", help="auto team farming")
    ap_run.add_argument("leader", help="team leader marker")
return ap
def main(argv=None):
ap = _get_parser()
args = ap.parse_args(argv)
    # connect to the device
if args.android:
        print("Connecting to Android device %s" % args.android)
if connect_android(args.android):
sys.exit(1)
else:
if connect_windows("阴阳师-网易游戏"):
sys.exit(1)
    print("\nDevice connected successfully")
runner = None
if args.action == "auto":
if args.script == "1":
runner = chi
elif args.script == "2":
runner = tupo
elif args.script == "3":
runner = yuling
elif args.action == "team":
runner = auto_team
else:
        ap.print_help()
        return
try:
runner.run(args)
except KeyboardInterrupt:
        print("Exiting script execution")
except Exception:
raise
if __name__ == "__main__":
main()
``` |
{
"source": "7onetella/vag",
"score": 3
} |
#### File: vag/utils/config.py
```python
import configparser
def read(file_path, debug=False):
data = {}
envs = {}
tags = []
config = configparser.ConfigParser()
config.read(file_path)
section = config.sections()[0]
for key in config[section]:
val = config[section][key]
if key.startswith('env.'):
key = key.replace('env.', '')
envs[key.upper()] = val
continue
if key.startswith('tag.'):
# tag.1 = urlprefix-www.example.com/signin
# tag.2 = urlprefix-www.example.com/signup
tags.append(val)
continue
data[key] = val
data['envs'] = envs
data['tags'] = tags
if debug:
print(data)
return data
```
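read() treats the first section of the INI file as the service definition: env.* keys are collected (upper-cased) into envs, tag.* keys into tags, and everything else is kept as-is. A sketch with a hypothetical config file:
```python
# a sketch with a hypothetical config file; section and key names are made up
from vag.utils.config import read   # import path assumed from the file header above

with open('/tmp/example.ini', 'w') as f:
    f.write('[myservice]\n'
            'image = nginx:latest\n'
            'env.port = 8080\n'
            'tag.1 = urlprefix-www.example.com/signin\n')

data = read('/tmp/example.ini')
print(data['image'])   # 'nginx:latest'
print(data['envs'])    # {'PORT': '8080'}
print(data['tags'])    # ['urlprefix-www.example.com/signin']
```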
#### File: vag/utils/cx_schema.py
```python
import os
import sys
from sqlalchemy import Column, ForeignKey, Integer, String, DateTime, Boolean
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
import yaml
from sqlalchemy import select
from flask_login import UserMixin
def get_connection_str():
return os.getenv('CX_DB_CONN')
Base = declarative_base()
class UserObj(UserMixin):
def __init__(self, id_, name, email):
self.id = id_
self.name = name
self.email = email
class User(Base):
__tablename__ = 'user'
id = Column(Integer, primary_key=True)
google_id = Column(String(64), nullable=True, unique=False)
username = Column(String(50), nullable=False, unique=True)
password = Column(String(100), nullable=False)
email = Column(String(50), nullable=False)
hashed_email = Column(String(64), nullable=False, unique=False)
is_active = Column(Boolean, nullable=False, default=True)
private_key = Column(String(4000), nullable=True)
public_key = Column(String(1000), nullable=True)
class IDE(Base):
__tablename__ = 'ide'
id = Column(Integer, primary_key=True)
name = Column(String(30))
class UserRepo(Base):
__tablename__ = 'user_repo'
id = Column(Integer, primary_key=True)
uri = Column(String(100))
user_id = Column(Integer, ForeignKey('user.id'))
user = relationship(User)
class RuntimeInstall(Base):
__tablename__ = 'runtime_install'
id = Column(Integer, primary_key=True)
name = Column(String(30), unique=True)
script_body = Column(String(4000))
class UserIDE(Base):
__tablename__ = 'user_ide'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('user.id'))
ide_id = Column(Integer, ForeignKey('ide.id'))
user = relationship(User)
ide = relationship(IDE)
class IDERuntimeInstall(Base):
__tablename__ = 'ide_runtime_install'
id = Column(Integer, primary_key=True)
user_ide_id = Column(Integer, ForeignKey('user_ide.id'))
runtime_install_id = Column(Integer, ForeignKey('runtime_install.id'))
user_ide = relationship(UserIDE)
runtime_install = relationship(RuntimeInstall)
class IDERepo(Base):
__tablename__ = 'ide_repo'
id = Column(Integer, primary_key=True)
user_ide_id = Column(Integer, ForeignKey('user_ide.id'))
user_ide = relationship(UserIDE)
uri = Column(String(100))
def query_data():
engine = create_engine(get_connection_str())
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
    user_ides = session.query(UserIDE).all()
    for user_ide in user_ides:
        print(f'{user_ide.ide.name}-{user_ide.user.username}')
user_repos = session.query(UserRepo).all()
for user_repo in user_repos:
print(f'{user_repo.uri}')
user_runtime_installs = session.query(IDERuntimeInstall).all()
for user_runtime_install in user_runtime_installs:
print(f'{user_runtime_install.runtime_install.name}')
def drop_create_schema():
engine = create_engine(get_connection_str())
Base.metadata.bind = engine
Base.metadata.drop_all()
Base.metadata.create_all(engine)
if __name__ == '__main__':
drop_create_schema()
```
#### File: vag/utils/cx_test_data.py
```python
import os
import sys
from sqlalchemy import Column, ForeignKey, Integer, String, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session
import yaml
from sqlalchemy import select
from vag.utils.cx_schema import *
def user_foo_test_data():
engine = create_engine(get_connection_str())
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
# ----------- meta data tables -----------
vscode = IDE(name='vscode')
intellij = IDE(name='intellij')
pycharm = IDE(name='pycharm')
goland = IDE(name='goland')
session.add(vscode)
session.add(intellij)
session.add(pycharm)
session.add(goland)
session.commit()
ember_install = RuntimeInstall(name='emberjs', script_body="""# ember
sudo sudo npm install -g ember-cli
""")
tmux_install = RuntimeInstall(name='tmux', script_body="""# tmux
sudo apt-get install -y tmux
echo -e "\n\nalias s='tmux new -A -s shared'" >> /home/coder/.zshrc
""")
gh_cli_install = RuntimeInstall(name='github cli', script_body="""# github cli gh install
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo gpg --dearmor -o /usr/share/keyrings/githubcli-archive-keyring.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
sudo apt-get update
sudo apt-get install gh
""")
poetry_install = RuntimeInstall(name='poetry', script_body="""# poetry
curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python3 -
echo -e "export PATH=\"\$HOME/.poetry/bin:\$PATH\"" >> ~/.zshrc
""")
postgresql_install = RuntimeInstall(name='postgres', script_body="""# vag dependencies
sudo apt-get install -y postgresql
sudo apt-get install -y libpq-dev
""")
session.add(ember_install)
session.add(tmux_install)
session.add(gh_cli_install)
session.add(poetry_install)
session.add(postgresql_install)
session.commit()
# ----------- user related instance tables -----------
new_user = User(username='foo', email='<EMAIL>', password='<PASSWORD>', private_key="""-----BEGIN RSA PRIVATE KEY-----
-----END RSA PRIVATE KEY-----""", public_key='rsa public key', google_id='1234')
session.add(new_user)
session.commit()
user_ide_vscode = UserIDE(user=new_user, ide=vscode)
user_ide_intellij = UserIDE(user=new_user, ide=intellij)
user_ide_pycharm = UserIDE(user=new_user, ide=pycharm)
user_ide_goland = UserIDE(user=new_user, ide=goland)
session.add(user_ide_vscode)
session.add(user_ide_intellij)
session.add(user_ide_pycharm)
session.add(user_ide_goland)
session.commit()
session.add(IDERuntimeInstall(user_ide=user_ide_vscode, runtime_install=ember_install))
session.add(IDERuntimeInstall(user_ide=user_ide_vscode, runtime_install=tmux_install))
session.add(IDERuntimeInstall(user_ide=user_ide_vscode, runtime_install=gh_cli_install))
session.add(IDERuntimeInstall(user_ide=user_ide_vscode, runtime_install=poetry_install))
session.add(IDERuntimeInstall(user_ide=user_ide_vscode, runtime_install=postgresql_install))
session.commit()
containers_repo = UserRepo(uri='<EMAIL>:foo/containers.git', user=new_user)
vag_repo = UserRepo(uri='<EMAIL>:foo/vag.git', user=new_user)
sites_repo = UserRepo(uri='<EMAIL>:foo/sites.git', user=new_user)
users_repo = UserRepo(uri='<EMAIL>:foo/users.git', user=new_user)
session.add(containers_repo)
session.add(vag_repo)
session.add(sites_repo)
session.add(users_repo)
session.commit()
session.add(IDERepo(user_ide=user_ide_vscode, uri=containers_repo.uri))
session.add(IDERepo(user_ide=user_ide_vscode, uri=vag_repo.uri))
session.add(IDERepo(user_ide=user_ide_vscode, uri=sites_repo.uri))
session.add(IDERepo(user_ide=user_ide_vscode, uri=users_repo.uri))
session.commit()
def reset_test_data_foo():
engine = create_engine(get_connection_str())
Base.metadata.bind = engine
Base.metadata.drop_all()
Base.metadata.create_all(engine)
user_foo_test_data()
if __name__ == '__main__':
reset_test_data_foo()
```
#### File: vag/utils/exec.py
```python
import inspect
import os
import sys
import shlex
from shlex import split
from subprocess import Popen, PIPE, STDOUT
import vag
def get_script_path(relative_path):
path = os.path.dirname(inspect.getfile(vag))
return path + '/scripts/{}'.format(relative_path)
def run(cmd, capture_output=False):
# https://www.endpoint.com/blog/2015/01/28/getting-realtime-output-using-python
process = Popen(split(cmd), stdout=PIPE, stderr=STDOUT, shell=False, encoding='utf8')
lines = []
while True:
line = process.stdout.readline()
if line == '' and process.poll() is not None:
break
if line and not capture_output:
print(line.rstrip())
lines.append(line.rstrip())
returncode = process.poll()
return returncode, lines
def run_raw(cmd):
# os.system preserves ANSI escaped output
return_code = os.system(cmd)
return return_code
def fork(cmd_str: str, debug=False):
if debug:
print()
print(cmd_str)
        print()
if sys.stdin.isatty():
# CREDIT
# https://gist.github.com/bortzmeyer/1284249#gistcomment-3074036
pid = os.fork()
if pid == 0: # a child process
cmd = shlex.split(cmd_str)
os.execv(cmd[0], cmd)
os.waitpid(pid, 0)
``` |
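run() streams the child's combined stdout/stderr line by line and also returns the collected lines; with capture_output=True the live printing is skipped. A minimal sketch (import path assumed from the file header):
```python
# a minimal sketch; the import path is assumed from the file header above
from vag.utils.exec import run

returncode, lines = run('echo hello', capture_output=True)   # collect output instead of live-printing it
print(returncode, lines)   # 0 ['hello']
```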
{
"source": "7ooL/api_decora",
"score": 2
} |
#### File: decora_wifi/models/controller.py
```python
from ..base_model import BaseModel
class Controller(BaseModel):
def __init__(self, session, model_id=None):
super(Controller, self).__init__(session, model_id)
@classmethod
def count(cls, session, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/count"
return session.call_api(api, attribs, 'get')
def count_thermostats(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/thermostats/count".format(self._id)
return self._session.call_api(api, attribs, 'get')
@classmethod
def create(cls, session, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers"
return session.call_api(api, attribs, 'post')
@classmethod
def create_many(cls, session, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers"
return session.call_api(api, attribs, 'post')
def create_thermostats(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/thermostats".format(self._id)
return self._session.call_api(api, attribs, 'post')
def delete_by_id(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}".format(self._id)
return self._session.call_api(api, attribs, 'delete')
def delete_thermostats(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/thermostats".format(self._id)
return self._session.call_api(api, attribs, 'delete')
def destroy_by_id_thermostats(self, thermostat_id, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/thermostats/{1}".format(self._id, thermostat_id)
return self._session.call_api(api, attribs, 'delete')
def discover(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/discover".format(self._id)
return self._session.call_api(api, attribs, 'get')
def exists(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/exists".format(self._id)
return self._session.call_api(api, attribs, 'get')
@classmethod
def find(cls, session, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers"
items = session.call_api(api, attribs, 'get')
result = []
if items is not None:
for data in items:
model = Controller(session, data['id'])
model.data = data
result.append(model)
return result
def find_by_id(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}".format(self._id)
data = self._session.call_api(api, attribs, 'get')
self.data.update(data)
return self
def find_by_id_thermostats(self, thermostat_id, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/thermostats/{1}".format(self._id, thermostat_id)
data = self._session.call_api(api, attribs, 'get')
from .thermostat import Thermostat
model = Thermostat(self._session, data['id'])
model.data = data
return model
@classmethod
def find_one(cls, session, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/findOne"
return session.call_api(api, attribs, 'get')
def refresh(self):
api = "/Controllers/{0}".format(self._id)
result = self._session.call_api(api, {}, 'get')
if result is not None:
self.data.update(result)
return self
def get_installation(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/installation".format(self._id)
data = self._session.call_api(api, attribs, 'get')
from .installation import Installation
model = Installation(self._session, data['id'])
model.data = data
return model
def get_thermostats(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/thermostats".format(self._id)
items = self._session.call_api(api, attribs, 'get')
from .thermostat import Thermostat
result = []
if items is not None:
for data in items:
model = Thermostat(self._session, data['id'])
model.data = data
result.append(model)
return result
def post_message(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/postMessage".format(self._id)
return self._session.call_api(api, attribs, 'post')
def replace_by_id(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/replace".format(self._id)
return self._session.call_api(api, attribs, 'post')
@classmethod
def replace_or_create(cls, session, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/replaceOrCreate"
return session.call_api(api, attribs, 'post')
def update_attributes(self, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}".format(self._id)
data = self._session.call_api(api, attribs, 'put')
self.data.update(attribs)
return self
def update_by_id_thermostats(self, thermostat_id, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/{0}/thermostats/{1}".format(self._id, thermostat_id)
data = self._session.call_api(api, attribs, 'put')
from .thermostat import Thermostat
model = Thermostat(self._session, data['id'])
model.data = data
return model
@classmethod
def upsert(cls, session, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers"
data = session.call_api(api, attribs, 'put')
model = Controller(session, data['id'])
model.data = data
return model
@classmethod
def upsert_with_where(cls, session, attribs=None):
if attribs is None:
attribs = {}
api = "/Controllers/upsertWithWhere"
return session.call_api(api, attribs, 'post')
``` |
{
"source": "7ooL/pyvivintsky",
"score": 2
} |
#### File: pyvivintsky/pyvivintsky/vivint_panel.py
```python
import asyncio
from homeauto.api_vivint.pyvivintsky.vivint_device import VivintDevice
from homeauto.api_vivint.pyvivintsky.vivint_api import VivintAPI
from homeauto.api_vivint.pyvivintsky.vivint_wireless_sensor import VivintWirelessSensor
from homeauto.api_vivint.pyvivintsky.vivint_door_lock import VivintDoorLock
from homeauto.api_vivint.pyvivintsky.vivint_unknown_device import VivintUnknownDevice
from homeauto.house import register_security_event
import logging
# This retrieves a Python logging instance (or creates it)
logger = logging.getLogger(__name__)
class VivintPanel(VivintDevice):
"""
Represents the main Vivint panel device
"""
"""
states 1 and 2 come from panels
"""
ARM_STATES = {0: "disarmed", 1: "armed_away", 2: "armed_stay", 3: "armed_stay", 4: "armed_away"}
def __init__(self, vivintapi: VivintAPI, descriptor: dict, panel: dict):
self.__vivintapi: VivintAPI = vivintapi
self.__descriptor = descriptor
self.__panel = panel
self.__child_devices = self.__init_devices()
def __init_devices(self):
"""
Initialize the devices
"""
devices = {}
for device in self.__panel[u"system"][u"par"][0][u"d"]:
devices[str(device[u"_id"])] = self.get_device_class(device[u"t"])(
device, self
)
return devices
def id(self):
return str(self.__panel[u"system"][u"panid"])
def get_armed_state(self):
"""Return panels armed state."""
return self.ARM_STATES[self.__descriptor[u"par"][0][u"s"]]
def street(self):
"""Return the panel's street address."""
return self.__panel[u"system"][u"add"]
def zip_code(self):
"""Return the panel's zip code."""
return self.__panel[u"system"][u"poc"]
def city(self):
"""Return the panel's city."""
return self.__panel[u"system"][u"cit"]
def climate_state(self):
"""Return the climate state"""
return self.__panel[u"system"][u"csce"]
async def poll_devices(self):
"""
Poll all devices attached to this panel.
"""
self.__panel = await self.__vivintapi.get_system_info(self.id())
def get_devices(self):
"""
Returns the current list of devices attached to the panel.
"""
return self.__child_devices
def get_device(self, id):
return self.__child_devices[id]
def update_device(self, id, updates):
self.__child_devices[id].update_device(updates)
def handle_message(self, message):
if u"d" in message[u"da"].keys():
for msg_device in message[u"da"][u"d"]:
self.update_device(str(msg_device[u"_id"]), msg_device)
def handle_armed_message(self, message):
logger.debug(message[u"da"][u"seca"][u"n"]+" set system "+self.ARM_STATES[message[u"da"][u"seca"][u"s"]])
register_security_event(message[u"da"][u"seca"][u"n"],self.ARM_STATES[message[u"da"][u"seca"][u"s"]])
def handle_disarmed_message(self, message):
logger.debug(message[u"da"][u"secd"][u"n"]+" set system "+self.ARM_STATES[message[u"da"][u"secd"][u"s"]])
register_security_event(message[u"da"][u"secd"][u"n"],self.ARM_STATES[message[u"da"][u"secd"][u"s"]])
@staticmethod
def get_device_class(type_string):
mapping = {
VivintDevice.DEVICE_TYPE_WIRELESS_SENSOR: VivintWirelessSensor,
VivintDevice.DEVICE_TYPE_DOOR_LOCK: VivintDoorLock
}
return mapping.get(type_string, VivintUnknownDevice)
```
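`get_device_class` is a small dispatch table: it maps the raw `t` (type) field of a device payload to a wrapper class and falls back to `VivintUnknownDevice`. A short illustration with made-up payloads:
```python
# Illustration only; the payloads are invented, the type constants come from VivintDevice.
raw_devices = [
    {"_id": 12, "t": VivintDevice.DEVICE_TYPE_WIRELESS_SENSOR, "n": "Front Door"},
    {"_id": 40, "t": "some_unrecognized_type"},
]
for raw in raw_devices:
    cls = VivintPanel.get_device_class(raw["t"])  # unknown types fall back to VivintUnknownDevice
    print(raw["_id"], cls.__name__)
```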
#### File: pyvivintsky/pyvivintsky/vivint_pubnub_callback.py
```python
from pubnub.callbacks import SubscribeCallback
from pubnub.enums import PNOperationType, PNStatusCategory
import logging
# This retrieves a Python logging instance (or creates it)
logger = logging.getLogger(__name__)
class VivintPubNubCallback(SubscribeCallback):
"""
Basic PubNub Callback to pass messages to the handler
"""
def __init__(self, handler, connected, disconnected):
self.__handler = handler
self.__connected = connected
self.__disconnected = disconnected
def status(self, pubnub, status):
# The status object returned is always related to subscribe but could contain
# information about subscribe, heartbeat, or errors
# use the operationType to switch on different options
if (
status.operation == PNOperationType.PNSubscribeOperation
or status.operation == PNOperationType.PNUnsubscribeOperation
):
if status.category == PNStatusCategory.PNConnectedCategory:
"""Fire off connected event"""
self.__connected()
# pass
# This is expected for a subscribe, this means there is no error or issue whatsoever
elif status.category == PNStatusCategory.PNReconnectedCategory:
pass
# This usually occurs if subscribe temporarily fails but reconnects. This means
# there was an error but there is no longer any issue
elif status.category == PNStatusCategory.PNDisconnectedCategory:
logger.info("Disconnect Event")
# self.__disconnected()
# pass
# This is the expected category for an unsubscribe. This means there
# was no error in unsubscribing from everything
elif status.category == PNStatusCategory.PNUnexpectedDisconnectCategory:
logger.info("PNUnexpectedDisconnectCategory")
pass
# This is usually an issue with the internet connection, this is an error, handle
# appropriately retry will be called automatically
elif status.category == PNStatusCategory.PNAccessDeniedCategory:
pass
# This means that PAM does not allow this client to subscribe to this
# channel and channel group configuration. This is another explicit error
elif status.category == PNStatusCategory.PNAcknowledgmentCategory:
"""
Disconnected Category doesn't seem to fire.
It instead fires an acknowledgement category.
"""
if status.operation == PNOperationType.PNUnsubscribeOperation:
# print(status.is_error())
self.__disconnected()
else:
logger.info("Category: " + str(status.category))
# This is usually an issue with the internet connection, this is an error, handle appropriately
# retry will be called automatically
elif status.operation == PNOperationType.PNHeartbeatOperation:
# Heartbeat operations can in fact have errors, so it is important to check first for an error.
# For more information on how to configure heartbeat notifications through the status
# PNObjectEventListener callback, consult http://www.pubnub.com/docs/python/api-reference-configuration#configuration
if status.is_error():
logger.error("Heartbeat Error")
pass
# There was an error with the heartbeat operation, handle here
else:
pass
# Heartbeat operation was successful
else:
logger.info("Unknown Status: " + str(status.operation))
pass
# Encountered unknown status type
def presence(self, pubnub, presence):
# print("Presence Event!")
# print(presence)
pass # handle incoming presence data
def message(self, pubnub, message):
# handle incoming messages
# print("message")
# print(message.message)
self.__handler(message.message)
def signal(self, pubnub, signal):
# print("signal")
# print(signal.message)
pass # handle incoming signals
``` |
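The callback class above only routes events; it still has to be attached to a PubNub client. A minimal wiring sketch using the standard `pubnub` SDK is shown below; the subscribe key and channel name are placeholders, and in the real project they come from the Vivint authentication flow.
```python
from pubnub.pnconfiguration import PNConfiguration
from pubnub.pubnub import PubNub

pnconfig = PNConfiguration()
pnconfig.subscribe_key = "sub-c-placeholder"   # placeholder, obtained elsewhere
pnconfig.uuid = "home-auto-client"
pubnub = PubNub(pnconfig)

listener = VivintPubNubCallback(
    handler=lambda msg: print("message:", msg),
    connected=lambda: print("connected"),
    disconnected=lambda: print("disconnected"),
)
pubnub.add_listener(listener)
pubnub.subscribe().channels("PlatformChannel#placeholder").execute()
```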
{
"source": "7ooL/web_home_auto",
"score": 2
} |
#### File: web_home_auto/decora/apps.py
```python
from django.apps import AppConfig
import sys, logging
logger = logging.getLogger(__name__)
class DecorasConfig(AppConfig):
name = 'decora'
verbose_name = "Leviton Decora Smart"
if 'runserver' in sys.argv:
def ready(self):
logger.debug('Starting '+self.verbose_name+' App')
import decora.jobs as jobs
jobs.start()
```
#### File: web_home_auto/decora/jobs.py
```python
from decora.models import Switch, Account
from jobs.jobs import build_jobs
from api_decora.decora_wifi import DecoraWiFiSession
from api_decora.decora_wifi.models.person import Person
from api_decora.decora_wifi.models.residential_account import ResidentialAccount
from api_decora.decora_wifi.models.residence import Residence
from django.core.exceptions import ObjectDoesNotExist
import logging, traceback
logger = logging.getLogger(__name__)
def start():
JOBS = (
('Decora', 'Get Decora Device States and Update Database', False, 10, sync_decora),
)
build_jobs(JOBS)
def sync_decora():
try:
decoraAcct = Account.objects.get(pk=1)
except ObjectDoesNotExist as e:
logger.error(e)
except:
logger.error("Error:"+ str(traceback.format_exc()))
else:
session = DecoraWiFiSession()
try:
session.login(decoraAcct.decora_username, decoraAcct.decora_password)
except ValueError as err:
logger.error(str(err).split(',')[0])
except:
logger.error("Error:"+ str(traceback.format_exc()))
else:
perms = session.user.get_residential_permissions()
logger.debug('{} permissions'.format(len(perms)))
all_residences = []
for permission in perms:
logger.debug('Permission: {}'.format(permission))
if permission.residentialAccountId is not None:
acct = ResidentialAccount(session, permission.residentialAccountId)
for res in acct.get_residences():
all_residences.append(res)
elif permission.residenceId is not None:
res = Residence(session, permission.residenceId)
all_residences.append(res)
for residence in all_residences:
for switch in residence.get_iot_switches():
data = {}
data['name'] = switch.name
data['mic_mute'] = switch.mic_mute
data['lang'] = switch.lang
data['fadeOnTime'] = switch.fadeOnTime
data['serial'] = switch.serial
data['configuredAmazon'] = switch.configuredAmazon
data['brightness'] = switch.brightness
data['downloaded'] = switch.downloaded
data['residentialRoomId'] = switch.residentialRoomId
data['env'] = switch.env
data['timeZoneName'] = switch.timeZoneName
data['identify'] = switch.identify
data['blink'] = switch.blink
data['linkData'] = switch.linkData
data['loadType'] = switch.loadType
data['isRandomEnabled'] = switch.isRandomEnabled
data['ota'] = switch.ota
data['otaStatus'] = switch.otaStatus
data['connected'] = switch.connected
data['allowLocalCommands'] = switch.allowLocalCommands
data['long'] = switch.long
data['presetLevel'] = switch.presetLevel
data['timeZone'] = switch.timeZone
data['buttonData'] = switch.buttonData
data['id'] = switch.id
data['apply_ota'] = switch.apply_ota
data['cloud_ota'] = switch.cloud_ota
data['canSetLevel'] = switch.canSetLevel
data['position'] = switch.position
data['customType'] = switch.customType
data['autoOffTime'] = switch.autoOffTime
data['programData'] = switch.programData
data['resKey'] = switch.resKey
data['resOcc'] = switch.resOcc
data['lat'] = switch.lat
data['includeInRoomOnOff'] = switch.includeInRoomOnOff
data['model'] = switch.model
data['random'] = switch.random
data['lastUpdated'] = switch.lastUpdated
data['dimLED'] = switch.dimLED
data['audio_cue'] = switch.audio_cue
data['deleted'] = switch.deleted
data['fadeOffTime'] = switch.fadeOffTime
data['manufacturer'] = switch.manufacturer
data['logging'] = switch.logging
data['version'] = switch.version
data['maxLevel'] = switch.maxLevel
data['statusLED'] = switch.statusLED
data['localIP'] = switch.localIP
data['rssi'] = switch.rssi
data['isAlexaDiscoverable'] = switch.isAlexaDiscoverable
data['created'] = switch.created
data['mac'] = switch.mac
data['dstOffset'] = switch.dstOffset
data['dstEnd'] = switch.dstEnd
data['residenceId'] = switch.residenceId
data['minLevel'] = switch.minLevel
data['dstStart'] = switch.dstStart
data['onTime'] = switch.onTime
data['connectedTimestamp'] = switch.connectedTimestamp
if switch.power == 'OFF':
data['power'] = False
elif switch.power == 'ON':
data['power'] = True
if not Switch.objects.filter(id=(data['id'])).exists():
logger.info('Creating Decora Switch:' + data['name'])
s = (Switch.objects.create)(**data)
s.save()
log_addition(decoraAcct, s)
else:
logger.debug('Updating Decora Switch:' + data['name'])
(Switch.objects.filter(id=(data['id'])).update)(**data)
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext as _
from django.utils.encoding import force_text
def log_addition(acct, object):
"""
Log that an object has been successfully added.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, ADDITION
LogEntry.objects.log_action(
user_id=acct.user.id,
content_type_id=ContentType.objects.get_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=ADDITION
)
```
#### File: web_home_auto/homeauto/admin.py
```python
from django.contrib.admin import AdminSite
class MyAdminSite(AdminSite):
def index(self, request, extra_context=None):
all_users= User.objects.all()
extra_context = extra_context or {"all_users":all_users }
# print(extra_context)
return super(MyAdminSite, self).index(request, extra_context)
from django.contrib import admin
admin.site = MyAdminSite()
admin.site.site_title = 'Home Auto Web Interface'
admin.site.site_header = 'Home Auto'
admin.site.site_url = 'http://ha:8080.com/'
admin.site.index_title = 'House Administration'
admin.site.index_template = 'admin/ha_index.html'
# global actions for objects
def make_discoverable(modeladmin, request, queryset):
queryset.update(enabled=True)
make_discoverable.short_description = "Discoverable by HomeAuto"
def remove_discoverable(modeladmin, request, queryset):
queryset.update(enabled=False)
remove_discoverable.short_description = "Hide from HomeAuto"
# global actions for objects
def enable(modeladmin, request, queryset):
queryset.update(enabled=True)
enable.short_description = "Enable Selected"
def disable(modeladmin, request, queryset):
queryset.update(enabled=False)
disable.short_description = "Disable Selected"
# log entry (activity log)
from django.contrib.admin.models import LogEntry
class adminLogEntry(admin.ModelAdmin):
list_display = ('object_repr', 'action_flag', 'user', 'content_type', 'object_id')
list_filter = ('action_flag', 'user', 'content_type')
search_fields = ('object_repr',)
admin.site.register(LogEntry, adminLogEntry)
# house models
import homeauto.models as house
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'id', 'username', 'enabled')
list_filter = ('enabled',)
actions = [enable, disable]
class HouseMotionDetectorAdmin(admin.ModelAdmin):
list_display = ('name', 'id', 'enabled', 'source', 'source_id')
list_filter = ('enabled','source')
search_fields = ('name','source')
actions = [enable, disable]
class HouseLightAdmin(admin.ModelAdmin):
list_display = ('name', 'id', 'enabled', 'source', 'source_type', 'source_id')
list_filter = ('source', 'source_type')
search_fields = ('name','source', 'source_type')
actions = [enable, disable]
class HouseLockAdmin(admin.ModelAdmin):
list_display = ('name', 'id', 'enabled', 'source', 'source_type', 'source_id')
list_filter = ('source', 'source_type')
search_fields = ('name','source', 'source_type')
actions = [enable, disable]
class HouseSensorAdmin(admin.ModelAdmin):
list_display = ('name', 'id', 'enabled', 'source', 'source_type', 'source_id')
list_filter = ('source', 'source_type')
search_fields = ('name','source', 'source_type')
actions = [enable, disable]
class HouseScheduleAdmin(admin.ModelAdmin):
list_display = ('name', 'id', 'enabled', 'source', 'source_type', 'source_id')
list_filter = ('source', 'source_type')
search_fields = ('name','source', 'source_type')
actions = [enable, disable]
from django import forms
class TriggerForm(forms.ModelForm):
class Meta:
model = house.Trigger
fields = ('name', 'enabled', 'trigger', 'lock')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# self.fields['city'].queryset = City.objects.none()
"""
trigger = models.CharField(max_length=60,choices=TRIGGER_TYPES, default=DEFAULT)
motion_detector = models.OneToOneField(HouseMotionDetector, on_delete=models.CASCADE, blank=True, null=True)
sensor = models.OneToOneField(HouseSensor, on_delete=models.CASCADE, blank=True, null=True)
lock = models.OneToOneField(HouseLock, on_delete=models.CASCADE, blank=True, null=True)
window_start = models.TimeField(default=timezone.now)
window_end = models.TimeField(default=timezone.now)
external_schedule = models.ForeignKey(HouseSchedule, on_delete=models.CASCADE, blank=True, null=True)
external_schedule_delay = models.IntegerField(default=-1, verbose_name='Delay behind external schedule (minutes)')
people = models.ManyToManyField(Person, blank=True)
people_has_left = models.BooleanField(default=False)
people_has_arrived = models.BooleanField(default=False)
security_panel = models.ForeignKey(Panel, on_delete=models.CASCADE, blank=True, null=True)
security_armed_to = models.BooleanField(default=False, verbose_name="When person sets state to")
security_changed_to = models.BooleanField(default=False, verbose_name="When state changes to")
security_armed_state = models.CharField(max_length=60,choices=Common.ARM_STATES, default=Common.DISARMED, verbose_name="Armed State")
hvac_unit = models.ManyToManyField(System, blank=True)
hvac_profile = models.CharField(max_length=60,choices=Common.HVAC_PROFILES, default=Common.HOME)
hvac_value = models.IntegerField(default=0)
hvac_hold = models.BooleanField(default=False)
hvac_heat_mode = models.CharField(max_length=60,choices=Common.HVAC_HEAT_MODES, default=Common.OFF)
event = models.OneToOneField(CustomEvent, on_delete=models.CASCADE, blank=True, null=True)
"""
class TriggerAdmin(admin.ModelAdmin):
search_fields = ('name','trigger')
list_display = ('name', 'enabled', 'trigger', 'id')
list_filter = ('enabled', 'trigger')
# change_form_template = 'trigger_edit.html'
actions = [enable, disable]
# form = TriggerForm
class ActionAdmin(admin.ModelAdmin):
search_fields = ['name']
list_display = ('name', 'enabled', 'action', 'last_action_time')
list_filter = ('enabled', 'action')
actions = [enable, disable]
from django.urls import reverse
from django.utils.html import format_html
from django.utils.safestring import mark_safe
class NuggetAdmin(admin.ModelAdmin):
def trigger_name(self, obj):
display_text = "<br> ".join([
"<a href={}>{}</a>".format(reverse('admin:{}_{}_change'.format(trigger._meta.app_label, trigger._meta.model_name),args=(trigger.pk,)),trigger.name)
if trigger.enabled
else "<a class='redStrike' href={}>{}</a>".format(reverse('admin:{}_{}_change'.format(trigger._meta.app_label, trigger._meta.model_name),args=(trigger.pk,)),trigger.name)
for trigger in obj.triggers.all()
])
if display_text:
return mark_safe(display_text)
return "-"
def action_name(self, obj):
display_text = "<br> ".join([
"<a href={}>{}</a>".format(reverse('admin:{}_{}_change'.format(action._meta.app_label, action._meta.model_name),args=(action.pk,)),action.name)
if action.enabled
else "<a class='redStrike' href={}>{}</a>".format(reverse('admin:{}_{}_change'.format(action._meta.app_label, action._meta.model_name),args=(action.pk,)),action.name)
for action in obj.actions.all()
])
if display_text:
return mark_safe(display_text)
return "-"
# def trigger_count(self, obj):
# return obj.triggers.count()
# trigger_count.short_description = "Trigger Count"
# def action_count(self, obj):
# return obj.actions.count()
# action_count.short_description = "Action Count"
search_fields = ( 'name','triggers__name', 'actions__name')
list_filter = ('enabled', 'only_execute_if_someone_is_home')
list_display = ('name', 'enabled', 'only_execute_if_someone_is_home', 'trigger_name', 'action_name', 'id')
admin.site.register(house.Action, ActionAdmin)
admin.site.register(house.HouseMotionDetector, HouseMotionDetectorAdmin)
admin.site.register(house.HouseLight, HouseLightAdmin)
admin.site.register(house.HouseLock, HouseLockAdmin)
admin.site.register(house.HouseSensor, HouseSensorAdmin)
admin.site.register(house.HouseSchedule, HouseScheduleAdmin)
admin.site.register(house.Trigger, TriggerAdmin)
admin.site.register(house.Account, AccountAdmin)
admin.site.register(house.CustomEvent)
admin.site.register(house.Nugget, NuggetAdmin)
# user models
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.models import User
class PersonInline(admin.StackedInline):
model = house.Person
can_delete = False
verbose_name_plural = 'Home Auto Options'
class UserAdmin(BaseUserAdmin):
list_display = ('username', 'first_name', 'last_name', 'is_home')
def is_home(self, obj):
return house.Person.objects.get(user=obj.id).is_home
inlines = (PersonInline,)
admin.site.register(User, UserAdmin)
```
#### File: web_home_auto/homeauto/event_logs.py
```python
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext as _
from django.utils.encoding import force_text
import logging
logger = logging.getLogger(__name__)
sys_admin_user_id = 1
def log_addition(object):
"""
Log that an object has been successfully added.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, ADDITION
LogEntry.objects.log_action(
user_id=sys_admin_user_id,
content_type_id=ContentType.objects.get_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=ADDITION
)
def log_change(object):
"""
Log that an object has been successfully changed.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, CHANGE
LogEntry.objects.log_action(
user_id=sys_admin_user_id,
content_type_id=ContentType.objects.get_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=CHANGE,
change_message=_('Changed')
)
def log_deletion(object):
"""
Log that an object will be deleted. Note that this method is called
before the deletion.
The default implementation creates an admin LogEntry object.
"""
from django.contrib.admin.models import LogEntry, DELETION
LogEntry.objects.log_action(
user_id=sys_admin_user_id,
content_type_id=ContentType.objects.get_for_model(object).pk,
object_id=object.pk,
object_repr=force_text(object),
action_flag=DELETION
)
```
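These helpers write admin `LogEntry` rows attributed to a fixed system user (id 1), so automated changes show up in the same activity log as manual admin edits. A hypothetical call site:
```python
# Hypothetical call site; the import path assumes this module lives at homeauto/event_logs.py.
from homeauto.event_logs import log_change
from decora.models import Switch

switch = Switch.objects.first()
if switch is not None:
    switch.power = True
    switch.save()
    log_change(switch)   # records a CHANGE LogEntry for the switch under user id 1
```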
#### File: web_home_auto/homeauto/house.py
```python
import logging, requests, json, smtplib, datetime, os, random, sys, subprocess, traceback
from django.utils import timezone
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from homeauto.models import Trigger, Nugget, Action, HouseLight, Account, Person, CustomEvent
from hue.models import Sensor, Scene, Light, Group, Schedule
from wemo.models import Device as Wemo
from decora.models import Switch
from vivint.models import Device as VivintDevice
from vivint.models import Panel
import hue.actions as HueAction
import wemo.actions as WemoAction
import decora.actions as DecoraAction
from datetime import timedelta
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
logger = logging.getLogger(__name__)
# this is for intercepting database updates
@receiver(post_save, sender=Person)
def register_person_event(sender, instance, **kwargs):
logger.info("Person:{"+instance.user.username+"} is home {"+str(instance.is_home)+"}")
if instance.is_home:
triggers = Trigger.objects.filter(trigger=(Trigger.PEOPLE), people_has_arrived=True, people=instance)
for t in triggers:
results = []
logger.debug('Evaluating: ' + t.name + " ("+str(t.people.count())+" people)")
if t.enabled:
if t.people.count() > 1:
for person in t.people.all():
# person = Person.objects.get(id=person.id)
if person.is_home:
logger.debug(' TEST (arrive): '+person.user.username + ' is home, matching trigger: '+t.name)
results.append(True)
else:
logger.debug(' TEST (arrive): '+person.user.username + ' is not home, not matching trigger: '+t.name)
results.append(False)
else:
results.append(True)
else:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{disabled}')
results.append(False)
logger.debug(" Results: "+str(results))
if all(results):
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{fired}')
evaluate_nuggets(t.id)
else:
triggers = Trigger.objects.filter(trigger=(Trigger.PEOPLE), people_has_left=True, people=instance)
for t in triggers:
results = []
logger.debug('Evaluating: ' + t.name + " ("+str(t.people.count())+" people)")
if t.enabled:
if t.people.count() > 1:
for person in t.people.all():
# person = Person.objects.get(id=person.id)
if not person.is_home:
logger.debug(' TEST (leave): '+person.user.username + ' is not home, matching trigger: '+t.name)
results.append(True)
else:
logger.debug(' TEST (leave): '+person.user.username + ' is home, NOT matching trigger: '+t.name)
results.append(False)
else:
results.append(True)
else:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{disabled}')
results.append(False)
logger.debug(" Results: "+str(results))
if all(results):
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{fired}')
evaluate_nuggets(t.id)
def register_watcher_event(event):
logger.debug(event)
if event.event_type == 'created':
logger.info('Received created event - %s.' % event.src_path)
with open(event.src_path) as f:
logger.info(subprocess.run(['cat', event.src_path], stdout=subprocess.PIPE))
if not f.readline().rstrip():
logger.error('Input file is empty')
remove_file(event.src_path)
return
with open(event.src_path) as f:
s = f.readline().rstrip().split(':')
if len(s) == 1:
try:
e = CustomEvent.objects.get(name=(s[0].lower()))
except ObjectDoesNotExist as e:
logger.error(e)
logger.error('There are no watcher events defined for: ' + s[0])
except:
logger.error("Error:"+ str(traceback.format_exc()))
else:
try:
t = Trigger.objects.get(trigger=(Trigger.CUSTOM_EVENT), event__name=(e.name))
if t.enabled:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{fired}')
eval = True
else:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{disabled}')
eval = False
except Exception as e:
try:
logger.error(e)
eval = False
finally:
e = None
del e
if eval:
evaluate_nuggets(t.id)
elif len(s) == 2:
key = s[0].lower()
value = s[1]
logger.info('Found:{' + key + '}{' + value+"}")
if key == 'arrive':
try:
p = Person.objects.get(user__username=value)
logger.debug(p.user.first_name+" was found")
if p:
p.is_home = True
p.save()
else:
logger.error('No person was found with the username: ' + str(value))
except:
logger.error("Unexpected error:"+ str(traceback.format_exc()))
elif key == 'leave':
try:
p = Person.objects.get(user__username=value)
logger.debug(p.user.first_name+" was found")
if p:
p.is_home = False
try:
p.save()
except:
logger.error("Unexpected error:"+ str(traceback.format_exc()))
else:
logger.error('No person was found with the username: ' + str(value))
except:
logger.error("Unexpected error:"+ str(traceback.format_exc()))
else:
logger.error('No action defined for key: ' + str(key))
else:
logger.error(event.src_path + ' contains invalid content: ' + str(s))
remove_file(event.src_path)
else:
logger.info('New event - %s.' % event)
logger.debug("end of register watcher event - %s" % event)
def remove_file(path):
if os.path.isfile(path):
logger.debug('removing ' + path)
try:
os.remove(path)
except:
logger.error("Unexpected error:"+ str(sys.exc_info()[0]))
def register_motion_event(source, device_id):
if 'Hue' in source:
m = Sensor.objects.get(id=device_id)
elif 'Vivint' in source:
m = VivintDevice.objects.get(id=device_id)
logger.info('Sensor{' + source + '}{' + m.name + '}{' + str(m.id) + '}{' + m.type + '}{Active}')
try:
t = Trigger.objects.get(trigger=(Trigger.MOTION), motion_detector__source_id=device_id)
if t.enabled:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{fired}')
eval = True
else:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{disabled}')
eval = False
except:
eval = False
if eval:
evaluate_nuggets(t.id)
def register_sensor_event(source, device_id, state):
if 'Hue' in source:
m = Sensor.objects.get(id=device_id)
elif 'Vivint' in source:
m = VivintDevice.objects.get(id=device_id)
logger.info('Sensor{' + source + '}{' + m.name + '}{' + str(m.id) + '}{' + m.type + '}{' + state+'}')
try:
if state == 'Opened':
t = Trigger.objects.get(trigger=(Trigger.SENSOR_OPENED), sensor__source_id=device_id)
elif state == 'Closed':
t = Trigger.objects.get(trigger=(Trigger.SENSOR_CLOSED), sensor__source_id=device_id)
elif state == 'Locked':
t = Trigger.objects.get(trigger=(Trigger.LOCK_LOCKED), lock__source_id=device_id)
elif state == 'Unlocked':
t = Trigger.objects.get(trigger=(Trigger.LOCK_UNLOCKED), lock__source_id=device_id)
else:
logger.error('Sensor has an unknown state of ' + state)
if t.enabled:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{fired}')
eval = True
else:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{disabled}')
eval = False
except:
eval = False
if eval:
evaluate_nuggets(t.id)
def check_security_trigger(t, who):
if t.enabled:
if t.security_armed_to:
logger.debug("Looking for triggers with security_armed_to flagged")
# only fire when the person who armed the system is attached to this trigger
if t.people.filter(user__first_name=who[0]).exists():
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{fired}')
evaluate_nuggets(t.id)
if t.security_changed_to:
logger.debug("Looking for triggers with security_changed_to flagged")
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{fired}')
evaluate_nuggets(t.id)
else:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{disabled}')
def register_security_event(the_who, state):
try:
who = the_who.split()
username = User.objects.get(first_name=(who[0]), last_name=(who[1])).username
logger.info('Security{Vivint}{' + username + '} set house to {' + state+'}')
except:
logger.info('Security{Vivint}{' + the_who + '} set house to {' + state+'}')
finally:
who = the_who.split()
try:
triggers = Trigger.objects.filter(trigger=(Trigger.SECURITY_ARMED_STATE), security_armed_state=state)
except:
logger.error(sys.exc_info()[0])
else:
logger.debug('found '+str(triggers.count())+' triggers with '+state)
# a plain loop handles zero, one, or many matching triggers
for trigger in triggers:
check_security_trigger(trigger, who)
def register_hvac_event(who, what, oldValue, newValue):
try:
v = float(oldValue)
logger.info('HvacValue:{' + who + '}{' + what + '}{' + str(oldValue) + '}{' + str(newValue)+"}")
except:
logger.info('HvacStatus:{' + who + '}{' + what + '}{' + str(oldValue) + '}{' + str(newValue)+"}")
def register_time_event(t):
if t.enabled:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{fired}')
evaluate_nuggets(t.id)
else:
logger.debug('Trigger:{' + t.name + '}{' + str(t.id) + '}{disabled}')
def is_anyone_home():
p = Person.objects.all()
for person in p:
if person.is_home:
return True
logger.debug('no one is home')
return False
def is_nugget_runable(nug):
if nug.enabled:
if not nug.only_execute_if_someone_is_home:
return True
if is_anyone_home():
return True
else:
logger.debug(nug.name + ' is disabled')
return False
def evaluate_nuggets(t_id):
nugs = Nugget.objects.filter(triggers=t_id)
for nug in nugs:
if is_nugget_runable(nug):
triggers = nug.triggers.all()
results = []
logger.debug('Evaluating: ' + nug.name + " ("+str(len(triggers))+" triggers)")
for t in triggers:
if t.id == t_id:
logger.debug(' TEST: ' + t.name + ' ' + str(t.id) + ' True')
results.append(True)
else:
if t.trigger == t.MOTION:
if t.motion_detector.source == 0:
try:
state = VivintDevice.objects.get(id=(t.motion_detector.source_id)).state
except ObjectDoesNotExist as e:
logger.error(e)
except:
logger.error("Error:"+ str(traceback.format_exc()))
else:
if state == 'Open':
state = True
elif state == 'Closed':
state = False
else:
state = False
results.append(state)
logger.debug(' TEST: ' + nug.name + ':Vivint:' + t.motion_detector.name + ' state ' + str(state))
elif t.motion_detector.source == 1:
try:
state = Sensor.objects.get(id=(t.motion_detector.source_id)).presence
except ObjectDoesNotExist as e:
logger.error(e)
except:
logger.error("Error:"+ str(traceback.format_exc()))
else:
logger.debug(' TEST: ' + nug.name + ':Hue:' + t.motion_detector.name + ' state ' + str(state))
results.append(state)
else:
logger.warning('There is no motion state lookup for source ' + str(t.motion_detector.source))
results.append(False)
elif t.trigger == t.WINDOW:
if t.window_start <= timezone.localtime().time() <= t.window_end:
logger.debug(" TEST: "+t.name + ' timeframe state True')
results.append(True)
else:
logger.debug(" TEST: "+t.name + ' timeframe state False')
results.append(False)
elif t.trigger == t.SCHEDULE:
logger.error('code has not been written for SCHEDULE trigger')
results.append(False)
elif t.trigger == t.SENSOR_OPENED:
logger.error('code has not been written for SENSOR_OPENED trigger')
results.append(False)
elif t.trigger == t.SENSOR_CLOSED:
logger.error('code has not been written for SENSOR_CLOSED trigger')
results.append(False)
elif t.trigger == t.LOCK_UNLOCKED:
logger.error('code has not been written for LOCK_UNLOCKED trigger')
results.append(False)
elif t.trigger == t.LOCK_LOCKED:
logger.error('code has not been written for LOCK_LOCKED trigger')
results.append(False)
elif t.trigger == t.HVAC_ACTIVITY:
logger.error('code has not been written for HVAC_ACTIVITY trigger')
results.append(False)
elif t.trigger == t.HVAC_FAN:
logger.error('code has not been written for HVAC_FAN trigger')
results.append(False)
elif t.trigger == t.HVAC_HITS_TEMP:
logger.error('code has not been written for HVAC_HITS_TEMP trigger')
results.append(False)
elif t.trigger == t.HVAC_HOLD:
logger.error('code has not been written for HVAC_HOLD trigger')
results.append(False)
elif t.trigger == t.HVAC_HEATMODE:
logger.error('code has not been written for HVAC_HEATMODE trigger')
results.append(False)
elif t.trigger == t.HVAC_FILTRLVL:
logger.error('code has not been written for HVAC_FILTRLVL trigger')
results.append(False)
elif t.trigger == t.HVAC_HUMLVL:
logger.error('code has not been written for HVAC_HUMLVL trigger')
results.append(False)
elif t.trigger == t.CUSTOM_EVENT:
logger.error('code has not been written for CUSTOM_EVENT trigger')
results.append(False)
elif t.trigger == t.SECURITY_ARMED_STATE:
try:
state = Panel.objects.get(id=(t.security_panel.id)).armed_state
except ObjectDoesNotExist as e:
logger.error(e)
except:
logger.error("Error:"+ str(traceback.format_exc()))
else:
if state == t.security_armed_state:
logger.debug(" TEST: "+t.security_armed_state+' matches armed state '+state)
results.append(True)
else:
logger.debug(" TEST: "+t.security_armed_state+' does not match armed state '+state)
results.append(False)
elif t.trigger == t.PEOPLE:
if t.people_has_left:
try:
for person in t.people.all():
if person.is_home:
logger.debug(" TEST: "+person.user.username + ' found to be home False')
results.append(False)
else:
logger.debug(" TEST: "+person.user.username + ' found to be not home True')
results.append(True)
except:
results.append(False)
elif t.people_has_arrived:
try:
for person in t.people.all():
if person.is_home:
logger.debug(" TEST: "+person.user.username + ' found to be home True')
results.append(True)
else:
logger.debug(" TEST: "+person.user.username + ' found not to be home False')
results.append(False)
except:
results.append(False)
else:
logger.error('No nugget evaluation has been defined for: ' + t.trigger)
logger.debug(" Results: "+str(results))
if all(results):
execute_actions(nug.id)
else:
logger.debug('Nugget ' + nug.name + ' is not runnable even though its trigger(s) fired')
def execute_actions(n_id):
nug = Nugget.objects.get(id=n_id)
for action in nug.actions.all():
if action.enabled:
if action.action_grace_period <= 0 or timezone.localtime() - action.last_action_time > timedelta(minutes=(int(action.action_grace_period))):
run_action(action,nug)
logger.debug("execute_actions:{"+action.name+"}{"+str(action.id)+"}{"+action.action+"}{"+str(action.enabled)+"}{"+nug.name+"}{"+str(nug.id)+"}")
else:
logger.debug('Not running '+action.name+' as it is within the cool down period of ' + str(action.action_grace_period))
else:
logger.debug("execute_actions:{"+action.name+"}{"+str(action.id)+"}{"+action.action+"}{"+str(action.enabled)+"}{"+nug.name+"}{"+str(nug.id)+"}")
def run_action(action, nug):
logger.debug("Attempting to run "+action.name)
if action.action == action.PLAY_SCENE:
scenes = action.scenes.all()
if scenes:
for scene in scenes:
s = Scene.objects.get(id=(scene.id))
HueAction.set_scene_trans_time(s, action.scenes_transition_time)
HueAction.play_scene(s)
# reset scene states to fast
HueAction.set_scene_trans_time(s,3)
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Query set is empty}")
elif action.action == action.PLAY_RANDOM_SCENE:
scenes = action.scenes.all()
if scenes:
scene = random.choice(scenes)
s = Scene.objects.get(id=(scene.id))
HueAction.set_scene_trans_time(s, action.scenes_transition_time)
HueAction.play_scene(s)
# reset scene states to fast
HueAction.set_scene_trans_time(s,3)
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Query set is empty}")
elif action.action == action.FLASH_SCENE:
scenes = action.scenes.all()
if scenes:
for scene in scenes:
HueAction.flash_scene(scene)
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Query set is empty}")
elif action.action == action.TURN_ON:
lights = action.lights.all()
if lights:
for light in action.lights.all():
if light.source == 1:
if light.source_type == 0:
HueAction.turn_on_light(Light.objects.get(id=(light.source_id)))
update_last_run(action, nug)
elif light.source_type == 1:
HueAction.turn_on_group(Group.objects.get(id=(light.source_id)))
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Unkown source_type "+str(light.source.type)+"}")
elif light.source == 2:
if light.source_type == 4:
if WemoAction.turn_on_light(Wemo.objects.get(id=(light.source_id))):
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Unkown source_type "+str(light.source.type)+"}")
elif light.source == 3:
if light.source_type == 3:
if DecoraAction.turn_on_light(Switch.objects.get(id=(light.source_id))):
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Unkown source_type "+str(light.source.type)+"}")
else:
logger.error("run_action:{"+action.name+"}{failed}{No source for " + str(lights.source) + " type " + str(light.source.type)+"}")
else:
logger.error("run_action:{"+action.name+"}{failed}{Query set is empty}")
elif action.action == action.TURN_OFF:
lights = action.lights.all()
if lights:
for light in lights:
if light.source == 1: # 1 hue
if light.source_type == 0: # 0 Bulb
HueAction.turn_off_light(Light.objects.get(id=(light.source_id)))
elif light.source_type == 1: # Group
HueAction.turn_off_group(Group.objects.get(id=(light.source_id)))
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Unkown source_type "+str(light.source.type)+"}")
elif light.source == 2: # 2 wemo
if light.source_type == 4: # Plug
if WemoAction.turn_off_light(Wemo.objects.get(id=(light.source_id))):
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Unkown source_type "+str(light.source.type)+"}")
elif light.source == 3: # 3 decora
if light.source_type == 3: # switch 3
if DecoraAction.turn_off_light(Switch.objects.get(id=(light.source_id))):
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Unkown source_type "+str(light.source)+"}")
else:
logger.error("run_action:{"+action.name+"}{failed}{No source for " + str(lights.source) + " type " + str(light.source.type)+"}")
else:
logger.error("run_action:{"+action.name+"}{failed}{Query set is empty}")
elif action.action == action.BLINK_HUE:
lights = action.lights.all()
if lights:
for light in lights:
if light.source == 1:
if light.source_type == 1:
HueAction.blink_group(Group.objects.get(id=(light.source_id)))
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Unkown source_type "+str(light.source)+"}")
else:
logger.error("run_action:{"+action.name+"}{failed}{No source for " + str(lights.source) + " type " + str(light.source.type)+"}")
else:
logger.error("run_action:{"+action.name+"}{failed}{Query set is empty}")
elif action.action == action.SEND_TEXT:
people = action.people.all()
if people:
for person in people:
send_text(person.text_address, action.text_message)
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Query set is empty}")
elif action.action == action.HVAC_SET_ACTIVITY:
logger.error("run_action:{"+action.name+"}{failed}{No action code has been developed for this type}")
elif action.action == action.SECURITY_SET_STATE:
logger.error("run_action:{"+action.name+"}{failed}{No action code has been developed for this type}")
elif action.action == action.PEOPLE_LEAVE:
people = action.people.all()
if people:
for p in people:
p.is_home = False
p.save()
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Query set is empty}")
elif action.action == action.PEOPLE_ARRIVE:
people = action.people.all()
if people:
for p in people:
p.is_home = True
p.save()
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{Query set is empty}")
elif action.action == action.DISABLE_TRIGGER:
triggers = action.triggers.all()
if triggers.count() == 0:
logger.warning("No triggers defined to disable")
else:
for t in triggers:
t.enabled = False
t.save()
update_last_run(action, nug)
elif action.action == action.ENABLE_TRIGGER:
triggers = action.triggers.all()
if triggers.count() == 0:
logger.warning("No triggers defined to disable")
else:
for t in triggers:
t.enabled = True
t.save()
update_last_run(action, nug)
else:
logger.error("run_action:{"+action.name+"}{failed}{No action code has been developed for this type}")
def update_last_run(action, nug):
Action.objects.filter(id=(action.id)).update(last_action_time=(timezone.localtime()))
logger.info("run_action:{"+nug.name+"}{"+action.name+"}{success}")
def send_text(phone, message):
logger.debug('attempting to send text ' + message)
sent_from = 'Home-Auto'
to = [phone]
subject = 'Home-Auto'
email_text = message
server = smtplib.SMTP_SSL('smtp.gmail.com', 465)
server.ehlo()
gmail = Account.objects.get(name='Gmail')
server.login(gmail.username, gmail.password)
server.sendmail(sent_from, to, email_text)
server.close()
logger.info("Text:{"+message+"}")
```
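`register_watcher_event` above parses a one-line drop file: a bare name fires the matching `CustomEvent` trigger, while `arrive:<username>` or `leave:<username>` flips `Person.is_home` (which in turn fires the people triggers via the `post_save` signal). A sketch of producing such a file; the watched directory path is hypothetical:
```python
# Hypothetical producer for the watcher drop file parsed by register_watcher_event().
# The directory must match an enabled watchers.models.Directory entry.
with open("/srv/homeauto/watch/event.txt", "w") as f:
    f.write("arrive:jsmith\n")   # or "leave:jsmith", or a bare CustomEvent name
```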
#### File: web_home_auto/homeauto/views.py
```python
from django.shortcuts import render
#import homeauto.models as house
#import hue.models as hue
#import homeauto.models.wemo as device
from django.http import JsonResponse
import simplejson as json
def dashboard_index(request):
# persons = house.Person.objects.all()
# services = house.Service.objects.all()
# lights = hue.Light.objects.all()
# context = {'persons':persons,
# 'services':services,
# 'lights':lights}
context = {}
return render(request, 'dashboard_index.html', context)
"""
def people_index(request):
persons = house.Person.objects.all()
context = {'persons': persons}
return render(request, 'people_index.html', context)
def person_detail(request, pk):
person = house.Person.objects.get(pk=pk)
context = {'person': person}
return render(request, 'person_detail.html', context)
def services_index(request):
services = house.Service.objects.all()
context = {'services': services}
return render(request, 'services_index.html', context)
def service_detail(request, pk):
service = house.Service.objects.get(pk=pk)
context = {'service': service}
return render(request, 'service_detail.html', context)
#def lights_index(request):
# lights = hue.Light.objects.all()
# wemos = device.Wemo.objects.all()
# context = {'lights':lights,
# 'wemos':wemos}
# return render(request, 'lights_index.html', context)
def light_detail(request):
return render(request, 'light_detail.html', context)
def groups_index(request):
return render(request, 'groups_index.html', context)
def group_detail(request):
return render(request, 'group_detail.html', context)
def scenes_index(request):
return render(request, 'scenes_index.html', context)
def scene_detail(request):
return render(request, 'scene_detail.html', context)
"""
```
#### File: web_home_auto/hue/actions.py
```python
from hue.models import Group, Light, Scene, Bridge, SceneLightstate, Sensor, Schedule
from django.db.models.signals import pre_save, post_save, post_delete
from django.dispatch import receiver
import logging, traceback, requests, json, time
from requests.exceptions import ConnectionError
logger = logging.getLogger(__name__)
# group callback to delete off hue hub
@receiver(post_delete, sender=Group)
def delete_group_on_hub(sender, instance, **kwargs):
delete_group(instance.bridge, instance.id)
def delete_group(bridge, gid):
api_url = 'http://' + bridge.ip + '/api/' + bridge.username + '/groups/'+str(gid)
delete_command(api_url)
# group calback to update on hue hub
@receiver(post_save, sender=Group)
def set_group_attributes_on_hub(sender, instance, **kwargs):
set_group_attributes(instance.bridge, instance)
def set_group_attributes(bridge, group):
api_url = 'http://' + bridge.ip + '/api/' + bridge.username + '/groups/'+str(group.id)
payload = {'name':group.name, 'lights':convert_m2m_to_json_array(group.lights.all())}
if put_command(api_url, payload):
logger.info("{group:"+group.name+"}{id:"+str(group.id)+"}")
# scene callback to delete off hue hub
@receiver(post_delete, sender=Scene)
def delete_scene_on_hub(sender, instance, **kwargs):
delete_scene(instance.bridge, instance.id)
def delete_scene(bridge, sid):
api_url = 'http://' + bridge.ip + '/api/' + bridge.username + '/scenes/'+sid
delete_command(api_url)
# scene callback to update on hue hub
@receiver(post_save, sender=Scene)
def set_scene_attributes_on_hub(sender, instance, **kwargs):
if instance.group is not None:
set_scene_attributes(instance.bridge, instance)
def set_scene_attributes(bridge, scene):
api_url = 'http://' + bridge.ip + '/api/' + bridge.username + '/scenes/'+str(scene.id)
payload = {'name':scene.name, 'lights':convert_m2m_to_json_array(scene.lights.all())}
if put_command(api_url, payload):
logger.info("{scene:"+scene.name+"}{id:"+str(scene.id)+"}")
# light callback to delete off hue hub
@receiver(post_delete, sender=Light)
def delete_light_on_hub(sender, instance, **kwargs):
delete_light(instance.bridge, instance.id)
def delete_light(bridge, lid):
api_url = 'http://' + bridge.ip + '/api/' + bridge.username + '/lights/'+str(lid)
delete_command(api_url)
# light callback to update on hue hub
@receiver(post_save, sender=Light)
def set_light_attributes_on_hub(sender, instance, **kwargs):
set_light_name(instance.bridge, instance)
set_light_state(instance.bridge, instance)
def set_light_name(bridge, light):
api_url = 'http://' + bridge.ip + '/api/' + bridge.username + '/lights/'+str(light.id)
payload = {'name':light.name}
if put_command(api_url, payload):
logger.info("{light:"+light.name+"}{id:"+str(light.id)+"}")
def set_light_state(bridge, light):
api_url = 'http://' + bridge.ip + '/api/' + bridge.username + '/lights/'+str(light.id)+"/state"
data = {}
data['on'] = light.on
data['alert'] = light.alert
if light.on:
data['hue'] = light.hue
data['effect'] = light.effect
data['bri'] = light.bri
data['sat'] = light.sat
data['ct'] = light.ct
data['xy'] = light.xy
payload = data
if put_command(api_url, payload):
logger.info("{light}:"+light.name+"}{id:"+str(light.id)+"}")
def turn_on_light(light):
api_url = 'http://' + light.bridge.ip + '/api/' + light.bridge.username + '/lights/' + str(light.id) + '/state'
payload = {'on':True, 'transitiontime':100}
if put_command(api_url, payload):
logger.info("{light:"+light.name+"}{id:"+str(light.id)+"}")
def turn_off_light(light):
api_url = 'http://' + light.bridge.ip + '/api/' + light.bridge.username + '/lights/' + str(light.id) + '/state'
payload = {'on':False, 'transitiontime':100}
if put_command(api_url, payload):
logger.info("{light:"+light.name+"}{id:"+str(light.id)+"}")
def turn_off_group(group):
api_url = 'http://' + group.bridge.ip + '/api/' + group.bridge.username + '/groups/' + str(group.id) + '/action'
payload = {'on':False, 'transitiontime':100}
if put_command(api_url, payload):
logger.info("{group:"+group.name+"}{id:"+str(group.id)+"}")
def turn_on_group(group):
api_url = 'http://' + group.bridge.ip + '/api/' + group.bridge.username + '/groups/' + str(group.id) + '/action'
payload = {'on':True, 'transitiontime':100}
if put_command(api_url, payload):
logger.info("{group:"+group.name+"}{id:"+str(group.id)+"}")
def blink_group(group):
api_url = 'http://' + group.bridge.ip + '/api/' + group.bridge.username + '/groups/' + str(group.id) + '/action'
payload = {'on':True, 'alert':'select'}
if put_command(api_url, payload):
logger.info("{group:"+group.name+"}{id:"+str(group.id)+"}")
def play_scene(scene):
api_url = 'http://' + scene.bridge.ip + '/api/' + scene.bridge.username + '/groups/' + str(scene.group.id) + '/action'
payload = {'scene': scene.id}
if put_command(api_url, payload):
logger.info("{scene:"+scene.name+"}{id:"+str(scene.id)+"}")
def create_scene(bridge, payload):
api_url = 'http://' + bridge.ip + '/api/' + bridge.username + '/scenes/'
r = post_command(api_url, payload)
if r is not None:
id = r.json()[0]['success']['id']
return id
return None
def flash_scene(scene):
# get the group of the scene
g = Scene.objects.get(id=scene.id).group
data = {}
data['name'] = "TEMP_"+scene.name
data['recycle'] = True
light_list = []
lightstates = {}
for light in g.lights.all():
light_list.append(str(light.id))
lightstates[str(light.id)] = get_light_state(light)
data['lights'] = light_list
data['lightstates'] = lightstates
temp_s_id = create_scene(g.bridge, data)
time.sleep(.2)
if temp_s_id is not None:
# play the flash scene
play_scene(scene)
time.sleep(2)
# return lights to previous state
api_url = 'http://' + g.bridge.ip + '/api/' + g.bridge.username + '/groups/' + str(g.id) + '/action'
payload = {'scene': temp_s_id}
put_command(api_url, payload)
delete_scene(g.bridge,temp_s_id)
#def restore_light_state():
def get_light_state(light):
api_url = 'http://' + light.bridge.ip + '/api/' + light.bridge.username + '/lights/' + str(light.id)
r = get_command(api_url)
if r is not None:
data = {}
lightstates = r.json()['state']
if 'on' in lightstates:
data['on'] = lightstates['on']
if 'bri' in lightstates:
data['bri'] = lightstates['bri']
if 'sat' in lightstates:
data['sat'] = lightstates['sat']
if 'ct' in lightstates:
data['ct'] = lightstates['ct']
if 'xy' in lightstates:
data['xy'] = lightstates['xy']
return data
logger.error('Unable to get light state')
return None
def convert_trans_time(transtime):
# transtime should come in as seconds, convert to multiple of 100ms
tt = ((transtime*1000)/100)
# max transitiontime allowed by the hue API (65535 x 100ms, roughly 1.8 hours); larger values fail to set
if tt > 65535:
tt = 65535
return tt
def set_scene_trans_time(scene, transtime):
transtime = convert_trans_time(transtime)
if transtime > 0:
# get the current light states in the scene to update transition times
api_url='http://'+ scene.bridge.ip +'/api/'+ scene.bridge.username +'/scenes/'+ str(scene.id)
r = get_command(api_url)
if r is not None:
json_str = json.dumps(r.json())
json_objects = json.loads(json_str)
for lights in json_objects['lightstates'].items():
lights[1]['transitiontime'] = int(transtime)
api_url='http://'+ scene.bridge.ip +'/api/'+ scene.bridge.username +'/scenes/'+ str(scene.id) +'/lightstates/'+ lights[0]
payload = lights[1]
put_command(api_url, payload)
def put_command(api_url, payload):
logger.debug('api: '+api_url)
logger.debug('payload: '+json.dumps(payload))
try:
r = requests.put(api_url, data=(json.dumps(payload)))
logger.debug('response: '+r.text)
if 'error' in r.text:
logger.error('payload tried: ' + str(payload))
for line in json.loads(r.text):
if 'error' in line:
logger.error(line)
else:
logger.debug(line)
return False
except:
logger.error('except ' + str(api_url))
logger.error('except ' + str(payload))
logger.error("Error:"+ str(traceback.format_exc()))
return False
return True
def post_command(api_url, payload):
try:
r = requests.post(api_url, data=(json.dumps(payload)))
if 'error' in r.text:
logger.error('payload tried: ' + str(payload))
for line in json.loads(r.text):
if 'error' in line:
logger.error(line)
else:
logger.debug(line)
else:
logger.info(r.text)
except:
logger.error('except ' + str(api_url))
logger.error('except ' + str(payload))
logger.error("Error:"+ str(traceback.format_exc()))
return None
else:
return r
def delete_command(api_url):
try:
r = requests.delete(api_url)
if 'error' in r.text:
logger.error('api_url tried: ' + str(api_url))
for line in json.loads(r.text):
if 'error' in line:
logger.error(line)
else:
logger.debug(line)
else:
logger.info(r.text)
except:
logger.error(str(api_url))
logger.error("Error:"+ str(traceback.format_exc()))
def get_command(api_url):
try:
r = requests.get(api_url)
except ConnectionError as e:
logger.error(str(api_url))
logger.error(str(e))
return None
except:
logger.error(str(api_url))
logger.error("Error:"+ str(traceback.format_exc()))
return None
else:
return r
def convert_m2m_to_json_array(objects):
first = True
s = []
for obj in objects:
s.append(str(obj.id))
logger.debug(obj.id)
return s
```
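`convert_trans_time` converts seconds into the bridge's 100 ms `transitiontime` units and caps the result at the API maximum of 65535 (about 1.8 hours). A quick worked example, assuming the module above is importable as `hue.actions`:
```python
from hue.actions import convert_trans_time

print(convert_trans_time(10))    # 100.0  -> ten seconds becomes 100 x 100ms steps
print(convert_trans_time(120))   # 1200.0 -> two minutes
print(convert_trans_time(7200))  # 65535  -> capped at the bridge maximum
```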
#### File: web_home_auto/jobs/fields.py
```python
from functools import partial
from itertools import groupby
from operator import attrgetter
from django.forms.models import ModelChoiceIterator, ModelChoiceField
class GroupedModelChoiceIterator(ModelChoiceIterator):
def __init__(self, field, groupby):
self.groupby = groupby
super().__init__(field)
def __iter__(self):
if self.field.empty_label is not None:
yield ("", self.field.empty_label)
queryset = self.queryset
# Can't use iterator() when queryset uses prefetch_related()
if not queryset._prefetch_related_lookups:
queryset = queryset.iterator()
for group, objs in groupby(queryset, self.groupby):
yield (group, [self.choice(obj) for obj in objs])
class GroupedModelChoiceField(ModelChoiceField):
def __init__(self, *args, choices_groupby, **kwargs):
if isinstance(choices_groupby, str):
choices_groupby = attrgetter(choices_groupby)
elif not callable(choices_groupby):
raise TypeError('choices_groupby must either be a str or a callable accepting a single argument')
self.iterator = partial(GroupedModelChoiceIterator, groupby=choices_groupby)
super().__init__(*args, **kwargs)
```
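`GroupedModelChoiceField` renders its choices as HTML `<optgroup>`s, grouped by the attribute (or callable) passed as `choices_groupby`; the queryset must be ordered by that attribute for `itertools.groupby` to group correctly. A hypothetical form using the `Command`/`Group` models defined in `jobs/models.py` below:
```python
# Hypothetical form; assumes this field lives in jobs/fields.py and the models in jobs/models.py.
from django import forms
from jobs.fields import GroupedModelChoiceField
from jobs.models import Command

class JobCommandForm(forms.Form):
    command = GroupedModelChoiceField(
        queryset=Command.objects.select_related("group").order_by("group__name", "name"),
        choices_groupby="group",
    )
```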
#### File: web_home_auto/jobs/models.py
```python
from django.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
class Group(models.Model):
name = models.CharField(max_length=48)
def __str__(self):
return '{}'.format(self.name)
class Command(models.Model):
name = models.CharField(max_length=60)
group = models.ForeignKey(Group, blank=False, on_delete=models.CASCADE)
def __str__(self):
return '{}'.format(self.name)
class Job(models.Model):
command = models.OneToOneField(Command, on_delete=models.CASCADE, blank=True, null=True)
interval = models.IntegerField(default=600,validators=[MinValueValidator(0),MaxValueValidator(3600)], verbose_name='Run Interval (seconds)' )
enabled = models.BooleanField(default=False)
```
#### File: web_home_auto/vivint/apps.py
```python
from django.apps import AppConfig
import logging, sys
logger = logging.getLogger(__name__)
class VivintConfig(AppConfig):
name = 'vivint'
verbose_name = "Vivint Security System"
if 'runserver' in sys.argv:
def ready(self):
logger.debug('Starting Vivint App')
import vivint.jobs as jobs
jobs.start()
```
#### File: web_home_auto/watchers/apps.py
```python
from django.apps import AppConfig
import time, logging, os, sys
from watchdog.events import PatternMatchingEventHandler
from watchdog.observers import Observer
logger = logging.getLogger(__name__)
class WatchersConfig(AppConfig):
name = 'watchers'
verbose_name = "Directory Watchers"
if 'runserver' in sys.argv:
def ready(self):
logger.debug('Starting Watcher App')
self.clean()
self.start()
def clean(self):
from watchers.models import Directory
directories = Directory.objects.all()
if directories:
for directory in directories:
if directory.enabled:
# cleanup directory before starting
filelist = [ f for f in os.listdir(directory.directory) ]
for f in filelist:
os.remove(os.path.join(directory.directory, f))
logger.warning("cleaning up file: "+f)
else:
logger.warning('Directory: ' + directory.directory + ' is not enabled')
else:
logger.error('Will not start watchers on directories as none exist')
def start(self):
from watchers.models import Directory
directories = Directory.objects.all()
if directories:
for directory in directories:
if directory.enabled:
logger.info("Starting Watcher on: "+directory.directory)
w = Watcher(directory.directory)
try:
w.run()
except KeyboardInterrupt:
logger.error('Watcher Stopped.')
else:
logger.warning('Directory: ' + directory.directory + ' is not enabled')
else:
logger.error('Will not start watchers on directories as none exist')
class Watcher:
def __init__(self, dir):
self.observer = Observer()
self.DIRECTORY_TO_WATCH = dir
def run(self):
observer = Observer()
observer.schedule(event_handler=Handler('*'), path=self.DIRECTORY_TO_WATCH)
observer.daemon = False
try:
observer.start()
except KeyboardInterrupt:
logger.error('Watcher Stopped.')
observer.join(2)
class Handler(PatternMatchingEventHandler):
@staticmethod
def on_any_event(event):
from homeauto.house import register_watcher_event
logger.debug('New event - %s.' % event)
if event.event_type == 'created':
register_watcher_event(event)
``` |
{
"source": "7oshi7/Two-Circles",
"score": 3
} |
#### File: 7oshi7/Two-Circles/main.py
```python
import os
import sys
from main_ui import Ui_MainWindow
from help_ui import Ui_Form
from PySide6.QtWidgets import QApplication, QDialog, QMainWindow
from PySide6 import QtGui
import matplotlib.pyplot as plt
def resource_path(relative_path):
""" Get absolute path to resource, works for dev and for PyInstaller """
try:
# PyInstaller creates a temp folder and stores path in _MEIPASS
base_path = sys._MEIPASS
except Exception:
base_path = os.path.abspath(".")
return os.path.join(base_path, relative_path)
class MainWindow(QMainWindow):
# Initialize main window
def __init__(self):
QMainWindow.__init__(self)
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
self.setWindowIcon(QtGui.QIcon(resource_path('icon.png'))) # set icon
# Connect buttons to functions
self.ui.build_button.clicked.connect(self.process_input)
self.ui.help_button.clicked.connect(self.help_message)
# Call help window to appear
def help_message(self):
self.help = HelpWindow()
self.help.show()
# Process user's input
def process_input(self):
try:
# Read data from text fields and assign them to variables
x = self.ui.xLineEdit.text()
y = self.ui.yLineEdit.text()
r = self.ui.rLineEdit.text()
x2 = self.ui.x2LineEdit.text()
y2 = self.ui.y2LineEdit.text()
r2 = self.ui.r2LineEdit.text()
t = [x, y, r, x2, y2, r2] # Pack vars into list
# Iterate through list 't', replacing commas with dots
# also converting strings to float numbers and appending
# them to 'coordinates' list.
coordinates = []
for i in t:
i = i.replace(',', '.')
i = float(i)
coordinates.append(i)
self.ui.status_bar.showMessage('') # Clear status bar
draw(coordinates) # Call draw function with coordinates
# Catch invalid input and display error message in status bar
except ValueError:
self.ui.status_bar.showMessage(
'Input error! Please, use numbers only.')
class HelpWindow(QDialog):
# Initialize help window
def __init__(self):
QDialog.__init__(self)
self.ui = Ui_Form()
self.ui.setupUi(self)
self.setWindowIcon(QtGui.QIcon(resource_path('info.png'))) # set icon
# Making 'Close' button to close help window
self.ui.close_button.clicked.connect(self.close)
# Draw circles with given coordinates using matplotlib
def draw(coordinates):
x, y, r, x2, y2, r2 = coordinates # unpack variables
fig, ax = plt.subplots()
fig.patch.set_facecolor('xkcd:gray')
ax.set_facecolor('xkcd:dark gray')
ax.add_patch(plt.Circle((x, y), r, color='white', alpha=0.5))
ax.add_patch(plt.Circle((x2, y2), r2, color='#00ffff', alpha=0.5))
ax.set_aspect('equal', adjustable='datalim')
ax.plot()
plt.show()
# Run app
if __name__ == "__main__":
app = QApplication(sys.argv)
window = MainWindow()
window.show()
sys.exit(app.exec())
``` |
{
"source": "7ossamamahmoud/Bioinformatics-Algorithms",
"score": 3
} |
#### File: Bioinformatics-Algorithms/BLAST DNA/DNA.py
```python
Match ={
'A':{'A':1, 'T':2, 'G':3, 'C':4},
'T':{'A':2, 'T':1, 'G':2, 'C':2},
'G':{'A':3, 'T':2, 'G':1, 'C':3},
'C':{'A':4, 'T':2, 'G':3, 'C':1}
}
# multiple sequences, one sequence per line
def Database(filename):
file=open(filename)
DB=[]
for line in file:
DB.append(line.rstrip())
file.close()
return DB
# one sequence in a single line
def Sequance(filename):
file = open(filename)
Seq=(file.read())
file.close()
return Seq
def GetWords(seq):
Words=[]
for i in range(len(seq)):
if len(seq[i:i+11]) < 11:
continue
else:
Words.append(seq[i:i+11])
return Words
def Checkwords(Words,Seq):
if len(Words)==len(Seq)-11+1:
return True
else:
return False
def GetNeighborhood(Words,T):
list=['A','T','C','G']
Neighborhood=[]
seeds = []
for i in range (len(Words)):
for j in range (len(Words[i])):
for k in range(len(list)):
score = 0
add=Words[i].replace(Words[i][j],list[k])
if len(add)==11 and add not in Neighborhood:
for x in range (len(add)):
score+=Match[add[x]][Words[i][x]]
#print("word",Words[i],"add",add,"score",score)
Neighborhood.append(add)
if score >= T :
seeds.append((add,i))
return Neighborhood,seeds
# each hit contains: [0] index in the query, [1] which sequence in the DB, [2] index within that DB sequence, [3] the seed that produced the hit
def Gethits(Db,seeds):
Hitindex=[]
for i in range(len(seeds)):
for j in range(len(Db)):
if seeds[i][0] in Db[j]:
indexindb=Db[j].find(seeds[i][0])
Hitindex.append((seeds[i][1],j,indexindb,seeds[i][0]))
return Hitindex
def Extend(hits,seq,db):
HSP = []
    #################Forward#################
for i in range (len(hits)):
finalSEQ = ""
finalDb = ""
count = 1
oldscore = 0
newscore = 0
forword = 0
backword = 0
        # how far can we extend forward and backward
maxcount = 0
if len(seq) < len(db):
minlen = len(seq)
forword = minlen - 11 - (hits[i][0])
backword = len(seq) - 11 - forword
maxcount = min(forword,backword)
elif len(db[hits[i][1]]) < len(seq):
minlen = len(db[hits[i][1]])
forword = minlen - 11 - (hits[i][2])
backword = len(db[hits[i][1]]) - 11 - forword
maxcount = min(forword,backword)
#old score between seed and hit in db
sindex = hits[i][0]
dseq = db[hits[i][1]]
dindex = hits[i][2]
query = hits[i][3]
dbbbb = dseq[dindex:dindex + 11]
finalSEQ += query
finalDb += dbbbb
for k in range(len(hits[i][3])):
oldscore += Match[query[k]][dbbbb[k]]
        # can't extend in both directions
if maxcount == 0:
            # no extension possible
if forword == 0 and backword == 0:
HSP.append((query,dbbbb,oldscore,"noextend"))
#extend forword
elif forword > 0 and backword == 0:
countf = 0
newscore += oldscore
for a in range(forword):
                    # one character at a time, moving forward
queryword = seq[sindex + 11 + countf]
##
dbword = dseq[dindex + 11 + countf]
# newscore
newscore += Match[queryword][dbword]
if newscore < oldscore:
HSP.append((finalSEQ, finalDb, oldscore,"f"))
continue
else:
finalSEQ += queryword
finalDb += dbword
oldscore = newscore
countf += 1
HSP.append((finalSEQ, finalDb, newscore,"f"))
            # extend backward
elif forword == 0 and backword > 0:
countb = 0
newscore += oldscore
for j in range(backword):
                    # one character at a time, moving backward
queryword = seq[sindex - countb]
##
dbword = dseq[dindex - countb]
#newscore
newscore += Match[queryword][dbword]
if newscore < oldscore:
finalSEQ += query
finalDb += dbbbb
HSP.append((finalSEQ, finalDb, newscore, "b"))
continue
else:
finalSEQ += queryword
finalDb += dbword
oldscore = newscore
countb += 1
        # extend in both directions
elif maxcount>0:
for j in range(maxcount):
                # forward & backward
sindex = hits[i][0]
                # build: one character from behind + seed + one character from the front
queryword = seq[sindex - count]
queryword += query
queryword += seq[sindex+2+count]
##
dbword = dseq[dindex - count]
dbword += dbbbb
dbword += dseq[dindex+2+count]
#newscore
for l in range(len(dbword)):
newscore += Match[queryword[l]][dbword[l]]
if newscore < oldscore:
HSP.append((finalSEQ,finalDb,oldscore,"f&b"))
continue
else:
HSP.append((queryword,dbword,newscore,"f&b"))
count += 1
return HSP
def CalcScore(H):
Score = 0
for i in range (len(H)):
Score += H[i][2]
return Score
Db=Database("DNADatabase.txt")
Seq=Sequance("DNA.txt")
Words=GetWords(Seq)
Check = Checkwords(Words,Seq)
neighborhood,seeds=GetNeighborhood(Words,1)
hits=Gethits(Db,seeds)
Hsps = Extend(hits,Seq,Db)
print(hits)
print(Hsps)
print("Score =",CalcScore(Hsps))
'''print(data)
print(Seq)
print(Words)
print(Check)
print(neighborhood)
print(seeds)
#print(len(neighborhood))
print(hits)'''
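
# --- Hedged illustration (added for clarity, not in the original script) ---
# Shows the word/seed generation steps on a small in-memory sequence instead of
# the input files; the toy query and the threshold value are arbitrary choices.
def _demo_seed_generation():
    toy_query = "ATGCGTACGTTAGC"        # length 14 -> 4 overlapping words of length 11
    toy_words = GetWords(toy_query)      # ['ATGCGTACGTT', 'TGCGTACGTTA', 'GCGTACGTTAG', 'CGTACGTTAGC']
    # GetNeighborhood substitutes one position at a time and keeps the variants whose
    # score against the original word (using the Match matrix) reaches the threshold T
    toy_neighborhood, toy_seeds = GetNeighborhood(toy_words, 15)
    print(len(toy_words), len(toy_neighborhood), len(toy_seeds))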
```
#### File: 7ossamamahmoud/Bioinformatics-Algorithms/Phylogenetic Tree.py
```python
def Alphabetical_Order(Start, End):
ClusterPairs = []
    for i in range(ord(Start), ord(End) + 1):  # ord(): takes a character and returns its Unicode code point
ClusterPairs.append(
            chr(i))  # chr(): takes an integer (a Unicode code point) and returns the corresponding character
return ClusterPairs
def MaTMiniCell(Matrix):
    # positive infinity -> a safe initial value when searching for the minimum cell; handles negative and positive floats
MinCell = float("inf")
a = -1
b = -1
for i in range(len(Matrix)):
for j in range(len(Matrix[i])):
if Matrix[i][j] < MinCell:
MinCell = Matrix[i][j]
a = i
b = j
return a, b # Return the indexes of the First Min Cell in the Matrix
def PairingOfLeaves(ClusterPairs, F_Index, S_Index):
if S_Index < F_Index:
F_Index, S_Index = S_Index, F_Index # The Way I Swap the Indexes In Python
# Combining the Pairs In 1st Index of the Paired Leaves
ClusterPairs[F_Index] = "(" + ClusterPairs[F_Index] + " , " + ClusterPairs[S_Index] + ")"
    del ClusterPairs[S_Index]  # immediately remove the now-redundant pair at the second index
print(ClusterPairs)
def RebuildingZMatrix(matrix, F_Index, S_Index):
if S_Index < F_Index:
F_Index, S_Index = S_Index, F_Index # Swapping the Indexes
row = []
print("Distance Matrix : " ,matrix, "\n")
# Rebuild the Row indexes
for i in range(0, F_Index):
# Calculate Avg. of two cell that they in the same column and after finishing this we del the redundant row at last step
row.append((matrix[F_Index][i] + matrix[S_Index][i]) / 2)
# Put the Paired Leaves in the 1st Index of row ex. if f.i=0 , s.i=1 --> index of combined leaves is = 0
matrix[F_Index] = row
# Hint : Second row now contains the values of the row below and so on Where 2nd_Row_indexes < S_index for Column
for i in range(F_Index + 1, S_Index):
matrix[i][F_Index] = (matrix[i][F_Index] + matrix[S_Index][i]) / 2
# Calculate the remaining rows' Values given the row position of the min cell
for i in range(S_Index + 1, len(matrix)):
matrix[i][F_Index] = (matrix[i][F_Index] + matrix[i][S_Index]) / 2
        del matrix[i][S_Index]  # immediately remove the redundant column at the second index
    del matrix[S_Index]  # immediately remove the redundant row at the second index
def UPGMA(matrix, ClusterPairs):
while ( len(ClusterPairs) > 1 ):
i, j = MaTMiniCell(matrix)
PairingOfLeaves(ClusterPairs, i, j) # Update Cluster Pairs
RebuildingZMatrix(matrix, i, j) # Combine and rebuild Matrix Values
print("\nFinal Clusters : ")
return ClusterPairs[0] # Return the final Cluster Pairs
# Main
Matrix_Characters = Alphabetical_Order("A", "E")
MATRIX = [
[], # A
[20], # B
[60, 50], # C
[100, 90, 40], # D
[90, 80, 50, 30], # E
]
print(UPGMA(MATRIX, Matrix_Characters))
''' [] j=0 j=1 j=2 j=3 j=4
- A B C D E
i=0 A 0
i=1 B 20 0
i=2 C 60 50 0
i=3 D 100 90 40 0
i=4 E 90 80 50 30 0
Length of Matrix is = 5
'''
``` |
{
"source": "7own/FGT_Auditor",
"score": 3
} |
#### File: 7own/FGT_Auditor/parse_vip.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from codecs import open
from os import path
import sys
import re
import csv
import os
# OptionParser imports
from optparse import OptionParser
from optparse import OptionGroup
# Python 2 and 3 compatibility
if (sys.version_info < (3, 0)):
fd_read_options = 'rb'
fd_write_options = 'wb'
else:
fd_read_options = 'r'
fd_write_options = 'w'
# Handful patterns
# -- Entering vip definition block
p_entering_vip_block = re.compile(r'^\s*config firewall vip$', re.IGNORECASE)
p_entering_subvip_block = re.compile(r'^\s*config .*$', re.IGNORECASE)
# -- Exiting vip definition block
p_exiting_vip_block = re.compile(r'^end$', re.IGNORECASE)
# -- Commiting the current vip definition and going to the next one
p_vip_next = re.compile(r'^next$', re.IGNORECASE)
# -- vip number
p_vip_number = re.compile(r'^\s*edit\s+(?P<vip_number>\S*)', re.IGNORECASE)
# -- vip setting
p_vip_set = re.compile(r'^\s*set\s+(?P<vip_key>\S+)\s+(?P<vip_value>.*)$', re.IGNORECASE)
# Functions
def parse(options,full_path):
"""
Parse the data according to several regexes
"""
global p_entering_vip_block, p_exiting_vip_block, p_vip_next, p_vip_number, p_vip_set
in_vip_block = False
vip_list = []
vip_elem = {}
order_keys = []
if (options.input_file != None):
with open(options.input_file, mode=fd_read_options) as fd_input:
for line in fd_input:
line = line.strip()
# We match a vip block
if p_entering_vip_block.search(line):
in_vip_block = True
# We are in a vip block
if in_vip_block:
if p_vip_number.search(line):
vip_number = p_vip_number.search(line).group('vip_number')
vip_number = re.sub('["]', '', vip_number)
vip_elem['id'] = vip_number
if not('id' in order_keys):
order_keys.append('id')
# We match a setting
if p_vip_set.search(line):
vip_key = p_vip_set.search(line).group('vip_key')
if not(vip_key in order_keys):
order_keys.append(vip_key)
vip_value = p_vip_set.search(line).group('vip_value').strip()
vip_value = re.sub('["]', '', vip_value)
vip_elem[vip_key] = vip_value
# We are done with the current vip id
if p_vip_next.search(line):
vip_list.append(vip_elem)
vip_elem = {}
# We are exiting the vip block
if p_exiting_vip_block.search(line):
in_vip_block = False
return (vip_list, order_keys)
else:
# for files in os.listdir(os.path.abspath(options.input_folder)):
with open(full_path, mode=fd_read_options) as fd_input:
for line in fd_input:
line = line.strip()
# We match a vip block
if p_entering_vip_block.search(line):
in_vip_block = True
# We are in a vip block
if in_vip_block:
if p_vip_number.search(line):
vip_number = p_vip_number.search(line).group('vip_number')
vip_number = re.sub('["]', '', vip_number)
vip_elem['id'] = vip_number
if not('id' in order_keys):
order_keys.append('id')
# We match a setting
if p_vip_set.search(line):
vip_key = p_vip_set.search(line).group('vip_key')
if not(vip_key in order_keys):
order_keys.append(vip_key)
vip_value = p_vip_set.search(line).group('vip_value').strip()
vip_value = re.sub('["]', '', vip_value)
vip_elem[vip_key] = vip_value
# We are done with the current vip id
if p_vip_next.search(line):
vip_list.append(vip_elem)
vip_elem = {}
# We are exiting the vip block
if p_exiting_vip_block.search(line):
in_vip_block = False
return (vip_list, order_keys)
``` |
{
"source": "7pairs/django-textsearch-ja",
"score": 2
} |
#### File: django-textsearch-ja/dtj/models.py
```python
from django.db import models
class VectorField (models.Field):
def __init__( self, *args, **kwargs ):
kwargs['null'] = True
kwargs['editable'] = False
kwargs['serialize'] = False
super( VectorField, self ).__init__( *args, **kwargs )
def db_type( self, connection=None ):
return 'tsvector'
class SearchableModel (models.Model):
search_index = VectorField()
class Meta:
abstract = True
def update_index( self ):
if hasattr( self, '_search_manager' ):
self._search_manager.update_index( pk=self.pk )
def save( self, *args, **kwargs ):
super( SearchableModel, self ).save( *args, **kwargs )
if hasattr( self, '_auto_reindex' ):
if self._auto_reindex:
self.update_index()
else:
self.update_index()
class SearchManager (models.Manager):
def __init__( self, fields=None, config=None ):
self.fields = fields
self.default_weight = 'A'
self.config = config and config or 'pg_catalog.english'
self._vector_field_cache = None
super( SearchManager, self ).__init__()
def contribute_to_class( self, cls, name ):
setattr( cls, '_search_manager', self )
super( SearchManager, self ).contribute_to_class( cls, name )
def _find_text_fields( self ):
fields = [f for f in self.model._meta.fields if isinstance(f,(models.CharField,models.TextField))]
return [f.name for f in fields]
def _vector_field( self ):
if self._vector_field_cache is not None:
return self._vector_field_cache
vectors = [f for f in self.model._meta.fields if isinstance(f,VectorField)]
if len(vectors) != 1:
raise ValueError( "There must be exactly 1 VectorField defined for the %s model." % self.model._meta.object_name )
self._vector_field_cache = vectors[0]
return self._vector_field_cache
vector_field = property( _vector_field )
def _vector_sql( self, field, weight=None ):
if weight is None:
weight = self.default_weight
f = self.model._meta.get_field( field )
return "setweight( to_tsvector( '%s', coalesce(\"%s\",'') ), '%s' )" % (self.config, f.column, weight)
def update_index( self, pk=None ):
from django.db import connection
clauses = []
if self.fields is None:
self.fields = self._find_text_fields()
if isinstance( self.fields, (list,tuple) ):
for field in self.fields:
clauses.append( self._vector_sql(field) )
else:
for field, weight in self.fields.items():
clauses.append( self._vector_sql(field,weight) )
vector_sql = ' || '.join( clauses )
where = ''
if pk is not None:
if isinstance( pk, (list,tuple) ):
ids = ','.join( [str(v) for v in pk] )
where = " WHERE \"%s\" IN (%s)" % (self.model._meta.pk.column, ids)
else:
where = " WHERE \"%s\" = %s" % (self.model._meta.pk.column, pk)
sql = "UPDATE \"%s\" SET \"%s\" = %s%s;" % (self.model._meta.db_table, self.vector_field.column, vector_sql, where)
cursor = connection.cursor()
cursor.execute( sql )
def search( self, query, rank_field=None, rank_normalization=32, use_web_query=False ):
if use_web_query:
to_tsquery_string = "to_tsquery('%s',web_query('%s'))"
else:
to_tsquery_string = "to_tsquery('%s','%s')"
cond = unicode(query).translate({
ord(u"'"): u"''",
ord(u"%"): None,
ord(u"("): None,
ord(u")"): None,
ord(u"|"): None,
})
ts_query = to_tsquery_string % (self.config, cond)
where = "\"%s\" @@ %s" % (self.vector_field.column, ts_query)
select = {}
order = []
if rank_field is not None:
select[rank_field] = 'ts_rank( "%s", %s, %d )' % (self.vector_field.column, ts_query, rank_normalization)
order = ['-%s' % rank_field]
return self.all().extra( select=select, where=[where], order_by=order )
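
# --- Hedged usage sketch (illustration only, not from the original module) ---
# The model below is an assumption showing how SearchableModel and SearchManager
# are meant to be combined; the field names, weights and the 'japanese' text
# search configuration are examples, not values taken from this project.
class Article(SearchableModel):
    title = models.CharField(max_length=200)
    body = models.TextField()
    # weight title ('A') above body ('B'); config must name a text search
    # configuration installed in PostgreSQL (e.g. one created by textsearch_ja)
    objects = SearchManager(fields={'title': 'A', 'body': 'B'}, config='japanese')

# After instances are saved (save() refreshes search_index automatically):
#   Article.objects.search(u'query terms', rank_field='rank')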
``` |
{
"source": "7pairs/pyny",
"score": 2
} |
#### File: pyny/pyny/api.py
```python
import json
from urllib.request import urlopen
# URL of the Nagareyama City open data Web API
NAGAREYAMA_WEB_API_URL = 'http://nagareyama.ecom-plat.jp/map/api/feature/8?layers=%s&pagenum=%d'
class WebApiError(Exception):
"""
    Exception indicating that a request to the Web API did not complete successfully.
"""
def get_by_id(layer_id, feature_id):
"""
    Retrieve the data matching the given layer ID and feature ID.
    :param layer_id: layer ID
    :type layer_id: str
    :param feature_id: feature ID
    :type feature_id: int
    :return: the matching data if it exists, otherwise None
    :rtype: dict
"""
    # retrieve all the data
all_data = get_all_data(layer_id)
    # search for the data whose feature ID matches
for data in all_data:
if data['feature_id'] == feature_id:
return data
else:
return None
def get_data(layer_id, count):
"""
    Retrieve the specified number of records matching the given layer ID.
    :param layer_id: layer ID
    :type layer_id: str
    :param count: number of records
    :type count: int
    :return: list of records, each organized as a dict
    :rtype: list
"""
    # retrieve the data for this layer ID
data = _get_json(NAGAREYAMA_WEB_API_URL % (layer_id, count))
return data['results']
def get_all_data(layer_id):
"""
    Retrieve all data matching the given layer ID.
    :param layer_id: layer ID
    :type layer_id: str
    :return: list of records, each organized as a dict
    :rtype: list
"""
    # get the number of records
data_count = get_data_count(layer_id)
    # retrieve all the data for this layer ID
return get_data(layer_id, data_count)
def get_data_count(layer_id):
"""
    Retrieve the number of records matching the given layer ID.
    :param layer_id: layer ID
    :type layer_id: str
    :return: number of records
    :rtype: int
"""
    # get the number of records
data = _get_json(NAGAREYAMA_WEB_API_URL % (layer_id, 1))
return int(data['num'])
def _get_json(url):
"""
    Access the given URL with a GET request and return the resulting JSON as a Python object.
    :param url: URL
    :type url: str
    :return: the JSON content converted to a Python object
    :rtype: dict
    :raises WebApiError: the request to the Web API did not complete successfully
"""
    # fetch the JSON and convert it to a Python object
try:
with urlopen(url) as response:
encoding = response.headers.get_content_charset() or 'utf-8'
return json.loads(response.read().decode(encoding, 'ignore'))
except Exception as e:
raise WebApiError(e)
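
# --- Hedged usage sketch (illustration only, not part of the original module) ---
# The layer ID below is a placeholder; real layer IDs come from the Nagareyama
# open data map service and are not known here.
if __name__ == '__main__':
    example_layer_id = 'c1161'  # placeholder layer ID (assumption)
    print(get_data_count(example_layer_id))   # number of records in the layer
    print(get_data(example_layer_id, 5))      # first five records
    print(get_by_id(example_layer_id, 1))     # record whose feature_id is 1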
``` |
{
"source": "7Pettay7/CoinBot",
"score": 3
} |
#### File: CoinBot/coinbot/bot.py
```python
import nextcord
from nextcord.ext import commands
from setup import Configuration as config
from controller import Controller
setup = config()
data = Controller(setup.get_secrets())
# Bot config
# ------------------------------------------------------
help_command = commands.DefaultHelpCommand(
no_category='Commands')
bot = commands.Bot(command_prefix='!',
help_command=help_command,
description='CoinBot is used to retrieve cryptocurrency data along with users\' portfolio data.')
class Coinbot:
"""Class to utilize the Discord API to receive commands from user"""
@bot.command(help='Gets balance of a specific currency in user\'s portfolio. Ex: balance BTC')
async def balance(ctx, ticker):
crypto = data.portfolio_data()[ticker]
embed = nextcord.Embed(
title='__**{}:**__'.format(ticker),
timestamp=ctx.message.created_at)
embed.add_field(
name='Balances:',
value='> Dollars: ${:.2f}\n> Units: {:.4f}'.format(
crypto['dollars'],
crypto['units']))
await ctx.send(embed=embed)
@bot.command(help='Gets user\'s portfolio data of all owned currencies')
async def portfolio(ctx):
portfolio = data.portfolio_data()[0]
portfolio_total = data.portfolio_data()[1]
prices = data.current_data()
embed = nextcord.Embed(
title='__**Portfolio:**__')
for key, val in portfolio.items():
embed.add_field(
name='{}'.format(key),
value='> Dollars: ${:.2f}\n> Units: {:.4f}\n> Sell price: ${}'.format(
val['dollars'],
val['units'],
prices[key]),
inline=True)
embed.add_field(name='__TOTAL__',
value='> ${:.2f}'.format(
portfolio_total
))
await ctx.send(embed=embed)
@bot.event
async def on_ready():
print('Successfully started!')
if __name__ == '__main__':
bot.run(setup.get_secrets()[3])
``` |
{
"source": "7Puentes/Eco7P",
"score": 3
} |
#### File: Eco7P/pair/__init__.py
```python
import itertools

import numpy as np
import pandas as pd


def combomaker(x):
combos = itertools.combinations(x, 2)
usable_pairs = []
for i in combos:
usable_pairs.append(i)
return usable_pairs
# Takes a list of ticker symbols as an input and returns a list of
# all possible combinations without repetition
# In[24]:
def ssd(X,Y):
#This function returns the sum of squared differences between two lists, in addition the
#standard deviation of the spread between the two lists are calculated and reported.
spread = [] #Initialize variables
std = 0
cumdiff = 0
for i in range(len(X)): #Calculate and store the sum of squares
cumdiff += (X[i]-Y[i])**2
spread.append(X[i]-Y[i])
std = np.std(spread) #Calculate the standard deviation
return cumdiff,std
# In[28]:
def pairsmatch(x, daxnorm):
allpairs = combomaker(x)
squared = []
std = []
for i in allpairs:
squared.append(ssd(daxnorm[i[0]],daxnorm[i[1]])[0])
std.append(ssd(daxnorm[i[0]],daxnorm[i[1]])[1])
distancetable = pd.DataFrame({'Pair' : allpairs, 'SSD' : squared, 'Standard Deviation' : std})
    distancetable.sort_values(by=['SSD'], ascending=True, inplace=True)  # DataFrame.sort was removed in modern pandas
return distancetable
def norming(x):
#if isinstance(x, basestring):
# return x
return x / x[0]
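
# --- Hedged usage sketch (illustration only, not part of the original module) ---
# Shows the intended pipeline on a tiny made-up price table: normalise each
# series to its first value, then rank all ticker pairs by the sum of squared
# differences of the normalised prices.
if __name__ == '__main__':
    toy_prices = pd.DataFrame({
        'AAA': [10.0, 10.5, 11.0, 10.8],
        'BBB': [20.0, 21.0, 22.1, 21.5],
        'CCC': [5.0, 4.9, 5.2, 5.1],
    })
    toy_norm = toy_prices.apply(norming)   # each column divided by its first value
    ranking = pairsmatch(toy_prices.columns, toy_norm)
    print(ranking)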
``` |
{
"source": "7Rd2d/sqlmapper",
"score": 3
} |
#### File: sqlmapper/examples/aio_mysql0.py
```python
import asyncio
from sqlmapper.aio import Connection
async def main():
db = await Connection(host='127.0.0.1', user='root', db='example', autocreate=True, read_commited=True)
await db.book.add_column('id', 'int', primary=True, auto_increment=True, exist_ok=True)
await db.book.add_column('name', 'text', exist_ok=True)
await db.book.add_column('value', 'int', exist_ok=True)
await db.book.insert({'name': 'ubuntu', 'value': 16})
await db.commit()
d = await db.book.find_one(1)
print(d)
await db.book.update(1, {'value': 18})
await db.commit()
for d in await db.book.find({'name': 'ubuntu'}):
print(d)
await db.book.delete({'value': 18})
await db.commit()
print(await db.book.count())
async for name in db:
print('table', name)
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
```
#### File: sqlmapper/examples/context.py
```python
from sqlmapper import Connection
def main():
db = Connection(host='127.0.0.1', user='root', db='example', autocreate=True, read_commited=True)
def add_row(name):
with db:
print('insert', name)
db.book.insert({'name': name})
@db.on_commit
def msg():
print('commit', name)
add_row('RedHat')
print()
with db:
add_row('Linux')
add_row('Ubuntu')
add_row('Debian')
print('* group commit')
if __name__ == '__main__':
main()
```
#### File: sqlmapper/examples/example0.py
```python
from sqlmapper import Connection
def run_mysql():
run(Connection(host='127.0.0.1', user='root', db='example', autocreate=True, read_commited=True))
def run_psql():
run(Connection(engine='postgresql', host='127.0.0.1', user='postgres', password='<PASSWORD>', db='example', autocreate=True))
def run_sqlite():
run(Connection(engine='sqlite'))
def run(db):
db.book.add_column('id', 'int', primary=True, auto_increment=True, exist_ok=True)
db.book.add_column('name', 'text', exist_ok=True)
db.book.add_column('value', 'int', exist_ok=True)
db.book.insert({'name': 'ubuntu', 'value': 16})
db.commit()
d = db.book.find_one(1)
print(d)
db.book.update(1, {'value': 18})
db.commit()
for d in db.book.find({'name': 'ubuntu'}):
print(d)
db.book.delete({'value': 18})
db.commit()
print(db.book.count())
if __name__ == '__main__':
run_psql()
run_mysql()
run_sqlite()
```
#### File: sqlmapper/examples/tablelist.py
```python
from sqlmapper import Connection
def main():
db = Connection(host='127.0.0.1', user='root', db='example', autocreate=True)
for table in db:
print(table)
if __name__ == '__main__':
main()
```
#### File: sqlmapper/sqlmapper/base_engine.py
```python
class MultiException(Exception):
def __init__(self, e):
super(MultiException, self).__init__()
self.exceptions = e
class BaseEngine(object):
def __init__(self):
if not hasattr(self, 'local'):
self.local = type('local', (object,), {})()
self.thread_init()
def thread_init(self):
if hasattr(self.local, 'tables'):
return
self.local.tables = {}
self.local.commit = []
self.local.rollback = []
def fire_event(self, success):
self.thread_init()
fnlist = self.local.commit if success else self.local.rollback
self.local.commit = []
self.local.rollback = []
exceptions = []
for fn in fnlist:
try:
fn()
except Exception as e:
exceptions.append(e)
if exceptions:
if len(exceptions) == 1:
raise exceptions[0]
else:
raise MultiException(exceptions)
def on_commit(self, fn):
self.thread_init()
self.local.commit.append(fn)
def on_rollback(self, fn):
self.thread_init()
self.local.rollback.append(fn)
``` |
{
"source": "7RedViolin/mbox2eml",
"score": 3
} |
#### File: 7RedViolin/mbox2eml/mbox2eml.py
```python
import argparse
import os
import sys
import hashlib
months = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
def lets_go(file, folder):
blank_lines_count = 2
file_count = 0
file_skipped_count = 0
is_dupe = False
with open(file, "r") as mbox_file:
print("Processing file: {0}".format(file))
print("Please wait ...")
for line in mbox_file:
line_stripped = line.strip()
line_parts = line_stripped.split(' ')
if blank_lines_count >= 1 and line_stripped.startswith("From "):
msg_year = line_parts[7]
msg_month = "{:0>2}".format(int(months.index(line_parts[3])) + 1)
msg_day = line_parts[4]
msg_time = line_parts[5].replace(':','')
msg_hash = hashlib.sha1(line_stripped.encode("UTF-8")).hexdigest()
eml_file = "{0}{1}{2}-{3}-{4}.eml".format(msg_year,msg_month,msg_day,msg_time,msg_hash[:10])
full_output = os.path.join(folder, eml_file)
if os.path.isfile(full_output):
is_dupe = True
file_skipped_count += 1
print("File skipped: {0}".format(full_output))
continue
new_file = open(full_output, "a")
file_count += 1
print("File created: {0}".format(full_output))
new_file.write("{0}".format(line))
else:
if is_dupe == True:
continue
new_file = open(full_output, "a")
new_file.write("{0}".format(line))
if line_stripped == '':
blank_lines_count += 1
else:
blank_lines_count = 0
def main():
parser = argparse.ArgumentParser(description="MBOX to eml files")
parser.add_argument('--file','-f',type=str,required=True,help="MBOX file")
parser.add_argument('--output','-o',type=str,required=False,help="Output directory. Defaults to '~/Desktop/results/'")
args = parser.parse_args()
if args.output is not None:
folder_name = args.output
else:
folder_name = os.environ.get('HOME') + '/Desktop/results'
#Check if folder exists. If not, make one
if os.path.isdir(folder_name) is False:
try:
os.mkdir(folder_name)
except:
sys.exit("Error! Could not create directory {0}".format(folder_name))
else:
print("Directory {0} already exists. Data may be overwritten.".format(folder_name))
if os.path.exists(args.file) is False:
sys.exit("Error! Could not find file {0}".format(args.file))
else:
print("Using file {0}".format(args.file))
lets_go(args.file, folder_name)
if __name__ == "__main__":
sys.exit(main())
``` |
{
"source": "7rick03ligh7/Adversarial_Normalization",
"score": 2
} |
#### File: src/scripts/main.py
```python
import os
import sys
import shutil
sys.path.append(os.path.sep.join(sys.path[0].split(os.path.sep)[:-2]))
import pandas as pd
from pytorch_lightning import loggers as pl_loggers
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import ModelCheckpoint
from src.pl_models.vgg_like import VGGLike
from src.pl_models.vgg_like_adversarial import VGGLike_Adversarial
import multiprocessing as mp
import argparse
import json
from src.utils.utils import generate_logname
from tqdm import tqdm
import time
import logging
logging.getLogger('lightning').setLevel(0)
import warnings
warnings.filterwarnings('ignore')
def worker(pid, queue, model_params):
# if os.path.exists(model_params['logpath']):
# shutil.rmtree(model_params['logpath'])
# os.makedirs(model_params['logpath'], exist_ok=True)
if model_params['adversarial']:
model = VGGLike_Adversarial(0, mp.Queue(), model_params)
else:
model = VGGLike(pid, queue, model_params)
tb_logger = pl_loggers.TensorBoardLogger(
model_params['logdir'],
name=model_params['logname'],
version=''
)
os.makedirs(model_params['logpath']+'/checkpoints/best/', exist_ok=True)
checkpoint_callback = ModelCheckpoint(
filepath=model_params['logpath']+'/checkpoints/best/',
save_top_k=1,
verbose=False,
monitor='val_acc',
mode='max',
prefix=''
)
if 'CUDA_VISIBLE_DEVICES' in os.environ.keys():
gpus = os.environ['CUDA_VISIBLE_DEVICES']
else:
gpus = 1
print('!!!!!!!! WARNING WARNING WARNING WARNING !!!!!!!!')
print('!!!!!!!! CUDA_VISIBLE_DEVICES IS NOT EXIST !!!!!!!!')
trainer = Trainer(
logger=tb_logger,
checkpoint_callback=checkpoint_callback,
max_epochs=model_params['epochs'],
num_sanity_val_steps=0,
weights_summary=None,
progress_bar_refresh_rate=0,
track_grad_norm=1,
gpus=gpus
)
trainer.fit(model)
pd.DataFrame(model.history['train_loss'], columns=['train_loss']).to_csv(model_params['logpath']+'/train_loss.csv', index=False)
pd.DataFrame(model.history['train']).to_csv(model_params['logpath']+'/train.csv', index=False)
pd.DataFrame(model.history['val']).to_csv(model_params['logpath']+'/val.csv', index=False)
def main_parallel(models_params):
progress_bars = []
processes = []
progress_queue = mp.Queue()
for i, model_params in enumerate(models_params):
proc = mp.Process(target=worker, args=(i, progress_queue, model_params,))
processes.append(proc)
proc.start()
time.sleep(5) # crutch for printing progress bar after all debug printings
for i, _ in enumerate(models_params):
progress_bars.append(tqdm(total=model_params['epochs']))
while any([proc.is_alive() for proc in processes]):
time.sleep(1)
pid, status = progress_queue.get()
if status > -1:
progress_bars[pid].update(1)
if status == model_params['epochs']-1:
processes[pid].join()
def main_debug(models_params):
    model_params = models_params[0]
if model_params['adversarial']:
model = VGGLike_Adversarial(0, mp.Queue(), model_params)
else:
model = VGGLike(0, mp.Queue(), model_params)
tb_logger = pl_loggers.TensorBoardLogger(
model_params['logdir'],
name=model_params['logname'],
version=''
)
os.makedirs(model_params['logpath']+'/checkpoints/best/', exist_ok=True)
checkpoint_callback = ModelCheckpoint(
filepath=model_params['logpath']+'/checkpoints/best/',
save_top_k=1,
verbose=False,
monitor='val_acc',
mode='max',
prefix=''
)
trainer = Trainer(
logger=tb_logger,
checkpoint_callback=checkpoint_callback,
max_epochs=model_params['epochs'],
num_sanity_val_steps=0,
weights_summary=None,
progress_bar_refresh_rate=0,
track_grad_norm=1,
gpus=1,
# val_percent_check=0.1,
# train_percent_check=0.5
)
trainer.fit(model)
pd.DataFrame(model.history['train_loss'], columns=['train_loss']).to_csv(model_params['logpath']+'/train_loss.csv', index=False)
pd.DataFrame(model.history['train']).to_csv(model_params['logpath']+'/train.csv', index=False)
pd.DataFrame(model.history['val']).to_csv(model_params['logpath']+'/val.csv', index=False)
if model_params['adversarial']:
pd.DataFrame(model.adv_history['generator_loss']).to_csv(model_params['logpath']+'/generator_loss.csv', index=False)
pd.DataFrame(model.adv_history['adversarial_loss']).to_csv(model_params['logpath']+'/adversarial_loss.csv', index=False)
if __name__ == '__main__':
# mp.set_start_method('spawn')
parser = argparse.ArgumentParser()
parser.add_argument('--logdir', type=str, required=True)
parser.add_argument('--params_file', type=str, required=True)
parser.add_argument('--epochs', type=int, required=True)
parser.add_argument('-adversarial', type=bool, default=False, const=True, nargs='?')
parser.add_argument('-debug', type=bool, default=False, const=True, nargs='?')
parser.add_argument('-debug_sn', type=bool, default=False, const=True, nargs='?')
arguments = parser.parse_args()
with open(arguments.params_file, 'r') as f:
models_params = json.load(f)
for model_params in models_params:
model_params['classes'] = [
'airplane',
'automobile',
'bird',
'cat',
'deer',
'dog',
'frog',
'horse',
'ship',
'truck'
]
logname = generate_logname(model_params)
logpath = './' + arguments.logdir + '/' + logname
model_params['logdir'] = arguments.logdir
model_params['logname'] = logname
model_params['logpath'] = logpath
model_params['epochs'] = arguments.epochs
model_params['adversarial'] = arguments.adversarial
model_params['debug_sn'] = arguments.debug_sn
if arguments.debug:
print('DEBUUUUUUUUUUUUUUUUUUUG!!!!!!!')
main_debug(models_params)
else:
print('PARALLLLLEEEEEEEEEEEEEEL!!!!!!')
main_parallel(models_params)
print('========= Done!!! =========')
``` |
{
"source": "7rick03ligh7/pyhicrep",
"score": 3
} |
#### File: pyhicrep/core/mean_filter.py
```python
import numpy as np
from numba import njit
from typing import NoReturn
@njit
def mean_filter_upper_ndiag(mat: np.ndarray,
new_mat: np.ndarray,
h: int,
max_bins: int) -> NoReturn: # pragma: no cover
"""mean filter function
Applying mean filter to first max_bins diagonals
Parameters
----------
mat : np.ndarray
input 2d ndarray
new_mat : np.ndarray
output 2d ndarray (result)
h : int
mean filter kernel radius -> kernel size = 2*h+1
max_bins : int
control applying mean filter to the first max_bins diagonals
Returns
-------
NoReturn
No return
"""
rows = mat.shape[1]
for i in range(rows):
for j in range(i, min(i+max_bins, rows)):
kernel_sum = 0
kernel_size = 0
for k_i in range(max(i-h, 0), min(i+h+1, rows)):
for k_j in range(max(j-h, 0), min(j+h+1, rows)):
kernel_sum += mat[k_i, k_j]
kernel_size += 1
new_mat[i, j] = kernel_sum/kernel_size
def mean_filter_upper_ndiag_nonjit(mat: np.ndarray,
new_mat: np.ndarray,
h: int,
max_bins: int) -> NoReturn:
"""mean filter function
Applying mean filter to first max_bins diagonals
NOTE: this is no njit implementation of mean filter for testing.
njit implementation should be an EXACT copy of this function.
Parameters
----------
mat : np.ndarray
input 2d ndarray
new_mat : np.ndarray
output 2d ndarray (result)
h : int
mean filter kernel radius -> kernel size = 2*h+1
max_bins : int
control applying mean filter to the first max_bins diagonals
Returns
-------
NoReturn
No return
"""
rows = mat.shape[1]
for i in range(rows):
for j in range(i, min(i+max_bins, rows)):
kernel_sum = 0
kernel_size = 0
for k_i in range(max(i-h, 0), min(i+h+1, rows)):
for k_j in range(max(j-h, 0), min(j+h+1, rows)):
kernel_sum += mat[k_i, k_j]
kernel_size += 1
new_mat[i, j] = kernel_sum/kernel_size
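
# --- Hedged sanity check (added for illustration, not part of the original module) ---
# With h=1 the kernel is 3x3; only cells on the first `max_bins` diagonals of the
# upper triangle are written, everything else in the output stays zero.
if __name__ == "__main__":
    toy_mat = np.arange(16, dtype=float).reshape(4, 4)
    toy_out = np.zeros_like(toy_mat)
    mean_filter_upper_ndiag_nonjit(toy_mat, toy_out, h=1, max_bins=2)
    print(toy_out)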
```
#### File: pyhicrep/cpu_single/run_single.py
```python
from ..core.calc_scc import calc_scc
from ..core.mean_filter import mean_filter_upper_ndiag
from ..utils import (open_cooler_file,
get_out_filepath,
save_to_csv,
save_to_txt)
from ..logger import log
from ..core.utils import warm_up_smooth
import logging
from typing import NoReturn
from tqdm import tqdm
import numpy as np
@log
def run_between_two_file(filepathes: list,
chromnames: list,
bin_size: int,
h: int,
max_bins: int) -> list:
all_scores = []
path1, path2 = filepathes
mat1, file1 = open_cooler_file(path1, bin_size=bin_size)
mat2, file2 = open_cooler_file(path2, bin_size=bin_size)
scores = []
for chrom in chromnames:
hic1 = mat1.fetch(chrom)
hic2 = mat2.fetch(chrom)
hic1_smoothed = np.zeros(hic1.shape, dtype=float)
hic2_smoothed = np.zeros(hic2.shape, dtype=float)
mean_filter_upper_ndiag(hic1.A, hic1_smoothed, h=h, max_bins=max_bins)
mean_filter_upper_ndiag(hic2.A, hic2_smoothed, h=h, max_bins=max_bins)
score = calc_scc(hic1_smoothed, hic2_smoothed, max_bins=max_bins)
scores.append(score)
all_scores.append([file1, file2, scores])
logging.info(f"{file1} {file2} done.")
return all_scores
@log
def run_between_multiple_files(filepathes: list,
chromnames: list,
bin_size: int,
h: int,
max_bins: int,
is_pbar: bool) -> list:
all_scores = []
if is_pbar:
main_pbar = tqdm(total=len(filepathes[:-1]),
leave=False,
ascii=" 123456789#",
position=0)
for i, path1 in enumerate(filepathes[:-1]):
paths2 = filepathes[i+1:]
if is_pbar:
second_pbar = tqdm(total=len(paths2),
leave=False,
ascii=" 123456789#",
position=1)
mat1, file1 = open_cooler_file(path1, bin_size=bin_size)
for j, path2 in enumerate(paths2):
mat2, file2 = open_cooler_file(path2, bin_size=bin_size)
scores = []
for chrom in chromnames:
hic1 = mat1.fetch(chrom)
hic2 = mat2.fetch(chrom)
hic1_smoothed = np.zeros(hic1.shape, dtype=float)
hic2_smoothed = np.zeros(hic2.shape, dtype=float)
mean_filter_upper_ndiag(hic1.A, hic1_smoothed,
h=h, max_bins=max_bins)
mean_filter_upper_ndiag(hic2.A, hic2_smoothed,
h=h, max_bins=max_bins)
score = calc_scc(hic1_smoothed, hic2_smoothed,
max_bins=max_bins)
scores.append(score)
all_scores.append([file1, file2, scores])
if is_pbar:
second_pbar.update(1)
logging.info(f"{file1} {file2} done.")
if is_pbar:
main_pbar.update(1)
second_pbar.close()
if is_pbar:
main_pbar.close()
return all_scores
@log
def run_single(filepathes: list,
max_bins: int,
h: int,
chromnames: list,
out_file="",
result_folder="",
bin_size=-1,
to_csv=False,
is_pbar=False
) -> NoReturn:
# warm up njit
warm_up_smooth()
# List for calculated SCC
all_scores = []
# Calculate SCC between two files
if len(filepathes) == 2:
all_scores = run_between_two_file(filepathes,
chromnames,
bin_size,
h,
max_bins)
# Calculate SCC between multiple files
else:
all_scores = run_between_multiple_files(filepathes,
chromnames,
bin_size,
h,
max_bins,
is_pbar)
# Calculate filepath
filepath = get_out_filepath(out_file=out_file,
result_folder=result_folder)
# Saving to txt
save_to_txt(filepath, chromnames=chromnames, all_scores=all_scores)
logging.info(f"txt result saved in {filepath}")
# Saving to csv
if to_csv:
filenames = [path.split('/')[-1] for path in filepathes]
save_to_csv(filepath,
chromnames=chromnames,
indexnames=filenames)
logging.info(f"csv result saved in {filepath}")
``` |
{
"source": "7Rocky/AoC-2017",
"score": 4
} |
#### File: AoC-2017/day_05/main.py
```python
def escape_maze(maze, level):
steps, i = 0, 0
while 0 <= i < len(maze):
steps += 1
offset = maze[i]
maze[i] += 1
if level == 2 and offset >= 3:
maze[i] -= 2
i += offset
return steps
def main():
f = open('input.txt')
maze = []
for line in f:
maze.append(int(line.strip()))
f.close()
for level in [1, 2]:
steps = escape_maze(maze.copy(), level)
print(f'Steps to escape the maze ({ level }): { steps }')
if __name__ == '__main__':
main()
```
#### File: AoC-2017/day_11/main.py
```python
steps = {
'ne': 1+1j,
'n': 2j,
'se': 1-1j,
'sw': -1-1j,
's': -2j,
'nw': -1+1j
}
def get_steps(pos):
num_steps = abs(pos.real)
if abs(pos.imag) - abs(pos.real) > 0:
num_steps += (abs(pos.imag) - abs(pos.real)) / 2
return int(num_steps)
def main():
f = open('input.txt')
for line in f:
moves = [m for m in line.strip().split(',')]
f.close()
max_num_steps, pos = 0, 0
for s in [steps[m] for m in moves]:
pos += s
num_steps = get_steps(pos)
if max_num_steps < num_steps:
max_num_steps = num_steps
print(f'Number of steps to final position (1): { num_steps }')
print(f'Number of steps to furthest position (2): { max_num_steps }')
if __name__ == '__main__':
main()
```
#### File: AoC-2017/day_16/main.py
```python
def move_programs(programs, move):
if move[0] == 's':
despl = int(move[1:])
programs = programs[-despl:] + programs[:-despl]
elif move[0] == 'x':
a, b = map(int, move[1:].split('/'))
programs[a], programs[b] = programs[b], programs[a]
elif move[0] == 'p':
a, b = map(programs.index, move[1:].split('/'))
programs[a], programs[b] = programs[b], programs[a]
return programs
def main():
f = open('input.txt')
programs = [bytes([i + 97]).decode() for i in range(16)]
initial_programs = programs.copy()
for line in f:
moves = line.strip().split(',')
f.close()
i = 0
while i < 1_000_000_000:
for move in moves:
programs = move_programs(programs, move)
if i == 0:
print(f"Programs order (1): { ''.join(programs) }")
i += 1
if programs == initial_programs:
i = (1_000_000_000 // i) * i
print(f"Programs order (2): { ''.join(programs) }")
if __name__ == '__main__':
main()
```
#### File: AoC-2017/day_17/main.py
```python
def add_number(numbers, cur, steps):
n = len(numbers)
pos = (cur + steps) % n
return numbers[:pos] + [n] + numbers[pos:], pos + 1
def main():
steps = 328
numbers = [0]
pos = 0
for _ in range(2017):
numbers, pos = add_number(numbers, pos, steps)
print(f'Value after 2017 (1): { numbers[numbers.index(2017) + 1] }')
for n in range(2017, 50_000_000):
pos = (pos + steps) % (n + 1) + 1
if pos == 1:
number_after_0 = n + 1
print(f'Value after 0 (2): { number_after_0 }')
if __name__ == '__main__':
main()
```
#### File: AoC-2017/day_18/main.py
```python
class Program:
def __init__(self, program_id):
self.registers = {'p': program_id}
self.count = 0
self.sounds = []
self.pc = 0
def extract(self, instruction):
number = None
if instruction.count(' ') > 1:
instruction, reg, number = instruction.split(' ')
try:
number = int(number)
except ValueError:
number = self.registers[number]
else:
instruction, reg = instruction.split(' ')
if not self.registers.get(reg):
self.registers[reg] = 0
return instruction, reg, number
def process(self, instructions, program=None):
sound = 0
while self.pc < len(instructions):
instruction, reg, number = self.extract(instructions[self.pc])
if program:
end = self.co_process(instruction, reg, number, program)
if end:
return
else:
sound = self.process_alone(sound, instruction, reg, number)
if self.count > 0:
return sound
self.pc += 1
def do_instruction(self, instruction, reg, number):
if instruction == 'set':
self.registers[reg] = number
elif instruction == 'add':
self.registers[reg] += number
elif instruction == 'mul':
self.registers[reg] *= number
elif instruction == 'mod':
self.registers[reg] %= number
elif instruction == 'jgz':
try:
self.pc += number - 1 if int(reg) > 0 else 0
except ValueError:
self.pc += number - 1 if self.registers[reg] > 0 else 0
def process_alone(self, sound, instruction, reg, number):
if instruction == 'snd':
sound = self.registers[reg]
elif instruction == 'rcv' and self.registers[reg] != 0:
self.count += 1
else:
self.do_instruction(instruction, reg, number)
return sound
def co_process(self, instruction, reg, number, program):
if instruction == 'snd':
program.sounds.append(self.registers[reg])
self.count += 1
elif instruction == 'rcv':
if len(self.sounds) == 0:
return True
self.registers[reg] = self.sounds.pop(0)
else:
self.do_instruction(instruction, reg, number)
def main():
f = open('input.txt')
instructions = [line.strip() for line in f]
f.close()
sound = Program(0).process(instructions)
print(f'First frequency received (1): { sound }')
programs = [Program(0), Program(1)]
i = 0
while True:
programs[i % 2].process(instructions, programs[(i + 1) % 2])
i += 1
if len(programs[i % 2].sounds) == 0:
break
print(
f'Number of sent frequencies by Program 1 (2): { programs[1].count }')
if __name__ == '__main__':
main()
```
#### File: AoC-2017/day_20/main.py
```python
import re
class Particle:
def __init__(self, number, position, velocity, acceleration):
self.number = number
self.position = position
self.velocity = velocity
self.acceleration = acceleration
self.destroyed = False
def move(self):
for i in range(len(self.acceleration)):
self.velocity[i] += self.acceleration[i]
for i in range(len(self.velocity)):
self.position[i] += self.velocity[i]
def distance(self):
x, y, z = self.position
return abs(x) + abs(y) + abs(z)
def get_positions_count(particles):
positions_count = {}
for particle in particles:
position_str = ''.join(map(str, particle.position))
if not positions_count.get(position_str):
positions_count[position_str] = [particle]
else:
positions_count[position_str].append(particle)
return positions_count
def remaining_particles(particles, positions_count):
for position in positions_count:
if len(positions_count[position]) > 1:
for particle in positions_count[position]:
particle.destroyed = True
return [particle for particle in particles if not particle.destroyed]
def main():
f = open('input.txt')
particles, colliding_particles = [], []
for i, line in enumerate(f):
m = re.match(
r'p=<(-?\d+),(-?\d+),(-?\d+)>, v=<(-?\d+),(-?\d+),(-?\d+)>, a=<(-?\d+),(-?\d+),(-?\d+)>', line)
particles.append(Particle(i, [int(m[1]), int(m[2]), int(m[3])], [int(
m[4]), int(m[5]), int(m[6])], [int(m[7]), int(m[8]), int(m[9])]))
colliding_particles.append(Particle(i, [int(m[1]), int(m[2]), int(m[3])], [int(
m[4]), int(m[5]), int(m[6])], [int(m[7]), int(m[8]), int(m[9])]))
f.close()
finished = 1000
min_distance = 2 ** 32 - 1
while finished != 0:
for particle in particles:
particle.move()
finished -= 1
for particle in particles:
distance = particle.distance()
if min_distance > distance:
min_distance = distance
finished += 1
break
particles.sort(key=lambda p: p.distance())
print(f'Closest particle (1): { particles[0].number }')
remaining = len(colliding_particles)
finished = 100
while finished != 0:
for particle in colliding_particles:
particle.move()
positions_count = get_positions_count(colliding_particles)
colliding_particles = remaining_particles(
colliding_particles, positions_count)
if remaining == len(colliding_particles):
finished -= 1
else:
remaining = len(colliding_particles)
print(f'Remaining particle (2): { remaining }')
if __name__ == '__main__':
main()
```
#### File: AoC-2017/day_21/main.py
```python
import math
from functools import reduce
def unnormalize(pattern):
return pattern.split('/')
def normalize(image):
return '/'.join(image)
def count_on(image):
return reduce(lambda x, y: x + y.count('#'), image, 0)
def flip(image):
new_image = []
for row in image:
new_image.append(row[::-1])
return new_image
def rotate(image):
new_image = []
for i in range(len(image)):
new_row = []
for j in range(len(image)):
new_row.append(image[len(image) - 1 - j][i])
new_image.append(''.join(new_row))
return new_image
def transform(image, position):
if position == 0:
return image
if position == 1:
return flip(image)
if position == 2:
return rotate(image)
if position == 3:
return flip(rotate(image))
if position == 4:
return rotate(rotate(image))
if position == 5:
return flip(rotate(rotate(image)))
if position == 6:
return rotate(rotate(rotate(image)))
if position == 7:
return flip(rotate(rotate(rotate(image))))
def get_subimages(image):
subimages = []
if len(image) % 2 == 0:
for j in range(int(len(image) / 2)):
for i in range(int(len(image) / 2)):
subimages.append([image[2 * j][2 * i:2 * (i + 1)],
image[2 * j + 1][2 * i:2 * (i + 1)]])
elif len(image) % 3 == 0:
for j in range(int(len(image) / 3)):
for i in range(int(len(image) / 3)):
subimages.append([image[3 * j][3 * i:3 * (i + 1)],
image[3 * j + 1][3 * i:3 * (i + 1)],
image[3 * j + 2][3 * i:3 * (i + 1)]])
return subimages
def join_subimages(subimages):
image = []
width = int(math.sqrt(len(subimages)))
rows = [''] * len(subimages[0])
for i, subimage in enumerate(subimages):
for j in range(len(rows)):
rows[j] += subimage[j]
if (i + 1) % width == 0:
for row in rows:
image.append(row)
rows = [''] * len(subimages[0])
return image
def enhance(image, rules):
subimages = get_subimages(image)
new_subimages = []
for subimage in subimages:
for i in range(8):
match = rules.get(normalize(transform(subimage, i)))
if match != None:
new_subimages.append(unnormalize(match))
break
return join_subimages(new_subimages)
def main():
f = open('input.txt')
rules = {}
for line in f:
split = line.strip().split(' => ')
rules[split[0]] = split[1]
f.close()
image = ['.#.', '..#', '###']
for i in range(18):
if i == 5:
print(f'Pixels on after 5 iterations (1): { count_on(image) }')
image = enhance(image, rules)
print(f'Pixels on after 18 iterations (2): { count_on(image) }')
if __name__ == '__main__':
main()
```
#### File: AoC-2017/day_24/main_test.py
```python
import io
import sys
import unittest
from main import main
class TestMain(unittest.TestCase):
def test_main(self):
rescued_stdout = io.StringIO()
sys.stdout = rescued_stdout
main()
want = 'Maximum bridge strength (1): 1868\n' + \
'Longest bridge strength (2): 1841\n'
sys.stdout = sys.__stdout__
self.assertEqual(want, rescued_stdout.getvalue())
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "7Rocky/AoC-2021",
"score": 3
} |
#### File: AoC-2021/day_03/main.py
```python
def match(crit, rep):
n_bits = len(rep[0])
for i in range(n_bits):
if len(rep) == 1:
break
bit_sum = sum(int(number[i]) for number in rep)
match = crit(bit_sum, len(rep) / 2)
rep = list(filter(lambda s, i=i, m=match: s[i] == m, rep))
return int(rep[0], 2)
def main():
with open('input.txt') as f:
report = list(map(lambda s: s.strip(), f.readlines()))
n_bits = len(report[0])
gamma = 0
for i in range(n_bits):
bit_sum = sum(int(number[i]) for number in report)
gamma += 2 ** (n_bits - i - 1) * int(bit_sum > len(report) / 2)
epsilon = ~ gamma & (2 ** n_bits - 1)
print(f'Power consumption (1): { gamma * epsilon }')
oxygen = match(lambda n, m: '0' if n < m else '1', report.copy())
co2 = match(lambda n, m: '1' if n < m else '0', report.copy())
print(f'Life support (2): { oxygen * co2 }')
if __name__ == '__main__':
main()
```
#### File: AoC-2021/day_09/main.py
```python
from functools import reduce
def bfs(root, points):
queue = [root]
visited_states = {root}
basin_size = 1
while len(queue) > 0:
i, j = queue[0]
queue = queue[1:]
ps = [(i - 1, j), (i + 1, j), (i, j - 1), (i, j + 1)]
for n in ps:
if n not in visited_states and points[n[1]][n[0]] != 9:
basin_size += 1
queue.append(n)
visited_states.add(n)
return basin_size
def main():
points = [[9]]
with open('input.txt') as f:
for line in f.readlines():
points.append([9] + list(map(int, list(line.strip()))) + [9])
points[0] = [9] * len(points[1])
points.append(points[0])
size = (len(points[0]), len(points))
low_points, basin_sizes = [], []
for j in range(1, size[1] - 1):
for i in range(1, size[0] - 1):
ps = [points[j - 1][i], points[j + 1][i],
points[j][i - 1], points[j][i + 1]]
if all(map(lambda p, i=i, j=j: points[j][i] < p, ps)):
low_points.append(points[j][i])
basin_sizes.append(bfs((i, j), points))
basin_sizes.sort()
basins_prod = reduce(lambda x, y: x * y, basin_sizes[-3:], 1)
print(f'Risk of low points (1): { sum(low_points) + len(low_points) }')
print(f'Product of three largest basins (2): { basins_prod }')
if __name__ == '__main__':
main()
```
#### File: AoC-2021/day_12/main.py
```python
def dfs(root, nodes, visited, paths, special, path=''):
if root == 'end':
return paths.add(f'{path}end')
path += f'{root},'
if root.islower() and root not in special:
visited.add(root)
if root in special:
special.clear()
for node in nodes[root]:
if node not in visited:
dfs(node, nodes, visited.copy(), paths, special.copy(), path)
def main():
nodes = {}
with open('input.txt') as f:
for line in f.readlines():
src, dst = line.strip().split('-')
if src in nodes:
nodes[src].add(dst)
else:
nodes[src] = {dst}
if dst in nodes:
nodes[dst].add(src)
else:
nodes[dst] = {src}
paths = set()
dfs('start', nodes, {'start'}, paths, set())
print(f'Number of paths (1): { len(paths) }')
paths.clear()
for node in nodes.keys():
if node not in {'start', 'end'} and node.islower():
dfs('start', nodes, {'start'}, paths, {node})
print(f'Number of paths (2): { len(paths) }')
if __name__ == '__main__':
main()
``` |
{
"source": "7Rocky/IoT_Microservices",
"score": 3
} |
#### File: stats-ms/test/test.py
```python
import json
import unittest
from src.measures.humidity import Humidity
from src.measures.light import Light
from src.measures.temperature import Temperature
class TestStatsMS(unittest.TestCase):
def test_humidity(self):
humidity = Humidity('humidities', 60)
self.assertEqual(humidity.queue, 'humidities')
self.assertEqual(humidity.max_items, 60)
with open('test/humidities.json', 'r') as f:
data = list(json.loads(f.read()))
self.assertEqual(
humidity.calculate_stats(data),
{
'end_date': 'Sun, 26 Apr 2020 13:21:35 GMT',
'end_timestamp': 1587907295530,
'init_date': 'Sun, 26 Apr 2020 13:21:34 GMT',
'init_timestamp': 1587907294530,
'max_value': 40,
'mean_value': 30.0,
'measure': 'humidity',
'ip': '192.168.1.50',
'min_value': 20,
'n_samples': 2,
'real_values': [ 40, 20 ],
'sensor': 'Grove - Moisture',
'std_deviation': 14.1,
'time_span': 1000,
'username': 'Rocky'
}
)
def test_light(self):
light = Light('lights', 60)
self.assertEqual(light.queue, 'lights')
self.assertEqual(light.max_items, 60)
with open('test/lights.json', 'r') as f:
data = list(json.loads(f.read()))
self.assertEqual(
light.calculate_stats(data),
{
'digital_values': [ 1, 0 ],
'end_date': 'Sun, 26 Apr 2020 13:21:35 GMT',
'end_timestamp': 1587907295530,
'init_date': 'Sun, 26 Apr 2020 13:21:34 GMT',
'init_timestamp': 1587907294530,
'mean_value': 0.5,
'measure': 'light',
'ip': '192.168.1.50',
'n_samples': 2,
'sensor': 'Smart LED',
'time_span': 1000,
'username': 'Rocky'
}
)
def test_temperature(self):
temperature = Temperature('temperatures', 60)
self.assertEqual(temperature.queue, 'temperatures')
self.assertEqual(temperature.max_items, 60)
with open('test/temperatures.json', 'r') as f:
data = list(json.loads(f.read()))
self.assertEqual(
temperature.calculate_stats(data),
{
'end_date': 'Sun, 26 Apr 2020 13:21:35 GMT',
'end_timestamp': 1587907295530,
'init_date': 'Sun, 26 Apr 2020 13:21:34 GMT',
'init_timestamp': 1587907294530,
'max_value': 25.4,
'mean_value': 23.9,
'measure': 'temperature',
'ip': '192.168.1.50',
'min_value': 22.4,
'n_samples': 2,
'real_values': [ 25.4, 22.4 ],
'sensor': 'Grove - Temperature',
'std_deviation': 2.1,
'time_span': 1000,
'username': 'Rocky'
}
)
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "7RU7H/H4dd1xB4dg3r",
"score": 2
} |
#### File: src/H4dd1xB4dg3r/H4dd1xB4dg3r.py
```python
import multiprocessing as mp
import threading, os, sys, time, asyncio, subprocess
from dataclasses import dataclass
from typing import Any, Awaitable
# import workspace_management internalise it in the class
# slots break in multiple inheritance AVOID, 20% efficiency over no slot dict
@dataclass(slots=True)
class Target:
organisation_root: str
domain_root: str
cidr_range: str
domain_name: str
# TODO BIG DESIGN CHOICES
#domain_name_list: dict - listing in text file is good for tools, !!
# TODO
ansnum: dict
#out-of-scope-related
#blacklisted_domains: #TODO figure it out!!
#blacklisted_subdomains #TODO figure it out!!!
blacklist_gospider: str
project_name_path: str
project_name: str
badger_location: str
    def __init__(self):
        self.badger_location = os.getcwd()
        self.blacklist_gospider = "jpg,jpeg,gif,css,tif,tiff,png,ttf,woff,woff2,ico,pdf,svg,txt"
async def run_sequence(*functions: Awaitable[Any]) -> None:
for function in functions:
await function
async def run_parallelism(*functions: Awaitable[Any]) -> None:
await asyncio.gather(*functions)
# get all ans put into target dict
async def workspace_setup(project_name):
await run_sequence(
        check_valid_install(),
        create_directory_forest(project_name)
)
async def check_valid_install():
    # TODO: verify that all required external tools are installed
    pass
async def create_directory_forest(project_name):
# All tool names
# All found URLs
# All custom wordlists
    # All standardised wordlists -> transform
    # All logs
    # Domain_map
    # Domain_wordlist mapping
    pass
async def populate_workspace():
    pass
def new_directory_tree(directory_path):
    os.mkdir(directory_path)
def check_out_of_scope():
    # TODO refactor to one function, concat_domainnames, concat_urls!
    pass
def screenshotting():
    # EyeWitness
    pass
# reconftw, osmedeus in parallel to main
# Modularity to be recursively run
# BUT also neatly using bandwidth while dataprocessing
# Strategic Nuclei, fuzzing scans
#
#
# [osmedeus] | C |
# [nuclei][nuclei] [nuclei] | O |
# mainchain ->[OSINT][DOMAIN] [SUBDOMAIN] [SERVER]/[EXPLOITATION] | N |
# [reconftw] | [datahandle] [datahandle] | S |
# | | | | O |
# | [re:Enumeration-Cycle] | L |
# | [The fuzziest branch that is queued sequence of fuzz but not the main branch] | I |
# | | D |
# recursiveMAIN |->[OSINT]-+>[DOMAIN]-+>[SUBDOMAIN]-+>[SERVER]/[EXPLOITATION] | A |
# | T |
# | I |
# | O |
# | N |
#
async def run():
    await run_sequence(
        # scope_init_bbscope()
        run_parallelism(
            all_reconftw(),
            all_osmedeus(),
            run_sequence(
                # main tool chain block; keep run_(parallel/sequence) calls below each block for legibility
                # bruteforcing with amass needs its own parallel branch - we don't actually want to wait for it here
                # similar functions for ffuf and wfuzz need to be spread *intelligently* - e.g. by fuzz wordlist filesize threshold - or placed in a queue that fuzzes targets sequentially
            )
        ),
        # Once the first main pass is concluded, a secondary scan on the !!leaf-most!! collected information is started -
        # NOT TO BE CONFUSED WITH the parallel secondary run internally AFTER EACH [MODULE] on its own collected data;
        # more than that would be overkill and the secondary is more likely to be slower anyway
        run_sequence(
        ),
        # Absolute consolidation OR another scan on the next newest !!leaf-most!! data
    )
#TODO
#scope related functions
#async def
# reconftw Framework
async def all_reconftw(domain_name, output_path):
process = subprocess.Popen(["/opt/reconftw/reconftw.sh", f"-d {domain_name} -a -o {output_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"reconftw -all scan completed on {domain_name}, check {output_path}")
# osmedeus framework
async def all_osmedeus(domain_name, output_path):
process = subprocess.Popen(["osmedeus", f"-f extensive -t {domain_name}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"osmedeus extensive scan completed on {domain_name}, check {output_path}")
#
# OSINT
#
# theHarvester
# -s for shodan
async def osint_theHarvester(target,output_path):
print(f"Beginning theHarvester {target}")
process = subprocess.Popen(["theHarvester", "-d {target} -g -r -f {output_path} --screenshot {output_path}" ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"theHarvester OSINT against {target} complete, check {output_path}")
# recon-ng
async def osint_reconNG(target,output_path):
print(f"Beginning recon-ng {target}")
process = subprocess.Popen(["recon-cli", "-d {target} -g -r -f {output_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"recon-ng cli OSINT against {target} complete, check {output_path}")
#
# Aquistion Recon
#
async def acquistion_recon_wordlist_generation():
# scrape crunchbase, wiki, google
# GO GO GO
pass
async def acquisition_recon_Amass_ORG(organisation_root, output_path, log_path):
print("Beginning Amass intel -org")
process = subprocess.Popen(["amass", f"intel -src -org {organisation_root} -max-dns-queries 2500 -oA {output_path} -l {log_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print("Amass intel -src -org complete")
async def acquisition_recon_Amass_CIDR(cidr_range, output_path, log_path):
print(f"Beginning Amass intel -src -cidr {cidr_range}")
process = subprocess.Popen(["amass", f"intel -src -cidr {cidr_range} -max-dns-queries 2500 -oA {output_path} -l {log_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print("Amass intel -src -cidr complete")
#
# Acquistion Recon Utility
#
async def acqRec_Util_amass_ans_out(intel_output_path, output_file):
print("Getting ASN number from amass intel output")
process = subprocess.Popen(["scripts/script_amassASN_util.sh", f"{intel_output_path} {output_file}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print("File containing ASN numbers for amass intel -asn completed")
# cat $1 | awk -F, '{print $1}' ORS=',' | sed 's/,$//' | xargs -P3 -I@ -d ',' > $2
async def acqRec_Util_concatenate_domain_names():
# scrap new domains
# newdomains -> secondary_list
# newdomains -> big_list
# TODO refactor to one function!
await run_sequence(
run_parallelism(
# scraping acquisition recon function(s)
acquisition_recon_Amass_ORG(),
acquisition_recon_Amass_CIDR(),
),
run_parallelism(
acqRec_Util_amass_ans_out(),
acqRec_Util_concatenate_domain_names(),
),
)
#
# ANS enumeration
#
async def ans_enumeration_Amass(asnum, output_path, log_path):
for asn in asnum:
print(f"Beginning Amass intel -asn {asn} -oA {output_path} -l {log_path}")
process = subprocess.Popen(["amass", f"intel -asn {asn} -max-dns-queries 2500 -oA {output_path} -l {log_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Amass intel -asn {asn} complete")
print(f"ASN recon for {asnum} completed")
#
# ANS enumeration Utility
#
async def ansEnum_Util_concatenate_domain_names():
# scrap new domains
# newdomains -> secondary_list
# newdomains -> big_list
# TODO refactor to one function!
await run_sequence(
ans_enumeration_Amass(),
ansEnum_Util_concatenate_domain_names(),
)
#
# Reverse whois
#
# DomLink
# Test usefulness/duration as to whether just a single domain or domain.txt->{domain}
async def reverse_whois_DOMLink(target, output_path):
# Get an API key from WHOXY.com
# Set that API key in a file named domLink.cfg in the same directory.
#for target in :
print("Beginning reverseWHOIS with DomLink")
process = subprocess.Popen(["python", "/opt/DomLink/domLink.py", f"-D {target} -o {output_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
#print("")
print("ReverseWHOIS Recon with DomLink completed")
#
# Reverse whois Utility
#
async def revWhois_Util_concatenate_domain_names():
# scrap new domains
# newdomains -> secondary_list
# newdomains -> big_list
# TODO refactor to one function!
await run_sequence(
reverse_whois_DOMLink(),
revWhois_Util_concatenate_domain_names(),
)
###########################################
#
# Analyse relationships
#
###########################################
async def analyse_relationships():
# scrape builtwith.com/relationships
pass
async def analyse_relationships_findrelationships(project_name, badger_location, target):
print("findrelationships.py Started")
if target.endswith(".txt"):
process = subprocess.Popen(["scripts/script_findrelationships_multi.sh", f"{project_name} {badger_location} {target}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
else:
process = subprocess.Popen(["python3", f"{badger_location}/scripts/findrelationships.py {target} 0> {project_name}/findrelationships/findrelationships_output.txt"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print("findrelationships.py completed")
# #!/bin/bash
# cat $3 | python3 $2/scripts/findrelationships.py 0> $1/findrelationships/findrelationships_output.txt
###########################################
#
# Dorking - SCRAPPAGE!
#
###########################################
async def dork_target_xtext():
# Scrape Copyright, Privacy, Terms of Service text
# uniq with generics of each
pass
#
# Domain Enumeration
#
# Preliminary domain/subdomain list building
async def domain_enumeration_Assetfinder(domain):
print("Assetfinder Started")
process = subprocess.Popen(["assetfinder", "{domain} 0> assetfinder_output.txt", stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print("Assetfinder completed")
# Historic domain/subdomain list building
async def domain_enumeration_Waybackurls(target_list):
print("Running independent waybackurls")
process = subprocess.Popen(["scripts/script_waybackurl.sh", f"{target_list}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print("Completed independent waybackurls")
# Bash script to run this, due to subprocess.Popen([ARG, ARG]):
# This one does go to stdout
# cat $1 | waybackurls > waybackurl_out.txt
#
# Domain Enumeration Utility
#
async def domEnum_Util_concatenate_urls():
# scrap new domains
# newdomains -> secondary_list
# newdomains -> big_list
# TODO refactor to one function!
await run_sequence(
run_parallelism(
domain_enumeration_Waybackurls(),
domain_enumeration_Assetfinder(),
),
domEnum_Util_concatenate_urls(),
)
# Nuclei
async def subdomain_takeover_Nuclei(target):
if target.endswith(".txt"):
targetStr = f"-list {target}"
else:
targetStr = f"-u {target}"
print(f"Running Nuclei with the target flag and arguments set to: {targetStr}")
process = subprocess.Popen(["nuclei", f"{targetStr} -me nuclei/"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Completed Nuclei against {targetStr}")
#
# Subdomain Bruteforcing
#
#async def subdomain_bruteforcing_WordlistGen(): - hmm
async def subdomain_bruteforcing_ShuffleDNS():
pass
#domain_name must be a .txt file
async def subdomain_Amass_Non_Brute(domain_name, output_path, log_path, blacklist):
if blacklist.endswith(".txt"):
blacklistStr = f"-blf {blacklist}"
else:
blacklistStr = f"-bl {blacklist}"
print(f"Beginning Amass enum -active -ip -src -df {domain_name} {blacklistStr} -oA {output_path} -l {log_path}")
process = subprocess.Popen(["amass", f"enum -active -ip -src -df {domain_name} {blacklistStr} -oA {output_path} -l {log_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Amass enum -active -ip -src -df {domain_name} {blacklistStr} -oA {output_path} -l {log_path} complete")
#domain_name must be a .txt file
async def subdomain_Amass_Brute(domain_name, output_path, log_path, blacklist_domains, blacklist_subdomains):
blacklistStr = ""
if blacklist_domains != "":
blacklistStr = f"-bl {blacklist_domains}"
if blacklist_subdomains != "":
blacklistStr = f"-blf {blacklist_subdomains}"
print(f"Beginning Amass enum -active -ip -src -brute -df {domain_name} {blacklistStr} -oA {output_path} -l {log_path}")
process = subprocess.Popen(["amass", f"enum -active -ip -src -brute {domain_name} {blacklistStr} -oA {output_path} -l {log_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Amass enum -active -ip -src -brute {domain_name} {blacklistStr} -oA {output_path} -l {log_path} complete")
#
# Subdomain Bruteforcing Utility
#
# Consolidate list between each tool to improve
# final bruteforcing
async def subdomain_consolidate_lists():
await run_sequence(
subdomain_bruteforcing_ShuffleDNS(),
subdomain_consolidate_lists(),
subdomain_Amass_Non_Brute(),
subdomain_consolidate_lists(),
subdomain_Amass_Brute(),
)
#
# Subdomain Enumeration
#
# For quick site mapping
# Web crawler needed to brute force and parse sitemap.xml, parse robots.txt, Link Finder
# Gets URLs from Wayback Machine, Common Crawl, Virus Total, Alien Vault
# Finds AWS-S3 from response source
# Finds subdomains from response source
# Generates and verifies links from JavaScript files
async def subdomain_enumeration_Gospider(target, blacklist, output_path):
blacklistStr = ""
if blacklist != "":
blacklistStr = f"--blacklist {blacklist}"
if target.endswith(".txt"):
siteStr = f"-S {target}" # sites file.txt
else:
siteStr = f"-s {target}" # site domain
print(f"Running Gospider against {siteStr} with {blacklistStr}")
process = subprocess.Popen(["gospider", f"{siteStr} -a --subs --sitemap --robots --js {blacklistStr} -o {output_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Completed Gospider against {siteStr} with {blacklistStr}")
# TODO
# Must Reappraise wordlists and artifacts
# Hakrawler for JS endpoints
async def subdomain_enumeration_Hakrawler(url_list, output_path):
print("Beginning Hakrawler script")
process = subprocess.Popen(["scripts/script_hakrawler.sh", "{url_list} {output_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Complete Hakrawler on {url_list} check {output_path}")
#
# #!/bin/bash
# cat $1 | hakrawler 0> $2
#Subdomainizer to analyse JS
async def subdomain_enumeration_Subdomanizer():
pass
# Take a list of domains and probe for working HTTP and HTTPS servers
async def subdomain_enumeration_Httprobe(target_list):
print(f"Running httProbe")
process = subprocess.Popen(["scripts/script_httprob.sh", "{target_list}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Completed httProbe")
# Bash script to run this, due to subprocess.Popen([ARG, ARG]):
# cat $FILE | httprobe -c 50 0> httprobe_out.txt
async def subdom_Util_gospider(input_path):
print(f"Running gospider subdomain extraction on {input_path}")
process = subprocess.Popen(["scripts/script_gospiderSD.sh", f"{input_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Completed gospider subdomain extraction on {input_path}")
# Bash script to run this, due to subprocess.Popen([ARG, ARG]):
#grep $@ -Eo '(http|https)://[^/"]+' | anew
# add to subdomain lists
await run_parallelism(
subdomain_enumeration_Gospider(),
subdomain_enumeration_Hakrawler(),
subdomain_enumeration_Subdomanizer(),
subdomain_enumeration_Httprobe(),
)
###########################################
#
# Subdomain Scraping
#
# ALMOST CERTAINLY MAKE THIS ONE GOLANG PIPE
# BEWARE: LOTS OF STRING OPERATIONS, LOOKUPS AND UNIQ AFTER SCRAPING
#
#shosubgo, when I get shodan key
#async def subdomain_scrapping():
#subscrapper requires api key
async def subdomain_scrapping_Subscrapper():
pass
async def subdomain_scrapping_Amass():
pass
async def subdomain_scrapping_Subfinder():
pass
# github subdomain scraps github
# kingofbugbounty tips
# Search subdomains using github and httpx
# Github-search - Using python3 to search subdomains, httpx filter hosts by up status-code response (200)
async def subdomain_scrapping_GithubSubdomain(api_key_github, domain, output_path):
print(f"Beginning github-subdomain targeting {domain}")
process = subprocess.Popen(["","{api_key_github} {domain} {output_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Completed github-subdomain")
#!/bin/bash
# python3 /opt/github-subdomain.py -t $1 -d $2 | httpx --title > $3
await run_parallelism(
subdomain_scrapping_GithubSubdomain()
)
#
#
#
#
########################################
#Cloudbrute
async def cloud_enumeration_Cloudbrute():
pass
###########################################
# -> Data processing -> Further Recon
# Port Analysis -> Service Scanning -+> Brutespray
#
###########################################
async def port_analysis_Dnmasscan():
exit_code = subprocess.call("scripts/dnmasscan.sh") # FLAGS
async def port_analysis_Util_Pass_dnmascan_out():
pass
async def port_analysis_Masscan(ip_ranges, exclude, output_path):
if exclude.endswith(".txt"):
excludeStr = f"--excludefile {exclude}"
else:
excludeStr = ""
if ip_ranges.endswith(".txt"):
with open(ip_ranges, "r") as f:
ips = f.read().splitlines()
for target in ips:
print(f"Masscan using file:{ip_ranges}, target: {target}")
process = subprocess.Popen(["masscan", f"{target} -p0-65535 --rate 10000000 {excludeStr} -oG {output_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Masscan against {target} complete")
else:
target = ip_ranges
print(f"Masscan target: {target}")
process = subprocess.Popen(["masscan", f"{target} -p0-65535 --rate 10000000 {excludeStr} -oG {output_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Masscan against {target} complete")
async def port_analysis_Util_Pass_masscan_out():
pass
async def service_scanning_Nmap():
#output from masscan
#nmapScriptGun
#nmap service scanning
pass
async def service_scanning_Brutespray():
pass
await run_sequence(
port_analysis_Dnmasscan(),
port_analysis_Util_Pass_dnmascan_out(),
port_analysis_Masscan(),
port_analysis_Util_Pass_masscan_out(),
service_scanning_Nmap(),
)
# service_scanning_Brutespray()
############################################
#
# Web Scanning
#
############################################
#TODO -port flag
#
async def web_scanning_Nikto(target_list, output_path):
with open(target_list , "r") as f:
urls = f.read()
for target in urls:
print(f"Starting Nikto scanning on {target}")
process = subprocess.Popen(["nikto", "-h {target} -c -o {output_path}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Finished Nikto scanning on {target}")
print(f"Finished all Nikto scanning within {target_list}")
# TODO MORE flags https://github.com/Hackmanit/Web-Cache-Vulnerability-Scanner/tree/master
async def web_scanning_Web_Cache_Vulnerability_Scanner(target_list, json_report):
with open(target_list, "r") as f:
urls = f.read().splitlines()
for target in urls:
print(f"Testing potential webcache poisoning with Web-Cache-Vulnerability-Scanner: {target}")
process = subprocess.Popen(["wcvs", f"--reclimit 1 -gr -gp {json_report} -ej"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Completed testing of potential webcache poisoning with Web-Cache-Vulnerability-Scanner: {target}")
print(f"Finished all Web-Cache-Vulnerability-Scanner scanning within {target_list}")
# NOT SURE ON quality of this
# TODO Review source code, need stdout, would need multiple runs -is it even worth it?
async def web_scanning_analyze_FEJS_libraries_Is_website_vulnerable(domain_name, protocol):
target = f"{protocol}://{domain_name}"
print(f"Checking front-end JS libraries on {target} with is-website-vulnerable")
process = subprocess.Popen(["is-website-vulnerable", f"{target} --json --js-lib --desktop"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
process = subprocess.Popen(["is-website-vulnerable", f"{target} --json --js-lib --mobile"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Completed is-website-vulnerable checks on {target}")
async def vuln_osmedeus(target_list):
with open(target_list, "r") as f:
urls = f.read().splitlines()
for target in urls:
print(f"osmedeus vulnerability scan starting against {target}")
process = subprocess.Popen(["osmedeus", f"-f extensive-vuln -t {target}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"osmedeus vulnerability scan completed for {target}")
print(f"Finished all Osmedeus Vuln scanning within {target_list}")
#
# Web scanning Utility
#
# GENERAL NOTE TODO THESE WILL ALMOST CERTAINLY END UP AS WRAPPERS OR APP IN GO
# FOR The superior parsing
#
async def webscan_Util_Nikto_report(nikto_output):
print(f"Compiling Nikto report for Nikto scans {nikto_output}")
process = subprocess.Popen(["go", "run", "reportNikto.go", f"{nikto_output}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Completed Nikto report! Please MANUALLY review at {nikto_output}")
async def webscan_Util_gron_wcvs():
# recursively gron the wcvs output
pass
await run_sequence(
run_parallelism(
web_scanning_Nikto(),
web_scanning_Web_Cache_Vulnerability_Scanner(),
web_scanning_analyze_FEJS_libraries_Is_website_vulnerable(),
vuln_osmedeus(),
),
webscan_Util_gron_wcvs(),
webscan_Util_Nikto_report(),
)
# def webscan_optional_wpscan():
###########################################
#
# Exploitation
#
###########################################
# Vulnerability found in data processing
# class construction: exploit_target
# switch depending on exploit type: xss, sqli, ssrf
async def fuzz_SSRF_SSRFmap():
pass
# jason haddix sqli statistics paper, database nomenclature "id"
async def sql_database_found_SQLMAP():
pass
async def report():
# amass viz -d3 domains.txt -o 443 /your/dir/
# amass viz -maltego domains.txt -o 443 /your/dir/ # If end up using it
# amass viz -visjs domains.txt -o 443 /your/dir/
pass
# Consider use cases later see github useages
# https://github.com/tomnomnom/gron
async def json_analysis_Gron():
pass
async def get_urls_Meg():
pass
async def dostuffwith_Concurl():
pass
async def techstack_analysis_TechStack():
process = subprocess.Popen(["/opt/TechStack.sh", "{}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
#favfreak
async def favicon_analysis_Favfreak():
pass
###########################################
#
#
#
###########################################
# ffuf
# wfuzz
```
#### File: src/H4dd1xB4dg3r/workspace_management.py
```python
import os
import subprocess
toollist = ['reconftw', 'osmedeus', 'nuclei', 'amass', 'domLink', 'assetfinder', 'waybackurls', 'shuffleDNS', 'gospider', 'hakrawler', 'subdomainizer', 'httprobe', 'dnmasscan', 'nmap', 'masscan', 'brutespray', 'nikto', 'wcvs', 'favfreak', 'techstack', 'gron', 'ssrfmap', 'sqlmap', 'domlink', 'theharvester', 'reconng', 'findrelationships']
# for reference update opt_function_name tool here:
#opt_toollist = ['wpscan'];
def create_directory_forest(project_name):
os.mkdir(project_name)
os.mkdir(f"{project_name}/log")
os.mkdir(f"{project_name}/reports")
os.mkdir(f"{project_name}/domainmap")
os.mkdir(f"{project_name}/wordlists")
for tool in toollist:
os.mkdir(f"{project_name}/{tool}")
process = subprocess.Popen(["recon-cli", f"-w {project_name}/reconng/initial"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Workspace directory structure for {project_name} complete.")
def create_directory_addition_tools(project_name, toolname):
os.mkdir(f"{project_name}/{toolname}")
print(f"Optional tool directory added at: {project_name}/{toolname}")
def create_addition_reconng_workspace(project_name, addition):
process = subprocess.Popen(["recon-cli", f"-w {project_name}/reconng/{addition}"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"Recon-ng Workspace directory structure for {project_name}/reconng/{addition} complete.")
def populate_initial_forest(project_name):
process = subprocess.Popen(["curl", "https://datatracker.ietf.org/doc/html/rfc1866", "-o", f"{project_name}/wordlists/rfc1866.html"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
process.wait()
print(f"cURL-ed rfc1866 for uniq wordlist bashing, output to {project_name}/wordlists/rfc1866.html")
# TODO
# Needs STRUCTURING by domain hierarchy!!
def create_directory_tree_by_address(address, project_name):
if address.Contains(".txt"):
f = open(target_list , "r")
address_list = f.read()
for addr in address_list:
os.mkdir({project_name}/domainmap/{addr}/wordlists)
else:
os.mkdir({project_name}/domainmap/{address}/wordlists)
print(f"Directory structure added to {project_name}/domainmap")
``` |
{
"source": "7RU7H/hacking-scripts",
"score": 3
} |
#### File: hacking-scripts/python3/dec-hexIPformatter.py
```python
import sys
if len(sys.argv) < 3:
print('\nYou must give desired format and IPv4 address as input...')
print('e.g.: D 192.168.10.100')
print('Valid formats D=Decimal H=Hexadecimal\n')
sys.exit(1)
Format = sys.argv[1]
def long(ip):
IP = ip.split('.')
IP = list(map(int, IP))
LongIP = IP[0]*2**24 + IP[1]*2**16 + IP[2]*2**8 + IP[3]
return LongIP
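# Worked example (added for clarity): the help text's sample input 192.168.10.100
# packs to 192*2**24 + 168*2**16 + 10*2**8 + 100 = 3232238180, i.e. hex 0xc0a80a64.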
ip = long(sys.argv[2])
if Format == 'D':
print('\nIP as Decimal format: %s' % (ip))
if Format == 'H':
print('\nIP as Hexadecimal format: %s' % (hex(ip)))
``` |
{
"source": "7scientists/vortex",
"score": 2
} |
#### File: store/sql/test_graph_queries.py
```python
import unittest
import os
import six
import gzip
from vortex.sql import Store,Vertex,Edge
from sqlalchemy import create_engine
from sqlalchemy.types import String
from . import SqlStoreTest
def is_descendant_of(vertex,ancestor,visited = None):
if not visited:
visited = {}
if vertex['pk'] in visited:
return False
visited[vertex['pk']] = True
for inE in vertex.inE:
if inE.outV == ancestor:
return True
if is_descendant_of(inE.outV,ancestor,visited = visited):
return True
return False
def is_ancestor_of(vertex,descendant,visited = None):
if not visited:
visited = {}
if vertex['pk'] in visited:
return False
visited[vertex['pk']] = True
for outE in vertex.outE:
if outE.inV == descendant:
return True
if is_ancestor_of(outE.inV,descendant,visited = visited):
return True
return False
class DescendantsAndAncestorsTest(SqlStoreTest):
def setup_vertices(self):
trans = self.store.begin()
self.m = self.store.get_or_create_vertex({'node_type' : 'module'})
self.c = self.store.get_or_create_vertex({'node_type' : 'classdef'})
self.store.get_or_create_edge(self.m,self.c,'body',{'i' : 0})
self.f1 = self.store.get_or_create_vertex({'node_type' : 'functiondef'})
self.store.get_or_create_edge(self.c,self.f1,'body',{'i' : 0})
self.a1 = self.store.get_or_create_vertex({'node_type' : 'assignment'})
self.store.get_or_create_edge(self.f1,self.a1,'body',{'i' : 0})
self.a2 = self.store.get_or_create_vertex({'node_type' : 'assignment'})
self.store.get_or_create_edge(self.f1,self.a2,'body',{'i' : 1})
self.store.commit(trans)
def test_relations(self):
self.setup_vertices()
assert is_descendant_of(self.f1,self.m)
assert is_descendant_of(self.a2,self.m)
assert is_descendant_of(self.a1,self.c)
assert not is_descendant_of(self.c,self.a1)
assert not is_descendant_of(self.m,self.a2)
assert not is_descendant_of(self.m,self.f1)
def test_cyclic_relation(self):
self.setup_vertices()
self.store.get_or_create_edge(self.f1,self.m,'defined_in')
assert is_descendant_of(self.m,self.f1)
def test_descendants(self):
self.setup_vertices()
for vertex in self.store.filter({'node_type' : 'module'}):
descendants = self.store.descendants_of(vertex)
assert len(descendants) == 4
cnt = 0
for descendent_vertex in descendants:
cnt+=1
assert is_descendant_of(descendent_vertex,vertex)
assert cnt == len(descendants)
def test_ancestors(self):
self.setup_vertices()
for vertex in self.store.filter({'node_type' : 'assignment'}):
ancestors = self.store.ancestors_of(vertex,with_self = False)
print(list(ancestors)[0].extra,vertex['pk'])
assert len(ancestors) == 3
cnt = 0
for ancestor_vertex in ancestors:
cnt+=1
assert is_ancestor_of(ancestor_vertex,vertex)
assert cnt == len(ancestors)
def test_ancestors_as_tree(self):
self.setup_vertices()
for vertex in self.store.filter({'node_type' : 'assignment'}):
for vertex_or_pk in [vertex,vertex['pk']]:
v = self.store.ancestors_of(vertex_or_pk,as_tree = True)
assert hasattr(v.inE,'_incoming_edges') and v.inE._incoming_edges is not None
assert self.f1 in [e.outV for e in v.inE._incoming_edges]
assert self.f1 in [e.outV for e in v.inE('body')]
f1 = filter(lambda v:v == self.f1,[e.outV for e in v.inE._incoming_edges])
if six.PY3:
f1 = next(f1)
else:
f1 = f1[0]
assert hasattr(f1.inE,'_incoming_edges') and f1.inE._incoming_edges is not None
assert self.c in [e.outV for e in f1.inE._incoming_edges]
def test_descendants_as_tree(self):
self.setup_vertices()
for vertex_or_pk in [self.c['pk'],self.c]:
v = self.store.descendants_of(vertex_or_pk,as_tree = True)
assert hasattr(v.outE,'_outgoing_edges') and v.outE._outgoing_edges is not None
assert self.f1 in [e.inV for e in v.outE._outgoing_edges]
assert self.f1 in [e.inV for e in v.outE('body')]
f1 = filter(lambda v:v == self.f1,[e.inV for e in v.outE._outgoing_edges])
if six.PY3:
f1 = next(f1)
else:
f1 = f1[0]
assert hasattr(f1.outE,'_outgoing_edges') and f1.outE._outgoing_edges is not None
assert self.a1 in [e.inV for e in f1.outE._outgoing_edges]
def test_ancestors_and_descendants(self):
self.setup_vertices()
qs1 = self.store.descendants_of(self.m)
qs2 = self.store.ancestors_of(self.f1)
qs3 = self.store.filter({'node_type' : 'classdef'})
assert len(qs1) == 4
assert len(qs2) == 2
qs = list(qs1.intersect(qs2).intersect(qs3))
assert len(qs) == 1
assert qs[0] == self.c
for vertex in qs:
assert is_ancestor_of(vertex,self.f1)
assert is_descendant_of(vertex,self.m)
assert vertex['node_type'] == 'classdef'
```
#### File: test/store/store_generators.py
```python
import os
from sqlalchemy import create_engine
from vortex.sql import Store
from sqlalchemy.types import String,VARCHAR
from sqlalchemy.schema import MetaData
class NodeStore(Store):
class Meta(Store.Meta):
vertex_indexes = {'node_type' : {'type' : VARCHAR(64)}}
class StoreGenerator(object):
def __init__(self,test_class):
self.test_class = test_class
self.engine = self.get_engine()
self.store = self.get_store()
def get_store(self):
raise NotImplementedError
def get_engine(self):
raise NotImplementedError
def cleanup(self):
pass
def __del__(self):
pass
class SqlMemoryStoreGenerator(StoreGenerator):
def get_engine(self):
return create_engine('sqlite:///:memory:', echo=False)
def get_store(self):
if hasattr(self.test_class,'Meta'):
return NodeStore(self.engine,meta = self.test_class.Meta)
return NodeStore(self.engine)
sql_store_generators = [SqlMemoryStoreGenerator]
all_store_generators = [SqlMemoryStoreGenerator]
if 'vortex_MSSQL_STORE' in os.environ:
class MSSqlStoreGenerator(SqlMemoryStoreGenerator):
def get_engine(self):
import pyodbc
try:
from urllib.parse import quote_plus  # Python 3
except ImportError:
from urllib import quote_plus  # Python 2
def connection_string():
quoted = quote_plus(os.environ['vortex_MSSQL_STORE'])
return 'mssql+pyodbc:///?odbc_connect={}'.format(quoted)
engine = create_engine(connection_string(),
echo=False,
deprecate_large_types = True)
self.clear_schema(engine)
return engine
def clear_schema(self,engine):
metadata = MetaData()
metadata.reflect(bind = engine)
metadata.drop_all(engine,checkfirst = True)
def get_store(self):
store = super(MSSqlStoreGenerator,self).get_store()
return store
def cleanup(self):
self.store.close_connection()
self.clear_schema(self.engine)
sql_store_generators.append(MSSqlStoreGenerator)
all_store_generators.append(MSSqlStoreGenerator)
if 'vortex_PSQL_STORE' in os.environ:
class PostgresSqlStoreGenerator(SqlMemoryStoreGenerator):
def get_engine(self):
engine = create_engine('postgres://%s' % os.environ['vortex_PSQL_STORE'], echo=False)
self.clear_schema(engine)
return engine
def clear_schema(self,engine):
metadata = MetaData()
metadata.reflect(bind = engine)
metadata.drop_all(engine,checkfirst = True)
def get_store(self):
store = super(PostgresSqlStoreGenerator,self).get_store()
return store
def cleanup(self):
self.store.close_connection()
self.clear_schema(self.engine)
sql_store_generators.append(PostgresSqlStoreGenerator)
all_store_generators.append(PostgresSqlStoreGenerator)
if 'vortex_ORIENTDB_STORE' in os.environ:
from vortex.orientdb import Store as OrientDBStore
import pyorient
class OrientDBStoreGenerator(SqlMemoryStoreGenerator):
def get_client(self):
self.client = pyorient.OrientDB(os.environ["vortex_ORIENTDB_STORE"],os.environ.get('vortex_ORIENTDB_PORT',2424))
session_id = self.client.connect(os.environ.get("vortex_ORIENTDB_USER",'guest'),os.environ.get("vortex_ORIENTDB_PASSWORD",'<PASSWORD>'))
return self.client
def clear_schema(self,client):
pass
def get_store(self):
store = OrientDBStore(self.get_client())
return store
def cleanup(self):
pass
all_store_generators.append(OrientDBStoreGenerator)
```
#### File: vortex/utils/container.py
```python
from functools import wraps
class ContainerMixin(object):
def __init__(self,data):
self._data = data
def __getitem__(self,key):
return self.data_ro[key]
def __contains__(self,key):
return True if key in self.data_ro else False
def __setitem__(self,key,value):
self.data_rw[key] = value
def __delitem__(self,key):
del self.data_rw[key]
def items(self):
return self.data_ro.items()
def keys(self):
return self.data_ro.keys()
def values(self):
return self.data_ro.values()
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
@property
def data_ro(self):
return self._data
@property
def data_rw(self):
return self._data
@property
def data(self):
return self.data_ro
def __iter__(self):
for key in self.data_ro:
yield key
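# Minimal usage sketch (added; `AttrBag` is a hypothetical subclass, not part of vortex):
#     class AttrBag(ContainerMixin):
#         pass
#     bag = AttrBag({"name": "vortex"})
#     assert bag["name"] == "vortex"
#     assert "name" in bag and bag.get("missing", 0) == 0
#     bag["version"] = 1              # writes go through data_rw
#     assert list(bag) == ["name", "version"]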
``` |
{
"source": "7sDream/nonebot-plugin-7s-roll",
"score": 2
} |
#### File: nonebot-plugin-7s-roll/nonebot_plugin_7s_roll/command.py
```python
import re
from nonebot import on_message, on_command
from nonebot.adapters import Bot, Event
from nonebot.log import logger
from nonebot.adapters.cqhttp.permission import GROUP
from nonebot.adapters.cqhttp.message import Message
from nonebot.rule import regex
from .common import START, SEP, CONF
from .roll import roll
RE_ROLL_STR = (
"^(" # 1
+ START
+ CONF.i7s_roll_command
+ " |"
+ CONF.i7s_roll_trigger
# 2 3 4 5 6
+ r" )([0-9adgimnsuvx+\- ]+)( ?(结果)?(大于|小于|大于等于|小于等于|>=|>|<|<=) ?(-?\d{1,10}))?"
)
RE_ROLL_CMD = re.compile(RE_ROLL_STR)
async def roll_command_handler(bot: Bot, event: Event, state: dict):
messages = []
logger.info(f"[7sRoll] received roll command: {event.raw_message}")
if await GROUP(bot, event):
messages.append(f"[CQ:at,qq={event.user_id}]")
match = None
if "_match" in state:
match = state["_matched"]
else:
args = str(event.raw_message).strip()
match = RE_ROLL_CMD.match(args)
if not match:
messages.append("roll 命令格式错误")
messages.append("格式为:roll <表达式>[ <判断方式><目标>]")
messages.append("表达式举例:3d6+1d3-1")
messages.append("判断方式可选:>, <, <=, >=, 或对应中文")
messages.append("目标:需要达成的点数")
return await cmd_roll.finish(Message("\n".join(messages)))
if match.group(1) is None:
return
expr_str, op_str, target = match.group(2, 5, 6)
messages.extend(roll(expr_str, op_str, target))
return await cmd_roll.finish(Message("\n".join(messages)))
cmd_roll = on_command(CONF.i7s_roll_command, priority=1, block=True)
cmd_roll.handle()(roll_command_handler)
message_cmd_roll = on_message(rule=regex(RE_ROLL_STR), priority=2, block=True)
message_cmd_roll.handle()(roll_command_handler)
``` |
{
"source": "7sDream/nonebot_plugin_bam",
"score": 3
} |
#### File: nonebot_plugin_bam/bilibili/user.py
```python
from .api import APIResult
class UserInfo(APIResult):
URL = "https://api.bilibili.com/x/space/acc/info?mid={uid}"
def __init__(self):
super().__init__()
def __initialize__(self, body, *, uid):
self.uid = uid
self.ok = body["code"] == 0
if self.ok:
data = body["data"]
self.nickname = data["name"]
self.rid = data["live_room"]["roomid"]
async def user_info(uid) -> UserInfo:
info = await UserInfo.of(uid=uid)
return info
```
#### File: nonebot_plugin_bam/database/helper.py
```python
from collections import defaultdict
from typing import Dict
from nonebot.log import logger
from peewee import JOIN
from .db import DB
from .tables import BilibiliUser, BilibiliUserStatus, FollowLink, Group
def log_sql(s):
# logger.debug(f"[DB:SQL] {s.sql()}")
return s
def get_all_groups():
yield from log_sql(Group.select())
def get_group(gid: int) -> Group:
for group in log_sql(Group.select().where(Group.gid == gid)):
return group
return None
def add_group(gid: int, group_suid: int):
return log_sql(
Group.insert(gid=gid, super_user=group_suid).on_conflict_replace()
).execute()
def remove_group(group: Group):
group.delete_instance(recursive=True, delete_nullable=True)
def get_users_with_linked_groups_and_status() -> Dict[int, BilibiliUser]:
users = {}
for user in log_sql(
BilibiliUser.select(BilibiliUser, FollowLink, BilibiliUserStatus)
.join(FollowLink, JOIN.LEFT_OUTER)
.switch(BilibiliUser)
.join(BilibiliUserStatus, JOIN.LEFT_OUTER, attr="status")
):
users[user.uid] = user
return users
def clean_users_live_status():
log_sql(BilibiliUserStatus.update(live_status=False)).execute(None)
def clean_user_live_status_in(users):
if len(users) > 0:
log_sql(
BilibiliUserStatus.update(live_status=False).where(
BilibiliUserStatus.bilibili_user.in_(users)
)
).execute()
def set_user_live_status_in(users):
if len(users) > 0:
log_sql(
BilibiliUserStatus.update(live_status=True).where(
BilibiliUserStatus.bilibili_user.in_(users)
)
).execute()
def get_group_with_following_users(gid):
for group in log_sql(
Group.select()
.where(Group.gid == gid)
.join(FollowLink, JOIN.LEFT_OUTER)
.join(BilibiliUser, JOIN.LEFT_OUTER)
):
return group
return None
def get_user(uid):
for user in log_sql(BilibiliUser.select().where(BilibiliUser.uid == uid)):
return user
return None
def add_user(uid, nickname, rid):
user, created = BilibiliUser.get_or_create(
uid=uid, defaults={"nickname": nickname, "rid": rid}
)
if created:
BilibiliUserStatus.create(
bilibili_user=user, newest_activity_id=0, live_status=False
)
else:
user.nickname = nickname
user.rid = rid
user.save()
return user
def add_link(group, user):
FollowLink.create(group=group, bilibili_user=user)
def remove_link(gid, uid):
log_sql(
FollowLink.delete().where(
(FollowLink.group == gid) & (FollowLink.bilibili_user == uid)
)
).execute()
def update_user_newest_activity_id(data: Dict[int, int]):
with DB.atomic():
for user, act_id in data.items():
BilibiliUserStatus.update(newest_activity_id=act_id).where(
BilibiliUserStatus.bilibili_user == user
).execute()
```
#### File: nonebot_plugin_bam/tests/test_h5_activity.py
```python
import os
import json
import pytest
def load_data(name):
with open(os.path.join("tests", "act_data", name), "rb") as f:
j = json.load(f)
return j["data"]["card"]
@pytest.fixture
def export():
import nonebot
from nonebot.adapters.cqhttp import Bot as CQHTTPBot
nonebot.init()
driver = nonebot.get_driver()
driver.register_adapter("cqhttp", CQHTTPBot)
nonebot.load_builtin_plugins()
nonebot.load_plugin("nonebot_plugin_bam")
return nonebot.require("nonebot_plugin_bam")
def test_h5(export):
act = export.Activity(load_data("h5.json"))
assert act.uid == 1926156228
assert act.username == "希亚娜Ciyana"
assert act.h5_title == "希亚娜Ciyana的直播日历"
assert (
act.h5_url
== "https://live.bilibili.com/p/html/live-app-calendar/index.html?is_live_webview=1&hybrid_set_header=2&share_source=bili&share_medium=web&bbid=492831DB-BC20-408E-9ADF-872343C28FA6143081infoc&ts=1614573029062#/home/search?ruid=1926156228"
)
print(act.display())
print(act.h5_share_card())
``` |
{
"source": "7sDream/qqbot-linger",
"score": 2
} |
#### File: qqbot-linger/qqbot_linger/main.py
```python
import nonebot
from nonebot.adapters.cqhttp import Bot as CQHTTPBot
def main():
nonebot.init(_env_file=".env")
driver = nonebot.get_driver()
driver.register_adapter("cqhttp", CQHTTPBot)
nonebot.load_builtin_plugins()
nonebot.load_plugin("nonebot_plugin_apscheduler")
nonebot.load_plugin("nonebot_plugin_bam")
nonebot.load_plugin("nonebot_plugin_7s_roll")
nonebot.run()
if __name__ == "__main__":
main()
``` |
{
"source": "7sDream/which_fonts_support",
"score": 3
} |
#### File: which_fonts_support/which_fonts_support/cli.py
```python
import argparse
import binascii
import collections
import re
import sys
import webbrowser
import wcwidth
from .main import available_fonts_for_codepoint
from .preview_server import FontPreviewServer
__RE_GET_NAME__ = re.compile(r'"\.?([^"]+)"')
__RE_UNICODE_HEX__ = re.compile(r'^U\+[0-9a-fA-F]{4,6}$')
def parser_arg():
from . import __version__
parser = argparse.ArgumentParser(
prog="which_fonts_support",
description='Find which fonts support specified character',
epilog='Github: https://github.com/7sDream/which_fonts_support',
)
parser.add_argument(
'char', default='',
help='the character, if you want to check character not in BMP, ' +
'use U+XXXX or U+XXXXXX format.'
)
parser.add_argument('--version', action='version', version=__version__)
parser.add_argument(
'-f', '--fc-list', type=str, default='fc-list', metavar='PATH',
help='provide custom fc-list executable file path',
)
parser.add_argument(
'-v', '--verbose', action='store_true',
help='show each style full name',
)
parser.add_argument(
'-p', '--preview', action='store_true',
help='show font preview for the char in browser',
)
return parser.parse_args()
def get_char_codepoint(c):
assert len(c) == 1
codepoint = ord(c)
return {
'decimal': codepoint,
'hex': hex(codepoint)[2:].rjust(6, '0'),
'utf8': binascii.hexlify(c.encode('utf8')).decode("ascii"),
}
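# Worked example (added for clarity): get_char_codepoint('中') returns
#     {'decimal': 20013, 'hex': '004e2d', 'utf8': 'e4b8ad'}
# since ord('中') == 0x4E2D and its UTF-8 encoding is the byte sequence E4 B8 AD.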
def cli():
args = parser_arg()
if __RE_UNICODE_HEX__.match(args.char):
args.char = chr(int(args.char[2:], 16))
if len(args.char) == 0:
args.char = input('Input one character: ')
if len(args.char) != 1:
sys.stderr.write('Please provide ONE character')
exit(1)
cp = get_char_codepoint(args.char)
codepoint = cp['decimal']
codepoint_hex_str = cp['hex']
codepoint_utf8_seq = cp['utf8']
fullname_to_family_map = {
fullname: family for family, fullname in available_fonts_for_codepoint(codepoint, args.fc_list)
}
family_to_fullname_list_map = collections.defaultdict(list)
for fullname, family in fullname_to_family_map.items():
family_to_fullname_list_map[family].append(fullname)
if len(fullname_to_family_map) == 0:
print("No fonts support this character")
exit(0)
family_style_counts = collections.Counter(fullname_to_family_map.values())
families = sorted(family_style_counts)
max_width = max(map(wcwidth.wcswidth, families)) if not args.verbose else 0
print(
f'Font(s) support the char [ {args.char} ]' +
f'({codepoint}, U+{codepoint_hex_str}, {codepoint_utf8_seq}):'
)
font_preview_server = FontPreviewServer(args.char)
for family in families:
style_count = family_style_counts[family]
print(
family,
' ' * (max_width - wcwidth.wcswidth(family))
if not args.verbose else '',
f' with {style_count} style{"s" if style_count > 1 else ""}'
if not args.verbose else '',
sep=''
)
if style_count > 1 and args.verbose:
for fullname in sorted(family_to_fullname_list_map[family]):
print(' ' * 4, fullname, sep='')
font_preview_server.add_font(family)
if args.preview:
font_preview_server.start()
print('-' * 80)
print("Opening your browser for preview...")
webbrowser.open("http://localhost:" + str(font_preview_server.port) + '/')
input("Press Enter when you finish preview...")
font_preview_server.stop()
if __name__ == '__main__':
cli()
```
#### File: which_fonts_support/which_fonts_support/main.py
```python
import collections
import functools
import subprocess
__all__ = ['available_fonts_for_codepoint']
def __make_lang_obj(items, langs):
obj = collections.defaultdict(list)
for lang, item in zip(langs, items):
obj[lang].append(item)
return obj
def __get_font_by_lang(items, langs, lang='en'):
item = items[0]
obj = __make_lang_obj(items, langs)
if lang in obj:
item = obj[lang][0]
return item
def available_fonts_for_codepoint(codepoint, fc_list_exec):
assert 0 < codepoint < 0x10FFFF
charset_arg = ':charset=' + hex(codepoint)
result = subprocess.run([
fc_list_exec,
'--format',
'%{family}\t%{familylang}\t%{fullname}\t%{fullnamelang}\n',
charset_arg],
stdout=subprocess.PIPE)
if result.returncode != 0:
raise RuntimeError(
'run fc-list failed, please check your environment\n')
descriptions = result.stdout.decode('utf-8', 'replace').split('\n')
for line in descriptions:
parts = list(map(functools.partial(str.split, sep=','), line.split('\t')))
if len(parts) == 4 and all(map(lambda p: len(p) > 0, parts)):
families, families_lang, fullnames, fullnames_lang = parts[0], parts[1], parts[2], parts[3]
if len(families) == len(families_lang) and len(fullnames) == len(fullnames_lang):
family = __get_font_by_lang(families, families_lang)
fullname = __get_font_by_lang(fullnames, fullnames_lang)
yield family.lstrip('.'), fullname.lstrip('.')
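# Usage sketch (added; assumes an `fc-list` binary is available on PATH):
#     for family, fullname in available_fonts_for_codepoint(ord('A'), 'fc-list'):
#         print(family, '->', fullname)
# Each yielded pair is the (family, full name) of one font face that covers U+0041.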
``` |
{
"source": "7shantanu7/stock-price-lstm",
"score": 3
} |
#### File: 7shantanu7/stock-price-lstm/about.py
```python
import streamlit as st
def write():
st.image("data/stock-market.webp", width=600)
st.title("NSE India Stock Prediction WebApp Using Machine Learning")
st.markdown("---")
st.subheader("A simple webapp to predict next day stock's High-Low-Open-Close price using machine learning")
st.markdown("---")
st.success("**Click the Company Info/Finance to begin**")
st.text("")
st.text("")
st.text("")
st.text("")
st.text("A group project made by:")
st.text("<NAME>, <NAME> & <NAME>")
st.write("[Link to github repository](https://github.com/7shantanu7/stock-price-lstm)")
``` |
{
"source": "7starsea/config",
"score": 3
} |
#### File: config/python/gzip_test.py
```python
import gzip
import shutil
from io import StringIO
import pandas as pd
import time
import os.path
import binascii
def write_gz_file(raw_filename, gz_filename):
df = pd.read_csv(raw_filename, index_col=False)
df.to_csv(gz_filename, index=False, compression='gzip')
write_gz_file('test.csv', 'test2.csv.gz')
def create_gz_file(raw_filename, gz_filename):
with gzip.open(gz_filename, 'wb', compresslevel=4) as f_out:
with open(raw_filename, 'rb') as f_in:
shutil.copyfileobj(f_in, f_out)
if not os.path.isfile('test.csv.gz'):
create_gz_file('test.csv', 'test.csv.gz')
def is_gz_file(test_gz_filename):
with open(test_gz_filename, 'rb') as test_f:
return binascii.hexlify(test_f.read(2)) == b'1f8b'
print('is_gz_file:', is_gz_file('test.csv.gz'))
def compare_read_time(raw_filename, gz_filename, count=200):
total_raw_time, total_gz_time = 0, 0
for i in range(count):
t1 = time.time()
df = pd.read_csv(raw_filename, index_col=None)
t2 = time.time()
total_raw_time += t2 - t1
for i in range(count):
t1 = time.time()
with gzip.open(gz_filename, 'rb', compresslevel=4) as f_in:
file_content = f_in.read()
data = StringIO(file_content.decode('utf8'))
df = pd.read_csv(data, index_col=None)
t2 = time.time()
total_gz_time += t2 - t1
print('RawReadTime: %.3f seconds GzipReadTime: %.3f seconds' % (total_raw_time, total_gz_time))
compare_read_time('test.csv', 'test.csv.gz')
``` |
{
"source": "7starsea/Prioritized-Experience-Replay",
"score": 2
} |
#### File: 7starsea/Prioritized-Experience-Replay/prioritized_replay.py
```python
import numpy as np
from SharkUtil import PrioritizedExperienceBase
class PrioritizedReplayBuffer(PrioritizedExperienceBase):
def __init__(self, transition, capacity, batch_size, alpha=0.6, eps=1e-10):
"""
:param transition: a namedtuple type,
example: transition=namedtuple("Transition", ("obs", "action", "reward", "next_obs", "done"))
:param capacity:
:param batch_size:
:param alpha:
:param eps:
"""
super(PrioritizedReplayBuffer, self).__init__(capacity, batch_size)
self.transition = transition
self._alpha = alpha
self._eps = eps
self.data = [None for i in range(capacity)]
self._indices = np.zeros(batch_size, dtype=np.int32)
self._priorities = np.zeros(batch_size, dtype=np.float64)
self._max_priority = 1.0
def append(self, item):
assert isinstance(item, self.transition)
index = self.add_c(self._max_priority) # # already with _max_priority**alpha, see update_priorities
self.data[index] = item
def sample(self, beta=.4):
if self.size() <= self.batch_size:
return None, None, None, None
self.sample_c(self._indices, self._priorities)
ind = self._indices >= 0
if len(ind) <= 0:
return None, None, None, None
indices, priorities = self._indices[ind], self._priorities[ind]
weights = np.array(priorities)
np.power(weights, -beta, out=weights) # # ignore product with N since we will normalize by max(weights)
weights /= (np.max(weights) + self._eps)
transitions = [self.data[idx] for idx in indices]
batch = self.transition(*zip(*transitions))
return batch, indices, weights, priorities
def update_priorities(self, indices, old_priorities, priorities):
"""
:param indices: np.1darray
:param old_priorities: np.1darray
:param priorities: np.1darray
:return:
"""
np.clip(priorities, self._eps, None, out=priorities)
np.power(priorities, self._alpha, out=priorities)
self._max_priority = max(self._max_priority, np.max(priorities))
old_priorities = old_priorities * self._decay_alpha
np.maximum(priorities, old_priorities, out=priorities)
self.update_priority_c(indices, priorities)
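# Usage sketch (added; requires the compiled SharkUtil extension, and the transition
# field values below are illustrative):
#     from collections import namedtuple
#     Transition = namedtuple("Transition", ("obs", "action", "reward", "next_obs", "done"))
#     buffer = PrioritizedReplayBuffer(Transition, capacity=2**16, batch_size=32)
#     buffer.append(Transition(obs, action, reward, next_obs, done))  # stored with max priority
#     batch, indices, weights, priorities = buffer.sample(beta=0.4)
#     if batch is not None:
#         ...  # compute new TD errors for the sampled batch, then:
#         buffer.update_priorities(indices, priorities, new_td_errors)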
``` |
{
"source": "7starsea/py_utilities",
"score": 2
} |
#### File: py_utilities/CppGenerator/CPPCSVGenerator.py
```python
from sys import platform as sys_platform
import os.path
from shutil import copyfile
import re
import datetime
import subprocess
import functools
from CPPGeneratorBase import CPPGeneratorBase, ensure_exists_dir, unsupported_key
internal_csv_reader_map = dict({
"int": ("get<int>", "is_int"),
"short": ("get<int>", "is_int"),
"enum": ("get<int>", "is_int"),
"unsigned int": ("get<int>", "is_int"),
"unsigned short": ("get<int>", "is_int"),
"long": ("get<long>", "is_int"),
"unsigned long": ("get<unsigned long>", "is_int"),
"long long": ("get<long long>", "is_int"),
"double": ("get<double>", "is_num"),
"long double": ("get<double>", "is_num"),
"float": ("get<double>", "is_num"),
"bool": ("get<int>", "is_int"),
"char": ("get<std::string>", "is_str"),
"str": ("get<std::string>", "is_str"),
"unsigned char": ("get<std::string>", "is_str"),
})
def internal_csv_reader(cpp_key, py_key, raw_type):
cpp = ''
if raw_type in ['char', 'unsigned char', 'str']:
if raw_type == 'unsigned char':
# cpp = 'data.%s = row["%s"].get<std::string>()[0];' % (cpp_key, py_key)
cpp = 'data.%s = row[ind].get<std::string>()[0];' % (cpp_key)
else:
# cpp = 'strncpy(data.%s, row["%s"].get<std::string>().c_str(), sizeof(data.%s)-1);' % (cpp_key, py_key, cpp_key)
cpp = 'strncpy(data.%s, row[ind].get<std::string>().c_str(), sizeof(data.%s)-1);' % (cpp_key, cpp_key)
check_str_size_fmt = """
if(row[ind].get().size() <= sizeof(data.%s)-1){
%s
}else{
std::cerr<<">>> String is too bigger for char %s[] with py_key %s."<<std::endl;
flag = false;
}
"""
cpp = check_str_size_fmt % (cpp_key, cpp, cpp_key, py_key)
elif raw_type in internal_csv_reader_map:
methods = internal_csv_reader_map[raw_type]
# cpp = 'data.%s = (%s)(row["%s"].%s());' % (cpp_key, raw_type, py_key, methods[0])
cpp = 'data.%s = (%s)(row[ind].%s());' % (cpp_key, raw_type, methods[0])
else:
print('Unknown csv reader type:', raw_type)
exit(-1)
return cpp
def _csv_writer_header(keys, struct_id, get_property=None):
source = "template<>\nstd::string CSVWriteRtnHead<%s>()\n{\n\tstd::string header;\n" % struct_id
num = len(keys)
for j in range(num):
key = keys[j]
title = key[1]
# title = key[1].title().replace("_", "").replace("-", "")
is_array = False
if get_property and callable(get_property):
if key[2] != 'char' and 1 == get_property(key[1], 'array'):
is_array = True
sep = 'CSV_SPLITTER_SYMBOL' if j != num - 1 else "'\\n'"
if is_array:
array_size = int(get_property(key[1], 'array_size'))
assert array_size > 0
for i in range(array_size):
if j == num - 1 and i == array_size - 1:
sep = "'\\n';"
source += "\theader.append(\"%s%s\");\theader.push_back(%s);\n" % (title, i + 1, sep)
else:
source += "\theader.append(\"%s\");\theader.push_back(%s);\n" % (title, sep)
source += "\n\treturn header;\n}\n"
source += "template<>\nvoid Internal_CSVWriteHead<%s>(std::ostream &ofs)" % struct_id
source += "\n{\n\tofs << CSVWriteRtnHead<%s>();" % struct_id
source += "\n}\n"
return source
def _csv_writer_content(keys, struct_id, get_property=None):
source = "template<>\nvoid Internal_CSVWriteContent<%s>( const %s & data, std::ostream &ofs)" % (
struct_id, struct_id)
source += "\n{\n\tofs"
num = len(keys)
for j in range(num):
key = keys[j]
is_array = False
if get_property and callable(get_property):
if key[2] != 'char' and 1 == get_property(key[1], 'array'):
is_array = True
sep = 'CSV_SPLITTER_SYMBOL\n\t\t' if j != num - 1 else "'\\n';"
if is_array:
array_size = int(get_property(key[1], 'array_size'))
assert array_size > 0
for i in range(array_size):
if j == num - 1 and i == array_size - 1:
sep = "'\\n';"
source += "<<data.%s[%d]<<%s" % (key[1], i, sep)
else:
source += "<<data.%s<<%s" % (key[1], sep)
source += "\n}\n"
csv_writer_cpp = """
template<>
void CSVWriter<%s>(const std::vector<%s> & vec_data, std::ostream & ofs){
for(auto it = vec_data.begin(); it != vec_data.end(); ++it){
Internal_CSVWriteContent<%s>(*it, ofs);
}
}
template<>
bool CSVWriter<%s>(const std::vector<%s> & vec_data, const std::string & csv_file){
std::ofstream ofs( csv_file.c_str(), std::ios::out | std::ios::trunc);
if(ofs.is_open()){
ofs.setf(std::ios_base::fixed);
Internal_CSVWriteHead<%s>(ofs);
CSVWriter<%s>(vec_data, ofs);
return true;
}
return false;
}
"""
source += csv_writer_cpp % tuple([struct_id] * 7)
return source
class CPPCsvWriterGenerator(CPPGeneratorBase):
def __init__(self, src_folder, dest_folder, datastructs, file_name):
CPPGeneratorBase.__init__(self, src_folder, dest_folder, file_name, search_keyid=False)
self.datastructs = datastructs
self.check_structs(datastructs)
# python_dir = (os.path.dirname(os.path.realpath(__file__)))
#
# csv_hpp = 'read_parameters_csv_base.hpp'
# dest = os.path.join(dest_folder, csv_hpp)
# os.path.isfile(dest) or copyfile(os.path.join(python_dir, csv_hpp), dest)
def _csv_reader(self, keys, struct_id):
cpp = """template<>
bool CSVReaderHelper<%s>(const csv::CSVRow & row, %s & data){
bool flag = true; int ind = -1;
"""
cpp %= (struct_id, struct_id)
tpl_fmt = """
[init_impl]
ind = row.find_column("%s");
if( ind >= 0 ){
if(row[ind].%s()){
[core_impl]
}else{
%s
}
}%s
"""
for key in keys:
tmpKey = key[1].replace(".", "_")
raw_type = key[2]
if raw_type not in internal_csv_reader_map:
unsupported_key(key[1], '_CsvReader', key[2], struct_id)
exit(-1)
# DisplayProperty : DataStructProperty
has_valid_type = 'std::cerr<<">>> Failed to resolve key: %s with type: %s in DataStruct: %s."<<std::endl;\n'
has_valid_type += '\t\t\tflag = false;'
has_valid_type %= (tmpKey, key[2], struct_id)
has_member = 'else{\n\t\tstd::cerr<<">>> Failed to find key: %s in DataStruct: %s."<<std::endl;\n\t\tflag = false;\n\t}'
has_member %= (tmpKey, struct_id)
methods = internal_csv_reader_map[raw_type]
# # read data
if key[2] not in ['char', 'str'] and 1 == self.get_property(struct_id, key[1], 'array'):
array_size = int(self.get_property(struct_id, key[1], 'array_size'))
assert array_size > 0
for i in range(array_size):
arrTmpKey = '%s%d' % (tmpKey, i + 1)
cpp_tpl_code = tpl_fmt % (arrTmpKey, methods[1], has_valid_type, has_member)
cpp_core_impl = internal_csv_reader('%s[%d]' % (key[1], i), arrTmpKey, raw_type)
cpp += cpp_tpl_code.replace('[core_impl]', cpp_core_impl).replace('[init_impl]', '')
else:
cpp_tpl_code = tpl_fmt % (tmpKey, methods[1], has_valid_type, has_member)
cpp_tpl_init = ''
cpp_core_impl = internal_csv_reader(key[1], tmpKey, raw_type)
if key[2] in ['int', 'short', 'unsigned short', 'unsigned int', 'long long',
'double', 'float', 'long double']:
cpp_tpl_init = 'data.%s = (%s)std::numeric_limits<%s>::max();' % (key[1], key[2], key[2])
elif key[2] in ['enum']:
cpp_tpl_init = 'data.%s = (%s)(0);' % (key[1], key[3])
elif key[2] in ["bool", "unsigned char"]:
cpp_tpl_init = 'data.%s = (%s)(0);' % (key[1], key[2])
cpp += cpp_tpl_code.replace('[core_impl]', cpp_core_impl).replace('[init_impl]', cpp_tpl_init)
cpp += "\treturn flag;\n}\n\n"
return cpp.replace('\t', ' ')
def csv_reader(self, csvfile_prefix, include_header=None):
hfile = """#ifndef CSVReader_AUTO_GENERATED_AT_%s_H
#define CSVReader_AUTO_GENERATED_AT_%s_H
#include <vector>
#include <string>
#include <fstream>
#include <cstring>
#include "csv_parser/csv_reader.hpp"
#include "%s"
template<typename DataStruct>
bool CSVReaderHelper(const csv::CSVRow &, DataStruct &){return false;}
template<typename DataStruct>
bool CSVReader2Vec(const char * fileName, std::vector<DataStruct> & vec_data ){
bool flag = true;
csv::CSVReader reader(fileName, csv::DEFAULT_CSV_STRICT);
csv::CSVRow row;
DataStruct data;
while (reader.read_row(row)) {
std::memset(&data, 0, sizeof(DataStruct));;
if( CSVReaderHelper<DataStruct>(row, data) ){
vec_data.push_back(data);
}else{
flag = false;
}
}
return flag;
};
"""
cpp = """#include "%s.h"
#include <iostream>
#include <limits>
"""
h_tpl = "template<>\nbool CSVReaderHelper<%s>(const csv::CSVRow & row, %s & data);\n\n"
if not isinstance(include_header, str):
include_header = self.datastruct_file
tod = datetime.datetime.today().strftime('%Y%m%dT%H%M%S')
hfile %= (tod, tod, include_header)
cpp %= csvfile_prefix
for struct_id in self.datastructs:
if struct_id in self.datastruct_property_dict:
cpp += self._csv_reader(self.datastruct_property_dict[struct_id], struct_id)
hfile += h_tpl % (struct_id, struct_id)
hfile += "\n#endif"
csv_reader_file = csvfile_prefix + ".cpp"
self.writeToFile(csv_reader_file, cpp)
csv_reader_file = csvfile_prefix + ".h"
self.writeToFile(csv_reader_file, hfile)
pass
def csv_writer(self, csvfile_prefix, include_header=None):
hfile = """#ifndef CSVWriter_AUTO_GENERATED_AT_%s_H
#define CSVWriter_AUTO_GENERATED_AT_%s_H
#include <vector>
#include <string>
#include <fstream>
#include <string>
#include "%s"
template<typename DataStruct>
std::string CSVWriteRtnHead(){ return ""; }
template<typename DataStruct>
void Internal_CSVWriteHead(std::ostream &ofs){}
template<typename DataStruct>
void Internal_CSVWriteContent(const DataStruct &, std::ostream &){}
template<typename DataStruct>
void CSVWriter(const std::vector<DataStruct> &, std::ostream &){}
template<typename DataStruct>
bool CSVWriter(const std::vector<DataStruct> &, const std::string &){return false;}
"""
h_tpl = """template<>
std::string CSVWriteRtnHead<%s>();
template<>
void Internal_CSVWriteHead<%s>(std::ostream &ofs);
template<>
void Internal_CSVWriteContent<%s>(const %s &, std::ostream &);
template<>
void CSVWriter<%s>(const std::vector<%s> &, std::ostream &);
template<>
bool CSVWriter<%s>(const std::vector<%s> &, const std::string &);
"""
cpp = """#include "%s.h"
#define CSV_SPLITTER_SYMBOL ','
"""
if not isinstance(include_header, str):
include_header = self.datastruct_file
tod = datetime.datetime.today().strftime('%Y%m%dT%H%M%S')
hfile %= (tod, tod, include_header)
cpp %= csvfile_prefix
for struct_id in self.datastructs:
if struct_id in self.datastruct_property_dict:
cpp += "/*\n * @brief csv_writer_for datastruct:%s\n */\n" % struct_id
fun = functools.partial(self.get_property, struct_id)
cpp += _csv_writer_header(self.datastruct_property_dict[struct_id], struct_id, fun)
cpp += _csv_writer_content(self.datastruct_property_dict[struct_id], struct_id, fun)
cpp += "\n\n"
hfile += "/*\n * @brief csv_writer_for datastruct:%s\n */\n" % struct_id
hfile += h_tpl % tuple([struct_id] * 8)
hfile += "\n\n"
else:
print('[Warning] Failed to parse struct_id %s from file %s' % (struct_id, self.datastruct_file))
hfile += "#endif"
self.writeToFile(csvfile_prefix + ".cpp", cpp)
self.writeToFile(csvfile_prefix + ".h", hfile)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='CSV Reader/Writer cpp Generator')
parser.add_argument('-i', '--include', help="include other header file instead of the parsing header file")
parser.add_argument('-l', '--lib', default='csv', help="library name")
parser.add_argument("-s", "--struct", default='',
help="Specify the datastruct names(separated by comma(,)) for createing csv reader/writer")
parser.add_argument('header', help="The cpp header file that need to parse.")
opt = parser.parse_args()
print(opt)
destination = opt.header
if not os.path.isfile(destination):
print ("The file: %s is not a valid file!" % destination)
exit(-1)
project_dstruct_file = os.path.basename(destination)
if not re.match(r'.{3,}\.h$', project_dstruct_file):
print ("A valid datastruct filename should at least contain three characters!")
exit(-1)
raw_destination = os.path.dirname(destination)
project_name = os.path.splitext(project_dstruct_file)[0]
destination = os.path.join(raw_destination, project_name)
ensure_exists_dir(destination)
src_destination = os.path.join(destination, opt.lib)
ensure_exists_dir(src_destination)
# -------------- folder structure has been setup ------------------------
datastruct_dict = []
if opt.struct:
datastruct_dict = opt.struct.split(',')
else:
with open(opt.header, "rb") as f:
t1 = f.readline().decode('utf8')
f.close()
t1 = t1.strip()
if not t1.startswith("//Configuration:"):
print ("You need to specify a configuration in the first line starting with //Configuration: for file:", opt.header)
exit(-1)
t1 = t1.replace("//Configuration:", "")
datastruct_dict = t1.split(",")
if len(datastruct_dict) < 1:
print ("There is no datastruct, please configure!")
exit(1)
cpp_generator = CPPCsvWriterGenerator(raw_destination, src_destination, datastruct_dict, project_dstruct_file)
cpp_generator.csv_writer("csv_writer", opt.include)
cpp_generator.csv_reader("csv_reader", opt.include)
cpp_generator.create_cmakelists(opt.lib)
```
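When `--struct` is not given, the `__main__` block above falls back to reading the struct list from a `//Configuration:` comment on the first line of the parsed header. The following is a minimal sketch of that convention; the header content and struct names are made up for illustration.
```python
# Hypothetical header content; only the first line matters to the generator.
header_text = (
    "//Configuration:FooConfig,BarConfig\n"
    "struct FooConfig { int a; };\n"
    "struct BarConfig { double b; };\n"
)

# Mirrors the parsing done in the __main__ block: take the first line,
# strip the marker, and split the remainder on commas.
first_line = header_text.splitlines()[0].strip()
assert first_line.startswith("//Configuration:")
struct_names = first_line.replace("//Configuration:", "").split(",")
print(struct_names)  # ['FooConfig', 'BarConfig']
```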
#### File: py_utilities/CppGenerator/CPPJsonGenerator.py
```python
from sys import platform as sys_platform
import os.path
import re
from shutil import copyfile
import subprocess
from CPPGeneratorBase import CPPGeneratorBase, ensure_exists_dir, unsupported_key
import datetime
internal_json_writer_map = dict({
"int": "Int",
"short": "Int",
"enum": "Int",
"unsigned int": "Uint",
"unsigned short": "Uint",
"long": "Int64",
"unsigned long": "Uint64",
"long long": "Int64",
"double": "Double",
"long double": "Double",
"float": "Double",
"bool": "Bool",
"char": "String",
"str": "String",
"unsigned char": "String",
})
internal_json_reader_map = dict({
"int": ("GetInt", "IsInt"),
"short": ("GetInt", "IsInt"),
"enum": ("GetInt", "IsInt"),
"unsigned int": ("GetUint", "IsUint"),
"unsigned short": ("GetUint", "IsUint"),
"long": ("GetInt64", "IsInt64"),
"unsigned long": ("GetUint64", "IsUint64"),
"long long": ("GetInt64", "IsInt64"),
"double": ("GetDouble", "IsNumber"),
"long double": ("GetDouble", "IsNumber"),
"float": ("GetFloat", "IsNumber"),
"bool": ("GetBool", "IsBool"),
"char": ("GetString", "IsString"),
"str": ("GetString", "IsString"),
"unsigned char": ("GetString", "IsString"),
})
def internal_json_reader(cpp_key, py_key, raw_type):
cpp = ''
if raw_type in ['char', 'unsigned char', 'str']:
if raw_type == 'unsigned char':
cpp = 'data.%s = obj["%s"].GetString()[0];' % (cpp_key, py_key)
else:
cpp = 'strncpy(data.%s, obj["%s"].GetString(), sizeof(data.%s)-1);' % (cpp_key, py_key, cpp_key)
check_str_size_fmt = """
if(obj["%s"].GetStringLength() <= sizeof(data.%s)-1){
%s
}else{
std::cerr<<">>> String is too bigger for char %s[] with py_key %s."<<std::endl;
flag = false;
}
"""
cpp = check_str_size_fmt % (py_key, cpp_key, cpp, cpp_key, py_key)
elif raw_type in internal_json_reader_map:
methods = internal_json_reader_map[raw_type]
cpp = 'data.%s = (%s)(obj["%s"].%s());' % (cpp_key, raw_type, py_key, methods[0])
else:
        print('Unknown json reader type:', raw_type)
exit(-1)
return cpp
class CPPJsonGenerator(CPPGeneratorBase):
def __init__(self, src_folder, dest_folder, datastructs, file_name, check_key_when_read):
CPPGeneratorBase.__init__(self, src_folder, dest_folder, file_name, search_keyid=True)
self.datastructs = datastructs
self.check_structs(datastructs, check_key_id=True)
python_dir = (os.path.dirname(os.path.realpath(__file__)))
json_hpp = 'read_parameters_json_base.hpp'
dest = os.path.join(dest_folder, json_hpp)
os.path.isfile(dest) or copyfile(os.path.join(python_dir, json_hpp), dest)
json_hpp = 'read_parameters_json_base.cpp'
dest = os.path.join(dest_folder, json_hpp)
os.path.isfile(dest) or copyfile(os.path.join(python_dir, json_hpp), dest)
json_hpp = 'save_parameters_json_base.hpp'
dest = os.path.join(dest_folder, json_hpp)
os.path.isfile(dest) or copyfile(os.path.join(python_dir, json_hpp), dest)
self.check_key_when_read = check_key_when_read
def _JsonParameterReader(self, keys, struct_id):
cpp = """template<>
bool ReadJsonParametersHelper<%s>(const rapidjson::Value::ConstObject & obj, %s & data){
bool flag = true;
"""
cpp %= (struct_id, struct_id)
tpl_fmt = """
[init_impl]
if(obj.HasMember( "%s" )){
if(obj["%s"].%s()){
[core_impl]
}else{
%s
}
}%s
"""
for key in keys:
tmpKey = key[1].replace(".", "_")
raw_type = key[2]
if raw_type not in internal_json_reader_map:
unsupported_key(key[1], '_JsonParameterReader', key[2], struct_id)
exit(-1)
# DisplayProperty : DataStructProperty
has_valid_type = 'std::cerr<<">>> Failed to resolve key: %s with type: %s in DataStruct: %s."<<std::endl;\n'
has_valid_type += '\t\t\tflag = false;'
has_valid_type %= (tmpKey, key[2], struct_id)
if self.check_key_when_read:
has_member = 'else{\n\t\tstd::cerr<<">>> Failed to find key: %s in DataStruct: %s."<<std::endl;\n\t\tflag = false;\n\t}'
has_member %= (tmpKey, struct_id)
else:
has_member = ''
methods = internal_json_reader_map[raw_type]
# # read data
if key[2] not in ['char', 'str'] and 1 == self.get_property(struct_id, key[1], 'array'):
array_size = int(self.get_property(struct_id, key[1], 'array_size'))
assert array_size > 0
cpp_tpl_code = tpl_fmt % (tmpKey, tmpKey, "IsArray", has_valid_type, has_member)
cpp_arr_tpl = """
const rapidjson::Value & arr = obj["%s"];
if(%d != (int) arr.Size()){
std::cerr<<">>> array_size of %s is invalid in DataStruct %s"<<std::endl;
flag = false;
}else{
for (int i = 0; i < %d; ++i){
data.%s[i] = (%s)(arr[i].%s());
}
}
"""
cpp_arr_tpl %= (tmpKey, array_size, key[1], struct_id, array_size, key[1], raw_type, methods[0])
cpp += cpp_tpl_code.replace('[core_impl]', cpp_arr_tpl).replace('[init_impl]', '')
else:
cpp_tpl_code = tpl_fmt % (tmpKey, tmpKey, methods[1], has_valid_type, has_member)
cpp_tpl_init = ''
cpp_core_impl = internal_json_reader(key[1], tmpKey, raw_type)
if key[2] in ['int', 'short', 'unsigned short', 'unsigned int', 'long long',
'double', 'float', 'long double']:
cpp_tpl_init = 'data.%s = (%s)std::numeric_limits<%s>::max();' % (key[1], key[2], key[2])
elif key[2] in ['enum']:
cpp_tpl_init = 'data.%s = (%s)(0);' % (key[1], key[3])
elif key[2] in ["bool", "unsigned char"]:
cpp_tpl_init = 'data.%s = (%s)(0);' % (key[1], key[2])
if raw_type in ['char', 'str'] and key[1] == self.key_id_map[struct_id]:
cpp += cpp_core_impl
else:
cpp += cpp_tpl_code.replace('[core_impl]', cpp_core_impl).replace('[init_impl]', cpp_tpl_init)
cpp += "\treturn flag;\n}\n\n"
return cpp.replace('\t', ' ')
def JsonParameterReader(self, json_reader_file_prefix, include_header=None):
cpp = """#include "%s.h"
#include <iostream>
#include <limits>
"""
hfile = """#ifndef READ_JSONFILE_USING_RAPIDJSON_%s
#define READ_JSONFILE_USING_RAPIDJSON_%s
#include "read_parameters_json_base.hpp"
#include "%s"
"""
h_tpl = """template<>
bool ReadJsonParametersHelper<%s>(const rapidjson::Value::ConstObject & obj, %s & data);\n
"""
if not isinstance(include_header, str):
include_header = self.datastruct_file
tod = datetime.datetime.today().strftime('%Y%m%dT%H%M%S')
hfile %= (tod, tod, include_header)
cpp %= json_reader_file_prefix
for struct_id in self.datastructs:
if struct_id in self.datastruct_property_dict:
cpp += self._JsonParameterReader(self.datastruct_property_dict[struct_id], struct_id)
hfile += h_tpl % (struct_id, struct_id)
hfile += "\n#endif"
json_reader_file = json_reader_file_prefix + ".cpp"
self.writeToFile(json_reader_file, cpp)
json_reader_file = json_reader_file_prefix + ".h"
self.writeToFile(json_reader_file, hfile)
def _JsonParameterWriter(self, keys, struct_id):
cpp = """template<>
void SaveJsonParametersHelper<%s>(rapidjson::PrettyWriter<rapidjson::FileWriteStream> & writer, const %s & data){
char unsigned_char_helper[2]; unsigned_char_helper[0]=0; unsigned_char_helper[1]=0;
(void)unsigned_char_helper;
writer.String(data.%s);
writer.StartObject();
"""
cpp %= (struct_id, struct_id, self.key_id_map[struct_id])
unsigned_char_fmt = """
unsigned_char_helper[0] = (char)data.%s;
writer.String(unsigned_char_helper);
"""
for key in keys:
tmpKey = key[1].replace(".", "_")
cpp += '\twriter.String("%s");\n' % tmpKey
raw_type = key[2]
if raw_type not in internal_json_writer_map:
unsupported_key(key[1], '_JsonParameterWriter', key[2], struct_id)
exit(-1)
method = internal_json_writer_map[raw_type]
# # array
if key[2] != 'char' and 1 == self.get_property(struct_id, key[1], 'array'):
cpp += "\twriter.StartArray();\n"
array_size = int(self.get_property(struct_id, key[1], 'array_size'))
assert array_size > 0
for i in range(array_size):
cpp += "\t\twriter.%s(data.%s[%d]);\n" % (method, key[1], i)
cpp += "\twriter.EndArray();\n"
else:
# DisplayProperty : DataStructProperty
if key[2] == "unsigned char":
cpp += unsigned_char_fmt % (key[1], tmpKey)
else:
cpp += "\twriter.%s(data.%s);\n" % (method, key[1])
cpp += "\twriter.EndObject();\n}\n\n"
return cpp.replace('\t', ' ')
def JsonParameterWriter(self, json_writer_file_prefix, include_header=None):
cpp = """#include "%s.h"
"""
tod = datetime.datetime.today().strftime('%Y%m%dT%H%M%S')
hfile = """#ifndef SAVE_JSONFILE_USING_RAPIDJSON_%s
#define SAVE_JSONFILE_USING_RAPIDJSON_%s
#include "save_parameters_json_base.hpp"
#include "%s"
"""
h_tpl = """template<>
void SaveJsonParametersHelper<%s>(rapidjson::PrettyWriter<rapidjson::FileWriteStream> & writer, const %s &);\n
"""
if not isinstance(include_header, str):
include_header = self.datastruct_file
hfile %= (tod, tod, include_header)
cpp %= json_writer_file_prefix
for struct_id in self.datastructs:
if struct_id in self.datastruct_property_dict:
cpp += self._JsonParameterWriter(self.datastruct_property_dict[struct_id], struct_id)
hfile += h_tpl % (struct_id, struct_id)
hfile += "\n#endif"
json_writer_file = json_writer_file_prefix + ".cpp"
self.writeToFile(json_writer_file, cpp)
json_writer_file = json_writer_file_prefix + ".h"
self.writeToFile(json_writer_file, hfile)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Json Reader/Writer cpp Generator')
parser.add_argument('-c', '--check', action='store_true',
help="check every member variable in DataStruct")
    parser.add_argument('-i', '--include', help="include another header file instead of the parsed header file")
    parser.add_argument('-l', '--lib', default='rwjson', help="library name")
    parser.add_argument("-s", "--struct", default='',
                        help="Specify the datastruct names (separated by commas) for creating the json reader/writer")
    parser.add_argument('header', help="The cpp header file to parse.")
opt = parser.parse_args()
print(opt)
destination = opt.header
if not os.path.isfile(destination):
print("The file: %s is not a valid file!" % destination)
exit(-1)
project_struct_file = os.path.basename(destination)
if not re.match(r'.{3,}\.h$', project_struct_file):
print("A valid datastruct file at least contains three characters!")
exit(-1)
raw_destination = os.path.dirname(destination)
project_name = os.path.splitext(project_struct_file)[0]
destination = os.path.join(raw_destination, project_name)
ensure_exists_dir(destination)
src_destination = os.path.join(destination, opt.lib)
ensure_exists_dir(src_destination)
# -------------- folder structure has been setup ------------------------
datastruct_dict = []
if opt.struct:
datastruct_dict = opt.struct.split(',')
else:
with open(opt.header, "rb") as f:
t1 = f.readline().decode('utf8')
f.close()
t1 = t1.strip()
if not t1.startswith("//Configuration:"):
print("You need to specify a configuration in the first line starting with //Configuration: for file:",
opt.header)
exit(-1)
t1 = t1.replace("//Configuration:", "")
datastruct_dict = t1.split(",")
if len(datastruct_dict) < 1:
print("There is no datastruct, please configure!")
exit(1)
cpp_generator = CPPJsonGenerator(raw_destination, src_destination, datastruct_dict, project_struct_file, opt.check)
cpp_generator.JsonParameterReader("read_json_parameters", opt.include)
cpp_generator.JsonParameterWriter("save_json_parameters", opt.include)
cpp_generator.create_cmakelists(opt.lib)
``` |
{
"source": "7starsea/shark",
"score": 3
} |
#### File: example/env/catch_ball_env.py
```python
import numpy as np
import PIL
import torch
import torchvision.transforms as TF
from types import SimpleNamespace
from gym import spaces, Env
from .SharkExampleEnv import CatchBallSimulate
# internal_screen_h, internal_screen_w = 80, 140
class CatchBallEnvBase(Env):
metadata = {'render.modes': ['human']}
def __init__(self, screen=(80, 120), num_balls=10, action_penalty=.02, waiting=0, is_continuous=False):
self.game = CatchBallSimulate(screen, ball=(6, 6), ball_speed=(5, 2), bar=(5, 15),
action_penalty=action_penalty,
waiting=waiting, is_continuous=is_continuous)
self.num_balls = num_balls
self.index = 0
self.screen = np.zeros(self.game.screen_size + (3,), dtype=np.uint8)
h, w = screen
self.observation_space = spaces.Space(shape=(h, w, 1), dtype=np.uint8)
if is_continuous:
low, high = self.game.action_range
self.action_space = spaces.Box(low=low, high=high, shape=(1,))
else:
self.action_space = spaces.Discrete(n=3)
self.spec = SimpleNamespace(id='CatchBall_%d' % num_balls)
self.ax = None
self.fig = None
def set_action_range(self, low, high):
        assert self.game.is_continuous, "Only continuous action supports set_action_range."
self.game.action_range = low, high
self.action_space = spaces.Box(low=low, high=high, shape=(1,))
def seed(self, seed):
self.game.seed(seed)
def close(self):
self.ax.clear()
def render(self, mode='human'):
import matplotlib.pyplot as plt
if not self.ax:
self.fig = plt.figure(1, figsize=(8, 10))
self.ax = plt.subplot(111)
self.ax.clear()
self.screen.fill(0)
self.game.get_display(self.screen)
self.ax.imshow(self.screen)
plt.pause(0.02)
def reset(self):
self.game.reset()
self.index = 0
state = np.zeros_like(self.screen)
self.game.get_display(state)
return state
def step(self, action):
# # in discrete-setting, action should be 0, 1, 2
is_game_over, reward = self.game.step(int(action))
if is_game_over:
if self.num_balls > 0:
self.index += 1
is_game_over = self.index >= self.num_balls
else:
is_game_over = reward < .5
self.game.reset_ball()
next_state = np.zeros_like(self.screen)
self.game.get_display(next_state)
return next_state, reward, is_game_over, {}
class CatchBallEnv(CatchBallEnvBase):
def __init__(self, *args, **kwargs):
super(CatchBallEnv, self).__init__(*args, **kwargs)
self.kwargs = dict(dtype=torch.float32)
h, w, _ = self.observation_space.shape
h, w = int(h / 2), int(w / 2)
self.observation_space = spaces.Space(shape=(1, h, w), dtype=np.float32)
self.composer = TF.Compose([TF.Grayscale(), TF.Resize((h, w)), TF.ToTensor()])
def _preprocess(self, image):
x = PIL.Image.fromarray(image)
image = self.composer(x)
image = image.to(**self.kwargs)
return image
def step(self, action):
if isinstance(action, np.ndarray):
action = int(action.reshape(-1)[0])
state, r, done, info = super().step(action)
return self._preprocess(state), r, done, info
def reset(self):
state = super().reset()
return self._preprocess(state)
```
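As a usage illustration, a random-action episode over `CatchBallEnvBase` could look like the sketch below. It assumes the compiled `SharkExampleEnv` extension is available and that the module is importable from the path in the file header; both are assumptions, not part of the original file.
```python
# Minimal random-agent loop for CatchBallEnvBase (illustrative only).
import numpy as np
from example.env.catch_ball_env import CatchBallEnvBase  # import path is an assumption

env = CatchBallEnvBase(screen=(80, 120), num_balls=3)
env.seed(0)
state = env.reset()
done, total_reward = False, 0.0
while not done:
    action = np.random.randint(0, 3)            # discrete actions are 0, 1, 2
    state, reward, done, _ = env.step(action)
    total_reward += reward
print("episode reward:", total_reward)
```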
#### File: shark/replay/replay_base.py
```python
from collections import deque
from abc import ABC, abstractmethod
class ReplayBufferBase(ABC):
def __init__(self, transition, capacity, batch_size):
# data instance, see shark/policy/(base_dpg.py/DPGTransition, dqn.py/DQNTransition)
self.transition = transition
self.capacity = capacity
self.batch_size = batch_size
self.memory = deque(maxlen=capacity)
self.position = 0
def append(self, item):
"""Saves a transition."""
assert isinstance(item, self.transition)
self.memory.append(item)
idx = self.position
self.position = (self.position + 1) % self.capacity
return idx
def __len__(self):
return len(self.memory)
@abstractmethod
def sample(self, **kwargs):
pass
class PrioritizedReplayBufferBase(ReplayBufferBase):
def __init__(self, transition, capacity, batch_size, alpha=0.6, eps=1e-10, decay_alpha=.5):
super(PrioritizedReplayBufferBase, self).__init__(transition, capacity, batch_size)
        assert alpha >= 0 and eps > 0, "alpha >= 0 and eps > 0"
self._alpha = alpha
self._eps = eps
self._decay_alpha = decay_alpha
@abstractmethod
def sample(self, **kwargs):
pass
@abstractmethod
def update_priorities(self, indices, old_priorities, priorities):
pass
```
#### File: shark/replay/simple_replay.py
```python
import random
from .replay_base import ReplayBufferBase
class SimpleReplayBuffer(ReplayBufferBase):
def __init__(self, transition, capacity, batch_size):
super(SimpleReplayBuffer, self).__init__(transition, capacity, batch_size)
def sample(self):
res = random.sample(self.memory, self.batch_size)
batch = self.transition(*zip(*res))
return batch
```
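A short usage sketch for `SimpleReplayBuffer`: `append` only checks that each item is an instance of the transition type passed to the constructor, so a plain `namedtuple` stands in here for the real DQN/DPG transition types mentioned in `replay_base.py`. The import path is an assumption.
```python
# Illustrative only: a namedtuple stands in for the real transition classes.
import random
from collections import namedtuple
from shark.replay.simple_replay import SimpleReplayBuffer  # import path is an assumption

Transition = namedtuple("Transition", ["state", "action", "reward", "next_state", "done"])
buf = SimpleReplayBuffer(Transition, capacity=1000, batch_size=32)

for step in range(100):
    buf.append(Transition(step, random.randint(0, 2), 1.0, step + 1, False))

batch = buf.sample()          # a Transition whose fields are 32-element tuples
print(len(batch.state), batch.reward[:3])
```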
#### File: shark/test/atari_net.py
```python
import torch
import torch.nn as nn
import torch.nn.functional as F
class SharedDiscreteNet(nn.Module):
def __init__(self, in_channel, h, w, outputs):
super(SharedDiscreteNet, self).__init__()
self.conv1 = nn.Conv2d(in_channel, 64, kernel_size=8, stride=4)
self.bn1 = nn.BatchNorm2d(64)
self.conv2 = nn.Conv2d(64, 32, kernel_size=4, stride=2)
self.bn2 = nn.BatchNorm2d(32)
self.conv3 = nn.Conv2d(32, 16, kernel_size=3, stride=1)
self.bn3 = nn.BatchNorm2d(16)
self.layer = nn.Sequential(
self.conv1, self.bn1, nn.ReLU(),
self.conv2,
# self.bn2,
nn.ReLU(),
self.conv3, self.bn3, nn.ReLU()
)
# Number of Linear input connections depends on output of conv2d layers
# and therefore the input image size, so compute it.
def conv2d_size_out(size, kernel_size=5, stride=2):
return int((size - (kernel_size - 1) - 1) / stride + 1)
convw = (conv2d_size_out(conv2d_size_out(w, kernel_size=8, stride=4), kernel_size=4))
convh = (conv2d_size_out(conv2d_size_out(h, kernel_size=8, stride=4), kernel_size=4))
convw = conv2d_size_out(convw, kernel_size=3, stride=1)
convh = conv2d_size_out(convh, kernel_size=3, stride=1)
linear_input_size = convw * convh * 16
output_size = 256
self.advantage = nn.Sequential(
nn.Linear(linear_input_size, output_size),
nn.ReLU(),
nn.Linear(output_size, outputs)
)
self.value = nn.Sequential(
nn.Linear(linear_input_size, output_size),
nn.ReLU(),
nn.Linear(output_size, 1)
)
def forward(self, x):
x = x.to(dtype=torch.float32) / 255.0
x = self.layer(x)
x = x.view(x.size(0), -1)
advantage = self.advantage(x)
value = self.value(x)
return value + advantage - advantage.mean(1, keepdim=True)
def pi(self, x, softmax_dim=1):
x = x.to(dtype=torch.float32) / 255.0
x = self.layer(x)
x = x.view(x.size(0), -1)
x = self.advantage(x)
prob = F.softmax(x, dim=softmax_dim)
return prob
def v(self, x):
x = x.to(dtype=torch.float32) / 255.0
x = self.layer(x)
x = x.view(x.size(0), -1)
v = self.value(x)
return v.squeeze(1)
class Actor(nn.Module):
def __init__(self, state_dim, action_dim, max_action):
super(Actor, self).__init__()
self.l1 = nn.Linear(state_dim, 100)
self.l2 = nn.Linear(100, 30)
self.l3 = nn.Linear(30, action_dim)
self._max_action = max_action
def forward(self, x):
x = F.relu(self.l1(x))
x = F.relu(self.l2(x))
x = self._max_action * torch.tanh(self.l3(x))
return x
class ActorProb(nn.Module):
def __init__(self, state_dim, action_dim, max_action, log_std_min=-20, log_std_max=2):
super(ActorProb, self).__init__()
self.l1 = nn.Linear(state_dim, 100)
self.l2 = nn.Linear(100, 30)
self.mu = nn.Linear(30, action_dim)
self.sigma = nn.Linear(30, action_dim)
self._max_action = max_action
self._log_std_min, self._log_std_max = log_std_min, log_std_max
def forward(self, x):
x = F.relu(self.l1(x))
x = F.relu(self.l2(x))
mu = self._max_action * torch.tanh(self.mu(x))
sigma = torch.exp(torch.clamp(self.sigma(x), self._log_std_min, self._log_std_max))
return mu, sigma
class Critic(nn.Module):
def __init__(self, state_dim, action_dim):
super(Critic, self).__init__()
self.l1 = nn.Linear(state_dim + action_dim, 100)
self.l2 = nn.Linear(100, 30)
self.l3 = nn.Linear(30, 1)
def forward(self, x, u):
x = F.relu(self.l1(torch.cat([x, u], 1)))
x = F.relu(self.l2(x))
x = self.l3(x)
return x.squeeze(1)
``` |
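A quick shape sanity check for the dueling `SharedDiscreteNet` above. The 1x40x60 input size is an arbitrary choice that keeps every conv layer's output positive, and the import path simply mirrors the file header; both are assumptions.
```python
# Illustrative forward pass through the dueling network.
import torch
from shark.test.atari_net import SharedDiscreteNet  # import path is an assumption

net = SharedDiscreteNet(in_channel=1, h=40, w=60, outputs=3)
net.eval()                                   # use BatchNorm running statistics
with torch.no_grad():
    frames = torch.randint(0, 256, (4, 1, 40, 60), dtype=torch.uint8)
    q_values = net(frames)                   # value + advantage - mean(advantage)
    probs = net.pi(frames)                   # softmax over the advantage head
print(q_values.shape, probs.sum(dim=1))      # torch.Size([4, 3]) and rows summing to ~1
```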
{
"source": "7thFox/dragon",
"score": 3
} |
#### File: 7thFox/dragon/lexing_test.py
```python
import unittest
from enum import Enum
from lexing import LexingBuffer, Token, Span, EOF, _BUFFER_SIZE
from io import TextIOWrapper, BytesIO
class TestTags(Enum):
Test = 0
class LexingBufferTests(unittest.TestCase):
def test_basic(self):
length = 1000
l = LexingBuffer(TextIOWrapper(BytesIO(("D" * length).encode())))
for n in range(0, length + 1):
if n == length:
self.assertEqual(l.read(), EOF)
else:
self.assertEqual(l.read(), "D")
emitted = l.emit(TestTags.Test, None)
self.assertEqual(emitted, Token(
TestTags.Test, None, "D", Span(n, n + 1, 0, n)))
def test_linebreaks(self):
length = 1000
l = LexingBuffer(TextIOWrapper(BytesIO(("DDDD\n" * length).encode())))
for n in range(0, length*5 + 1):
if n == length*5:
self.assertEqual(l.read(), EOF)
elif n % 5 == 4:
self.assertEqual(l.read(), "\n")
emitted = l.emit(TestTags.Test, None)
self.assertEqual(emitted, Token(
TestTags.Test, None, "\n", Span(n, n + 1, n // 5, n % 5)))
else:
self.assertEqual(l.read(), "D")
emitted = l.emit(TestTags.Test, None)
self.assertEqual(emitted, Token(
TestTags.Test, None, "D", Span(n, n + 1, n // 5, n % 5)))
def test_value_func(self):
length = 1000
l = LexingBuffer(TextIOWrapper(BytesIO(("1" * length).encode())))
for n in range(0, length + 1):
if n == length:
self.assertEqual(l.read(), EOF)
else:
self.assertEqual(l.read(), "1")
emitted = l.emit(TestTags.Test, lambda x: int(x))
self.assertEqual(emitted, Token(
TestTags.Test, 1, "1", Span(n, n + 1, 0, n)))
self.assertIsInstance(emitted.value, int)
def test_longer_token(self):
length = 333*3
l = LexingBuffer(TextIOWrapper(BytesIO(("D" * length).encode())))
for n in range(0, length + 1):
if n == length:
self.assertEqual(l.read(), EOF)
else:
self.assertEqual(l.read(), "D")
if n % 3 == 2:
emitted = l.emit(TestTags.Test, None)
self.assertEqual(emitted, Token(
TestTags.Test, None, "DDD", Span(n - 2, n + 1, 0, n - 2)))
def test_long_token_throws(self):
length = 10000
l = LexingBuffer(TextIOWrapper(BytesIO(("D" * length).encode())))
for _ in range(0, _BUFFER_SIZE - 1): # consume all but 1 char of buffer
l.read()
l.emit(TestTags.Test, None)
for _ in range(0, _BUFFER_SIZE): # should still consume safely up to BUFFER_SIZE
l.read()
def unsafe_reads():
l.read() # N + 1: could technically read this if we wanted, but that would add a lot of work/complexity to the buffer
l.read() # N + 2: will be unable to safely read next buffer without overriding a buffer that's still in use
self.assertRaises(Exception, unsafe_reads)
def test_retract(self):
length = 1000
l = LexingBuffer(TextIOWrapper(BytesIO(("ABCDE" * length).encode())))
for _ in range(0, length):
# read 5, retract 4, emit
self.assertEqual(l.read(), "A")
self.assertEqual(l.read(), "B")
self.assertEqual(l.read(), "C")
self.assertEqual(l.read(), "D")
self.assertEqual(l.read(), "E")
l.retract(4)
self.assertEqual(l.emit(TestTags.Test, None).text, "A")
# re-read remaining 4, retract 1, emit
self.assertEqual(l.read(), "B")
self.assertEqual(l.read(), "C")
self.assertEqual(l.read(), "D")
self.assertEqual(l.read(), "E")
l.retract()
self.assertEqual(l.emit(TestTags.Test, None).text, "BCD")
# read final, emit
self.assertEqual(l.read(), "E")
self.assertEqual(l.emit(TestTags.Test, None).text, "E")
self.assertEqual(l.read(), EOF)
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "7th-mod-korea/when_they_cry_converter",
"score": 3
} |
#### File: 7th-mod-korea/when_they_cry_converter/converter.py
```python
import translation_extractor
from folder_converter import *
import text_converter
import openpyxl
import urllib.request
import urllib.parse
import settings
import json
import onscript
import ui
def validate_folder(folder_path: str):
result = True
for file_name in os.listdir(folder_path):
if not file_name.endswith('.txt'):
continue
print(f"validating {file_name}")
file_path = os.path.join(folder_path, file_name)
with open(file_path, 'r', encoding='utf-8') as f:
text_converter = TextConverter(f.read())
result &= text_converter.validate_text()
if not result:
print('validation error found!!!')
sys.exit(-1)
else:
print('validation success.')
# deprecated
def combine_xlsx(original_folder, translated_folder):
for file_name in os.listdir(translated_folder):
if not file_name.endswith('.xlsx'):
continue
file_name = file_name.replace('kor', '')
original_path = os.path.join(original_folder, file_name)
if not os.path.exists(original_path):
continue
original_wb = openpyxl.load_workbook(original_path)
original_ws = original_wb.active
translated_wb = openpyxl.load_workbook(os.path.join(translated_folder, file_name))
translated_ws = translated_wb.active
for index, row in enumerate(translated_ws.iter_rows(), 1):
if len(row) >= 3:
original_ws.cell(row=index, column=4).value = row[2].value
original_wb.save(original_path)
original_wb.close()
# deprecated
def insert_actor_column(old_folder, actor_folder):
for file_name in os.listdir(old_folder):
if not file_name.endswith('.xlsx'):
continue
old_path = os.path.join(old_folder, file_name)
old_wb = openpyxl.load_workbook(old_path)
old_ws = old_wb.active
actor_wb = openpyxl.load_workbook(os.path.join(actor_folder, file_name))
actor_ws = actor_wb.active
for index, row in enumerate(actor_ws.iter_rows(), 1):
if old_ws.cell(row=index, column=2).value != row[2].value:
print(f"{file_name} has different row at {index} {old_ws.cell(row=index, column=2).value} != {row[2].value}")
break
old_ws.insert_cols(2)
for index, row in enumerate(actor_ws.iter_rows(), 1):
old_ws.cell(row=index, column=2).value = row[1].value
old_wb.save(old_path)
old_wb.close()
def remove_key_column(folder_path):
for file_name in os.listdir(folder_path):
file_path = os.path.join(folder_path, file_name)
wb = openpyxl.load_workbook(file_path)
ws = wb.active
cell = ws['A1']
        if not cell.value or not cell.value.startswith(os.path.splitext(file_name)[0]):
print(f'{file_name} is empty or has no key column', file=sys.stderr)
continue
ws.delete_cols(1)
wb.save(file_path)
wb.close()
print(f'{file_name} removed')
def compare_line_count(left_folder, right_folder):
for file_name in os.listdir(left_folder):
left_file_path = os.path.join(left_folder, file_name)
right_file_path = os.path.join(right_folder, file_name)
left_wb = openpyxl.load_workbook(left_file_path)
left_ws = left_wb.active
right_wb = openpyxl.load_workbook(right_file_path)
right_ws = right_wb.active
if left_ws.max_row != right_ws.max_row:
            print(f'{file_name} has a different line count.')
def insert_new_rows(old_folder, new_folder):
for file_name in os.listdir(old_folder):
print(f'Start processing {file_name}')
old_file_path = os.path.join(old_folder, file_name)
new_file_path = os.path.join(new_folder, file_name)
old_wb = openpyxl.load_workbook(old_file_path)
old_ws = old_wb.active
new_wb = openpyxl.load_workbook(new_file_path)
new_ws = new_wb.active
old_cursor = 1
for new_cursor, row in enumerate(new_ws.iter_rows(), 1):
same = True
for col_index, r in enumerate(row, 1):
same &= old_ws.cell(row=old_cursor, column=col_index).value == r.value
first_cell = new_ws.cell(row=new_cursor, column=1)
if same:
pass
elif first_cell.value and first_cell.value == text_converter.play_bgm_method:
old_ws.insert_rows(old_cursor)
for col_index, r in enumerate(row, 1):
old_ws.cell(row=old_cursor, column=col_index).value = r.value
print(f'Added BGM line')
#
# elif ignore_row(first_cell) or first_cell.value and first_cell.value == text_converter.play_bgm_method:
# old_ws.insert_rows(old_cursor)
# for col_index, r in enumerate(row, 1):
# old_ws.cell(row=old_cursor, column=col_index).value = r.value
# print(f'New line added')
# elif new_ws.cell(new_cursor, 2).value == old_ws.cell(old_cursor, 2).value:
# old_ws.cell(old_cursor, 1).value = new_ws.cell(new_cursor, 1).value
# old_ws.cell(old_cursor, 3).value = new_ws.cell(new_cursor, 3).value
# else:
# print(f"Unknown cell missmatch occured at line {old_cursor}!!!")
# values = []
# for r in row:
# values.append(str(r.value))
# print(f"New row : ({','.join(values)})")
# values.clear()
# for rows in old_ws.iter_rows(old_cursor, old_cursor):
# for r in rows:
# values.append(str(r.value))
# print(f"Old row : ({','.join(values)})")
# return
old_cursor += 1
old_wb.save(old_file_path)
old_wb.close()
def get_actors(folder, filter_folder):
actors = set()
for chapter in os.listdir(folder):
if filter_folder and chapter != filter_folder:
continue
chapter_path = os.path.join(folder, chapter)
if not os.path.isdir(chapter_path):
continue
for file in os.listdir(chapter_path):
if not file.endswith('.txt'):
continue
file_path = os.path.join(folder, chapter, file)
with open(file_path, 'r', encoding='utf-8') as f:
conv = TextConverter(f.read())
actors.update(conv.extract_actor())
wb = openpyxl.Workbook()
ws = wb.active
actors_list = list(actors)
actors_list.sort()
for actor in actors_list:
ws.append(actor)
wb.save('actor_raw.xlsx')
wb.close()
def insert_papago(folder):
TRANSLATION_FILE = 'translation.json'
translation_dict = {}
if os.path.exists(TRANSLATION_FILE):
with open(TRANSLATION_FILE, 'r', encoding='utf-8') as fd:
translation_dict = json.load(fd)
for file in os.listdir(folder):
if not file.endswith('.xlsx'):
continue
has_update = False
try:
print(f"Processing {file}")
file_path = os.path.join(folder, file)
wb = openpyxl.load_workbook(file_path)
ws = wb.active
if not has_header(ws):
ws.insert_rows(1)
ws.insert_cols(5)
for col, header in enumerate(HEADER_ROW, start=1):
ws.cell(1, col, header)
has_update = True
for row_index, row in enumerate(ws.rows, start=1):
if ignore_row(row[0]):
continue
source = row[1].value
source = source and source.strip(' ')
if not source:
continue
if source in translation_dict:
old_translation_cell = ws.cell(row_index, 5)
if old_translation_cell.value != translation_dict[source]:
old_translation_cell.value = translation_dict[source]
has_update = True
continue
encText = urllib.parse.quote(source)
data = f"source=ja&target=ko&text={encText}"
url = "https://naveropenapi.apigw.ntruss.com/nmt/v1/translation"
request = urllib.request.Request(url)
request.add_header('X-NCP-APIGW-API-KEY-ID', settings.CLIENT_ID)
request.add_header('X-NCP-APIGW-API-KEY', settings.CLIENT_SECRET)
response = urllib.request.urlopen(request, data=data.encode('utf-8'))
rscode = response.getcode()
if rscode != 200:
                    raise Exception(f"Translation request failed with HTTP status {rscode}")
response_raw = response.read().decode('utf-8')
response_json = json.loads(response_raw)
translated_text = response_json['message']['result']['translatedText']
translation_dict[source] = translated_text
ws.cell(row_index, 5).value = translated_text
print(f"Translated {source} to {translated_text}")
has_update = True
if has_update:
wb.save(file_path)
wb.close()
finally:
if has_update:
with open(TRANSLATION_FILE, 'w', encoding='utf-8') as fd:
json.dump(translation_dict, fd, ensure_ascii=False)
def unique_characters(folder_path):
characters = set()
for chapter in os.listdir(folder_path):
chapter_path = os.path.join(folder_path, chapter)
if not os.path.isdir(chapter_path):
continue
for file in os.listdir(chapter_path):
if not file.endswith('.xlsx'):
continue
wb = openpyxl.load_workbook(os.path.join(folder_path, chapter, file))
ws = wb.active
for index, row in enumerate(ws.rows, 1):
korean = ws.cell(index, 4).value
if not korean:
continue
for c in korean:
characters.add(c)
chars_list = list(characters)
chars_list.sort()
with open('characters.txt', 'w', encoding='utf-8') as fd:
fd.write(''.join(chars_list))
with open('old_characters.txt', 'r', encoding='utf-8') as fd:
text = fd.read()
old_characters = set()
for c in text:
old_characters.add(c)
half_characters = set()
for c in characters:
half_characters.add(c.translate(text_converter.full_to_half_ascii))
difference = half_characters.difference(old_characters)
difference = list(difference)
difference.sort()
with open('difference.txt', 'w', encoding='utf-8') as fd:
fd.write(''.join(difference))
def find_old_format(folder_path):
for chapter in os.listdir(folder_path):
chapter_path = os.path.join(folder_path, chapter)
if not os.path.isdir(chapter_path):
continue
for file in os.listdir(chapter_path):
if not file.endswith('.xlsx'):
continue
wb = openpyxl.load_workbook(os.path.join(folder_path, chapter, file))
ws = wb.active
if not has_header(ws):
print(f"{os.path.join(chapter, file)} has deprecated format")
def convert(argv):
if len(argv) == 1:
print('Starting GUI...')
ui.initializeUI()
elif argv[1] == 'help':
print(
"""\
usage: converter.py [commands]
available commands:
export_text <Update folder>
- export text parameter to xlsx file from the script
replace_text <Update folder> <translation folder>
- Replace english text to translated text
extract_text <file_path>
- extract text line from the onscript file and export to xlsx
combine_xlsx <original_folder> <translated_folder>
insert_actor_column <old_folder> <actor_folder>
"""
)
elif argv[1] == 'export_text':
converter = FolderConverter(argv[2])
converter.export_text('xlsx')
elif argv[1] == 'replace_text':
converter = FolderConverter(argv[2])
converter.replace_text(argv[3], argv[4] if len(argv) >= 5 else 'actor.xlsx')
elif argv[1] == 'replace_text_with_bgm':
converter = FolderConverter(argv[2])
converter.replace_text(argv[3], argv[4] if len(argv) >= 5 else 'actor.xlsx', use_bgm=True)
elif argv[1] == 'validate_folder':
validate_folder(argv[2])
elif argv[1] == 'extract_text':
extractor = translation_extractor.TextExtractor()
extractor.extract_text(argv[2])
elif argv[1] == 'combine_xlsx':
combine_xlsx(argv[2], argv[3])
elif argv[1] == 'insert_actor_column':
insert_actor_column(argv[2], argv[3])
elif argv[1] == 'remove_key_column':
remove_key_column(argv[2])
elif argv[1] == 'compare_line_count':
compare_line_count(argv[2], argv[3])
elif argv[1] == 'insert_new_rows':
insert_new_rows(argv[2], argv[3])
elif argv[1] == 'get_actors':
get_actors(argv[2], argv[3] if len(argv) >= 4 else None)
elif argv[1] == 'insert_papago':
insert_papago(argv[2])
elif argv[1] == 'unique_characters':
unique_characters(argv[2])
elif argv[1] == 'find_old_format':
find_old_format(argv[2])
elif argv[1] == 'export_text_onscript':
onscript.FolderParser(argv[2]).export_text(mode='onscript')
elif argv[1] == 'export_text_steam':
onscript.FolderParser(argv[2]).export_text()
elif argv[1] == 'replace_text_steam':
onscript.FolderParser(argv[2]).replace_text(argv[3])
else:
print("invalid command", file=sys.stderr)
exit(-1)
if __name__ == '__main__':
convert(sys.argv)
```
#### File: 7th-mod-korea/when_they_cry_converter/drive.py
```python
from __future__ import print_function
import pickle
import os.path
import sys
import hashlib
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from apiclient import errors
from googleapiclient.http import MediaIoBaseDownload, MediaFileUpload
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/drive']
TRANSLATION_FOLDER_ID = '1Q8BO4CB6tGk-hpYsPOq_Tc6FVPYqP5JA'
#TRANSLATION_FOLDER_ID = '1W7Yxvl3WRzZ1fDbuim8EPediY0qrxWBe'
def get_files(service, folderId, files, filter_folder_name):
page_token = None
while True:
try:
param = {}
if page_token:
param['pageToken'] = page_token
children = service.files().list(
fields='files(id, name, mimeType, md5Checksum)',
q=f"'{folderId}' in parents and trashed = false",
**param).execute()
for child in children['files']:
mimeType = child['mimeType']
if mimeType == 'application/vnd.google-apps.folder':
sub_folder_name = child['name']
print(f"searching {sub_folder_name}")
if filter_folder_name and sub_folder_name != filter_folder_name:
continue
files[sub_folder_name] = {}
get_files(service, child['id'], files[sub_folder_name], None)
# xlsx
elif mimeType == 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet':
present_files = files.get('.', [])
present_files.append(child)
files['.'] = present_files
elif mimeType == 'text/plain':
pass
else:
print(f"unexpected mimeType {mimeType} found, {child['name']}", file=sys.stderr)
page_token = children.get('nextPageToken')
if not page_token:
break
except errors.HttpError as error:
            print(f'An error occurred: {error}')
break
def get_creds():
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
with open('token.pickle', 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'credentials.json', SCOPES)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open('token.pickle', 'wb') as token:
pickle.dump(creds, token)
return creds
def download_folder(drive_service, tree, folder_path):
downloaders = []
for folder_name, contents in tree.items():
parent_folder = os.path.normpath(os.path.join(folder_path, folder_name))
if folder_name != '.':
downloaders.extend(download_folder(drive_service, contents, parent_folder))
continue
for file in contents:
if not os.path.exists(parent_folder):
os.mkdir(parent_folder)
local_file_path = os.path.join(parent_folder, file['name'])
if os.path.exists(local_file_path):
with open(local_file_path, 'rb') as local_file_fd:
local_md5 = hashlib.md5(local_file_fd.read()).hexdigest()
if local_md5 == file['md5Checksum']:
continue
print(f"Downloading {file['name']} at {local_file_path}")
request = drive_service.files().get_media(fileId=file['id'])
fd = open(local_file_path, 'wb')
downloaders.append((MediaIoBaseDownload(fd, request), fd))
return downloaders
def upload_folder(drive_service, tree, folder_path):
for folder_name, contents in tree.items():
parent_folder = os.path.normpath(os.path.join(folder_path, folder_name))
if folder_name != '.':
upload_folder(drive_service, contents, parent_folder)
continue
for file in contents:
local_file_path = os.path.join(parent_folder, file['name'])
if not os.path.exists(local_file_path):
print(f"{local_file_path} not exist")
continue
with open(local_file_path, 'rb') as local_file_fd:
local_md5 = hashlib.md5(local_file_fd.read()).hexdigest()
if local_md5 == file['md5Checksum']:
continue
print(f"Uploading {local_file_path}")
file = drive_service.files().update(fileId=file['id'],
media_body=MediaFileUpload(local_file_path)
).execute()
def download_drive(local_folder, filter_folder_name=None):
creds = get_creds()
drive_service = build('drive', 'v3', credentials=creds)
root = {}
get_files(drive_service, TRANSLATION_FOLDER_ID, root, filter_folder_name)
downloaders = download_folder(drive_service, root, local_folder)
while downloaders:
for item in downloaders[:10]:
down, fd = item
try:
status, done = down.next_chunk()
except errors.HttpError:
print(f"Failed to downloading {fd.name}")
raise
if done:
fd.close()
downloaders.remove(item)
def upload_drive(local_folder, filter_folder_name=None):
creds = get_creds()
drive_service = build('drive', 'v3', credentials=creds)
root = {}
get_files(drive_service, TRANSLATION_FOLDER_ID, root, filter_folder_name)
upload_folder(drive_service, root, local_folder)
if __name__ == '__main__':
if sys.argv[1] == 'download':
download_drive(f"{os.path.pardir}{os.path.sep}Drive", sys.argv[2] if len(sys.argv) >= 3 else None)
elif sys.argv[1] == 'upload':
upload_drive(f"{os.path.pardir}{os.path.sep}Drive", sys.argv[2] if len(sys.argv) >= 3 else None)
```
#### File: 7th-mod-korea/when_they_cry_converter/text_converter.py
```python
import sys
import re
output_pattern = re.compile(r"""
( # method front part [0]
\s*OutputLine
\s*\(
\s*
)
([^,]*) # first parameter (actor) [1]
(\s*,\s*) # comma [2]
(.*) # second parameter (text) [3]
([ \t\x0B\f\r]*,\n?[ \t\x0B\f\r]*) # comma [4]
([^,\n]*) # third parameter (actor-alt) [5]
(\s*,\s*) # comma [6]
(.*) # fourth parameter (text-alt) [7]
(\s*,\s*) # comma [8]
(.*) # fifth parameter (show option) [9]
( # last part [10]
\s*\)
\s*;
)
""", re.VERBOSE | re.MULTILINE)
script_method = 'ModCallScriptSection'
script_pattern = re.compile(rf"""
{script_method}
\(
"(.*)"
,
"(.*)"
\);
""", re.VERBOSE | re.MULTILINE)
dialog_pattern = re.compile(r'void\ dialog\d+\s*\(\)', re.VERBOSE | re.MULTILINE)
play_bgm_method = 'PlayBGM'
play_bgm_pattern = re.compile(rf"""
(
{play_bgm_method}
\(\s*
) # [11]
([^,]*) # BGM channel [12]
(\s*,\s*)
([^,]*) # BGM name [14]
(\s*,\s*)
([^,]*)
(\s*,\s*)
([^,]*)
(\s*\);) # [19]
""", re.VERBOSE | re.MULTILINE)
fade_bgm_method = 'FadeOutBGM'
fade_bgm_pattern = re.compile(rf"""
{fade_bgm_method}
\(\s*
([^,]*)
\s*,\s*
([^,]*)
\s*,\s*
([^,]*)
\s*\);
""", re.VERBOSE | re.MULTILINE)
parse_pattern = re.compile(rf"""(?:
{output_pattern.pattern}|
{script_pattern.pattern}|
{dialog_pattern.pattern}|
{play_bgm_pattern.pattern}|
{fade_bgm_pattern.pattern})
""", re.VERBOSE | re.MULTILINE)
repl_pattern = re.compile(rf"""(?:
{output_pattern.pattern}|
{play_bgm_pattern.pattern})
""", re.VERBOSE | re.MULTILINE)
actor_pattern = re.compile(r'<color=[^>]*>([^<]*)</color>')
remove_quotation_mark_pattern = re.compile(r'"(.*)"')
font_size_pattern = re.compile(r'\"<size=-?\d*>(.*)\"')
full_to_half_ascii = dict((i + 0xFEE0, i) for i in range(0x21, 0x7F))
custom_map = {
ord('~'): '~',
ord('―'): '-',
ord('ー'): '-',
ord('…'): '...',
ord('궃'): '궂',
ord('줫'): '줬',
ord('졋'): '졌',
ord('됬'): '됐',
}
def strip_quotation_mark(line: str):
m = remove_quotation_mark_pattern.match(line)
if not m:
print(line)
raise ValueError
return m.groups()[0]
class OutputLine:
def __init__(self, match_obj):
self.match_obj = match_obj
self.groups = list(match_obj.groups())
@property
def param1(self):
return self.groups[1]
@param1.setter
def param1(self, param):
self.groups[1] = param
@property
def param2(self):
return self.groups[3]
@param2.setter
def param2(self, param):
self.groups[3] = param
@property
def param3(self):
return self.groups[5]
@param3.setter
def param3(self, param):
self.groups[5] = param
@property
def param4(self):
return self.groups[7]
@param4.setter
def param4(self, param):
self.groups[7] = param
@property
def param5(self):
return self.groups[9]
@param5.setter
def param5(self, param):
self.groups[9] = param
@property
def text(self):
return ''.join(self.groups[:11])
def is_ignore_line(self):
# ignore font size command
m1 = font_size_pattern.match(self.param2)
if m1 and not m1.group(1):
return True
m2 = font_size_pattern.match(self.param4)
if m2 and not m2.group(1):
return True
return False
def is_actor_line(self):
return self.param2 == 'NULL' and self.param4 == 'NULL'
def get_actor_text(self, param):
return strip_quotation_mark(actor_pattern.sub(r'\1', param)) if param != 'NULL' else None
def get_actor1(self):
return self.get_actor_text(self.param1)
def get_actor2(self):
return self.get_actor_text(self.param3)
def get_actors(self, param):
actors = []
for match in actor_pattern.finditer(param):
actors.append(match.group(1))
return actors
class TextConverter:
def __init__(self, text):
self.text = text
self.translation = {}
self.index = 0
self.last_translated_text:str = None
self.use_bgm = False
def extract_text(self):
sentences = []
last_actor = None
for match in parse_pattern.finditer(self.text):
if match.group().startswith(script_method):
sentences.append((script_method, match.group(12), match.group(13)))
elif match.group().startswith('void'):
sentences.append((match.group(),))
elif match.group().startswith(play_bgm_method):
sentences.append((play_bgm_method, strip_quotation_mark(match.group(17))))
elif match.group().startswith(fade_bgm_method):
pass
else:
line = OutputLine(match)
if line.is_actor_line():
last_actor = line.get_actor2()
continue
if line.is_ignore_line():
continue
sentences.append((last_actor, strip_quotation_mark(line.param2), strip_quotation_mark(line.param4)))
last_actor = None
return sentences
def repl_replace_text(self, match_obj) -> str:
if match_obj.group().startswith(play_bgm_method):
groups = list(match_obj.groups())
key = f"{self.index}_{play_bgm_method}"
try:
groups[14] = f"\"{self.translation[key]}\""
except KeyError:
# print("BGM not found...(ignore if xlsx is old version)")
return match_obj.group()
self.index += 1
if self.use_bgm:
return ''.join(groups[11:20])
else:
return match_obj.group()
line = OutputLine(match_obj)
if line.is_ignore_line():
return line.text
if line.is_actor_line():
keys = line.get_actors(line.param1)
line.param3 = line.param1
for key in keys:
line.param3 = line.param3.replace(key, self.translation[key])
line.param3 = line.param3.translate(full_to_half_ascii)
return line.text
# replace english text to translation text based on japanese text
try:
clean_param = strip_quotation_mark(line.param2)
if not clean_param:
clean_param = str(None)
half_param = clean_param.translate(full_to_half_ascii).translate(custom_map)
key = f"{self.index}_{half_param}"
self.index += 1
translated_text = self.translation[key]
if translated_text:
if len(translated_text.strip()) > 1:
translated_text = f"{translated_text.strip()} "
translated_text = translated_text.translate(full_to_half_ascii)
translated_text = translated_text.translate(custom_map)
translated_text = translated_text.replace('&', ' & ')
except KeyError:
print(f'Found text inconsistency between txt and xlsx, row at xlsx is {self.index+1}')
print(f"txt : {key}")
for k in self.translation.keys():
if str(k).startswith(str(self.index-1)):
print(f"xlsx: {k}")
break
raise
line.param4 = f'\"{translated_text}\"'
return line.text
def replace_text(self, translation: {}, use_bgm=False):
self.translation = translation
self.index = 0
self.last_translated_text = None
self.use_bgm = use_bgm
return repl_pattern.sub(self.repl_replace_text, self.text)
def validate_text(self):
result = True
for method_match in output_pattern.finditer(self.text):
line = OutputLine(method_match)
if not line.is_actor_line():
try:
param2 = strip_quotation_mark(line.param2)
param4 = strip_quotation_mark(line.param4)
index = -1
while True:
index = param4.find('\\', index + 1)
if index == -1:
break
if param4[index + 1] != '"' and param4[index + 1] != 'n':
raise Exception
index = -1
while True:
index = param4.find('"', index + 1)
if index == -1:
break
if param4[index - 1] != '\\':
raise Exception
except Exception:
print(f"\nValidation error!!\n{line.text}")
result = False
else:
pass
return result
def extract_actor(self):
actors = set()
for method_match in output_pattern.finditer(self.text):
line = OutputLine(method_match)
if not line.is_actor_line():
continue
jp_actors = line.get_actors(line.param1)
en_actors = line.get_actors(line.param3)
longest = max(len(jp_actors), len(en_actors))
for index in range(0, longest):
actors.add((jp_actors[index] if len(jp_actors) > index else '',
en_actors[index] if len(en_actors) > index else ''))
return actors
``` |
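To make the small helpers at the top of this module concrete, the sketch below runs `actor_pattern`, `strip_quotation_mark` and `full_to_half_ascii` on hand-written sample strings. It assumes the module is importable as `text_converter`, as `converter.py` does; the sample strings are invented, not taken from the game scripts.
```python
# Illustrative only.
from text_converter import actor_pattern, strip_quotation_mark, full_to_half_ascii

raw_actor = '"<color=#f0c05a>レナ</color>"'
name = strip_quotation_mark(actor_pattern.sub(r"\1", raw_actor))
print(name)                                    # レナ

# Full-width ASCII is normalised to its half-width form before comparison.
print("ABC!?".translate(full_to_half_ascii))  # ABC!?
```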